id
stringlengths 24
57
| type
stringclasses 1
value | granularity
stringclasses 4
values | content
stringlengths 8.08k
87.1k
| metadata
dict |
|---|---|---|---|---|
module_4703496128884488171
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: euclid
Module: crates/euclid/src/frontend/ast
Files: 2
</path>
<module>
// File: crates/euclid/src/frontend/ast/lowering.rs
//! Analysis for the Lowering logic in ast
//!
//!Certain functions that can be used to perform the complete lowering of ast to dir.
//!This includes lowering of enums, numbers, strings as well as Comparison logics.
use std::str::FromStr;
use crate::{
dssa::types::{AnalysisError, AnalysisErrorType},
enums::CollectVariants,
frontend::{
ast,
dir::{self, enums as dir_enums, EuclidDirFilter},
},
types::{self, DataType},
};
/// lowers the provided key (enum variant) & value to the respective DirValue
///
/// For example
/// ```notrust
/// CardType = Visa
/// ```
///
/// This serves the purpose where we have the DirKey as an explicit Enum type and value as one
/// of the members of the same Enum.
/// So particularly it lowers a predefined Enum from DirKey to an Enum of DirValue.
macro_rules! lower_enum {
    ($key:ident, $value:ident) => {
        match $value {
            // Single variant: parse the string into the like-named DirValue
            // enum; an unknown variant reports the full list of expected
            // variants in the error.
            ast::ValueType::EnumVariant(ev) => Ok(vec![dir::DirValue::$key(
                dir_enums::$key::from_str(&ev).map_err(|_| AnalysisErrorType::InvalidVariant {
                    key: dir::DirKeyKind::$key.to_string(),
                    got: ev,
                    expected: dir_enums::$key::variants(),
                })?,
            )]),
            // Array of variants: lower each element; the first parse failure
            // short-circuits via `collect` over `Result`.
            ast::ValueType::EnumVariantArray(eva) => eva
                .into_iter()
                .map(|ev| {
                    Ok(dir::DirValue::$key(
                        dir_enums::$key::from_str(&ev).map_err(|_| {
                            AnalysisErrorType::InvalidVariant {
                                key: dir::DirKeyKind::$key.to_string(),
                                got: ev,
                                expected: dir_enums::$key::variants(),
                            }
                        })?,
                    ))
                })
                .collect(),
            // Any non-enum value is a type error for an enum-typed key.
            _ => Err(AnalysisErrorType::InvalidType {
                key: dir::DirKeyKind::$key.to_string(),
                expected: DataType::EnumVariant,
                got: $value.get_type(),
            }),
        }
    };
}
/// lowers the provided key for a numerical value
///
/// For example
/// ```notrust
/// payment_amount = 17052001
/// ```
/// This is for the cases in which there are numerical values involved and they are lowered
/// accordingly on basis of the supplied key, currently payment_amount is the only key having this
/// use case
macro_rules! lower_number {
    ($key:ident, $value:ident, $comp:ident) => {
        match $value {
            // A single number: the surrounding comparison operator ($comp)
            // becomes the refinement of the NumValue.
            ast::ValueType::Number(num) => Ok(vec![dir::DirValue::$key(types::NumValue {
                number: num,
                refinement: $comp.into(),
            })]),
            // An array of numbers: each element gets its own NumValue with a
            // clone of the shared comparison refinement.
            ast::ValueType::NumberArray(na) => na
                .into_iter()
                .map(|num| {
                    Ok(dir::DirValue::$key(types::NumValue {
                        number: num,
                        refinement: $comp.clone().into(),
                    }))
                })
                .collect(),
            // An array of (operator, number) pairs: each entry carries its own
            // comparison type, which overrides $comp.
            ast::ValueType::NumberComparisonArray(nca) => nca
                .into_iter()
                .map(|nc| {
                    Ok(dir::DirValue::$key(types::NumValue {
                        number: nc.number,
                        refinement: nc.comparison_type.into(),
                    }))
                })
                .collect(),
            // Any non-numeric value is a type error for a number-typed key.
            _ => Err(AnalysisErrorType::InvalidType {
                key: dir::DirKeyKind::$key.to_string(),
                expected: DataType::Number,
                got: $value.get_type(),
            }),
        }
    };
}
/// lowers the provided key & value to the respective DirValue
///
/// For example
/// ```notrust
/// card_bin = "123456"
/// ```
///
/// This serves the purpose where we have the DirKey as Card_bin and value as an arbitrary string
/// So particularly it lowers an arbitrary value to a predefined key.
///
/// An optional validation closure (`Fn(&String) -> Result<(), AnalysisErrorType>`)
/// may be supplied; it runs on the string before the value is accepted.
macro_rules! lower_str {
    ($key:ident, $value:ident $(, $validation_closure:expr)?) => {
        match $value {
            ast::ValueType::StrValue(st) => {
                // Run the validation closure (if any) first; `?` propagates
                // its error before a DirValue is built.
                $($validation_closure(&st)?;)?
                Ok(vec![dir::DirValue::$key(types::StrValue { value: st })])
            }
            // Any non-string value is a type error for a string-typed key.
            _ => Err(AnalysisErrorType::InvalidType {
                key: dir::DirKeyKind::$key.to_string(),
                expected: DataType::StrValue,
                got: $value.get_type(),
            }),
        }
    };
}
/// lowers the provided key & metadata value (a key/value string pair) to the
/// respective DirValue
///
/// Accepts only `ast::ValueType::MetadataVariant`; every other value type is
/// rejected as an InvalidType error for the key.
macro_rules! lower_metadata {
    ($key:ident, $value:ident) => {
        match $value {
            ast::ValueType::MetadataVariant(md) => {
                Ok(vec![dir::DirValue::$key(types::MetadataValue {
                    key: md.key,
                    value: md.value,
                })])
            }
            _ => Err(AnalysisErrorType::InvalidType {
                key: dir::DirKeyKind::$key.to_string(),
                expected: DataType::MetadataValue,
                got: $value.get_type(),
            }),
        }
    };
}
/// lowers the comparison operators for different subtle value types present
/// by throwing required errors for comparisons that can't be performed for a certain value type
/// for example
/// can't have greater/less than operations on enum types
fn lower_comparison_inner<O: EuclidDirFilter>(
    comp: ast::Comparison,
) -> Result<Vec<dir::DirValue>, AnalysisErrorType> {
    // Resolve the left-hand side into a known DirKeyKind; an unknown key is
    // reported with the original string.
    let key_enum = dir::DirKeyKind::from_str(comp.lhs.as_str())
        .map_err(|_| AnalysisErrorType::InvalidKey(comp.lhs.clone()))?;
    // The backend output type decides which keys are allowed in programs.
    if !O::is_key_allowed(&key_enum) {
        return Err(AnalysisErrorType::InvalidKey(key_enum.to_string()));
    }
    // Ordering operators (<, >, <=, >=) are only meaningful on plain numbers.
    // Reject them up-front for enum variants and for any array-valued
    // right-hand side (this folds the four previously duplicated match arms
    // into one guard with an explicit early return).
    let is_ordering_comparison = matches!(
        comp.comparison,
        ast::ComparisonType::LessThan
            | ast::ComparisonType::GreaterThan
            | ast::ComparisonType::GreaterThanEqual
            | ast::ComparisonType::LessThanEqual
    );
    if is_ordering_comparison {
        let invalid_value_type = match &comp.value {
            ast::ValueType::EnumVariant(_) | ast::ValueType::EnumVariantArray(_) => {
                Some(DataType::EnumVariant)
            }
            ast::ValueType::NumberArray(_) | ast::ValueType::NumberComparisonArray(_) => {
                Some(DataType::Number)
            }
            _ => None,
        };
        if let Some(value_type) = invalid_value_type {
            return Err(AnalysisErrorType::InvalidComparison {
                operator: comp.comparison.clone(),
                value_type,
            });
        }
    }
    let value = comp.value;
    let comparison = comp.comparison;
    // Dispatch on the key kind: enum-typed keys go through lower_enum,
    // numeric keys through lower_number (carrying the comparison as the
    // refinement), string keys through lower_str, and metadata through
    // lower_metadata. Connector is explicitly not lowerable here.
    match key_enum {
        dir::DirKeyKind::PaymentMethod => lower_enum!(PaymentMethod, value),
        dir::DirKeyKind::CardType => lower_enum!(CardType, value),
        dir::DirKeyKind::CardNetwork => lower_enum!(CardNetwork, value),
        dir::DirKeyKind::PayLaterType => lower_enum!(PayLaterType, value),
        dir::DirKeyKind::WalletType => lower_enum!(WalletType, value),
        dir::DirKeyKind::BankDebitType => lower_enum!(BankDebitType, value),
        dir::DirKeyKind::BankRedirectType => lower_enum!(BankRedirectType, value),
        dir::DirKeyKind::CryptoType => lower_enum!(CryptoType, value),
        dir::DirKeyKind::PaymentType => lower_enum!(PaymentType, value),
        dir::DirKeyKind::MandateType => lower_enum!(MandateType, value),
        dir::DirKeyKind::MandateAcceptanceType => lower_enum!(MandateAcceptanceType, value),
        dir::DirKeyKind::RewardType => lower_enum!(RewardType, value),
        dir::DirKeyKind::PaymentCurrency => lower_enum!(PaymentCurrency, value),
        dir::DirKeyKind::AuthenticationType => lower_enum!(AuthenticationType, value),
        dir::DirKeyKind::CaptureMethod => lower_enum!(CaptureMethod, value),
        dir::DirKeyKind::BusinessCountry => lower_enum!(BusinessCountry, value),
        dir::DirKeyKind::BillingCountry => lower_enum!(BillingCountry, value),
        dir::DirKeyKind::SetupFutureUsage => lower_enum!(SetupFutureUsage, value),
        dir::DirKeyKind::UpiType => lower_enum!(UpiType, value),
        dir::DirKeyKind::OpenBankingType => lower_enum!(OpenBankingType, value),
        dir::DirKeyKind::VoucherType => lower_enum!(VoucherType, value),
        dir::DirKeyKind::GiftCardType => lower_enum!(GiftCardType, value),
        dir::DirKeyKind::BankTransferType => lower_enum!(BankTransferType, value),
        dir::DirKeyKind::CardRedirectType => lower_enum!(CardRedirectType, value),
        dir::DirKeyKind::MobilePaymentType => lower_enum!(MobilePaymentType, value),
        dir::DirKeyKind::RealTimePaymentType => lower_enum!(RealTimePaymentType, value),
        dir::DirKeyKind::CardBin => {
            // A card BIN must be exactly six ASCII digits.
            let validation_closure = |st: &String| -> Result<(), AnalysisErrorType> {
                if st.len() == 6 && st.chars().all(|x| x.is_ascii_digit()) {
                    Ok(())
                } else {
                    Err(AnalysisErrorType::InvalidValue {
                        key: dir::DirKeyKind::CardBin,
                        value: st.clone(),
                        message: Some("Expected 6 digits".to_string()),
                    })
                }
            };
            lower_str!(CardBin, value, validation_closure)
        }
        dir::DirKeyKind::BusinessLabel => lower_str!(BusinessLabel, value),
        dir::DirKeyKind::MetaData => lower_metadata!(MetaData, value),
        dir::DirKeyKind::PaymentAmount => lower_number!(PaymentAmount, value, comparison),
        // Connector is not a user-comparable key in this context.
        dir::DirKeyKind::Connector => Err(AnalysisErrorType::InvalidKey(
            dir::DirKeyKind::Connector.to_string(),
        )),
        dir::DirKeyKind::IssuerName => lower_str!(IssuerName, value),
        dir::DirKeyKind::IssuerCountry => lower_enum!(IssuerCountry, value),
        dir::DirKeyKind::CustomerDevicePlatform => lower_enum!(CustomerDevicePlatform, value),
        dir::DirKeyKind::CustomerDeviceType => lower_enum!(CustomerDeviceType, value),
        dir::DirKeyKind::CustomerDeviceDisplaySize => lower_enum!(CustomerDeviceDisplaySize, value),
        dir::DirKeyKind::AcquirerCountry => lower_enum!(AcquirerCountry, value),
        dir::DirKeyKind::AcquirerFraudRate => lower_number!(AcquirerFraudRate, value, comparison),
    }
}
/// returns all the comparison values by matching them appropriately to ComparisonTypes and in turn
/// calls the lower_comparison_inner function
fn lower_comparison<O: EuclidDirFilter>(
    comp: ast::Comparison,
) -> Result<dir::DirComparison, AnalysisError> {
    // Keep a copy of the metadata: it is attached both to any lowering error
    // and to the successfully lowered comparison.
    let metadata = comp.metadata.clone();
    // Only `/=` (NotEqual) lowers to a negative conjunction; every other
    // operator becomes a positive disjunction over the lowered values.
    let logic = if matches!(comp.comparison, ast::ComparisonType::NotEqual) {
        dir::DirComparisonLogic::NegativeConjunction
    } else {
        dir::DirComparisonLogic::PositiveDisjunction
    };
    let values = lower_comparison_inner::<O>(comp).map_err(|error_type| AnalysisError {
        error_type,
        metadata: metadata.clone(),
    })?;
    Ok(dir::DirComparison {
        values,
        logic,
        metadata,
    })
}
/// lowers the if statement accordingly with a condition and following nested if statements (if
/// present)
fn lower_if_statement<O: EuclidDirFilter>(
    stmt: ast::IfStatement,
) -> Result<dir::DirIfStatement, AnalysisError> {
    // Lower every comparison in the condition; the first failure aborts.
    let condition = stmt
        .condition
        .into_iter()
        .map(lower_comparison::<O>)
        .collect::<Result<_, _>>()?;
    // Recursively lower nested statements, preserving absence as None.
    let nested = match stmt.nested {
        Some(statements) => Some(
            statements
                .into_iter()
                .map(lower_if_statement::<O>)
                .collect::<Result<_, _>>()?,
        ),
        None => None,
    };
    Ok(dir::DirIfStatement { condition, nested })
}
/// lowers the rules supplied accordingly to DirRule struct by specifying the rule_name,
/// connector_selection and statements that are a bunch of if statements
pub fn lower_rule<O: EuclidDirFilter>(
    rule: ast::Rule<O>,
) -> Result<dir::DirRule<O>, AnalysisError> {
    // Lower each if-statement first; name and selection carry over unchanged.
    let statements = rule
        .statements
        .into_iter()
        .map(lower_if_statement::<O>)
        .collect::<Result<_, _>>()?;
    Ok(dir::DirRule {
        name: rule.name,
        connector_selection: rule.connector_selection,
        statements,
    })
}
/// uses the above rules and lowers the whole ast Program into DirProgram by specifying
/// default_selection that is ast ConnectorSelection, a vector of DirRules and clones the metadata
/// whatever comes in the ast_program
pub fn lower_program<O: EuclidDirFilter>(
    program: ast::Program<O>,
) -> Result<dir::DirProgram<O>, AnalysisError> {
    // Lower every rule; any rule failure fails the whole program lowering.
    let rules = program
        .rules
        .into_iter()
        .map(lower_rule::<O>)
        .collect::<Result<_, _>>()?;
    Ok(dir::DirProgram {
        default_selection: program.default_selection,
        rules,
        metadata: program.metadata,
    })
}
// File: crates/euclid/src/frontend/ast/parser.rs
use common_utils::types::MinorUnit;
use nom::{
branch, bytes::complete, character::complete as pchar, combinator, error, multi, sequence,
};
use crate::{frontend::ast, types::DummyOutput};
/// Result type shared by every parser in this module; carries nom's
/// `VerboseError` so `error::context` labels accumulate on failure.
pub type ParseResult<T, U> = nom::IResult<T, U, error::VerboseError<T>>;
/// Errors raised while mapping parsed text fragments into program values.
/// Each variant carries the offending source text.
// Debug derived so these errors can be logged/inspected like other public
// error types in the crate.
#[derive(Debug)]
pub enum EuclidError {
    /// A volume-split percentage could not be converted to `u8`.
    InvalidPercentage(String),
    /// An unrecognized connector name was encountered.
    InvalidConnector(String),
    /// A comparison operator token did not match any known operator.
    InvalidOperator(String),
    /// A digit sequence could not be parsed as `i64`.
    InvalidNumber(String),
}
/// Types that can be parsed as the output (connector selection) of a rule.
pub trait EuclidParsable: Sized {
    /// Parses `Self` from `input`, returning the unconsumed remainder.
    fn parse_output(input: &str) -> ParseResult<&str, Self>;
}
impl EuclidParsable for DummyOutput {
    /// Parses a non-empty bracketed list of double-quoted strings, e.g.
    /// `[ "stripe", "adyen" ]`, into a `DummyOutput`.
    fn parse_output(input: &str) -> ParseResult<&str, Self> {
        // First quoted string of the list; whitespace is tolerated around
        // the quotes but not inside them.
        let string_w = sequence::delimited(
            skip_ws(complete::tag("\"")),
            complete::take_while(|c| c != '"'),
            skip_ws(complete::tag("\"")),
        );
        // Zero or more `, "..."` continuations after the first string.
        let full_sequence = multi::many0(sequence::preceded(
            skip_ws(complete::tag(",")),
            sequence::delimited(
                skip_ws(complete::tag("\"")),
                complete::take_while(|c| c != '"'),
                skip_ws(complete::tag("\"")),
            ),
        ));
        let sequence = sequence::pair(string_w, full_sequence);
        error::context(
            "dummy_strings",
            combinator::map(
                sequence::delimited(
                    skip_ws(complete::tag("[")),
                    sequence,
                    skip_ws(complete::tag("]")),
                ),
                // Prepend the head string to the tail and convert the
                // borrowed slices into owned Strings.
                |out: (&str, Vec<&str>)| {
                    let mut first = out.1;
                    first.insert(0, out.0);
                    let v = first.iter().map(|s| s.to_string()).collect();
                    Self { outputs: v }
                },
            ),
        )(input)
    }
}
/// Wraps `inner` so that any leading whitespace (spaces, tabs, newlines) is
/// consumed before `inner` runs on the remaining input.
pub fn skip_ws<'a, F, O>(inner: F) -> impl FnMut(&'a str) -> ParseResult<&'a str, O>
where
    F: FnMut(&'a str) -> ParseResult<&'a str, O> + 'a,
{
    sequence::preceded(pchar::multispace0, inner)
}
/// Parses one or more ASCII digits into an `i64`.
///
/// No sign is accepted; values that overflow `i64` fail inside `map_res`
/// via the `InvalidNumber` error.
pub fn num_i64(input: &str) -> ParseResult<&str, i64> {
    error::context(
        // Context label fixed: this parser produces an `i64`, not an `i32`.
        "num_i64",
        combinator::map_res(
            complete::take_while1(|c: char| c.is_ascii_digit()),
            |o: &str| {
                o.parse::<i64>()
                    .map_err(|_| EuclidError::InvalidNumber(o.to_string()))
            },
        ),
    )(input)
}
/// Parses a double-quoted string into an owned `String`.
///
/// Note: `take_while1` requires at least one character between the quotes,
/// so the empty string `""` does not parse.
pub fn string_str(input: &str) -> ParseResult<&str, String> {
    error::context(
        "String",
        combinator::map(
            sequence::delimited(
                complete::tag("\""),
                complete::take_while1(|c: char| c != '"'),
                complete::tag("\""),
            ),
            |val: &str| val.to_string(),
        ),
    )(input)
}
/// Parses an identifier: a leading ASCII letter or `_`, followed by any mix
/// of ASCII alphanumerics and `_` (so digits cannot start an identifier).
pub fn identifier(input: &str) -> ParseResult<&str, String> {
    error::context(
        "identifier",
        combinator::map(
            sequence::pair(
                complete::take_while1(|c: char| c.is_ascii_alphabetic() || c == '_'),
                complete::take_while(|c: char| c.is_ascii_alphanumeric() || c == '_'),
            ),
            // Re-join the mandatory head and optional tail into one String.
            |out: (&str, &str)| out.0.to_string() + out.1,
        ),
    )(input)
}
/// Parses a 1–2 digit percentage terminated by `%` into a `u8`.
///
/// NOTE(review): `take_while_m_n(1, 2, …)` caps the digits at two, so only
/// 0–99 can be expressed and `100%` fails to parse — confirm this is the
/// intended maximum for a volume split.
pub fn percentage(input: &str) -> ParseResult<&str, u8> {
    error::context(
        "volume_split_percentage",
        combinator::map_res(
            sequence::terminated(
                complete::take_while_m_n(1, 2, |c: char| c.is_ascii_digit()),
                complete::tag("%"),
            ),
            |o: &str| {
                o.parse::<u8>()
                    .map_err(|_| EuclidError::InvalidPercentage(o.to_string()))
            },
        ),
    )(input)
}
/// Parses a bare integer into `ValueType::Number`, wrapping it in
/// `MinorUnit` (the crate's monetary amount type).
pub fn number_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    error::context(
        "number_value",
        combinator::map(num_i64, |n| ast::ValueType::Number(MinorUnit::new(n))),
    )(input)
}
/// Parses a double-quoted string into `ValueType::StrValue`.
pub fn str_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    error::context(
        "str_value",
        combinator::map(string_str, ast::ValueType::StrValue),
    )(input)
}
/// Parses an enum-variant token (same grammar as `identifier`: letter/`_`
/// head, alphanumeric/`_` tail) into an owned `String`, without attaching
/// an error context label.
pub fn enum_value_string(input: &str) -> ParseResult<&str, String> {
    combinator::map(
        sequence::pair(
            complete::take_while1(|c: char| c.is_ascii_alphabetic() || c == '_'),
            complete::take_while(|c: char| c.is_ascii_alphanumeric() || c == '_'),
        ),
        |out: (&str, &str)| out.0.to_string() + out.1,
    )(input)
}
/// Parses an enum-variant token into `ValueType::EnumVariant`.
pub fn enum_variant_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    error::context(
        "enum_variant_value",
        combinator::map(enum_value_string, ast::ValueType::EnumVariant),
    )(input)
}
/// Parses a non-empty, comma-separated, parenthesized list of integers,
/// e.g. `(10, 20, 30)`, into `ValueType::NumberArray`.
pub fn number_array_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    // Local helper: integer -> MinorUnit.
    fn num_minor_unit(input: &str) -> ParseResult<&str, MinorUnit> {
        combinator::map(num_i64, MinorUnit::new)(input)
    }
    // Zero or more `, <number>` continuations after the first element.
    let many_with_comma = multi::many0(sequence::preceded(
        skip_ws(complete::tag(",")),
        skip_ws(num_minor_unit),
    ));
    let full_sequence = sequence::pair(skip_ws(num_minor_unit), many_with_comma);
    error::context(
        "number_array_value",
        combinator::map(
            sequence::delimited(
                skip_ws(complete::tag("(")),
                full_sequence,
                skip_ws(complete::tag(")")),
            ),
            // Prepend the head element to the tail.
            |tup: (MinorUnit, Vec<MinorUnit>)| {
                let mut rest = tup.1;
                rest.insert(0, tup.0);
                ast::ValueType::NumberArray(rest)
            },
        ),
    )(input)
}
/// Parses a non-empty, comma-separated, parenthesized list of enum-variant
/// tokens, e.g. `(klarna, affirm)`, into `ValueType::EnumVariantArray`.
pub fn enum_variant_array_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    // Zero or more `, <variant>` continuations after the first element.
    let many_with_comma = multi::many0(sequence::preceded(
        skip_ws(complete::tag(",")),
        skip_ws(enum_value_string),
    ));
    let full_sequence = sequence::pair(skip_ws(enum_value_string), many_with_comma);
    error::context(
        "enum_variant_array_value",
        combinator::map(
            sequence::delimited(
                skip_ws(complete::tag("(")),
                full_sequence,
                skip_ws(complete::tag(")")),
            ),
            // Prepend the head element to the tail.
            |tup: (String, Vec<String>)| {
                let mut rest = tup.1;
                rest.insert(0, tup.0);
                ast::ValueType::EnumVariantArray(rest)
            },
        ),
    )(input)
}
/// Parses an ordering operator immediately followed by an integer, e.g.
/// `>=100`, into an `ast::NumberComparison`.
pub fn number_comparison(input: &str) -> ParseResult<&str, ast::NumberComparison> {
    // Two-character operators must be tried before their one-character
    // prefixes, otherwise `>=` would parse as `>` + leftover `=`.
    let operator = combinator::map_res(
        branch::alt((
            complete::tag(">="),
            complete::tag("<="),
            complete::tag(">"),
            complete::tag("<"),
        )),
        |s: &str| match s {
            ">=" => Ok(ast::ComparisonType::GreaterThanEqual),
            "<=" => Ok(ast::ComparisonType::LessThanEqual),
            ">" => Ok(ast::ComparisonType::GreaterThan),
            "<" => Ok(ast::ComparisonType::LessThan),
            // Unreachable in practice: alt only yields the four tags above.
            _ => Err(EuclidError::InvalidOperator(s.to_string())),
        },
    );
    error::context(
        "number_comparison",
        combinator::map(
            sequence::pair(operator, num_i64),
            |tup: (ast::ComparisonType, i64)| ast::NumberComparison {
                comparison_type: tup.0,
                number: MinorUnit::new(tup.1),
            },
        ),
    )(input)
}
/// Parses a non-empty, comma-separated, parenthesized list of number
/// comparisons, e.g. `(>100, <=500)`, into `ValueType::NumberComparisonArray`.
pub fn number_comparison_array_value(input: &str) -> ParseResult<&str, ast::ValueType> {
    // Zero or more `, <comparison>` continuations after the first element.
    let many_with_comma = multi::many0(sequence::preceded(
        skip_ws(complete::tag(",")),
        skip_ws(number_comparison),
    ));
    let full_sequence = sequence::pair(skip_ws(number_comparison), many_with_comma);
    error::context(
        "number_comparison_array_value",
        combinator::map(
            sequence::delimited(
                skip_ws(complete::tag("(")),
                full_sequence,
                skip_ws(complete::tag(")")),
            ),
            // Prepend the head element to the tail.
            |tup: (ast::NumberComparison, Vec<ast::NumberComparison>)| {
                let mut rest = tup.1;
                rest.insert(0, tup.0);
                ast::ValueType::NumberComparisonArray(rest)
            },
        ),
    )(input)
}
/// Parses any right-hand-side value of a comparison.
///
/// The order of alternatives matters: `alt` commits to the first branch
/// that succeeds, so numbers are tried before enum variants and the
/// quoted-string form is the last resort.
pub fn value_type(input: &str) -> ParseResult<&str, ast::ValueType> {
    error::context(
        "value_type",
        branch::alt((
            number_value,
            enum_variant_value,
            enum_variant_array_value,
            number_array_value,
            number_comparison_array_value,
            str_value,
        )),
    )(input)
}
/// Parses a comparison operator token into an `ast::ComparisonType`.
///
/// `/=` denotes not-equal. Two-character operators are tried before their
/// one-character prefixes so `>=`/`<=` are not split into `>`/`<` plus `=`.
pub fn comparison_type(input: &str) -> ParseResult<&str, ast::ComparisonType> {
    error::context(
        "comparison_operator",
        combinator::map_res(
            branch::alt((
                complete::tag("/="),
                complete::tag(">="),
                complete::tag("<="),
                complete::tag("="),
                complete::tag(">"),
                complete::tag("<"),
            )),
            |s: &str| match s {
                "/=" => Ok(ast::ComparisonType::NotEqual),
                ">=" => Ok(ast::ComparisonType::GreaterThanEqual),
                "<=" => Ok(ast::ComparisonType::LessThanEqual),
                "=" => Ok(ast::ComparisonType::Equal),
                ">" => Ok(ast::ComparisonType::GreaterThan),
                "<" => Ok(ast::ComparisonType::LessThan),
                // Unreachable in practice: alt only yields the six tags above.
                _ => Err(EuclidError::InvalidOperator(s.to_string())),
            },
        ),
    )(input)
}
/// Parses one `key <op> value` condition, e.g. `payment_method = card`,
/// into an `ast::Comparison` with empty metadata.
///
/// The key may contain ASCII letters, `.` and `_` (the `.` permits dotted
/// key paths).
pub fn comparison(input: &str) -> ParseResult<&str, ast::Comparison> {
    error::context(
        "condition",
        combinator::map(
            sequence::tuple((
                skip_ws(complete::take_while1(|c: char| {
                    c.is_ascii_alphabetic() || c == '.' || c == '_'
                })),
                skip_ws(comparison_type),
                skip_ws(value_type),
            )),
            |tup: (&str, ast::ComparisonType, ast::ValueType)| ast::Comparison {
                lhs: tup.0.to_string(),
                comparison: tup.1,
                value: tup.2,
                metadata: std::collections::HashMap::new(),
            },
        ),
    )(input)
}
/// Parses a metadata condition of the form `"key" <op> "value"` (both sides
/// quoted strings) into a comparison whose lhs is fixed to `"metadata"` and
/// whose value is a `MetadataVariant` pairing the two strings.
pub fn arbitrary_comparison(input: &str) -> ParseResult<&str, ast::Comparison> {
    error::context(
        "condition",
        combinator::map(
            sequence::tuple((
                skip_ws(string_str),
                skip_ws(comparison_type),
                skip_ws(string_str),
            )),
            |tup: (String, ast::ComparisonType, String)| ast::Comparison {
                lhs: "metadata".to_string(),
                comparison: tup.1,
                value: ast::ValueType::MetadataVariant(ast::MetadataValue {
                    key: tup.0,
                    value: tup.2,
                }),
                metadata: std::collections::HashMap::new(),
            },
        ),
    )(input)
}
/// Parses one or more `&`-joined conditions, e.g. `a = x & b = y`, into a
/// `Vec<ast::Comparison>`.
///
/// NOTE(review): only the first element may be an `arbitrary_comparison`
/// (quoted metadata form); subsequent `&`-joined elements go through
/// `comparison` alone — confirm this asymmetry is intended.
pub fn comparison_array(input: &str) -> ParseResult<&str, Vec<ast::Comparison>> {
    // Zero or more `& <condition>` continuations after the first condition.
    let many_with_ampersand = error::context(
        "many_with_amp",
        multi::many0(sequence::preceded(skip_ws(complete::tag("&")), comparison)),
    );
    let full_sequence = sequence::pair(
        skip_ws(branch::alt((comparison, arbitrary_comparison))),
        many_with_ampersand,
    );
    error::context(
        "comparison_array",
        combinator::map(
            full_sequence,
            // Prepend the head condition to the tail.
            |tup: (ast::Comparison, Vec<ast::Comparison>)| {
                let mut rest = tup.1;
                rest.insert(0, tup.0);
                rest
            },
        ),
    )(input)
}
/// Parses one if-statement: a comparison array optionally followed by a
/// `{ ... }` block of nested if-statements (parsed recursively).
pub fn if_statement(input: &str) -> ParseResult<&str, ast::IfStatement> {
    // `{ <statements>* }` — may be absent, yielding `nested: None`.
    let nested_block = sequence::delimited(
        skip_ws(complete::tag("{")),
        multi::many0(if_statement),
        skip_ws(complete::tag("}")),
    );
    error::context(
        "if_statement",
        combinator::map(
            sequence::pair(comparison_array, combinator::opt(nested_block)),
            |tup: (ast::IfCondition, Option<Vec<ast::IfStatement>>)| ast::IfStatement {
                condition: tup.0,
                nested: tup.1,
            },
        ),
    )(input)
}
/// Parses a rule body: a braces-delimited block containing at least one
/// if-statement (`many1` — an empty block is a parse error).
pub fn rule_conditions_array(input: &str) -> ParseResult<&str, Vec<ast::IfStatement>> {
    error::context(
        "rules_array",
        sequence::delimited(
            skip_ws(complete::tag("{")),
            multi::many1(if_statement),
            skip_ws(complete::tag("}")),
        ),
    )(input)
}
/// Parses a complete rule: `<name>: <output> { <statements> }`.
///
/// The output type `O` supplies its own parser via `EuclidParsable`.
pub fn rule<O: EuclidParsable>(input: &str) -> ParseResult<&str, ast::Rule<O>> {
    // Rule names follow identifier grammar: letter/underscore head,
    // alphanumeric/underscore tail.
    let rule_name = error::context(
        "rule_name",
        combinator::map(
            skip_ws(sequence::pair(
                complete::take_while1(|c: char| c.is_ascii_alphabetic() || c == '_'),
                complete::take_while(|c: char| c.is_ascii_alphanumeric() || c == '_'),
            )),
            |out: (&str, &str)| out.0.to_string() + out.1,
        ),
    );
    // `: <output>` — the connector selection parsed by O's parser.
    let connector_selection = error::context(
        "parse_output",
        sequence::preceded(skip_ws(complete::tag(":")), output),
    );
    error::context(
        "rule",
        combinator::map(
            sequence::tuple((rule_name, connector_selection, rule_conditions_array)),
            |tup: (String, O, Vec<ast::IfStatement>)| ast::Rule {
                name: tup.0,
                connector_selection: tup.1,
                statements: tup.2,
            },
        ),
    )(input)
}
/// Free-function adapter over `EuclidParsable::parse_output`, usable as a
/// nom parser in combinator positions.
pub fn output<O: EuclidParsable>(input: &str) -> ParseResult<&str, O> {
    O::parse_output(input)
}
/// Parses the program header `default: <output>` into the default
/// connector selection.
pub fn default_output<O: EuclidParsable + 'static>(input: &str) -> ParseResult<&str, O> {
    error::context(
        "default_output",
        sequence::preceded(
            sequence::pair(skip_ws(complete::tag("default")), skip_ws(pchar::char(':'))),
            skip_ws(output),
        ),
    )(input)
}
/// Parses a full program: a `default:` selection followed by at least one
/// rule. Program-level metadata starts out empty.
pub fn program<O: EuclidParsable + 'static>(input: &str) -> ParseResult<&str, ast::Program<O>> {
    error::context(
        "program",
        combinator::map(
            sequence::pair(default_output, multi::many1(skip_ws(rule::<O>))),
            |tup: (O, Vec<ast::Rule<O>>)| ast::Program {
                default_selection: tup.0,
                rules: tup.1,
                metadata: std::collections::HashMap::new(),
            },
        ),
    )(input)
}
</module>
|
{
"crate": "euclid",
"file": null,
"files": [
"crates/euclid/src/frontend/ast/lowering.rs",
"crates/euclid/src/frontend/ast/parser.rs"
],
"module": "crates/euclid/src/frontend/ast",
"num_files": 2,
"token_count": 6427
}
|
module_7795850758304973321
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: euclid
Module: crates/euclid/src/backend
Files: 3
</path>
<module>
// File: crates/euclid/src/backend/vir_interpreter.rs
pub mod types;
use std::fmt::Debug;
use serde::{Deserialize, Serialize};
use crate::{
backend::{self, inputs, EuclidBackend},
frontend::{
ast,
dir::{self, EuclidDirFilter},
vir,
},
};
/// Backend that executes a fully lowered (valued) Euclid program by direct
/// interpretation against each incoming input.
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct VirInterpreterBackend<O> {
    // The lowered program evaluated on every `execute` call.
    program: vir::ValuedProgram<O>,
}
impl<O> VirInterpreterBackend<O>
where
    O: Clone,
{
    /// Evaluates a single valued comparison against the execution context.
    #[inline]
    fn eval_comparison(comp: &vir::ValuedComparison, ctx: &types::Context) -> bool {
        let present = |value| ctx.check_presence(value);
        match &comp.logic {
            // At least one of the comparison's values must be present.
            vir::ValuedComparisonLogic::PositiveDisjunction => comp.values.iter().any(present),
            // None of the comparison's values may be present.
            vir::ValuedComparisonLogic::NegativeConjunction => !comp.values.iter().any(present),
        }
    }
    /// A condition holds only when every comparison in it holds.
    #[inline]
    fn eval_condition(cond: &vir::ValuedIfCondition, ctx: &types::Context) -> bool {
        cond.iter()
            .all(|comparison| Self::eval_comparison(comparison, ctx))
    }
    /// A statement matches when its own condition holds and, if nested
    /// statements are present, at least one nested statement matches too.
    fn eval_statement(stmt: &vir::ValuedIfStatement, ctx: &types::Context) -> bool {
        Self::eval_condition(&stmt.condition, ctx)
            && stmt.nested.as_ref().is_none_or(|nested_stmts| {
                nested_stmts
                    .iter()
                    .any(|inner| Self::eval_statement(inner, ctx))
            })
    }
    /// A rule fires when any one of its statements matches.
    fn eval_rule(rule: &vir::ValuedRule<O>, ctx: &types::Context) -> bool {
        rule.statements
            .iter()
            .any(|statement| Self::eval_statement(statement, ctx))
    }
    /// Returns the output of the first rule that fires, falling back to the
    /// program's default selection (with no rule name) when none does.
    fn eval_program(
        program: &vir::ValuedProgram<O>,
        ctx: &types::Context,
    ) -> backend::BackendOutput<O> {
        for rule in &program.rules {
            if Self::eval_rule(rule, ctx) {
                return backend::BackendOutput {
                    connector_selection: rule.connector_selection.clone(),
                    rule_name: Some(rule.name.clone()),
                };
            }
        }
        backend::BackendOutput {
            connector_selection: program.default_selection.clone(),
            rule_name: None,
        }
    }
}
impl<O> EuclidBackend<O> for VirInterpreterBackend<O>
where
    O: Clone + EuclidDirFilter,
{
    type Error = types::VirInterpreterError;
    /// Lowers ast -> dir -> vir and stores the resulting valued program.
    fn with_program(program: ast::Program<O>) -> Result<Self, Self::Error> {
        let dir_program = ast::lowering::lower_program(program)
            .map_err(types::VirInterpreterError::LoweringError)?;
        dir::lowering::lower_program(dir_program)
            .map_err(types::VirInterpreterError::LoweringError)
            .map(|vir_program| Self {
                program: vir_program,
            })
    }
    /// Builds the evaluation context from the input and interprets the
    /// stored program against it.
    fn execute(
        &self,
        input: inputs::BackendInput,
    ) -> Result<backend::BackendOutput<O>, Self::Error> {
        let context = types::Context::from_input(input);
        Ok(Self::eval_program(&self.program, &context))
    }
}
#[cfg(all(test, feature = "ast_parser"))]
mod test {
#![allow(clippy::expect_used)]
use common_utils::types::MinorUnit;
use rustc_hash::FxHashMap;
use super::*;
use crate::{enums, types::DummyOutput};
#[test]
fn test_execution() {
    // Two rules keyed on pay_later type; the input's Affirm payment method
    // type should make rule_2 (adyen) fire, not rule_1 (klarna/stripe).
    let program_str = r#"
default: [ "stripe", "adyen"]
rule_1: ["stripe"]
{
pay_later = klarna
}
rule_2: ["adyen"]
{
pay_later = affirm
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            card_bin: None,
            currency: enums::Currency::USD,
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_2");
}
#[test]
fn test_payment_type() {
    // Input carries payment_type = SetupMandate, so rule_1 should fire.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
payment_type = setup_mandate
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            currency: enums::Currency::USD,
            card_bin: Some("123456".to_string()),
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: Some(enums::PaymentType::SetupMandate),
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_ppt_flow() {
    // Input carries payment_type = PptMandate, so rule_1 should fire.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
payment_type = ppt_mandate
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            currency: enums::Currency::USD,
            card_bin: Some("123456".to_string()),
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: Some(enums::PaymentType::PptMandate),
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_mandate_type() {
    // Input carries mandate_type = SingleUse, so rule_1 should fire.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
mandate_type = single_use
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            currency: enums::Currency::USD,
            card_bin: Some("123456".to_string()),
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: Some(enums::MandateType::SingleUse),
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_mandate_acceptance_type() {
    // Input carries mandate_acceptance_type = Online, so rule_1 should fire.
    let program_str = r#"
default: ["stripe","adyen"]
rule_1: ["stripe"]
{
mandate_acceptance_type = online
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            currency: enums::Currency::USD,
            card_bin: Some("123456".to_string()),
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: Some(enums::MandateAcceptanceType::Online),
            mandate_type: None,
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_card_bin() {
    // Input carries card_bin = "123456" matching the rule's string value,
    // so rule_1 should fire.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
card_bin="123456"
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(32),
            currency: enums::Currency::USD,
            card_bin: Some("123456".to_string()),
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result = backend.execute(inp).expect("Execution");
    assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_payment_amount() {
let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
amount = 32
}
"#;
let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
let inp = inputs::BackendInput {
metadata: None,
payment: inputs::PaymentInput {
amount: MinorUnit::new(32),
currency: enums::Currency::USD,
card_bin: None,
authentication_type: Some(enums::AuthenticationType::NoThreeDs),
capture_method: Some(enums::CaptureMethod::Automatic),
business_country: Some(enums::Country::UnitedStatesOfAmerica),
billing_country: Some(enums::Country::France),
business_label: None,
setup_future_usage: None,
},
payment_method: inputs::PaymentMethodInput {
payment_method: Some(enums::PaymentMethod::PayLater),
payment_method_type: Some(enums::PaymentMethodType::Affirm),
card_network: None,
},
mandate: inputs::MandateData {
mandate_acceptance_type: None,
mandate_type: None,
payment_type: None,
},
acquirer_data: None,
customer_device_data: None,
issuer_data: None,
};
let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
let result = backend.execute(inp).expect("Execution");
assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_payment_method() {
let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
payment_method = pay_later
}
"#;
let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
let inp = inputs::BackendInput {
metadata: None,
payment: inputs::PaymentInput {
amount: MinorUnit::new(32),
currency: enums::Currency::USD,
card_bin: None,
authentication_type: Some(enums::AuthenticationType::NoThreeDs),
capture_method: Some(enums::CaptureMethod::Automatic),
business_country: Some(enums::Country::UnitedStatesOfAmerica),
billing_country: Some(enums::Country::France),
business_label: None,
setup_future_usage: None,
},
payment_method: inputs::PaymentMethodInput {
payment_method: Some(enums::PaymentMethod::PayLater),
payment_method_type: Some(enums::PaymentMethodType::Affirm),
card_network: None,
},
mandate: inputs::MandateData {
mandate_acceptance_type: None,
mandate_type: None,
payment_type: None,
},
acquirer_data: None,
customer_device_data: None,
issuer_data: None,
};
let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
let result = backend.execute(inp).expect("Execution");
assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_future_usage() {
let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
setup_future_usage = off_session
}
"#;
let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
let inp = inputs::BackendInput {
metadata: None,
payment: inputs::PaymentInput {
amount: MinorUnit::new(32),
currency: enums::Currency::USD,
card_bin: None,
authentication_type: Some(enums::AuthenticationType::NoThreeDs),
capture_method: Some(enums::CaptureMethod::Automatic),
business_country: Some(enums::Country::UnitedStatesOfAmerica),
billing_country: Some(enums::Country::France),
business_label: None,
setup_future_usage: Some(enums::SetupFutureUsage::OffSession),
},
payment_method: inputs::PaymentMethodInput {
payment_method: Some(enums::PaymentMethod::PayLater),
payment_method_type: Some(enums::PaymentMethodType::Affirm),
card_network: None,
},
mandate: inputs::MandateData {
mandate_acceptance_type: None,
mandate_type: None,
payment_type: None,
},
acquirer_data: None,
customer_device_data: None,
issuer_data: None,
};
let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
let result = backend.execute(inp).expect("Execution");
assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_metadata_execution() {
let program_str = r#"
default: ["stripe"," adyen"]
rule_1: ["stripe"]
{
"metadata_key" = "arbitrary meta"
}
"#;
let mut meta_map = FxHashMap::default();
meta_map.insert("metadata_key".to_string(), "arbitrary meta".to_string());
let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
let inp = inputs::BackendInput {
metadata: Some(meta_map),
payment: inputs::PaymentInput {
amount: MinorUnit::new(32),
card_bin: None,
currency: enums::Currency::USD,
authentication_type: Some(enums::AuthenticationType::NoThreeDs),
capture_method: Some(enums::CaptureMethod::Automatic),
business_country: Some(enums::Country::UnitedStatesOfAmerica),
billing_country: Some(enums::Country::France),
business_label: None,
setup_future_usage: None,
},
payment_method: inputs::PaymentMethodInput {
payment_method: Some(enums::PaymentMethod::PayLater),
payment_method_type: Some(enums::PaymentMethodType::Affirm),
card_network: None,
},
mandate: inputs::MandateData {
mandate_acceptance_type: None,
mandate_type: None,
payment_type: None,
},
acquirer_data: None,
customer_device_data: None,
issuer_data: None,
};
let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
let result = backend.execute(inp).expect("Execution");
assert_eq!(result.rule_name.expect("Rule Name").as_str(), "rule_1");
}
#[test]
fn test_greater_than_equal_operator() {
    // Renamed from `test_less_than_operator`: the rule below uses `>=`, so
    // the old name was misleading. Both a strictly greater amount (150) and
    // an exactly equal amount (123) must route to `rule_1`.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
amount>=123
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp_greater = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(150),
            card_bin: None,
            currency: enums::Currency::USD,
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    // Boundary case: amount exactly equal to the rule's threshold.
    let mut inp_equal = inp_greater.clone();
    inp_equal.payment.amount = MinorUnit::new(123);
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result_greater = backend.execute(inp_greater).expect("Execution");
    let result_equal = backend.execute(inp_equal).expect("Execution");
    assert_eq!(
        result_equal.rule_name.expect("Rule Name").as_str(),
        "rule_1"
    );
    assert_eq!(
        result_greater.rule_name.expect("Rule Name").as_str(),
        "rule_1"
    );
}
#[test]
fn test_less_than_equal_operator() {
    // Renamed from `test_greater_than_operator`: the rule below uses `<=`, so
    // the old name was misleading. Both a strictly lower amount (120) and an
    // exactly equal amount (123) must route to `rule_1`.
    let program_str = r#"
default: ["stripe", "adyen"]
rule_1: ["stripe"]
{
amount<=123
}
"#;
    let (_, program) = ast::parser::program::<DummyOutput>(program_str).expect("Program");
    let inp_lower = inputs::BackendInput {
        metadata: None,
        payment: inputs::PaymentInput {
            amount: MinorUnit::new(120),
            card_bin: None,
            currency: enums::Currency::USD,
            authentication_type: Some(enums::AuthenticationType::NoThreeDs),
            capture_method: Some(enums::CaptureMethod::Automatic),
            business_country: Some(enums::Country::UnitedStatesOfAmerica),
            billing_country: Some(enums::Country::France),
            business_label: None,
            setup_future_usage: None,
        },
        payment_method: inputs::PaymentMethodInput {
            payment_method: Some(enums::PaymentMethod::PayLater),
            payment_method_type: Some(enums::PaymentMethodType::Affirm),
            card_network: None,
        },
        mandate: inputs::MandateData {
            mandate_acceptance_type: None,
            mandate_type: None,
            payment_type: None,
        },
        acquirer_data: None,
        customer_device_data: None,
        issuer_data: None,
    };
    // Boundary case: amount exactly equal to the rule's threshold.
    let mut inp_equal = inp_lower.clone();
    inp_equal.payment.amount = MinorUnit::new(123);
    let backend = VirInterpreterBackend::<DummyOutput>::with_program(program).expect("Program");
    let result_equal = backend.execute(inp_equal).expect("Execution");
    let result_lower = backend.execute(inp_lower).expect("Execution");
    assert_eq!(
        result_equal.rule_name.expect("Rule Name").as_str(),
        "rule_1"
    );
    assert_eq!(
        result_lower.rule_name.expect("Rule Name").as_str(),
        "rule_1"
    );
}
}
// File: crates/euclid/src/backend/inputs.rs
use rustc_hash::FxHashMap;
use serde::{Deserialize, Serialize};
use crate::{
enums,
frontend::dir::enums::{CustomerDeviceDisplaySize, CustomerDevicePlatform, CustomerDeviceType},
};
/// Mandate-related details of a payment, matched by mandate routing rules.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MandateData {
    /// How the mandate was accepted by the customer.
    pub mandate_acceptance_type: Option<enums::MandateAcceptanceType>,
    /// The kind of mandate (e.g. `SingleUse`).
    pub mandate_type: Option<enums::MandateType>,
    /// The payment type associated with the mandate.
    pub payment_type: Option<enums::PaymentType>,
}
/// Payment-method details available to routing rules.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaymentMethodInput {
    /// High-level payment method (e.g. `PayLater`).
    pub payment_method: Option<enums::PaymentMethod>,
    /// Concrete payment method type (e.g. `Affirm`).
    pub payment_method_type: Option<enums::PaymentMethodType>,
    /// Card network, when the method is a card.
    pub card_network: Option<enums::CardNetwork>,
}
/// Payment-level fields available to routing rules.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PaymentInput {
    /// Payment amount, in the currency's minor units.
    pub amount: common_utils::types::MinorUnit,
    /// Payment currency.
    pub currency: enums::Currency,
    /// Authentication flow (e.g. `NoThreeDs`).
    pub authentication_type: Option<enums::AuthenticationType>,
    /// Card BIN — the leading digits of the card number, as a string.
    pub card_bin: Option<String>,
    /// How the payment is captured (e.g. `Automatic`).
    pub capture_method: Option<enums::CaptureMethod>,
    /// Country of the business entity.
    pub business_country: Option<enums::Country>,
    /// Country from the billing address.
    pub billing_country: Option<enums::Country>,
    /// Free-form business label.
    pub business_label: Option<String>,
    /// Intended future usage of the payment method (e.g. `OffSession`).
    pub setup_future_usage: Option<enums::SetupFutureUsage>,
}
/// Acquirer attributes that routing rules can condition on.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct AcquirerDataInput {
    /// Acquirer's country.
    pub country: Option<enums::Country>,
    /// Acquirer fraud rate — assumed to be a percentage/ratio; confirm units with producers.
    pub fraud_rate: Option<f64>,
}
/// Customer device attributes that routing rules can condition on.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct CustomerDeviceDataInput {
    /// Device platform.
    pub platform: Option<CustomerDevicePlatform>,
    /// Device category.
    pub device_type: Option<CustomerDeviceType>,
    /// Display size bucket.
    pub display_size: Option<CustomerDeviceDisplaySize>,
}
/// Issuer attributes that routing rules can condition on.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct IssuerDataInput {
    /// Issuer name.
    pub name: Option<String>,
    /// Issuer's country.
    pub country: Option<enums::Country>,
}
/// Complete input handed to a routing backend for one evaluation.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BackendInput {
    /// Free-form string key/value metadata, matched by quoted-key rules.
    pub metadata: Option<FxHashMap<String, String>>,
    /// Payment-level fields.
    pub payment: PaymentInput,
    /// Payment-method fields.
    pub payment_method: PaymentMethodInput,
    /// Optional acquirer context.
    pub acquirer_data: Option<AcquirerDataInput>,
    /// Optional customer device context.
    pub customer_device_data: Option<CustomerDeviceDataInput>,
    /// Optional issuer context.
    pub issuer_data: Option<IssuerDataInput>,
    /// Mandate context.
    pub mandate: MandateData,
}
// File: crates/euclid/src/backend/interpreter.rs
pub mod types;
use common_utils::types::MinorUnit;
use crate::{
backend::{self, inputs, EuclidBackend},
frontend::ast,
};
/// Backend that evaluates a routing `ast::Program` by walking its rules
/// in order against a runtime context.
pub struct InterpreterBackend<O> {
    // The parsed routing program to evaluate on each `execute` call.
    program: ast::Program<O>,
}
impl<O> InterpreterBackend<O>
where
    O: Clone,
{
    /// Returns `true` when `num` satisfies at least one comparison in `array`.
    ///
    /// The individual comparisons cannot fail, so this always returns `Ok`;
    /// the `Result` signature mirrors the other evaluation helpers.
    fn eval_number_comparison_array(
        num: MinorUnit,
        array: &[ast::NumberComparison],
    ) -> Result<bool, types::InterpreterError> {
        let satisfied = array.iter().any(|comparison| {
            let rhs = comparison.number;
            match comparison.comparison_type {
                ast::ComparisonType::GreaterThan => num > rhs,
                ast::ComparisonType::LessThan => num < rhs,
                ast::ComparisonType::LessThanEqual => num <= rhs,
                ast::ComparisonType::GreaterThanEqual => num >= rhs,
                ast::ComparisonType::Equal => num == rhs,
                ast::ComparisonType::NotEqual => num != rhs,
            }
        });
        Ok(satisfied)
    }

    /// Evaluates a single `key <op> value` comparison against the context.
    ///
    /// A key that is entirely absent from the context is an error; a key that
    /// is present but holds no runtime value makes the comparison `false`.
    /// A (value type, operator, rhs type) combination with no defined meaning
    /// is an `InvalidComparison` error.
    fn eval_comparison(
        comparison: &ast::Comparison,
        ctx: &types::Context,
    ) -> Result<bool, types::InterpreterError> {
        use ast::{ComparisonType::*, ValueType::*};

        let value = ctx
            .get(&comparison.lhs)
            .ok_or_else(|| types::InterpreterError {
                error_type: types::InterpreterErrorType::InvalidKey(comparison.lhs.clone()),
                metadata: comparison.metadata.clone(),
            })?;

        let Some(val) = value else {
            // Key is known but unset at runtime: the condition simply fails.
            return Ok(false);
        };

        match (val, &comparison.comparison, &comparison.value) {
            (EnumVariant(e1), Equal, EnumVariant(e2)) => Ok(e1 == e2),
            (EnumVariant(e1), NotEqual, EnumVariant(e2)) => Ok(e1 != e2),
            // Membership tests: equal-to-any / not-equal-to-all of the array.
            (EnumVariant(e), Equal, EnumVariantArray(evec)) => Ok(evec.iter().any(|v| e == v)),
            (EnumVariant(e), NotEqual, EnumVariantArray(evec)) => {
                Ok(evec.iter().all(|v| e != v))
            }
            (Number(n1), Equal, Number(n2)) => Ok(n1 == n2),
            (Number(n1), NotEqual, Number(n2)) => Ok(n1 != n2),
            (Number(n1), LessThanEqual, Number(n2)) => Ok(n1 <= n2),
            (Number(n1), GreaterThanEqual, Number(n2)) => Ok(n1 >= n2),
            (Number(n1), LessThan, Number(n2)) => Ok(n1 < n2),
            (Number(n1), GreaterThan, Number(n2)) => Ok(n1 > n2),
            (Number(n), Equal, NumberArray(nvec)) => Ok(nvec.iter().any(|v| v == n)),
            (Number(n), NotEqual, NumberArray(nvec)) => Ok(nvec.iter().all(|v| v != n)),
            (Number(n), Equal, NumberComparisonArray(ncvec)) => {
                Self::eval_number_comparison_array(*n, ncvec)
            }
            _ => Err(types::InterpreterError {
                error_type: types::InterpreterErrorType::InvalidComparison,
                metadata: comparison.metadata.clone(),
            }),
        }
    }

    /// A condition is a conjunction: every comparison must hold.
    /// Short-circuits (and skips later error paths) on the first failure.
    fn eval_if_condition(
        condition: &ast::IfCondition,
        ctx: &types::Context,
    ) -> Result<bool, types::InterpreterError> {
        for comparison in condition {
            if !Self::eval_comparison(comparison, ctx)? {
                return Ok(false);
            }
        }
        Ok(true)
    }

    /// Evaluates one `if` statement: its own condition must hold, and — when
    /// nested statements exist — at least one nested statement must succeed.
    fn eval_if_statement(
        stmt: &ast::IfStatement,
        ctx: &types::Context,
    ) -> Result<bool, types::InterpreterError> {
        if !Self::eval_if_condition(&stmt.condition, ctx)? {
            return Ok(false);
        }
        match &stmt.nested {
            Some(nested) => {
                // Disjunction over the nested branches; stops at the first hit.
                for nested_if in nested {
                    if Self::eval_if_statement(nested_if, ctx)? {
                        return Ok(true);
                    }
                }
                Ok(false)
            }
            // A leaf statement succeeds once its own condition holds.
            None => Ok(true),
        }
    }

    /// A rule matches when any of its top-level statements succeeds.
    fn eval_rule_statements(
        statements: &[ast::IfStatement],
        ctx: &types::Context,
    ) -> Result<bool, types::InterpreterError> {
        for stmt in statements {
            if Self::eval_if_statement(stmt, ctx)? {
                return Ok(true);
            }
        }
        Ok(false)
    }

    /// Thin wrapper evaluating a whole rule via its statements.
    #[inline]
    fn eval_rule(
        rule: &ast::Rule<O>,
        ctx: &types::Context,
    ) -> Result<bool, types::InterpreterError> {
        Self::eval_rule_statements(&rule.statements, ctx)
    }

    /// Runs rules in program order; the first match wins. When no rule
    /// matches, falls back to the program's default selection (no rule name).
    fn eval_program(
        program: &ast::Program<O>,
        ctx: &types::Context,
    ) -> Result<backend::BackendOutput<O>, types::InterpreterError> {
        for rule in &program.rules {
            if Self::eval_rule(rule, ctx)? {
                return Ok(backend::BackendOutput {
                    connector_selection: rule.connector_selection.clone(),
                    rule_name: Some(rule.name.clone()),
                });
            }
        }
        Ok(backend::BackendOutput {
            connector_selection: program.default_selection.clone(),
            rule_name: None,
        })
    }
}
impl<O> EuclidBackend<O> for InterpreterBackend<O>
where
    O: Clone,
{
    type Error = types::InterpreterError;

    /// Wraps the parsed program; construction itself cannot fail.
    fn with_program(program: ast::Program<O>) -> Result<Self, Self::Error> {
        Ok(Self { program })
    }

    /// Converts the input into an evaluation context and runs the program.
    fn execute(&self, input: inputs::BackendInput) -> Result<super::BackendOutput<O>, Self::Error> {
        Self::eval_program(&self.program, &input.into())
    }
}
</module>
|
{
"crate": "euclid",
"file": null,
"files": [
"crates/euclid/src/backend/vir_interpreter.rs",
"crates/euclid/src/backend/inputs.rs",
"crates/euclid/src/backend/interpreter.rs"
],
"module": "crates/euclid/src/backend",
"num_files": 3,
"token_count": 6968
}
|
module_8466949050943966762
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: api_models
Module: crates/api_models/src/events
Files: 16
</path>
<module>
// File: crates/api_models/src/events/locker_migration.rs
use common_utils::events::ApiEventMetric;
use crate::locker_migration::MigrateCardResponse;
impl ApiEventMetric for MigrateCardResponse {
    /// Card-locker migration responses are always categorised under the
    /// `RustLocker` event type.
    fn get_api_event_type(&self) -> Option<common_utils::events::ApiEventsType> {
        let event_type = common_utils::events::ApiEventsType::RustLocker;
        Some(event_type)
    }
}
// File: crates/api_models/src/events/dispute.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use super::{
DeleteEvidenceRequest, DisputeResponse, DisputeResponsePaymentsRetrieve,
DisputeRetrieveRequest, DisputesAggregateResponse, SubmitEvidenceRequest,
};
// Each dispute-scoped request/response reports a `Dispute` event keyed by
// its own `dispute_id`; the aggregate response is list-level instead.
impl ApiEventMetric for SubmitEvidenceRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let dispute_id = self.dispute_id.clone();
        Some(ApiEventsType::Dispute { dispute_id })
    }
}
impl ApiEventMetric for DisputeRetrieveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let dispute_id = self.dispute_id.clone();
        Some(ApiEventsType::Dispute { dispute_id })
    }
}
impl ApiEventMetric for DisputeResponsePaymentsRetrieve {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let dispute_id = self.dispute_id.clone();
        Some(ApiEventsType::Dispute { dispute_id })
    }
}
impl ApiEventMetric for DisputeResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let dispute_id = self.dispute_id.clone();
        Some(ApiEventsType::Dispute { dispute_id })
    }
}
impl ApiEventMetric for DeleteEvidenceRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let dispute_id = self.dispute_id.clone();
        Some(ApiEventsType::Dispute { dispute_id })
    }
}
impl ApiEventMetric for DisputesAggregateResponse {
    /// Aggregate results are not tied to a single dispute resource.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
// File: crates/api_models/src/events/payouts.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::payouts::{
PayoutActionRequest, PayoutCreateRequest, PayoutCreateResponse, PayoutLinkInitiateRequest,
PayoutListConstraints, PayoutListFilterConstraints, PayoutListFilters, PayoutListResponse,
PayoutRetrieveRequest,
};
// Payout-scoped types report a `Payout` event keyed by their `payout_id`;
// list/filter types are reported as list-level API events.
impl ApiEventMetric for PayoutRetrieveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payout_id = self.payout_id.clone();
        Some(ApiEventsType::Payout { payout_id })
    }
}
impl ApiEventMetric for PayoutCreateRequest {
    /// A payout event is only produced when the request already names a
    /// payout id; otherwise no event is emitted.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payout_id = self.payout_id.clone()?;
        Some(ApiEventsType::Payout { payout_id })
    }
}
impl ApiEventMetric for PayoutCreateResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payout_id = self.payout_id.clone();
        Some(ApiEventsType::Payout { payout_id })
    }
}
impl ApiEventMetric for PayoutActionRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payout_id = self.payout_id.clone();
        Some(ApiEventsType::Payout { payout_id })
    }
}
impl ApiEventMetric for PayoutListConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PayoutListFilterConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PayoutListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PayoutListFilters {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PayoutLinkInitiateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payout_id = self.payout_id.clone();
        Some(ApiEventsType::Payout { payout_id })
    }
}
// File: crates/api_models/src/events/user.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
#[cfg(feature = "dummy_connector")]
use crate::user::sample_data::SampleDataRequest;
#[cfg(feature = "control_center_theme")]
use crate::user::theme::{
CreateThemeRequest, CreateUserThemeRequest, GetThemeResponse, UpdateThemeRequest,
UploadFileRequest,
};
use crate::user::{
dashboard_metadata::{
GetMetaDataRequest, GetMetaDataResponse, GetMultipleMetaDataPayload, SetMetaDataRequest,
},
AcceptInviteFromEmailRequest, AuthSelectRequest, AuthorizeResponse, BeginTotpResponse,
ChangePasswordRequest, CloneConnectorRequest, ConnectAccountRequest, CreateInternalUserRequest,
CreateTenantUserRequest, CreateUserAuthenticationMethodRequest,
CreateUserAuthenticationMethodResponse, ForgotPasswordRequest, GetSsoAuthUrlRequest,
GetUserAuthenticationMethodsRequest, GetUserDetailsResponse, GetUserRoleDetailsRequest,
GetUserRoleDetailsResponseV2, InviteUserRequest, PlatformAccountCreateRequest,
PlatformAccountCreateResponse, ReInviteUserRequest, RecoveryCodes, ResetPasswordRequest,
RotatePasswordRequest, SendVerifyEmailRequest, SignUpRequest, SignUpWithMerchantIdRequest,
SsoSignInRequest, SwitchMerchantRequest, SwitchOrganizationRequest, SwitchProfileRequest,
TokenResponse, TwoFactorAuthStatusResponse, TwoFactorStatus, UpdateUserAccountDetailsRequest,
UpdateUserAuthenticationMethodRequest, UserFromEmailRequest, UserMerchantAccountResponse,
UserMerchantCreate, UserOrgMerchantCreateRequest, VerifyEmailRequest,
VerifyRecoveryCodeRequest, VerifyTotpRequest,
};
// These user-flow request/response types carry no resource-specific id, so
// the macro implements `ApiEventMetric` for all of them with the
// `Miscellaneous` event type.
common_utils::impl_api_event_type!(
    Miscellaneous,
    (
        SignUpRequest,
        SignUpWithMerchantIdRequest,
        ChangePasswordRequest,
        GetMultipleMetaDataPayload,
        GetMetaDataResponse,
        GetMetaDataRequest,
        SetMetaDataRequest,
        SwitchOrganizationRequest,
        SwitchMerchantRequest,
        SwitchProfileRequest,
        CreateInternalUserRequest,
        CreateTenantUserRequest,
        PlatformAccountCreateRequest,
        PlatformAccountCreateResponse,
        UserOrgMerchantCreateRequest,
        UserMerchantAccountResponse,
        UserMerchantCreate,
        AuthorizeResponse,
        ConnectAccountRequest,
        ForgotPasswordRequest,
        ResetPasswordRequest,
        RotatePasswordRequest,
        InviteUserRequest,
        ReInviteUserRequest,
        VerifyEmailRequest,
        SendVerifyEmailRequest,
        AcceptInviteFromEmailRequest,
        UpdateUserAccountDetailsRequest,
        GetUserDetailsResponse,
        GetUserRoleDetailsRequest,
        GetUserRoleDetailsResponseV2,
        TokenResponse,
        TwoFactorAuthStatusResponse,
        TwoFactorStatus,
        UserFromEmailRequest,
        BeginTotpResponse,
        VerifyRecoveryCodeRequest,
        VerifyTotpRequest,
        RecoveryCodes,
        GetUserAuthenticationMethodsRequest,
        CreateUserAuthenticationMethodRequest,
        CreateUserAuthenticationMethodResponse,
        UpdateUserAuthenticationMethodRequest,
        GetSsoAuthUrlRequest,
        SsoSignInRequest,
        AuthSelectRequest,
        CloneConnectorRequest
    )
);
// Theme-management types are likewise logged as `Miscellaneous` events, but
// only when the control-center theme feature is compiled in.
#[cfg(feature = "control_center_theme")]
common_utils::impl_api_event_type!(
    Miscellaneous,
    (
        GetThemeResponse,
        UploadFileRequest,
        CreateThemeRequest,
        CreateUserThemeRequest,
        UpdateThemeRequest
    )
);
// Sample-data generation is a dev/test-only flow behind the dummy-connector feature.
#[cfg(feature = "dummy_connector")]
common_utils::impl_api_event_type!(Miscellaneous, (SampleDataRequest));
// File: crates/api_models/src/events/user_role.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::user_role::{
role::{
CreateRoleRequest, CreateRoleV2Request, GetParentGroupsInfoQueryParams, GetRoleRequest,
GroupsAndResources, ListRolesAtEntityLevelRequest, ListRolesQueryParams, ListRolesResponse,
ParentGroupInfoRequest, RoleInfoResponseNew, RoleInfoResponseWithParentsGroup,
RoleInfoWithGroupsResponse, RoleInfoWithParents, UpdateRoleRequest,
},
AuthorizationInfoResponse, DeleteUserRoleRequest, ListUsersInEntityRequest,
UpdateUserRoleRequest,
};
// Role and user-role management types carry no resource-specific id, so the
// macro implements `ApiEventMetric` for all of them with `Miscellaneous`.
common_utils::impl_api_event_type!(
    Miscellaneous,
    (
        GetRoleRequest,
        GetParentGroupsInfoQueryParams,
        AuthorizationInfoResponse,
        UpdateUserRoleRequest,
        DeleteUserRoleRequest,
        CreateRoleRequest,
        CreateRoleV2Request,
        UpdateRoleRequest,
        ListRolesAtEntityLevelRequest,
        RoleInfoResponseNew,
        RoleInfoWithGroupsResponse,
        ListUsersInEntityRequest,
        ListRolesQueryParams,
        GroupsAndResources,
        RoleInfoWithParents,
        ParentGroupInfoRequest,
        RoleInfoResponseWithParentsGroup,
        ListRolesResponse
    )
);
// File: crates/api_models/src/events/refund.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::refunds::{
self, RefundAggregateResponse, RefundListFilters, RefundListMetaData, RefundListRequest,
RefundListResponse,
};
#[cfg(feature = "v1")]
use crate::refunds::{
RefundManualUpdateRequest, RefundRequest, RefundUpdateRequest, RefundsRetrieveRequest,
};
#[cfg(feature = "v1")]
impl ApiEventMetric for RefundRequest {
    /// Emits a refund event only when the request already carries a refund
    /// id; the payment id is always attached alongside it.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let refund_id = self.refund_id.clone()?;
        Some(ApiEventsType::Refund {
            payment_id: Some(self.payment_id.clone()),
            refund_id,
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for refunds::RefundResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payment_id = Some(self.payment_id.clone());
        let refund_id = self.refund_id.clone();
        Some(ApiEventsType::Refund {
            payment_id,
            refund_id,
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for refunds::RefundResponse {
    // In v2 the refund identifier lives on the `id` field.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payment_id = Some(self.payment_id.clone());
        let refund_id = self.id.clone();
        Some(ApiEventsType::Refund {
            payment_id,
            refund_id,
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for RefundsRetrieveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let refund_id = self.refund_id.clone();
        Some(ApiEventsType::Refund {
            payment_id: None,
            refund_id,
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for refunds::RefundsRetrieveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let refund_id = self.refund_id.clone();
        Some(ApiEventsType::Refund {
            payment_id: None,
            refund_id,
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for RefundUpdateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let refund_id = self.refund_id.clone();
        Some(ApiEventsType::Refund {
            payment_id: None,
            refund_id,
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for RefundManualUpdateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let refund_id = self.refund_id.clone();
        Some(ApiEventsType::Refund {
            payment_id: None,
            refund_id,
        })
    }
}
// List-style endpoints are not scoped to a single refund resource.
impl ApiEventMetric for RefundListRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for RefundListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for RefundAggregateResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for RefundListMetaData {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for RefundListFilters {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
// File: crates/api_models/src/events/external_service_auth.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::external_service_auth::{
ExternalSignoutTokenRequest, ExternalTokenResponse, ExternalVerifyTokenRequest,
ExternalVerifyTokenResponse,
};
// All external-service auth token flows are logged under the single
// `ExternalServiceAuth` event category; none carries a resource id.
impl ApiEventMetric for ExternalTokenResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ExternalServiceAuth)
    }
}
impl ApiEventMetric for ExternalVerifyTokenRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ExternalServiceAuth)
    }
}
impl ApiEventMetric for ExternalVerifyTokenResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ExternalServiceAuth)
    }
}
impl ApiEventMetric for ExternalSignoutTokenRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ExternalServiceAuth)
    }
}
// File: crates/api_models/src/events/payment.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
#[cfg(feature = "v2")]
use super::{
PaymentAttemptListRequest, PaymentAttemptListResponse, PaymentStartRedirectionRequest,
PaymentsCreateIntentRequest, PaymentsGetIntentRequest, PaymentsIntentResponse, PaymentsRequest,
RecoveryPaymentsCreate, RecoveryPaymentsResponse,
};
#[cfg(feature = "v2")]
use crate::payment_methods::{
ListMethodsForPaymentMethodsRequest, PaymentMethodListResponseForSession,
};
use crate::{
payment_methods::{
self, ListCountriesCurrenciesRequest, ListCountriesCurrenciesResponse,
PaymentMethodCollectLinkRenderRequest, PaymentMethodCollectLinkRequest,
PaymentMethodCollectLinkResponse, PaymentMethodMigrateResponse, PaymentMethodResponse,
PaymentMethodUpdate,
},
payments::{
self, PaymentListConstraints, PaymentListFilters, PaymentListFiltersV2,
PaymentListResponse, PaymentsAggregateResponse, PaymentsSessionResponse,
RedirectionResponse,
},
};
#[cfg(feature = "v1")]
use crate::{
payment_methods::{PaymentMethodListRequest, PaymentMethodListResponse},
payments::{
ExtendedCardInfoResponse, PaymentIdType, PaymentListFilterConstraints,
PaymentListResponseV2, PaymentsApproveRequest, PaymentsCancelPostCaptureRequest,
PaymentsCancelRequest, PaymentsCaptureRequest, PaymentsCompleteAuthorizeRequest,
PaymentsDynamicTaxCalculationRequest, PaymentsDynamicTaxCalculationResponse,
PaymentsExtendAuthorizationRequest, PaymentsExternalAuthenticationRequest,
PaymentsExternalAuthenticationResponse, PaymentsIncrementalAuthorizationRequest,
PaymentsManualUpdateRequest, PaymentsManualUpdateResponse,
PaymentsPostSessionTokensRequest, PaymentsPostSessionTokensResponse, PaymentsRejectRequest,
PaymentsRetrieveRequest, PaymentsStartRequest, PaymentsUpdateMetadataRequest,
PaymentsUpdateMetadataResponse,
},
};
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsRetrieveRequest {
    /// A payment-scoped event is only emitted when the resource id is a
    /// payment intent id; other id kinds yield no event.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        if let PaymentIdType::PaymentIntentId(ref id) = self.resource_id {
            Some(ApiEventsType::Payment {
                payment_id: id.clone(),
            })
        } else {
            None
        }
    }
}
// v1 payment request/response types: each reports a `Payment` event keyed by
// its own `payment_id` field.
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsStartRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsCaptureRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.to_owned(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsCompleteAuthorizeRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsDynamicTaxCalculationRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsPostSessionTokensRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsUpdateMetadataRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsUpdateMetadataResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsPostSessionTokensResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
// No override: falls back to `ApiEventMetric`'s default implementation.
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsDynamicTaxCalculationResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsCancelRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsCancelPostCaptureRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsExtendAuthorizationRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsApproveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsRejectRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for payments::PaymentsRequest {
    /// Only requests that reference a concrete payment intent id produce a
    /// payment-scoped event; any other (or absent) id kind yields no event.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        if let Some(PaymentIdType::PaymentIntentId(ref id)) = self.payment_id {
            Some(ApiEventsType::Payment {
                payment_id: id.clone(),
            })
        } else {
            None
        }
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for payments::PaymentsEligibilityRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for payments::PaymentsEligibilityResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
// v2 request types below explicitly return `None`: no payment event is
// emitted for them from the request payload itself.
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentsCreateIntentRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::GiftCardBalanceCheckResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentsRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
// v2 types key the payment event off the global `id` / `payment_intent_id`
// fields rather than v1's `payment_id`.
impl ApiEventMetric for PaymentsGetIntentRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentAttemptListRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_intent_id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentAttemptListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentsIntentResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::PaymentsResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::PaymentsCancelRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::PaymentsCancelResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for payments::PaymentsResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
// `PaymentMethod` events: v1 keys off `payment_method_id` and reports
// `payment_method`/`payment_method_type`; v2 keys off the global `id` and
// reports `payment_method_type`/`payment_method_subtype`.
impl ApiEventMetric for PaymentMethodResponse {
    #[cfg(feature = "v1")]
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.payment_method_id.clone(),
            payment_method: self.payment_method,
            payment_method_type: self.payment_method_type,
        })
    }
    #[cfg(feature = "v2")]
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.id.clone(),
            payment_method_type: self.payment_method_type,
            payment_method_subtype: self.payment_method_subtype,
        })
    }
}
impl ApiEventMetric for PaymentMethodMigrateResponse {
    #[cfg(feature = "v1")]
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        // Migration responses delegate to the wrapped payment-method response.
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.payment_method_response.payment_method_id.clone(),
            payment_method: self.payment_method_response.payment_method,
            payment_method_type: self.payment_method_response.payment_method_type,
        })
    }
    #[cfg(feature = "v2")]
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.payment_method_response.id.clone(),
            payment_method_type: self.payment_method_response.payment_method_type,
            payment_method_subtype: self.payment_method_response.payment_method_subtype,
        })
    }
}
// Marker impl: uses the trait's default `get_api_event_type`.
impl ApiEventMetric for PaymentMethodUpdate {}
#[cfg(feature = "v1")]
impl ApiEventMetric for payment_methods::DefaultPaymentMethod {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        // Only the id is known here; method/type details are left unset.
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.payment_method_id.clone(),
            payment_method: None,
            payment_method_type: None,
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payment_methods::PaymentMethodDeleteResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.id.clone(),
            payment_method_type: None,
            payment_method_subtype: None,
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for payment_methods::PaymentMethodDeleteResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            payment_method_id: self.payment_method_id.clone(),
            payment_method: None,
            payment_method_type: None,
        })
    }
}
// Marker impl: uses the trait's default `get_api_event_type`.
impl ApiEventMetric for payment_methods::CustomerPaymentMethodsListResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentMethodListRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethodList {
            // The payment id is recovered from the client secret by taking
            // everything before the last "_secret_" separator, i.e. secrets
            // of the form "<payment_id>_secret_<suffix>".
            payment_id: self
                .client_secret
                .as_ref()
                .and_then(|cs| cs.rsplit_once("_secret_"))
                .map(|(pid, _)| pid.to_string()),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for ListMethodsForPaymentMethodsRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethodList {
            // Same client-secret parsing convention as the v1 list request.
            payment_id: self
                .client_secret
                .as_ref()
                .and_then(|cs| cs.rsplit_once("_secret_"))
                .map(|(pid, _)| pid.to_string()),
        })
    }
}
// Marker impls: use the trait's default `get_api_event_type`.
impl ApiEventMetric for ListCountriesCurrenciesRequest {}
impl ApiEventMetric for ListCountriesCurrenciesResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentMethodListResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for payment_methods::CustomerDefaultPaymentMethodResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethod {
            // Falls back to the id type's `Default` when no default payment
            // method is set.
            payment_method_id: self.default_payment_method_id.clone().unwrap_or_default(),
            payment_method: Some(self.payment_method),
            payment_method_type: self.payment_method_type,
        })
    }
}
impl ApiEventMetric for PaymentMethodCollectLinkRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        // Event only when the request already carries a collect-link id.
        self.pm_collect_link_id
            .as_ref()
            .map(|id| ApiEventsType::PaymentMethodCollectLink {
                link_id: id.clone(),
            })
    }
}
impl ApiEventMetric for PaymentMethodCollectLinkRenderRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethodCollectLink {
            link_id: self.pm_collect_link_id.clone(),
        })
    }
}
impl ApiEventMetric for PaymentMethodCollectLinkResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentMethodCollectLink {
            link_id: self.pm_collect_link_id.clone(),
        })
    }
}
// List/filter types are all classified as resource-list API events.
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentListFilterConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PaymentListFilters {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PaymentListFiltersV2 {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PaymentListConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PaymentListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for RecoveryPaymentsCreate {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for RecoveryPaymentsResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentListResponseV2 {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
impl ApiEventMetric for PaymentsAggregateResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
// Marker impl: uses the trait's default `get_api_event_type`.
impl ApiEventMetric for RedirectionResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsIncrementalAuthorizationRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsExternalAuthenticationResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsExternalAuthenticationRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for ExtendedCardInfoResponse {}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsManualUpdateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsManualUpdateResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
impl ApiEventMetric for PaymentsSessionResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.payment_id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentStartRedirectionRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::PaymentMethodListResponseForPayments {
    // Payment id would be populated by the request
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        None
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentMethodListResponseForSession {}
#[cfg(feature = "v2")]
impl ApiEventMetric for payments::PaymentsCaptureResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Payment {
            payment_id: self.id.clone(),
        })
    }
}
// File: crates/api_models/src/events/recon.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use masking::PeekInterface;
use crate::recon::{
ReconStatusResponse, ReconTokenResponse, ReconUpdateMerchantRequest, VerifyTokenResponse,
};
// Reconciliation types all map to the `Recon` event category, except the
// token-verification response which is attributed to the verifying user.
impl ApiEventMetric for ReconUpdateMerchantRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Recon)
    }
}
impl ApiEventMetric for ReconTokenResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Recon)
    }
}
impl ApiEventMetric for ReconStatusResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Recon)
    }
}
impl ApiEventMetric for VerifyTokenResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::User {
            // The masked email is peeked and used as the user identifier here.
            user_id: self.user_email.peek().to_string(),
        })
    }
}
// File: crates/api_models/src/events/customer.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::customers::{
CustomerDeleteResponse, CustomerListRequestWithConstraints, CustomerListResponse,
CustomerRequest, CustomerResponse, CustomerUpdateRequestInternal,
};
// Customer events: v1 identifies customers by a merchant reference id (which
// may be absent), while v2 identifies them by the global customer `id`.
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerDeleteResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            customer_id: self.customer_id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerDeleteResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            customer_id: Some(self.id.clone()),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        // Event only when a merchant reference id is present on the request.
        self.get_merchant_reference_id()
            .clone()
            .map(|cid| ApiEventsType::Customer { customer_id: cid })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        // The id is server-generated in v2, so create requests carry none.
        Some(ApiEventsType::Customer { customer_id: None })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        self.get_merchant_reference_id()
            .clone()
            .map(|cid| ApiEventsType::Customer { customer_id: cid })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            customer_id: Some(self.id.clone()),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerUpdateRequestInternal {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            customer_id: self.customer_id.clone(),
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerUpdateRequestInternal {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            customer_id: Some(self.id.clone()),
        })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerListRequestWithConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer {
            // `?` short-circuits to `None` (no event) when no customer id
            // constraint is present.
            customer_id: self.customer_id.clone()?,
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerListRequestWithConstraints {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Customer { customer_id: None })
    }
}
#[cfg(feature = "v1")]
impl ApiEventMetric for CustomerListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for CustomerListResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ResourceListAPI)
    }
}
// File: crates/api_models/src/events/gsm.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::gsm;
// All GSM request/response types map uniformly to the `Gsm` event category.
impl ApiEventMetric for gsm::GsmCreateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
impl ApiEventMetric for gsm::GsmUpdateRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
impl ApiEventMetric for gsm::GsmRetrieveRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
impl ApiEventMetric for gsm::GsmDeleteRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
impl ApiEventMetric for gsm::GsmDeleteResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
impl ApiEventMetric for gsm::GsmResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Gsm)
    }
}
// File: crates/api_models/src/events/revenue_recovery.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::process_tracker::revenue_recovery::{
RevenueRecoveryId, RevenueRecoveryResponse, RevenueRecoveryRetriggerRequest,
};
// Revenue-recovery types map uniformly to the `ProcessTracker` category.
impl ApiEventMetric for RevenueRecoveryResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ProcessTracker)
    }
}
impl ApiEventMetric for RevenueRecoveryId {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ProcessTracker)
    }
}
impl ApiEventMetric for RevenueRecoveryRetriggerRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::ProcessTracker)
    }
}
// File: crates/api_models/src/events/routing.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::routing::{
ContractBasedRoutingPayloadWrapper, ContractBasedRoutingSetupPayloadWrapper,
CreateDynamicRoutingWrapper, DynamicRoutingUpdateConfigQuery, EliminationRoutingPayloadWrapper,
LinkedRoutingConfigRetrieveResponse, MerchantRoutingAlgorithm, ProfileDefaultRoutingConfig,
RoutingAlgorithmId, RoutingConfigRequest, RoutingDictionaryRecord, RoutingKind,
RoutingLinkWrapper, RoutingPayloadWrapper, RoutingRetrieveLinkQuery,
RoutingRetrieveLinkQueryWrapper, RoutingRetrieveQuery, RoutingVolumeSplit,
RoutingVolumeSplitResponse, RoutingVolumeSplitWrapper, RuleMigrationError, RuleMigrationQuery,
RuleMigrationResponse, RuleMigrationResult, SuccessBasedRoutingConfig,
SuccessBasedRoutingPayloadWrapper, ToggleDynamicRoutingPath, ToggleDynamicRoutingQuery,
ToggleDynamicRoutingWrapper,
};
// Every routing-related request/response/wrapper type (including the
// open-router types at the end) maps uniformly to the `Routing` event
// category; none of them carry per-resource identifiers into the event.
impl ApiEventMetric for RoutingKind {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for MerchantRoutingAlgorithm {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingAlgorithmId {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingDictionaryRecord {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for LinkedRoutingConfigRetrieveResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingPayloadWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ProfileDefaultRoutingConfig {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingRetrieveQuery {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingConfigRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingRetrieveLinkQuery {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingLinkWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingRetrieveLinkQueryWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ToggleDynamicRoutingQuery {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for SuccessBasedRoutingConfig {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for SuccessBasedRoutingPayloadWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for EliminationRoutingPayloadWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ContractBasedRoutingPayloadWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ContractBasedRoutingSetupPayloadWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ToggleDynamicRoutingWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for CreateDynamicRoutingWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for DynamicRoutingUpdateConfigQuery {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingVolumeSplitWrapper {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for ToggleDynamicRoutingPath {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingVolumeSplitResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RoutingVolumeSplit {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RuleMigrationQuery {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RuleMigrationResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RuleMigrationResult {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for RuleMigrationError {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for crate::open_router::DecideGatewayResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for crate::open_router::OpenRouterDecideGatewayRequest {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for crate::open_router::UpdateScorePayload {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
impl ApiEventMetric for crate::open_router::UpdateScoreResponse {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Routing)
    }
}
// File: crates/api_models/src/events/apple_pay_certificates_migration.rs
use common_utils::events::ApiEventMetric;
use crate::apple_pay_certificates_migration::ApplePayCertificatesMigrationResponse;
// This file imports only `ApiEventMetric`, so the event type is referenced
// by its full path.
impl ApiEventMetric for ApplePayCertificatesMigrationResponse {
    fn get_api_event_type(&self) -> Option<common_utils::events::ApiEventsType> {
        Some(common_utils::events::ApiEventsType::ApplePayCertificatesMigration)
    }
}
// File: crates/api_models/src/events/connector_onboarding.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::connector_onboarding::{
ActionUrlRequest, ActionUrlResponse, OnboardingStatus, OnboardingSyncRequest,
ResetTrackingIdRequest,
};
// Bulk-implements `ApiEventMetric` for all connector-onboarding types,
// classifying them under the `Miscellaneous` event category.
common_utils::impl_api_event_type!(
    Miscellaneous,
    (
        ActionUrlRequest,
        ActionUrlResponse,
        OnboardingSyncRequest,
        OnboardingStatus,
        ResetTrackingIdRequest
    )
);
// File: crates/api_models/src/events/chat.rs
use common_utils::events::{ApiEventMetric, ApiEventsType};
use crate::chat::{ChatListRequest, ChatListResponse, ChatRequest, ChatResponse};
// Bulk-implements `ApiEventMetric` for the chat types under the `Chat`
// event category.
common_utils::impl_api_event_type!(
    Chat,
    (ChatRequest, ChatResponse, ChatListRequest, ChatListResponse)
);
</module>
|
{
"crate": "api_models",
"file": null,
"files": [
"crates/api_models/src/events/locker_migration.rs",
"crates/api_models/src/events/dispute.rs",
"crates/api_models/src/events/payouts.rs",
"crates/api_models/src/events/user.rs",
"crates/api_models/src/events/user_role.rs",
"crates/api_models/src/events/refund.rs",
"crates/api_models/src/events/external_service_auth.rs",
"crates/api_models/src/events/payment.rs",
"crates/api_models/src/events/recon.rs",
"crates/api_models/src/events/customer.rs",
"crates/api_models/src/events/gsm.rs",
"crates/api_models/src/events/revenue_recovery.rs",
"crates/api_models/src/events/routing.rs",
"crates/api_models/src/events/apple_pay_certificates_migration.rs",
"crates/api_models/src/events/connector_onboarding.rs",
"crates/api_models/src/events/chat.rs"
],
"module": "crates/api_models/src/events",
"num_files": 16,
"token_count": 9738
}
|
module_1882825496524113742
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: api_models
Module: crates/api_models/src/analytics
Files: 13
</path>
<module>
// File: crates/api_models/src/analytics/payment_intents.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use common_utils::id_type;
use super::{ForexMetric, NameDescription, TimeRange};
use crate::enums::{
AuthenticationType, Connector, Currency, IntentStatus, PaymentMethod, PaymentMethodType,
};
/// Filter set accepted by payment-intent analytics queries.
///
/// Every field is a list of allowed values for one dimension; `#[serde(default)]`
/// makes each field optional in the payload, with an empty list meaning
/// "do not filter on this dimension".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct PaymentIntentFilters {
    #[serde(default)]
    pub status: Vec<IntentStatus>,
    #[serde(default)]
    pub currency: Vec<Currency>,
    #[serde(default)]
    pub profile_id: Vec<id_type::ProfileId>,
    #[serde(default)]
    pub connector: Vec<Connector>,
    #[serde(default)]
    pub auth_type: Vec<AuthenticationType>,
    #[serde(default)]
    pub payment_method: Vec<PaymentMethod>,
    #[serde(default)]
    pub payment_method_type: Vec<PaymentMethodType>,
    #[serde(default)]
    pub card_network: Vec<String>,
    #[serde(default)]
    pub merchant_id: Vec<id_type::MerchantId>,
    #[serde(default)]
    pub card_last_4: Vec<String>,
    #[serde(default)]
    pub card_issuer: Vec<String>,
    #[serde(default)]
    pub error_reason: Vec<String>,
    #[serde(default)]
    pub customer_id: Vec<id_type::CustomerId>,
}
/// Dimensions along which payment-intent metrics can be grouped.
///
/// Serialized and displayed in snake_case; a few variants carry explicit
/// renames so their wire/display names differ from the Rust identifiers
/// (e.g. `PaymentIntentStatus` appears as `status`).
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum PaymentIntentDimensions {
    #[strum(serialize = "status")]
    #[serde(rename = "status")]
    PaymentIntentStatus,
    Currency,
    ProfileId,
    Connector,
    #[strum(serialize = "authentication_type")]
    #[serde(rename = "authentication_type")]
    AuthType,
    PaymentMethod,
    PaymentMethodType,
    CardNetwork,
    MerchantId,
    #[strum(serialize = "card_last_4")]
    #[serde(rename = "card_last_4")]
    CardLast4,
    CardIssuer,
    ErrorReason,
}
/// Metrics computable over payment intents; the `Sessionized*` variants are
/// the sessionized counterparts of the plain metrics.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum PaymentIntentMetrics {
    SuccessfulSmartRetries,
    TotalSmartRetries,
    SmartRetriedAmount,
    PaymentIntentCount,
    PaymentsSuccessRate,
    PaymentProcessedAmount,
    SessionizedSuccessfulSmartRetries,
    SessionizedTotalSmartRetries,
    SessionizedSmartRetriedAmount,
    SessionizedPaymentIntentCount,
    SessionizedPaymentsSuccessRate,
    SessionizedPaymentProcessedAmount,
    SessionizedPaymentsDistribution,
}
impl ForexMetric for PaymentIntentMetrics {
    /// Only the amount-denominated metrics are forex metrics; counts and
    /// rates are currency-independent.
    fn is_forex_metric(&self) -> bool {
        matches!(
            self,
            Self::PaymentProcessedAmount
                | Self::SmartRetriedAmount
                | Self::SessionizedPaymentProcessedAmount
                | Self::SessionizedSmartRetriedAmount
        )
    }
}
/// One row of an error breakdown: a failure reason with its absolute count
/// and share (percentage) of the total.
#[derive(Debug, Default, serde::Serialize)]
pub struct ErrorResult {
    pub reason: String,
    pub count: i64,
    pub percentage: f64,
}
/// Marker types, one per metric, for metric-specific behaviour.
pub mod metric_behaviour {
    pub struct SuccessfulSmartRetries;
    pub struct TotalSmartRetries;
    pub struct SmartRetriedAmount;
    pub struct PaymentIntentCount;
    pub struct PaymentsSuccessRate;
}
impl From<PaymentIntentMetrics> for NameDescription {
    /// Converts a metric into its name/description pair: the name comes from
    /// the strum-derived `Display` impl, the description is left empty.
    fn from(value: PaymentIntentMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<PaymentIntentDimensions> for NameDescription {
    /// Same convention as the metrics conversion: display name, empty
    /// description.
    fn from(value: PaymentIntentDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Identifies one metrics bucket: the dimension values plus the time bucket
/// the metrics were aggregated over.
///
/// Note the serde renames: `time_bucket` is serialized as `"time_range"`,
/// while `start_time` is serialized as `"time_bucket"` (ISO-8601 formatted).
#[derive(Debug, serde::Serialize, Eq)]
pub struct PaymentIntentMetricsBucketIdentifier {
    pub status: Option<IntentStatus>,
    pub currency: Option<Currency>,
    pub profile_id: Option<String>,
    pub connector: Option<String>,
    pub auth_type: Option<AuthenticationType>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl PaymentIntentMetricsBucketIdentifier {
    /// Builds an identifier from the optional dimension values and a
    /// normalized time range; `start_time` is copied out of the range so it
    /// can be serialized separately.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        status: Option<IntentStatus>,
        currency: Option<Currency>,
        profile_id: Option<String>,
        connector: Option<String>,
        auth_type: Option<AuthenticationType>,
        payment_method: Option<String>,
        payment_method_type: Option<String>,
        card_network: Option<String>,
        merchant_id: Option<String>,
        card_last_4: Option<String>,
        card_issuer: Option<String>,
        error_reason: Option<String>,
        normalized_time_range: TimeRange,
    ) -> Self {
        Self {
            status,
            currency,
            profile_id,
            connector,
            auth_type,
            payment_method,
            payment_method_type,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            time_bucket: normalized_time_range,
            start_time: normalized_time_range.start_time,
        }
    }
}
impl Hash for PaymentIntentMetricsBucketIdentifier {
    fn hash<H: Hasher>(&self, state: &mut H) {
        // Enum-valued fields (`status`, `auth_type`) are hashed via their
        // `Display` string form; the remaining fields hash directly.
        // `start_time` is intentionally excluded — it is derived from
        // `time_bucket` in `new`, which is already hashed.
        self.status.map(|i| i.to_string()).hash(state);
        self.currency.hash(state);
        self.profile_id.hash(state);
        self.connector.hash(state);
        self.auth_type.map(|i| i.to_string()).hash(state);
        self.payment_method.hash(state);
        self.payment_method_type.hash(state);
        self.card_network.hash(state);
        self.merchant_id.hash(state);
        self.card_last_4.hash(state);
        self.card_issuer.hash(state);
        self.error_reason.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for PaymentIntentMetricsBucketIdentifier {
    fn eq(&self, other: &Self) -> bool {
        // Equality is defined as "same 64-bit hash": both sides are fed
        // through a fresh DefaultHasher and the digests are compared.
        // NOTE(review): a hash collision would make two distinct identifiers
        // compare equal — acceptable only if callers tolerate that; confirm.
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values computed for one payment-intent bucket; every value is
/// optional, since only the requested metrics are populated.
#[derive(Debug, serde::Serialize)]
pub struct PaymentIntentMetricsBucketValue {
    pub successful_smart_retries: Option<u64>,
    pub total_smart_retries: Option<u64>,
    pub smart_retried_amount: Option<u64>,
    pub smart_retried_amount_in_usd: Option<u64>,
    pub smart_retried_amount_without_smart_retries: Option<u64>,
    pub smart_retried_amount_without_smart_retries_in_usd: Option<u64>,
    pub payment_intent_count: Option<u64>,
    pub successful_payments: Option<u32>,
    pub successful_payments_without_smart_retries: Option<u32>,
    pub total_payments: Option<u32>,
    pub payments_success_rate: Option<f64>,
    pub payments_success_rate_without_smart_retries: Option<f64>,
    pub payment_processed_amount: Option<u64>,
    pub payment_processed_amount_in_usd: Option<u64>,
    pub payment_processed_count: Option<u64>,
    pub payment_processed_amount_without_smart_retries: Option<u64>,
    pub payment_processed_amount_without_smart_retries_in_usd: Option<u64>,
    pub payment_processed_count_without_smart_retries: Option<u64>,
    pub payments_success_rate_distribution_without_smart_retries: Option<f64>,
    pub payments_failure_rate_distribution_without_smart_retries: Option<f64>,
}
/// One response bucket: the metric values and the identifying dimensions,
/// flattened together into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
    #[serde(flatten)]
    pub values: PaymentIntentMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: PaymentIntentMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/api_event.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use super::{NameDescription, TimeRange};
/// Request for API logs; the query parameters are flattened into a
/// `QueryType` selected by its `type` tag.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct ApiLogsRequest {
    #[serde(flatten)]
    pub query_param: QueryType,
}
/// Broad categories of API-log filters.
pub enum FilterType {
    ApiCountFilter,
    LatencyFilter,
    StatusCodeFilter,
}
/// Resource scope for an API-log query; (de)serialized with an internal
/// `type` tag. Every variant is anchored to a payment id.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(tag = "type")]
pub enum QueryType {
    Payment {
        payment_id: common_utils::id_type::PaymentId,
    },
    Refund {
        payment_id: common_utils::id_type::PaymentId,
        refund_id: String,
    },
    Dispute {
        payment_id: common_utils::id_type::PaymentId,
        dispute_id: String,
    },
}
/// Dimensions for grouping API-event metrics.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum ApiEventDimensions {
    // Do not change the order of these enums
    // Consult the Dashboard FE folks since these also affects the order of metrics on FE
    StatusCode,
    FlowType,
    ApiFlow,
}
impl From<ApiEventDimensions> for NameDescription {
    /// Name comes from the strum `Display` impl; description is left empty.
    fn from(value: ApiEventDimensions) -> Self {
        Self {
            name: value.to_string(),
            desc: String::new(),
        }
    }
}
/// Filter values for API-event queries; an empty list on a field means
/// no filtering on that dimension.
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct ApiEventFilters {
    pub status_code: Vec<u64>,
    pub flow_type: Vec<String>,
    pub api_flow: Vec<String>,
}
/// Metrics computable over API events.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum ApiEventMetrics {
    Latency,
    ApiCount,
    StatusCodeCount,
}
impl From<ApiEventMetrics> for NameDescription {
    /// Name comes from the strum `Display` impl; description is left empty.
    fn from(value: ApiEventMetrics) -> Self {
        Self {
            name: value.to_string(),
            desc: String::new(),
        }
    }
}
#[derive(Debug, serde::Serialize, Eq)]
pub struct ApiEventMetricsBucketIdentifier {
#[serde(rename = "time_range")]
pub time_bucket: TimeRange,
// Coz FE sucks
#[serde(rename = "time_bucket")]
#[serde(with = "common_utils::custom_serde::iso8601custom")]
pub start_time: time::PrimitiveDateTime,
}
impl ApiEventMetricsBucketIdentifier {
    /// Wraps a normalized time range, copying out its start instant so it
    /// can be serialized separately as `time_bucket`.
    pub fn new(normalized_time_range: TimeRange) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
impl Hash for ApiEventMetricsBucketIdentifier {
    // Only the time range participates in the hash; `start_time` is derived
    // from it in `new`, so hashing it again would be redundant.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.time_bucket.hash(state);
    }
}
impl PartialEq for ApiEventMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest": both sides are run
    /// through a `DefaultHasher` and the outputs compared.
    // NOTE(review): hash-based equality means two distinct values could
    // compare equal on a hash collision. This mirrors every other bucket
    // identifier in this crate, so change them together if at all.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values for one API-event bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct ApiEventMetricsBucketValue {
    pub latency: Option<u64>,
    pub api_count: Option<u64>,
    pub status_code_count: Option<u64>,
}
/// One response row: metric values and their bucket identifier flattened
/// into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct ApiMetricsBucketResponse {
    #[serde(flatten)]
    pub values: ApiEventMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: ApiEventMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/outgoing_webhook_event.rs
/// Request body for fetching outgoing-webhook logs; `payment_id` is
/// mandatory, the remaining identifiers optionally narrow the lookup.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct OutgoingWebhookLogsRequest {
    pub payment_id: common_utils::id_type::PaymentId,
    pub event_id: Option<String>,
    pub refund_id: Option<String>,
    pub dispute_id: Option<String>,
    pub mandate_id: Option<String>,
    pub payment_method_id: Option<String>,
    pub attempt_id: Option<String>,
}
// File: crates/api_models/src/analytics/refunds.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use common_utils::id_type;
use crate::enums::{Currency, RefundStatus};
/// How a refund was executed, used as a refund-analytics dimension.
#[derive(
    Clone,
    Copy,
    Debug,
    Hash,
    Eq,
    PartialEq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumString,
)]
// TODO RefundType api_models_oss need to mapped to storage_model
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum RefundType {
    InstantRefund,
    RegularRefund,
    RetryRefund,
}
use super::{ForexMetric, NameDescription, TimeRange};
/// Filter values for refund analytics; each `Vec` narrows matches along one
/// [`RefundDimensions`] axis, and an empty `Vec` (the serde default) means
/// "no filter on this axis".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct RefundFilters {
    #[serde(default)]
    pub currency: Vec<Currency>,
    #[serde(default)]
    pub refund_status: Vec<RefundStatus>,
    #[serde(default)]
    pub connector: Vec<String>,
    #[serde(default)]
    pub refund_type: Vec<RefundType>,
    #[serde(default)]
    pub profile_id: Vec<id_type::ProfileId>,
    #[serde(default)]
    pub refund_reason: Vec<String>,
    #[serde(default)]
    pub refund_error_message: Vec<String>,
}
/// Dimensions along which refund metrics can be grouped and filtered,
/// rendered in snake_case by both serde and strum.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum RefundDimensions {
    Currency,
    RefundStatus,
    Connector,
    RefundType,
    ProfileId,
    RefundReason,
    RefundErrorMessage,
}
/// Metrics computable over refunds. The `Sessionized*` variants are the
/// session-level counterparts of the plain attempt-level metrics.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum RefundMetrics {
    RefundSuccessRate,
    RefundCount,
    RefundSuccessCount,
    RefundProcessedAmount,
    SessionizedRefundSuccessRate,
    SessionizedRefundCount,
    SessionizedRefundSuccessCount,
    SessionizedRefundProcessedAmount,
    SessionizedRefundReason,
    SessionizedRefundErrorMessage,
}
/// One row of the refund-reason distribution: a reason string with its
/// occurrence count and share of the total.
#[derive(Debug, Default, serde::Serialize)]
pub struct ReasonsResult {
    pub reason: String,
    pub count: i64,
    pub percentage: f64,
}
/// One row of the refund-error-message distribution: an error message with
/// its occurrence count and share of the total.
#[derive(Debug, Default, serde::Serialize)]
pub struct ErrorMessagesResult {
    pub error_message: String,
    pub count: i64,
    pub percentage: f64,
}
/// Distribution-style refund metrics. The explicit `strum` serializations
/// drop the `sessionized_` prefix so the string form matches the underlying
/// dimension name.
#[derive(
    Clone,
    Copy,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum RefundDistributions {
    #[strum(serialize = "refund_reason")]
    SessionizedRefundReason,
    #[strum(serialize = "refund_error_message")]
    SessionizedRefundErrorMessage,
}
impl ForexMetric for RefundMetrics {
    /// Only the processed-amount metrics (plain and sessionized) carry
    /// currency amounts and therefore need forex conversion.
    fn is_forex_metric(&self) -> bool {
        matches!(
            self,
            Self::RefundProcessedAmount | Self::SessionizedRefundProcessedAmount
        )
    }
}
/// Marker types, one per refund metric behaviour; used as zero-sized type
/// tags rather than runtime values.
pub mod metric_behaviour {
    pub struct RefundSuccessRate;
    pub struct RefundCount;
    pub struct RefundSuccessCount;
    pub struct RefundProcessedAmount;
}
impl From<RefundMetrics> for NameDescription {
    /// Builds a `NameDescription` whose name is the metric's snake_case
    /// display form, with an empty description.
    fn from(value: RefundMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<RefundDimensions> for NameDescription {
    /// Builds a `NameDescription` whose name is the dimension's snake_case
    /// display form, with an empty description.
    fn from(value: RefundDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Identifies one bucket of refund metrics: the optional dimension values
/// it was grouped by plus its time range. As with the other bucket
/// identifiers, the range serializes as `time_range` and its start instant
/// as `time_bucket` for the dashboard's expected shape.
#[derive(Debug, serde::Serialize, Eq)]
pub struct RefundMetricsBucketIdentifier {
    pub currency: Option<Currency>,
    pub refund_status: Option<String>,
    pub connector: Option<String>,
    pub refund_type: Option<String>,
    pub profile_id: Option<String>,
    pub refund_reason: Option<String>,
    pub refund_error_message: Option<String>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl Hash for RefundMetricsBucketIdentifier {
    // Hashes every dimension plus the time range; `start_time` is excluded
    // because it is derived from `time_bucket` in `new`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.currency.hash(state);
        self.refund_status.hash(state);
        self.connector.hash(state);
        self.refund_type.hash(state);
        self.profile_id.hash(state);
        self.refund_reason.hash(state);
        self.refund_error_message.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for RefundMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest" via `DefaultHasher`.
    // NOTE(review): collision-prone by construction; same pattern as every
    // other bucket identifier in this crate.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
impl RefundMetricsBucketIdentifier {
    /// Assembles a bucket identifier from the individual dimension values
    /// and the normalized time range, copying the range's start instant out
    /// for the separately serialized `start_time` field.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        currency: Option<Currency>,
        refund_status: Option<String>,
        connector: Option<String>,
        refund_type: Option<String>,
        profile_id: Option<String>,
        refund_reason: Option<String>,
        refund_error_message: Option<String>,
        normalized_time_range: TimeRange,
    ) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            currency,
            refund_status,
            connector,
            refund_type,
            profile_id,
            refund_reason,
            refund_error_message,
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
/// Metric values for one refund bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct RefundMetricsBucketValue {
    pub successful_refunds: Option<u32>,
    pub total_refunds: Option<u32>,
    pub refund_success_rate: Option<f64>,
    pub refund_count: Option<u64>,
    pub refund_success_count: Option<u64>,
    pub refund_processed_amount: Option<u64>,
    pub refund_processed_amount_in_usd: Option<u64>,
    pub refund_processed_count: Option<u64>,
    pub refund_reason_distribution: Option<Vec<ReasonsResult>>,
    pub refund_error_message_distribution: Option<Vec<ErrorMessagesResult>>,
    pub refund_reason_count: Option<u64>,
    pub refund_error_message_count: Option<u64>,
}
/// One response row: refund metric values and their bucket identifier
/// flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct RefundMetricsBucketResponse {
    #[serde(flatten)]
    pub values: RefundMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: RefundMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/payments.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use common_utils::id_type;
use super::{ForexMetric, NameDescription, TimeRange};
use crate::enums::{
AttemptStatus, AuthenticationType, CardNetwork, Connector, Currency, PaymentMethod,
PaymentMethodType, RoutingApproach,
};
/// Filter values for payment analytics; each `Vec` narrows matches along
/// one [`PaymentDimensions`] axis, and an empty `Vec` (the serde default)
/// means "no filter on this axis".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct PaymentFilters {
    #[serde(default)]
    pub currency: Vec<Currency>,
    #[serde(default)]
    pub status: Vec<AttemptStatus>,
    #[serde(default)]
    pub connector: Vec<Connector>,
    #[serde(default)]
    pub auth_type: Vec<AuthenticationType>,
    #[serde(default)]
    pub payment_method: Vec<PaymentMethod>,
    #[serde(default)]
    pub payment_method_type: Vec<PaymentMethodType>,
    #[serde(default)]
    pub client_source: Vec<String>,
    #[serde(default)]
    pub client_version: Vec<String>,
    #[serde(default)]
    pub card_network: Vec<CardNetwork>,
    #[serde(default)]
    pub profile_id: Vec<id_type::ProfileId>,
    #[serde(default)]
    pub merchant_id: Vec<id_type::MerchantId>,
    #[serde(default)]
    pub card_last_4: Vec<String>,
    #[serde(default)]
    pub card_issuer: Vec<String>,
    #[serde(default)]
    pub error_reason: Vec<String>,
    #[serde(default)]
    pub first_attempt: Vec<bool>,
    #[serde(default)]
    pub routing_approach: Vec<RoutingApproach>,
    #[serde(default)]
    pub signature_network: Vec<String>,
    #[serde(default)]
    pub is_issuer_regulated: Vec<bool>,
    #[serde(default)]
    pub is_debit_routed: Vec<bool>,
}
/// Dimensions along which payment metrics can be grouped and filtered.
/// Some variants carry explicit renames where the API's string form differs
/// from the variant name (e.g. `AuthType` → `authentication_type`).
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum PaymentDimensions {
    // Do not change the order of these enums
    // Consult the Dashboard FE folks since these also affects the order of metrics on FE
    Connector,
    PaymentMethod,
    PaymentMethodType,
    Currency,
    #[strum(serialize = "authentication_type")]
    #[serde(rename = "authentication_type")]
    AuthType,
    #[strum(serialize = "status")]
    #[serde(rename = "status")]
    PaymentStatus,
    ClientSource,
    ClientVersion,
    ProfileId,
    CardNetwork,
    MerchantId,
    #[strum(serialize = "card_last_4")]
    #[serde(rename = "card_last_4")]
    CardLast4,
    CardIssuer,
    ErrorReason,
    RoutingApproach,
    SignatureNetwork,
    IsIssuerRegulated,
    IsDebitRouted,
}
/// Metrics computable over payments. The `Sessionized*` variants are the
/// session-level counterparts of the plain attempt-level metrics.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum PaymentMetrics {
    PaymentSuccessRate,
    PaymentCount,
    PaymentSuccessCount,
    PaymentProcessedAmount,
    AvgTicketSize,
    RetriesCount,
    ConnectorSuccessRate,
    DebitRouting,
    SessionizedPaymentSuccessRate,
    SessionizedPaymentCount,
    SessionizedPaymentSuccessCount,
    SessionizedPaymentProcessedAmount,
    SessionizedAvgTicketSize,
    SessionizedRetriesCount,
    SessionizedConnectorSuccessRate,
    SessionizedDebitRouting,
    PaymentsDistribution,
    FailureReasons,
}
impl ForexMetric for PaymentMetrics {
    /// The amount-carrying metrics (processed amount, ticket size, debit
    /// routing savings — plain and sessionized) need forex conversion.
    fn is_forex_metric(&self) -> bool {
        matches!(
            self,
            Self::PaymentProcessedAmount
                | Self::AvgTicketSize
                | Self::DebitRouting
                | Self::SessionizedPaymentProcessedAmount
                | Self::SessionizedAvgTicketSize
                | Self::SessionizedDebitRouting,
        )
    }
}
/// One row of the payment error-message distribution: a reason with its
/// occurrence count and share of the total.
#[derive(Debug, Default, serde::Serialize)]
pub struct ErrorResult {
    pub reason: String,
    pub count: i64,
    pub percentage: f64,
}
/// Distribution-style payment metrics; the explicit strum rename maps the
/// variant onto the `error_message` column name.
#[derive(
    Clone,
    Copy,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum PaymentDistributions {
    #[strum(serialize = "error_message")]
    PaymentErrorMessage,
}
/// Marker types, one per payment metric behaviour; used as zero-sized type
/// tags rather than runtime values.
pub mod metric_behaviour {
    pub struct PaymentSuccessRate;
    pub struct PaymentCount;
    pub struct PaymentSuccessCount;
    pub struct PaymentProcessedAmount;
    pub struct AvgTicketSize;
}
impl From<PaymentMetrics> for NameDescription {
    /// Builds a `NameDescription` whose name is the metric's snake_case
    /// display form, with an empty description.
    fn from(value: PaymentMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<PaymentDimensions> for NameDescription {
    /// Builds a `NameDescription` whose name is the dimension's snake_case
    /// display form, with an empty description.
    fn from(value: PaymentDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Identifies one bucket of payment metrics: the optional dimension values
/// it was grouped by plus its time range. The range serializes as
/// `time_range` and its start instant as `time_bucket` for the dashboard's
/// expected shape.
#[derive(Debug, serde::Serialize, Eq)]
pub struct PaymentMetricsBucketIdentifier {
    pub currency: Option<Currency>,
    pub status: Option<AttemptStatus>,
    pub connector: Option<String>,
    #[serde(rename = "authentication_type")]
    pub auth_type: Option<AuthenticationType>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub routing_approach: Option<RoutingApproach>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    // Coz FE sucks
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl PaymentMetricsBucketIdentifier {
    /// Assembles a bucket identifier from the individual dimension values
    /// and the normalized time range, copying the range's start instant out
    /// for the separately serialized `start_time` field.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        currency: Option<Currency>,
        status: Option<AttemptStatus>,
        connector: Option<String>,
        auth_type: Option<AuthenticationType>,
        payment_method: Option<String>,
        payment_method_type: Option<String>,
        client_source: Option<String>,
        client_version: Option<String>,
        profile_id: Option<String>,
        card_network: Option<String>,
        merchant_id: Option<String>,
        card_last_4: Option<String>,
        card_issuer: Option<String>,
        error_reason: Option<String>,
        routing_approach: Option<RoutingApproach>,
        signature_network: Option<String>,
        is_issuer_regulated: Option<bool>,
        is_debit_routed: Option<bool>,
        normalized_time_range: TimeRange,
    ) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            currency,
            status,
            connector,
            auth_type,
            payment_method,
            payment_method_type,
            client_source,
            client_version,
            profile_id,
            card_network,
            merchant_id,
            card_last_4,
            card_issuer,
            error_reason,
            routing_approach,
            signature_network,
            is_issuer_regulated,
            is_debit_routed,
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
impl Hash for PaymentMetricsBucketIdentifier {
    /// Hashes every dimension plus the time range; `start_time` is excluded
    /// because it is derived from `time_bucket` in `new`.
    ///
    /// The enum-typed dimensions (`status`, `auth_type`, `routing_approach`)
    /// are hashed through their string form — presumably because those enums
    /// do not implement `Hash` themselves.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.currency.hash(state);
        self.status.map(|i| i.to_string()).hash(state);
        self.connector.hash(state);
        self.auth_type.map(|i| i.to_string()).hash(state);
        self.payment_method.hash(state);
        self.payment_method_type.hash(state);
        self.client_source.hash(state);
        self.client_version.hash(state);
        self.profile_id.hash(state);
        self.card_network.hash(state);
        self.merchant_id.hash(state);
        self.card_last_4.hash(state);
        self.card_issuer.hash(state);
        self.error_reason.hash(state);
        // `as_ref` borrows instead of cloning the whole `RoutingApproach`
        // just to render it; the resulting `Option<String>` hash is
        // identical to the previous `.clone().map(...)` form.
        self.routing_approach
            .as_ref()
            .map(|i| i.to_string())
            .hash(state);
        self.signature_network.hash(state);
        self.is_issuer_regulated.hash(state);
        self.is_debit_routed.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for PaymentMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest" via `DefaultHasher`.
    // NOTE(review): collision-prone by construction; same pattern as every
    // other bucket identifier in this crate.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values for one payment bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct PaymentMetricsBucketValue {
    pub payment_success_rate: Option<f64>,
    pub payment_count: Option<u64>,
    pub payment_success_count: Option<u64>,
    pub payment_processed_amount: Option<u64>,
    pub payment_processed_amount_in_usd: Option<u64>,
    pub payment_processed_count: Option<u64>,
    pub payment_processed_amount_without_smart_retries: Option<u64>,
    pub payment_processed_amount_without_smart_retries_usd: Option<u64>,
    pub payment_processed_count_without_smart_retries: Option<u64>,
    pub avg_ticket_size: Option<f64>,
    pub payment_error_message: Option<Vec<ErrorResult>>,
    pub retries_count: Option<u64>,
    pub retries_amount_processed: Option<u64>,
    pub connector_success_rate: Option<f64>,
    pub payments_success_rate_distribution: Option<f64>,
    pub payments_success_rate_distribution_without_smart_retries: Option<f64>,
    pub payments_success_rate_distribution_with_only_retries: Option<f64>,
    pub payments_failure_rate_distribution: Option<f64>,
    pub payments_failure_rate_distribution_without_smart_retries: Option<f64>,
    pub payments_failure_rate_distribution_with_only_retries: Option<f64>,
    pub failure_reason_count: Option<u64>,
    pub failure_reason_count_without_smart_retries: Option<u64>,
    pub debit_routed_transaction_count: Option<u64>,
    pub debit_routing_savings: Option<u64>,
    pub debit_routing_savings_in_usd: Option<u64>,
}
/// One response row: payment metric values and their bucket identifier
/// flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
    #[serde(flatten)]
    pub values: PaymentMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: PaymentMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/auth_events.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use common_enums::{
AuthenticationConnectors, AuthenticationStatus, Currency, DecoupledAuthenticationType,
TransactionStatus,
};
use super::{NameDescription, TimeRange};
/// Filter values for authentication-event analytics; each `Vec` narrows
/// matches along one [`AuthEventDimensions`] axis, and an empty `Vec` (the
/// serde default) means "no filter on this axis".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct AuthEventFilters {
    #[serde(default)]
    pub authentication_status: Vec<AuthenticationStatus>,
    #[serde(default)]
    pub trans_status: Vec<TransactionStatus>,
    #[serde(default)]
    pub authentication_type: Vec<DecoupledAuthenticationType>,
    #[serde(default)]
    pub error_message: Vec<String>,
    #[serde(default)]
    pub authentication_connector: Vec<AuthenticationConnectors>,
    #[serde(default)]
    pub message_version: Vec<String>,
    #[serde(default)]
    pub platform: Vec<String>,
    #[serde(default)]
    pub acs_reference_number: Vec<String>,
    #[serde(default)]
    pub mcc: Vec<String>,
    #[serde(default)]
    pub currency: Vec<Currency>,
    #[serde(default)]
    pub merchant_country: Vec<String>,
    #[serde(default)]
    pub billing_country: Vec<String>,
    #[serde(default)]
    pub shipping_country: Vec<String>,
    #[serde(default)]
    pub issuer_country: Vec<String>,
    #[serde(default)]
    pub earliest_supported_version: Vec<String>,
    #[serde(default)]
    pub latest_supported_version: Vec<String>,
    #[serde(default)]
    pub whitelist_decision: Vec<bool>,
    #[serde(default)]
    pub device_manufacturer: Vec<String>,
    #[serde(default)]
    pub device_type: Vec<String>,
    #[serde(default)]
    pub device_brand: Vec<String>,
    #[serde(default)]
    pub device_os: Vec<String>,
    #[serde(default)]
    pub device_display: Vec<String>,
    #[serde(default)]
    pub browser_name: Vec<String>,
    #[serde(default)]
    pub browser_version: Vec<String>,
    #[serde(default)]
    pub issuer_id: Vec<String>,
    #[serde(default)]
    pub scheme_name: Vec<String>,
    #[serde(default)]
    pub exemption_requested: Vec<bool>,
    #[serde(default)]
    pub exemption_accepted: Vec<bool>,
}
/// Dimensions along which authentication-event metrics can be grouped and
/// filtered; `TransactionStatus` is renamed to `trans_status` in its string
/// form to match the filter field name.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum AuthEventDimensions {
    AuthenticationStatus,
    #[strum(serialize = "trans_status")]
    #[serde(rename = "trans_status")]
    TransactionStatus,
    AuthenticationType,
    ErrorMessage,
    AuthenticationConnector,
    MessageVersion,
    AcsReferenceNumber,
    Platform,
    Mcc,
    Currency,
    MerchantCountry,
    BillingCountry,
    ShippingCountry,
    IssuerCountry,
    EarliestSupportedVersion,
    LatestSupportedVersion,
    WhitelistDecision,
    DeviceManufacturer,
    DeviceType,
    DeviceBrand,
    DeviceOs,
    DeviceDisplay,
    BrowserName,
    BrowserVersion,
    IssuerId,
    SchemeName,
    ExemptionRequested,
    ExemptionAccepted,
}
/// Metrics computable over authentication events, rendered in snake_case
/// by both serde and strum.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum AuthEventMetrics {
    AuthenticationCount,
    AuthenticationAttemptCount,
    AuthenticationSuccessCount,
    ChallengeFlowCount,
    FrictionlessFlowCount,
    FrictionlessSuccessCount,
    ChallengeAttemptCount,
    ChallengeSuccessCount,
    AuthenticationErrorMessage,
    AuthenticationFunnel,
    AuthenticationExemptionApprovedCount,
    AuthenticationExemptionRequestedCount,
}
/// API flows that can produce authentication events.
// NOTE(review): unlike its siblings this enum has no rename_all /
// serialize_all attributes, so variants serialize in PascalCase — confirm
// that is intentional.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
pub enum AuthEventFlows {
    IncomingWebhookReceive,
    PaymentsExternalAuthentication,
}
/// Marker types, one per auth-event metric behaviour; used as zero-sized
/// type tags rather than runtime values.
pub mod metric_behaviour {
    pub struct AuthenticationCount;
    pub struct AuthenticationAttemptCount;
    pub struct AuthenticationSuccessCount;
    pub struct ChallengeFlowCount;
    pub struct FrictionlessFlowCount;
    pub struct FrictionlessSuccessCount;
    pub struct ChallengeAttemptCount;
    pub struct ChallengeSuccessCount;
    pub struct AuthenticationErrorMessage;
    pub struct AuthenticationFunnel;
}
impl From<AuthEventMetrics> for NameDescription {
    /// Builds a `NameDescription` whose name is the metric's snake_case
    /// display form, with an empty description.
    fn from(value: AuthEventMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<AuthEventDimensions> for NameDescription {
    /// Builds a `NameDescription` whose name is the dimension's snake_case
    /// display form, with an empty description.
    fn from(value: AuthEventDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Identifies one bucket of auth-event metrics: the optional dimension
/// values it was grouped by plus its time range. The range serializes as
/// `time_range` and its start instant as `time_bucket` for the dashboard's
/// expected shape.
#[derive(Debug, serde::Serialize, Eq)]
pub struct AuthEventMetricsBucketIdentifier {
    pub authentication_status: Option<AuthenticationStatus>,
    pub trans_status: Option<TransactionStatus>,
    pub authentication_type: Option<DecoupledAuthenticationType>,
    pub error_message: Option<String>,
    pub authentication_connector: Option<AuthenticationConnectors>,
    pub message_version: Option<String>,
    pub acs_reference_number: Option<String>,
    pub mcc: Option<String>,
    pub currency: Option<Currency>,
    pub merchant_country: Option<String>,
    pub billing_country: Option<String>,
    pub shipping_country: Option<String>,
    pub issuer_country: Option<String>,
    pub earliest_supported_version: Option<String>,
    pub latest_supported_version: Option<String>,
    pub whitelist_decision: Option<bool>,
    pub device_manufacturer: Option<String>,
    pub device_type: Option<String>,
    pub device_brand: Option<String>,
    pub device_os: Option<String>,
    pub device_display: Option<String>,
    pub browser_name: Option<String>,
    pub browser_version: Option<String>,
    pub issuer_id: Option<String>,
    pub scheme_name: Option<String>,
    pub exemption_requested: Option<bool>,
    pub exemption_accepted: Option<bool>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl AuthEventMetricsBucketIdentifier {
    /// Assembles a bucket identifier from the individual dimension values
    /// and the normalized time range, copying the range's start instant out
    /// for the separately serialized `start_time` field.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        authentication_status: Option<AuthenticationStatus>,
        trans_status: Option<TransactionStatus>,
        authentication_type: Option<DecoupledAuthenticationType>,
        error_message: Option<String>,
        authentication_connector: Option<AuthenticationConnectors>,
        message_version: Option<String>,
        acs_reference_number: Option<String>,
        mcc: Option<String>,
        currency: Option<Currency>,
        merchant_country: Option<String>,
        billing_country: Option<String>,
        shipping_country: Option<String>,
        issuer_country: Option<String>,
        earliest_supported_version: Option<String>,
        latest_supported_version: Option<String>,
        whitelist_decision: Option<bool>,
        device_manufacturer: Option<String>,
        device_type: Option<String>,
        device_brand: Option<String>,
        device_os: Option<String>,
        device_display: Option<String>,
        browser_name: Option<String>,
        browser_version: Option<String>,
        issuer_id: Option<String>,
        scheme_name: Option<String>,
        exemption_requested: Option<bool>,
        exemption_accepted: Option<bool>,
        normalized_time_range: TimeRange,
    ) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            authentication_status,
            trans_status,
            authentication_type,
            error_message,
            authentication_connector,
            message_version,
            acs_reference_number,
            mcc,
            currency,
            merchant_country,
            billing_country,
            shipping_country,
            issuer_country,
            earliest_supported_version,
            latest_supported_version,
            whitelist_decision,
            device_manufacturer,
            device_type,
            device_brand,
            device_os,
            device_display,
            browser_name,
            browser_version,
            issuer_id,
            scheme_name,
            exemption_requested,
            exemption_accepted,
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
impl Hash for AuthEventMetricsBucketIdentifier {
    // Hashes every dimension plus the time range; `start_time` is excluded
    // because it is derived from `time_bucket` in `new`. Note the hashing
    // order differs slightly from field declaration order (`error_message`
    // comes after `acs_reference_number`) — harmless, since only internal
    // consistency of the hash matters.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.authentication_status.hash(state);
        self.trans_status.hash(state);
        self.authentication_type.hash(state);
        self.authentication_connector.hash(state);
        self.message_version.hash(state);
        self.acs_reference_number.hash(state);
        self.error_message.hash(state);
        self.mcc.hash(state);
        self.currency.hash(state);
        self.merchant_country.hash(state);
        self.billing_country.hash(state);
        self.shipping_country.hash(state);
        self.issuer_country.hash(state);
        self.earliest_supported_version.hash(state);
        self.latest_supported_version.hash(state);
        self.whitelist_decision.hash(state);
        self.device_manufacturer.hash(state);
        self.device_type.hash(state);
        self.device_brand.hash(state);
        self.device_os.hash(state);
        self.device_display.hash(state);
        self.browser_name.hash(state);
        self.browser_version.hash(state);
        self.issuer_id.hash(state);
        self.scheme_name.hash(state);
        self.exemption_requested.hash(state);
        self.exemption_accepted.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for AuthEventMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest" via `DefaultHasher`.
    // NOTE(review): collision-prone by construction; same pattern as every
    // other bucket identifier in this crate.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values for one auth-event bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct AuthEventMetricsBucketValue {
    pub authentication_count: Option<u64>,
    pub authentication_attempt_count: Option<u64>,
    pub authentication_success_count: Option<u64>,
    pub challenge_flow_count: Option<u64>,
    pub challenge_attempt_count: Option<u64>,
    pub challenge_success_count: Option<u64>,
    pub frictionless_flow_count: Option<u64>,
    pub frictionless_success_count: Option<u64>,
    pub error_message_count: Option<u64>,
    pub authentication_funnel: Option<u64>,
    pub authentication_exemption_approved_count: Option<u64>,
    pub authentication_exemption_requested_count: Option<u64>,
}
/// One response row: auth-event metric values and their bucket identifier
/// flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
    #[serde(flatten)]
    pub values: AuthEventMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: AuthEventMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/disputes.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use super::{ForexMetric, NameDescription, TimeRange};
use crate::enums::{Currency, DisputeStage};
/// Metrics computable over disputes. The `Sessionized*` variants are the
/// session-level counterparts of the plain metrics.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum DisputeMetrics {
    DisputeStatusMetric,
    TotalAmountDisputed,
    TotalDisputeLostAmount,
    SessionizedDisputeStatusMetric,
    SessionizedTotalAmountDisputed,
    SessionizedTotalDisputeLostAmount,
}
impl ForexMetric for DisputeMetrics {
    /// The amount-carrying metrics need forex conversion.
    ///
    /// The sessionized amount variants are included for consistency with the
    /// `ForexMetric` impls for `PaymentMetrics` and `RefundMetrics`, both of
    /// which cover their sessionized amount metrics as well; previously only
    /// the plain variants were matched here.
    fn is_forex_metric(&self) -> bool {
        matches!(
            self,
            Self::TotalAmountDisputed
                | Self::TotalDisputeLostAmount
                | Self::SessionizedTotalAmountDisputed
                | Self::SessionizedTotalDisputeLostAmount
        )
    }
}
/// Dimensions along which dispute metrics can be grouped and filtered,
/// rendered in snake_case by both serde and strum.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum DisputeDimensions {
    // Do not change the order of these enums
    // Consult the Dashboard FE folks since these also affects the order of metrics on FE
    Connector,
    DisputeStage,
    Currency,
}
impl From<DisputeDimensions> for NameDescription {
    /// Builds a `NameDescription` whose name is the dimension's snake_case
    /// display form, with an empty description.
    fn from(value: DisputeDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<DisputeMetrics> for NameDescription {
    /// Builds a `NameDescription` whose name is the metric's snake_case
    /// display form, with an empty description.
    fn from(value: DisputeMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Filter values for dispute analytics; an empty `Vec` (the serde default)
/// means "no filter on this axis".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct DisputeFilters {
    #[serde(default)]
    pub dispute_stage: Vec<DisputeStage>,
    #[serde(default)]
    pub connector: Vec<String>,
    #[serde(default)]
    pub currency: Vec<Currency>,
}
/// Identifies one bucket of dispute metrics: the optional dimension values
/// it was grouped by plus its time range. The range serializes as
/// `time_range` and its start instant as `time_bucket` for the dashboard's
/// expected shape.
#[derive(Debug, serde::Serialize, Eq)]
pub struct DisputeMetricsBucketIdentifier {
    pub dispute_stage: Option<DisputeStage>,
    pub connector: Option<String>,
    pub currency: Option<Currency>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl Hash for DisputeMetricsBucketIdentifier {
    // Hashes every dimension plus the time range; `start_time` is excluded
    // because it is derived from `time_bucket` in `new`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.dispute_stage.hash(state);
        self.connector.hash(state);
        self.currency.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for DisputeMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest" via `DefaultHasher`.
    // NOTE(review): collision-prone by construction; same pattern as every
    // other bucket identifier in this crate.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
impl DisputeMetricsBucketIdentifier {
    /// Assembles a bucket identifier from the individual dimension values
    /// and the normalized time range, copying the range's start instant out
    /// for the separately serialized `start_time` field.
    pub fn new(
        dispute_stage: Option<DisputeStage>,
        connector: Option<String>,
        currency: Option<Currency>,
        normalized_time_range: TimeRange,
    ) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            dispute_stage,
            connector,
            currency,
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
/// Metric values for one dispute bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct DisputeMetricsBucketValue {
    pub disputes_challenged: Option<u64>,
    pub disputes_won: Option<u64>,
    pub disputes_lost: Option<u64>,
    pub disputed_amount: Option<u64>,
    pub dispute_lost_amount: Option<u64>,
    pub total_dispute: Option<u64>,
}
/// One response row: dispute metric values and their bucket identifier
/// flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct DisputeMetricsBucketResponse {
    #[serde(flatten)]
    pub values: DisputeMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: DisputeMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/frm.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use common_enums::enums::FraudCheckStatus;
/// Whether a fraud check ran before (`PreFrm`) or after (`PostFrm`) the
/// payment; defaults to `PreFrm`.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    Eq,
    PartialEq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumString,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum FrmTransactionType {
    #[default]
    PreFrm,
    PostFrm,
}
use super::{NameDescription, TimeRange};
/// Filter values for FRM (fraud risk management) analytics; an empty `Vec`
/// (the serde default) means "no filter on this axis".
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct FrmFilters {
    #[serde(default)]
    pub frm_status: Vec<FraudCheckStatus>,
    #[serde(default)]
    pub frm_name: Vec<String>,
    #[serde(default)]
    pub frm_transaction_type: Vec<FrmTransactionType>,
}
/// Dimensions along which FRM metrics can be grouped and filtered,
/// rendered in snake_case by both serde and strum.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum FrmDimensions {
    FrmStatus,
    FrmName,
    FrmTransactionType,
}
/// Metrics computable over FRM checks, rendered in snake_case by both
/// serde and strum.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum FrmMetrics {
    FrmTriggeredAttempts,
    FrmBlockedRate,
}
/// Marker types, one per FRM metric behaviour; used as zero-sized type
/// tags rather than runtime values.
// NOTE(review): `FrmBlockRate` does not match the metric variant name
// `FrmBlockedRate` — confirm whether the mismatch is intentional.
pub mod metric_behaviour {
    pub struct FrmTriggeredAttempts;
    pub struct FrmBlockRate;
}
impl From<FrmMetrics> for NameDescription {
    /// Builds a `NameDescription` whose name is the metric's snake_case
    /// display form, with an empty description.
    fn from(value: FrmMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
impl From<FrmDimensions> for NameDescription {
    /// Builds a `NameDescription` whose name is the dimension's snake_case
    /// display form, with an empty description.
    fn from(value: FrmDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::new(),
        }
    }
}
/// Identifies one bucket of FRM metrics: the optional dimension values it
/// was grouped by plus its time range. The range serializes as `time_range`
/// and its start instant as `time_bucket` for the dashboard's expected
/// shape.
#[derive(Debug, serde::Serialize, Eq)]
pub struct FrmMetricsBucketIdentifier {
    pub frm_status: Option<String>,
    pub frm_name: Option<String>,
    pub frm_transaction_type: Option<String>,
    #[serde(rename = "time_range")]
    pub time_bucket: TimeRange,
    #[serde(rename = "time_bucket")]
    #[serde(with = "common_utils::custom_serde::iso8601custom")]
    pub start_time: time::PrimitiveDateTime,
}
impl Hash for FrmMetricsBucketIdentifier {
    // Hashes every dimension plus the time range; `start_time` is excluded
    // because it is derived from `time_bucket` in `new`.
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.frm_status.hash(state);
        self.frm_name.hash(state);
        self.frm_transaction_type.hash(state);
        self.time_bucket.hash(state);
    }
}
impl PartialEq for FrmMetricsBucketIdentifier {
    /// Equality is defined as "same hash digest" via `DefaultHasher`.
    // NOTE(review): collision-prone by construction; same pattern as every
    // other bucket identifier in this crate.
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
impl FrmMetricsBucketIdentifier {
    /// Assembles a bucket identifier from the individual dimension values
    /// and the normalized time range, copying the range's start instant out
    /// for the separately serialized `start_time` field.
    pub fn new(
        frm_status: Option<String>,
        frm_name: Option<String>,
        frm_transaction_type: Option<String>,
        normalized_time_range: TimeRange,
    ) -> Self {
        let start_time = normalized_time_range.start_time;
        Self {
            frm_status,
            frm_name,
            frm_transaction_type,
            time_bucket: normalized_time_range,
            start_time,
        }
    }
}
/// Metric values for one FRM bucket; a field is `None` when the
/// corresponding metric was not requested or produced no data.
#[derive(Debug, serde::Serialize)]
pub struct FrmMetricsBucketValue {
    pub frm_triggered_attempts: Option<u64>,
    pub frm_blocked_rate: Option<f64>,
}
/// One response row: FRM metric values and their bucket identifier
/// flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct FrmMetricsBucketResponse {
    #[serde(flatten)]
    pub values: FrmMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: FrmMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/routing_events.rs
/// Request body for fetching routing events; `payment_id` is mandatory and
/// the optional refund/dispute IDs narrow the lookup.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct RoutingEventsRequest {
    pub payment_id: common_utils::id_type::PaymentId,
    pub refund_id: Option<String>,
    pub dispute_id: Option<String>,
}
// File: crates/api_models/src/analytics/connector_events.rs
/// Request for fetching connector events, keyed by payment id with optional
/// refund/dispute ids. Mirrors `RoutingEventsRequest`.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub struct ConnectorEventsRequest {
    pub payment_id: common_utils::id_type::PaymentId,
    pub refund_id: Option<String>,
    pub dispute_id: Option<String>,
}
// File: crates/api_models/src/analytics/active_payments.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use super::NameDescription;
/// Metrics available in the active-payments analytics domain.
/// Serialized and displayed in snake_case.
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum ActivePaymentsMetrics {
    ActivePayments,
}
/// Marker types, one per metric variant of [`ActivePaymentsMetrics`].
pub mod metric_behaviour {
    pub struct ActivePayments;
}
/// Converts a metric variant into a `NameDescription` carrying the metric's
/// display name; the description is left empty.
impl From<ActivePaymentsMetrics> for NameDescription {
    fn from(value: ActivePaymentsMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::default(),
        }
    }
}
/// Identifier for a bucket of active-payments metrics (time bucket only).
#[derive(Debug, serde::Serialize, Eq)]
pub struct ActivePaymentsMetricsBucketIdentifier {
    pub time_bucket: Option<String>,
}
impl ActivePaymentsMetricsBucketIdentifier {
    /// Wraps the optional time bucket label into an identifier.
    pub fn new(time_bucket: Option<String>) -> Self {
        Self { time_bucket }
    }
}
impl Hash for ActivePaymentsMetricsBucketIdentifier {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.time_bucket.hash(state);
    }
}
// Equality via equal hashes, consistent with the manual `Hash` impl above.
// NOTE(review): hash collisions would make distinct values compare equal —
// same pattern as the other bucket identifiers in this module.
impl PartialEq for ActivePaymentsMetricsBucketIdentifier {
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values for one active-payments bucket.
#[derive(Debug, serde::Serialize)]
pub struct ActivePaymentsMetricsBucketValue {
    pub active_payments: Option<u64>,
}
/// One bucket of the active-payments metrics response: values and identifying
/// dimensions flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
    #[serde(flatten)]
    pub values: ActivePaymentsMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: ActivePaymentsMetricsBucketIdentifier,
}
// File: crates/api_models/src/analytics/search.rs
use common_utils::{hashing::HashedString, types::TimeRange};
use masking::WithType;
use serde_json::Value;
/// Optional filters applied to a (global) search request.
/// Every field is optional; `is_all_none` reports whether any filter is set.
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct SearchFilters {
    pub payment_method: Option<Vec<String>>,
    pub currency: Option<Vec<String>>,
    pub status: Option<Vec<String>>,
    // Emails are matched against their hashed form.
    pub customer_email: Option<Vec<HashedString<common_utils::pii::EmailStrategy>>>,
    pub search_tags: Option<Vec<HashedString<WithType>>>,
    pub connector: Option<Vec<String>>,
    pub payment_method_type: Option<Vec<String>>,
    pub card_network: Option<Vec<String>>,
    pub card_last_4: Option<Vec<String>>,
    pub payment_id: Option<Vec<String>>,
    pub amount: Option<Vec<u64>>,
    pub customer_id: Option<Vec<String>>,
}
impl SearchFilters {
    /// Returns `true` when no filter field is set.
    ///
    /// Destructures `Self` exhaustively so that adding a new filter field to
    /// the struct causes a compile error here instead of being silently
    /// ignored by this check.
    pub fn is_all_none(&self) -> bool {
        let Self {
            payment_method,
            currency,
            status,
            customer_email,
            search_tags,
            connector,
            payment_method_type,
            card_network,
            card_last_4,
            payment_id,
            amount,
            customer_id,
        } = self;
        payment_method.is_none()
            && currency.is_none()
            && status.is_none()
            && customer_email.is_none()
            && search_tags.is_none()
            && connector.is_none()
            && payment_method_type.is_none()
            && card_network.is_none()
            && card_last_4.is_none()
            && payment_id.is_none()
            && amount.is_none()
            && customer_id.is_none()
    }
}
/// Request for a global (cross-index) search: free-text query plus optional
/// filters and time range. Uses camelCase field names on the wire.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetGlobalSearchRequest {
    pub query: String,
    #[serde(default)]
    pub filters: Option<SearchFilters>,
    #[serde(default)]
    pub time_range: Option<TimeRange>,
}
/// Paginated search request: `offset`/`count` page the results of `query`,
/// optionally narrowed by filters and time range.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetSearchRequest {
    pub offset: i64,
    pub count: i64,
    pub query: String,
    #[serde(default)]
    pub filters: Option<SearchFilters>,
    #[serde(default)]
    pub time_range: Option<TimeRange>,
}
/// A search request targeted at a specific [`SearchIndex`].
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetSearchRequestWithIndex {
    pub index: SearchIndex,
    pub search_req: GetSearchRequest,
}
/// Search indexes that can be queried; the `Sessionizer*` variants are the
/// sessionized counterparts of the base indexes.
#[derive(
    Debug, strum::EnumIter, Clone, serde::Deserialize, serde::Serialize, Copy, Eq, PartialEq,
)]
#[serde(rename_all = "snake_case")]
pub enum SearchIndex {
    PaymentAttempts,
    PaymentIntents,
    Refunds,
    Disputes,
    SessionizerPaymentAttempts,
    SessionizerPaymentIntents,
    SessionizerRefunds,
    SessionizerDisputes,
}
/// Outcome of a per-index search query.
#[derive(Debug, strum::EnumIter, Clone, serde::Deserialize, serde::Serialize, Copy)]
pub enum SearchStatus {
    Success,
    Failure,
}
/// Search results for one index: total hit count, the raw hit documents and
/// the query status.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct GetSearchResponse {
    pub count: u64,
    pub index: SearchIndex,
    pub hits: Vec<Value>,
    pub status: SearchStatus,
}
/// Deserialized OpenSearch multi-search (`msearch`) response: one output per
/// sub-query, plus a top-level error when the request itself failed.
#[derive(Debug, serde::Deserialize)]
pub struct OpenMsearchOutput {
    #[serde(default)]
    pub responses: Vec<OpensearchOutput>,
    pub error: Option<OpensearchErrorDetails>,
}
/// A single OpenSearch response body: either a success or an error payload.
/// `untagged` lets serde pick the variant by the shape of the JSON.
#[derive(Debug, serde::Deserialize)]
#[serde(untagged)]
pub enum OpensearchOutput {
    Success(OpensearchSuccess),
    Error(OpensearchError),
}
/// Error body returned by OpenSearch together with its HTTP status code.
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchError {
    pub error: OpensearchErrorDetails,
    pub status: u16,
}
/// Error detail as reported by OpenSearch (`type` and `reason` fields).
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchErrorDetails {
    #[serde(rename = "type")]
    pub error_type: String,
    pub reason: String,
}
/// Successful OpenSearch response body.
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchSuccess {
    pub hits: OpensearchHits,
}
/// `hits` section of an OpenSearch response: total count plus hit documents.
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchHits {
    pub total: OpensearchResultsTotal,
    pub hits: Vec<OpensearchHit>,
}
/// Total number of matching documents.
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchResultsTotal {
    pub value: u64,
}
/// A single search hit; `_source` carries the raw stored document.
#[derive(Debug, serde::Deserialize)]
pub struct OpensearchHit {
    #[serde(rename = "_source")]
    pub source: Value,
}
// File: crates/api_models/src/analytics/sdk_events.rs
use std::{
collections::hash_map::DefaultHasher,
hash::{Hash, Hasher},
};
use super::{NameDescription, TimeRange};
/// Request for SDK events of a single payment within a time range.
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SdkEventsRequest {
    pub payment_id: common_utils::id_type::PaymentId,
    pub time_range: TimeRange,
}
/// Filters for SDK event metrics; each field holds the allowed values for one
/// dimension and defaults to empty (no filtering) when omitted.
#[derive(Clone, Debug, Default, serde::Deserialize, serde::Serialize)]
pub struct SdkEventFilters {
    #[serde(default)]
    pub payment_method: Vec<String>,
    #[serde(default)]
    pub platform: Vec<String>,
    #[serde(default)]
    pub browser_name: Vec<String>,
    #[serde(default)]
    pub source: Vec<String>,
    #[serde(default)]
    pub component: Vec<String>,
    #[serde(default)]
    pub payment_experience: Vec<String>,
}
/// Dimensions by which SDK event metrics can be grouped.
#[derive(
    Debug,
    serde::Serialize,
    serde::Deserialize,
    strum::AsRefStr,
    PartialEq,
    PartialOrd,
    Eq,
    Ord,
    strum::Display,
    strum::EnumIter,
    Clone,
    Copy,
)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum SdkEventDimensions {
    // Do not change the order of these enums
    // Consult the Dashboard FE folks since these also affects the order of metrics on FE
    PaymentMethod,
    Platform,
    BrowserName,
    Source,
    Component,
    PaymentExperience,
}
/// Metrics available in the SDK events analytics domain; the corresponding
/// values are reported in [`SdkEventMetricsBucketValue`].
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "snake_case")]
#[serde(rename_all = "snake_case")]
pub enum SdkEventMetrics {
    PaymentAttempts,
    PaymentMethodsCallCount,
    SdkRenderedCount,
    SdkInitiatedCount,
    PaymentMethodSelectedCount,
    PaymentDataFilledCount,
    AveragePaymentTime,
    LoadTime,
}
/// Event names emitted by the payments SDK, serialized and displayed in
/// SCREAMING_SNAKE_CASE (matching the raw event payloads).
#[derive(
    Clone,
    Debug,
    Hash,
    PartialEq,
    Eq,
    serde::Serialize,
    serde::Deserialize,
    strum::Display,
    strum::EnumIter,
    strum::AsRefStr,
)]
#[strum(serialize_all = "SCREAMING_SNAKE_CASE")]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum SdkEventNames {
    OrcaElementsCalled,
    AppRendered,
    PaymentMethodChanged,
    PaymentDataFilled,
    PaymentAttempt,
    PaymentMethodsCall,
    ConfirmCall,
    SessionsCall,
    CustomerPaymentMethodsCall,
    RedirectingUser,
    DisplayBankTransferInfoPage,
    DisplayQrCodeInfoPage,
    AuthenticationCall,
    AuthenticationCallInit,
    ThreeDsMethodCall,
    ThreeDsMethodResult,
    ThreeDsMethod,
    LoaderChanged,
    DisplayThreeDsSdk,
    ThreeDsSdkInit,
    AreqParamsGeneration,
    ChallengePresented,
    ChallengeComplete,
}
/// Marker types, one per metric variant of [`SdkEventMetrics`].
pub mod metric_behaviour {
    pub struct PaymentAttempts;
    pub struct PaymentMethodsCallCount;
    pub struct SdkRenderedCount;
    pub struct SdkInitiatedCount;
    pub struct PaymentMethodSelectedCount;
    pub struct PaymentDataFilledCount;
    pub struct AveragePaymentTime;
    pub struct LoadTime;
}
/// Converts a metric variant into a `NameDescription` carrying the metric's
/// display name; the description is left empty.
impl From<SdkEventMetrics> for NameDescription {
    fn from(value: SdkEventMetrics) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::default(),
        }
    }
}
/// Converts a dimension variant into a `NameDescription` carrying the
/// dimension's display name; the description is left empty.
impl From<SdkEventDimensions> for NameDescription {
    fn from(value: SdkEventDimensions) -> Self {
        let name = value.to_string();
        Self {
            name,
            desc: String::default(),
        }
    }
}
/// Identifier for a bucket of SDK event metrics: one optional value per
/// [`SdkEventDimensions`] variant plus the time bucket label.
#[derive(Debug, serde::Serialize, Eq)]
pub struct SdkEventMetricsBucketIdentifier {
    pub payment_method: Option<String>,
    pub platform: Option<String>,
    pub browser_name: Option<String>,
    pub source: Option<String>,
    pub component: Option<String>,
    pub payment_experience: Option<String>,
    pub time_bucket: Option<String>,
}
impl SdkEventMetricsBucketIdentifier {
    /// Builds an identifier from per-dimension values and a time bucket.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        payment_method: Option<String>,
        platform: Option<String>,
        browser_name: Option<String>,
        source: Option<String>,
        component: Option<String>,
        payment_experience: Option<String>,
        time_bucket: Option<String>,
    ) -> Self {
        Self {
            payment_method,
            platform,
            browser_name,
            source,
            component,
            payment_experience,
            time_bucket,
        }
    }
}
impl Hash for SdkEventMetricsBucketIdentifier {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.payment_method.hash(state);
        self.platform.hash(state);
        self.browser_name.hash(state);
        self.source.hash(state);
        self.component.hash(state);
        self.payment_experience.hash(state);
        self.time_bucket.hash(state);
    }
}
// Equality via equal hashes, consistent with the manual `Hash` impl above.
// NOTE(review): hash collisions would make distinct values compare equal —
// same pattern as the other bucket identifiers in this module.
impl PartialEq for SdkEventMetricsBucketIdentifier {
    fn eq(&self, other: &Self) -> bool {
        let mut left = DefaultHasher::new();
        self.hash(&mut left);
        let mut right = DefaultHasher::new();
        other.hash(&mut right);
        left.finish() == right.finish()
    }
}
/// Metric values computed for one SDK events bucket; a field is `None` when
/// the corresponding metric was not requested or has no data.
#[derive(Debug, serde::Serialize)]
pub struct SdkEventMetricsBucketValue {
    pub payment_attempts: Option<u64>,
    pub payment_methods_call_count: Option<u64>,
    pub average_payment_time: Option<u64>,
    pub load_time: Option<u64>,
    pub sdk_rendered_count: Option<u64>,
    pub sdk_initiated_count: Option<u64>,
    pub payment_method_selected_count: Option<u64>,
    pub payment_data_filled_count: Option<u64>,
}
/// One bucket of the SDK event metrics response: values and identifying
/// dimensions flattened into a single JSON object.
#[derive(Debug, serde::Serialize)]
pub struct MetricsBucketResponse {
    #[serde(flatten)]
    pub values: SdkEventMetricsBucketValue,
    #[serde(flatten)]
    pub dimensions: SdkEventMetricsBucketIdentifier,
}
</module>
|
{
"crate": "api_models",
"file": null,
"files": [
"crates/api_models/src/analytics/payment_intents.rs",
"crates/api_models/src/analytics/api_event.rs",
"crates/api_models/src/analytics/outgoing_webhook_event.rs",
"crates/api_models/src/analytics/refunds.rs",
"crates/api_models/src/analytics/payments.rs",
"crates/api_models/src/analytics/auth_events.rs",
"crates/api_models/src/analytics/disputes.rs",
"crates/api_models/src/analytics/frm.rs",
"crates/api_models/src/analytics/routing_events.rs",
"crates/api_models/src/analytics/connector_events.rs",
"crates/api_models/src/analytics/active_payments.rs",
"crates/api_models/src/analytics/search.rs",
"crates/api_models/src/analytics/sdk_events.rs"
],
"module": "crates/api_models/src/analytics",
"num_files": 13,
"token_count": 13438
}
|
module_5134395147225897485
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router_env
Module: crates/router_env/src
Files: 6
</path>
<module>
// File: crates/router_env/src/vergen.rs
/// Configures the [`vergen`][::vergen] crate to generate the `cargo` build instructions.
///
/// This function should be typically called within build scripts to generate `cargo` build
/// instructions for the corresponding crate.
///
/// # Panics
///
/// Panics if `vergen` fails to generate `cargo` build instructions.
#[cfg(feature = "vergen")]
#[allow(clippy::expect_used)]
pub fn generate_cargo_instructions() {
    use std::io::Write;
    use vergen::EmitBuilder;
    // Emit the `VERGEN_*` environment variables consumed by the `version!`,
    // `build!`, `commit!` and `git_tag!` macros defined in `env.rs`.
    EmitBuilder::builder()
        .cargo_debug()
        .cargo_opt_level()
        .cargo_target_triple()
        .git_commit_timestamp()
        .git_describe(true, true, None)
        .git_sha(true)
        .rustc_semver()
        .rustc_commit_hash()
        .emit()
        .expect("Failed to generate `cargo` build instructions");
    // Forward cargo's `PROFILE` value to the crate being built as
    // `CARGO_PROFILE`, which is read by the `profile!` macro.
    writeln!(
        &mut std::io::stdout(),
        "cargo:rustc-env=CARGO_PROFILE={}",
        std::env::var("PROFILE").expect("Failed to obtain `cargo` profile")
    )
    .expect("Failed to set `CARGO_PROFILE` environment variable");
}
/// No-op stand-in compiled when the `vergen` feature is disabled, so build
/// scripts can call this function unconditionally.
#[cfg(not(feature = "vergen"))]
pub fn generate_cargo_instructions() {}
// File: crates/router_env/src/logger.rs
//! Logger of the system.
pub use tracing::{debug, error, event as log, info, warn};
pub use tracing_attributes::instrument;
pub mod config;
mod defaults;
pub use crate::config::Config;
// mod macros;
pub mod types;
pub use types::{Category, Flow, Level, Tag};
mod setup;
pub use setup::{setup, TelemetryGuard};
pub mod formatter;
pub use formatter::FormattingLayer;
pub mod storage;
pub use storage::{Storage, StorageSubscription};
// File: crates/router_env/src/env.rs
//! Information about the current environment.
use std::path::PathBuf;
use serde::{Deserialize, Serialize};
/// Environment variables accessed by the application. This module aims to be the source of truth
/// containing all environment variable that the application accesses.
pub mod vars {
    /// Parent directory where `Cargo.toml` is stored.
    pub const CARGO_MANIFEST_DIR: &str = "CARGO_MANIFEST_DIR";
    /// Environment variable that sets development/sandbox/production environment.
    /// Read by [`super::which`].
    pub const RUN_ENV: &str = "RUN_ENV";
    /// Directory of config TOML files. Default is `config`.
    pub const CONFIG_DIR: &str = "CONFIG_DIR";
}
/// Current environment.
///
/// Parsed from / displayed as lowercase strings ("development", "sandbox",
/// "production") via the strum/serde attributes below.
#[derive(
    Debug, Default, Deserialize, Serialize, Clone, Copy, strum::Display, strum::EnumString,
)]
#[serde(rename_all = "lowercase")]
#[strum(serialize_all = "lowercase")]
pub enum Env {
    /// Development environment.
    #[default]
    Development,
    /// Sandbox environment.
    Sandbox,
    /// Production environment.
    Production,
}
/// Returns the current environment, read from the `RUN_ENV` environment
/// variable ("development", "sandbox" or "production").
///
/// Falls back to [`Env::Development`] on debug builds and [`Env::Production`]
/// on release builds when the variable is unset or unparsable.
pub fn which() -> Env {
    #[cfg(debug_assertions)]
    let default_env = Env::Development;
    #[cfg(not(debug_assertions))]
    let default_env = Env::Production;
    std::env::var(vars::RUN_ENV)
        .ok()
        .and_then(|value| value.parse().ok())
        .unwrap_or(default_env)
}
/// Three letter (lowercase) prefix corresponding to the current environment.
/// Either `dev`, `snd` or `prd`.
///
/// The environment is detected via [`which`].
pub fn prefix_for_env() -> &'static str {
    match which() {
        Env::Development => "dev",
        Env::Sandbox => "snd",
        Env::Production => "prd",
    }
}
/// Path to the root directory of the cargo workspace.
/// It is recommended that this be used by the application as the base path to build other paths
/// such as configuration and logs directories.
///
/// Computed by stripping two components off `CARGO_MANIFEST_DIR`
/// (`<workspace>/crates/<crate>` -> `<workspace>`); falls back to the current
/// directory when the variable is unset.
pub fn workspace_path() -> PathBuf {
    std::env::var(vars::CARGO_MANIFEST_DIR).map_or_else(
        |_| PathBuf::from("."),
        |manifest_dir| {
            let mut path = PathBuf::from(manifest_dir);
            path.pop();
            path.pop();
            path
        },
    )
}
/// Version of the crate containing the following information:
///
/// - The latest git tag. If tags are not present in the repository, the short commit hash is used
///   instead.
/// - Short hash of the latest git commit.
/// - Timestamp of the latest git commit.
///
/// Example: `0.1.0-abcd012-2038-01-19T03:14:08Z`.
///
/// The `VERGEN_*` variables are emitted at build time by the `vergen`
/// build-script setup (`generate_cargo_instructions`).
#[cfg(feature = "vergen")]
#[macro_export]
macro_rules! version {
    () => {
        concat!(
            env!("VERGEN_GIT_DESCRIBE"),
            "-",
            env!("VERGEN_GIT_SHA"),
            "-",
            env!("VERGEN_GIT_COMMIT_TIMESTAMP"),
        )
    };
}
/// A string uniquely identifying the application build.
///
/// Consists of a combination of:
/// - Version defined in the crate file
/// - Timestamp of commit
/// - Hash of the commit
/// - Version of rust compiler
/// - Target triple
///
/// Example: `0.1.0-f5f383e-2022-09-04T11:39:37Z-1.63.0-x86_64-unknown-linux-gnu`
///
/// The `VERGEN_*` variables are emitted at build time by the `vergen`
/// build-script setup (`generate_cargo_instructions`).
#[cfg(feature = "vergen")]
#[macro_export]
macro_rules! build {
    () => {
        concat!(
            env!("CARGO_PKG_VERSION"),
            "-",
            env!("VERGEN_GIT_SHA"),
            "-",
            env!("VERGEN_GIT_COMMIT_TIMESTAMP"),
            "-",
            env!("VERGEN_RUSTC_SEMVER"),
            "-",
            $crate::profile!(),
            "-",
            env!("VERGEN_CARGO_TARGET_TRIPLE"),
        )
    };
}
/// Short hash of the current commit.
///
/// Example: `f5f383e`.
///
/// `VERGEN_GIT_SHA` is emitted at build time by the `vergen` build-script
/// setup (`generate_cargo_instructions`).
#[cfg(feature = "vergen")]
#[macro_export]
macro_rules! commit {
    () => {
        env!("VERGEN_GIT_SHA")
    };
}
// /// Information about the platform on which service was built, including:
// /// - Information about OS
// /// - Information about CPU
// ///
// /// Example: ``.
// #[macro_export]
// macro_rules! platform {
// (
// ) => {
// concat!(
// env!("VERGEN_SYSINFO_OS_VERSION"),
// " - ",
// env!("VERGEN_SYSINFO_CPU_BRAND"),
// )
// };
// }
/// Service name deduced from name of the binary.
/// This macro must be called within binaries only.
/// (`CARGO_BIN_NAME` is only set by cargo when compiling a binary target.)
///
/// Example: `router`.
#[macro_export]
macro_rules! service_name {
    () => {
        env!("CARGO_BIN_NAME")
    };
}
/// Build profile, either debug or release.
///
/// Example: `release`.
///
/// `CARGO_PROFILE` is set by `generate_cargo_instructions` in the build
/// script (`vergen.rs`), so that module must be wired in for this to compile.
#[macro_export]
macro_rules! profile {
    () => {
        env!("CARGO_PROFILE")
    };
}
/// The latest git tag. If tags are not present in the repository, the short commit hash is used
/// instead. Refer to the [`git describe`](https://git-scm.com/docs/git-describe) documentation for
/// more details.
///
/// `VERGEN_GIT_DESCRIBE` is emitted at build time by the `vergen`
/// build-script setup (`generate_cargo_instructions`).
#[macro_export]
macro_rules! git_tag {
    () => {
        env!("VERGEN_GIT_DESCRIBE")
    };
}
// File: crates/router_env/src/lib.rs
#![warn(missing_debug_implementations)]
//! Environment of payment router: logger, basic config, its environment awareness.
#![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR" ), "/", "README.md"))]
/// Utilities to identify members of the current cargo workspace.
pub mod cargo_workspace;
pub mod env;
pub mod logger;
pub mod metrics;
/// `cargo` build instructions generation for obtaining information about the application
/// environment.
#[cfg(feature = "vergen")]
pub mod vergen;
// pub use literally;
#[doc(inline)]
pub use logger::*;
pub use opentelemetry;
pub use tracing;
#[cfg(feature = "actix_web")]
pub use tracing_actix_web;
pub use tracing_appender;
#[doc(inline)]
pub use self::env::*;
// File: crates/router_env/src/metrics.rs
//! Utilities to easily create opentelemetry contexts, meters and metrics.
/// Create a global [`Meter`][Meter] with the specified name.
///
/// The single-argument form names the meter after the static's identifier;
/// the two-argument form takes an explicit name literal.
///
/// [Meter]: opentelemetry::metrics::Meter
#[macro_export]
macro_rules! global_meter {
    ($name:ident) => {
        static $name: ::std::sync::LazyLock<$crate::opentelemetry::metrics::Meter> =
            ::std::sync::LazyLock::new(|| $crate::opentelemetry::global::meter(stringify!($name)));
    };
    // Use `$name` directly: `stringify!` applied to a string literal would
    // embed the surrounding quotes in the meter name (e.g. `"\"my_meter\""`).
    ($meter:ident, $name:literal) => {
        static $meter: ::std::sync::LazyLock<$crate::opentelemetry::metrics::Meter> =
            ::std::sync::LazyLock::new(|| $crate::opentelemetry::global::meter($name));
    };
}
/// Create a [`Counter`][Counter] metric with the specified name and an optional description,
/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter].
///
/// [Counter]: opentelemetry::metrics::Counter
/// [Meter]: opentelemetry::metrics::Meter
#[macro_export]
macro_rules! counter_metric {
    ($name:ident, $meter:ident) => {
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Counter<u64>,
        > = ::std::sync::LazyLock::new(|| $meter.u64_counter(stringify!($name)).build());
    };
    // The metavariable must be spelled `$description:literal`: without the
    // leading `$`, the matcher expects the literal tokens `description : literal`
    // and the `$description` substitutions below cannot resolve.
    ($name:ident, $meter:ident, $description:literal) => {
        #[doc = $description]
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Counter<u64>,
        > = ::std::sync::LazyLock::new(|| {
            $meter
                .u64_counter(stringify!($name))
                .with_description($description)
                .build()
        });
    };
}
/// Create a [`Histogram`][Histogram] f64 metric with the specified name and an optional description,
/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter].
///
/// Bucket boundaries come from [`crate::metrics::f64_histogram_buckets`].
///
/// [Histogram]: opentelemetry::metrics::Histogram
/// [Meter]: opentelemetry::metrics::Meter
#[macro_export]
macro_rules! histogram_metric_f64 {
    ($name:ident, $meter:ident) => {
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Histogram<f64>,
        > = ::std::sync::LazyLock::new(|| {
            $meter
                .f64_histogram(stringify!($name))
                .with_boundaries($crate::metrics::f64_histogram_buckets())
                .build()
        });
    };
    ($name:ident, $meter:ident, $description:literal) => {
        #[doc = $description]
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Histogram<f64>,
        > = ::std::sync::LazyLock::new(|| {
            $meter
                .f64_histogram(stringify!($name))
                .with_description($description)
                .with_boundaries($crate::metrics::f64_histogram_buckets())
                .build()
        });
    };
}
/// Create a [`Histogram`][Histogram] u64 metric with the specified name and an optional description,
/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter].
///
/// Bucket boundaries are f64 values (from
/// [`crate::metrics::f64_histogram_buckets`]) even for u64-valued histograms.
///
/// [Histogram]: opentelemetry::metrics::Histogram
/// [Meter]: opentelemetry::metrics::Meter
#[macro_export]
macro_rules! histogram_metric_u64 {
    ($name:ident, $meter:ident) => {
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Histogram<u64>,
        > = ::std::sync::LazyLock::new(|| {
            $meter
                .u64_histogram(stringify!($name))
                .with_boundaries($crate::metrics::f64_histogram_buckets())
                .build()
        });
    };
    ($name:ident, $meter:ident, $description:literal) => {
        #[doc = $description]
        pub(crate) static $name: ::std::sync::LazyLock<
            $crate::opentelemetry::metrics::Histogram<u64>,
        > = ::std::sync::LazyLock::new(|| {
            $meter
                .u64_histogram(stringify!($name))
                .with_description($description)
                .with_boundaries($crate::metrics::f64_histogram_buckets())
                .build()
        });
    };
}
/// Create a [`Gauge`][Gauge] metric with the specified name and an optional description,
/// associated with the specified meter. Note that the meter must be a valid [`Meter`][Meter].
///
/// [Gauge]: opentelemetry::metrics::Gauge
/// [Meter]: opentelemetry::metrics::Meter
#[macro_export]
macro_rules! gauge_metric {
    ($name:ident, $meter:ident) => {
        pub(crate) static $name: ::std::sync::LazyLock<$crate::opentelemetry::metrics::Gauge<u64>> =
            ::std::sync::LazyLock::new(|| $meter.u64_gauge(stringify!($name)).build());
    };
    // The metavariable must be spelled `$description:literal`: without the
    // leading `$`, the matcher expects the literal tokens `description : literal`
    // and the `$description` substitutions below cannot resolve.
    ($name:ident, $meter:ident, $description:literal) => {
        #[doc = $description]
        pub(crate) static $name: ::std::sync::LazyLock<$crate::opentelemetry::metrics::Gauge<u64>> =
            ::std::sync::LazyLock::new(|| {
                $meter
                    .u64_gauge(stringify!($name))
                    .with_description($description)
                    .build()
            });
    };
}
/// Create attributes to associate with a metric from key-value pairs.
/// Expands to a borrowed slice of [`opentelemetry::KeyValue`]; trailing
/// commas are accepted both per-pair and at the end of the list.
#[macro_export]
macro_rules! metric_attributes {
    ($(($key:expr, $value:expr $(,)?)),+ $(,)?) => {
        &[$($crate::opentelemetry::KeyValue::new($key, $value)),+]
    };
}
pub use helpers::f64_histogram_buckets;
mod helpers {
    /// Returns the buckets to be used for a f64 histogram.
    ///
    /// Produces 15 exponentially spaced boundaries, doubling from 0.02 up to
    /// 0.01 * 2^15.
    #[inline(always)]
    pub fn f64_histogram_buckets() -> Vec<f64> {
        let mut value = 0.01;
        (0..15)
            .map(|_| {
                value *= 2.0;
                value
            })
            .collect()
    }
}
// File: crates/router_env/src/cargo_workspace.rs
/// Sets the `CARGO_WORKSPACE_MEMBERS` environment variable to include a comma-separated list of
/// names of all crates in the current cargo workspace.
///
/// This function should be typically called within build scripts, so that the environment variable
/// is available to the corresponding crate at compile time.
///
/// # Panics
///
/// Panics if running the `cargo metadata` command fails.
#[allow(clippy::expect_used)]
pub fn set_cargo_workspace_members_env() {
    use std::io::Write;
    let metadata = cargo_metadata::MetadataCommand::new()
        .exec()
        .expect("Failed to obtain cargo metadata");
    // Comma-separated package names, later split back apart by the
    // `cargo_workspace_members!` macro.
    let workspace_members = metadata
        .workspace_packages()
        .iter()
        .map(|package| package.name.as_str())
        .collect::<Vec<_>>()
        .join(",");
    // Emitting this cargo directive makes the variable available via `env!`
    // in the crate whose build script calls this function.
    writeln!(
        &mut std::io::stdout(),
        "cargo:rustc-env=CARGO_WORKSPACE_MEMBERS={workspace_members}"
    )
    .expect("Failed to set `CARGO_WORKSPACE_MEMBERS` environment variable");
}
/// Verify that the cargo metadata workspace packages format matches that expected by
/// [`set_cargo_workspace_members_env`] to set the `CARGO_WORKSPACE_MEMBERS` environment variable.
///
/// This function should be typically called within build scripts, before the
/// [`set_cargo_workspace_members_env`] function is called.
///
/// # Panics
///
/// Panics if running the `cargo metadata` command fails, or if the workspace member package names
/// cannot be determined.
pub fn verify_cargo_metadata_format() {
    #[allow(clippy::expect_used)]
    let metadata = cargo_metadata::MetadataCommand::new()
        .exec()
        .expect("Failed to obtain cargo metadata");
    // Sanity check: the current package must be findable among the reported
    // workspace packages by its `CARGO_PKG_NAME`.
    assert!(
        metadata
            .workspace_packages()
            .iter()
            .any(|package| package.name == env!("CARGO_PKG_NAME")),
        "Unable to determine workspace member package names from `cargo metadata`"
    );
}
/// Obtain the crates in the current cargo workspace as a `HashSet`.
///
/// This macro requires that [`set_cargo_workspace_members_env()`] function be called in the
/// build script of the crate where this macro is being called.
///
/// # Errors
///
/// Causes a compilation error if the `CARGO_WORKSPACE_MEMBERS` environment variable is unset.
#[macro_export]
macro_rules! cargo_workspace_members {
    () => {
        // Splits the comma-separated list written by
        // `set_cargo_workspace_members_env` back into individual crate names.
        std::env!("CARGO_WORKSPACE_MEMBERS")
            .split(',')
            .collect::<std::collections::HashSet<&'static str>>()
    };
}
</module>
|
{
"crate": "router_env",
"file": null,
"files": [
"crates/router_env/src/vergen.rs",
"crates/router_env/src/logger.rs",
"crates/router_env/src/env.rs",
"crates/router_env/src/lib.rs",
"crates/router_env/src/metrics.rs",
"crates/router_env/src/cargo_workspace.rs"
],
"module": "crates/router_env/src",
"num_files": 6,
"token_count": 3722
}
|
module_-6223654451440285911
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router_env
Module: crates/router_env/src/logger
Files: 6
</path>
<module>
// File: crates/router_env/src/logger/types.rs
//! Types.
use serde::Deserialize;
use strum::{Display, EnumString};
pub use tracing::{
field::{Field, Visit},
Level, Value,
};
/// Category and tag of log event.
///
/// Don't hesitate to add your variant if it is missing here.
#[derive(Debug, Default, Deserialize, Clone, Display, EnumString)]
pub enum Tag {
    /// General.
    #[default]
    General,
    /// Redis: get.
    RedisGet,
    /// Redis: set.
    RedisSet,
    /// API: incoming web request.
    ApiIncomingRequest,
    /// API: outgoing web request.
    ApiOutgoingRequest,
    /// Database: create.
    DbCreate,
    /// Database: read.
    DbRead,
    /// Database: update.
    DbUpdate,
    /// Database: delete.
    DbDelete,
    /// Begin request.
    BeginRequest,
    /// End request.
    EndRequest,
    /// Call initiated to connector.
    InitiatedToConnector,
    /// Event: general.
    Event,
    /// Compatibility layer request.
    CompatibilityLayerRequest,
}
/// API Flow
#[derive(Debug, Display, Clone, PartialEq, Eq)]
pub enum Flow {
/// Health check
HealthCheck,
/// Deep health Check
DeepHealthCheck,
/// Organization create flow
OrganizationCreate,
/// Organization retrieve flow
OrganizationRetrieve,
/// Organization update flow
OrganizationUpdate,
/// Merchants account create flow.
MerchantsAccountCreate,
/// Merchants account retrieve flow.
MerchantsAccountRetrieve,
/// Merchants account update flow.
MerchantsAccountUpdate,
/// Merchants account delete flow.
MerchantsAccountDelete,
/// Merchant Connectors create flow.
MerchantConnectorsCreate,
/// Merchant Connectors retrieve flow.
MerchantConnectorsRetrieve,
/// Merchant account list
MerchantAccountList,
/// Merchant Connectors update flow.
MerchantConnectorsUpdate,
/// Merchant Connectors delete flow.
MerchantConnectorsDelete,
/// Merchant Connectors list flow.
MerchantConnectorsList,
/// Merchant Transfer Keys
MerchantTransferKey,
/// ConfigKey create flow.
ConfigKeyCreate,
/// ConfigKey fetch flow.
ConfigKeyFetch,
/// Enable platform account flow.
EnablePlatformAccount,
/// ConfigKey Update flow.
ConfigKeyUpdate,
/// ConfigKey Delete flow.
ConfigKeyDelete,
/// Customers create flow.
CustomersCreate,
/// Customers retrieve flow.
CustomersRetrieve,
/// Customers update flow.
CustomersUpdate,
/// Customers delete flow.
CustomersDelete,
/// Customers get mandates flow.
CustomersGetMandates,
/// Create an Ephemeral Key.
EphemeralKeyCreate,
/// Delete an Ephemeral Key.
EphemeralKeyDelete,
/// Mandates retrieve flow.
MandatesRetrieve,
/// Mandates revoke flow.
MandatesRevoke,
/// Mandates list flow.
MandatesList,
/// Payment methods create flow.
PaymentMethodsCreate,
/// Payment methods migrate flow.
PaymentMethodsMigrate,
/// Payment methods batch update flow.
PaymentMethodsBatchUpdate,
/// Payment methods list flow.
PaymentMethodsList,
/// Payment method save flow
PaymentMethodSave,
/// Customer payment methods list flow.
CustomerPaymentMethodsList,
/// Payment methods token data get flow.
GetPaymentMethodTokenData,
/// List Customers for a merchant
CustomersList,
///List Customers for a merchant with constraints.
CustomersListWithConstraints,
/// Retrieve countries and currencies for connector and payment method
ListCountriesCurrencies,
/// Payment method create collect link flow.
PaymentMethodCollectLink,
/// Payment methods retrieve flow.
PaymentMethodsRetrieve,
/// Payment methods update flow.
PaymentMethodsUpdate,
/// Payment methods delete flow.
PaymentMethodsDelete,
/// Network token status check flow.
NetworkTokenStatusCheck,
/// Default Payment method flow.
DefaultPaymentMethodsSet,
/// Payments create flow.
PaymentsCreate,
/// Payments Retrieve flow.
PaymentsRetrieve,
/// Payments Retrieve force sync flow.
PaymentsRetrieveForceSync,
/// Payments Retrieve using merchant reference id
PaymentsRetrieveUsingMerchantReferenceId,
/// Payments update flow.
PaymentsUpdate,
/// Payments confirm flow.
PaymentsConfirm,
/// Payments capture flow.
PaymentsCapture,
/// Payments cancel flow.
PaymentsCancel,
/// Payments cancel post capture flow.
PaymentsCancelPostCapture,
/// Payments approve flow.
PaymentsApprove,
/// Payments reject flow.
PaymentsReject,
/// Payments Session Token flow
PaymentsSessionToken,
/// Payments start flow.
PaymentsStart,
/// Payments list flow.
PaymentsList,
/// Payments filters flow
PaymentsFilters,
/// Payments aggregates flow
PaymentsAggregate,
/// Payments Create Intent flow
PaymentsCreateIntent,
/// Payments Get Intent flow
PaymentsGetIntent,
/// Payments Update Intent flow
PaymentsUpdateIntent,
/// Payments confirm intent flow
PaymentsConfirmIntent,
/// Payments create and confirm intent flow
PaymentsCreateAndConfirmIntent,
/// Payment attempt list flow
PaymentAttemptsList,
#[cfg(feature = "payouts")]
/// Payouts create flow
PayoutsCreate,
#[cfg(feature = "payouts")]
/// Payouts retrieve flow.
PayoutsRetrieve,
#[cfg(feature = "payouts")]
/// Payouts update flow.
PayoutsUpdate,
/// Payouts confirm flow.
PayoutsConfirm,
#[cfg(feature = "payouts")]
/// Payouts cancel flow.
PayoutsCancel,
#[cfg(feature = "payouts")]
/// Payouts fulfill flow.
PayoutsFulfill,
#[cfg(feature = "payouts")]
/// Payouts list flow.
PayoutsList,
#[cfg(feature = "payouts")]
/// Payouts filter flow.
PayoutsFilter,
/// Payouts accounts flow.
PayoutsAccounts,
/// Payout link initiate flow
PayoutLinkInitiate,
/// Payments Redirect flow
PaymentsRedirect,
/// Payemnts Complete Authorize Flow
PaymentsCompleteAuthorize,
/// Refunds create flow.
RefundsCreate,
/// Refunds retrieve flow.
RefundsRetrieve,
/// Refunds retrieve force sync flow.
RefundsRetrieveForceSync,
/// Refunds update flow.
RefundsUpdate,
/// Refunds list flow.
RefundsList,
/// Refunds filters flow
RefundsFilters,
/// Refunds aggregates flow
RefundsAggregate,
// Retrieve forex flow.
RetrieveForexFlow,
/// Toggles recon service for a merchant.
ReconMerchantUpdate,
/// Recon token request flow.
ReconTokenRequest,
/// Initial request for recon service.
ReconServiceRequest,
/// Recon token verification flow
ReconVerifyToken,
/// Routing create flow,
RoutingCreateConfig,
/// Routing link config
RoutingLinkConfig,
/// Routing link config
RoutingUnlinkConfig,
/// Routing retrieve config
RoutingRetrieveConfig,
/// Routing retrieve active config
RoutingRetrieveActiveConfig,
/// Routing retrieve default config
RoutingRetrieveDefaultConfig,
/// Routing retrieve dictionary
RoutingRetrieveDictionary,
/// Rule migration for decision-engine
DecisionEngineRuleMigration,
/// Routing update config
RoutingUpdateConfig,
/// Routing update default config
RoutingUpdateDefaultConfig,
/// Routing delete config
RoutingDeleteConfig,
/// Subscription create flow,
CreateSubscription,
/// Subscription get plans flow,
GetPlansForSubscription,
/// Subscription confirm flow,
ConfirmSubscription,
/// Subscription create and confirm flow,
CreateAndConfirmSubscription,
/// Get Subscription flow
GetSubscription,
/// Update Subscription flow
UpdateSubscription,
/// Get Subscription estimate flow
GetSubscriptionEstimate,
/// Create dynamic routing
CreateDynamicRoutingConfig,
/// Toggle dynamic routing
ToggleDynamicRouting,
/// Update dynamic routing config
UpdateDynamicRoutingConfigs,
/// Add record to blocklist
AddToBlocklist,
/// Delete record from blocklist
DeleteFromBlocklist,
/// List entries from blocklist
ListBlocklist,
/// Toggle blocklist for merchant
ToggleBlocklistGuard,
/// Incoming Webhook Receive
IncomingWebhookReceive,
/// Recovery incoming webhook receive
RecoveryIncomingWebhookReceive,
/// Validate payment method flow
ValidatePaymentMethod,
/// API Key create flow
ApiKeyCreate,
/// API Key retrieve flow
ApiKeyRetrieve,
/// API Key update flow
ApiKeyUpdate,
/// API Key revoke flow
ApiKeyRevoke,
/// API Key list flow
ApiKeyList,
/// Dispute Retrieve flow
DisputesRetrieve,
/// Dispute List flow
DisputesList,
/// Dispute Filters flow
DisputesFilters,
/// Cards Info flow
CardsInfo,
/// Create File flow
CreateFile,
/// Delete File flow
DeleteFile,
/// Retrieve File flow
RetrieveFile,
/// Dispute Evidence submission flow
DisputesEvidenceSubmit,
/// Create Config Key flow
CreateConfigKey,
/// Attach Dispute Evidence flow
AttachDisputeEvidence,
/// Delete Dispute Evidence flow
DeleteDisputeEvidence,
/// Disputes aggregate flow
DisputesAggregate,
/// Retrieve Dispute Evidence flow
RetrieveDisputeEvidence,
/// Invalidate cache flow
CacheInvalidate,
/// Payment Link Retrieve flow
PaymentLinkRetrieve,
/// payment Link Initiate flow
PaymentLinkInitiate,
/// Payment Secure Link Initiate flow
PaymentSecureLinkInitiate,
/// Payment Link List flow
PaymentLinkList,
/// Payment Link Status
PaymentLinkStatus,
/// Create a profile
ProfileCreate,
/// Update a profile
ProfileUpdate,
/// Retrieve a profile
ProfileRetrieve,
/// Delete a profile
ProfileDelete,
/// List all the profiles for a merchant
ProfileList,
/// Different verification flows
Verification,
/// Rust locker migration
RustLockerMigration,
/// Gsm Rule Creation flow
GsmRuleCreate,
/// Gsm Rule Retrieve flow
GsmRuleRetrieve,
/// Gsm Rule Update flow
GsmRuleUpdate,
/// Apple pay certificates migration
ApplePayCertificatesMigration,
/// Gsm Rule Delete flow
GsmRuleDelete,
/// Get data from embedded flow
GetDataFromHyperswitchAiFlow,
/// List all chat interactions
ListAllChatInteractions,
/// User Sign Up
UserSignUp,
/// User Sign Up
UserSignUpWithMerchantId,
/// User Sign In
UserSignIn,
/// User transfer key
UserTransferKey,
/// User connect account
UserConnectAccount,
/// Upsert Decision Manager Config
DecisionManagerUpsertConfig,
/// Delete Decision Manager Config
DecisionManagerDeleteConfig,
/// Retrieve Decision Manager Config
DecisionManagerRetrieveConfig,
/// Manual payment fulfillment acknowledgement
FrmFulfillment,
/// Get connectors feature matrix
FeatureMatrix,
/// Change password flow
ChangePassword,
/// Signout flow
Signout,
/// Set Dashboard Metadata flow
SetDashboardMetadata,
/// Get Multiple Dashboard Metadata flow
GetMultipleDashboardMetadata,
/// Payment Connector Verify
VerifyPaymentConnector,
/// Internal user signup
InternalUserSignup,
/// Create tenant level user
TenantUserCreate,
/// Switch org
SwitchOrg,
/// Switch merchant v2
SwitchMerchantV2,
/// Switch profile
SwitchProfile,
/// Get permission info
GetAuthorizationInfo,
/// Get Roles info
GetRolesInfo,
/// Get Parent Group Info
GetParentGroupInfo,
/// List roles v2
ListRolesV2,
/// List invitable roles at entity level
ListInvitableRolesAtEntityLevel,
/// List updatable roles at entity level
ListUpdatableRolesAtEntityLevel,
/// Get role
GetRole,
/// Get parent info for role
GetRoleV2,
/// Get role from token
GetRoleFromToken,
/// Get resources and groups for role from token
GetRoleFromTokenV2,
/// Get parent groups info for role from token
GetParentGroupsInfoForRoleFromToken,
/// Update user role
UpdateUserRole,
/// Create merchant account for user in a org
UserMerchantAccountCreate,
/// Create Platform
CreatePlatformAccount,
/// Create Org in a given tenancy
UserOrgMerchantCreate,
/// Generate Sample Data
GenerateSampleData,
/// Delete Sample Data
DeleteSampleData,
/// Get details of a user
GetUserDetails,
/// Get details of a user role in a merchant account
GetUserRoleDetails,
/// PaymentMethodAuth Link token create
PmAuthLinkTokenCreate,
/// PaymentMethodAuth Exchange token create
PmAuthExchangeToken,
/// Get reset password link
ForgotPassword,
/// Reset password using link
ResetPassword,
/// Force set or force change password
RotatePassword,
/// Invite multiple users
InviteMultipleUser,
/// Reinvite user
ReInviteUser,
/// Accept invite from email
AcceptInviteFromEmail,
/// Delete user role
DeleteUserRole,
/// Incremental Authorization flow
PaymentsIncrementalAuthorization,
/// Extend Authorization flow
PaymentsExtendAuthorization,
/// Get action URL for connector onboarding
GetActionUrl,
/// Sync connector onboarding status
SyncOnboardingStatus,
/// Reset tracking id
ResetTrackingId,
/// Verify email Token
VerifyEmail,
/// Send verify email
VerifyEmailRequest,
/// Update user account details
UpdateUserAccountDetails,
/// Accept user invitation using entities
AcceptInvitationsV2,
/// Accept user invitation using entities before user login
AcceptInvitationsPreAuth,
/// Initiate external authentication for a payment
PaymentsExternalAuthentication,
/// Authorize the payment after external 3ds authentication
PaymentsAuthorize,
/// Create Role
CreateRole,
/// Create Role V2
CreateRoleV2,
/// Update Role
UpdateRole,
/// User email flow start
UserFromEmail,
/// Begin TOTP
TotpBegin,
/// Reset TOTP
TotpReset,
/// Verify TOTP
TotpVerify,
/// Update TOTP secret
TotpUpdate,
/// Verify Access Code
RecoveryCodeVerify,
/// Generate or Regenerate recovery codes
RecoveryCodesGenerate,
/// Terminate two factor authentication
TerminateTwoFactorAuth,
/// Check 2FA status
TwoFactorAuthStatus,
/// Create user authentication method
CreateUserAuthenticationMethod,
/// Update user authentication method
UpdateUserAuthenticationMethod,
/// List user authentication methods
ListUserAuthenticationMethods,
/// Get sso auth url
GetSsoAuthUrl,
/// Signin with SSO
SignInWithSso,
/// Auth Select
AuthSelect,
/// List Orgs for user
ListOrgForUser,
/// List Merchants for user in org
ListMerchantsForUserInOrg,
/// List Profile for user in org and merchant
ListProfileForUserInOrgAndMerchant,
/// List Users in Org
ListUsersInLineage,
/// List invitations for user
ListInvitationsForUser,
/// Get theme using lineage
GetThemeUsingLineage,
/// Get theme using theme id
GetThemeUsingThemeId,
/// Upload file to theme storage
UploadFileToThemeStorage,
/// Create theme
CreateTheme,
/// Update theme
UpdateTheme,
/// Delete theme
DeleteTheme,
/// Create user theme
CreateUserTheme,
/// Update user theme
UpdateUserTheme,
/// Delete user theme
DeleteUserTheme,
/// Upload file to user theme storage
UploadFileToUserThemeStorage,
/// Get user theme using theme id
GetUserThemeUsingThemeId,
///List All Themes In Lineage
ListAllThemesInLineage,
/// Get user theme using lineage
GetUserThemeUsingLineage,
/// List initial webhook delivery attempts
WebhookEventInitialDeliveryAttemptList,
/// List delivery attempts for a webhook event
WebhookEventDeliveryAttemptList,
/// Manually retry the delivery for a webhook event
WebhookEventDeliveryRetry,
/// Retrieve status of the Poll
RetrievePollStatus,
/// Toggles the extended card info feature in profile level
ToggleExtendedCardInfo,
/// Toggles the extended card info feature in profile level
ToggleConnectorAgnosticMit,
/// Get the extended card info associated to a payment_id
GetExtendedCardInfo,
/// Manually update the refund details like status, error code, error message etc.
RefundsManualUpdate,
/// Manually update the payment details like status, error code, error message etc.
PaymentsManualUpdate,
/// Dynamic Tax Calculation
SessionUpdateTaxCalculation,
ProxyConfirmIntent,
/// Payments post session tokens flow
PaymentsPostSessionTokens,
/// Payments Update Metadata
PaymentsUpdateMetadata,
/// Payments start redirection flow
PaymentStartRedirection,
/// Volume split on the routing type
VolumeSplitOnRoutingType,
/// Routing evaluate rule flow
RoutingEvaluateRule,
/// Relay flow
Relay,
/// Relay retrieve flow
RelayRetrieve,
/// Card tokenization flow
TokenizeCard,
/// Card tokenization using payment method flow
TokenizeCardUsingPaymentMethodId,
/// Cards batch tokenization flow
TokenizeCardBatch,
/// Incoming Relay Webhook Receive
IncomingRelayWebhookReceive,
/// Generate Hypersense Token
HypersenseTokenRequest,
/// Verify Hypersense Token
HypersenseVerifyToken,
/// Signout Hypersense Token
HypersenseSignoutToken,
/// Payment Method Session Create
PaymentMethodSessionCreate,
/// Payment Method Session Retrieve
PaymentMethodSessionRetrieve,
/// Payment Method Session Update
PaymentMethodSessionUpdate,
/// Update a saved payment method using the payment methods session
PaymentMethodSessionUpdateSavedPaymentMethod,
/// Delete a saved payment method using the payment methods session
PaymentMethodSessionDeleteSavedPaymentMethod,
/// Confirm a payment method session with payment method data
PaymentMethodSessionConfirm,
/// Create Cards Info flow
CardsInfoCreate,
/// Update Cards Info flow
CardsInfoUpdate,
/// Cards Info migrate flow
CardsInfoMigrate,
///Total payment method count for merchant
TotalPaymentMethodCount,
/// Process Tracker Revenue Recovery Workflow Retrieve
RevenueRecoveryRetrieve,
/// Process Tracker Revenue Recovery Workflow Resume
RevenueRecoveryResume,
/// Tokenization flow
TokenizationCreate,
/// Tokenization retrieve flow
TokenizationRetrieve,
/// Clone Connector flow
CloneConnector,
/// Authentication Create flow
AuthenticationCreate,
/// Authentication Eligibility flow
AuthenticationEligibility,
/// Authentication Sync flow
AuthenticationSync,
/// Authentication Sync Post Update flow
AuthenticationSyncPostUpdate,
/// Authentication Authenticate flow
AuthenticationAuthenticate,
///Proxy Flow
Proxy,
/// Profile Acquirer Create flow
ProfileAcquirerCreate,
/// Profile Acquirer Update flow
ProfileAcquirerUpdate,
/// ThreeDs Decision Rule Execute flow
ThreeDsDecisionRuleExecute,
/// Incoming Network Token Webhook Receive
IncomingNetworkTokenWebhookReceive,
/// Decision Engine Decide Gateway Call
DecisionEngineDecideGatewayCall,
/// Decision Engine Gateway Feedback Call
DecisionEngineGatewayFeedbackCall,
/// Recovery payments create flow.
RecoveryPaymentsCreate,
/// Tokenization delete flow
TokenizationDelete,
/// Payment method data backfill flow
RecoveryDataBackfill,
/// Revenue recovery Redis operations flow
RevenueRecoveryRedis,
/// Gift card balance check flow
GiftCardBalanceCheck,
/// Payments Submit Eligibility flow
PaymentsSubmitEligibility,
}
/// Trait for providing generic behaviour to flow metric.
///
/// Requires `ToString` (to produce the metric label), `Debug` (for diagnostics)
/// and `Clone` (so the value can be attached to multiple records).
pub trait FlowMetric: ToString + std::fmt::Debug + Clone {}
/// Every API [`Flow`] variant can serve as a flow-metric label.
impl FlowMetric for Flow {}
/// Category of log event.
///
/// Coarse classification of the subsystem a log record originates from.
#[derive(Debug)]
pub enum Category {
    /// Redis: general.
    Redis,
    /// API: general.
    Api,
    /// Database / store: general.
    Store,
    /// Event processing: general.
    Event,
    /// General: catch-all for everything else.
    General,
}
// File: crates/router_env/src/logger/config.rs
//! Logger-specific config.
use std::path::PathBuf;
use serde::Deserialize;
/// Config settings.
#[derive(Debug, Deserialize, Clone)]
pub struct Config {
    /// Logging configuration: file, console and telemetry settings.
    pub log: Log,
}
/// Log config settings.
#[derive(Debug, Deserialize, Clone, Default)]
#[serde(default)]
pub struct Log {
    /// Configuration for logging to a file.
    pub file: LogFile,
    /// Configuration for logging to a console.
    pub console: LogConsole,
    /// Configuration for the telemetry / tracing pipelines.
    pub telemetry: LogTelemetry,
}
/// Logging to a file.
// NOTE(review): `#[serde(default)]` requires `LogFile: Default`; the impl is not
// visible in this file — presumably provided elsewhere in the crate, confirm.
#[derive(Debug, Deserialize, Clone)]
#[serde(default)]
pub struct LogFile {
    /// Whether you want to store log in log files.
    pub enabled: bool,
    /// Where to store log files.
    pub path: String,
    /// Name of log file without suffix.
    pub file_name: String,
    /// What gets into log files (minimum level of recorded entries).
    pub level: Level,
    /// Directive which sets the log level for one or more crates/modules
    /// (e.g. `my_crate=debug` — tracing-style filtering directive, confirm exact format with the consumer).
    pub filtering_directive: Option<String>,
    // pub do_async: bool, // is not used
    // pub rotation: u16,
}
/// Describes the level of verbosity of a span or event.
///
/// Newtype around [`tracing::Level`] so a custom `Deserialize` impl can parse it
/// from its string representation in config files.
#[derive(Debug, Clone, Copy)]
pub struct Level(pub(super) tracing::Level);
impl Level {
/// Returns the most verbose [`tracing::Level`]
pub fn into_level(self) -> tracing::Level {
self.0
}
}
impl<'de> Deserialize<'de> for Level {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
use std::str::FromStr as _;
let s = String::deserialize(deserializer)?;
tracing::Level::from_str(&s)
.map(Level)
.map_err(serde::de::Error::custom)
}
}
/// Logging to a console.
// NOTE(review): `#[serde(default)]` requires `LogConsole: Default`; the impl is not
// visible in this file — presumably provided elsewhere in the crate, confirm.
#[derive(Debug, Deserialize, Clone)]
#[serde(default)]
pub struct LogConsole {
    /// Whether you want to see log in your terminal.
    pub enabled: bool,
    /// What you see in your terminal (minimum level of displayed entries).
    pub level: Level,
    /// Log format (plain, JSON or pretty-printed JSON).
    #[serde(default)]
    pub log_format: LogFormat,
    /// Directive which sets the log level for one or more crates/modules.
    pub filtering_directive: Option<String>,
}
/// Telemetry / tracing.
#[derive(Debug, Deserialize, Clone, Default)]
#[serde(default)]
pub struct LogTelemetry {
    /// Whether the traces pipeline is enabled.
    pub traces_enabled: bool,
    /// Whether the metrics pipeline is enabled.
    pub metrics_enabled: bool,
    /// Whether errors in setting up traces or metrics pipelines must be ignored.
    pub ignore_errors: bool,
    /// Sampling rate for traces.
    pub sampling_rate: Option<f64>,
    /// Base endpoint URL to send metrics and traces to. Can optionally include the port number.
    pub otel_exporter_otlp_endpoint: Option<String>,
    /// Timeout (in milliseconds) for sending metrics and traces.
    pub otel_exporter_otlp_timeout: Option<u64>,
    /// Whether to use the X-Ray ID generator (enable this if you plan to use AWS X-Ray).
    pub use_xray_generator: bool,
    /// Route-based tracing: list of routes for which traces are collected.
    pub route_to_trace: Option<Vec<String>>,
    /// Interval (in seconds) for collecting metrics (such as gauges) in a background thread.
    pub bg_metrics_collection_interval_in_secs: Option<u16>,
}
/// Format of log records emitted to the console.
// (Previous doc comment "Telemetry / tracing." was a copy-paste error.)
#[derive(Default, Debug, Deserialize, Clone, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub enum LogFormat {
    /// Default pretty log format
    Default,
    /// JSON based structured logging
    #[default]
    Json,
    /// JSON based structured logging with pretty print
    PrettyJson,
}
impl Config {
    /// Default constructor.
    ///
    /// Reads configuration for the environment selected via the `RUN_ENV`
    /// environment variable, with no explicitly provided config file path.
    ///
    /// # Errors
    /// Returns a [`config::ConfigError`] if the configuration cannot be built
    /// or deserialized.
    pub fn new() -> Result<Self, config::ConfigError> {
        Self::new_with_config_path(None)
    }
    /// Constructor expecting config path set explicitly.
    ///
    /// # Errors
    /// Returns a [`config::ConfigError`] if the configuration cannot be built
    /// or deserialized.
    pub fn new_with_config_path(
        explicit_config_path: Option<PathBuf>,
    ) -> Result<Self, config::ConfigError> {
        // Configuration values are picked up in the following priority order (1 being least
        // priority):
        // 1. Defaults from the implementation of the `Default` trait.
        // 2. Values from config file. The config file accessed depends on the environment
        //    specified by the `RUN_ENV` environment variable. `RUN_ENV` can be one of
        //    `development`, `sandbox` or `production`. If nothing is specified for `RUN_ENV`,
        //    `/config/development.toml` file is read.
        // 3. Environment variables prefixed with `ROUTER` and each level separated by double
        //    underscores.
        //
        // Values in config file override the defaults in `Default` trait, and the values set using
        // environment variables override both the defaults and the config file values.
        //
        // Compute the environment name once instead of calling `to_string()` twice.
        let environment = crate::env::which().to_string();
        let config_path = Self::config_path(&environment, explicit_config_path);
        let config = Self::builder(&environment)?
            // The file is optional: missing config files fall back to defaults + env vars.
            .add_source(config::File::from(config_path).required(false))
            .add_source(config::Environment::with_prefix("ROUTER").separator("__"))
            .build()?;
        // The logger may not yet be initialized when constructing the application configuration,
        // so deserialization errors are additionally reported on stderr.
        #[allow(clippy::print_stderr)]
        serde_path_to_error::deserialize(config).map_err(|error| {
            crate::error!(%error, "Unable to deserialize configuration");
            eprintln!("Unable to deserialize application configuration: {error}");
            error.into_inner()
        })
    }
    /// Construct config builder extending it by fall-back defaults and setting config file to load.
    pub fn builder(
        environment: &str,
    ) -> Result<config::ConfigBuilder<config::builder::DefaultState>, config::ConfigError> {
        config::Config::builder()
            // Here, it should be `set_override()` not `set_default()`.
            // "env" can't be altered by config field.
            // Should be single source of truth.
            .set_override("env", environment)
    }
    /// Config path.
    ///
    /// Uses `explicit_config_path` when provided; otherwise derives the file name
    /// from the environment (`production.toml`, `sandbox.toml` or `development.toml`)
    /// inside the configured config directory.
    pub fn config_path(environment: &str, explicit_config_path: Option<PathBuf>) -> PathBuf {
        if let Some(explicit_config_path_val) = explicit_config_path {
            return explicit_config_path_val;
        }
        let config_file_name = match environment {
            "production" => "production.toml",
            "sandbox" => "sandbox.toml",
            _ => "development.toml",
        };
        Self::get_config_directory().join(config_file_name)
    }
    /// Get the directory for the config file.
    /// Reads the env variable `CONFIG_DIR` or falls back to `config`,
    /// resolved relative to the workspace path.
    pub fn get_config_directory() -> PathBuf {
        let mut config_path = PathBuf::new();
        let config_directory =
            std::env::var(crate::env::vars::CONFIG_DIR).unwrap_or_else(|_| "config".into());
        config_path.push(crate::env::workspace_path());
        config_path.push(config_directory);
        config_path
    }
}
// File: crates/router_env/src/logger/formatter.rs
//! Formatting [layer](https://docs.rs/tracing-subscriber/0.3.15/tracing_subscriber/layer/trait.Layer.html) for Router.
use std::{
collections::{HashMap, HashSet},
fmt,
io::Write,
sync::LazyLock,
};
use config::ConfigError;
use serde::ser::{SerializeMap, Serializer};
use serde_json::{ser::Formatter, Value};
// use time::format_description::well_known::Rfc3339;
use time::format_description::well_known::Iso8601;
use tracing::{Event, Metadata, Subscriber};
use tracing_subscriber::{
fmt::MakeWriter,
layer::Context,
registry::{LookupSpan, SpanRef},
Layer,
};
use crate::Storage;
// TODO: Documentation coverage for this crate
// Implicit keys: JSON keys that the formatting layer itself populates on every record.
// Note: `MESSAGE` is declared here but is registered in `EXTRA_IMPLICIT_KEYS` below,
// since its value is provided at runtime rather than by the layer.
const MESSAGE: &str = "message";
const HOSTNAME: &str = "hostname";
const PID: &str = "pid";
const ENV: &str = "env";
const VERSION: &str = "version";
const BUILD: &str = "build";
const LEVEL: &str = "level";
const TARGET: &str = "target";
const SERVICE: &str = "service";
const LINE: &str = "line";
const FILE: &str = "file";
const FN: &str = "fn";
const FULL_NAME: &str = "full_name";
const TIME: &str = "time";
// Extra implicit keys. Keys that are provided during runtime but should be treated as
// implicit in the logs
const FLOW: &str = "flow";
const MERCHANT_AUTH: &str = "merchant_authentication";
const MERCHANT_ID: &str = "merchant_id";
const REQUEST_METHOD: &str = "request_method";
const REQUEST_URL_PATH: &str = "request_url_path";
const REQUEST_ID: &str = "request_id";
const WORKFLOW_ID: &str = "workflow_id";
const GLOBAL_ID: &str = "global_id";
const SESSION_ID: &str = "session_id";
/// Set of predefined implicit keys.
///
/// Any user-supplied entry under one of these keys is rejected by the
/// formatting/storage layers so it cannot shadow a layer-populated field.
pub static IMPLICIT_KEYS: LazyLock<rustc_hash::FxHashSet<&str>> = LazyLock::new(|| {
    [
        HOSTNAME, PID, ENV, VERSION, BUILD, LEVEL, TARGET, SERVICE, LINE, FILE, FN, FULL_NAME,
        TIME,
    ]
    .into_iter()
    .collect()
});
/// Extra implicit keys. Keys that are not purely implicit but need to be logged alongside
/// other implicit keys in the log json.
pub static EXTRA_IMPLICIT_KEYS: LazyLock<rustc_hash::FxHashSet<&str>> = LazyLock::new(|| {
    [
        MESSAGE,
        FLOW,
        MERCHANT_AUTH,
        MERCHANT_ID,
        REQUEST_METHOD,
        REQUEST_URL_PATH,
        REQUEST_ID,
        GLOBAL_ID,
        SESSION_ID,
        WORKFLOW_ID,
    ]
    .into_iter()
    .collect()
});
/// Describe type of record: entering a span, exiting a span, an event.
#[derive(Clone, Debug)]
pub enum RecordType {
    /// Entering a span.
    EnterSpan,
    /// Exiting a span.
    ExitSpan,
    /// A standalone event.
    Event,
}

impl fmt::Display for RecordType {
    /// Render the record type as its log marker: `START`, `END` or `EVENT`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(match self {
            Self::EnterSpan => "START",
            Self::ExitSpan => "END",
            Self::Event => "EVENT",
        })
    }
}
/// Format log records.
/// `FormattingLayer` relies on the `tracing_bunyan_formatter::JsonStorageLayer` which is storage of entries.
#[derive(Debug)]
pub struct FormattingLayer<W, F>
where
    W: for<'a> MakeWriter<'a> + 'static,
    F: Formatter + Clone,
{
    /// Destination every serialized record is written to.
    dst_writer: W,
    /// Process id, emitted under the `pid` key.
    pid: u32,
    /// Host name, emitted under the `hostname` key.
    hostname: String,
    /// Environment name, emitted under the `env` key.
    env: String,
    /// Service name, emitted under the `service` key.
    service: String,
    /// Crate version, emitted under the `version` key.
    #[cfg(feature = "vergen")]
    version: String,
    /// Build identifier, emitted under the `build` key.
    #[cfg(feature = "vergen")]
    build: String,
    /// Extra key-value pairs written into every record.
    default_fields: HashMap<String, Value>,
    /// serde_json `Formatter` used when serializing records.
    formatter: F,
}
impl<W, F> FormattingLayer<W, F>
where
    W: for<'a> MakeWriter<'a> + 'static,
    F: Formatter + Clone,
{
    /// Constructor of `FormattingLayer`.
    ///
    /// A `service` name will be attached to all records during formatting.
    /// A `dst_writer` to forward all records.
    ///
    /// ## Example
    /// ```rust
    /// let formatting_layer = router_env::FormattingLayer::new("my_service", std::io::stdout, serde_json::ser::CompactFormatter);
    /// ```
    pub fn new(
        service: &str,
        dst_writer: W,
        formatter: F,
    ) -> error_stack::Result<Self, ConfigError> {
        Self::new_with_implicit_entries(service, dst_writer, HashMap::new(), formatter)
    }
    /// Constructor of `FormattingLayer` with implicit default entries.
    ///
    /// Every key-value pair in `default_fields` is written into each record.
    /// Keys clashing with the reserved [`IMPLICIT_KEYS`] are rejected with an error.
    pub fn new_with_implicit_entries(
        service: &str,
        dst_writer: W,
        default_fields: HashMap<String, Value>,
        formatter: F,
    ) -> error_stack::Result<Self, ConfigError> {
        let pid = std::process::id();
        let hostname = gethostname::gethostname().to_string_lossy().into_owned();
        let service = service.to_string();
        #[cfg(feature = "vergen")]
        let version = crate::version!().to_string();
        #[cfg(feature = "vergen")]
        let build = crate::build!().to_string();
        let env = crate::env::which().to_string();
        // Reject user-supplied defaults that would silently shadow reserved implicit keys.
        for key in default_fields.keys() {
            if IMPLICIT_KEYS.contains(key.as_str()) {
                return Err(ConfigError::Message(format!(
                    "A reserved key `{key}` was included in `default_fields` in the log formatting layer"
                ))
                .into());
            }
        }
        Ok(Self {
            dst_writer,
            pid,
            hostname,
            env,
            service,
            #[cfg(feature = "vergen")]
            version,
            #[cfg(feature = "vergen")]
            build,
            default_fields,
            formatter,
        })
    }
    /// Serialize fields common to both span and event entries.
    ///
    /// Writes implicit keys first (hostname, pid, env, level, target, ...), then
    /// the configured default fields, then explicit entries recorded on the event
    /// itself and finally entries inherited from the surrounding span (which never
    /// override explicit event entries).
    fn common_serialize<S>(
        &self,
        map_serializer: &mut impl SerializeMap<Error = serde_json::Error>,
        metadata: &Metadata<'_>,
        span: Option<&SpanRef<'_, S>>,
        storage: &Storage<'_>,
        name: &str,
    ) -> Result<(), std::io::Error>
    where
        S: Subscriber + for<'a> LookupSpan<'a>,
    {
        // "extra" = any key not reserved as implicit; "extra implicit" = runtime-provided
        // keys (flow, merchant_id, ...) still serialized alongside the implicit ones.
        let is_extra = |s: &str| !IMPLICIT_KEYS.contains(s);
        let is_extra_implicit = |s: &str| is_extra(s) && EXTRA_IMPLICIT_KEYS.contains(s);
        map_serializer.serialize_entry(HOSTNAME, &self.hostname)?;
        map_serializer.serialize_entry(PID, &self.pid)?;
        map_serializer.serialize_entry(ENV, &self.env)?;
        #[cfg(feature = "vergen")]
        map_serializer.serialize_entry(VERSION, &self.version)?;
        #[cfg(feature = "vergen")]
        map_serializer.serialize_entry(BUILD, &self.build)?;
        map_serializer.serialize_entry(LEVEL, &format_args!("{}", metadata.level()))?;
        map_serializer.serialize_entry(TARGET, metadata.target())?;
        map_serializer.serialize_entry(SERVICE, &self.service)?;
        map_serializer.serialize_entry(LINE, &metadata.line())?;
        map_serializer.serialize_entry(FILE, &metadata.file())?;
        map_serializer.serialize_entry(FN, name)?;
        map_serializer
            .serialize_entry(FULL_NAME, &format_args!("{}::{}", metadata.target(), name))?;
        // Timestamp is best-effort: a formatting failure simply omits the `time` key.
        if let Ok(time) = &time::OffsetDateTime::now_utc().format(&Iso8601::DEFAULT) {
            map_serializer.serialize_entry(TIME, time)?;
        }
        // Write down implicit default entries.
        for (key, value) in self.default_fields.iter() {
            map_serializer.serialize_entry(key, value)?;
        }
        #[cfg(feature = "log_custom_entries_to_extra")]
        let mut extra = serde_json::Map::default();
        // Tracks keys written from the event so span entries cannot override them.
        let mut explicit_entries_set: HashSet<&str> = HashSet::default();
        // Write down explicit event's entries.
        for (key, value) in storage.values.iter() {
            if is_extra_implicit(key) {
                #[cfg(feature = "log_extra_implicit_fields")]
                map_serializer.serialize_entry(key, value)?;
                explicit_entries_set.insert(key);
            } else if is_extra(key) {
                #[cfg(feature = "log_custom_entries_to_extra")]
                extra.insert(key.to_string(), value.clone());
                #[cfg(not(feature = "log_custom_entries_to_extra"))]
                map_serializer.serialize_entry(key, value)?;
                explicit_entries_set.insert(key);
            } else {
                tracing::warn!(
                    ?key,
                    ?value,
                    "Attempting to log a reserved entry. It won't be added to the logs"
                );
            }
        }
        // Write down entries from the span, if it exists.
        if let Some(span) = &span {
            let extensions = span.extensions();
            if let Some(visitor) = extensions.get::<Storage<'_>>() {
                for (key, value) in &visitor.values {
                    if is_extra_implicit(key) && !explicit_entries_set.contains(key) {
                        #[cfg(feature = "log_extra_implicit_fields")]
                        map_serializer.serialize_entry(key, value)?;
                    } else if is_extra(key) && !explicit_entries_set.contains(key) {
                        #[cfg(feature = "log_custom_entries_to_extra")]
                        extra.insert(key.to_string(), value.clone());
                        #[cfg(not(feature = "log_custom_entries_to_extra"))]
                        map_serializer.serialize_entry(key, value)?;
                    } else {
                        // NOTE(review): span keys that were already recorded on the event
                        // also fall into this branch, so the "reserved entry" warning is
                        // misleading for such duplicates — consider distinguishing them.
                        tracing::warn!(
                            ?key,
                            ?value,
                            "Attempting to log a reserved entry. It won't be added to the logs"
                        );
                    }
                }
            }
        }
        #[cfg(feature = "log_custom_entries_to_extra")]
        map_serializer.serialize_entry("extra", &extra)?;
        Ok(())
    }
    /// Flush memory buffer into an output stream trailing it with next line.
    ///
    /// Should be done by single `write_all` call to avoid fragmentation of log because of multithreading.
    fn flush(&self, mut buffer: Vec<u8>) -> Result<(), std::io::Error> {
        buffer.write_all(b"\n")?;
        self.dst_writer.make_writer().write_all(&buffer)
    }
    /// Serialize entries of span.
    fn span_serialize<S>(
        &self,
        span: &SpanRef<'_, S>,
        ty: RecordType,
    ) -> Result<Vec<u8>, std::io::Error>
    where
        S: Subscriber + for<'a> LookupSpan<'a>,
    {
        let mut buffer = Vec::new();
        let mut serializer =
            serde_json::Serializer::with_formatter(&mut buffer, self.formatter.clone());
        let mut map_serializer = serializer.serialize_map(None)?;
        let message = Self::span_message(span, ty);
        let mut storage = Storage::default();
        storage.record_value("message", message.into());
        self.common_serialize(
            &mut map_serializer,
            span.metadata(),
            Some(span),
            &storage,
            span.name(),
        )?;
        map_serializer.end()?;
        Ok(buffer)
    }
    /// Serialize event into a buffer of bytes using parent span.
    pub fn event_serialize<S>(
        &self,
        span: Option<&SpanRef<'_, S>>,
        event: &Event<'_>,
    ) -> std::io::Result<Vec<u8>>
    where
        S: Subscriber + for<'a> LookupSpan<'a>,
    {
        let mut buffer = Vec::new();
        let mut serializer =
            serde_json::Serializer::with_formatter(&mut buffer, self.formatter.clone());
        let mut map_serializer = serializer.serialize_map(None)?;
        let mut storage = Storage::default();
        event.record(&mut storage);
        // Fall back to "?" as the function name when the event has no enclosing span.
        let name = span.map_or("?", SpanRef::name);
        Self::event_message(span, event, &mut storage);
        self.common_serialize(&mut map_serializer, event.metadata(), span, &storage, name)?;
        map_serializer.end()?;
        Ok(buffer)
    }
    /// Format message of a span.
    ///
    /// Example: "[FN_WITHOUT_COLON - START]"
    fn span_message<S>(span: &SpanRef<'_, S>, ty: RecordType) -> String
    where
        S: Subscriber + for<'a> LookupSpan<'a>,
    {
        format!("[{} - {}]", span.metadata().name().to_uppercase(), ty)
    }
    /// Format message of an event.
    ///
    /// Examples: "[FN_WITHOUT_COLON - EVENT] Message"
    fn event_message<S>(span: Option<&SpanRef<'_, S>>, event: &Event<'_>, storage: &mut Storage<'_>)
    where
        S: Subscriber + for<'a> LookupSpan<'a>,
    {
        // Get value of kept "message" or "target" if does not exist.
        let message = storage
            .values
            .entry("message")
            .or_insert_with(|| event.metadata().target().into());
        // Prepend the span name to the message if span exists.
        if let (Some(span), Value::String(a)) = (span, message) {
            *a = format!("{} {}", Self::span_message(span, RecordType::Event), a,);
        }
    }
}
#[allow(clippy::expect_used)]
impl<S, W, F> Layer<S> for FormattingLayer<W, F>
where
    S: Subscriber + for<'a> LookupSpan<'a>,
    W: for<'a> MakeWriter<'a> + 'static,
    F: Formatter + Clone + 'static,
{
    /// Serialize each event (with its current span, if any) and flush it as one JSON line.
    fn on_event(&self, event: &Event<'_>, ctx: Context<'_, S>) {
        // Event could have no span.
        let span = ctx.lookup_current();
        let result: std::io::Result<Vec<u8>> = self.event_serialize(span.as_ref(), event);
        // Serialization/flush failures are deliberately ignored: logging must not fail the app.
        if let Ok(formatted) = result {
            let _ = self.flush(formatted);
        }
    }
    /// Emit a START record for every entered span (only with `log_active_span_json`).
    #[cfg(feature = "log_active_span_json")]
    fn on_enter(&self, id: &tracing::Id, ctx: Context<'_, S>) {
        let span = ctx.span(id).expect("No span");
        if let Ok(serialized) = self.span_serialize(&span, RecordType::EnterSpan) {
            let _ = self.flush(serialized);
        }
    }
    /// Emit an END record, but only for root spans (spans without a parent),
    /// when `log_active_span_json` is disabled.
    #[cfg(not(feature = "log_active_span_json"))]
    fn on_close(&self, id: tracing::Id, ctx: Context<'_, S>) {
        let span = ctx.span(&id).expect("No span");
        if span.parent().is_none() {
            if let Ok(serialized) = self.span_serialize(&span, RecordType::ExitSpan) {
                let _ = self.flush(serialized);
            }
        }
    }
    /// Emit an END record for every closed span (only with `log_active_span_json`).
    #[cfg(feature = "log_active_span_json")]
    fn on_close(&self, id: tracing::Id, ctx: Context<'_, S>) {
        let span = ctx.span(&id).expect("No span");
        if let Ok(serialized) = self.span_serialize(&span, RecordType::ExitSpan) {
            let _ = self.flush(serialized);
        }
    }
}
// File: crates/router_env/src/logger/storage.rs
//! Storing [layer](https://docs.rs/tracing-subscriber/0.3.15/tracing_subscriber/layer/trait.Layer.html) for Router.
use std::{collections::HashMap, fmt, time::Instant};
use tracing::{
field::{Field, Visit},
span::{Attributes, Record},
Id, Subscriber,
};
use tracing_subscriber::{layer::Context, Layer};
/// Subscription layer that attaches a [`Storage`] of key-value pairs to each span.
// (Previous doc comment was duplicated from `Storage` below.)
#[derive(Clone, Debug)]
pub struct StorageSubscription;
/// Storage to store key value pairs of spans.
/// When a new entry is created it is stored in a [HashMap] which is owned by the span's `extensions`.
#[derive(Clone, Debug)]
pub struct Storage<'a> {
    /// Hash map to store values.
    pub values: HashMap<&'a str, serde_json::Value>,
}
impl<'a> Storage<'a> {
    /// Default constructor.
    pub fn new() -> Self {
        Self::default()
    }
    /// Record a key-value pair, skipping (with a warning) any key that is
    /// reserved by the formatter's `IMPLICIT_KEYS`.
    pub fn record_value(&mut self, key: &'a str, value: serde_json::Value) {
        if super::formatter::IMPLICIT_KEYS.contains(key) {
            tracing::warn!(value =? value, "{} is a reserved entry. Skipping it.", key);
        } else {
            self.values.insert(key, value);
        }
    }
}
/// An empty storage with no recorded values.
impl Default for Storage<'_> {
    fn default() -> Self {
        let values = HashMap::new();
        Self { values }
    }
}
/// Visitor to store entry.
///
/// Converts each recorded tracing field into a `serde_json::Value` and stores it
/// via [`Storage::record_value`] (which filters out reserved keys).
impl Visit for Storage<'_> {
    /// An `i64` value.
    fn record_i64(&mut self, field: &Field, value: i64) {
        self.record_value(field.name(), serde_json::Value::from(value));
    }
    /// A `u64` value.
    fn record_u64(&mut self, field: &Field, value: u64) {
        self.record_value(field.name(), serde_json::Value::from(value));
    }
    /// A 64-bit floating point value.
    fn record_f64(&mut self, field: &Field, value: f64) {
        self.record_value(field.name(), serde_json::Value::from(value));
    }
    /// A boolean value.
    fn record_bool(&mut self, field: &Field, value: bool) {
        self.record_value(field.name(), serde_json::Value::from(value));
    }
    /// A string value.
    fn record_str(&mut self, field: &Field, value: &str) {
        self.record_value(field.name(), serde_json::Value::from(value));
    }
    /// Any other type: stored as its `Debug` representation.
    fn record_debug(&mut self, field: &Field, value: &dyn fmt::Debug) {
        match field.name() {
            // Skip fields which are already handled
            name if name.starts_with("log.") => (),
            // Raw-identifier fields (`r#type`, ...): strip the `r#` prefix before storing.
            name if name.starts_with("r#") => {
                self.record_value(
                    #[allow(clippy::expect_used)]
                    name.get(2..)
                        .expect("field name must have a minimum of two characters"),
                    serde_json::Value::from(format!("{value:?}")),
                );
            }
            name => {
                self.record_value(name, serde_json::Value::from(format!("{value:?}")));
            }
        };
    }
}
/// Keys whose values are copied from a closing span into its parent span's storage,
/// so they stay visible on records emitted later in the request lifecycle.
const PERSISTENT_KEYS: [&str; 6] = [
    "payment_id",
    "connector_name",
    "merchant_id",
    "flow",
    "payment_method",
    "status_code",
];
/// [`Layer`] implementation that maintains a per-span [`Storage`] of recorded
/// field values and tracks how long each span stays open.
impl<S: Subscriber + for<'a> tracing_subscriber::registry::LookupSpan<'a>> Layer<S>
    for StorageSubscription
{
    /// On new span: seed its storage from the parent span's storage (if any),
    /// then record the span's own attributes on top.
    fn on_new_span(&self, attrs: &Attributes<'_>, id: &Id, ctx: Context<'_, S>) {
        #[allow(clippy::expect_used)]
        let span = ctx.span(id).expect("No span");
        let mut extensions = span.extensions_mut();
        // Clone the parent's storage so values recorded on ancestors are
        // inherited by this span.
        let mut visitor = if let Some(parent_span) = span.parent() {
            let mut extensions = parent_span.extensions_mut();
            extensions
                .get_mut::<Storage<'_>>()
                .map(|v| v.to_owned())
                .unwrap_or_default()
        } else {
            Storage::default()
        };
        attrs.record(&mut visitor);
        extensions.insert(visitor);
    }

    /// On additional key value pairs store it.
    fn on_record(&self, span: &Id, values: &Record<'_>, ctx: Context<'_, S>) {
        #[allow(clippy::expect_used)]
        let span = ctx.span(span).expect("No span");
        let mut extensions = span.extensions_mut();
        #[allow(clippy::expect_used)]
        let visitor = extensions
            .get_mut::<Storage<'_>>()
            .expect("The span does not have storage");
        values.record(visitor);
    }

    /// On enter store time.
    fn on_enter(&self, span: &Id, ctx: Context<'_, S>) {
        #[allow(clippy::expect_used)]
        let span = ctx.span(span).expect("No span");
        let mut extensions = span.extensions_mut();
        // Only set the start time on the first entry so re-entering the same
        // span does not reset the measurement.
        if extensions.get_mut::<Instant>().is_none() {
            extensions.insert(Instant::now());
        }
    }

    /// On close create an entry about how long did it take.
    fn on_close(&self, span: Id, ctx: Context<'_, S>) {
        #[allow(clippy::expect_used)]
        let span = ctx.span(&span).expect("No span");
        // Read the elapsed time in a scope so the immutable extensions borrow
        // is released before we borrow mutably below.
        let elapsed_milliseconds = {
            let extensions = span.extensions();
            extensions
                .get::<Instant>()
                .map(|i| i.elapsed().as_millis())
                .unwrap_or(0)
        };
        // Propagate persistent keys up to the parent span's storage so they
        // survive past this span's lifetime.
        if let Some(s) = span.extensions().get::<Storage<'_>>() {
            s.values.iter().for_each(|(k, v)| {
                if PERSISTENT_KEYS.contains(k) {
                    span.parent().and_then(|p| {
                        p.extensions_mut()
                            .get_mut::<Storage<'_>>()
                            .map(|s| s.record_value(k, v.to_owned()))
                    });
                }
            })
        };
        let mut extensions_mut = span.extensions_mut();
        #[allow(clippy::expect_used)]
        let visitor = extensions_mut
            .get_mut::<Storage<'_>>()
            .expect("No visitor in extensions");
        if let Ok(elapsed) = serde_json::to_value(elapsed_milliseconds) {
            visitor.record_value("elapsed_milliseconds", elapsed);
        }
    }
}
// File: crates/router_env/src/logger/defaults.rs
impl Default for super::config::LogFile {
    /// File logging defaults: enabled, `debug.log` under the `logs` directory
    /// at `DEBUG` level, with no extra filtering directive.
    fn default() -> Self {
        let level = super::config::Level(tracing::Level::DEBUG);
        Self {
            enabled: true,
            level,
            path: "logs".into(),
            file_name: "debug.log".into(),
            filtering_directive: None,
        }
    }
}
impl Default for super::config::LogConsole {
    /// Console logging defaults: disabled, JSON output at `INFO` level, with
    /// no extra filtering directive.
    fn default() -> Self {
        let level = super::config::Level(tracing::Level::INFO);
        Self {
            enabled: false,
            log_format: super::config::LogFormat::Json,
            level,
            filtering_directive: None,
        }
    }
}
// File: crates/router_env/src/logger/setup.rs
//! Setup logging subsystem.
use std::time::Duration;
use ::config::ConfigError;
use serde_json::ser::{CompactFormatter, PrettyFormatter};
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{fmt, prelude::*, util::SubscriberInitExt, EnvFilter, Layer};
use crate::{config, FormattingLayer, StorageSubscription};
/// Contains guards necessary for logging and metrics collection.
#[derive(Debug)]
pub struct TelemetryGuard {
    // Guards for the non-blocking log writers; dropping them shuts down the
    // background worker threads (flushing buffered logs on the way out).
    _log_guards: Vec<WorkerGuard>,
}
/// Setup logging sub-system specifying the logging configuration, service (binary) name, and a
/// list of external crates for which a more verbose logging must be enabled. All crates within the
/// current cargo workspace are automatically considered for verbose logging.
///
/// Returns a [`TelemetryGuard`] which must be kept alive for the duration of
/// the application: it holds the non-blocking writer guards.
#[allow(clippy::print_stdout)] // The logger hasn't been initialized yet
pub fn setup(
    config: &config::Log,
    service_name: &str,
    crates_to_filter: impl AsRef<[&'static str]>,
) -> error_stack::Result<TelemetryGuard, ConfigError> {
    let mut guards = Vec::new();

    // Setup OpenTelemetry traces and metrics
    let traces_layer = if config.telemetry.traces_enabled {
        setup_tracing_pipeline(&config.telemetry, service_name)
    } else {
        None
    };
    if config.telemetry.metrics_enabled {
        setup_metrics_pipeline(&config.telemetry)
    };

    // Setup file logging
    let file_writer = if config.file.enabled {
        let mut path = crate::env::workspace_path();
        // Using an absolute path for file log path would replace workspace path with absolute path,
        // which is the intended behavior for us.
        path.push(&config.file.path);
        // Hourly-rotated, non-blocking file appender; its guard must be kept alive.
        let file_appender = tracing_appender::rolling::hourly(&path, &config.file.file_name);
        let (file_writer, guard) = tracing_appender::non_blocking(file_appender);
        guards.push(guard);

        let file_filter = get_envfilter(
            config.file.filtering_directive.as_ref(),
            config::Level(tracing::Level::WARN),
            config.file.level,
            &crates_to_filter,
        );
        println!("Using file logging filter: {file_filter}");

        let layer = FormattingLayer::new(service_name, file_writer, CompactFormatter)?
            .with_filter(file_filter);
        Some(layer)
    } else {
        None
    };

    let subscriber = tracing_subscriber::registry()
        .with(traces_layer)
        .with(StorageSubscription)
        .with(file_writer);

    // Setup console logging
    if config.console.enabled {
        let (console_writer, guard) = tracing_appender::non_blocking(std::io::stdout());
        guards.push(guard);

        let console_filter = get_envfilter(
            config.console.filtering_directive.as_ref(),
            config::Level(tracing::Level::WARN),
            config.console.level,
            &crates_to_filter,
        );
        println!("Using console logging filter: {console_filter}");

        match config.console.log_format {
            config::LogFormat::Default => {
                let logging_layer = fmt::layer()
                    .with_timer(fmt::time::time())
                    .pretty()
                    .with_writer(console_writer)
                    .with_filter(console_filter);
                subscriber.with(logging_layer).init();
            }
            config::LogFormat::Json => {
                // Disable colored error-stack reports: ANSI escape codes are
                // undesirable inside JSON output.
                error_stack::Report::set_color_mode(error_stack::fmt::ColorMode::None);
                subscriber
                    .with(
                        FormattingLayer::new(service_name, console_writer, CompactFormatter)?
                            .with_filter(console_filter),
                    )
                    .init();
            }
            config::LogFormat::PrettyJson => {
                error_stack::Report::set_color_mode(error_stack::fmt::ColorMode::None);
                subscriber
                    .with(
                        FormattingLayer::new(service_name, console_writer, PrettyFormatter::new())?
                            .with_filter(console_filter),
                    )
                    .init();
            }
        }
    } else {
        subscriber.init();
    };

    // Returning the TelemetryGuard for logs to be printed and metrics to be collected until it is
    // dropped
    Ok(TelemetryGuard {
        _log_guards: guards,
    })
}
/// Builds the OTLP export configuration (gRPC protocol, configured endpoint),
/// applying the configured timeout when one is present.
fn get_opentelemetry_exporter_config(
    config: &config::LogTelemetry,
) -> opentelemetry_otlp::ExportConfig {
    // Start from the crate defaults and override what our config specifies.
    let mut exporter_config = opentelemetry_otlp::ExportConfig {
        endpoint: config.otel_exporter_otlp_endpoint.clone(),
        protocol: opentelemetry_otlp::Protocol::Grpc,
        ..Default::default()
    };
    if let Some(timeout_ms) = config.otel_exporter_otlp_timeout {
        exporter_config.timeout = Duration::from_millis(timeout_ms);
    }
    exporter_config
}
/// How a span's `http.route` value is matched against a configured pattern.
#[derive(Debug, Clone)]
enum TraceUrlAssert {
    /// The url must equal the string exactly.
    Match(String),
    /// The url must end with the string (from a leading-`*` pattern).
    EndsWith(String),
}
impl TraceUrlAssert {
    /// Returns `true` when `url` satisfies this assertion.
    fn compare_url(&self, url: &str) -> bool {
        match self {
            Self::Match(expected) => expected.as_str() == url,
            Self::EndsWith(suffix) => url.ends_with(suffix),
        }
    }
}
impl From<String> for TraceUrlAssert {
    /// Parses a pattern string: a leading `*` denotes a suffix match
    /// (all leading `*`s are stripped), anything else an exact match.
    fn from(value: String) -> Self {
        if value.starts_with('*') {
            Self::EndsWith(value.trim_start_matches('*').to_string())
        } else {
            Self::Match(value)
        }
    }
}
/// A set of url clauses that together decide whether a request is traced.
#[derive(Debug, Clone)]
struct TraceAssertion {
    /// Clauses that must ALL match for a url to be traced; `None` means
    /// no conditions were configured.
    clauses: Option<Vec<TraceUrlAssert>>,
    /// default behaviour for tracing if no condition is provided
    default: bool,
}
impl TraceAssertion {
    /// Should the provided url be traced: all clauses must match, falling
    /// back to the configured default when no clauses are set.
    fn should_trace_url(&self, url: &str) -> bool {
        self.clauses.as_ref().map_or(self.default, |clauses| {
            clauses.iter().all(|clause| clause.compare_url(url))
        })
    }
}
/// Conditional Sampler for providing control on url based tracing
// Wraps an inner sampler `T` (field 1) behind a url assertion (field 0).
#[derive(Clone, Debug)]
struct ConditionalSampler<T: opentelemetry_sdk::trace::ShouldSample + Clone + 'static>(
    TraceAssertion,
    T,
);
/// Delegates sampling to the inner sampler only for spans whose `http.route`
/// attribute passes the [`TraceAssertion`]; every other span is dropped.
impl<T: opentelemetry_sdk::trace::ShouldSample + Clone + 'static>
    opentelemetry_sdk::trace::ShouldSample for ConditionalSampler<T>
{
    fn should_sample(
        &self,
        parent_context: Option<&opentelemetry::Context>,
        trace_id: opentelemetry::trace::TraceId,
        name: &str,
        span_kind: &opentelemetry::trace::SpanKind,
        attributes: &[opentelemetry::KeyValue],
        links: &[opentelemetry::trace::Link],
    ) -> opentelemetry::trace::SamplingResult {
        use opentelemetry::trace::TraceContextExt;

        // Decide from the `http.route` attribute; spans without one fall
        // back to the assertion's configured default.
        let route_is_traced = attributes
            .iter()
            .find(|kv| kv.key == opentelemetry::Key::new("http.route"))
            .map_or(self.0.default, |kv| {
                self.0.should_trace_url(&kv.value.as_str())
            });

        if route_is_traced {
            // Let the wrapped sampler make the actual decision.
            self.1
                .should_sample(parent_context, trace_id, name, span_kind, attributes, links)
        } else {
            opentelemetry::trace::SamplingResult {
                decision: opentelemetry::trace::SamplingDecision::Drop,
                attributes: Vec::new(),
                // Preserve the parent's trace state if one exists.
                trace_state: parent_context
                    .map(|ctx| ctx.span().span_context().trace_state().clone())
                    .unwrap_or_default(),
            }
        }
    }
}
/// Builds the OpenTelemetry tracing pipeline and returns a `tracing` layer
/// that exports spans over OTLP. Returns `None` when the exporter fails to
/// build and `config.ignore_errors` is set.
fn setup_tracing_pipeline(
    config: &config::LogTelemetry,
    service_name: &str,
) -> Option<
    tracing_opentelemetry::OpenTelemetryLayer<
        tracing_subscriber::Registry,
        opentelemetry_sdk::trace::Tracer,
    >,
> {
    use opentelemetry::trace::TracerProvider;
    use opentelemetry_otlp::WithExportConfig;
    use opentelemetry_sdk::trace;

    // Propagate trace context across service boundaries (W3C Trace Context).
    opentelemetry::global::set_text_map_propagator(
        opentelemetry_sdk::propagation::TraceContextPropagator::new(),
    );

    // Set the export interval to 1 second
    let batch_config = trace::BatchConfigBuilder::default()
        .with_scheduled_delay(Duration::from_millis(1000))
        .build();

    let exporter_result = opentelemetry_otlp::SpanExporter::builder()
        .with_tonic()
        .with_export_config(get_opentelemetry_exporter_config(config))
        .build();
    let exporter = if config.ignore_errors {
        // Best-effort: log the failure and disable tracing instead of panicking.
        #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet
        exporter_result
            .inspect_err(|error| eprintln!("Failed to build traces exporter: {error:?}"))
            .ok()?
    } else {
        // Safety: This is conditional, there is an option to avoid this behavior at runtime.
        #[allow(clippy::expect_used)]
        exporter_result.expect("Failed to build traces exporter")
    };

    let mut provider_builder = trace::TracerProvider::builder()
        .with_span_processor(
            trace::BatchSpanProcessor::builder(
                exporter,
                // The runtime would have to be updated if a different web framework is used
                opentelemetry_sdk::runtime::TokioCurrentThread,
            )
            .with_batch_config(batch_config)
            .build(),
        )
        // Respect the parent's sampling decision; root spans are sampled
        // conditionally on the configured routes (ratio defaults to 1.0).
        .with_sampler(trace::Sampler::ParentBased(Box::new(ConditionalSampler(
            TraceAssertion {
                clauses: config
                    .route_to_trace
                    .clone()
                    .map(|inner| inner.into_iter().map(TraceUrlAssert::from).collect()),
                default: false,
            },
            trace::Sampler::TraceIdRatioBased(config.sampling_rate.unwrap_or(1.0)),
        ))))
        .with_resource(opentelemetry_sdk::Resource::new(vec![
            opentelemetry::KeyValue::new("service.name", service_name.to_owned()),
        ]));
    if config.use_xray_generator {
        // Generate AWS X-Ray compatible trace IDs.
        provider_builder = provider_builder
            .with_id_generator(opentelemetry_aws::trace::XrayIdGenerator::default());
    }
    Some(
        tracing_opentelemetry::layer()
            .with_tracer(provider_builder.build().tracer(service_name.to_owned())),
    )
}
/// Builds the OpenTelemetry metrics pipeline (periodic OTLP export) and
/// installs it as the global meter provider.
///
/// When `config.ignore_errors` is set and the exporter fails to build, the
/// error is logged and metrics setup is skipped; otherwise a build failure
/// panics.
fn setup_metrics_pipeline(config: &config::LogTelemetry) {
    use opentelemetry_otlp::WithExportConfig;

    let exporter_result = opentelemetry_otlp::MetricExporter::builder()
        .with_tonic()
        .with_temporality(opentelemetry_sdk::metrics::Temporality::Cumulative)
        .with_export_config(get_opentelemetry_exporter_config(config))
        .build();
    let exporter = if config.ignore_errors {
        // Bug fix: previously this branch returned unconditionally after
        // logging, so metrics were never set up whenever `ignore_errors` was
        // enabled — even when the exporter built successfully. Only bail out
        // on an actual build failure.
        #[allow(clippy::print_stderr)] // The logger hasn't been initialized yet
        let Ok(exporter) = exporter_result
            .inspect_err(|error| eprintln!("Failed to build metrics exporter: {error:?}"))
        else {
            return;
        };
        exporter
    } else {
        // Safety: This is conditional, there is an option to avoid this behavior at runtime.
        #[allow(clippy::expect_used)]
        exporter_result.expect("Failed to build metrics exporter")
    };
    let reader = opentelemetry_sdk::metrics::PeriodicReader::builder(
        exporter,
        // The runtime would have to be updated if a different web framework is used
        opentelemetry_sdk::runtime::TokioCurrentThread,
    )
    .with_interval(Duration::from_secs(3))
    .with_timeout(Duration::from_secs(10))
    .build();
    let provider = opentelemetry_sdk::metrics::SdkMeterProvider::builder()
        .with_reader(reader)
        .with_resource(opentelemetry_sdk::Resource::new([
            opentelemetry::KeyValue::new(
                "pod",
                std::env::var("POD_NAME").unwrap_or(String::from("hyperswitch-server-default")),
            ),
        ]))
        .build();
    opentelemetry::global::set_meter_provider(provider);
}
/// Builds an [`EnvFilter`]: either parsed from an explicit filtering
/// directive, or constructed from `default_log_level` for everything plus
/// `filter_log_level` for workspace crates and `crates_to_filter`.
fn get_envfilter(
    filtering_directive: Option<&String>,
    default_log_level: config::Level,
    filter_log_level: config::Level,
    crates_to_filter: impl AsRef<[&'static str]>,
) -> EnvFilter {
    match filtering_directive {
        Some(directive) => {
            // Try to create target filter from specified filtering directive, if set
            // Safety: If user is overriding the default filtering directive, then we need to panic
            // for invalid directives.
            #[allow(clippy::expect_used)]
            EnvFilter::builder()
                .with_default_directive(default_log_level.into_level().into())
                .parse(directive)
                .expect("Invalid EnvFilter filtering directive")
        }
        None => {
            // Construct a default target filter otherwise: workspace crates
            // (plus any explicitly requested crates) get the verbose level.
            let mut targets = crate::cargo_workspace_members!();
            targets.extend(crates_to_filter.as_ref());
            let target_level = filter_log_level.into_level();
            let mut env_filter =
                EnvFilter::default().add_directive(default_log_level.into_level().into());
            for target in targets.drain() {
                // Safety: This is a hardcoded basic filtering directive. If even the basic
                // filter is wrong, it's better to panic.
                #[allow(clippy::expect_used)]
                let directive = format!("{target}={target_level}")
                    .parse()
                    .expect("Invalid EnvFilter directive format");
                env_filter = env_filter.add_directive(directive);
            }
            env_filter
        }
    }
}
</module>
|
{
"crate": "router_env",
"file": null,
"files": [
"crates/router_env/src/logger/types.rs",
"crates/router_env/src/logger/config.rs",
"crates/router_env/src/logger/formatter.rs",
"crates/router_env/src/logger/storage.rs",
"crates/router_env/src/logger/defaults.rs",
"crates/router_env/src/logger/setup.rs"
],
"module": "crates/router_env/src/logger",
"num_files": 6,
"token_count": 13761
}
|
module_9168648182038105781
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: payment_methods
Module: crates/payment_methods/src
Files: 6
</path>
<module>
// File: crates/payment_methods/src/core.rs
pub mod errors;
pub mod migration;
// File: crates/payment_methods/src/controller.rs
use std::fmt::Debug;
#[cfg(feature = "payouts")]
use api_models::payouts;
use api_models::{enums as api_enums, payment_methods as api};
#[cfg(feature = "v1")]
use common_enums::enums as common_enums;
#[cfg(feature = "v2")]
use common_utils::encryption;
use common_utils::{crypto, ext_traits, id_type, type_name, types::keymanager};
use error_stack::ResultExt;
#[cfg(feature = "v1")]
use hyperswitch_domain_models::payment_methods::PaymentMethodVaultSourceDetails;
use hyperswitch_domain_models::{merchant_key_store, payment_methods, type_encryption};
use masking::{PeekInterface, Secret};
#[cfg(feature = "v1")]
use scheduler::errors as sch_errors;
use serde::{Deserialize, Serialize};
use storage_impl::{errors as storage_errors, payment_method};
use crate::core::errors;
/// Response returned by the locker when deleting a stored card.
#[derive(Debug, Deserialize, Serialize)]
pub struct DeleteCardResp {
    /// Status of the delete operation.
    pub status: String,
    /// Human-readable error message, when the deletion failed.
    pub error_message: Option<String>,
    /// Error code, when the deletion failed.
    pub error_code: Option<String>,
}
/// Duplication verdict reported when storing payment-method data.
#[serde(rename_all = "snake_case")]
#[derive(Debug, Deserialize, Serialize, PartialEq)]
pub enum DataDuplicationCheck {
    /// The data was already present.
    Duplicated,
    /// Data existed but its metadata differed.
    MetaDataChanged,
}
/// Operations for creating, retrieving, vaulting, and deleting payment
/// methods. Method availability varies by API version (`v1`/`v2`) and the
/// `payouts` feature.
#[async_trait::async_trait]
pub trait PaymentMethodsController {
    /// Creates a payment-method record from the given request and associated data.
    #[cfg(feature = "v1")]
    #[allow(clippy::too_many_arguments)]
    async fn create_payment_method(
        &self,
        req: &api::PaymentMethodCreate,
        customer_id: &id_type::CustomerId,
        payment_method_id: &str,
        locker_id: Option<String>,
        merchant_id: &id_type::MerchantId,
        pm_metadata: Option<serde_json::Value>,
        customer_acceptance: Option<serde_json::Value>,
        payment_method_data: crypto::OptionalEncryptableValue,
        connector_mandate_details: Option<serde_json::Value>,
        status: Option<common_enums::PaymentMethodStatus>,
        network_transaction_id: Option<String>,
        payment_method_billing_address: crypto::OptionalEncryptableValue,
        card_scheme: Option<String>,
        network_token_requestor_reference_id: Option<String>,
        network_token_locker_id: Option<String>,
        network_token_payment_method_data: crypto::OptionalEncryptableValue,
        vault_source_details: Option<PaymentMethodVaultSourceDetails>,
    ) -> errors::PmResult<payment_methods::PaymentMethod>;

    /// Inserts a payment-method record built from an existing response and request.
    #[cfg(feature = "v1")]
    #[allow(clippy::too_many_arguments)]
    async fn insert_payment_method(
        &self,
        resp: &api::PaymentMethodResponse,
        req: &api::PaymentMethodCreate,
        key_store: &merchant_key_store::MerchantKeyStore,
        merchant_id: &id_type::MerchantId,
        customer_id: &id_type::CustomerId,
        pm_metadata: Option<serde_json::Value>,
        customer_acceptance: Option<serde_json::Value>,
        locker_id: Option<String>,
        connector_mandate_details: Option<serde_json::Value>,
        network_transaction_id: Option<String>,
        payment_method_billing_address: crypto::OptionalEncryptableValue,
        network_token_requestor_reference_id: Option<String>,
        network_token_locker_id: Option<String>,
        network_token_payment_method_data: crypto::OptionalEncryptableValue,
        vault_source_details: Option<PaymentMethodVaultSourceDetails>,
    ) -> errors::PmResult<payment_methods::PaymentMethod>;

    /// `v2` variant of [`Self::insert_payment_method`].
    #[cfg(feature = "v2")]
    #[allow(clippy::too_many_arguments)]
    async fn insert_payment_method(
        &self,
        resp: &api::PaymentMethodResponse,
        req: &api::PaymentMethodCreate,
        key_store: &merchant_key_store::MerchantKeyStore,
        merchant_id: &id_type::MerchantId,
        customer_id: &id_type::CustomerId,
        pm_metadata: Option<serde_json::Value>,
        customer_acceptance: Option<serde_json::Value>,
        locker_id: Option<String>,
        connector_mandate_details: Option<serde_json::Value>,
        network_transaction_id: Option<String>,
        payment_method_billing_address: Option<encryption::Encryption>,
    ) -> errors::PmResult<payment_methods::PaymentMethod>;

    /// Adds a new payment method for a customer.
    #[cfg(feature = "v1")]
    async fn add_payment_method(
        &self,
        req: &api::PaymentMethodCreate,
    ) -> errors::PmResponse<api::PaymentMethodResponse>;

    /// Retrieves a payment method by its identifier.
    #[cfg(feature = "v1")]
    async fn retrieve_payment_method(
        &self,
        pm: api::PaymentMethodId,
    ) -> errors::PmResponse<api::PaymentMethodResponse>;

    /// Deletes a payment method by its identifier.
    #[cfg(feature = "v1")]
    async fn delete_payment_method(
        &self,
        pm_id: api::PaymentMethodId,
    ) -> errors::PmResponse<api::PaymentMethodDeleteResponse>;

    /// Adds a card to the given locker choice.
    async fn add_card_hs(
        &self,
        req: api::PaymentMethodCreate,
        card: &api::CardDetail,
        customer_id: &id_type::CustomerId,
        locker_choice: api_enums::LockerChoice,
        card_reference: Option<&str>,
    ) -> errors::VaultResult<(api::PaymentMethodResponse, Option<DataDuplicationCheck>)>;

    /// The response will be the tuple of PaymentMethodResponse and the duplication check of payment_method
    async fn add_card_to_locker(
        &self,
        req: api::PaymentMethodCreate,
        card: &api::CardDetail,
        customer_id: &id_type::CustomerId,
        card_reference: Option<&str>,
    ) -> errors::VaultResult<(api::PaymentMethodResponse, Option<DataDuplicationCheck>)>;

    /// Adds bank details to the locker, returning the response and duplication check.
    #[cfg(feature = "payouts")]
    async fn add_bank_to_locker(
        &self,
        req: api::PaymentMethodCreate,
        key_store: &merchant_key_store::MerchantKeyStore,
        bank: &payouts::Bank,
        customer_id: &id_type::CustomerId,
    ) -> errors::VaultResult<(api::PaymentMethodResponse, Option<DataDuplicationCheck>)>;

    /// Fetches an existing payment method or inserts one from the given response.
    #[cfg(feature = "v1")]
    async fn get_or_insert_payment_method(
        &self,
        req: api::PaymentMethodCreate,
        resp: &mut api::PaymentMethodResponse,
        customer_id: &id_type::CustomerId,
        key_store: &merchant_key_store::MerchantKeyStore,
    ) -> errors::PmResult<payment_methods::PaymentMethod>;

    /// `v2` variant; not implemented yet.
    #[cfg(feature = "v2")]
    async fn get_or_insert_payment_method(
        &self,
        _req: api::PaymentMethodCreate,
        _resp: &mut api::PaymentMethodResponse,
        _customer_id: &id_type::CustomerId,
        _key_store: &merchant_key_store::MerchantKeyStore,
    ) -> errors::PmResult<payment_methods::PaymentMethod> {
        todo!()
    }

    /// Returns card details, falling back to the locker when necessary; may be `None`.
    #[cfg(feature = "v1")]
    async fn get_card_details_with_locker_fallback(
        &self,
        pm: &payment_methods::PaymentMethod,
    ) -> errors::PmResult<Option<api::CardDetailFromLocker>>;

    /// Returns card details without consulting the locker as a fallback.
    #[cfg(feature = "v1")]
    async fn get_card_details_without_locker_fallback(
        &self,
        pm: &payment_methods::PaymentMethod,
    ) -> errors::PmResult<api::CardDetailFromLocker>;

    /// Deletes a card stored in the locker by its reference.
    async fn delete_card_from_locker(
        &self,
        customer_id: &id_type::CustomerId,
        merchant_id: &id_type::MerchantId,
        card_reference: &str,
    ) -> errors::PmResult<DeleteCardResp>;

    /// Builds a default (non-vaulted) payment-method response.
    #[cfg(feature = "v1")]
    fn store_default_payment_method(
        &self,
        req: &api::PaymentMethodCreate,
        customer_id: &id_type::CustomerId,
        merchant_id: &id_type::MerchantId,
    ) -> (api::PaymentMethodResponse, Option<DataDuplicationCheck>);

    /// `v2` variant of [`Self::store_default_payment_method`].
    #[cfg(feature = "v2")]
    fn store_default_payment_method(
        &self,
        req: &api::PaymentMethodCreate,
        customer_id: &id_type::CustomerId,
        merchant_id: &id_type::MerchantId,
    ) -> (api::PaymentMethodResponse, Option<DataDuplicationCheck>);

    /// Saves a network token and updates the payment method; returns whether it succeeded.
    #[cfg(feature = "v1")]
    #[allow(clippy::too_many_arguments)]
    async fn save_network_token_and_update_payment_method(
        &self,
        req: &api::PaymentMethodMigrate,
        key_store: &merchant_key_store::MerchantKeyStore,
        network_token_data: &api_models::payment_methods::MigrateNetworkTokenData,
        network_token_requestor_ref_id: String,
        pm_id: String,
    ) -> errors::PmResult<bool>;

    /// Marks the given payment method as the customer's default.
    #[cfg(feature = "v1")]
    async fn set_default_payment_method(
        &self,
        merchant_id: &id_type::MerchantId,
        customer_id: &id_type::CustomerId,
        payment_method_id: String,
    ) -> errors::PmResponse<api_models::payment_methods::CustomerDefaultPaymentMethodResponse>;

    /// Schedules a process-tracker task for a payment-method status transition.
    #[cfg(feature = "v1")]
    async fn add_payment_method_status_update_task(
        &self,
        payment_method: &payment_methods::PaymentMethod,
        prev_status: common_enums::PaymentMethodStatus,
        curr_status: common_enums::PaymentMethodStatus,
        merchant_id: &id_type::MerchantId,
    ) -> Result<(), sch_errors::ProcessTrackerError>;

    /// Validates merchant connector ids referenced in the mandate details.
    #[cfg(feature = "v1")]
    async fn validate_merchant_connector_ids_in_connector_mandate_details(
        &self,
        key_store: &merchant_key_store::MerchantKeyStore,
        connector_mandate_details: &api_models::payment_methods::CommonMandateReference,
        merchant_id: &id_type::MerchantId,
        card_network: Option<common_enums::CardNetwork>,
    ) -> errors::PmResult<()>;

    /// Fetches card details for a payment method from the locker.
    #[cfg(feature = "v1")]
    async fn get_card_details_from_locker(
        &self,
        pm: &payment_methods::PaymentMethod,
    ) -> errors::PmResult<api::CardDetailFromLocker>;
}
/// Serializes `data` to a JSON value and encrypts it with the merchant's key
/// from the key store, via the key-manager crypto operation.
///
/// # Errors
///
/// Returns [`storage_errors::StorageError::SerializationFailed`] when `data`
/// cannot be encoded to JSON, and
/// [`storage_errors::StorageError::EncryptionError`] when encryption fails.
pub async fn create_encrypted_data<T>(
    key_manager_state: &keymanager::KeyManagerState,
    key_store: &merchant_key_store::MerchantKeyStore,
    data: T,
) -> Result<
    crypto::Encryptable<Secret<serde_json::Value>>,
    error_stack::Report<storage_errors::StorageError>,
>
where
    T: Debug + Serialize,
{
    let key = key_store.key.get_inner().peek();
    // Scope the encryption to this merchant's identifier.
    let identifier = keymanager::Identifier::Merchant(key_store.merchant_id.clone());

    let encoded_data = ext_traits::Encode::encode_to_value(&data)
        .change_context(storage_errors::StorageError::SerializationFailed)
        .attach_printable("Unable to encode data")?;

    let secret_data = Secret::<_, masking::WithType>::new(encoded_data);

    let encrypted_data = type_encryption::crypto_operation(
        key_manager_state,
        type_name!(payment_method::PaymentMethod),
        type_encryption::CryptoOperation::Encrypt(secret_data),
        identifier.clone(),
        key,
    )
    .await
    .and_then(|val| val.try_into_operation())
    .change_context(storage_errors::StorageError::EncryptionError)
    .attach_printable("Unable to encrypt data")?;

    Ok(encrypted_data)
}
// File: crates/payment_methods/src/configs.rs
pub mod payment_connector_required_fields;
pub mod settings;
// File: crates/payment_methods/src/lib.rs
pub mod configs;
pub mod controller;
pub mod core;
pub mod helpers;
pub mod state;
// File: crates/payment_methods/src/helpers.rs
use api_models::{enums as api_enums, payment_methods as api};
#[cfg(feature = "v1")]
use common_utils::ext_traits::AsyncExt;
pub use hyperswitch_domain_models::{errors::api_error_response, payment_methods as domain};
#[cfg(feature = "v1")]
use router_env::logger;
use crate::state;
/// Populates the card BIN details (issuer, network, type, issuing country)
/// for a payment-method create request.
///
/// When the request already carries all four BIN fields, it is returned
/// unchanged. Otherwise the card ISIN is looked up in the card-info store;
/// on a miss or a lookup error (logged, best-effort) the BIN fields stay
/// `None`.
#[cfg(feature = "v1")]
pub async fn populate_bin_details_for_payment_method_create(
    card_details: api_models::payment_methods::CardDetail,
    db: Box<dyn state::PaymentMethodsStorageInterface>,
) -> api_models::payment_methods::CardDetail {
    if card_details.card_issuer.is_some()
        && card_details.card_network.is_some()
        && card_details.card_type.is_some()
        && card_details.card_issuing_country.is_some()
    {
        // All BIN details already present: no lookup (and no field-by-field
        // reconstruction) is needed.
        return card_details;
    }
    let card_isin = Some(card_details.card_number.get_card_isin());
    let card_info = card_isin
        .async_and_then(|card_isin| async move {
            db.get_card_info(&card_isin)
                .await
                // Best-effort lookup: log the failure and fall through to `None`.
                .map_err(|error| logger::error!(card_info_error=?error))
                .ok()
        })
        .await
        .flatten();
    match card_info {
        Some(card_info) => api::CardDetail {
            card_issuer: card_info.card_issuer,
            card_network: card_info.card_network.clone(),
            card_type: card_info.card_type,
            card_issuing_country: card_info.card_issuing_country,
            card_exp_month: card_details.card_exp_month.clone(),
            card_exp_year: card_details.card_exp_year.clone(),
            card_holder_name: card_details.card_holder_name.clone(),
            card_number: card_details.card_number.clone(),
            nick_name: card_details.nick_name.clone(),
        },
        None => api::CardDetail {
            card_issuer: None,
            card_network: None,
            card_type: None,
            card_issuing_country: None,
            card_exp_month: card_details.card_exp_month.clone(),
            card_exp_year: card_details.card_exp_year.clone(),
            card_holder_name: card_details.card_holder_name.clone(),
            card_number: card_details.card_number.clone(),
            nick_name: card_details.nick_name.clone(),
        },
    }
}
/// `v2` variant of BIN-details population; not implemented yet.
#[cfg(feature = "v2")]
pub async fn populate_bin_details_for_payment_method_create(
    _card_details: api_models::payment_methods::CardDetail,
    _db: &dyn state::PaymentMethodsStorageInterface,
) -> api_models::payment_methods::CardDetail {
    todo!()
}
/// Returns `true` when `payment_method_type` is a valid subtype of the given
/// `payment_method` (e.g. `Credit` under `Card`, `Klarna` under `PayLater`).
pub fn validate_payment_method_type_against_payment_method(
    payment_method: api_enums::PaymentMethod,
    payment_method_type: api_enums::PaymentMethodType,
) -> bool {
    match payment_method {
        #[cfg(feature = "v1")]
        api_enums::PaymentMethod::Card => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Credit | api_enums::PaymentMethodType::Debit
        ),
        // `v2` additionally accepts the generic `Card` subtype.
        #[cfg(feature = "v2")]
        api_enums::PaymentMethod::Card => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Credit
                | api_enums::PaymentMethodType::Debit
                | api_enums::PaymentMethodType::Card
        ),
        api_enums::PaymentMethod::PayLater => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Affirm
                | api_enums::PaymentMethodType::Alma
                | api_enums::PaymentMethodType::AfterpayClearpay
                | api_enums::PaymentMethodType::Klarna
                | api_enums::PaymentMethodType::PayBright
                | api_enums::PaymentMethodType::Atome
                | api_enums::PaymentMethodType::Walley
                | api_enums::PaymentMethodType::Breadpay
                | api_enums::PaymentMethodType::Flexiti
        ),
        api_enums::PaymentMethod::Wallet => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::AmazonPay
                | api_enums::PaymentMethodType::Bluecode
                | api_enums::PaymentMethodType::Paysera
                | api_enums::PaymentMethodType::Skrill
                | api_enums::PaymentMethodType::ApplePay
                | api_enums::PaymentMethodType::GooglePay
                | api_enums::PaymentMethodType::Paypal
                | api_enums::PaymentMethodType::AliPay
                | api_enums::PaymentMethodType::AliPayHk
                | api_enums::PaymentMethodType::Dana
                | api_enums::PaymentMethodType::MbWay
                | api_enums::PaymentMethodType::MobilePay
                | api_enums::PaymentMethodType::SamsungPay
                | api_enums::PaymentMethodType::Twint
                | api_enums::PaymentMethodType::Vipps
                | api_enums::PaymentMethodType::TouchNGo
                | api_enums::PaymentMethodType::Swish
                | api_enums::PaymentMethodType::WeChatPay
                | api_enums::PaymentMethodType::GoPay
                | api_enums::PaymentMethodType::Gcash
                | api_enums::PaymentMethodType::Momo
                | api_enums::PaymentMethodType::KakaoPay
                | api_enums::PaymentMethodType::Cashapp
                | api_enums::PaymentMethodType::Mifinity
                | api_enums::PaymentMethodType::Paze
                | api_enums::PaymentMethodType::RevolutPay
        ),
        api_enums::PaymentMethod::BankRedirect => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Giropay
                | api_enums::PaymentMethodType::Ideal
                | api_enums::PaymentMethodType::Sofort
                | api_enums::PaymentMethodType::Eft
                | api_enums::PaymentMethodType::Eps
                | api_enums::PaymentMethodType::BancontactCard
                | api_enums::PaymentMethodType::Blik
                | api_enums::PaymentMethodType::LocalBankRedirect
                | api_enums::PaymentMethodType::OnlineBankingThailand
                | api_enums::PaymentMethodType::OnlineBankingCzechRepublic
                | api_enums::PaymentMethodType::OnlineBankingFinland
                | api_enums::PaymentMethodType::OnlineBankingFpx
                | api_enums::PaymentMethodType::OnlineBankingPoland
                | api_enums::PaymentMethodType::OnlineBankingSlovakia
                | api_enums::PaymentMethodType::Przelewy24
                | api_enums::PaymentMethodType::Trustly
                | api_enums::PaymentMethodType::Bizum
                | api_enums::PaymentMethodType::Interac
                | api_enums::PaymentMethodType::OpenBankingUk
                | api_enums::PaymentMethodType::OpenBankingPIS
        ),
        api_enums::PaymentMethod::BankTransfer => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Ach
                | api_enums::PaymentMethodType::SepaBankTransfer
                | api_enums::PaymentMethodType::Bacs
                | api_enums::PaymentMethodType::Multibanco
                | api_enums::PaymentMethodType::Pix
                | api_enums::PaymentMethodType::Pse
                | api_enums::PaymentMethodType::PermataBankTransfer
                | api_enums::PaymentMethodType::BcaBankTransfer
                | api_enums::PaymentMethodType::BniVa
                | api_enums::PaymentMethodType::BriVa
                | api_enums::PaymentMethodType::CimbVa
                | api_enums::PaymentMethodType::DanamonVa
                | api_enums::PaymentMethodType::MandiriVa
                | api_enums::PaymentMethodType::LocalBankTransfer
                | api_enums::PaymentMethodType::InstantBankTransfer
                | api_enums::PaymentMethodType::InstantBankTransferFinland
                | api_enums::PaymentMethodType::InstantBankTransferPoland
                | api_enums::PaymentMethodType::IndonesianBankTransfer
        ),
        api_enums::PaymentMethod::BankDebit => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Ach
                | api_enums::PaymentMethodType::Sepa
                | api_enums::PaymentMethodType::SepaGuarenteedDebit
                | api_enums::PaymentMethodType::Bacs
                | api_enums::PaymentMethodType::Becs
        ),
        api_enums::PaymentMethod::Crypto => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::CryptoCurrency
        ),
        api_enums::PaymentMethod::Reward => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Evoucher | api_enums::PaymentMethodType::ClassicReward
        ),
        api_enums::PaymentMethod::RealTimePayment => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Fps
                | api_enums::PaymentMethodType::DuitNow
                | api_enums::PaymentMethodType::PromptPay
                | api_enums::PaymentMethodType::VietQr
        ),
        api_enums::PaymentMethod::Upi => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::UpiCollect
                | api_enums::PaymentMethodType::UpiIntent
                | api_enums::PaymentMethodType::UpiQr
        ),
        api_enums::PaymentMethod::Voucher => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Boleto
                | api_enums::PaymentMethodType::Efecty
                | api_enums::PaymentMethodType::PagoEfectivo
                | api_enums::PaymentMethodType::RedCompra
                | api_enums::PaymentMethodType::RedPagos
                | api_enums::PaymentMethodType::Indomaret
                | api_enums::PaymentMethodType::Alfamart
                | api_enums::PaymentMethodType::Oxxo
                | api_enums::PaymentMethodType::SevenEleven
                | api_enums::PaymentMethodType::Lawson
                | api_enums::PaymentMethodType::MiniStop
                | api_enums::PaymentMethodType::FamilyMart
                | api_enums::PaymentMethodType::Seicomart
                | api_enums::PaymentMethodType::PayEasy
        ),
        api_enums::PaymentMethod::GiftCard => {
            matches!(
                payment_method_type,
                api_enums::PaymentMethodType::Givex | api_enums::PaymentMethodType::PaySafeCard
            )
        }
        api_enums::PaymentMethod::CardRedirect => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::Knet
                | api_enums::PaymentMethodType::Benefit
                | api_enums::PaymentMethodType::MomoAtm
                | api_enums::PaymentMethodType::CardRedirect
        ),
        api_enums::PaymentMethod::OpenBanking => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::OpenBankingPIS
        ),
        api_enums::PaymentMethod::MobilePayment => matches!(
            payment_method_type,
            api_enums::PaymentMethodType::DirectCarrierBilling
        ),
    }
}
/// Trait for infallible conversion from a foreign type
pub trait ForeignFrom<F> {
    /// Convert from a foreign type to the current type
    fn foreign_from(from: F) -> Self;
}
/// Trait for converting from one foreign type to another
pub trait ForeignTryFrom<F>: Sized {
    /// Custom error for conversion failure
    type Error;
    /// Convert from a foreign type to the current type and return an error if the conversion fails
    fn foreign_try_from(from: F) -> Result<Self, Self::Error>;
}
#[cfg(feature = "v1")]
impl ForeignFrom<(Option<api::CardDetailFromLocker>, domain::PaymentMethod)>
    for api::PaymentMethodResponse
{
    /// Build the API payment-method response from optional locker card
    /// details and the stored domain payment-method record.
    fn foreign_from(
        (card, payment_method): (Option<api::CardDetailFromLocker>, domain::PaymentMethod),
    ) -> Self {
        Self {
            merchant_id: payment_method.merchant_id.to_owned(),
            customer_id: Some(payment_method.customer_id.to_owned()),
            payment_method_id: payment_method.get_id().clone(),
            payment_method: payment_method.get_payment_method_type(),
            payment_method_type: payment_method.get_payment_method_subtype(),
            card,
            metadata: payment_method.metadata,
            created: Some(payment_method.created_at),
            client_secret: payment_method.client_secret,
            // Fields not derivable from the stored record get defaults.
            recurring_enabled: Some(false),
            installment_payment_enabled: Some(false),
            payment_experience: None,
            last_used_at: None,
            #[cfg(feature = "payouts")]
            bank_transfer: None,
        }
    }
}
#[cfg(feature = "v2")]
impl ForeignFrom<(Option<api::CardDetailFromLocker>, domain::PaymentMethod)>
    for api::PaymentMethodResponse
{
    /// v2 conversion is not implemented yet; calling this will panic.
    fn foreign_from(
        (_card_details, _item): (Option<api::CardDetailFromLocker>, domain::PaymentMethod),
    ) -> Self {
        todo!()
    }
}
/// Extension trait for mapping storage-layer errors into API error responses.
pub trait StorageErrorExt<T, E> {
    /// Replace a "value not found" storage error with the given API error.
    #[track_caller]
    fn to_not_found_response(self, not_found_response: E) -> error_stack::Result<T, E>;
    /// Replace a "duplicate value" storage error with the given API error.
    #[track_caller]
    fn to_duplicate_response(self, duplicate_response: E) -> error_stack::Result<T, E>;
}
impl<T> StorageErrorExt<T, api_error_response::ApiErrorResponse>
    for error_stack::Result<T, storage_impl::StorageError>
{
    /// Map `ValueNotFound` to the supplied API error, keep the dedicated
    /// `CustomerRedacted` response, and collapse everything else into
    /// `InternalServerError`.
    #[track_caller]
    fn to_not_found_response(
        self,
        not_found_response: api_error_response::ApiErrorResponse,
    ) -> error_stack::Result<T, api_error_response::ApiErrorResponse> {
        self.map_err(|err| {
            let api_error = match err.current_context() {
                storage_impl::StorageError::ValueNotFound(_) => not_found_response,
                storage_impl::StorageError::CustomerRedacted => {
                    api_error_response::ApiErrorResponse::CustomerRedacted
                }
                _ => api_error_response::ApiErrorResponse::InternalServerError,
            };
            err.change_context(api_error)
        })
    }

    /// Map `DuplicateValue` to the supplied API error; everything else
    /// becomes `InternalServerError`.
    #[track_caller]
    fn to_duplicate_response(
        self,
        duplicate_response: api_error_response::ApiErrorResponse,
    ) -> error_stack::Result<T, api_error_response::ApiErrorResponse> {
        self.map_err(|err| {
            let api_error = if matches!(
                err.current_context(),
                storage_impl::StorageError::DuplicateValue { .. }
            ) {
                duplicate_response
            } else {
                api_error_response::ApiErrorResponse::InternalServerError
            };
            err.change_context(api_error)
        })
    }
}
// File: crates/payment_methods/src/state.rs
#[cfg(feature = "v1")]
use common_utils::errors::CustomResult;
use common_utils::types::keymanager;
#[cfg(feature = "v1")]
use hyperswitch_domain_models::merchant_account;
use hyperswitch_domain_models::{
cards_info, customer, merchant_key_store, payment_methods as pm_domain,
};
use storage_impl::{errors, kv_router_store::KVRouterStore, DatabaseStore, MockDb, RouterStore};
/// Storage backend contract for the payment-methods crate: the union of the
/// payment-method, card-info and customer storage interfaces, clonable
/// behind a trait object.
#[async_trait::async_trait]
pub trait PaymentMethodsStorageInterface:
    Send
    + Sync
    + dyn_clone::DynClone
    + pm_domain::PaymentMethodInterface<Error = errors::StorageError>
    + cards_info::CardsInfoInterface<Error = errors::StorageError>
    + customer::CustomerInterface<Error = errors::StorageError>
    + 'static
{
}
// Allow `Box<dyn PaymentMethodsStorageInterface>` to be cloned.
dyn_clone::clone_trait_object!(PaymentMethodsStorageInterface);
#[async_trait::async_trait]
impl PaymentMethodsStorageInterface for MockDb {}
#[async_trait::async_trait]
impl<T: DatabaseStore + 'static> PaymentMethodsStorageInterface for RouterStore<T> {}
#[async_trait::async_trait]
impl<T: DatabaseStore + 'static> PaymentMethodsStorageInterface for KVRouterStore<T> {}
/// Shared state handed to payment-methods flows: the storage backend, an
/// optional merchant key store, and the key-manager state.
#[derive(Clone)]
pub struct PaymentMethodsState {
    pub store: Box<dyn PaymentMethodsStorageInterface>,
    pub key_store: Option<merchant_key_store::MerchantKeyStore>,
    pub key_manager_state: keymanager::KeyManagerState,
}
impl From<&PaymentMethodsState> for keymanager::KeyManagerState {
fn from(state: &PaymentMethodsState) -> Self {
state.key_manager_state.clone()
}
}
#[cfg(feature = "v1")]
impl PaymentMethodsState {
    /// Look up a payment method by its identifier.
    ///
    /// First attempts a lookup by `payment_method_id`; if that record is not
    /// found, falls back to treating the identifier as a locker id. Any other
    /// storage error is returned unchanged.
    pub async fn find_payment_method(
        &self,
        key_store: &merchant_key_store::MerchantKeyStore,
        merchant_account: &merchant_account::MerchantAccount,
        payment_method_id: String,
    ) -> CustomResult<pm_domain::PaymentMethod, errors::StorageError> {
        let db = &*self.store;
        // Borrow the key-manager state directly; the previous
        // `&(self.key_manager_state).clone()` allocated a clone only to
        // immediately borrow it (clippy `redundant_clone`).
        let key_manager_state = &self.key_manager_state;
        match db
            .find_payment_method(
                key_manager_state,
                key_store,
                &payment_method_id,
                merchant_account.storage_scheme,
            )
            .await
        {
            // Fall back to the locker-id lookup only for "not found";
            // other errors propagate as-is.
            Err(err) if err.current_context().is_db_not_found() => {
                db.find_payment_method_by_locker_id(
                    key_manager_state,
                    key_store,
                    &payment_method_id,
                    merchant_account.storage_scheme,
                )
                .await
            }
            Ok(pm) => Ok(pm),
            Err(err) => Err(err),
        }
    }
}
</module>
|
{
"crate": "payment_methods",
"file": null,
"files": [
"crates/payment_methods/src/core.rs",
"crates/payment_methods/src/controller.rs",
"crates/payment_methods/src/configs.rs",
"crates/payment_methods/src/lib.rs",
"crates/payment_methods/src/helpers.rs",
"crates/payment_methods/src/state.rs"
],
"module": "crates/payment_methods/src",
"num_files": 6,
"token_count": 6296
}
|
module_-3721750779997710699
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: payment_methods
Module: crates/payment_methods/src/core/migration
Files: 1
</path>
<module>
// File: crates/payment_methods/src/core/migration/payment_methods.rs
use std::str::FromStr;
#[cfg(feature = "v2")]
use api_models::enums as api_enums;
#[cfg(feature = "v1")]
use api_models::enums;
use api_models::payment_methods as pm_api;
#[cfg(feature = "v1")]
use common_utils::{
consts,
crypto::Encryptable,
ext_traits::{AsyncExt, ConfigExt},
generate_id,
};
use common_utils::{errors::CustomResult, id_type};
use error_stack::ResultExt;
use hyperswitch_domain_models::{
api::ApplicationResponse, errors::api_error_response as errors, merchant_context,
};
#[cfg(feature = "v1")]
use hyperswitch_domain_models::{ext_traits::OptionExt, payment_methods as domain_pm};
use masking::PeekInterface;
#[cfg(feature = "v1")]
use masking::Secret;
#[cfg(feature = "v1")]
use router_env::{instrument, logger, tracing};
#[cfg(feature = "v1")]
use serde_json::json;
use storage_impl::cards_info;
#[cfg(feature = "v1")]
use crate::{
controller::create_encrypted_data,
core::migration,
helpers::{ForeignFrom, StorageErrorExt},
};
use crate::{controller::PaymentMethodsController, helpers::ForeignTryFrom, state};
/// Migrate a single payment method into Hyperswitch.
///
/// Enriches the card with BIN details, validates any connector mandate
/// details against the merchant's connector accounts, then either saves the
/// card in the locker (valid card number) or skips the locker call (masked /
/// invalid card number) before optionally migrating an associated network
/// token. Returns the payment-method response along with flags describing
/// exactly what was migrated.
#[cfg(feature = "v1")]
pub async fn migrate_payment_method(
    state: &state::PaymentMethodsState,
    req: pm_api::PaymentMethodMigrate,
    merchant_id: &id_type::MerchantId,
    merchant_context: &merchant_context::MerchantContext,
    controller: &dyn PaymentMethodsController,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodMigrateResponse>, errors::ApiErrorResponse>
{
    let mut req = req;
    let card_details = &req.card.get_required_value("card")?;
    // A parse failure here is not fatal: masked card numbers are expected in
    // migrations and are handled by the locker-skipping branch below.
    let card_number_validation_result =
        cards::CardNumber::from_str(card_details.card_number.peek());
    let card_bin_details = populate_bin_details_for_masked_card(
        card_details,
        &*state.store,
        req.payment_method_type.as_ref(),
    )
    .await?;
    // Overwrite the request's card with the BIN-enriched details.
    req.card = Some(api_models::payment_methods::MigrateCardDetail {
        card_issuing_country: card_bin_details.issuer_country.clone(),
        card_network: card_bin_details.card_network.clone(),
        card_issuer: card_bin_details.card_issuer.clone(),
        card_type: card_bin_details.card_type.clone(),
        ..card_details.clone()
    });
    if let Some(connector_mandate_details) = &req.connector_mandate_details {
        controller
            .validate_merchant_connector_ids_in_connector_mandate_details(
                merchant_context.get_merchant_key_store(),
                connector_mandate_details,
                merchant_id,
                card_bin_details.card_network.clone(),
            )
            .await?;
    };
    // Without a network token, connector mandate details are the only way to
    // keep recurring payments working, so they become mandatory downstream.
    let should_require_connector_mandate_details = req.network_token.is_none();
    let mut migration_status = migration::RecordMigrationStatusBuilder::new();
    let resp = match card_number_validation_result {
        Ok(card_number) => {
            let payment_method_create_request =
                pm_api::PaymentMethodCreate::get_payment_method_create_from_payment_method_migrate(
                    card_number,
                    &req,
                );
            logger::debug!("Storing the card in locker and migrating the payment method");
            get_client_secret_or_add_payment_method_for_migration(
                state,
                payment_method_create_request,
                merchant_context,
                &mut migration_status,
                controller,
            )
            .await?
        }
        Err(card_validation_error) => {
            logger::debug!("Card number to be migrated is invalid, skip saving in locker {card_validation_error}");
            skip_locker_call_and_migrate_payment_method(
                state,
                &req,
                merchant_id.to_owned(),
                merchant_context,
                card_bin_details.clone(),
                should_require_connector_mandate_details,
                &mut migration_status,
                controller,
            )
            .await?
        }
    };
    let payment_method_response = match resp {
        ApplicationResponse::Json(response) => response,
        _ => Err(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("Failed to fetch the payment method response")?,
    };
    let pm_id = payment_method_response.payment_method_id.clone();
    let network_token = req.network_token.clone();
    let network_token_migrated = match network_token {
        Some(nt_detail) => {
            logger::debug!("Network token migration");
            let network_token_requestor_ref_id = nt_detail.network_token_requestor_ref_id.clone();
            let network_token_data = &nt_detail.network_token_data;
            // Token-migration failures are logged and surfaced through the
            // status flags rather than failing the whole migration.
            Some(
                controller
                    .save_network_token_and_update_payment_method(
                        &req,
                        merchant_context.get_merchant_key_store(),
                        network_token_data,
                        network_token_requestor_ref_id,
                        pm_id,
                    )
                    .await
                    .map_err(|err| logger::error!(?err, "Failed to save network token"))
                    .ok()
                    .unwrap_or_default(),
            )
        }
        None => {
            logger::debug!("Network token data is not available");
            None
        }
    };
    migration_status.network_token_migrated(network_token_migrated);
    let migrate_status = migration_status.build();
    Ok(ApplicationResponse::Json(
        pm_api::PaymentMethodMigrateResponse {
            payment_method_response,
            card_migrated: migrate_status.card_migrated,
            network_token_migrated: migrate_status.network_token_migrated,
            connector_mandate_details_migrated: migrate_status.connector_mandate_details_migrated,
            network_transaction_id_migrated: migrate_status.network_transaction_migrated,
        },
    ))
}
/// v2 variant of [`migrate_payment_method`]; the v2 migration APIs are still
/// under discussion, so this is currently unimplemented and will panic.
#[cfg(feature = "v2")]
pub async fn migrate_payment_method(
    _state: &state::PaymentMethodsState,
    _req: pm_api::PaymentMethodMigrate,
    _merchant_id: &id_type::MerchantId,
    _merchant_context: &merchant_context::MerchantContext,
    _controller: &dyn PaymentMethodsController,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodMigrateResponse>, errors::ApiErrorResponse>
{
    todo!()
}
/// Build `CardDetailFromLocker` for a (possibly masked) card being migrated.
///
/// For card and wallet payment-method types the expiry is validated first.
/// If the request already carries the full set of BIN attributes (issuer,
/// network, type, issuing country) they are used directly; otherwise the
/// card-info table is consulted using the ISIN derived from the masked
/// number.
#[cfg(feature = "v1")]
pub async fn populate_bin_details_for_masked_card(
    card_details: &api_models::payment_methods::MigrateCardDetail,
    db: &dyn state::PaymentMethodsStorageInterface,
    payment_method_type: Option<&enums::PaymentMethodType>,
) -> CustomResult<pm_api::CardDetailFromLocker, errors::ApiErrorResponse> {
    if let Some(
        // Cards
        enums::PaymentMethodType::Credit
        | enums::PaymentMethodType::Debit
        // Wallets
        | enums::PaymentMethodType::ApplePay
        | enums::PaymentMethodType::GooglePay,
    ) = payment_method_type {
        migration::validate_card_expiry(
            &card_details.card_exp_month,
            &card_details.card_exp_year,
        )?;
    }
    let card_number = card_details.card_number.clone();
    let (card_isin, _last4_digits) = get_card_bin_and_last4_digits_for_masked_card(
        card_number.peek(),
    )
    .change_context(errors::ApiErrorResponse::InvalidRequestData {
        message: "Invalid masked card number".to_string(),
    })?;
    let card_bin_details = if card_details.card_issuer.is_some()
        && card_details.card_network.is_some()
        && card_details.card_type.is_some()
        && card_details.card_issuing_country.is_some()
    {
        pm_api::CardDetailFromLocker::foreign_try_from((card_details, None))?
    } else {
        // Card-info lookup failures are logged and treated as "no BIN data".
        let card_info = db
            .get_card_info(&card_isin)
            .await
            .map_err(|error| logger::error!(card_info_error=?error))
            .ok()
            .flatten();
        pm_api::CardDetailFromLocker::foreign_try_from((card_details, card_info))?
    };
    Ok(card_bin_details)
}
#[cfg(feature = "v1")]
impl
    ForeignTryFrom<(
        &api_models::payment_methods::MigrateCardDetail,
        Option<cards_info::CardInfo>,
    )> for pm_api::CardDetailFromLocker
{
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// Derive locker-style card details from a migration request. Values
    /// supplied in the request take precedence; BIN-table data (`CardInfo`),
    /// when present, fills the gaps.
    fn foreign_try_from(
        (card_details, card_info): (
            &api_models::payment_methods::MigrateCardDetail,
            Option<cards_info::CardInfo>,
        ),
    ) -> Result<Self, Self::Error> {
        // Derive the ISIN (BIN) and last four digits from the masked number;
        // a malformed number surfaces as an invalid-request error.
        let (card_isin, last4_digits) =
            get_card_bin_and_last4_digits_for_masked_card(card_details.card_number.peek())
                .change_context(errors::ApiErrorResponse::InvalidRequestData {
                    message: "Invalid masked card number".to_string(),
                })?;
        if let Some(card_bin_info) = card_info {
            // BIN data available: request values win, BIN values fill gaps.
            Ok(Self {
                scheme: card_details
                    .card_network
                    .clone()
                    .or(card_bin_info.card_network.clone())
                    .map(|card_network| card_network.to_string()),
                last4_digits: Some(last4_digits.clone()),
                issuer_country: card_details
                    .card_issuing_country
                    .clone()
                    .or(card_bin_info.card_issuing_country),
                card_number: None,
                expiry_month: Some(card_details.card_exp_month.clone()),
                expiry_year: Some(card_details.card_exp_year.clone()),
                card_token: None,
                card_fingerprint: None,
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_isin: Some(card_isin.clone()),
                card_issuer: card_details
                    .card_issuer
                    .clone()
                    .or(card_bin_info.card_issuer),
                card_network: card_details
                    .card_network
                    .clone()
                    .or(card_bin_info.card_network),
                card_type: card_details.card_type.clone().or(card_bin_info.card_type),
                saved_to_locker: false,
            })
        } else {
            // No BIN data: use only what the request provided.
            Ok(Self {
                scheme: card_details
                    .card_network
                    .clone()
                    .map(|card_network| card_network.to_string()),
                last4_digits: Some(last4_digits.clone()),
                issuer_country: card_details.card_issuing_country.clone(),
                card_number: None,
                expiry_month: Some(card_details.card_exp_month.clone()),
                expiry_year: Some(card_details.card_exp_year.clone()),
                card_token: None,
                card_fingerprint: None,
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_isin: Some(card_isin.clone()),
                card_issuer: card_details.card_issuer.clone(),
                card_network: card_details.card_network.clone(),
                card_type: card_details.card_type.clone(),
                saved_to_locker: false,
            })
        }
    }
}
#[cfg(feature = "v2")]
impl
    ForeignTryFrom<(
        &api_models::payment_methods::MigrateCardDetail,
        Option<cards_info::CardInfo>,
    )> for pm_api::CardDetailFromLocker
{
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// v2 variant: same precedence rules as v1 (request values first, then
    /// BIN-table data), but the issuing country is parsed into
    /// `CountryAlpha2`. Country strings that fail to parse are silently
    /// dropped rather than treated as errors.
    fn foreign_try_from(
        (card_details, card_info): (
            &api_models::payment_methods::MigrateCardDetail,
            Option<cards_info::CardInfo>,
        ),
    ) -> Result<Self, Self::Error> {
        // Derive the ISIN (BIN) and last four digits from the masked number;
        // a malformed number surfaces as an invalid-request error.
        let (card_isin, last4_digits) =
            get_card_bin_and_last4_digits_for_masked_card(card_details.card_number.peek())
                .change_context(errors::ApiErrorResponse::InvalidRequestData {
                    message: "Invalid masked card number".to_string(),
                })?;
        if let Some(card_bin_info) = card_info {
            // BIN data available: request values win, BIN values fill gaps.
            Ok(Self {
                last4_digits: Some(last4_digits.clone()),
                issuer_country: card_details
                    .card_issuing_country
                    .as_ref()
                    .map(|c| api_enums::CountryAlpha2::from_str(c))
                    .transpose()
                    .ok()
                    .flatten()
                    .or(card_bin_info
                        .card_issuing_country
                        .as_ref()
                        .map(|c| api_enums::CountryAlpha2::from_str(c))
                        .transpose()
                        .ok()
                        .flatten()),
                card_number: None,
                expiry_month: Some(card_details.card_exp_month.clone()),
                expiry_year: Some(card_details.card_exp_year.clone()),
                card_fingerprint: None,
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_isin: Some(card_isin.clone()),
                card_issuer: card_details
                    .card_issuer
                    .clone()
                    .or(card_bin_info.card_issuer),
                card_network: card_details
                    .card_network
                    .clone()
                    .or(card_bin_info.card_network),
                card_type: card_details.card_type.clone().or(card_bin_info.card_type),
                saved_to_locker: false,
            })
        } else {
            // No BIN data: use only what the request provided.
            Ok(Self {
                last4_digits: Some(last4_digits.clone()),
                issuer_country: card_details
                    .card_issuing_country
                    .as_ref()
                    .map(|c| api_enums::CountryAlpha2::from_str(c))
                    .transpose()
                    .ok()
                    .flatten(),
                card_number: None,
                expiry_month: Some(card_details.card_exp_month.clone()),
                expiry_year: Some(card_details.card_exp_year.clone()),
                card_fingerprint: None,
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_isin: Some(card_isin.clone()),
                card_issuer: card_details.card_issuer.clone(),
                card_network: card_details.card_network.clone(),
                card_type: card_details.card_type.clone(),
                saved_to_locker: false,
            })
        }
    }
}
/// Create a payment method during migration when a valid card number is
/// available.
///
/// If the request carries payment-method data (card, and with the `payouts`
/// feature also bank transfer / wallet), the data is saved via the regular
/// add-payment-method path. Otherwise a payment method in `AwaitingData`
/// status is created and a status-update task is scheduled to expire it.
#[cfg(feature = "v1")]
#[instrument(skip_all)]
pub async fn get_client_secret_or_add_payment_method_for_migration(
    state: &state::PaymentMethodsState,
    req: pm_api::PaymentMethodCreate,
    merchant_context: &merchant_context::MerchantContext,
    migration_status: &mut migration::RecordMigrationStatusBuilder,
    controller: &dyn PaymentMethodsController,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodResponse>, errors::ApiErrorResponse> {
    let merchant_id = merchant_context.get_merchant_account().get_id();
    let customer_id = req.customer_id.clone().get_required_value("customer_id")?;
    #[cfg(not(feature = "payouts"))]
    let condition = req.card.is_some();
    #[cfg(feature = "payouts")]
    let condition = req.card.is_some() || req.bank_transfer.is_some() || req.wallet.is_some();
    let key_manager_state = &state.into();
    // The billing address is stored encrypted.
    let payment_method_billing_address: Option<Encryptable<Secret<serde_json::Value>>> = req
        .billing
        .clone()
        .async_map(|billing| {
            create_encrypted_data(
                key_manager_state,
                merchant_context.get_merchant_key_store(),
                billing,
            )
        })
        .await
        .transpose()
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Unable to encrypt Payment method billing address")?;
    let connector_mandate_details = req
        .connector_mandate_details
        .clone()
        .map(serde_json::to_value)
        .transpose()
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
    if condition {
        Box::pin(save_migration_payment_method(
            req,
            migration_status,
            controller,
        ))
        .await
    } else {
        // No payment-method data: create a placeholder record awaiting data.
        let payment_method_id = generate_id(consts::ID_LENGTH, "pm");
        let res = controller
            .create_payment_method(
                &req,
                &customer_id,
                payment_method_id.as_str(),
                None,
                merchant_id,
                None,
                None,
                None,
                connector_mandate_details.clone(),
                Some(enums::PaymentMethodStatus::AwaitingData),
                None,
                payment_method_billing_address,
                None,
                None,
                None,
                None,
                Default::default(),
            )
            .await?;
        // `Some(true)` when non-empty mandate details were persisted,
        // `Some(false)` when details were supplied but empty, `None` if absent.
        migration_status.connector_mandate_details_migrated(
            connector_mandate_details
                .clone()
                .and_then(|val| (val != json!({})).then_some(true))
                .or_else(|| {
                    req.connector_mandate_details
                        .clone()
                        .and_then(|val| (!val.0.is_empty()).then_some(false))
                }),
        );
        //card is not migrated in this case
        migration_status.card_migrated(false);
        if res.status == enums::PaymentMethodStatus::AwaitingData {
            // Schedule the placeholder to be marked Inactive if data never
            // arrives.
            controller
                .add_payment_method_status_update_task(
                    &res,
                    enums::PaymentMethodStatus::AwaitingData,
                    enums::PaymentMethodStatus::Inactive,
                    merchant_id,
                )
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable(
                    "Failed to add payment method status update task in process tracker",
                )?;
        }
        Ok(ApplicationResponse::Json(
            pm_api::PaymentMethodResponse::foreign_from((None, res)),
        ))
    }
}
/// Persist a migrated payment method without storing the card in the locker
/// (used when only a masked card number is available).
///
/// Recurring use then relies entirely on connector mandate details, which are
/// mandatory unless a network token is being migrated alongside
/// (`should_require_connector_mandate_details == false`). The new payment
/// method is also set as the customer's default when they have none.
#[cfg(feature = "v1")]
#[allow(clippy::too_many_arguments)]
pub async fn skip_locker_call_and_migrate_payment_method(
    state: &state::PaymentMethodsState,
    req: &pm_api::PaymentMethodMigrate,
    merchant_id: id_type::MerchantId,
    merchant_context: &merchant_context::MerchantContext,
    card: pm_api::CardDetailFromLocker,
    should_require_connector_mandate_details: bool,
    migration_status: &mut migration::RecordMigrationStatusBuilder,
    controller: &dyn PaymentMethodsController,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodResponse>, errors::ApiErrorResponse> {
    let db = &*state.store;
    let customer_id = req.customer_id.clone().get_required_value("customer_id")?;
    // In this case, since we do not have valid card details, recurring payments can only be done through connector mandate details.
    //if network token data is present, then connector mandate details are not mandatory
    let connector_mandate_details = if should_require_connector_mandate_details {
        let connector_mandate_details_req = req
            .connector_mandate_details
            .clone()
            .and_then(|c| c.payments)
            .clone()
            .get_required_value("connector mandate details")?;
        Some(
            serde_json::to_value(&connector_mandate_details_req)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Failed to parse connector mandate details")?,
        )
    } else {
        req.connector_mandate_details
            .clone()
            .and_then(|c| c.payments)
            .map(|mandate_details_req| {
                serde_json::to_value(&mandate_details_req)
                    .change_context(errors::ApiErrorResponse::InternalServerError)
                    .attach_printable("Failed to parse connector mandate details")
            })
            .transpose()?
    };
    let key_manager_state = &state.into();
    // The billing address is stored encrypted.
    let payment_method_billing_address: Option<Encryptable<Secret<serde_json::Value>>> = req
        .billing
        .clone()
        .async_map(|billing| {
            create_encrypted_data(
                key_manager_state,
                merchant_context.get_merchant_key_store(),
                billing,
            )
        })
        .await
        .transpose()
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Unable to encrypt Payment method billing address")?;
    // The customer must already exist for a migration.
    let customer = db
        .find_customer_by_customer_id_merchant_id(
            &state.into(),
            &customer_id,
            &merchant_id,
            merchant_context.get_merchant_key_store(),
            merchant_context.get_merchant_account().storage_scheme,
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::CustomerNotFound)?;
    let payment_method_card_details = pm_api::PaymentMethodsData::Card(
        pm_api::CardDetailsPaymentMethod::from((card.clone(), None)),
    );
    let payment_method_data_encrypted: Option<Encryptable<Secret<serde_json::Value>>> = Some(
        create_encrypted_data(
            &state.into(),
            merchant_context.get_merchant_key_store(),
            payment_method_card_details,
        )
        .await
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Unable to encrypt Payment method card details")?,
    );
    let payment_method_metadata: Option<serde_json::Value> =
        req.metadata.as_ref().map(|data| data.peek()).cloned();
    let network_transaction_id = req.network_transaction_id.clone();
    let payment_method_id = generate_id(consts::ID_LENGTH, "pm");
    let current_time = common_utils::date_time::now();
    // No locker entry is created on this path, hence `locker_id: None` and
    // the record going straight to Active status.
    let response = db
        .insert_payment_method(
            &state.into(),
            merchant_context.get_merchant_key_store(),
            domain_pm::PaymentMethod {
                customer_id: customer_id.to_owned(),
                merchant_id: merchant_id.to_owned(),
                payment_method_id: payment_method_id.to_string(),
                locker_id: None,
                payment_method: req.payment_method,
                payment_method_type: req.payment_method_type,
                payment_method_issuer: req.payment_method_issuer.clone(),
                scheme: req.card_network.clone().or(card.scheme.clone()),
                metadata: payment_method_metadata.map(Secret::new),
                payment_method_data: payment_method_data_encrypted,
                connector_mandate_details: connector_mandate_details.clone(),
                customer_acceptance: None,
                client_secret: None,
                status: enums::PaymentMethodStatus::Active,
                network_transaction_id: network_transaction_id.clone(),
                payment_method_issuer_code: None,
                accepted_currency: None,
                token: None,
                cardholder_name: None,
                issuer_name: None,
                issuer_country: None,
                payer_country: None,
                is_stored: None,
                swift_code: None,
                direct_debit_token: None,
                created_at: current_time,
                last_modified: current_time,
                last_used_at: current_time,
                payment_method_billing_address,
                updated_by: None,
                version: common_types::consts::API_VERSION,
                network_token_requestor_reference_id: None,
                network_token_locker_id: None,
                network_token_payment_method_data: None,
                vault_source_details: Default::default(),
            },
            merchant_context.get_merchant_account().storage_scheme,
        )
        .await
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to add payment method in db")?;
    logger::debug!("Payment method inserted in db");
    // `Some(true)` only when a non-blank network transaction id was supplied.
    migration_status.network_transaction_id_migrated(
        network_transaction_id.and_then(|val| (!val.is_empty_after_trim()).then_some(true)),
    );
    // `Some(true)` for non-empty mandate details, `Some(false)` when details
    // were supplied but empty, `None` when absent.
    migration_status.connector_mandate_details_migrated(
        connector_mandate_details
            .clone()
            .and_then(|val| if val == json!({}) { None } else { Some(true) })
            .or_else(|| {
                req.connector_mandate_details.clone().and_then(|val| {
                    val.payments
                        .and_then(|payin_val| (!payin_val.0.is_empty()).then_some(false))
                })
            }),
    );
    if customer.default_payment_method_id.is_none() && req.payment_method.is_some() {
        // Best effort: a failure to set the default is logged, not returned.
        let _ = controller
            .set_default_payment_method(&merchant_id, &customer_id, payment_method_id.to_owned())
            .await
            .map_err(|error| logger::error!(?error, "Failed to set the payment method as default"));
    }
    Ok(ApplicationResponse::Json(
        pm_api::PaymentMethodResponse::foreign_from((Some(card), response)),
    ))
}
// need to discuss regarding the migration APIs for v2
/// v2 variant of [`skip_locker_call_and_migrate_payment_method`]; currently
/// unimplemented and will panic if called.
#[cfg(feature = "v2")]
pub async fn skip_locker_call_and_migrate_payment_method(
    _state: state::PaymentMethodsState,
    _req: &pm_api::PaymentMethodMigrate,
    _merchant_id: id_type::MerchantId,
    _merchant_context: &merchant_context::MerchantContext,
    _card: pm_api::CardDetailFromLocker,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodResponse>, errors::ApiErrorResponse> {
    todo!()
}
/// Extract the BIN (first six characters) and the last four characters from
/// a masked card number, validating that both parts contain only characters
/// permitted in a card number.
pub fn get_card_bin_and_last4_digits_for_masked_card(
    masked_card_number: &str,
) -> Result<(String, String), cards::CardNumberValidationErr> {
    // Work character-wise so multi-byte input cannot cause slicing panics.
    let char_count = masked_card_number.chars().count();
    let last4_digits: String = masked_card_number
        .chars()
        .skip(char_count.saturating_sub(4))
        .collect();
    let card_isin: String = masked_card_number.chars().take(6).collect();
    cards::validate::validate_card_number_chars(&card_isin)?;
    cards::validate::validate_card_number_chars(&last4_digits)?;
    Ok((card_isin, last4_digits))
}
/// Save a migrated payment method through the regular add-payment-method
/// path (locker included) and record the migration status flags.
#[cfg(feature = "v1")]
#[instrument(skip_all)]
pub async fn save_migration_payment_method(
    req: pm_api::PaymentMethodCreate,
    migration_status: &mut migration::RecordMigrationStatusBuilder,
    controller: &dyn PaymentMethodsController,
) -> CustomResult<ApplicationResponse<pm_api::PaymentMethodResponse>, errors::ApiErrorResponse> {
    let connector_mandate_details = req
        .connector_mandate_details
        .clone()
        .map(serde_json::to_value)
        .transpose()
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
    let network_transaction_id = req.network_transaction_id.clone();
    let res = controller.add_payment_method(&req).await?;
    // The card itself is always saved on this path.
    migration_status.card_migrated(true);
    // `Some(true)` only when a non-blank network transaction id was supplied.
    migration_status.network_transaction_id_migrated(
        network_transaction_id.and_then(|val| (!val.is_empty_after_trim()).then_some(true)),
    );
    // `Some(true)` for non-empty mandate details, `Some(false)` when details
    // were supplied but empty, `None` when absent.
    migration_status.connector_mandate_details_migrated(
        connector_mandate_details
            .and_then(|val| if val == json!({}) { None } else { Some(true) })
            .or_else(|| {
                req.connector_mandate_details
                    .and_then(|val| (!val.0.is_empty()).then_some(false))
            }),
    );
    Ok(res)
}
</module>
|
{
"crate": "payment_methods",
"file": null,
"files": [
"crates/payment_methods/src/core/migration/payment_methods.rs"
],
"module": "crates/payment_methods/src/core/migration",
"num_files": 1,
"token_count": 5513
}
|
module_3590457672352607426
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: pm_auth
Module: crates/pm_auth/src/connector
Files: 1
</path>
<module>
// File: crates/pm_auth/src/connector/plaid.rs
pub mod transformers;
use std::fmt::Debug;
use common_utils::{
ext_traits::BytesExt,
request::{Method, Request, RequestBuilder, RequestContent},
};
use error_stack::ResultExt;
use masking::{Mask, Maskable};
use transformers as plaid;
use crate::{
core::errors,
types::{
self as auth_types,
api::{
auth_service::{
self, BankAccountCredentials, ExchangeToken, LinkToken, RecipientCreate,
},
ConnectorCommon, ConnectorCommonExt, ConnectorIntegration,
},
},
};
/// Connector implementation for the Plaid payment-method auth service.
#[derive(Debug, Clone)]
pub struct Plaid;
impl<Flow, Request, Response> ConnectorCommonExt<Flow, Request, Response> for Plaid
where
    Self: ConnectorIntegration<Flow, Request, Response>,
{
    /// Assemble the common request headers: content type followed by the
    /// connector-specific auth headers.
    fn build_headers(
        &self,
        req: &auth_types::PaymentAuthRouterData<Flow, Request, Response>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        let mut headers: Vec<(String, Maskable<String>)> = vec![(
            "Content-Type".to_string(),
            self.get_content_type().to_string().into(),
        )];
        headers.extend(self.get_auth_header(&req.connector_auth_type)?);
        Ok(headers)
    }
}
impl ConnectorCommon for Plaid {
fn id(&self) -> &'static str {
"plaid"
}
fn common_get_content_type(&self) -> &'static str {
"application/json"
}
fn base_url<'a>(&self, _connectors: &'a auth_types::PaymentMethodAuthConnectors) -> &'a str {
"https://sandbox.plaid.com"
}
fn get_auth_header(
&self,
auth_type: &auth_types::ConnectorAuthType,
) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
let auth = plaid::PlaidAuthType::try_from(auth_type)
.change_context(errors::ConnectorError::FailedToObtainAuthType)?;
let client_id = auth.client_id.into_masked();
let secret = auth.secret.into_masked();
Ok(vec![
("PLAID-CLIENT-ID".to_string(), client_id),
("PLAID-SECRET".to_string(), secret),
])
}
fn build_error_response(
&self,
res: auth_types::Response,
) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
let response: plaid::PlaidErrorResponse =
res.response
.parse_struct("PlaidErrorResponse")
.change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
Ok(auth_types::ErrorResponse {
status_code: res.status_code,
code: crate::consts::NO_ERROR_CODE.to_string(),
message: response.error_message,
reason: response.display_message,
})
}
}
// Marker implementations wiring Plaid into the auth-service flows.
impl auth_service::AuthService for Plaid {}
impl auth_service::PaymentInitiationRecipientCreate for Plaid {}
impl auth_service::PaymentInitiation for Plaid {}
impl auth_service::AuthServiceLinkToken for Plaid {}
impl ConnectorIntegration<LinkToken, auth_types::LinkTokenRequest, auth_types::LinkTokenResponse>
    for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    /// Endpoint for creating a Plaid Link token.
    fn get_url(
        &self,
        _req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        let base = self.base_url(connectors);
        Ok(format!("{base}/link/token/create"))
    }

    fn get_request_body(
        &self,
        req: &auth_types::LinkTokenRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let body = plaid::PlaidLinkTokenRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(body)))
    }

    fn build_request(
        &self,
        req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        let url = auth_types::PaymentAuthLinkTokenType::get_url(self, req, connectors)?;
        let headers = auth_types::PaymentAuthLinkTokenType::get_headers(self, req, connectors)?;
        let body = auth_types::PaymentAuthLinkTokenType::get_request_body(self, req)?;
        let request = RequestBuilder::new()
            .method(Method::Post)
            .url(&url)
            .attach_default_headers()
            .headers(headers)
            .set_body(body)
            .build();
        Ok(Some(request))
    }

    fn handle_response(
        &self,
        data: &auth_types::LinkTokenRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::LinkTokenRouterData, errors::ConnectorError> {
        let response: plaid::PlaidLinkTokenResponse = res
            .response
            .parse_struct("PlaidLinkTokenResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        auth_types::LinkTokenRouterData::try_from(auth_types::ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
impl auth_service::AuthServiceExchangeToken for Plaid {}

impl
    ConnectorIntegration<
        ExchangeToken,
        auth_types::ExchangeTokenRequest,
        auth_types::ExchangeTokenResponse,
    > for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    /// Endpoint for exchanging a public token for an access token.
    fn get_url(
        &self,
        _req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        let base = self.base_url(connectors);
        Ok(format!("{base}/item/public_token/exchange"))
    }

    fn get_request_body(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let body = plaid::PlaidExchangeTokenRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(body)))
    }

    fn build_request(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        let url = auth_types::PaymentAuthExchangeTokenType::get_url(self, req, connectors)?;
        let headers = auth_types::PaymentAuthExchangeTokenType::get_headers(self, req, connectors)?;
        let body = auth_types::PaymentAuthExchangeTokenType::get_request_body(self, req)?;
        let request = RequestBuilder::new()
            .method(Method::Post)
            .url(&url)
            .attach_default_headers()
            .headers(headers)
            .set_body(body)
            .build();
        Ok(Some(request))
    }

    fn handle_response(
        &self,
        data: &auth_types::ExchangeTokenRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ExchangeTokenRouterData, errors::ConnectorError> {
        let response: plaid::PlaidExchangeTokenResponse = res
            .response
            .parse_struct("PlaidExchangeTokenResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        auth_types::ExchangeTokenRouterData::try_from(auth_types::ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
/// Marker impl: Plaid participates in the bank-account credential retrieval flow.
impl auth_service::AuthServiceBankAccountCredentials for Plaid {}
impl
ConnectorIntegration<
BankAccountCredentials,
auth_types::BankAccountCredentialsRequest,
auth_types::BankAccountCredentialsResponse,
> for Plaid
{
fn get_headers(
&self,
req: &auth_types::BankDetailsRouterData,
connectors: &auth_types::PaymentMethodAuthConnectors,
) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
self.build_headers(req, connectors)
}
fn get_content_type(&self) -> &'static str {
self.common_get_content_type()
}
fn get_url(
&self,
_req: &auth_types::BankDetailsRouterData,
connectors: &auth_types::PaymentMethodAuthConnectors,
) -> errors::CustomResult<String, errors::ConnectorError> {
Ok(format!("{}{}", self.base_url(connectors), "/auth/get"))
}
fn get_request_body(
&self,
req: &auth_types::BankDetailsRouterData,
) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
let req_obj = plaid::PlaidBankAccountCredentialsRequest::try_from(req)?;
Ok(RequestContent::Json(Box::new(req_obj)))
}
fn build_request(
&self,
req: &auth_types::BankDetailsRouterData,
connectors: &auth_types::PaymentMethodAuthConnectors,
) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
Ok(Some(
RequestBuilder::new()
.method(Method::Post)
.url(&auth_types::PaymentAuthBankAccountDetailsType::get_url(
self, req, connectors,
)?)
.attach_default_headers()
.headers(auth_types::PaymentAuthBankAccountDetailsType::get_headers(
self, req, connectors,
)?)
.set_body(
auth_types::PaymentAuthBankAccountDetailsType::get_request_body(self, req)?,
)
.build(),
))
}
fn handle_response(
&self,
data: &auth_types::BankDetailsRouterData,
res: auth_types::Response,
) -> errors::CustomResult<auth_types::BankDetailsRouterData, errors::ConnectorError> {
let response: plaid::PlaidBankAccountCredentialsResponse = res
.response
.parse_struct("PlaidBankAccountCredentialsResponse")
.change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
<auth_types::BankDetailsRouterData>::try_from(auth_types::ResponseRouterData {
response,
data: data.clone(),
http_code: res.status_code,
})
}
fn get_error_response(
&self,
res: auth_types::Response,
) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
self.build_error_response(res)
}
}
impl
    ConnectorIntegration<
        RecipientCreate,
        auth_types::RecipientCreateRequest,
        auth_types::RecipientCreateResponse,
    > for Plaid
{
    /// Headers are shared with every other Plaid flow.
    fn get_headers(
        &self,
        req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    /// Plaid payment-initiation endpoint that registers a payout recipient.
    fn get_url(
        &self,
        _req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        Ok(format!(
            "{}/payment_initiation/recipient/create",
            self.base_url(connectors)
        ))
    }

    fn get_request_body(
        &self,
        req: &auth_types::RecipientCreateRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let body = plaid::PlaidRecipientCreateRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(body)))
    }

    /// Assembles the full POST request from the url/header/body helpers above.
    fn build_request(
        &self,
        req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        let url = auth_types::PaymentInitiationRecipientCreateType::get_url(self, req, connectors)?;
        let headers =
            auth_types::PaymentInitiationRecipientCreateType::get_headers(self, req, connectors)?;
        let body = auth_types::PaymentInitiationRecipientCreateType::get_request_body(self, req)?;
        let request = RequestBuilder::new()
            .method(Method::Post)
            .url(&url)
            .attach_default_headers()
            .headers(headers)
            .set_body(body)
            .build();
        Ok(Some(request))
    }

    /// Deserializes the Plaid response; this conversion is infallible (`From`).
    fn handle_response(
        &self,
        data: &auth_types::RecipientCreateRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::RecipientCreateRouterData, errors::ConnectorError> {
        let parsed: plaid::PlaidRecipientCreateResponse = res
            .response
            .parse_struct("PlaidRecipientCreateResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        let router_data = auth_types::ResponseRouterData {
            response: parsed,
            data: data.clone(),
            http_code: res.status_code,
        };
        Ok(<auth_types::RecipientCreateRouterData>::from(router_data))
    }

    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
</module>
|
{
"crate": "pm_auth",
"file": null,
"files": [
"crates/pm_auth/src/connector/plaid.rs"
],
"module": "crates/pm_auth/src/connector",
"num_files": 1,
"token_count": 3131
}
|
module_-2713615889087939696
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: pm_auth
Module: crates/pm_auth/src/connector/plaid
Files: 1
</path>
<module>
// File: crates/pm_auth/src/connector/plaid/transformers.rs
use std::collections::HashMap;
use common_enums::{PaymentMethod, PaymentMethodType};
use common_utils::{id_type, types as util_types};
use masking::{PeekInterface, Secret};
use serde::{Deserialize, Serialize};
use crate::{core::errors, types};
/// Request body for Plaid's `/link/token/create` endpoint.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidLinkTokenRequest {
    client_name: String,
    country_codes: Vec<String>,
    language: String,
    products: Vec<String>,
    user: User,
    // Only populated for Android clients (see the TryFrom impl below).
    android_package_name: Option<String>,
    // Only populated for iOS clients (see the TryFrom impl below).
    redirect_uri: Option<String>,
}
/// End-user identifier nested inside the link-token request.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct User {
    pub client_user_id: id_type::CustomerId,
}
impl TryFrom<&types::LinkTokenRouterData> for PlaidLinkTokenRequest {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Builds the `/link/token/create` payload from the router data.
    ///
    /// # Errors
    /// Returns `MissingRequiredField` when `country_codes` or `user_info`
    /// is absent from the request.
    fn try_from(item: &types::LinkTokenRouterData) -> Result<Self, Self::Error> {
        Ok(Self {
            client_name: item.request.client_name.clone(),
            country_codes: item.request.country_codes.clone().ok_or(
                errors::ConnectorError::MissingRequiredField {
                    field_name: "country_codes",
                },
            )?,
            // Plaid requires a language; default to English when unspecified.
            language: item
                .request
                .language
                .clone()
                .unwrap_or_else(|| "en".to_string()),
            products: vec!["auth".to_string()],
            user: User {
                // Bug fix: the error previously reported "country_codes"
                // (copy-paste) even though `user_info` was the missing field.
                client_user_id: item.request.user_info.clone().ok_or(
                    errors::ConnectorError::MissingRequiredField {
                        field_name: "user_info",
                    },
                )?,
            },
            // The Android package name is only meaningful for Android clients.
            android_package_name: match item.request.client_platform {
                Some(api_models::enums::ClientPlatform::Android) => {
                    item.request.android_package_name.clone()
                }
                Some(api_models::enums::ClientPlatform::Ios)
                | Some(api_models::enums::ClientPlatform::Web)
                | Some(api_models::enums::ClientPlatform::Unknown)
                | None => None,
            },
            // The redirect URI is only forwarded for iOS clients.
            redirect_uri: match item.request.client_platform {
                Some(api_models::enums::ClientPlatform::Ios) => item.request.redirect_uri.clone(),
                Some(api_models::enums::ClientPlatform::Android)
                | Some(api_models::enums::ClientPlatform::Web)
                | Some(api_models::enums::ClientPlatform::Unknown)
                | None => None,
            },
        })
    }
}
/// Response body of Plaid's `/link/token/create` endpoint.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidLinkTokenResponse {
    link_token: String,
}
impl<F, T>
    TryFrom<types::ResponseRouterData<F, PlaidLinkTokenResponse, T, types::LinkTokenResponse>>
    for types::PaymentAuthRouterData<F, T, types::LinkTokenResponse>
{
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Moves the Plaid link token into the router data's response slot,
    /// keeping every other field from the original router data.
    fn try_from(
        item: types::ResponseRouterData<F, PlaidLinkTokenResponse, T, types::LinkTokenResponse>,
    ) -> Result<Self, Self::Error> {
        let link_token = item.response.link_token;
        Ok(Self {
            response: Ok(types::LinkTokenResponse { link_token }),
            ..item.data
        })
    }
}
/// Request body for Plaid's `/item/public_token/exchange` endpoint.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidExchangeTokenRequest {
    public_token: String,
}
/// Response body of the public-token exchange; carries the long-lived access token.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidExchangeTokenResponse {
    pub access_token: String,
}
impl<F, T>
    TryFrom<
        types::ResponseRouterData<F, PlaidExchangeTokenResponse, T, types::ExchangeTokenResponse>,
    > for types::PaymentAuthRouterData<F, T, types::ExchangeTokenResponse>
{
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Moves the Plaid access token into the router data's response slot,
    /// keeping every other field from the original router data.
    fn try_from(
        item: types::ResponseRouterData<
            F,
            PlaidExchangeTokenResponse,
            T,
            types::ExchangeTokenResponse,
        >,
    ) -> Result<Self, Self::Error> {
        let access_token = item.response.access_token;
        Ok(Self {
            response: Ok(types::ExchangeTokenResponse { access_token }),
            ..item.data
        })
    }
}
impl TryFrom<&types::ExchangeTokenRouterData> for PlaidExchangeTokenRequest {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Wraps the public token from the router data into the request body.
    fn try_from(item: &types::ExchangeTokenRouterData) -> Result<Self, Self::Error> {
        let public_token = item.request.public_token.clone();
        Ok(Self { public_token })
    }
}
/// Request body for Plaid's `/payment_initiation/recipient/create` endpoint.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateRequest {
    pub name: String,
    // Flattened so the `iban` or `bacs` key appears at the top level of the JSON body.
    #[serde(flatten)]
    pub account_data: PlaidRecipientAccountData,
    pub address: Option<PlaidRecipientCreateAddress>,
}
/// Response body of recipient creation; carries Plaid's recipient identifier.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateResponse {
    pub recipient_id: String,
}
/// Account schemes Plaid accepts for recipient creation (serialized lowercase).
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum PlaidRecipientAccountData {
    Iban(Secret<String>),
    Bacs {
        sort_code: Secret<String>,
        account: Secret<String>,
    },
}
impl TryFrom<&types::RecipientAccountData> for PlaidRecipientAccountData {
    type Error = errors::ConnectorError;

    /// Maps internal recipient account data onto the schemes Plaid accepts.
    /// Only IBAN and BACS are supported; every other scheme is rejected.
    fn try_from(item: &types::RecipientAccountData) -> Result<Self, Self::Error> {
        match item {
            types::RecipientAccountData::Iban(iban) => Ok(Self::Iban(iban.clone())),
            types::RecipientAccountData::Bacs {
                sort_code,
                account_number,
            } => {
                let sort_code = sort_code.clone();
                let account = account_number.clone();
                Ok(Self::Bacs { sort_code, account })
            }
            types::RecipientAccountData::FasterPayments { .. }
            | types::RecipientAccountData::Sepa(_)
            | types::RecipientAccountData::SepaInstant(_)
            | types::RecipientAccountData::Elixir { .. }
            | types::RecipientAccountData::Bankgiro(_)
            | types::RecipientAccountData::Plusgiro(_) => {
                Err(errors::ConnectorError::InvalidConnectorConfig {
                    config: "Invalid payment method selected. Only Iban, Bacs Supported",
                })
            }
        }
    }
}
/// Postal address attached to a Plaid recipient-create request.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateAddress {
    pub street: String,
    pub city: String,
    pub postal_code: String,
    // ISO 3166-1 alpha-2 country code, stringified from `CountryAlpha2`.
    pub country: String,
}
impl From<&types::RecipientCreateAddress> for PlaidRecipientCreateAddress {
    /// Copies the address fields, stringifying the alpha-2 country code.
    fn from(item: &types::RecipientCreateAddress) -> Self {
        let country = item.country.to_string();
        Self {
            street: item.street.clone(),
            city: item.city.clone(),
            postal_code: item.postal_code.clone(),
            country,
        }
    }
}
impl TryFrom<&types::RecipientCreateRouterData> for PlaidRecipientCreateRequest {
    type Error = errors::ConnectorError;

    /// Assembles the recipient-create payload; fails when the account data
    /// uses a scheme Plaid does not support.
    fn try_from(item: &types::RecipientCreateRouterData) -> Result<Self, Self::Error> {
        let account_data = PlaidRecipientAccountData::try_from(&item.request.account_data)?;
        let address = item
            .request
            .address
            .as_ref()
            .map(PlaidRecipientCreateAddress::from);
        Ok(Self {
            name: item.request.name.clone(),
            account_data,
            address,
        })
    }
}
impl<F, T>
    From<
        types::ResponseRouterData<
            F,
            PlaidRecipientCreateResponse,
            T,
            types::RecipientCreateResponse,
        >,
    > for types::PaymentAuthRouterData<F, T, types::RecipientCreateResponse>
{
    /// Moves Plaid's recipient id into the router data's response slot,
    /// keeping every other field from the original router data.
    fn from(
        item: types::ResponseRouterData<
            F,
            PlaidRecipientCreateResponse,
            T,
            types::RecipientCreateResponse,
        >,
    ) -> Self {
        let recipient_id = item.response.recipient_id;
        let response = Ok(types::RecipientCreateResponse { recipient_id });
        Self {
            response,
            ..item.data
        }
    }
}
/// Request body for Plaid's `/auth/get` endpoint.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidBankAccountCredentialsRequest {
    access_token: String,
    // When present, restricts the response to the listed account ids.
    options: Option<BankAccountCredentialsOptions>,
}
/// Response body of `/auth/get`: account metadata plus raw account numbers.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsResponse {
    pub accounts: Vec<PlaidBankAccountCredentialsAccounts>,
    pub numbers: PlaidBankAccountCredentialsNumbers,
    // pub item: PlaidBankAccountCredentialsItem,
    pub request_id: String,
}
/// Optional account-id filter for the `/auth/get` request.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct BankAccountCredentialsOptions {
    account_ids: Vec<String>,
}
/// Per-account metadata returned by `/auth/get`.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsAccounts {
    pub account_id: String,
    pub name: String,
    pub subtype: Option<String>,
    pub balances: Option<PlaidBankAccountCredentialsBalances>,
}
/// Balance details for an account; all fields are optional in Plaid's schema.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsBalances {
    pub available: Option<util_types::FloatMajorUnit>,
    pub current: Option<util_types::FloatMajorUnit>,
    pub limit: Option<util_types::FloatMajorUnit>,
    pub iso_currency_code: Option<String>,
    pub unofficial_currency_code: Option<String>,
    pub last_updated_datetime: Option<String>,
}
/// Raw account numbers grouped by scheme (ACH, EFT, international, BACS).
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsNumbers {
    pub ach: Vec<PlaidBankAccountCredentialsACH>,
    pub eft: Vec<PlaidBankAccountCredentialsEFT>,
    pub international: Vec<PlaidBankAccountCredentialsInternational>,
    pub bacs: Vec<PlaidBankAccountCredentialsBacs>,
}
/// Plaid item metadata (currently unused; see the commented-out response field).
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsItem {
    pub item_id: String,
    pub institution_id: Option<String>,
    pub webhook: Option<String>,
    pub error: Option<PlaidErrorResponse>,
}
/// US ACH account/routing numbers.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsACH {
    pub account_id: String,
    pub account: String,
    pub routing: String,
    pub wire_routing: Option<String>,
}
/// Canadian EFT numbers.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsEFT {
    pub account_id: String,
    pub account: String,
    pub institution: String,
    pub branch: String,
}
/// International (IBAN/BIC) numbers.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsInternational {
    pub account_id: String,
    pub iban: String,
    pub bic: String,
}
/// UK BACS account number and sort code.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsBacs {
    pub account_id: String,
    pub account: String,
    pub sort_code: String,
}
impl TryFrom<&types::BankDetailsRouterData> for PlaidBankAccountCredentialsRequest {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Builds the `/auth/get` request, forwarding an account-id filter
    /// only when the caller supplied optional ids.
    fn try_from(item: &types::BankDetailsRouterData) -> Result<Self, Self::Error> {
        let options = item
            .request
            .optional_ids
            .as_ref()
            .map(|bank_account_ids| BankAccountCredentialsOptions {
                account_ids: bank_account_ids
                    .ids
                    .iter()
                    .map(|id| id.peek().to_string())
                    .collect(),
            });
        let access_token = item.request.access_token.peek().to_string();
        Ok(Self {
            access_token,
            options,
        })
    }
}
impl<F, T>
TryFrom<
types::ResponseRouterData<
F,
PlaidBankAccountCredentialsResponse,
T,
types::BankAccountCredentialsResponse,
>,
> for types::PaymentAuthRouterData<F, T, types::BankAccountCredentialsResponse>
{
type Error = error_stack::Report<errors::ConnectorError>;
fn try_from(
item: types::ResponseRouterData<
F,
PlaidBankAccountCredentialsResponse,
T,
types::BankAccountCredentialsResponse,
>,
) -> Result<Self, Self::Error> {
let (account_numbers, accounts_info) = (item.response.numbers, item.response.accounts);
let mut bank_account_vec = Vec::new();
let mut id_to_subtype = HashMap::new();
accounts_info.into_iter().for_each(|acc| {
id_to_subtype.insert(
acc.account_id,
(
acc.subtype,
acc.name,
acc.balances.and_then(|balance| balance.available),
),
);
});
account_numbers.ach.into_iter().for_each(|ach| {
let (acc_type, acc_name, available_balance) = if let Some((
_type,
name,
available_balance,
)) = id_to_subtype.get(&ach.account_id)
{
(_type.to_owned(), Some(name.clone()), *available_balance)
} else {
(None, None, None)
};
let account_details =
types::PaymentMethodTypeDetails::Ach(types::BankAccountDetailsAch {
account_number: Secret::new(ach.account),
routing_number: Secret::new(ach.routing),
});
let bank_details_new = types::BankAccountDetails {
account_name: acc_name,
account_details,
payment_method_type: PaymentMethodType::Ach,
payment_method: PaymentMethod::BankDebit,
account_id: ach.account_id.into(),
account_type: acc_type,
balance: available_balance,
};
bank_account_vec.push(bank_details_new);
});
account_numbers.bacs.into_iter().for_each(|bacs| {
let (acc_type, acc_name, available_balance) =
if let Some((_type, name, available_balance)) = id_to_subtype.get(&bacs.account_id)
{
(_type.to_owned(), Some(name.clone()), *available_balance)
} else {
(None, None, None)
};
let account_details =
types::PaymentMethodTypeDetails::Bacs(types::BankAccountDetailsBacs {
account_number: Secret::new(bacs.account),
sort_code: Secret::new(bacs.sort_code),
});
let bank_details_new = types::BankAccountDetails {
account_name: acc_name,
account_details,
payment_method_type: PaymentMethodType::Bacs,
payment_method: PaymentMethod::BankDebit,
account_id: bacs.account_id.into(),
account_type: acc_type,
balance: available_balance,
};
bank_account_vec.push(bank_details_new);
});
account_numbers.international.into_iter().for_each(|sepa| {
let (acc_type, acc_name, available_balance) =
if let Some((_type, name, available_balance)) = id_to_subtype.get(&sepa.account_id)
{
(_type.to_owned(), Some(name.clone()), *available_balance)
} else {
(None, None, None)
};
let account_details =
types::PaymentMethodTypeDetails::Sepa(types::BankAccountDetailsSepa {
iban: Secret::new(sepa.iban),
bic: Secret::new(sepa.bic),
});
let bank_details_new = types::BankAccountDetails {
account_name: acc_name,
account_details,
payment_method_type: PaymentMethodType::Sepa,
payment_method: PaymentMethod::BankDebit,
account_id: sepa.account_id.into(),
account_type: acc_type,
balance: available_balance,
};
bank_account_vec.push(bank_details_new);
});
Ok(Self {
response: Ok(types::BankAccountCredentialsResponse {
credentials: bank_account_vec,
}),
..item.data
})
}
}
/// Credentials used to authenticate against Plaid's API.
pub struct PlaidAuthType {
    pub client_id: Secret<String>,
    pub secret: Secret<String>,
}
impl TryFrom<&types::ConnectorAuthType> for PlaidAuthType {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Extracts Plaid credentials; only the `BodyKey` auth variant is valid.
    fn try_from(auth_type: &types::ConnectorAuthType) -> Result<Self, Self::Error> {
        if let types::ConnectorAuthType::BodyKey { client_id, secret } = auth_type {
            Ok(Self {
                client_id: client_id.to_owned(),
                secret: secret.to_owned(),
            })
        } else {
            Err(errors::ConnectorError::FailedToObtainAuthType.into())
        }
    }
}
/// Error body returned by Plaid endpoints.
#[derive(Debug, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidErrorResponse {
    pub display_message: Option<String>,
    pub error_code: Option<String>,
    pub error_message: String,
    pub error_type: Option<String>,
}
</module>
|
{
"crate": "pm_auth",
"file": null,
"files": [
"crates/pm_auth/src/connector/plaid/transformers.rs"
],
"module": "crates/pm_auth/src/connector/plaid",
"num_files": 1,
"token_count": 3506
}
|
module_-5335818512723044802
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: external_services
Module: crates/external_services/src
Files: 13
</path>
<module>
// File: crates/external_services/src/superposition.rs
//! Superposition client for dynamic configuration management
/// Type definitions for Superposition integration
pub mod types;
use std::collections::HashMap;
use common_utils::errors::CustomResult;
use error_stack::report;
use masking::ExposeInterface;
pub use self::types::{ConfigContext, SuperpositionClientConfig, SuperpositionError};
fn convert_open_feature_value(value: open_feature::Value) -> Result<serde_json::Value, String> {
match value {
open_feature::Value::String(s) => Ok(serde_json::Value::String(s)),
open_feature::Value::Bool(b) => Ok(serde_json::Value::Bool(b)),
open_feature::Value::Int(n) => Ok(serde_json::Value::Number(serde_json::Number::from(n))),
open_feature::Value::Float(f) => serde_json::Number::from_f64(f)
.map(serde_json::Value::Number)
.ok_or_else(|| format!("Invalid number: {f}")),
open_feature::Value::Struct(sv) => Ok(types::JsonValue::try_from(sv)?.into_inner()),
open_feature::Value::Array(values) => Ok(serde_json::Value::Array(
values
.into_iter()
.map(convert_open_feature_value)
.collect::<Result<Vec<_>, _>>()?,
)),
}
}
/// Superposition client wrapper
// Debug trait cannot be derived because open_feature::Client doesn't implement Debug
#[allow(missing_debug_implementations)]
pub struct SuperpositionClient {
    // OpenFeature client created from the global singleton in `new`.
    client: open_feature::Client,
}
impl SuperpositionClient {
    /// Create a new Superposition client
    ///
    /// Registers a Superposition provider on the global OpenFeature singleton
    /// (polling refresh) and creates a client from it.
    /// NOTE(review): because the provider is set on the process-wide singleton,
    /// constructing a second client with a different config would replace the
    /// provider for all clients — confirm this is intended.
    pub async fn new(config: SuperpositionClientConfig) -> CustomResult<Self, SuperpositionError> {
        let provider_options = superposition_provider::SuperpositionProviderOptions {
            endpoint: config.endpoint.clone(),
            // `expose` unwraps the masked token for the provider.
            token: config.token.expose(),
            org_id: config.org_id.clone(),
            workspace_id: config.workspace_id.clone(),
            fallback_config: None,
            evaluation_cache: None,
            refresh_strategy: superposition_provider::RefreshStrategy::Polling(
                superposition_provider::PollingStrategy {
                    interval: config.polling_interval,
                    timeout: config.request_timeout,
                },
            ),
            experimentation_options: None,
        };
        // Create provider and set up OpenFeature
        let provider = superposition_provider::SuperpositionProvider::new(provider_options);
        // Initialize OpenFeature API and set provider
        let mut api = open_feature::OpenFeature::singleton_mut().await;
        api.set_provider(provider).await;
        // Create client
        let client = api.create_client();
        router_env::logger::info!("Superposition client initialized successfully");
        Ok(Self { client })
    }
    /// Build evaluation context for Superposition requests
    ///
    /// Every context value is forwarded as a string field; no targeting key
    /// is set. A `None` context produces an empty field map.
    fn build_evaluation_context(
        &self,
        context: Option<&ConfigContext>,
    ) -> open_feature::EvaluationContext {
        open_feature::EvaluationContext {
            custom_fields: context.map_or(HashMap::new(), |ctx| {
                ctx.values
                    .iter()
                    .map(|(k, v)| {
                        (
                            k.clone(),
                            open_feature::EvaluationContextFieldValue::String(v.clone()),
                        )
                    })
                    .collect()
            }),
            targeting_key: None,
        }
    }
    /// Get a boolean configuration value from Superposition
    ///
    /// # Errors
    /// Returns `SuperpositionError::ClientError` when evaluation fails.
    pub async fn get_bool_value(
        &self,
        key: &str,
        context: Option<&ConfigContext>,
    ) -> CustomResult<bool, SuperpositionError> {
        let evaluation_context = self.build_evaluation_context(context);
        self.client
            .get_bool_value(key, Some(&evaluation_context), None)
            .await
            .map_err(|e| {
                report!(SuperpositionError::ClientError(format!(
                    "Failed to get bool value for key '{key}': {e:?}"
                )))
            })
    }
    /// Get a string configuration value from Superposition
    ///
    /// # Errors
    /// Returns `SuperpositionError::ClientError` when evaluation fails.
    pub async fn get_string_value(
        &self,
        key: &str,
        context: Option<&ConfigContext>,
    ) -> CustomResult<String, SuperpositionError> {
        let evaluation_context = self.build_evaluation_context(context);
        self.client
            .get_string_value(key, Some(&evaluation_context), None)
            .await
            .map_err(|e| {
                report!(SuperpositionError::ClientError(format!(
                    "Failed to get string value for key '{key}': {e:?}"
                )))
            })
    }
    /// Get an integer configuration value from Superposition
    ///
    /// # Errors
    /// Returns `SuperpositionError::ClientError` when evaluation fails.
    pub async fn get_int_value(
        &self,
        key: &str,
        context: Option<&ConfigContext>,
    ) -> CustomResult<i64, SuperpositionError> {
        let evaluation_context = self.build_evaluation_context(context);
        self.client
            .get_int_value(key, Some(&evaluation_context), None)
            .await
            .map_err(|e| {
                report!(SuperpositionError::ClientError(format!(
                    "Failed to get int value for key '{key}': {e:?}"
                )))
            })
    }
    /// Get a float configuration value from Superposition
    ///
    /// # Errors
    /// Returns `SuperpositionError::ClientError` when evaluation fails.
    pub async fn get_float_value(
        &self,
        key: &str,
        context: Option<&ConfigContext>,
    ) -> CustomResult<f64, SuperpositionError> {
        let evaluation_context = self.build_evaluation_context(context);
        self.client
            .get_float_value(key, Some(&evaluation_context), None)
            .await
            .map_err(|e| {
                report!(SuperpositionError::ClientError(format!(
                    "Failed to get float value for key '{key}': {e:?}"
                )))
            })
    }
    /// Get an object configuration value from Superposition
    ///
    /// Fetched as a struct value and unwrapped into plain `serde_json::Value`.
    ///
    /// # Errors
    /// Returns `SuperpositionError::ClientError` when evaluation fails.
    pub async fn get_object_value(
        &self,
        key: &str,
        context: Option<&ConfigContext>,
    ) -> CustomResult<serde_json::Value, SuperpositionError> {
        let evaluation_context = self.build_evaluation_context(context);
        let json_result = self
            .client
            .get_struct_value::<types::JsonValue>(key, Some(&evaluation_context), None)
            .await
            .map_err(|e| {
                report!(SuperpositionError::ClientError(format!(
                    "Failed to get object value for key '{key}': {e:?}"
                )))
            })?;
        Ok(json_result.into_inner())
    }
}
// File: crates/external_services/src/crm.rs
use std::sync::Arc;
use common_utils::{
errors::CustomResult,
ext_traits::ConfigExt,
request::{Method, Request, RequestBuilder, RequestContent},
};
use error_stack::ResultExt;
use http::header;
use hyperswitch_interfaces::{
crm::{CrmInterface, CrmPayload},
errors::HttpClientError,
types::Proxy,
};
use reqwest;
use router_env::logger;
use crate::{http_client, hubspot_proxy::HubspotRequest};
/// Hubspot Crm configuration
#[derive(Debug, Clone, serde::Deserialize)]
pub struct HubspotProxyConfig {
    /// The ID of the Hubspot form to be submitted.
    pub form_id: String,
    /// The URL to which the Hubspot form data will be sent.
    pub request_url: String,
}
impl HubspotProxyConfig {
    /// Validates Hubspot configuration: both the request URL and the form ID
    /// must be non-empty and non-default.
    pub(super) fn validate(&self) -> Result<(), InvalidCrmConfig> {
        if self.request_url.is_default_or_empty() {
            return Err(InvalidCrmConfig("request url must not be empty"));
        }
        if self.form_id.is_default_or_empty() {
            return Err(InvalidCrmConfig("form_id must not be empty"));
        }
        Ok(())
    }
}
/// Error thrown when the crm config is invalid
#[derive(Debug, Clone)]
pub struct InvalidCrmConfig(pub &'static str);
impl std::error::Error for InvalidCrmConfig {}
impl std::fmt::Display for InvalidCrmConfig {
    // Renders as "crm: <reason>" for error reporting.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "crm: {}", self.0)
    }
}
#[derive(Debug, Clone, Copy)]
/// NoCrm struct — the stand-in CRM implementation used when none is configured.
pub struct NoCrm;
/// Enum representing different Crm configurations
// Deserialized from config with the `crm_manager` tag selecting the variant.
#[derive(Debug, Clone, Default, serde::Deserialize)]
#[serde(tag = "crm_manager")]
#[serde(rename_all = "snake_case")]
pub enum CrmManagerConfig {
    /// Hubspot Crm configuration
    HubspotProxy {
        /// Hubspot Crm configuration
        hubspot_proxy: HubspotProxyConfig,
    },
    /// No Crm configuration
    #[default]
    NoCrm,
}
impl CrmManagerConfig {
    /// Verifies that the client configuration is usable.
    /// `NoCrm` is always valid; Hubspot delegates to its own validation.
    pub fn validate(&self) -> Result<(), InvalidCrmConfig> {
        match self {
            Self::NoCrm => Ok(()),
            Self::HubspotProxy { hubspot_proxy } => hubspot_proxy.validate(),
        }
    }

    /// Retrieves the appropriate Crm client based on the configuration.
    pub async fn get_crm_client(&self) -> Arc<dyn CrmInterface> {
        match self {
            Self::NoCrm => Arc::new(NoCrm),
            Self::HubspotProxy { hubspot_proxy } => {
                let client = hubspot_proxy.clone();
                Arc::new(client)
            }
        }
    }
}
#[async_trait::async_trait]
impl CrmInterface for NoCrm {
    // A unit payload serialized as JSON — there is no CRM to send data to.
    async fn make_body(&self, _details: CrmPayload) -> RequestContent {
        RequestContent::Json(Box::new(()))
    }
    // An empty default request; never actually dispatched (see send_request).
    async fn make_request(&self, _body: RequestContent, _origin_base_url: String) -> Request {
        RequestBuilder::default().build()
    }
    // Always fails: sending with no CRM configured is an unexpected state.
    async fn send_request(
        &self,
        _proxy: &Proxy,
        _request: Request,
    ) -> CustomResult<reqwest::Response, HttpClientError> {
        logger::info!("No CRM configured!");
        Err(HttpClientError::UnexpectedState).attach_printable("No CRM configured!")
    }
}
#[async_trait::async_trait]
impl CrmInterface for HubspotProxyConfig {
    // Builds the form-url-encoded Hubspot submission; missing payload fields
    // fall back to their Default values rather than failing.
    async fn make_body(&self, details: CrmPayload) -> RequestContent {
        RequestContent::FormUrlEncoded(Box::new(HubspotRequest::new(
            details.business_country_name.unwrap_or_default(),
            self.form_id.clone(),
            details.poc_name.unwrap_or_default(),
            details.poc_email.clone().unwrap_or_default(),
            details.legal_business_name.unwrap_or_default(),
            details.business_website.unwrap_or_default(),
        )))
    }
    // POSTs to the configured Hubspot URL with an Origin header derived from
    // the caller's dashboard base URL.
    async fn make_request(&self, body: RequestContent, origin_base_url: String) -> Request {
        RequestBuilder::new()
            .method(Method::Post)
            .url(self.request_url.as_str())
            .set_body(body)
            .attach_default_headers()
            .headers(vec![(
                header::ORIGIN.to_string(),
                format!("{origin_base_url}/dashboard").into(),
            )])
            .build()
    }
    // Dispatches via the shared HTTP client, honoring the given proxy settings.
    async fn send_request(
        &self,
        proxy: &Proxy,
        request: Request,
    ) -> CustomResult<reqwest::Response, HttpClientError> {
        http_client::send_request(proxy, request, None).await
    }
}
// File: crates/external_services/src/grpc_client.rs
/// Dyanimc Routing Client interface implementation
#[cfg(feature = "dynamic_routing")]
pub mod dynamic_routing;
/// gRPC based Heath Check Client interface implementation
#[cfg(feature = "dynamic_routing")]
pub mod health_check_client;
/// gRPC based Recovery Trainer Client interface implementation
#[cfg(feature = "revenue_recovery")]
pub mod revenue_recovery;
/// gRPC based Unified Connector Service Client interface implementation
pub mod unified_connector_service;
use std::{fmt::Debug, sync::Arc};
#[cfg(feature = "dynamic_routing")]
use common_utils::consts;
use common_utils::{id_type, ucs_types};
#[cfg(feature = "dynamic_routing")]
use dynamic_routing::{DynamicRoutingClientConfig, RoutingStrategy};
#[cfg(feature = "dynamic_routing")]
use health_check_client::HealthCheckClient;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use hyper_util::client::legacy::connect::HttpConnector;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use router_env::logger;
use serde_urlencoded;
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
use tonic::body::Body;
use typed_builder::TypedBuilder;
#[cfg(feature = "revenue_recovery")]
pub use self::revenue_recovery::{
recovery_decider_client::{
DeciderRequest, DeciderResponse, RecoveryDeciderClientConfig,
RecoveryDeciderClientInterface, RecoveryDeciderError, RecoveryDeciderResult,
},
GrpcRecoveryHeaders,
};
use crate::grpc_client::unified_connector_service::{
UnifiedConnectorServiceClient, UnifiedConnectorServiceClientConfig,
};
#[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
/// Hyper based Client type for maintaining connection pool for all gRPC services
/// (only compiled when a gRPC-backed feature is enabled).
pub type Client = hyper_util::client::legacy::Client<HttpConnector, Body>;
/// Struct contains all the gRPC Clients
// Optional fields are `None` when the corresponding service is not configured;
// feature-gated fields exist only when their cargo feature is enabled.
#[derive(Debug, Clone)]
pub struct GrpcClients {
    /// The routing client
    #[cfg(feature = "dynamic_routing")]
    pub dynamic_routing: Option<RoutingStrategy>,
    /// Health Check client for all gRPC services
    #[cfg(feature = "dynamic_routing")]
    pub health_client: HealthCheckClient,
    /// Recovery Decider Client
    #[cfg(feature = "revenue_recovery")]
    pub recovery_decider_client: Option<Box<dyn RecoveryDeciderClientInterface>>,
    /// Unified Connector Service client
    pub unified_connector_service_client: Option<UnifiedConnectorServiceClient>,
}
/// Type that contains the configs required to construct a gRPC client with its respective services.
// Each optional config leaves the corresponding client disabled when absent.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize, Default)]
pub struct GrpcClientSettings {
    #[cfg(feature = "dynamic_routing")]
    /// Configs for Dynamic Routing Client
    pub dynamic_routing_client: Option<DynamicRoutingClientConfig>,
    #[cfg(feature = "revenue_recovery")]
    /// Configs for Recovery Decider Client
    pub recovery_decider_client: Option<RecoveryDeciderClientConfig>,
    /// Configs for Unified Connector Service client
    pub unified_connector_service: Option<UnifiedConnectorServiceClientConfig>,
}
impl GrpcClientSettings {
    /// Builds every configured gRPC client and returns them bundled in an
    /// `Arc<GrpcClients>`.
    ///
    /// # Panics
    ///
    /// This function will panic if it fails to establish a connection with the gRPC server.
    /// This function will be called at service startup.
    #[allow(clippy::expect_used)]
    pub async fn get_grpc_client_interface(&self) -> Arc<GrpcClients> {
        // One shared HTTP/2-only hyper client backs all gRPC services, so they
        // reuse a single connection pool.
        #[cfg(any(feature = "dynamic_routing", feature = "revenue_recovery"))]
        let client =
            hyper_util::client::legacy::Client::builder(hyper_util::rt::TokioExecutor::new())
                .http2_only(true)
                .build_http();
        // `None` config -> `None` client; a present-but-failing config panics
        // (startup-time invariant, see `# Panics`).
        #[cfg(feature = "dynamic_routing")]
        let dynamic_routing_connection = self
            .dynamic_routing_client
            .clone()
            .map(|config| config.get_dynamic_routing_connection(client.clone()))
            .transpose()
            .expect("Failed to establish a connection with the Dynamic Routing Server")
            .flatten();
        #[cfg(feature = "dynamic_routing")]
        let health_client = HealthCheckClient::build_connections(self, client.clone())
            .await
            .expect("Failed to build gRPC connections");
        let unified_connector_service_client =
            UnifiedConnectorServiceClient::build_connections(self).await;
        #[cfg(feature = "revenue_recovery")]
        let recovery_decider_client = {
            match &self.recovery_decider_client {
                Some(config) => {
                    // Validate the config first
                    config
                        .validate()
                        .expect("Recovery Decider configuration validation failed");
                    // Create the client
                    let client = config
                        .get_recovery_decider_connection(client.clone())
                        .expect(
                            "Failed to establish a connection with the Recovery Decider Server",
                        );
                    logger::info!("Recovery Decider gRPC client successfully initialized");
                    // Erase the concrete client type behind the trait object
                    // stored in `GrpcClients`.
                    let boxed_client: Box<dyn RecoveryDeciderClientInterface> = Box::new(client);
                    Some(boxed_client)
                }
                None => {
                    logger::debug!("Recovery Decider client configuration not provided, client will be disabled");
                    None
                }
            }
        };
        Arc::new(GrpcClients {
            #[cfg(feature = "dynamic_routing")]
            dynamic_routing: dynamic_routing_connection,
            #[cfg(feature = "dynamic_routing")]
            health_client,
            #[cfg(feature = "revenue_recovery")]
            recovery_decider_client,
            unified_connector_service_client,
        })
    }
}
/// Contains grpc headers
///
/// Attached to outgoing requests as gRPC metadata by
/// `AddHeaders::add_headers_to_grpc_request` (tenant id under
/// `consts::TENANT_HEADER`, request id under `consts::X_REQUEST_ID`).
#[derive(Debug)]
pub struct GrpcHeaders {
    /// Tenant id
    pub tenant_id: String,
    /// Request id
    pub request_id: Option<String>,
}
/// Contains grpc headers for Ucs
#[derive(Debug, TypedBuilder)]
pub struct GrpcHeadersUcs {
    /// Tenant id
    tenant_id: String,
    /// Lineage ids
    lineage_ids: LineageIds,
    /// External vault proxy metadata
    external_vault_proxy_metadata: Option<String>,
    /// Merchant Reference Id
    merchant_reference_id: Option<ucs_types::UcsReferenceId>,
    /// Request id
    request_id: Option<String>,
    /// Shadow mode flag
    // NOTE(review): exact shadow-mode semantics are defined by the UCS
    // consumer and are not visible in this file — confirm before relying on it.
    shadow_mode: Option<bool>,
}
/// Type alias for the GrpcHeadersUcs builder in its initial stage (only
/// `tenant_id`, `request_id` and `shadow_mode` set)
pub type GrpcHeadersUcsBuilderInitial =
    GrpcHeadersUcsBuilder<((String,), (), (), (), (Option<String>,), (Option<bool>,))>;
/// Type alias for the GrpcHeadersUcs builder in its final stage (every field
/// has been provided)
pub type GrpcHeadersUcsBuilderFinal = GrpcHeadersUcsBuilder<(
    (String,),
    (LineageIds,),
    (Option<String>,),
    (Option<ucs_types::UcsReferenceId>,),
    (Option<String>,),
    (Option<bool>,),
)>;
/// Struct to represent the set of lineage ids
///
/// Serializable so it can be flattened into a URL-encoded string via
/// [`LineageIds::get_url_encoded_string`].
#[derive(Debug, serde::Serialize)]
pub struct LineageIds {
    merchant_id: id_type::MerchantId,
    profile_id: id_type::ProfileId,
}
impl LineageIds {
    /// Constructor for LineageIds
    pub fn new(merchant_id: id_type::MerchantId, profile_id: id_type::ProfileId) -> Self {
        Self {
            merchant_id,
            profile_id,
        }
    }
    /// Get the URL-encoded string representation of LineageIds
    /// (`merchant_id=...&profile_id=...`); consumes `self`.
    pub fn get_url_encoded_string(self) -> Result<String, serde_urlencoded::ser::Error> {
        serde_urlencoded::to_string(&self)
    }
}
#[cfg(feature = "dynamic_routing")]
/// Trait to add necessary headers to the tonic Request
pub(crate) trait AddHeaders {
    /// Add necessary header fields to the tonic Request.
    /// Values that fail to parse as metadata are expected to be skipped, not
    /// to fail the request (see the `tonic::Request` impl).
    fn add_headers_to_grpc_request(&mut self, headers: GrpcHeaders);
}
#[cfg(feature = "dynamic_routing")]
impl<T> AddHeaders for tonic::Request<T> {
    #[track_caller]
    fn add_headers_to_grpc_request(&mut self, headers: GrpcHeaders) {
        // Attach the tenant-id metadata entry. A value that cannot be parsed
        // into valid metadata is logged and skipped instead of failing the
        // request.
        headers.tenant_id
            .parse()
            .map(|tenant_id| {
                self
                    .metadata_mut()
                    .append(consts::TENANT_HEADER, tenant_id)
            })
            .inspect_err(
                |err| logger::warn!(header_parse_error=?err,"invalid {} received",consts::TENANT_HEADER),
            )
            .ok();
        // Attach the request-id entry only when one was provided, with the
        // same log-and-skip handling for unparseable values.
        // (Was `Option::map` used purely for its side effect — clippy's
        // `option_map_unit_fn`; `if let` states the intent directly.)
        if let Some(request_id) = headers.request_id {
            request_id
                .parse()
                .map(|request_id| {
                    self
                        .metadata_mut()
                        .append(consts::X_REQUEST_ID, request_id)
                })
                .inspect_err(
                    |err| logger::warn!(header_parse_error=?err,"invalid {} received",consts::X_REQUEST_ID),
                )
                .ok();
        }
    }
}
#[cfg(feature = "dynamic_routing")]
/// Wraps `message` in a [`tonic::Request`], attaches the tenant / request-id
/// metadata from `headers`, logs the resulting request, and returns it.
pub(crate) fn create_grpc_request<T: Debug>(message: T, headers: GrpcHeaders) -> tonic::Request<T> {
    let mut request = tonic::Request::new(message);
    request.add_headers_to_grpc_request(headers);
    logger::info!(?request);
    request
}
// File: crates/external_services/src/no_encryption.rs
//! No encryption functionalities
pub mod core;
pub mod implementers;
// File: crates/external_services/src/lib.rs
//! Interactions with external systems.
#![warn(missing_docs, missing_debug_implementations)]
#[cfg(feature = "aws_kms")]
pub mod aws_kms;
/// crm module
pub mod crm;
#[cfg(feature = "email")]
pub mod email;
pub mod file_storage;
/// Building grpc clients to communicate with the server
pub mod grpc_client;
#[cfg(feature = "hashicorp-vault")]
pub mod hashicorp_vault;
/// http_client module
pub mod http_client;
/// hubspot_proxy module
pub mod hubspot_proxy;
pub mod managers;
pub mod no_encryption;
#[cfg(feature = "superposition")]
pub mod superposition;
/// deserializers module_path
pub mod utils;
#[cfg(feature = "revenue_recovery")]
/// Conversions between `time` date-times and prost (protobuf) timestamps.
pub mod date_time {
    use error_stack::ResultExt;
    /// Errors in time conversion
    #[derive(Debug, thiserror::Error)]
    pub enum DateTimeConversionError {
        #[error("Invalid timestamp value from prost Timestamp: out of representable range")]
        /// Error for out of range
        TimestampOutOfRange,
    }
    /// Converts a `time::PrimitiveDateTime` to a `prost_types::Timestamp`.
    ///
    /// The input is interpreted as UTC.
    pub fn convert_to_prost_timestamp(dt: time::PrimitiveDateTime) -> prost_types::Timestamp {
        let utc = dt.assume_utc();
        let seconds = utc.unix_timestamp();
        // This conversion is safe as nanoseconds (0..999_999_999) always fit within an i32.
        #[allow(clippy::as_conversions)]
        let nanos = utc.nanosecond() as i32;
        prost_types::Timestamp { seconds, nanos }
    }
    /// Converts a `prost_types::Timestamp` to an `time::PrimitiveDateTime`.
    ///
    /// # Errors
    ///
    /// Returns [`DateTimeConversionError::TimestampOutOfRange`] when the
    /// timestamp falls outside the range `time` can represent.
    pub fn convert_from_prost_timestamp(
        ts: &prost_types::Timestamp,
    ) -> error_stack::Result<time::PrimitiveDateTime, DateTimeConversionError> {
        let total_nanos = i128::from(ts.seconds) * 1_000_000_000 + i128::from(ts.nanos);
        let offset_dt = time::OffsetDateTime::from_unix_timestamp_nanos(total_nanos)
            .change_context(DateTimeConversionError::TimestampOutOfRange)?;
        Ok(time::PrimitiveDateTime::new(offset_dt.date(), offset_dt.time()))
    }
}
/// Crate specific constants
pub mod consts {
    /// General purpose base64 engine (standard alphabet, with padding)
    #[cfg(feature = "aws_kms")]
    pub(crate) const BASE64_ENGINE: base64::engine::GeneralPurpose =
        base64::engine::general_purpose::STANDARD;
    // The `x-...` keys below are the metadata headers attached to outgoing
    // UCS (Unified Connector Service) gRPC requests.
    /// Header key used to specify the connector name in UCS requests.
    pub(crate) const UCS_HEADER_CONNECTOR: &str = "x-connector";
    /// Header key used to indicate the authentication type being used.
    pub(crate) const UCS_HEADER_AUTH_TYPE: &str = "x-auth";
    /// Header key for sending the API key used for authentication.
    pub(crate) const UCS_HEADER_API_KEY: &str = "x-api-key";
    /// Header key for sending an additional secret key used in some auth types.
    pub(crate) const UCS_HEADER_KEY1: &str = "x-key1";
    /// Header key for sending the API secret in signature-based authentication.
    pub(crate) const UCS_HEADER_API_SECRET: &str = "x-api-secret";
    /// Header key for sending the AUTH KEY MAP in currency-based authentication.
    pub(crate) const UCS_HEADER_AUTH_KEY_MAP: &str = "x-auth-key-map";
    /// Header key for sending the EXTERNAL VAULT METADATA in proxy payments
    pub(crate) const UCS_HEADER_EXTERNAL_VAULT_METADATA: &str = "x-external-vault-metadata";
    /// Header key for sending the list of lineage ids
    pub(crate) const UCS_LINEAGE_IDS: &str = "x-lineage-ids";
    /// Header key for sending the merchant reference id to UCS
    pub(crate) const UCS_HEADER_REFERENCE_ID: &str = "x-reference-id";
}
/// Metrics for interactions with external systems.
#[cfg(feature = "aws_kms")]
pub mod metrics {
    use router_env::{counter_metric, global_meter, histogram_metric_f64};

    global_meter!(GLOBAL_METER, "EXTERNAL_SERVICES");

    // The module itself is gated on `aws_kms`, so the previous per-item
    // `#[cfg(feature = "aws_kms")]` attributes were redundant and have been
    // removed.
    counter_metric!(AWS_KMS_DECRYPTION_FAILURES, GLOBAL_METER); // No. of AWS KMS Decryption failures
    counter_metric!(AWS_KMS_ENCRYPTION_FAILURES, GLOBAL_METER); // No. of AWS KMS Encryption failures
    histogram_metric_f64!(AWS_KMS_DECRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS decryption time (in sec)
    histogram_metric_f64!(AWS_KMS_ENCRYPT_TIME, GLOBAL_METER); // Histogram for AWS KMS encryption time (in sec)
}
// File: crates/external_services/src/http_client.rs
use common_utils::{consts, errors::CustomResult, request::Request};
use hyperswitch_interfaces::{errors::HttpClientError, types::Proxy};
use request::{HeaderExt, RequestBuilderExt};
use router_env::{instrument, logger, tracing};
/// client module
pub mod client;
/// metrics module
pub mod metrics;
/// request module
pub mod request;
use std::{error::Error, time::Duration};
use common_utils::request::RequestContent;
pub use common_utils::request::{ContentType, Method, RequestBuilder};
use error_stack::ResultExt;
#[allow(missing_docs)]
#[instrument(skip_all)]
pub async fn send_request(
client_proxy: &Proxy,
request: Request,
option_timeout_secs: Option<u64>,
) -> CustomResult<reqwest::Response, HttpClientError> {
logger::info!(method=?request.method, headers=?request.headers, payload=?request.body, ?request);
let url = url::Url::parse(&request.url).change_context(HttpClientError::UrlParsingFailed)?;
let client = client::create_client(
client_proxy,
request.certificate,
request.certificate_key,
request.ca_certificate,
)?;
let headers = request.headers.construct_header_map()?;
let metrics_tag = router_env::metric_attributes!((
consts::METRICS_HOST_TAG_NAME,
url.host_str().unwrap_or_default().to_owned()
));
let request = {
match request.method {
Method::Get => client.get(url),
Method::Post => {
let client = client.post(url);
match request.body {
Some(RequestContent::Json(payload)) => client.json(&payload),
Some(RequestContent::FormData((form, _))) => client.multipart(form),
Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload),
Some(RequestContent::Xml(payload)) => {
let body = quick_xml::se::to_string(&payload)
.change_context(HttpClientError::BodySerializationFailed)?;
client.body(body).header("Content-Type", "application/xml")
}
Some(RequestContent::RawBytes(payload)) => client.body(payload),
None => client,
}
}
Method::Put => {
let client = client.put(url);
match request.body {
Some(RequestContent::Json(payload)) => client.json(&payload),
Some(RequestContent::FormData((form, _))) => client.multipart(form),
Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload),
Some(RequestContent::Xml(payload)) => {
let body = quick_xml::se::to_string(&payload)
.change_context(HttpClientError::BodySerializationFailed)?;
client.body(body).header("Content-Type", "application/xml")
}
Some(RequestContent::RawBytes(payload)) => client.body(payload),
None => client,
}
}
Method::Patch => {
let client = client.patch(url);
match request.body {
Some(RequestContent::Json(payload)) => client.json(&payload),
Some(RequestContent::FormData((form, _))) => client.multipart(form),
Some(RequestContent::FormUrlEncoded(payload)) => client.form(&payload),
Some(RequestContent::Xml(payload)) => {
let body = quick_xml::se::to_string(&payload)
.change_context(HttpClientError::BodySerializationFailed)?;
client.body(body).header("Content-Type", "application/xml")
}
Some(RequestContent::RawBytes(payload)) => client.body(payload),
None => client,
}
}
Method::Delete => client.delete(url),
}
.add_headers(headers)
.timeout(Duration::from_secs(
option_timeout_secs.unwrap_or(consts::REQUEST_TIME_OUT),
))
};
// We cannot clone the request type, because it has Form trait which is not cloneable. So we are cloning the request builder here.
let cloned_send_request = request.try_clone().map(|cloned_request| async {
cloned_request
.send()
.await
.map_err(|error| match error {
error if error.is_timeout() => {
metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag);
HttpClientError::RequestTimeoutReceived
}
error if is_connection_closed_before_message_could_complete(&error) => {
metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag);
HttpClientError::ConnectionClosedIncompleteMessage
}
_ => HttpClientError::RequestNotSent(error.to_string()),
})
.attach_printable("Unable to send request to connector")
});
let send_request = async {
request
.send()
.await
.map_err(|error| match error {
error if error.is_timeout() => {
metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag);
HttpClientError::RequestTimeoutReceived
}
error if is_connection_closed_before_message_could_complete(&error) => {
metrics::REQUEST_BUILD_FAILURE.add(1, metrics_tag);
HttpClientError::ConnectionClosedIncompleteMessage
}
_ => HttpClientError::RequestNotSent(error.to_string()),
})
.attach_printable("Unable to send request to connector")
};
let response = common_utils::metrics::utils::record_operation_time(
send_request,
&metrics::EXTERNAL_REQUEST_TIME,
metrics_tag,
)
.await;
// Retry once if the response is connection closed.
//
// This is just due to the racy nature of networking.
// hyper has a connection pool of idle connections, and it selected one to send your request.
// Most of the time, hyper will receive the server’s FIN and drop the dead connection from its pool.
// But occasionally, a connection will be selected from the pool
// and written to at the same time the server is deciding to close the connection.
// Since hyper already wrote some of the request,
// it can’t really retry it automatically on a new connection, since the server may have acted already
match response {
Ok(response) => Ok(response),
Err(error)
if error.current_context() == &HttpClientError::ConnectionClosedIncompleteMessage =>
{
metrics::AUTO_RETRY_CONNECTION_CLOSED.add(1, metrics_tag);
match cloned_send_request {
Some(cloned_request) => {
logger::info!(
"Retrying request due to connection closed before message could complete"
);
common_utils::metrics::utils::record_operation_time(
cloned_request,
&metrics::EXTERNAL_REQUEST_TIME,
metrics_tag,
)
.await
}
None => {
logger::info!("Retrying request due to connection closed before message could complete failed as request is not cloneable");
Err(error)
}
}
}
err @ Err(_) => err,
}
}
fn is_connection_closed_before_message_could_complete(error: &reqwest::Error) -> bool {
let mut source = error.source();
while let Some(err) = source {
if let Some(hyper_err) = err.downcast_ref::<hyper::Error>() {
if hyper_err.is_incomplete_message() {
return true;
}
}
source = err.source();
}
false
}
// File: crates/external_services/src/managers.rs
//! Config and client managers
pub mod encryption_management;
pub mod secrets_management;
// File: crates/external_services/src/aws_kms.rs
//! Interactions with the AWS KMS SDK
pub mod core;
pub mod implementers;
// File: crates/external_services/src/email.rs
//! Interactions with the AWS SES SDK
use aws_sdk_sesv2::types::Body;
use common_utils::{errors::CustomResult, pii};
use serde::Deserialize;
/// Implementation of aws ses client
pub mod ses;
/// Implementation of SMTP server client
pub mod smtp;
/// Implementation of Email client when email support is disabled
pub mod no_email;
/// Custom Result type alias for Email operations.
pub type EmailResult<T> = CustomResult<T, EmailError>;
/// A trait that defines the methods that must be implemented to send email.
#[async_trait::async_trait]
pub trait EmailClient: Sync + Send + dyn_clone::DynClone {
    /// The rich text type of the email client
    type RichText;
    /// Sends an email to the specified recipient with the given subject and body.
    // NOTE(review): `proxy_url` presumably routes the outgoing request through
    // a proxy — confirm against the concrete client implementations.
    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        proxy_url: Option<&String>,
    ) -> EmailResult<()>;
    /// Convert Stringified HTML to client native rich text format
    /// This has to be done because not all clients may format html as the same
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError>
    where
        Self::RichText: Send;
}
/// A super trait which is automatically implemented for all EmailClients
#[async_trait::async_trait]
pub trait EmailService: Sync + Send + dyn_clone::DynClone {
    /// Compose and send email using the email data.
    /// `base_url` is forwarded to [`EmailData::get_email_data`] when building
    /// the email contents.
    async fn compose_and_send_email(
        &self,
        base_url: &str,
        email_data: Box<dyn EmailData + Send>,
        proxy_url: Option<&String>,
    ) -> EmailResult<()>;
}
#[async_trait::async_trait]
impl<T> EmailService for T
where
    T: EmailClient,
    <Self as EmailClient>::RichText: Send,
{
    async fn compose_and_send_email(
        &self,
        base_url: &str,
        email_data: Box<dyn EmailData + Send>,
        proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        // Ask the data source for the subject/body/recipient triple.
        let EmailContents {
            subject,
            body,
            recipient,
        } = email_data.get_email_data(base_url).await?;
        // Translate the generic intermediate body into this client's native
        // rich-text representation, then hand it off for delivery.
        let formatted_body = self.convert_to_rich_text(body)?;
        self.send_email(recipient, subject, formatted_body, proxy_url)
            .await
    }
}
/// Wrapper holding the intermediate (HTML) string representation of an email
/// body before a client converts it to its native rich-text format.
#[derive(Debug)]
pub struct IntermediateString(String);
impl IntermediateString {
    /// Wraps the given string in an `IntermediateString`.
    pub fn new(inner: String) -> Self {
        IntermediateString(inner)
    }
    /// Unwraps and returns the inner `String`.
    pub fn into_inner(self) -> String {
        let Self(inner) = self;
        inner
    }
}
/// Temporary output for the email subject
#[derive(Debug)]
pub struct EmailContents {
    /// The subject of email
    pub subject: String,
    /// This will be the intermediate representation of the email body in a generic format.
    /// The email clients can convert this intermediate representation to their client specific rich text format
    pub body: IntermediateString,
    /// The email of the recipient to whom the email has to be sent
    pub recipient: pii::Email,
}
/// A trait which will contain the logic of generating the email subject and body
#[async_trait::async_trait]
pub trait EmailData {
    /// Get the email contents, building any links from `base_url`.
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError>;
}
dyn_clone::clone_trait_object!(EmailClient<RichText = Body>);
/// List of available email clients to choose from
///
/// The active variant is selected by the `active_email_client` tag in
/// configuration (SCREAMING_SNAKE_CASE variant names).
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(tag = "active_email_client")]
#[serde(rename_all = "SCREAMING_SNAKE_CASE")]
pub enum EmailClientConfigs {
    #[default]
    /// Default Email client to use when no client is specified
    NoEmailClient,
    /// AWS ses email client
    Ses {
        /// AWS SES client configuration
        aws_ses: ses::SESConfig,
    },
    /// Other Simple SMTP server
    Smtp {
        /// SMTP server configuration
        smtp: smtp::SmtpServerConfig,
    },
}
/// Struct that contains the settings required to construct an EmailClient.
#[derive(Debug, Clone, Default, Deserialize)]
#[serde(default)]
pub struct EmailSettings {
    /// The AWS region to send SES requests to.
    pub aws_region: String,
    /// Number of days for verification of the email
    pub allowed_unverified_days: i64,
    /// Sender email
    pub sender_email: String,
    #[serde(flatten)]
    /// The client specific configurations
    pub client_config: EmailClientConfigs,
    /// Recipient email for recon emails
    pub recon_recipient_email: pii::Email,
    /// Recipient email for prod intent emails
    pub prod_intent_recipient_email: pii::Email,
}
impl EmailSettings {
    /// Validation for the Email client specific configurations
    pub fn validate(&self) -> Result<(), &'static str> {
        // Delegate to whichever client configuration is active; the no-op
        // client has nothing to validate.
        match &self.client_config {
            EmailClientConfigs::NoEmailClient => Ok(()),
            EmailClientConfigs::Ses { aws_ses } => aws_ses.validate(),
            EmailClientConfigs::Smtp { smtp } => smtp.validate(),
        }
    }
}
/// Errors that could occur from EmailClient.
///
/// Returned (via `CustomResult`) by [`EmailClient`], [`EmailService`] and
/// [`EmailData`] implementations.
#[derive(Debug, thiserror::Error)]
pub enum EmailError {
    /// An error occurred when building email client.
    #[error("Error building email client")]
    ClientBuildingFailure,
    /// An error occurred when sending email
    #[error("Error sending email to recipient")]
    EmailSendingFailure,
    /// Failed to generate the email token
    #[error("Failed to generate email token")]
    TokenGenerationFailure,
    /// The expected feature is not implemented
    #[error("Feature not implemented")]
    NotImplemented,
    /// An error occurred when building email content.
    #[error("Error building email content")]
    ContentBuildFailure,
}
// File: crates/external_services/src/file_storage.rs
//! Module for managing file storage operations with support for multiple storage schemes.
use std::{
fmt::{Display, Formatter},
sync::Arc,
};
use common_utils::errors::CustomResult;
/// Includes functionality for AWS S3 storage operations.
#[cfg(feature = "aws_s3")]
mod aws_s3;
mod file_system;
/// Enum representing different file storage configurations, allowing for multiple storage schemes.
///
/// The active backend is selected by the `file_storage_backend` tag in
/// configuration; the local file system is the default.
#[derive(Debug, Clone, Default, serde::Deserialize)]
#[serde(tag = "file_storage_backend")]
#[serde(rename_all = "snake_case")]
pub enum FileStorageConfig {
    /// AWS S3 storage configuration.
    #[cfg(feature = "aws_s3")]
    AwsS3 {
        /// Configuration for AWS S3 file storage.
        aws_s3: aws_s3::AwsFileStorageConfig,
    },
    /// Local file system storage configuration.
    #[default]
    FileSystem,
}
impl FileStorageConfig {
    /// Validates the file storage configuration.
    /// The local file system backend requires no validation.
    pub fn validate(&self) -> Result<(), InvalidFileStorageConfig> {
        match self {
            #[cfg(feature = "aws_s3")]
            Self::AwsS3 { aws_s3 } => aws_s3.validate(),
            Self::FileSystem => Ok(()),
        }
    }
    /// Retrieves the appropriate file storage client based on the file storage configuration.
    // `async` because building the S3 client awaits AWS SDK setup.
    pub async fn get_file_storage_client(&self) -> Arc<dyn FileStorageInterface> {
        match self {
            #[cfg(feature = "aws_s3")]
            Self::AwsS3 { aws_s3 } => Arc::new(aws_s3::AwsFileStorageClient::new(aws_s3).await),
            Self::FileSystem => Arc::new(file_system::FileSystem),
        }
    }
}
/// Trait for file storage operations
///
/// Implemented by each storage backend (S3, local file system); callers hold
/// it as `Arc<dyn FileStorageInterface>` (see
/// [`FileStorageConfig::get_file_storage_client`]).
#[async_trait::async_trait]
pub trait FileStorageInterface: dyn_clone::DynClone + Sync + Send {
    /// Uploads a file to the selected storage scheme.
    async fn upload_file(
        &self,
        file_key: &str,
        file: Vec<u8>,
    ) -> CustomResult<(), FileStorageError>;
    /// Deletes a file from the selected storage scheme.
    async fn delete_file(&self, file_key: &str) -> CustomResult<(), FileStorageError>;
    /// Retrieves a file from the selected storage scheme.
    async fn retrieve_file(&self, file_key: &str) -> CustomResult<Vec<u8>, FileStorageError>;
}
dyn_clone::clone_trait_object!(FileStorageInterface);
/// Error thrown when the file storage config is invalid; wraps a static
/// description of what is wrong.
#[derive(Debug, Clone)]
pub struct InvalidFileStorageConfig(&'static str);
impl std::error::Error for InvalidFileStorageConfig {}
impl Display for InvalidFileStorageConfig {
    fn fmt(&self, formatter: &mut Formatter<'_>) -> std::fmt::Result {
        let Self(reason) = self;
        write!(formatter, "file_storage: {reason}")
    }
}
/// Represents errors that can occur during file storage operations.
///
/// Returned (via `CustomResult`) by [`FileStorageInterface`] methods.
#[derive(Debug, thiserror::Error, PartialEq)]
pub enum FileStorageError {
    /// Indicates that the file upload operation failed.
    #[error("Failed to upload file")]
    UploadFailed,
    /// Indicates that the file retrieval operation failed.
    #[error("Failed to retrieve file")]
    RetrieveFailed,
    /// Indicates that the file deletion operation failed.
    #[error("Failed to delete file")]
    DeleteFailed,
}
// File: crates/external_services/src/hubspot_proxy.rs
use masking::Secret;
/// Lead source constant for Hubspot
pub const HUBSPOT_LEAD_SOURCE: &str = "Hyperswitch Dashboard";
/// Struct representing a request to Hubspot
// Field renames match the camelCase names of the Hubspot form fields.
#[derive(Clone, Debug, serde::Serialize, Default)]
pub struct HubspotRequest {
    /// Indicates whether Hubspot should be used.
    #[serde(rename = "useHubspot")]
    pub use_hubspot: bool,
    /// The country of the user or company.
    pub country: String,
    /// The ID of the Hubspot form being submitted.
    #[serde(rename = "hubspotFormId")]
    pub hubspot_form_id: String,
    /// The first name of the user.
    pub firstname: Secret<String>,
    /// The last name of the user.
    pub lastname: Secret<String>,
    /// The email address of the user.
    pub email: Secret<String>,
    /// The name of the company.
    #[serde(rename = "companyName")]
    pub company_name: String,
    /// The source of the lead, typically set to "Hyperswitch Dashboard".
    pub lead_source: String,
    /// The website URL of the company.
    pub website: String,
    /// The phone number of the user.
    pub phone: Secret<String>,
    /// The role or designation of the user.
    pub role: String,
    /// The monthly GMV (Gross Merchandise Value) of the company.
    #[serde(rename = "monthlyGMV")]
    pub monthly_gmv: String,
    /// Notes from the business development team.
    pub bd_notes: String,
    /// Additional message or comments.
    pub message: String,
}
impl HubspotRequest {
    /// Builds a request with `use_hubspot` set to `true`, `lead_source` set to
    /// [`HUBSPOT_LEAD_SOURCE`], and every field not listed as a parameter left
    /// at its `Default` value.
    // Previously hidden behind `#[allow(missing_docs)]`; the constructor is
    // public API and should be documented like the rest of the crate.
    pub fn new(
        country: String,
        hubspot_form_id: String,
        firstname: Secret<String>,
        email: Secret<String>,
        company_name: String,
        website: String,
    ) -> Self {
        Self {
            use_hubspot: true,
            country,
            hubspot_form_id,
            firstname,
            email,
            company_name,
            lead_source: HUBSPOT_LEAD_SOURCE.to_string(),
            website,
            ..Default::default()
        }
    }
}
// File: crates/external_services/src/utils.rs
//! Custom deserializers for external services configuration
use std::collections::HashSet;
use serde::Deserialize;
/// Parses a comma-separated string into a HashSet of typed values.
///
/// Input that is empty or all whitespace yields an empty set; previously the
/// single empty segment produced by splitting `""` was fed to `T::from_str`,
/// so `""` parsed to `{""}` for `T = String` and to an error for numeric
/// types — contradicting the unit test which expects an empty set.
///
/// # Arguments
///
/// * `value` - String or string reference containing comma-separated values
///
/// # Returns
///
/// * `Ok(HashSet<T>)` - Successfully parsed HashSet
/// * `Err(String)` - Error message listing every value that failed to parse
///
/// # Type Parameters
///
/// * `T` - Target type that implements `FromStr`, `Eq`, and `Hash`
fn deserialize_hashset_inner<T>(value: impl AsRef<str>) -> Result<HashSet<T>, String>
where
    T: Eq + std::str::FromStr + std::hash::Hash,
    <T as std::str::FromStr>::Err: std::fmt::Display,
{
    let trimmed = value.as_ref().trim();
    // `"".split(',')` yields one empty segment, not zero segments;
    // short-circuit so empty input means "no values".
    if trimmed.is_empty() {
        return Ok(HashSet::new());
    }
    let (values, errors) = trimmed
        .split(',')
        .map(|s| {
            T::from_str(s.trim()).map_err(|error| {
                format!(
                    "Unable to deserialize `{}` as `{}`: {error}",
                    s.trim(),
                    std::any::type_name::<T>()
                )
            })
        })
        .fold(
            (HashSet::new(), Vec::new()),
            |(mut values, mut errors), result| match result {
                Ok(t) => {
                    values.insert(t);
                    (values, errors)
                }
                Err(error) => {
                    errors.push(error);
                    (values, errors)
                }
            },
        );
    if !errors.is_empty() {
        Err(format!("Some errors occurred:\n{}", errors.join("\n")))
    } else {
        Ok(values)
    }
}
/// Serde deserializer function for converting comma-separated strings into typed HashSets.
///
/// This function is designed to be used with serde's `#[serde(deserialize_with = "deserialize_hashset")]`
/// attribute to customize deserialization of HashSet fields.
///
/// # Arguments
///
/// * `deserializer` - Serde deserializer instance
///
/// # Returns
///
/// * `Ok(HashSet<T>)` - Successfully deserialized HashSet
/// * `Err(D::Error)` - Serde deserialization error
///
/// # Type Parameters
///
/// * `D` - Serde deserializer type
/// * `T` - Target type that implements `FromStr`, `Eq`, and `Hash`
pub(crate) fn deserialize_hashset<'a, D, T>(deserializer: D) -> Result<HashSet<T>, D::Error>
where
D: serde::Deserializer<'a>,
T: Eq + std::str::FromStr + std::hash::Hash,
<T as std::str::FromStr>::Err: std::fmt::Display,
{
use serde::de::Error;
deserialize_hashset_inner(<String>::deserialize(deserializer)?).map_err(D::Error::custom)
}
/// Unit tests for `deserialize_hashset_inner` covering parsing, whitespace
/// handling, empty input, failures, and de-duplication.
#[cfg(test)]
mod tests {
    use std::collections::HashSet;
    use super::*;
    #[test]
    fn test_deserialize_hashset_inner_success() {
        let result: Result<HashSet<i32>, String> = deserialize_hashset_inner("1,2,3");
        assert!(result.is_ok());
        if let Ok(hashset) = result {
            assert_eq!(hashset.len(), 3);
            assert!(hashset.contains(&1));
            assert!(hashset.contains(&2));
            assert!(hashset.contains(&3));
        }
    }
    #[test]
    fn test_deserialize_hashset_inner_with_whitespace() {
        let result: Result<HashSet<String>, String> = deserialize_hashset_inner(" a , b , c ");
        assert!(result.is_ok());
        if let Ok(hashset) = result {
            assert_eq!(hashset.len(), 3);
            assert!(hashset.contains("a"));
            assert!(hashset.contains("b"));
            assert!(hashset.contains("c"));
        }
    }
    #[test]
    fn test_deserialize_hashset_inner_empty_string() {
        // Intent: empty input means "no values" — the parser must not attempt
        // to parse the empty string itself as a value.
        let result: Result<HashSet<String>, String> = deserialize_hashset_inner("");
        assert!(result.is_ok());
        if let Ok(hashset) = result {
            assert_eq!(hashset.len(), 0);
        }
    }
    #[test]
    fn test_deserialize_hashset_inner_single_value() {
        let result: Result<HashSet<String>, String> = deserialize_hashset_inner("single");
        assert!(result.is_ok());
        if let Ok(hashset) = result {
            assert_eq!(hashset.len(), 1);
            assert!(hashset.contains("single"));
        }
    }
    #[test]
    fn test_deserialize_hashset_inner_invalid_int() {
        let result: Result<HashSet<i32>, String> = deserialize_hashset_inner("1,invalid,3");
        assert!(result.is_err());
        if let Err(error) = result {
            assert!(error.contains("Unable to deserialize `invalid` as `i32`"));
        }
    }
    #[test]
    fn test_deserialize_hashset_inner_duplicates() {
        let result: Result<HashSet<String>, String> = deserialize_hashset_inner("a,b,a,c,b");
        assert!(result.is_ok());
        if let Ok(hashset) = result {
            assert_eq!(hashset.len(), 3); // Duplicates should be removed
            assert!(hashset.contains("a"));
            assert!(hashset.contains("b"));
            assert!(hashset.contains("c"));
        }
    }
}
// File: crates/external_services/src/hashicorp_vault.rs
//! Interactions with the HashiCorp Vault
pub mod core;
pub mod implementers;
</module>
|
{
"crate": "external_services",
"file": null,
"files": [
"crates/external_services/src/superposition.rs",
"crates/external_services/src/crm.rs",
"crates/external_services/src/grpc_client.rs",
"crates/external_services/src/no_encryption.rs",
"crates/external_services/src/lib.rs",
"crates/external_services/src/http_client.rs",
"crates/external_services/src/managers.rs",
"crates/external_services/src/aws_kms.rs",
"crates/external_services/src/email.rs",
"crates/external_services/src/file_storage.rs",
"crates/external_services/src/hubspot_proxy.rs",
"crates/external_services/src/utils.rs",
"crates/external_services/src/hashicorp_vault.rs"
],
"module": "crates/external_services/src",
"num_files": 13,
"token_count": 10622
}
|
module_8734069875264920797
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: external_services
Module: crates/external_services/src/grpc_client
Files: 4
</path>
<module>
// File: crates/external_services/src/grpc_client/health_check_client.rs
use std::{collections::HashMap, fmt::Debug};
use api_models::health_check::{HealthCheckMap, HealthCheckServices};
use common_utils::{errors::CustomResult, ext_traits::AsyncExt};
use error_stack::ResultExt;
pub use health_check::{
health_check_response::ServingStatus, health_client::HealthClient, HealthCheckRequest,
HealthCheckResponse,
};
use router_env::logger;
#[allow(
missing_docs,
unused_qualifications,
clippy::unwrap_used,
clippy::as_conversions,
clippy::use_self
)]
pub mod health_check {
tonic::include_proto!("grpc.health.v1");
}
use super::{Client, DynamicRoutingClientConfig, GrpcClientSettings};
/// Result type for health check operations
pub type HealthCheckResult<T> = CustomResult<T, HealthCheckError>;
/// Errors that can occur while building or querying gRPC health checks
#[derive(Debug, Clone, thiserror::Error)]
pub enum HealthCheckError {
    /// The required input is missing
    #[error("Missing fields: {0} for building the Health check connection")]
    MissingFields(String),
    /// Error from gRPC Server
    #[error("Error from gRPC Server : {0}")]
    ConnectionError(String),
    /// status is invalid
    #[error("Invalid Status from server")]
    InvalidStatus,
}
/// Health Check Client type
///
/// Holds one gRPC health client per registered service, keyed by
/// [`HealthCheckServices`]; populated by [`HealthCheckClient::build_connections`].
#[derive(Debug, Clone)]
pub struct HealthCheckClient {
    /// Health clients for all gRPC based services
    pub clients: HashMap<HealthCheckServices, HealthClient<Client>>,
}
impl HealthCheckClient {
    /// Build connections to all gRPC services.
    ///
    /// Only the dynamic routing service is wired up today: when its config is
    /// `Enabled`, a health client pointing at `http://{host}:{port}` is stored
    /// in the map; otherwise the map stays empty.
    ///
    /// # Errors
    /// Fails when the configured host/port do not form a valid URI.
    pub async fn build_connections(
        config: &GrpcClientSettings,
        client: Client,
    ) -> Result<Self, Box<dyn std::error::Error>> {
        let dynamic_routing_config = &config.dynamic_routing_client;
        // Extract (host, port, service) only when dynamic routing is enabled.
        let connection = match dynamic_routing_config {
            Some(DynamicRoutingClientConfig::Enabled {
                host,
                port,
                service,
            }) => Some((host.clone(), *port, service.clone())),
            _ => None,
        };
        let mut client_map = HashMap::new();
        if let Some(conn) = connection {
            let uri = format!("http://{}:{}", conn.0, conn.1).parse::<tonic::transport::Uri>()?;
            let health_client = HealthClient::with_origin(client, uri);
            client_map.insert(HealthCheckServices::DynamicRoutingService, health_client);
        }
        Ok(Self {
            clients: client_map,
        })
    }
    /// Perform health check for all services involved.
    ///
    /// Best-effort: a failing or unreachable dynamic routing service is
    /// logged and recorded as `false` in the returned map instead of failing
    /// the overall call.
    pub async fn perform_health_check(
        &self,
        config: &GrpcClientSettings,
    ) -> HealthCheckResult<HealthCheckMap> {
        let dynamic_routing_config = &config.dynamic_routing_client;
        let connection = match dynamic_routing_config {
            Some(DynamicRoutingClientConfig::Enabled {
                host,
                port,
                service,
            }) => Some((host.clone(), *port, service.clone())),
            _ => None,
        };
        let health_client = self
            .clients
            .get(&HealthCheckServices::DynamicRoutingService);
        // SAFETY : This is a safe cast as there exists a valid
        // integer value for this variant
        #[allow(clippy::as_conversions)]
        let expected_status = ServingStatus::Serving as i32;
        let mut service_map = HealthCheckMap::new();
        // Healthy only when the call succeeded AND the reported status is
        // `Serving`; any error along the way is logged and treated as false.
        let health_check_succeed = connection
            .as_ref()
            .async_map(|conn| self.get_response_from_grpc_service(conn.2.clone(), health_client))
            .await
            .transpose()
            .change_context(HealthCheckError::ConnectionError(
                "error calling dynamic routing service".to_string(),
            ))
            .map_err(|err| logger::error!(error=?err))
            .ok()
            .flatten()
            .is_some_and(|resp| resp.status == expected_status);
        // Previously this insert was driven through `Option::and_then` purely
        // for its side effect, discarding the result (clippy:
        // `option_map_unit_fn`); a plain conditional states the intent.
        if connection.is_some() {
            service_map.insert(
                HealthCheckServices::DynamicRoutingService,
                health_check_succeed,
            );
        }
        Ok(service_map)
    }
    /// Issue a single gRPC `Check` call for the given service name.
    ///
    /// # Errors
    /// - `MissingFields` when no health client was built for this service
    /// - `ConnectionError` when the gRPC call itself fails
    async fn get_response_from_grpc_service(
        &self,
        service: String,
        client: Option<&HealthClient<Client>>,
    ) -> HealthCheckResult<HealthCheckResponse> {
        let request = tonic::Request::new(HealthCheckRequest { service });
        let mut client = client
            .ok_or(HealthCheckError::MissingFields(
                "[health_client]".to_string(),
            ))?
            .clone();
        let response = client
            .check(request)
            .await
            .change_context(HealthCheckError::ConnectionError(
                "Failed to call dynamic routing service".to_string(),
            ))?
            .into_inner();
        Ok(response)
    }
}
// File: crates/external_services/src/grpc_client/unified_connector_service.rs
use std::collections::{HashMap, HashSet};
use common_enums::connector_enums::Connector;
use common_utils::{consts as common_utils_consts, errors::CustomResult, types::Url};
use error_stack::ResultExt;
pub use hyperswitch_interfaces::unified_connector_service::transformers::UnifiedConnectorServiceError;
use masking::{PeekInterface, Secret};
use router_env::logger;
use tokio::time::{timeout, Duration};
use tonic::{
metadata::{MetadataMap, MetadataValue},
transport::Uri,
};
use unified_connector_service_client::payments::{
self as payments_grpc, payment_service_client::PaymentServiceClient,
PaymentServiceAuthorizeResponse, PaymentServiceTransformRequest,
PaymentServiceTransformResponse,
};
use crate::{
consts,
grpc_client::{GrpcClientSettings, GrpcHeadersUcs},
utils::deserialize_hashset,
};
/// Result type for Dynamic Routing
pub type UnifiedConnectorServiceResult<T> = CustomResult<T, UnifiedConnectorServiceError>;
/// Contains the Unified Connector Service client
#[derive(Debug, Clone)]
pub struct UnifiedConnectorServiceClient {
/// The Unified Connector Service Client
pub client: PaymentServiceClient<tonic::transport::Channel>,
}
/// Contains the Unified Connector Service Client config
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct UnifiedConnectorServiceClientConfig {
/// Base URL of the gRPC Server
pub base_url: Url,
/// Contains the connection timeout duration in seconds
pub connection_timeout: u64,
/// Set of external services/connectors available for the unified connector service
#[serde(default, deserialize_with = "deserialize_hashset")]
pub ucs_only_connectors: HashSet<Connector>,
/// Set of connectors for which psync is disabled in unified connector service
#[serde(default, deserialize_with = "deserialize_hashset")]
pub ucs_psync_disabled_connectors: HashSet<Connector>,
}
/// Contains the Connector Auth Type and related authentication data.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct ConnectorAuthMetadata {
/// Name of the connector (e.g., "stripe", "paypal").
pub connector_name: String,
/// Type of authentication used (e.g., "HeaderKey", "BodyKey", "SignatureKey").
pub auth_type: String,
/// Optional API key used for authentication.
pub api_key: Option<Secret<String>>,
/// Optional additional key used by some authentication types.
pub key1: Option<Secret<String>>,
/// Optional API secret used for signature or secure authentication.
pub api_secret: Option<Secret<String>>,
/// Optional auth_key_map used for authentication.
pub auth_key_map:
Option<HashMap<common_enums::enums::Currency, common_utils::pii::SecretSerdeValue>>,
/// Id of the merchant.
pub merchant_id: Secret<String>,
}
/// External Vault Proxy Related Metadata
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(untagged)]
pub enum ExternalVaultProxyMetadata {
/// VGS proxy data variant
VgsMetadata(VgsMetadata),
}
/// VGS proxy data
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct VgsMetadata {
/// External vault url
pub proxy_url: Url,
/// CA certificates to verify the vault server
pub certificate: Secret<String>,
}
impl UnifiedConnectorServiceClient {
    /// Builds the connection to the gRPC service.
    ///
    /// Returns `None` (after logging) when the config is absent, the base URL
    /// is not a valid URI, the connection attempt fails, or it times out.
    pub async fn build_connections(config: &GrpcClientSettings) -> Option<Self> {
        let Some(ucs_config) = &config.unified_connector_service else {
            router_env::logger::error!(?config.unified_connector_service, "Unified Connector Service config is missing");
            return None;
        };
        let uri: Uri = match ucs_config.base_url.get_string_repr().parse() {
            Ok(parsed_uri) => parsed_uri,
            Err(err) => {
                logger::error!(error = ?err, "Failed to parse URI for Unified Connector Service");
                return None;
            }
        };
        // Bound the connection attempt by the configured timeout.
        let connection_attempt = timeout(
            Duration::from_secs(ucs_config.connection_timeout),
            PaymentServiceClient::connect(uri),
        )
        .await;
        match connection_attempt {
            Ok(Ok(client)) => {
                logger::info!("Successfully connected to Unified Connector Service");
                Some(Self { client })
            }
            Ok(Err(err)) => {
                logger::error!(error = ?err, "Failed to connect to Unified Connector Service");
                None
            }
            Err(err) => {
                logger::error!(error = ?err, "Connection to Unified Connector Service timed out");
                None
            }
        }
    }
    /// Performs Payment Authorize
    pub async fn payment_authorize(
        &self,
        payment_authorize_request: payments_grpc::PaymentServiceAuthorizeRequest,
        connector_auth_metadata: ConnectorAuthMetadata,
        grpc_headers: GrpcHeadersUcs,
    ) -> UnifiedConnectorServiceResult<tonic::Response<PaymentServiceAuthorizeResponse>> {
        // Keep the connector name for error logging; the metadata is consumed
        // while building the gRPC headers.
        let connector_name = connector_auth_metadata.connector_name.clone();
        let grpc_metadata =
            build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?;
        let mut grpc_request = tonic::Request::new(payment_authorize_request);
        *grpc_request.metadata_mut() = grpc_metadata;
        self.client
            .clone()
            .authorize(grpc_request)
            .await
            .change_context(UnifiedConnectorServiceError::PaymentAuthorizeFailure)
            .inspect_err(|error| {
                logger::error!(
                    grpc_error=?error,
                    method="payment_authorize",
                    connector_name=?connector_name,
                    "UCS payment authorize gRPC call failed"
                )
            })
    }
    /// Performs Payment Sync/Get
    pub async fn payment_get(
        &self,
        payment_get_request: payments_grpc::PaymentServiceGetRequest,
        connector_auth_metadata: ConnectorAuthMetadata,
        grpc_headers: GrpcHeadersUcs,
    ) -> UnifiedConnectorServiceResult<tonic::Response<payments_grpc::PaymentServiceGetResponse>>
    {
        let connector_name = connector_auth_metadata.connector_name.clone();
        let grpc_metadata =
            build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?;
        let mut grpc_request = tonic::Request::new(payment_get_request);
        *grpc_request.metadata_mut() = grpc_metadata;
        self.client
            .clone()
            .get(grpc_request)
            .await
            .change_context(UnifiedConnectorServiceError::PaymentGetFailure)
            .inspect_err(|error| {
                logger::error!(
                    grpc_error=?error,
                    method="payment_get",
                    connector_name=?connector_name,
                    "UCS payment get/sync gRPC call failed"
                )
            })
    }
    /// Performs Payment Setup Mandate
    pub async fn payment_setup_mandate(
        &self,
        payment_register_request: payments_grpc::PaymentServiceRegisterRequest,
        connector_auth_metadata: ConnectorAuthMetadata,
        grpc_headers: GrpcHeadersUcs,
    ) -> UnifiedConnectorServiceResult<tonic::Response<payments_grpc::PaymentServiceRegisterResponse>>
    {
        let connector_name = connector_auth_metadata.connector_name.clone();
        let grpc_metadata =
            build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?;
        let mut grpc_request = tonic::Request::new(payment_register_request);
        *grpc_request.metadata_mut() = grpc_metadata;
        self.client
            .clone()
            .register(grpc_request)
            .await
            .change_context(UnifiedConnectorServiceError::PaymentRegisterFailure)
            .inspect_err(|error| {
                logger::error!(
                    grpc_error=?error,
                    method="payment_setup_mandate",
                    connector_name=?connector_name,
                    "UCS payment setup mandate gRPC call failed"
                )
            })
    }
    /// Performs Payment repeat (MIT - Merchant Initiated Transaction).
    pub async fn payment_repeat(
        &self,
        payment_repeat_request: payments_grpc::PaymentServiceRepeatEverythingRequest,
        connector_auth_metadata: ConnectorAuthMetadata,
        grpc_headers: GrpcHeadersUcs,
    ) -> UnifiedConnectorServiceResult<
        tonic::Response<payments_grpc::PaymentServiceRepeatEverythingResponse>,
    > {
        let connector_name = connector_auth_metadata.connector_name.clone();
        let grpc_metadata =
            build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?;
        let mut grpc_request = tonic::Request::new(payment_repeat_request);
        *grpc_request.metadata_mut() = grpc_metadata;
        self.client
            .clone()
            .repeat_everything(grpc_request)
            .await
            .change_context(UnifiedConnectorServiceError::PaymentRepeatEverythingFailure)
            .inspect_err(|error| {
                logger::error!(
                    grpc_error=?error,
                    method="payment_repeat",
                    connector_name=?connector_name,
                    "UCS payment repeat gRPC call failed"
                )
            })
    }
    /// Transforms incoming webhook through UCS
    pub async fn transform_incoming_webhook(
        &self,
        webhook_transform_request: PaymentServiceTransformRequest,
        connector_auth_metadata: ConnectorAuthMetadata,
        grpc_headers: GrpcHeadersUcs,
    ) -> UnifiedConnectorServiceResult<tonic::Response<PaymentServiceTransformResponse>> {
        let connector_name = connector_auth_metadata.connector_name.clone();
        let grpc_metadata =
            build_unified_connector_service_grpc_headers(connector_auth_metadata, grpc_headers)?;
        let mut grpc_request = tonic::Request::new(webhook_transform_request);
        *grpc_request.metadata_mut() = grpc_metadata;
        self.client
            .clone()
            .transform(grpc_request)
            .await
            .change_context(UnifiedConnectorServiceError::WebhookTransformFailure)
            .inspect_err(|error| {
                logger::error!(
                    grpc_error=?error,
                    method="transform_incoming_webhook",
                    connector_name=?connector_name,
                    "UCS webhook transform gRPC call failed"
                )
            })
    }
}
/// Build the gRPC Headers for Unified Connector Service Request.
///
/// Appends connector/auth metadata, lineage ids, and optional request-scoped
/// headers to a fresh [`MetadataMap`]. Secret values are `peek`ed only at
/// this transport boundary.
///
/// # Errors
/// Returns `HeaderInjectionFailed` when a value cannot be encoded as a valid
/// gRPC metadata value, or `ParsingFailed` when the auth key map cannot be
/// serialized to JSON.
pub fn build_unified_connector_service_grpc_headers(
    meta: ConnectorAuthMetadata,
    grpc_headers: GrpcHeadersUcs,
) -> Result<MetadataMap, UnifiedConnectorServiceError> {
    let mut grpc_metadata = MetadataMap::new();
    // Encode a header value, logging and naming the offending header on failure.
    let encode = |header_name: &str,
                  raw_value: &str|
     -> Result<MetadataValue<_>, UnifiedConnectorServiceError> {
        raw_value.parse::<MetadataValue<_>>().map_err(|error| {
            logger::error!(?error);
            UnifiedConnectorServiceError::HeaderInjectionFailed(header_name.to_string())
        })
    };
    grpc_metadata.append(
        consts::UCS_HEADER_CONNECTOR,
        encode("connector", &meta.connector_name)?,
    );
    grpc_metadata.append(
        consts::UCS_HEADER_AUTH_TYPE,
        encode("auth_type", &meta.auth_type)?,
    );
    if let Some(api_key) = meta.api_key {
        grpc_metadata.append(
            consts::UCS_HEADER_API_KEY,
            encode("api_key", api_key.peek())?,
        );
    }
    if let Some(key1) = meta.key1 {
        grpc_metadata.append(consts::UCS_HEADER_KEY1, encode("key1", key1.peek())?);
    }
    if let Some(api_secret) = meta.api_secret {
        grpc_metadata.append(
            consts::UCS_HEADER_API_SECRET,
            encode("api_secret", api_secret.peek())?,
        );
    }
    if let Some(auth_key_map) = meta.auth_key_map {
        let auth_key_map_str = serde_json::to_string(&auth_key_map).map_err(|error| {
            logger::error!(?error);
            UnifiedConnectorServiceError::ParsingFailed
        })?;
        grpc_metadata.append(
            consts::UCS_HEADER_AUTH_KEY_MAP,
            encode("auth_key_map", &auth_key_map_str)?,
        );
    }
    grpc_metadata.append(
        common_utils_consts::X_MERCHANT_ID,
        encode(common_utils_consts::X_MERCHANT_ID, meta.merchant_id.peek())?,
    );
    if let Some(external_vault_proxy_metadata) = grpc_headers.external_vault_proxy_metadata {
        grpc_metadata.append(
            consts::UCS_HEADER_EXTERNAL_VAULT_METADATA,
            encode("external_vault_metadata", &external_vault_proxy_metadata)?,
        );
    }
    let encoded_lineage_ids = grpc_headers
        .lineage_ids
        .get_url_encoded_string()
        .map_err(|err| {
            logger::error!(?err);
            UnifiedConnectorServiceError::HeaderInjectionFailed(consts::UCS_LINEAGE_IDS.to_string())
        })?;
    grpc_metadata.append(
        consts::UCS_LINEAGE_IDS,
        encode(consts::UCS_LINEAGE_IDS, &encoded_lineage_ids)?,
    );
    if let Some(reference_id) = grpc_headers.merchant_reference_id {
        grpc_metadata.append(
            consts::UCS_HEADER_REFERENCE_ID,
            encode(
                consts::UCS_HEADER_REFERENCE_ID,
                reference_id.get_string_repr(),
            )?,
        );
    }
    if let Some(request_id) = grpc_headers.request_id {
        grpc_metadata.append(
            common_utils_consts::X_REQUEST_ID,
            encode(common_utils_consts::X_REQUEST_ID, &request_id)?,
        );
    }
    if let Some(shadow_mode) = grpc_headers.shadow_mode {
        grpc_metadata.append(
            common_utils_consts::X_UNIFIED_CONNECTOR_SERVICE_MODE,
            encode(
                common_utils_consts::X_UNIFIED_CONNECTOR_SERVICE_MODE,
                &shadow_mode.to_string(),
            )?,
        );
    }
    // tenant_id is best-effort: an unencodable value is logged, never a hard failure.
    match grpc_headers.tenant_id.parse() {
        Ok(tenant_id) => {
            grpc_metadata.append(common_utils_consts::TENANT_HEADER, tenant_id);
        }
        Err(err) => {
            logger::error!(
                header_parse_error=?err,
                tenant_id=?grpc_headers.tenant_id,
                "Failed to parse tenant_id header for UCS gRPC request: {}",
                common_utils_consts::TENANT_HEADER
            );
        }
    }
    Ok(grpc_metadata)
}
// File: crates/external_services/src/grpc_client/dynamic_routing.rs
/// Module for Contract based routing
pub mod contract_routing_client;
use std::fmt::Debug;
use common_utils::errors::CustomResult;
use router_env::logger;
use serde;
/// Elimination Routing Client Interface Implementation
pub mod elimination_based_client;
/// Success Routing Client Interface Implementation
pub mod success_rate_client;
pub use contract_routing_client::ContractScoreCalculatorClient;
pub use elimination_based_client::EliminationAnalyserClient;
pub use success_rate_client::SuccessRateCalculatorClient;
use super::Client;
/// Result type for Dynamic Routing
pub type DynamicRoutingResult<T> = CustomResult<T, DynamicRoutingError>;
/// Dynamic Routing Errors
#[derive(Debug, Clone, thiserror::Error)]
pub enum DynamicRoutingError {
/// The required input is missing
#[error("Missing Required Field : {field} for building the Dynamic Routing Request")]
MissingRequiredField {
/// The required field name
field: String,
},
/// Error from Dynamic Routing Server while performing success_rate analysis
#[error("Error from Dynamic Routing Server while perfrming success_rate analysis : {0}")]
SuccessRateBasedRoutingFailure(String),
/// Generic Error from Dynamic Routing Server while performing contract based routing
#[error("Error from Dynamic Routing Server while performing contract based routing: {0}")]
ContractBasedRoutingFailure(String),
/// Generic Error from Dynamic Routing Server while performing contract based routing
#[error("Contract not found in the dynamic routing service")]
ContractNotFound,
/// Error from Dynamic Routing Server while perfrming elimination
#[error("Error from Dynamic Routing Server while perfrming elimination : {0}")]
EliminationRateRoutingFailure(String),
}
/// Type that consists of all the services provided by the client
#[derive(Debug, Clone)]
pub struct RoutingStrategy {
    /// success rate service for Dynamic Routing
    pub success_rate_client: SuccessRateCalculatorClient<Client>,
    /// contract based routing service for Dynamic Routing
    pub contract_based_client: ContractScoreCalculatorClient<Client>,
    /// elimination service for Dynamic Routing
    pub elimination_based_client: EliminationAnalyserClient<Client>,
}
/// Contains the Dynamic Routing Client Config
///
/// `#[serde(untagged)]`: the variant is selected by the shape of the
/// configuration data rather than an explicit tag; `Disabled` is the default.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize, Default)]
#[serde(untagged)]
pub enum DynamicRoutingClientConfig {
    /// If the dynamic routing client config has been enabled
    Enabled {
        /// The host for the client
        host: String,
        /// The port of the client
        port: u16,
        /// Service name
        service: String,
    },
    #[default]
    /// If the dynamic routing client config has been disabled
    Disabled,
}
impl DynamicRoutingClientConfig {
    /// Establish connection with the server.
    ///
    /// For an `Enabled` config this builds one client per routing strategy
    /// (success rate, contract based, elimination) against the same origin; a
    /// `Disabled` config yields `Ok(None)`.
    ///
    /// # Errors
    /// Fails when `http://{host}:{port}` is not a valid URI.
    pub fn get_dynamic_routing_connection(
        self,
        client: Client,
    ) -> Result<Option<RoutingStrategy>, Box<dyn std::error::Error>> {
        match self {
            Self::Disabled => Ok(None),
            Self::Enabled { host, port, .. } => {
                let uri = format!("http://{host}:{port}").parse::<tonic::transport::Uri>()?;
                logger::info!("Connection established with dynamic routing gRPC Server");
                let success_rate_client =
                    SuccessRateCalculatorClient::with_origin(client.clone(), uri.clone());
                let contract_based_client =
                    ContractScoreCalculatorClient::with_origin(client.clone(), uri.clone());
                let elimination_based_client = EliminationAnalyserClient::with_origin(client, uri);
                Ok(Some(RoutingStrategy {
                    success_rate_client,
                    contract_based_client,
                    elimination_based_client,
                }))
            }
        }
    }
}
// File: crates/external_services/src/grpc_client/revenue_recovery.rs
/// Recovery Decider client
pub mod recovery_decider_client;
use std::fmt::Debug;
use common_utils::consts;
use router_env::logger;
/// Contains recovery grpc headers
#[derive(Debug)]
pub struct GrpcRecoveryHeaders {
    /// Request id to propagate to the downstream service, when available
    pub request_id: Option<String>,
}
/// Trait to add necessary recovery headers to the tonic Request
pub(crate) trait AddRecoveryHeaders {
    /// Add necessary recovery header fields to the tonic Request
    fn add_recovery_headers(&mut self, headers: GrpcRecoveryHeaders);
}
impl<T> AddRecoveryHeaders for tonic::Request<T> {
    /// Attach the recovery headers to this request's metadata.
    ///
    /// Best-effort: a `request_id` that is not a valid metadata value is
    /// logged and skipped rather than failing the request.
    #[track_caller]
    fn add_recovery_headers(&mut self, headers: GrpcRecoveryHeaders) {
        // `Option::map`/`Result::map` were previously used purely for their
        // side effects (clippy: `option_map_unit_fn`); explicit control flow
        // states the intent directly.
        if let Some(request_id) = headers.request_id {
            match request_id.parse() {
                Ok(request_id_val) => {
                    self.metadata_mut()
                        .append(consts::X_REQUEST_ID, request_id_val);
                }
                Err(err) => {
                    logger::warn!(header_parse_error=?err,"invalid {} received",consts::X_REQUEST_ID);
                }
            }
        }
    }
}
/// Creates a tonic::Request with recovery headers added.
///
/// Thin constructor: wrap `message` in a request, then delegate header
/// attachment to [`AddRecoveryHeaders`].
pub(crate) fn create_revenue_recovery_grpc_request<T: Debug>(
    message: T,
    recovery_headers: GrpcRecoveryHeaders,
) -> tonic::Request<T> {
    let mut grpc_request = tonic::Request::new(message);
    grpc_request.add_recovery_headers(recovery_headers);
    grpc_request
}
</module>
|
{
"crate": "external_services",
"file": null,
"files": [
"crates/external_services/src/grpc_client/health_check_client.rs",
"crates/external_services/src/grpc_client/unified_connector_service.rs",
"crates/external_services/src/grpc_client/dynamic_routing.rs",
"crates/external_services/src/grpc_client/revenue_recovery.rs"
],
"module": "crates/external_services/src/grpc_client",
"num_files": 4,
"token_count": 4993
}
|
module_5522263881322040348
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: external_services
Module: crates/external_services/src/grpc_client/dynamic_routing
Files: 3
</path>
<module>
// File: crates/external_services/src/grpc_client/dynamic_routing/elimination_based_client.rs
use api_models::routing::{
EliminationAnalyserConfig as EliminationConfig, RoutableConnectorChoice,
RoutableConnectorChoiceWithBucketName,
};
use common_utils::{ext_traits::OptionExt, transformers::ForeignTryFrom};
pub use elimination_rate::{
elimination_analyser_client::EliminationAnalyserClient, EliminationBucketConfig,
EliminationRequest, EliminationResponse, InvalidateBucketRequest, InvalidateBucketResponse,
LabelWithBucketName, UpdateEliminationBucketRequest, UpdateEliminationBucketResponse,
};
use error_stack::ResultExt;
use router_env::{instrument, logger, tracing};
/// Generated bindings for the `elimination` gRPC service, produced at build
/// time by `tonic::include_proto!`.
///
/// The lints are allowed because this module's contents are machine-generated.
#[allow(
    missing_docs,
    unused_qualifications,
    clippy::unwrap_used,
    clippy::as_conversions,
    clippy::use_self
)]
pub mod elimination_rate {
    tonic::include_proto!("elimination");
}
use super::{Client, DynamicRoutingError, DynamicRoutingResult};
use crate::grpc_client::{self, GrpcHeaders};
/// Contract for elimination based routing: performing the elimination
/// analysis, updating the bucket configuration, and invalidating buckets.
#[async_trait::async_trait]
pub trait EliminationBasedRouting: dyn_clone::DynClone + Send + Sync {
    /// To perform the elimination based routing for the list of connectors
    async fn perform_elimination_routing(
        &self,
        id: String,
        params: String,
        labels: Vec<RoutableConnectorChoice>,
        configs: Option<EliminationConfig>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<EliminationResponse>;
    /// To update the bucket size and ttl for the list of connectors, each with its
    /// respective bucket name
    async fn update_elimination_bucket_config(
        &self,
        id: String,
        params: String,
        report: Vec<RoutableConnectorChoiceWithBucketName>,
        config: Option<EliminationConfig>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<UpdateEliminationBucketResponse>;
    /// To invalidate the bucket(s) previously created for the given id
    async fn invalidate_elimination_bucket(
        &self,
        id: String,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<InvalidateBucketResponse>;
}
#[async_trait::async_trait]
impl EliminationBasedRouting for EliminationAnalyserClient<Client> {
    #[instrument(skip_all)]
    async fn perform_elimination_routing(
        &self,
        id: String,
        params: String,
        label_input: Vec<RoutableConnectorChoice>,
        configs: Option<EliminationConfig>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<EliminationResponse> {
        // Convert the optional config first so an invalid config fails fast.
        let config = configs.map(ForeignTryFrom::foreign_try_from).transpose()?;
        let mut labels = Vec::with_capacity(label_input.len());
        for conn_choice in label_input {
            labels.push(conn_choice.to_string());
        }
        let request = grpc_client::create_grpc_request(
            EliminationRequest {
                id,
                params,
                labels,
                config,
            },
            headers,
        );
        let response = self
            .clone()
            .get_elimination_status(request)
            .await
            .change_context(DynamicRoutingError::EliminationRateRoutingFailure(
                "Failed to perform the elimination analysis".to_string(),
            ))?
            .into_inner();
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
    #[instrument(skip_all)]
    async fn update_elimination_bucket_config(
        &self,
        id: String,
        params: String,
        report: Vec<RoutableConnectorChoiceWithBucketName>,
        configs: Option<EliminationConfig>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<UpdateEliminationBucketResponse> {
        let config = configs.map(ForeignTryFrom::foreign_try_from).transpose()?;
        let mut labels_with_bucket_name = Vec::with_capacity(report.len());
        for conn_choice_with_bucket in report {
            labels_with_bucket_name.push(LabelWithBucketName {
                label: conn_choice_with_bucket
                    .routable_connector_choice
                    .to_string(),
                bucket_name: conn_choice_with_bucket.bucket_name,
            });
        }
        let request = grpc_client::create_grpc_request(
            UpdateEliminationBucketRequest {
                id,
                params,
                labels_with_bucket_name,
                config,
            },
            headers,
        );
        let response = self
            .clone()
            .update_elimination_bucket(request)
            .await
            .change_context(DynamicRoutingError::EliminationRateRoutingFailure(
                "Failed to update the elimination bucket".to_string(),
            ))?
            .into_inner();
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
    #[instrument(skip_all)]
    async fn invalidate_elimination_bucket(
        &self,
        id: String,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<InvalidateBucketResponse> {
        let request = grpc_client::create_grpc_request(InvalidateBucketRequest { id }, headers);
        let grpc_response = self
            .clone()
            .invalidate_bucket(request)
            .await
            .change_context(DynamicRoutingError::EliminationRateRoutingFailure(
                "Failed to invalidate the elimination bucket".to_string(),
            ))?;
        let response = grpc_response.into_inner();
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
}
impl ForeignTryFrom<EliminationConfig> for EliminationBucketConfig {
    type Error = error_stack::Report<DynamicRoutingError>;
    // Both fields are mandatory on the wire; a missing value becomes a typed
    // `MissingRequiredField` error.
    fn foreign_try_from(config: EliminationConfig) -> Result<Self, Self::Error> {
        let bucket_size = config
            .bucket_size
            .get_required_value("bucket_size")
            .change_context(DynamicRoutingError::MissingRequiredField {
                field: "bucket_size".to_string(),
            })?;
        let bucket_leak_interval_in_secs = config
            .bucket_leak_interval_in_secs
            .get_required_value("bucket_leak_interval_in_secs")
            .change_context(DynamicRoutingError::MissingRequiredField {
                field: "bucket_leak_interval_in_secs".to_string(),
            })?;
        Ok(Self {
            bucket_size,
            bucket_leak_interval_in_secs,
        })
    }
}
// File: crates/external_services/src/grpc_client/dynamic_routing/success_rate_client.rs
use api_models::routing::{
CurrentBlockThreshold, RoutableConnectorChoice, RoutableConnectorChoiceWithStatus,
SuccessBasedRoutingConfig, SuccessBasedRoutingConfigBody, SuccessRateSpecificityLevel,
};
use common_utils::{ext_traits::OptionExt, transformers::ForeignTryFrom};
use error_stack::ResultExt;
use router_env::{instrument, logger, tracing};
pub use success_rate::{
success_rate_calculator_client::SuccessRateCalculatorClient, CalGlobalSuccessRateConfig,
CalGlobalSuccessRateRequest, CalGlobalSuccessRateResponse, CalSuccessRateConfig,
CalSuccessRateRequest, CalSuccessRateResponse,
CurrentBlockThreshold as DynamicCurrentThreshold, InvalidateWindowsRequest,
InvalidateWindowsResponse, LabelWithStatus,
SuccessRateSpecificityLevel as ProtoSpecificityLevel, UpdateSuccessRateWindowConfig,
UpdateSuccessRateWindowRequest, UpdateSuccessRateWindowResponse,
};
/// Generated bindings for the `success_rate` gRPC service, produced at build
/// time by `tonic::include_proto!`.
///
/// The lints are allowed because this module's contents are machine-generated.
#[allow(
    missing_docs,
    unused_qualifications,
    clippy::unwrap_used,
    clippy::as_conversions,
    clippy::use_self
)]
pub mod success_rate {
    tonic::include_proto!("success_rate");
}
use super::{Client, DynamicRoutingError, DynamicRoutingResult};
use crate::grpc_client::{self, GrpcHeaders};
/// Contract for success-rate based dynamic routing: calculating success
/// rates, updating the success rate window, and invalidating routing keys.
#[async_trait::async_trait]
pub trait SuccessBasedDynamicRouting: dyn_clone::DynClone + Send + Sync {
    /// To calculate the success rate for the list of chosen connectors
    async fn calculate_success_rate(
        &self,
        id: String,
        success_rate_based_config: SuccessBasedRoutingConfig,
        params: String,
        label_input: Vec<RoutableConnectorChoice>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<CalSuccessRateResponse>;
    /// To update the success rate window with the observed statuses for the given labels
    async fn update_success_rate(
        &self,
        id: String,
        success_rate_based_config: SuccessBasedRoutingConfig,
        params: String,
        response: Vec<RoutableConnectorChoiceWithStatus>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<UpdateSuccessRateWindowResponse>;
    /// To invalidate the success rate routing keys
    async fn invalidate_success_rate_routing_keys(
        &self,
        id: String,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<InvalidateWindowsResponse>;
    /// To calculate both global and merchant specific success rate for the list of chosen connectors
    async fn calculate_entity_and_global_success_rate(
        &self,
        id: String,
        success_rate_based_config: SuccessBasedRoutingConfig,
        params: String,
        label_input: Vec<RoutableConnectorChoice>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<CalGlobalSuccessRateResponse>;
}
#[async_trait::async_trait]
impl SuccessBasedDynamicRouting for SuccessRateCalculatorClient<Client> {
#[instrument(skip_all)]
async fn calculate_success_rate(
&self,
id: String,
success_rate_based_config: SuccessBasedRoutingConfig,
params: String,
label_input: Vec<RoutableConnectorChoice>,
headers: GrpcHeaders,
) -> DynamicRoutingResult<CalSuccessRateResponse> {
let labels = label_input
.into_iter()
.map(|conn_choice| conn_choice.to_string())
.collect::<Vec<_>>();
let config = success_rate_based_config
.config
.map(ForeignTryFrom::foreign_try_from)
.transpose()?;
let request = grpc_client::create_grpc_request(
CalSuccessRateRequest {
id,
params,
labels,
config,
},
headers,
);
let response = self
.clone()
.fetch_success_rate(request)
.await
.change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure(
"Failed to fetch the success rate".to_string(),
))?
.into_inner();
logger::info!(dynamic_routing_response=?response);
Ok(response)
}
#[instrument(skip_all)]
async fn update_success_rate(
&self,
id: String,
success_rate_based_config: SuccessBasedRoutingConfig,
params: String,
label_input: Vec<RoutableConnectorChoiceWithStatus>,
headers: GrpcHeaders,
) -> DynamicRoutingResult<UpdateSuccessRateWindowResponse> {
let config = success_rate_based_config
.config
.map(ForeignTryFrom::foreign_try_from)
.transpose()?;
let labels_with_status = label_input
.clone()
.into_iter()
.map(|conn_choice| LabelWithStatus {
label: conn_choice.routable_connector_choice.to_string(),
status: conn_choice.status,
})
.collect();
let global_labels_with_status = label_input
.into_iter()
.map(|conn_choice| LabelWithStatus {
label: conn_choice.routable_connector_choice.connector.to_string(),
status: conn_choice.status,
})
.collect();
let request = grpc_client::create_grpc_request(
UpdateSuccessRateWindowRequest {
id,
params,
labels_with_status,
config,
global_labels_with_status,
},
headers,
);
let response = self
.clone()
.update_success_rate_window(request)
.await
.change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure(
"Failed to update the success rate window".to_string(),
))?
.into_inner();
logger::info!(dynamic_routing_response=?response);
Ok(response)
}
#[instrument(skip_all)]
async fn invalidate_success_rate_routing_keys(
&self,
id: String,
headers: GrpcHeaders,
) -> DynamicRoutingResult<InvalidateWindowsResponse> {
let request = grpc_client::create_grpc_request(InvalidateWindowsRequest { id }, headers);
let response = self
.clone()
.invalidate_windows(request)
.await
.change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure(
"Failed to invalidate the success rate routing keys".to_string(),
))?
.into_inner();
logger::info!(dynamic_routing_response=?response);
Ok(response)
}
async fn calculate_entity_and_global_success_rate(
&self,
id: String,
success_rate_based_config: SuccessBasedRoutingConfig,
params: String,
label_input: Vec<RoutableConnectorChoice>,
headers: GrpcHeaders,
) -> DynamicRoutingResult<CalGlobalSuccessRateResponse> {
let labels = label_input
.clone()
.into_iter()
.map(|conn_choice| conn_choice.to_string())
.collect::<Vec<_>>();
let global_labels = label_input
.into_iter()
.map(|conn_choice| conn_choice.connector.to_string())
.collect::<Vec<_>>();
let config = success_rate_based_config
.config
.map(ForeignTryFrom::foreign_try_from)
.transpose()?;
let request = grpc_client::create_grpc_request(
CalGlobalSuccessRateRequest {
entity_id: id,
entity_params: params,
entity_labels: labels,
global_labels,
config,
},
headers,
);
let response = self
.clone()
.fetch_entity_and_global_success_rate(request)
.await
.change_context(DynamicRoutingError::SuccessRateBasedRoutingFailure(
"Failed to fetch the entity and global success rate".to_string(),
))?
.into_inner();
logger::info!(dynamic_routing_response=?response);
Ok(response)
}
}
impl ForeignTryFrom<CurrentBlockThreshold> for DynamicCurrentThreshold {
    type Error = error_stack::Report<DynamicRoutingError>;
    // `max_total_count` is mandatory; a missing value becomes a typed
    // `MissingRequiredField` error. `duration_in_mins` passes through as-is.
    fn foreign_try_from(current_threshold: CurrentBlockThreshold) -> Result<Self, Self::Error> {
        let max_total_count = current_threshold
            .max_total_count
            .get_required_value("max_total_count")
            .change_context(DynamicRoutingError::MissingRequiredField {
                field: "max_total_count".to_string(),
            })?;
        Ok(Self {
            duration_in_mins: current_threshold.duration_in_mins,
            max_total_count,
        })
    }
}
impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for UpdateSuccessRateWindowConfig {
    type Error = error_stack::Report<DynamicRoutingError>;
    // `max_aggregates_size` is mandatory; `current_block_threshold` is
    // converted only when present.
    fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> {
        Ok(Self {
            max_aggregates_size: config
                .max_aggregates_size
                // Fixed: previously passed the misspelled key
                // "max_aggregate_size"; keep the reported name consistent
                // with the actual field.
                .get_required_value("max_aggregates_size")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "max_aggregates_size".to_string(),
                })?,
            current_block_threshold: config
                .current_block_threshold
                .map(ForeignTryFrom::foreign_try_from)
                .transpose()?,
        })
    }
}
impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for CalSuccessRateConfig {
    type Error = error_stack::Report<DynamicRoutingError>;
    /// Converts the API config into the gRPC success-rate-calculation config;
    /// fails when `min_aggregates_size` or `default_success_rate` is missing.
    fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> {
        Ok(Self {
            min_aggregates_size: config
                .min_aggregates_size
                // Fix: the name passed to `get_required_value` was
                // "min_aggregate_size", inconsistent with the field name and
                // the error below.
                .get_required_value("min_aggregates_size")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "min_aggregates_size".to_string(),
                })?,
            default_success_rate: config
                .default_success_rate
                .get_required_value("default_success_rate")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "default_success_rate".to_string(),
                })?,
            // Merchant-level specificity maps to the proto `Entity` level.
            specificity_level: match config.specificity_level {
                SuccessRateSpecificityLevel::Merchant => Some(ProtoSpecificityLevel::Entity.into()),
                SuccessRateSpecificityLevel::Global => Some(ProtoSpecificityLevel::Global.into()),
            },
            exploration_percent: config.exploration_percent,
            shuffle_on_tie_during_exploitation: config.shuffle_on_tie_during_exploitation,
        })
    }
}
impl ForeignTryFrom<SuccessBasedRoutingConfigBody> for CalGlobalSuccessRateConfig {
    type Error = error_stack::Report<DynamicRoutingError>;
    /// Converts the API config into the gRPC global success-rate config; fails
    /// when `min_aggregates_size` or `default_success_rate` is missing.
    fn foreign_try_from(config: SuccessBasedRoutingConfigBody) -> Result<Self, Self::Error> {
        Ok(Self {
            entity_min_aggregates_size: config
                .min_aggregates_size
                // Fix: the name passed to `get_required_value` was
                // "min_aggregate_size", inconsistent with the field name and
                // the error below.
                .get_required_value("min_aggregates_size")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "min_aggregates_size".to_string(),
                })?,
            entity_default_success_rate: config
                .default_success_rate
                .get_required_value("default_success_rate")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "default_success_rate".to_string(),
                })?,
        })
    }
}
// File: crates/external_services/src/grpc_client/dynamic_routing/contract_routing_client.rs
use api_models::routing::{
ContractBasedRoutingConfig, ContractBasedRoutingConfigBody, ContractBasedTimeScale,
LabelInformation, RoutableConnectorChoice, RoutableConnectorChoiceWithStatus,
};
use common_utils::{
ext_traits::OptionExt,
transformers::{ForeignFrom, ForeignTryFrom},
};
pub use contract_routing::{
contract_score_calculator_client::ContractScoreCalculatorClient, CalContractScoreConfig,
CalContractScoreRequest, CalContractScoreResponse, InvalidateContractRequest,
InvalidateContractResponse, LabelInformation as ProtoLabelInfo, TimeScale,
UpdateContractRequest, UpdateContractResponse,
};
use error_stack::ResultExt;
use router_env::logger;
use crate::grpc_client::{self, GrpcHeaders};
#[allow(
missing_docs,
unused_qualifications,
clippy::unwrap_used,
clippy::as_conversions,
clippy::use_self
)]
pub mod contract_routing {
tonic::include_proto!("contract_routing");
}
pub use tonic::Code;
use super::{Client, DynamicRoutingError, DynamicRoutingResult};
/// Operations required for contract-based dynamic routing: calculating,
/// updating and invalidating contract scores over gRPC.
#[async_trait::async_trait]
pub trait ContractBasedDynamicRouting: dyn_clone::DynClone + Send + Sync {
    /// To calculate the contract scores for the list of chosen connectors
    async fn calculate_contract_score(
        &self,
        id: String,
        config: ContractBasedRoutingConfig,
        params: String,
        label_input: Vec<RoutableConnectorChoice>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<CalContractScoreResponse>;
    /// To update the contract scores with the given labels
    async fn update_contracts(
        &self,
        id: String,
        label_info: Vec<LabelInformation>,
        params: String,
        response: Vec<RoutableConnectorChoiceWithStatus>,
        incr_count: u64,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<UpdateContractResponse>;
    /// To invalidate the contract scores held against the given id
    async fn invalidate_contracts(
        &self,
        id: String,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<InvalidateContractResponse>;
}
#[async_trait::async_trait]
impl ContractBasedDynamicRouting for ContractScoreCalculatorClient<Client> {
    /// Fetches contract scores for the given connector choices; a gRPC
    /// `NotFound` is surfaced as `ContractNotFound` so callers can
    /// distinguish "no contract yet" from other failures.
    async fn calculate_contract_score(
        &self,
        id: String,
        config: ContractBasedRoutingConfig,
        params: String,
        label_input: Vec<RoutableConnectorChoice>,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<CalContractScoreResponse> {
        let labels = label_input
            .into_iter()
            .map(|conn_choice| conn_choice.to_string())
            .collect::<Vec<_>>();
        let config = config
            .config
            .map(ForeignTryFrom::foreign_try_from)
            .transpose()?;
        let request = grpc_client::create_grpc_request(
            CalContractScoreRequest {
                id,
                params,
                labels,
                config,
            },
            headers,
        );
        let response = self
            .clone()
            .fetch_contract_score(request)
            .await
            .map_err(|err| match err.code() {
                Code::NotFound => DynamicRoutingError::ContractNotFound,
                _ => DynamicRoutingError::ContractBasedRoutingFailure(err.to_string()),
            })?
            .into_inner();
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
    /// Updates contract counts for the given labels, advancing every label's
    /// current count by `incr_count`.
    async fn update_contracts(
        &self,
        id: String,
        label_info: Vec<LabelInformation>,
        params: String,
        _response: Vec<RoutableConnectorChoiceWithStatus>,
        incr_count: u64,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<UpdateContractResponse> {
        let mut labels_information = label_info
            .into_iter()
            .map(ProtoLabelInfo::foreign_from)
            .collect::<Vec<_>>();
        labels_information
            .iter_mut()
            .for_each(|info| info.current_count += incr_count);
        let request = grpc_client::create_grpc_request(
            UpdateContractRequest {
                id,
                params,
                labels_information,
            },
            headers,
        );
        let response = self
            .clone()
            .update_contract(request)
            .await
            .change_context(DynamicRoutingError::ContractBasedRoutingFailure(
                "Failed to update the contracts".to_string(),
            ))?
            .into_inner();
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
    /// Invalidates all contract scores held against `id`.
    async fn invalidate_contracts(
        &self,
        id: String,
        headers: GrpcHeaders,
    ) -> DynamicRoutingResult<InvalidateContractResponse> {
        let request = grpc_client::create_grpc_request(InvalidateContractRequest { id }, headers);
        let response = self
            .clone()
            .invalidate_contract(request)
            .await
            .change_context(DynamicRoutingError::ContractBasedRoutingFailure(
                "Failed to invalidate the contracts".to_string(),
            ))?
            .into_inner();
        // Consistency fix: every other operation in this impl (and the
        // success-rate client) logs the gRPC response; invalidation was the
        // only one that did not, making invalidations untraceable.
        logger::info!(dynamic_routing_response=?response);
        Ok(response)
    }
}
impl ForeignFrom<ContractBasedTimeScale> for TimeScale {
    /// Maps the API time scale onto its proto integer representation:
    /// `Day` becomes 0, every other scale becomes 1.
    fn foreign_from(scale: ContractBasedTimeScale) -> Self {
        let time_scale = if matches!(scale, ContractBasedTimeScale::Day) {
            0
        } else {
            1
        };
        Self { time_scale }
    }
}
impl ForeignTryFrom<ContractBasedRoutingConfigBody> for CalContractScoreConfig {
    type Error = error_stack::Report<DynamicRoutingError>;
    /// Converts the API config into the gRPC contract-score config; fails when
    /// the mandatory `constants` field is absent.
    fn foreign_try_from(config: ContractBasedRoutingConfigBody) -> Result<Self, Self::Error> {
        Ok(Self {
            constants: config
                .constants
                .get_required_value("constants")
                .change_context(DynamicRoutingError::MissingRequiredField {
                    field: "constants".to_string(),
                })?,
            // `config` is owned, so `time_scale` can be moved out directly;
            // the previous `.clone()` was redundant.
            time_scale: config.time_scale.map(TimeScale::foreign_from),
        })
    }
}
impl ForeignFrom<LabelInformation> for ProtoLabelInfo {
    /// Builds the proto label by joining the label and merchant connector
    /// account id as `label:mca_id`; the count starts at zero and is advanced
    /// by the caller (see `update_contracts`).
    fn foreign_from(config: LabelInformation) -> Self {
        Self {
            // `format!` takes its arguments by reference, so the previous
            // `config.label.clone()` was a dead allocation.
            label: format!("{}:{}", config.label, config.mca_id.get_string_repr()),
            target_count: config.target_count,
            target_time: config.target_time,
            current_count: u64::default(),
        }
    }
}
</module>
|
{
"crate": "external_services",
"file": null,
"files": [
"crates/external_services/src/grpc_client/dynamic_routing/elimination_based_client.rs",
"crates/external_services/src/grpc_client/dynamic_routing/success_rate_client.rs",
"crates/external_services/src/grpc_client/dynamic_routing/contract_routing_client.rs"
],
"module": "crates/external_services/src/grpc_client/dynamic_routing",
"num_files": 3,
"token_count": 4913
}
|
module_6792309158182427471
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: external_services
Module: crates/external_services/src/email
Files: 3
</path>
<module>
// File: crates/external_services/src/email/ses.rs
use std::time::{Duration, SystemTime};
use aws_sdk_sesv2::{
config::Region,
operation::send_email::SendEmailError,
types::{Body, Content, Destination, EmailContent, Message},
Client,
};
use aws_sdk_sts::config::Credentials;
use aws_smithy_runtime::client::http::hyper_014::HyperClientBuilder;
use common_utils::{errors::CustomResult, pii};
use error_stack::{report, ResultExt};
use hyper::Uri;
use masking::PeekInterface;
use router_env::logger;
use crate::email::{EmailClient, EmailError, EmailResult, EmailSettings, IntermediateString};
/// Client for AWS SES operation
#[derive(Debug, Clone)]
pub struct AwsSes {
    // Email address used as the `From` address of outgoing mail.
    sender: String,
    // SES-specific configuration: role ARN and STS session name.
    ses_config: SESConfig,
    // Full email settings kept so `send_email` can rebuild a client per send.
    settings: EmailSettings,
}
/// Struct that contains the AWS ses specific configs required to construct an SES email client
#[derive(Debug, Clone, Default, serde::Deserialize)]
pub struct SESConfig {
    /// The ARN of the role assumed (via STS) for sending email
    pub email_role_arn: String,
    /// The session name used when assuming the STS role
    pub sts_role_session_name: String,
}
impl SESConfig {
    /// Validation for the SES client specific configs: both the role ARN and
    /// the STS session name are mandatory.
    pub fn validate(&self) -> Result<(), &'static str> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};

        let checks = vec![
            (
                self.email_role_arn.is_default_or_empty(),
                "email.aws_ses.email_role_arn must not be empty",
            ),
            (
                self.sts_role_session_name.is_default_or_empty(),
                "email.aws_ses.sts_role_session_name must not be empty",
            ),
        ];
        for (is_invalid, message) in checks {
            when(is_invalid, || Err(message))?;
        }
        Ok(())
    }
}
/// Errors that could occur during SES operations.
#[derive(Debug, thiserror::Error)]
pub enum AwsSesError {
/// An error occurred in the SDK while sending email.
#[error("Failed to Send Email {0:?}")]
SendingFailure(Box<aws_sdk_sesv2::error::SdkError<SendEmailError>>),
/// Configuration variable is missing to construct the email client
#[error("Missing configuration variable {0}")]
MissingConfigurationVariable(&'static str),
/// Failed to assume the given STS role
#[error("Failed to STS assume role: Role ARN: {role_arn}, Session name: {session_name}, Region: {region}")]
AssumeRoleFailure {
/// Aws region
region: String,
/// arn of email role
role_arn: String,
/// The name of sts_session role
session_name: String,
},
/// Temporary credentials are missing
#[error("Assumed role does not contain credentials for role user: {0:?}")]
TemporaryCredentialsMissing(String),
/// The proxy Connector cannot be built
#[error("The proxy build cannot be built")]
BuildingProxyConnectorFailed,
}
impl AwsSes {
    /// Constructs a new AwsSes client.
    ///
    /// A client is built eagerly so configuration errors surface at startup,
    /// but the result is intentionally discarded: `send_email` rebuilds a
    /// client per email because the assumed role session expires.
    pub async fn create(
        conf: &EmailSettings,
        ses_config: &SESConfig,
        proxy_url: Option<impl AsRef<str>>,
    ) -> Self {
        // Build the client initially which will help us know if the email configuration is correct
        Self::create_client(conf, ses_config, proxy_url)
            .await
            .map_err(|error| logger::error!(?error, "Failed to initialize SES Client"))
            .ok();
        Self {
            sender: conf.sender_email.clone(),
            ses_config: ses_config.clone(),
            settings: conf.clone(),
        }
    }
    /// A helper function to create ses client.
    ///
    /// Assumes the configured role via STS and builds an SES client from the
    /// temporary credentials obtained from it.
    pub async fn create_client(
        conf: &EmailSettings,
        ses_config: &SESConfig,
        proxy_url: Option<impl AsRef<str>>,
    ) -> CustomResult<Client, AwsSesError> {
        let sts_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url.as_ref())?
            .load()
            .await;
        let role = aws_sdk_sts::Client::new(&sts_config)
            .assume_role()
            .role_arn(&ses_config.email_role_arn)
            .role_session_name(&ses_config.sts_role_session_name)
            .send()
            .await
            .change_context(AwsSesError::AssumeRoleFailure {
                region: conf.aws_region.to_owned(),
                role_arn: ses_config.email_role_arn.to_owned(),
                session_name: ses_config.sts_role_session_name.to_owned(),
            })?;
        let creds = role.credentials().ok_or(
            report!(AwsSesError::TemporaryCredentialsMissing(format!(
                "{role:?}"
            )))
            .attach_printable("Credentials object not available"),
        )?;
        let credentials = Credentials::new(
            creds.access_key_id(),
            creds.secret_access_key(),
            Some(creds.session_token().to_owned()),
            // Expiration is reported in nanoseconds since the epoch; values
            // out of `u64`/`SystemTime` range degrade to no expiry.
            u64::try_from(creds.expiration().as_nanos())
                .ok()
                .map(Duration::from_nanos)
                .and_then(|val| SystemTime::UNIX_EPOCH.checked_add(val)),
            "custom_provider",
        );
        logger::debug!(
            "Obtained SES temporary credentials with expiry {:?}",
            credentials.expiry()
        );
        let ses_config = Self::get_shared_config(conf.aws_region.to_owned(), proxy_url)?
            .credentials_provider(credentials)
            .load()
            .await;
        Ok(Client::new(&ses_config))
    }
    /// Builds the shared AWS config loader for `region`, wiring in a proxied
    /// HTTP client when a proxy URL is provided.
    fn get_shared_config(
        region: String,
        proxy_url: Option<impl AsRef<str>>,
    ) -> CustomResult<aws_config::ConfigLoader, AwsSesError> {
        let region_provider = Region::new(region);
        let mut config = aws_config::from_env().region(region_provider);
        if let Some(proxy_url) = proxy_url {
            let proxy_connector = Self::get_proxy_connector(proxy_url)?;
            let http_client = HyperClientBuilder::new().build(proxy_connector);
            config = config.http_client(http_client);
        };
        Ok(config)
    }
    /// Builds a hyper proxy connector that routes all traffic through the
    /// given proxy URL.
    fn get_proxy_connector(
        proxy_url: impl AsRef<str>,
    ) -> CustomResult<hyper_proxy::ProxyConnector<hyper::client::HttpConnector>, AwsSesError> {
        let proxy_url = proxy_url.as_ref();
        let proxy_uri = proxy_url
            .parse::<Uri>()
            // Bug fix: the message was previously a plain string literal, so
            // `{proxy_url}` was never interpolated; build it with `format!`
            // so the offending URL actually appears in the error report.
            .attach_printable_lazy(|| format!("Unable to parse the proxy url {proxy_url}"))
            .change_context(AwsSesError::BuildingProxyConnectorFailed)?;
        let proxy = hyper_proxy::Proxy::new(hyper_proxy::Intercept::All, proxy_uri);
        hyper_proxy::ProxyConnector::from_proxy(hyper::client::HttpConnector::new(), proxy)
            .change_context(AwsSesError::BuildingProxyConnectorFailed)
    }
}
#[async_trait::async_trait]
impl EmailClient for AwsSes {
    type RichText = Body;
    /// Wraps the rendered HTML into an SES `Body` with UTF-8 charset.
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError> {
        let html_content = Content::builder()
            .data(intermediate_string.into_inner())
            .charset("UTF-8")
            .build()
            .change_context(EmailError::ContentBuildFailure)?;
        Ok(Body::builder().html(html_content).build())
    }
    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        // Not using the same email client which was created at startup as the
        // role session would expire; create a client for every email sent.
        let email_client = Self::create_client(&self.settings, &self.ses_config, proxy_url)
            .await
            .change_context(EmailError::ClientBuildingFailure)?;
        let destination = Destination::builder()
            .to_addresses(recipient.peek())
            .build();
        let subject_content = Content::builder()
            .data(subject)
            .build()
            .change_context(EmailError::ContentBuildFailure)?;
        let message = Message::builder()
            .subject(subject_content)
            .body(body)
            .build();
        let content = EmailContent::builder().simple(message).build();
        email_client
            .send_email()
            .from_email_address(self.sender.to_owned())
            .destination(destination)
            .content(content)
            .send()
            .await
            .map_err(|e| AwsSesError::SendingFailure(Box::new(e)))
            .change_context(EmailError::EmailSendingFailure)?;
        Ok(())
    }
}
// File: crates/external_services/src/email/no_email.rs
use common_utils::{errors::CustomResult, pii};
use router_env::logger;
use crate::email::{EmailClient, EmailError, EmailResult, IntermediateString};
/// Client when email support is disabled
#[derive(Debug, Clone, Default, serde::Deserialize)]
pub struct NoEmailClient {}
impl NoEmailClient {
    /// Constructs a new client when email is disabled
    ///
    /// Async only to mirror the constructors of the real email clients.
    pub async fn create() -> Self {
        Self {}
    }
}
#[async_trait::async_trait]
impl EmailClient for NoEmailClient {
    // The "rich text" is just the rendered string; nothing is ever sent.
    type RichText = String;
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError> {
        Ok(intermediate_string.into_inner())
    }
    /// No-op send: logs that email support is disabled and reports success so
    /// callers do not fail when email is turned off.
    async fn send_email(
        &self,
        _recipient: pii::Email,
        _subject: String,
        _body: Self::RichText,
        _proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        logger::info!("Email not sent as email support is disabled, please enable any of the supported email clients to send emails");
        Ok(())
    }
}
// File: crates/external_services/src/email/smtp.rs
use std::time::Duration;
use common_utils::{errors::CustomResult, pii};
use error_stack::ResultExt;
use lettre::{
address::AddressError,
error,
message::{header::ContentType, Mailbox},
transport::smtp::{self, authentication::Credentials},
Message, SmtpTransport, Transport,
};
use masking::{PeekInterface, Secret};
use crate::email::{EmailClient, EmailError, EmailResult, EmailSettings, IntermediateString};
/// Client for SMTP server operation
#[derive(Debug, Clone, Default, serde::Deserialize)]
pub struct SmtpServer {
    /// sender email id (used as the `From` address)
    pub sender: String,
    /// SMTP server specific configs
    pub smtp_config: SmtpServerConfig,
}
impl SmtpServer {
    /// Builds an `SmtpTransport` from the configured connection type, host,
    /// port, timeout and optional credentials.
    pub fn create_client(&self) -> Result<SmtpTransport, SmtpError> {
        let host = self.smtp_config.host.clone();
        let port = self.smtp_config.port;
        let timeout = Some(Duration::from_secs(self.smtp_config.timeout));
        // Credentials are only applied when both username and password are set.
        let credentials = self
            .smtp_config
            .username
            .clone()
            .zip(self.smtp_config.password.clone())
            .map(|(username, password)| {
                Credentials::new(username.peek().to_owned(), password.peek().to_owned())
            });
        // Both relay constructors yield an `SmtpTransportBuilder`, so the
        // previous four-way match duplicating port/timeout/credentials wiring
        // collapses to a single builder pipeline.
        let mut builder = match &self.smtp_config.connection {
            // Plaintext connection upgraded to TLS via STARTTLS.
            SmtpConnection::StartTls => {
                SmtpTransport::starttls_relay(&host).map_err(SmtpError::ConnectionFailure)?
            }
            // Plaintext connection without transport security.
            SmtpConnection::Plaintext => SmtpTransport::builder_dangerous(&host),
        }
        .port(port)
        .timeout(timeout);
        if let Some(credentials) = credentials {
            builder = builder.credentials(credentials);
        }
        Ok(builder.build())
    }
    /// Constructs a new SMTP client from the shared email settings.
    pub async fn create(conf: &EmailSettings, smtp_config: SmtpServerConfig) -> Self {
        Self {
            sender: conf.sender_email.clone(),
            // `smtp_config` is owned by this function, so the previous
            // `.clone()` was redundant.
            smtp_config,
        }
    }
    /// helper function to convert email id into Mailbox
    fn to_mail_box(email: String) -> EmailResult<Mailbox> {
        let address = email
            .parse()
            .map_err(SmtpError::EmailParsingFailed)
            .change_context(EmailError::EmailSendingFailure)?;
        Ok(Mailbox::new(None, address))
    }
}
/// Struct that contains the SMTP server specific configs required
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct SmtpServerConfig {
    /// hostname of the SMTP server eg: smtp.gmail.com
    pub host: String,
    /// port of the SMTP server eg: 25
    pub port: u16,
    /// timeout for the SMTP server connection in seconds eg: 10
    pub timeout: u64,
    /// Username for authenticating against the SMTP server
    pub username: Option<Secret<String>>,
    /// Password of the SMTP server
    pub password: Option<Secret<String>>,
    /// Connection type of the SMTP server
    #[serde(default)]
    pub connection: SmtpConnection,
}
/// Enum that contains the connection types of the SMTP server
#[derive(Debug, Clone, Default, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum SmtpConnection {
    #[default]
    /// Plaintext connection which MUST then successfully upgrade to TLS via STARTTLS
    StartTls,
    /// Plaintext connection without any transport security (very insecure)
    Plaintext,
}
impl SmtpServerConfig {
    /// Validation for the SMTP server client specific configs: `host` is
    /// mandatory, and when credentials are present neither half may be empty.
    pub fn validate(&self) -> Result<(), &'static str> {
        use common_utils::{ext_traits::ConfigExt, fp_utils::when};
        when(self.host.is_default_or_empty(), || {
            Err("email.smtp.host must not be empty")
        })?;
        // Borrow the optional secrets instead of cloning them just to inspect
        // their contents.
        self.username.as_ref().zip(self.password.as_ref()).map_or(
            Ok(()),
            |(username, password)| {
                when(username.peek().is_default_or_empty(), || {
                    Err("email.smtp.username must not be empty")
                })?;
                when(password.peek().is_default_or_empty(), || {
                    Err("email.smtp.password must not be empty")
                })
            },
        )?;
        Ok(())
    }
}
#[async_trait::async_trait]
impl EmailClient for SmtpServer {
    type RichText = String;
    /// SMTP sends the rendered HTML as-is, so the intermediate string already
    /// is the rich-text representation.
    fn convert_to_rich_text(
        &self,
        intermediate_string: IntermediateString,
    ) -> CustomResult<Self::RichText, EmailError> {
        Ok(intermediate_string.into_inner())
    }
    async fn send_email(
        &self,
        recipient: pii::Email,
        subject: String,
        body: Self::RichText,
        _proxy_url: Option<&String>,
    ) -> EmailResult<()> {
        // A fresh transport is created for every email sent.
        let transport =
            Self::create_client(self).change_context(EmailError::EmailSendingFailure)?;
        let to_address = Self::to_mail_box(recipient.peek().to_string())?;
        let from_address = Self::to_mail_box(self.sender.clone())?;
        let message = Message::builder()
            .to(to_address)
            .from(from_address)
            .subject(subject)
            .header(ContentType::TEXT_HTML)
            .body(body)
            .map_err(SmtpError::MessageBuildingFailed)
            .change_context(EmailError::EmailSendingFailure)?;
        transport
            .send(&message)
            .map_err(SmtpError::SendingFailure)
            .change_context(EmailError::EmailSendingFailure)?;
        Ok(())
    }
}
/// Errors that could occur during SMTP operations.
#[derive(Debug, thiserror::Error)]
pub enum SmtpError {
    /// An error occurred in the SMTP transport while sending email.
    #[error("Failed to Send Email {0:?}")]
    SendingFailure(smtp::Error),
    /// An error occurred while creating the SMTP connection/relay.
    #[error("Failed to create connection {0:?}")]
    ConnectionFailure(smtp::Error),
    /// An error occurred while building the message content.
    #[error("Failed to Build Email content {0:?}")]
    MessageBuildingFailed(error::Error),
    /// An error occurred while parsing a sender or recipient email address.
    #[error("Failed to parse given email {0:?}")]
    EmailParsingFailed(AddressError),
}
</module>
|
{
"crate": "external_services",
"file": null,
"files": [
"crates/external_services/src/email/ses.rs",
"crates/external_services/src/email/no_email.rs",
"crates/external_services/src/email/smtp.rs"
],
"module": "crates/external_services/src/email",
"num_files": 3,
"token_count": 3498
}
|
module_116048505526029330
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: euclid_macros
Module: crates/euclid_macros/src/inner
Files: 2
</path>
<module>
// File: crates/euclid_macros/src/inner/enum_nums.rs
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::quote;
fn error() -> TokenStream2 {
syn::Error::new(
Span::call_site(),
"'EnumNums' can only be derived on enums with unit variants".to_string(),
)
.to_compile_error()
}
pub(crate) fn enum_nums_inner(ts: TokenStream) -> TokenStream {
let derive_input = syn::parse_macro_input!(ts as syn::DeriveInput);
let enum_obj = match derive_input.data {
syn::Data::Enum(e) => e,
_ => return error().into(),
};
let enum_name = derive_input.ident;
let mut match_arms = Vec::<TokenStream2>::with_capacity(enum_obj.variants.len());
for (i, variant) in enum_obj.variants.iter().enumerate() {
match variant.fields {
syn::Fields::Unit => {}
_ => return error().into(),
}
let var_ident = &variant.ident;
match_arms.push(quote! { Self::#var_ident => #i });
}
let impl_block = quote! {
impl #enum_name {
pub fn to_num(&self) -> usize {
match self {
#(#match_arms),*
}
}
}
};
impl_block.into()
}
// File: crates/euclid_macros/src/inner/knowledge.rs
use std::{
fmt::{Display, Formatter},
hash::Hash,
rc::Rc,
};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote};
use rustc_hash::{FxHashMap, FxHashSet};
use syn::{parse::Parse, Token};
/// Custom punctuation for rule-strength arrows: `->` (normal), `->>` (strong).
mod strength {
    syn::custom_punctuation!(Normal, ->);
    syn::custom_punctuation!(Strong, ->>);
}
/// Custom keywords recognised inside atom values.
mod kw {
    syn::custom_keyword!(any);
    syn::custom_keyword!(not);
}
/// Comparison operator attached to a numeric atom value.
#[derive(Clone, PartialEq, Eq, Hash)]
enum Comparison {
    LessThan,
    Equal,
    GreaterThan,
    GreaterThanEqual,
    LessThanEqual,
}
impl Display for Comparison {
    /// Renders the operator followed by a space (e.g. `>= `); equality renders
    /// as nothing, since a bare number implies it.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Equal => Ok(()),
            Self::LessThan => write!(f, "< "),
            Self::LessThanEqual => write!(f, "<= "),
            Self::GreaterThan => write!(f, "> "),
            Self::GreaterThanEqual => write!(f, ">= "),
        }
    }
}
impl Parse for Comparison {
    /// Parses an optional comparison operator.
    ///
    /// The two-character operators (`<=`, `>=`) MUST be checked before the
    /// single-character ones: syn's `peek(Token![<])` also matches the leading
    /// `<` of `<=` (and likewise for `>`), so the previous ordering made the
    /// `<=`/`>=` branches unreachable and left a stray `=` in the stream.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        if input.peek(Token![<=]) {
            input.parse::<Token![<=]>()?;
            Ok(Self::LessThanEqual)
        } else if input.peek(Token![>=]) {
            input.parse::<Token![>=]>()?;
            Ok(Self::GreaterThanEqual)
        } else if input.peek(Token![>]) {
            input.parse::<Token![>]>()?;
            Ok(Self::GreaterThan)
        } else if input.peek(Token![<]) {
            input.parse::<Token![<]>()?;
            Ok(Self::LessThan)
        } else {
            // No operator present: a bare number means equality.
            Ok(Self::Equal)
        }
    }
}
/// The value side of an atom: a wildcard, an enum variant name, or a number
/// with an optional comparison operator.
#[derive(Clone, PartialEq, Eq, Hash)]
enum ValueType {
    Any,
    EnumVariant(String),
    Number { number: i64, comparison: Comparison },
}
impl ValueType {
    /// Formats the value together with its key for human-readable display,
    /// producing `key(any)`, `key(Variant)` or `key(<op><number>)`.
    fn to_string(&self, key: &str) -> String {
        let rendered = match self {
            Self::Any => "any".to_string(),
            Self::EnumVariant(s) => s.clone(),
            Self::Number { number, comparison } => format!("{comparison}{number}"),
        };
        format!("{key}({rendered})")
    }
}
impl Parse for ValueType {
    /// Parses either an enum variant (a bare identifier) or a numeric value
    /// with an optional comparison (`> 5`, `<= 10`, `42`, ...).
    ///
    /// NOTE: `peek(Token![>])` / `peek(Token![<])` also match the first
    /// character of `>=` / `<=`, which is sufficient here since this peek only
    /// dispatches; the exact operator is parsed by `Comparison::parse`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(syn::Ident) {
            let ident: syn::Ident = input.parse()?;
            Ok(Self::EnumVariant(ident.to_string()))
        } else if lookahead.peek(Token![>])
            || lookahead.peek(Token![<])
            || lookahead.peek(syn::LitInt)
        {
            // The comparison (possibly the implicit `Equal`) is parsed first,
            // followed by the integer literal.
            let comparison: Comparison = input.parse()?;
            let number: syn::LitInt = input.parse()?;
            let num_val = number.base10_parse::<i64>()?;
            Ok(Self::Number {
                number: num_val,
                comparison,
            })
        } else {
            Err(lookahead.error())
        }
    }
}
/// A key/value pair as written in a rule, e.g. `CardType(Visa)`.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Atom {
    key: String,
    value: ValueType,
}
impl Display for Atom {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.value.to_string(&self.key))
    }
}
impl Parse for Atom {
    /// Parses an atom written either as `any Key` or as `Key(Value)`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let maybe_any: syn::Ident = input.parse()?;
        if maybe_any == "any" {
            // `any Key`: the first identifier was the marker, the real key
            // follows it.
            let actual_key: syn::Ident = input.parse()?;
            Ok(Self {
                key: actual_key.to_string(),
                value: ValueType::Any,
            })
        } else {
            // `Key(Value)`: parse the parenthesized value.
            let content;
            syn::parenthesized!(content in input);
            let value: ValueType = content.parse()?;
            Ok(Self {
                key: maybe_any.to_string(),
                value,
            })
        }
    }
}
/// Strength of a rule: `->` (Normal) or `->>` (Strong).
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Strength {
    Normal,
    Strong,
}
impl Parse for Strength {
    /// Parses the arrow separating a rule's LHS from its RHS. `->>` (Strong)
    /// is checked before `->` (Normal) since the former starts with the
    /// latter.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(strength::Strong) {
            input.parse::<strength::Strong>()?;
            Ok(Self::Strong)
        } else if lookahead.peek(strength::Normal) {
            input.parse::<strength::Normal>()?;
            Ok(Self::Normal)
        } else {
            Err(lookahead.error())
        }
    }
}
/// Whether an atom asserts (`Positive`) or negates (`Negative`) its value.
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Relation {
    Positive,
    Negative,
}
/// A parsed LHS atom: either a single value or an `in [..]` aggregation.
enum AtomType {
    // `Key(Value)`, `Key(not Value)`, `Key(any)`
    Value {
        relation: Relation,
        atom: Rc<Atom>,
    },
    // `Key(in [A, B, ...])`, `Key(not in [A, B, ...])`
    InAggregator {
        key: String,
        values: Vec<String>,
        relation: Relation,
    },
}
/// Parses the part of an LHS atom that follows the optional `not` marker:
/// an `in [A, B, ...]` aggregation, the `any` wildcard, or a single value.
fn parse_atom_type_inner(
    input: syn::parse::ParseStream<'_>,
    key: syn::Ident,
    relation: Relation,
) -> syn::Result<AtomType> {
    let result = if input.peek(Token![in]) {
        // `in [First, Second, ...]` — at least one variant is required.
        input.parse::<Token![in]>()?;
        let bracketed;
        syn::bracketed!(bracketed in input);
        let mut values = Vec::<String>::new();
        let first: syn::Ident = bracketed.parse()?;
        values.push(first.to_string());
        while !bracketed.is_empty() {
            bracketed.parse::<Token![,]>()?;
            let next: syn::Ident = bracketed.parse()?;
            values.push(next.to_string());
        }
        AtomType::InAggregator {
            key: key.to_string(),
            values,
            relation,
        }
    } else if input.peek(kw::any) {
        // `any` wildcard value.
        input.parse::<kw::any>()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value: ValueType::Any,
            }),
        }
    } else {
        // A concrete value: enum variant or numeric comparison.
        let value: ValueType = input.parse()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value,
            }),
        }
    };
    Ok(result)
}
impl Parse for AtomType {
    /// Parses a full LHS atom `Key(...)`; the parenthesized content may start
    /// with `not` to negate the relation.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let key: syn::Ident = input.parse()?;
        let content;
        syn::parenthesized!(content in input);
        let relation = if content.peek(kw::not) {
            content.parse::<kw::not>()?;
            Relation::Negative
        } else {
            Relation::Positive
        };
        let result = parse_atom_type_inner(&content, key, relation)?;
        // The parentheses must contain nothing beyond the parsed value.
        if !content.is_empty() {
            Err(content.error("Unexpected input received after atom value"))
        } else {
            Ok(result)
        }
    }
}
/// Parses the RHS atom of a rule: `Key(any)` or `Key(Variant)`. Numbers and
/// negation are not permitted on the right-hand side.
fn parse_rhs_atom(input: syn::parse::ParseStream<'_>) -> syn::Result<Atom> {
    let key: syn::Ident = input.parse()?;
    let content;
    syn::parenthesized!(content in input);
    let lookahead = content.lookahead1();
    let value_type = if lookahead.peek(kw::any) {
        content.parse::<kw::any>()?;
        ValueType::Any
    } else if lookahead.peek(syn::Ident) {
        let variant = content.parse::<syn::Ident>()?;
        ValueType::EnumVariant(variant.to_string())
    } else {
        return Err(lookahead.error());
    };
    // The parentheses must contain nothing beyond the parsed value.
    if !content.is_empty() {
        Err(content.error("Unexpected input received after atom value"))
    } else {
        Ok(Atom {
            key: key.to_string(),
            value: value_type,
        })
    }
}
/// A single knowledge rule: one or more LHS atoms joined with `&`, an arrow
/// (`->` or `->>`), and an RHS atom, terminated by `;`.
struct Rule {
    lhs: Vec<AtomType>,
    strength: Strength,
    rhs: Rc<Atom>,
}
impl Parse for Rule {
    /// Parses `atom (& atom)* (-> | ->>) rhs_atom ;`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let first_atom: AtomType = input.parse()?;
        let mut lhs: Vec<AtomType> = vec![first_atom];
        // Additional LHS atoms are chained with `&`.
        while input.peek(Token![&]) {
            input.parse::<Token![&]>()?;
            let and_atom: AtomType = input.parse()?;
            lhs.push(and_atom);
        }
        let strength: Strength = input.parse()?;
        let rhs: Rc<Atom> = Rc::new(parse_rhs_atom(input)?);
        input.parse::<Token![;]>()?;
        Ok(Self { lhs, strength, rhs })
    }
}
/// The whole macro input: a sequence of rules parsed until exhaustion.
#[derive(Clone)]
struct Program {
    rules: Vec<Rc<Rule>>,
}
impl Parse for Program {
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let mut rules: Vec<Rc<Rule>> = Vec::new();
        while !input.is_empty() {
            rules.push(Rc::new(input.parse::<Rule>()?));
        }
        Ok(Self { rules })
    }
}
/// State accumulated while lowering parsed rules into graph-building code.
struct GenContext {
    // Next graph-node index to hand out (starts at 1).
    next_idx: usize,
    // Next suffix for generated `_node_<n>` identifiers.
    next_node_idx: usize,
    // Bidirectional mapping between node indices and their atoms.
    idx2atom: FxHashMap<usize, Rc<Atom>>,
    atom2idx: FxHashMap<Rc<Atom>, usize>,
    // Adjacency list of rule edges, used for cycle detection.
    edges: FxHashMap<usize, FxHashSet<usize>>,
    // Atoms already lowered to a `make_value_node` call, keyed to their ident.
    compiled_atoms: FxHashMap<Rc<Atom>, proc_macro2::Ident>,
}
impl GenContext {
    /// Creates an empty generation context; node indices start at 1.
    fn new() -> Self {
        Self {
            next_idx: 1,
            next_node_idx: 1,
            idx2atom: FxHashMap::default(),
            atom2idx: FxHashMap::default(),
            edges: FxHashMap::default(),
            compiled_atoms: FxHashMap::default(),
        }
    }
fn register_node(&mut self, atom: Rc<Atom>) -> usize {
if let Some(idx) = self.atom2idx.get(&atom) {
*idx
} else {
let this_idx = self.next_idx;
self.next_idx += 1;
self.idx2atom.insert(this_idx, Rc::clone(&atom));
self.atom2idx.insert(atom, this_idx);
this_idx
}
}
fn register_edge(&mut self, from: usize, to: usize) -> Result<(), String> {
let node_children = self.edges.entry(from).or_default();
if node_children.contains(&to) {
Err("Duplicate edge detected".to_string())
} else {
node_children.insert(to);
self.edges.entry(to).or_default();
Ok(())
}
}
fn register_rule(&mut self, rule: &Rule) -> Result<(), String> {
let to_idx = self.register_node(Rc::clone(&rule.rhs));
for atom_type in &rule.lhs {
if let AtomType::Value { atom, .. } = atom_type {
let from_idx = self.register_node(Rc::clone(atom));
self.register_edge(from_idx, to_idx)?;
}
}
Ok(())
}
fn cycle_dfs(
&self,
node_id: usize,
explored: &mut FxHashSet<usize>,
visited: &mut FxHashSet<usize>,
order: &mut Vec<usize>,
) -> Result<Option<Vec<usize>>, String> {
if explored.contains(&node_id) {
let position = order
.iter()
.position(|v| *v == node_id)
.ok_or_else(|| "Error deciding cycle order".to_string())?;
let cycle_order = order
.get(position..)
.ok_or_else(|| "Error getting cycle order".to_string())?
.to_vec();
Ok(Some(cycle_order))
} else if visited.contains(&node_id) {
Ok(None)
} else {
visited.insert(node_id);
explored.insert(node_id);
order.push(node_id);
let dests = self
.edges
.get(&node_id)
.ok_or_else(|| "Error getting edges of node".to_string())?;
for dest in dests.iter().copied() {
if let Some(cycle) = self.cycle_dfs(dest, explored, visited, order)? {
return Ok(Some(cycle));
}
}
order.pop();
Ok(None)
}
}
    /// Runs cycle detection from every node, returning a human-readable error
    /// describing the first cycle found (if any).
    fn detect_graph_cycles(&self) -> Result<(), String> {
        let start_nodes = self.edges.keys().copied().collect::<Vec<usize>>();
        // Shared across start nodes so each node is fully explored only once.
        let mut total_visited = FxHashSet::<usize>::default();
        for node_id in start_nodes.iter().copied() {
            let mut explored = FxHashSet::<usize>::default();
            let mut order = Vec::<usize>::new();
            match self.cycle_dfs(node_id, &mut explored, &mut total_visited, &mut order)? {
                None => {}
                Some(order) => {
                    // Render the cycle as `a -> b -> ... -> a` by repeating the
                    // first atom at the end.
                    let mut display_strings = Vec::<String>::with_capacity(order.len() + 1);
                    for cycle_node_id in order {
                        let node = self.idx2atom.get(&cycle_node_id).ok_or_else(|| {
                            "Failed to find node during cycle display creation".to_string()
                        })?;
                        display_strings.push(node.to_string());
                    }
                    let first = display_strings
                        .first()
                        .cloned()
                        .ok_or("Unable to fill cycle display array")?;
                    display_strings.push(first);
                    return Err(format!("Found cycle: {}", display_strings.join(" -> ")));
                }
            }
        }
        Ok(())
    }
fn next_node_ident(&mut self) -> (proc_macro2::Ident, usize) {
let this_idx = self.next_node_idx;
self.next_node_idx += 1;
(format_ident!("_node_{this_idx}"), this_idx)
}
    /// Emits (or reuses) the value-node construction code for a single atom
    /// and returns the generated local identifier holding the node id.
    ///
    /// Atoms are memoized in `compiled_atoms`, so the same key/value pair is
    /// only ever turned into one graph node.
    fn compile_atom(
        &mut self,
        atom: &Rc<Atom>,
        tokens: &mut TokenStream,
    ) -> Result<proc_macro2::Ident, String> {
        // Reuse the node if this exact atom was compiled before.
        let maybe_ident = self.compiled_atoms.get(atom);
        if let Some(ident) = maybe_ident {
            Ok(ident.clone())
        } else {
            let (identifier, _) = self.next_node_ident();
            let key = format_ident!("{}", &atom.key);
            // Build the `NodeValue` expression for the atom's value shape.
            let the_value = match &atom.value {
                // `Any` matches on the key alone (no concrete value).
                ValueType::Any => quote! {
                    cgraph::NodeValue::Key(DirKey::new(DirKeyKind::#key,None))
                },
                // A single enum variant of the key's enum type.
                ValueType::EnumVariant(variant) => {
                    let variant = format_ident!("{}", variant);
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(#key::#variant))
                    }
                }
                // A numeric value, optionally refined by a comparison operator.
                ValueType::Number { number, comparison } => {
                    let comp_type = match comparison {
                        Comparison::Equal => quote! {
                            None
                        },
                        Comparison::LessThan => quote! {
                            Some(NumValueRefinement::LessThan)
                        },
                        Comparison::GreaterThan => quote! {
                            Some(NumValueRefinement::GreaterThan)
                        },
                        Comparison::GreaterThanEqual => quote! {
                            Some(NumValueRefinement::GreaterThanEqual)
                        },
                        Comparison::LessThanEqual => quote! {
                            Some(NumValueRefinement::LessThanEqual)
                        },
                    };
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(NumValue {
                            number: #number,
                            refinement: #comp_type,
                        }))
                    }
                }
            };
            let compiled = quote! {
                let #identifier = graph.make_value_node(#the_value, None, None::<()>);
            };
            tokens.extend(compiled);
            // Remember the identifier so later rules referencing the same atom
            // share this node.
            self.compiled_atoms
                .insert(Rc::clone(atom), identifier.clone());
            Ok(identifier)
        }
    }
fn compile_atom_type(
&mut self,
atom_type: &AtomType,
tokens: &mut TokenStream,
) -> Result<(proc_macro2::Ident, Relation), String> {
match atom_type {
AtomType::Value { relation, atom } => {
let node_ident = self.compile_atom(atom, tokens)?;
Ok((node_ident, relation.clone()))
}
AtomType::InAggregator {
key,
values,
relation,
} => {
let key_ident = format_ident!("{key}");
let mut values_tokens: Vec<TokenStream> = Vec::new();
for value in values {
let value_ident = format_ident!("{value}");
values_tokens.push(quote! { DirValue::#key_ident(#key_ident::#value_ident) });
}
let (node_ident, _) = self.next_node_ident();
let node_code = quote! {
let #node_ident = graph.make_in_aggregator(
Vec::from_iter([#(#values_tokens),*]),
None,
None::<()>,
).expect("Failed to make In aggregator");
};
tokens.extend(node_code);
Ok((node_ident, relation.clone()))
}
}
}
    /// Emits the edges for one rule: every LHS atom is connected to the RHS
    /// node, either directly (zero or one LHS atom) or through an
    /// all-aggregator node (two or more LHS atoms).
    fn compile_rule(&mut self, rule: &Rule, tokens: &mut TokenStream) -> Result<(), String> {
        let rhs_ident = self.compile_atom(&rule.rhs, tokens)?;
        let mut node_details: Vec<(proc_macro2::Ident, Relation)> =
            Vec::with_capacity(rule.lhs.len());
        for lhs_atom_type in &rule.lhs {
            let details = self.compile_atom_type(lhs_atom_type, tokens)?;
            node_details.push(details);
        }
        if node_details.len() <= 1 {
            // Single (or no) LHS atom: a direct edge carrying the rule's
            // strength and the atom's own relation.
            let strength = format_ident!("{}", rule.strength.to_string());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                tokens.extend(quote! {
                    graph.make_edge(#from_node, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::#relation, None::<cgraph::DomainId>)
                        .expect("Failed to make edge");
                });
            }
        } else {
            // Multiple LHS atoms: combine them with an all-aggregator. Each
            // member participates with Strong strength; the aggregate edge to
            // the RHS carries the rule's declared strength.
            let mut all_agg_nodes: Vec<TokenStream> = Vec::with_capacity(node_details.len());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                all_agg_nodes.push(
                    quote! { (#from_node, cgraph::Relation::#relation, cgraph::Strength::Strong) },
                );
            }
            let strength = format_ident!("{}", rule.strength.to_string());
            let (agg_node_ident, _) = self.next_node_ident();
            tokens.extend(quote! {
                let #agg_node_ident = graph.make_all_aggregator(&[#(#all_agg_nodes),*], None, None::<()>, None)
                    .expect("Failed to make all aggregator node");
                graph.make_edge(#agg_node_ident, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::Positive, None::<cgraph::DomainId>)
                    .expect("Failed to create all aggregator edge");
            });
        }
        Ok(())
    }
fn compile(&mut self, program: Program) -> Result<TokenStream, String> {
let mut tokens = TokenStream::new();
for rule in &program.rules {
self.compile_rule(rule, &mut tokens)?;
}
let compiled = quote! {{
use euclid_graph_prelude::*;
let mut graph = cgraph::ConstraintGraphBuilder::new();
#tokens
graph.build()
}};
Ok(compiled)
}
}
pub(crate) fn knowledge_inner(ts: TokenStream) -> syn::Result<TokenStream> {
let program = syn::parse::<Program>(ts.into())?;
let mut gen_context = GenContext::new();
for rule in &program.rules {
gen_context
.register_rule(rule)
.map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
}
gen_context
.detect_graph_cycles()
.map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
gen_context
.compile(program)
.map_err(|msg| syn::Error::new(Span::call_site(), msg))
}
</module>
|
{
"crate": "euclid_macros",
"file": null,
"files": [
"crates/euclid_macros/src/inner/enum_nums.rs",
"crates/euclid_macros/src/inner/knowledge.rs"
],
"module": "crates/euclid_macros/src/inner",
"num_files": 2,
"token_count": 4528
}
|
module_-2158947015841127859
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: masking
Module: crates/masking/src
Files: 13
</path>
<module>
// File: crates/masking/src/strategy.rs
use core::fmt;
/// Debugging trait which is specialized for handling secret values
pub trait Strategy<T> {
    /// Format information about the secret's type.
    fn fmt(value: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result;
}
/// Debug with type: renders as `*** <full::type::path> ***` without ever
/// reading the secret value itself.
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum WithType {}
impl<T> Strategy<T> for WithType {
    fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.write_str("*** ")?;
        fmt.write_str(std::any::type_name::<T>())?;
        fmt.write_str(" ***")
    }
}
/// Debug without type: renders as the fixed string `*** ***`.
pub enum WithoutType {}
impl<T> Strategy<T> for WithoutType {
    fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt.write_str("*** ***")
    }
}
// File: crates/masking/src/abs.rs
//! Abstract data types.
use crate::Secret;
/// Interface to expose a reference to an inner secret
pub trait PeekInterface<S> {
    /// Only method providing access to the secret value.
    fn peek(&self) -> &S;
    /// Provide a mutable reference to the inner value.
    fn peek_mut(&mut self) -> &mut S;
}
/// Interface that consumes an optional secret and returns the inner value.
pub trait ExposeOptionInterface<S> {
    /// Expose the value inside the option, consuming it.
    fn expose_option(self) -> S;
}
/// Interface that consumes a secret and returns the inner value.
pub trait ExposeInterface<S> {
    /// Consume the secret and return the inner value
    fn expose(self) -> S;
}
// `S: Clone` is deliberately not required here: `expose` takes ownership of
// the inner value, so exposing an `Option`-wrapped secret never clones.
impl<S, I> ExposeOptionInterface<Option<S>> for Option<Secret<S, I>>
where
    I: crate::Strategy<S>,
{
    /// Unwraps the inner secret, passing `None` through unchanged.
    fn expose_option(self) -> Option<S> {
        self.map(ExposeInterface::expose)
    }
}
impl<S, I> ExposeInterface<S> for Secret<S, I>
where
    I: crate::Strategy<S>,
{
    /// Moves the inner value out of the secret wrapper.
    fn expose(self) -> S {
        self.inner_secret
    }
}
/// Interface that consumes a secret and converts it to a secret with a different masking strategy.
pub trait SwitchStrategy<FromStrategy, ToStrategy> {
    /// The type returned by `switch_strategy()`.
    type Output;
    /// Consumes the secret and converts it to a secret with a different masking strategy.
    fn switch_strategy(self) -> Self::Output;
}
impl<S, FromStrategy, ToStrategy> SwitchStrategy<FromStrategy, ToStrategy>
    for Secret<S, FromStrategy>
where
    FromStrategy: crate::Strategy<S>,
    ToStrategy: crate::Strategy<S>,
{
    type Output = Secret<S, ToStrategy>;
    // Rewraps the same inner value; the strategy is only a type-level marker,
    // so no data is copied or transformed.
    fn switch_strategy(self) -> Self::Output {
        Secret::new(self.inner_secret)
    }
}
// File: crates/masking/src/strong_secret.rs
//! Structure describing secret.
use std::{fmt, marker::PhantomData};
use subtle::ConstantTimeEq;
use zeroize::{self, Zeroize as ZeroizableSecret};
use crate::{strategy::Strategy, PeekInterface};
/// Secret wrapper whose inner value is zeroized in memory when dropped.
///
/// To get access to value use method `expose()` of trait [`crate::ExposeInterface`].
pub struct StrongSecret<Secret: ZeroizableSecret, MaskingStrategy = crate::WithType> {
    /// Inner secret value
    pub(crate) inner_secret: Secret,
    /// Zero-sized marker selecting how the value is rendered by
    /// `Debug`/`Display` (see [`crate::Strategy`]).
    pub(crate) masking_strategy: PhantomData<MaskingStrategy>,
}
impl<Secret: ZeroizableSecret, MaskingStrategy> StrongSecret<Secret, MaskingStrategy> {
    /// Take ownership of a secret value
    pub fn new(secret: Secret) -> Self {
        Self {
            inner_secret: secret,
            masking_strategy: PhantomData,
        }
    }
}
impl<Secret: ZeroizableSecret, MaskingStrategy> PeekInterface<Secret>
    for StrongSecret<Secret, MaskingStrategy>
{
    // Borrow the protected value without consuming the wrapper.
    fn peek(&self) -> &Secret {
        &self.inner_secret
    }
    fn peek_mut(&mut self) -> &mut Secret {
        &mut self.inner_secret
    }
}
impl<Secret: ZeroizableSecret, MaskingStrategy> From<Secret>
    for StrongSecret<Secret, MaskingStrategy>
{
    // Any zeroizable value can be lifted into a `StrongSecret` directly.
    fn from(secret: Secret) -> Self {
        Self::new(secret)
    }
}
impl<Secret: Clone + ZeroizableSecret, MaskingStrategy> Clone
    for StrongSecret<Secret, MaskingStrategy>
{
    /// Clones the protected value into a brand-new `StrongSecret` wrapper.
    fn clone(&self) -> Self {
        Self::new(self.inner_secret.clone())
    }
}
impl<Secret, MaskingStrategy> PartialEq for StrongSecret<Secret, MaskingStrategy>
where
    Self: PeekInterface<Secret>,
    Secret: ZeroizableSecret + StrongEq,
{
    // Equality goes through `StrongEq`, whose impls below use constant-time
    // byte comparison (`subtle::ConstantTimeEq`).
    fn eq(&self, other: &Self) -> bool {
        StrongEq::strong_eq(self.peek(), other.peek())
    }
}
impl<Secret, MaskingStrategy> Eq for StrongSecret<Secret, MaskingStrategy>
where
    Self: PeekInterface<Secret>,
    Secret: ZeroizableSecret + StrongEq,
{
}
// Both `Debug` and `Display` route through the masking strategy, so the
// secret value never leaks via formatting.
impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Debug
    for StrongSecret<Secret, MaskingStrategy>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}
impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Display
    for StrongSecret<Secret, MaskingStrategy>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}
/// Defaults to a `StrongSecret` wrapping `Secret::default()`.
// The original spelled `ZeroizableSecret` both inline and in a `where`
// clause; the duplicate bound is merged here.
impl<Secret: ZeroizableSecret + Default, MaskingStrategy> Default
    for StrongSecret<Secret, MaskingStrategy>
{
    fn default() -> Self {
        Secret::default().into()
    }
}
impl<Secret: ZeroizableSecret, MaskingStrategy> Drop for StrongSecret<Secret, MaskingStrategy> {
    // Overwrite the secret's memory via `Zeroize` before it is released.
    fn drop(&mut self) {
        self.inner_secret.zeroize();
    }
}
/// Internal equality trait backing `StrongSecret`'s `PartialEq`;
/// implementations are expected to compare in constant time.
trait StrongEq {
    fn strong_eq(&self, other: &Self) -> bool;
}
impl StrongEq for String {
    /// Constant-time comparison of the UTF-8 bytes (via `subtle::ConstantTimeEq`).
    fn strong_eq(&self, other: &Self) -> bool {
        bool::from(self.as_bytes().ct_eq(other.as_bytes()))
    }
}
impl StrongEq for Vec<u8> {
    /// Constant-time comparison of the raw bytes (via `subtle::ConstantTimeEq`).
    fn strong_eq(&self, other: &Self) -> bool {
        bool::from(self.as_slice().ct_eq(other.as_slice()))
    }
}
#[cfg(feature = "proto_tonic")]
// Protobuf support: encoding/length/clear delegate transparently to the
// inner message.
impl<T> prost::Message for StrongSecret<T, crate::WithType>
where
    T: prost::Message + Default + Clone + ZeroizableSecret,
{
    fn encode_raw(&self, buf: &mut impl bytes::BufMut) {
        self.peek().encode_raw(buf);
    }
    fn merge_field(
        &mut self,
        tag: u32,
        wire_type: prost::encoding::WireType,
        buf: &mut impl bytes::Buf,
        ctx: prost::encoding::DecodeContext,
    ) -> Result<(), prost::DecodeError> {
        // NOTE(review): only fields at tag 1 are merged into the inner value;
        // every other tag is skipped. `encode_raw` above delegates all fields,
        // so this asymmetry is only sound if T is always encoded as a single
        // field at tag 1 — confirm against the message definitions using this.
        if tag == 1 {
            self.peek_mut().merge_field(tag, wire_type, buf, ctx)
        } else {
            prost::encoding::skip_field(wire_type, tag, buf, ctx)
        }
    }
    fn encoded_len(&self) -> usize {
        self.peek().encoded_len()
    }
    fn clear(&mut self) {
        self.peek_mut().clear();
    }
}
// File: crates/masking/src/serde.rs
//! Serde-related.
pub use erased_serde::Serialize as ErasedSerialize;
pub use serde::{de, Deserialize, Serialize, Serializer};
use serde_json::{value::Serializer as JsonValueSerializer, Value};
use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
/// Marker trait for secret types which can be [`Serialize`]-d by [`serde`].
///
/// When the `serde` feature of this crate is enabled and types are marked with
/// this trait, they receive a [`Serialize` impl] for `Secret<T>`.
/// (NOTE: all types which impl `DeserializeOwned` receive a [`Deserialize`]
/// impl)
///
/// This is done deliberately to prevent accidental exfiltration of secrets
/// via `serde` serialization.
#[cfg_attr(docsrs, cfg(feature = "serde"))]
pub trait SerializableSecret: Serialize {}
// #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
// pub trait NonSerializableSecret: Serialize {}
// Opt-in list of types considered safe to serialize when wrapped in a secret.
impl SerializableSecret for Value {}
impl SerializableSecret for u8 {}
impl SerializableSecret for u16 {}
impl SerializableSecret for i8 {}
impl SerializableSecret for i32 {}
impl SerializableSecret for i64 {}
impl SerializableSecret for url::Url {}
#[cfg(feature = "time")]
impl SerializableSecret for time::Date {}
// A reference to a serializable secret is itself a serializable secret.
impl<T: SerializableSecret> SerializableSecret for &T {}
// Deserialization is unconditional: any `DeserializeOwned` type can be read
// *into* a secret wrapper.
impl<'de, T, I> Deserialize<'de> for Secret<T, I>
where
    T: Clone + de::DeserializeOwned + Sized,
    I: Strategy<T>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(Self::new)
    }
}
// Serialization requires the `SerializableSecret` opt-in marker and routes
// through `pii_serialize`, which masks the value under the PII serializer.
impl<T, I> Serialize for Secret<T, I>
where
    T: SerializableSecret + Serialize + Sized,
    I: Strategy<T>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        pii_serializer::pii_serialize(self, serializer)
    }
}
impl<'de, T, I> Deserialize<'de> for StrongSecret<T, I>
where
    T: Clone + de::DeserializeOwned + Sized + ZeroizableSecret,
    I: Strategy<T>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(Self::new)
    }
}
impl<T, I> Serialize for StrongSecret<T, I>
where
    T: SerializableSecret + Serialize + ZeroizableSecret + Sized,
    I: Strategy<T>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        pii_serializer::pii_serialize(self, serializer)
    }
}
/// Masked serialization.
///
/// The default behaviour for secrets is to serialize in exposed format, since the common use cases
/// for storing the secret to database or sending it over the network require the secret to be exposed.
/// This method allows serializing the secret in masked format when needed for logs or other insecure exposures.
pub fn masked_serialize<T: Serialize>(value: &T) -> Result<Value, serde_json::Error> {
    value.serialize(PIISerializer {
        inner: JsonValueSerializer,
    })
}
/// Masked serialization.
///
/// Trait object for supporting serialization to Value while accounting for masking
/// The usual Serde Serialize trait cannot be used as trait objects
/// like &dyn Serialize or boxed trait objects like Box<dyn Serialize> because of Rust's "object safety" rules.
/// In particular, the trait contains generic methods which cannot be made into a trait object.
/// In this case we remove the generic for assuming the serialization to be of 2 types only raw json or masked json
pub trait ErasedMaskSerialize: ErasedSerialize {
    /// Masked serialization.
    fn masked_serialize(&self) -> Result<Value, serde_json::Error>;
}
// Blanket impl: every ordinary `Serialize` type gets the masked variant.
impl<T: Serialize + ErasedSerialize> ErasedMaskSerialize for T {
    fn masked_serialize(&self) -> Result<Value, serde_json::Error> {
        masked_serialize(self)
    }
}
// Make the trait object itself serializable (with and without `Send`).
impl Serialize for dyn ErasedMaskSerialize + '_ {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        erased_serde::serialize(self, serializer)
    }
}
impl Serialize for dyn ErasedMaskSerialize + '_ + Send {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        erased_serde::serialize(self, serializer)
    }
}
use pii_serializer::PIISerializer;
/// Internal serializer machinery that redacts `PeekInterface`-wrapped values
/// when (and only when) serialization is driven by [`PIISerializer`].
mod pii_serializer {
    use std::fmt::Display;
    /// Serializes `value` according to which serializer is driving it:
    /// the masked `Debug` rendering under [`PIISerializer`], an empty map when
    /// flattened (`#[serde(flatten)]`) over [`PIISerializer`], and the raw
    /// inner value for any other serializer.
    pub(super) fn pii_serialize<
        V: Serialize,
        T: std::fmt::Debug + PeekInterface<V>,
        S: Serializer,
    >(
        value: &T,
        serializer: S,
    ) -> Result<S::Ok, S::Error> {
        // Mask the value if the serializer is of type PIISerializer
        // or send empty map if the serializer is of type FlatMapSerializer over PiiSerializer
        // NOTE(review): detection compares `std::any::type_name` strings,
        // including the unstable `serde::__private` path — confirm this
        // survives serde version upgrades.
        if std::any::type_name::<S>() == std::any::type_name::<PIISerializer>() {
            format!("{value:?}").serialize(serializer)
        } else if std::any::type_name::<S>()
            == std::any::type_name::<
                serde::__private::ser::FlatMapSerializer<'_, SerializeMap<PIISerializer>>,
            >()
        {
            std::collections::HashMap::<String, String>::from([]).serialize(serializer)
        } else {
            value.peek().serialize(serializer)
        }
    }
    use serde::{Serialize, Serializer};
    use serde_json::{value::Serializer as JsonValueSerializer, Map, Value};
    use crate::PeekInterface;
    /// A `serde_json`-backed serializer whose concrete type signals (via the
    /// type-name check above) that secrets must be rendered masked.
    pub(super) struct PIISerializer {
        pub inner: JsonValueSerializer,
    }
    impl Clone for PIISerializer {
        // `JsonValueSerializer` is stateless, so cloning simply rebuilds it.
        fn clone(&self) -> Self {
            Self {
                inner: JsonValueSerializer,
            }
        }
    }
    // Mirrors `serde_json::value::Serializer`, but threads `Self` through the
    // compound serializers so nested secrets are also masked.
    impl Serializer for PIISerializer {
        type Ok = Value;
        type Error = serde_json::Error;
        type SerializeSeq = SerializeVec<Self>;
        type SerializeTuple = SerializeVec<Self>;
        type SerializeTupleStruct = SerializeVec<Self>;
        type SerializeTupleVariant = SerializeTupleVariant<Self>;
        type SerializeMap = SerializeMap<Self>;
        type SerializeStruct = SerializeMap<Self>;
        type SerializeStructVariant = SerializeStructVariant<Self>;
        #[inline]
        fn serialize_bool(self, value: bool) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_bool(value)
        }
        #[inline]
        fn serialize_i8(self, value: i8) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        #[inline]
        fn serialize_i16(self, value: i16) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        #[inline]
        fn serialize_i32(self, value: i32) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        fn serialize_i64(self, value: i64) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_i64(value)
        }
        fn serialize_i128(self, value: i128) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_i128(value)
        }
        #[inline]
        fn serialize_u8(self, value: u8) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u16(self, value: u16) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u32(self, value: u32) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u64(self, value: u64) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Number(value.into()))
        }
        fn serialize_u128(self, value: u128) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_u128(value)
        }
        #[inline]
        fn serialize_f32(self, float: f32) -> Result<Self::Ok, Self::Error> {
            Ok(Value::from(float))
        }
        #[inline]
        fn serialize_f64(self, float: f64) -> Result<Self::Ok, Self::Error> {
            Ok(Value::from(float))
        }
        #[inline]
        fn serialize_char(self, value: char) -> Result<Self::Ok, Self::Error> {
            let mut s = String::new();
            s.push(value);
            Ok(Value::String(s))
        }
        #[inline]
        fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {
            Ok(Value::String(value.to_owned()))
        }
        fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {
            let vec = value.iter().map(|&b| Value::Number(b.into())).collect();
            Ok(Value::Array(vec))
        }
        #[inline]
        fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Null)
        }
        #[inline]
        fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
            self.serialize_unit()
        }
        #[inline]
        fn serialize_unit_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
        ) -> Result<Self::Ok, Self::Error> {
            self.serialize_str(variant)
        }
        #[inline]
        fn serialize_newtype_struct<T>(
            self,
            _name: &'static str,
            value: &T,
        ) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            value.serialize(self)
        }
        fn serialize_newtype_variant<T>(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            value: &T,
        ) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            let mut values = Map::new();
            values.insert(String::from(variant), value.serialize(self)?);
            Ok(Value::Object(values))
        }
        #[inline]
        fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
            self.serialize_unit()
        }
        #[inline]
        fn serialize_some<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            value.serialize(self)
        }
        fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
            Ok(SerializeVec {
                vec: Vec::with_capacity(len.unwrap_or(0)),
                ser: self,
            })
        }
        fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
            self.serialize_seq(Some(len))
        }
        fn serialize_tuple_struct(
            self,
            _name: &'static str,
            len: usize,
        ) -> Result<Self::SerializeTupleStruct, Self::Error> {
            self.serialize_seq(Some(len))
        }
        fn serialize_tuple_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            len: usize,
        ) -> Result<Self::SerializeTupleVariant, Self::Error> {
            Ok(SerializeTupleVariant {
                name: String::from(variant),
                vec: Vec::with_capacity(len),
                ser: self,
            })
        }
        fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
            Ok(SerializeMap {
                inner: self.clone().inner.serialize_map(len)?,
                ser: self,
            })
        }
        fn serialize_struct(
            self,
            _name: &'static str,
            len: usize,
        ) -> Result<Self::SerializeStruct, Self::Error> {
            self.serialize_map(Some(len))
        }
        fn serialize_struct_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            _len: usize,
        ) -> Result<Self::SerializeStructVariant, Self::Error> {
            Ok(SerializeStructVariant {
                name: String::from(variant),
                map: Map::new(),
                ser: self,
            })
        }
        fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Display,
        {
            self.inner.collect_str(value)
        }
    }
    // Sequence/tuple builder: elements are serialized with a clone of the
    // driving serializer so masking applies recursively.
    pub(super) struct SerializeVec<T: Serializer> {
        vec: Vec<Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeSeq for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.vec.push(value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Array(self.vec))
        }
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTuple for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeSeq::serialize_element(self, value)
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            serde::ser::SerializeSeq::end(self)
        }
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleStruct for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeSeq::serialize_element(self, value)
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            serde::ser::SerializeSeq::end(self)
        }
    }
    // Struct-variant builder: `end` wraps the field map under the variant name.
    pub(super) struct SerializeStructVariant<T: Serializer> {
        name: String,
        map: Map<String, Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeStructVariant
        for SerializeStructVariant<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, key: &'static str, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.map
                .insert(String::from(key), value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            let mut object = Map::new();
            object.insert(self.name, Value::Object(self.map));
            Ok(Value::Object(object))
        }
    }
    // Tuple-variant builder: `end` wraps the element array under the variant name.
    pub(super) struct SerializeTupleVariant<T: Serializer> {
        name: String,
        vec: Vec<Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleVariant
        for SerializeTupleVariant<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.vec.push(value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Value, Self::Error> {
            let mut object = Map::new();
            object.insert(self.name, Value::Array(self.vec));
            Ok(Value::Object(object))
        }
    }
    // Map/struct builder: keys go through the plain JSON serializer, while
    // values go through the masking serializer (`ser`).
    pub(super) struct SerializeMap<T: Serializer> {
        inner: <serde_json::value::Serializer as Serializer>::SerializeMap,
        ser: T,
    }
    impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeMap
        for SerializeMap<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_key<V>(&mut self, key: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.inner.serialize_key(key)?;
            Ok(())
        }
        fn serialize_value<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            let value = value.serialize(self.ser.clone())?;
            self.inner.serialize_value(&value)?;
            Ok(())
        }
        fn end(self) -> Result<Value, Self::Error> {
            self.inner.end()
        }
    }
    impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeStruct
        for SerializeMap<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, key: &'static str, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeMap::serialize_entry(self, key, value)
        }
        fn end(self) -> Result<Value, Self::Error> {
            serde::ser::SerializeMap::end(self)
        }
    }
}
// File: crates/masking/src/maskable.rs
//! This module contains Masking objects and traits
use crate::{ExposeInterface, Secret};
/// An Enum that allows us to optionally mask data, based on which enum variant that data is stored
/// in.
#[derive(Clone, Eq, PartialEq)]
pub enum Maskable<T: Eq + PartialEq + Clone> {
    /// Variant which masks the data by wrapping in a Secret
    Masked(Secret<T>),
    /// Variant which doesn't mask the data
    Normal(T),
}
impl<T: std::fmt::Debug + Clone + Eq + PartialEq> std::fmt::Debug for Maskable<T> {
    // `Masked` delegates to `Secret`'s (redacting) Debug; `Normal` prints the
    // value as-is.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Masked(secret_value) => std::fmt::Debug::fmt(secret_value, f),
            Self::Normal(value) => std::fmt::Debug::fmt(value, f),
        }
    }
}
impl<T: Eq + PartialEq + Clone + std::hash::Hash> std::hash::Hash for Maskable<T> {
    // Both variants hash the underlying value, so equal contents hash
    // identically regardless of masking.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        match self {
            Self::Masked(value) => crate::PeekInterface::peek(value).hash(state),
            Self::Normal(value) => value.hash(state),
        }
    }
}
impl<T: Eq + PartialEq + Clone> Maskable<T> {
    /// Get the inner data while consuming self
    pub fn into_inner(self) -> T {
        match self {
            Self::Masked(inner_secret) => inner_secret.expose(),
            Self::Normal(inner) => inner,
        }
    }
    /// Create a new Masked data
    pub fn new_masked(item: Secret<T>) -> Self {
        Self::Masked(item)
    }
    /// Create a new non-masked data
    pub fn new_normal(item: T) -> Self {
        Self::Normal(item)
    }
    /// Checks whether the data is masked.
    /// Returns `true` if the data is wrapped in the `Masked` variant,
    /// returns `false` otherwise.
    pub fn is_masked(&self) -> bool {
        matches!(self, Self::Masked(_))
    }
    /// Checks whether the data is normal (not masked).
    /// Returns `true` if the data is wrapped in the `Normal` variant,
    /// returns `false` otherwise.
    pub fn is_normal(&self) -> bool {
        matches!(self, Self::Normal(_))
    }
}
/// Trait for providing a method on custom types for constructing `Maskable`
pub trait Mask {
    /// The type returned by the `into_masked()` method. Must implement `PartialEq`, `Eq` and `Clone`
    type Output: Eq + Clone + PartialEq;
    /// Construct a `Maskable` instance that wraps `Self::Output` by consuming `self`
    fn into_masked(self) -> Maskable<Self::Output>;
}
impl Mask for String {
    type Output = Self;
    /// Wraps the owned string in the `Masked` variant of [`Maskable`].
    fn into_masked(self) -> Maskable<Self::Output> {
        Maskable::Masked(self.into())
    }
}
impl Mask for Secret<String> {
    type Output = String;
    // Already a secret: just wrap it in the masked variant.
    fn into_masked(self) -> Maskable<Self::Output> {
        Maskable::new_masked(self)
    }
}
// Plain values convert into the non-masked variant by default.
impl<T: Eq + PartialEq + Clone> From<T> for Maskable<T> {
    fn from(value: T) -> Self {
        Self::new_normal(value)
    }
}
impl From<&str> for Maskable<String> {
    fn from(value: &str) -> Self {
        Self::new_normal(value.to_string())
    }
}
// File: crates/masking/src/lib.rs
#![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg_hide))]
#![cfg_attr(docsrs, doc(cfg_hide(doc)))]
#![warn(missing_docs)]
//! Personal Identifiable Information protection. Wrapper types and traits for secret management which help ensure they aren't accidentally copied, logged, or otherwise exposed (as much as possible), and also ensure secrets are securely wiped from memory when dropped.
//! Secret-keeping library inspired by secrecy.
#![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR" ), "/", "README.md"))]
pub use zeroize::{self, DefaultIsZeroes, Zeroize as ZeroizableSecret};
mod strategy;
pub use strategy::{Strategy, WithType, WithoutType};
mod abs;
pub use abs::{ExposeInterface, ExposeOptionInterface, PeekInterface, SwitchStrategy};
mod secret;
mod strong_secret;
#[cfg(feature = "serde")]
pub use secret::JsonMaskStrategy;
pub use secret::Secret;
pub use strong_secret::StrongSecret;
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
mod boxed;
#[cfg(feature = "bytes")]
mod bytes;
#[cfg(feature = "bytes")]
pub use self::bytes::SecretBytesMut;
#[cfg(feature = "alloc")]
mod string;
#[cfg(feature = "alloc")]
mod vec;
#[cfg(feature = "serde")]
mod serde;
#[cfg(feature = "serde")]
pub use crate::serde::{
masked_serialize, Deserialize, ErasedMaskSerialize, SerializableSecret, Serialize,
};
/// This module should be included with asterisk.
///
/// `use masking::prelude::*;`
pub mod prelude {
    // Re-export the secret-access traits so `prelude::*` brings them in scope.
    pub use super::{ExposeInterface, ExposeOptionInterface, PeekInterface};
}
#[cfg(feature = "diesel")]
mod diesel;
#[cfg(feature = "cassandra")]
mod cassandra;
pub mod maskable;
pub use maskable::*;
// File: crates/masking/src/boxed.rs
//! `Box` types containing secrets
//!
//! There is not alias type by design.
#[cfg(feature = "serde")]
use super::{SerializableSecret, Serialize};
// A boxed serializable value may itself be treated as a serializable secret.
#[cfg(feature = "serde")]
impl<S: Serialize> SerializableSecret for Box<S> {}
// File: crates/masking/src/cassandra.rs
use scylla::{
deserialize::DeserializeValue,
frame::response::result::ColumnType,
serialize::{
value::SerializeValue,
writers::{CellWriter, WrittenCellProof},
SerializationError,
},
};
use crate::{abs::PeekInterface, StrongSecret};
// CQL serialization intentionally exposes the raw inner value: database
// writes require the actual secret, not the masked rendering.
impl<T> SerializeValue for StrongSecret<T>
where
    T: SerializeValue + zeroize::Zeroize + Clone,
{
    fn serialize<'b>(
        &self,
        column_type: &ColumnType<'_>,
        writer: CellWriter<'b>,
    ) -> Result<WrittenCellProof<'b>, SerializationError> {
        self.peek().serialize(column_type, writer)
    }
}
// Deserialization type-checks and decodes as the inner type, then rewraps.
impl<'frame, 'metadata, T> DeserializeValue<'frame, 'metadata> for StrongSecret<T>
where
    T: DeserializeValue<'frame, 'metadata> + zeroize::Zeroize + Clone,
{
    fn type_check(column_type: &ColumnType<'_>) -> Result<(), scylla::deserialize::TypeCheckError> {
        T::type_check(column_type)
    }
    fn deserialize(
        column_type: &'metadata ColumnType<'metadata>,
        v: Option<scylla::deserialize::FrameSlice<'frame>>,
    ) -> Result<Self, scylla::deserialize::DeserializationError> {
        Ok(Self::new(T::deserialize(column_type, v)?))
    }
}
// File: crates/masking/src/vec.rs
//! Secret `Vec` types
//!
//! There is not alias type by design.
#[cfg(feature = "serde")]
use super::{SerializableSecret, Serialize};
// A vector of serializable values may itself be treated as a serializable secret.
#[cfg(feature = "serde")]
impl<S: Serialize> SerializableSecret for Vec<S> {}
// File: crates/masking/src/diesel.rs
//! Diesel-related.
use diesel::{
backend::Backend,
deserialize::{self, FromSql, Queryable},
expression::AsExpression,
internal::derives::as_expression::Bound,
serialize::{self, Output, ToSql},
sql_types,
};
use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
// Diesel integration for `Secret`: ToSql/FromSql delegate to the raw inner
// value (the database must see the actual secret), and the AsExpression
// impls cover the value, reference, and double-reference forms diesel needs.
impl<S, I, T> AsExpression<T> for &Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<S, I, T> AsExpression<T> for &&Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<S, I, T, DB> ToSql<T, DB> for Secret<S, I>
where
    DB: Backend,
    S: ToSql<T, DB>,
    I: Strategy<S>,
{
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result {
        ToSql::<T, DB>::to_sql(&self.inner_secret, out)
    }
}
impl<DB, S, T, I> FromSql<T, DB> for Secret<S, I>
where
    DB: Backend,
    S: FromSql<T, DB>,
    I: Strategy<S>,
{
    // Decode as the raw inner type, then rewrap via `From`.
    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
        S::from_sql(bytes).map(|raw| raw.into())
    }
}
impl<S, I, T> AsExpression<T> for Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<ST, DB, S, I> Queryable<ST, DB> for Secret<S, I>
where
    DB: Backend,
    I: Strategy<S>,
    ST: sql_types::SingleValue,
    Self: FromSql<ST, DB>,
{
    type Row = Self;
    fn build(row: Self::Row) -> deserialize::Result<Self> {
        Ok(row)
    }
}
// Same set of impls for `StrongSecret` (adds the `ZeroizableSecret` bound).
impl<S, I, T> AsExpression<T> for &StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<S, I, T> AsExpression<T> for &&StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<S, I, DB, T> ToSql<T, DB> for StrongSecret<S, I>
where
    DB: Backend,
    S: ToSql<T, DB> + ZeroizableSecret,
    I: Strategy<S>,
{
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result {
        ToSql::<T, DB>::to_sql(&self.inner_secret, out)
    }
}
impl<DB, S, I, T> FromSql<T, DB> for StrongSecret<S, I>
where
    DB: Backend,
    S: FromSql<T, DB> + ZeroizableSecret,
    I: Strategy<S>,
{
    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
        S::from_sql(bytes).map(|raw| raw.into())
    }
}
impl<S, I, T> AsExpression<T> for StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
impl<ST, DB, S, I> Queryable<ST, DB> for StrongSecret<S, I>
where
    I: Strategy<S>,
    DB: Backend,
    S: ZeroizableSecret,
    ST: sql_types::SingleValue,
    Self: FromSql<ST, DB>,
{
    type Row = Self;
    fn build(row: Self::Row) -> deserialize::Result<Self> {
        Ok(row)
    }
}
// File: crates/masking/src/string.rs
//! Secret strings
//!
//! There is no alias type, by design.
use alloc::{
str::FromStr,
string::{String, ToString},
};
#[cfg(feature = "serde")]
use super::SerializableSecret;
use super::{Secret, Strategy};
use crate::StrongSecret;
// Marker impl: plain `String` payloads may be serialized when wrapped in a
// secret type.
#[cfg(feature = "serde")]
impl SerializableSecret for String {}
impl<I> FromStr for Secret<String, I>
where
    I: Strategy<String>,
{
    type Err = core::convert::Infallible;

    /// Wraps the borrowed string in a fresh [`Secret`]; this conversion can
    /// never fail, hence the [`core::convert::Infallible`] error type.
    fn from_str(src: &str) -> Result<Self, Self::Err> {
        let owned = String::from(src);
        Ok(Self::new(owned))
    }
}
impl<I> FromStr for StrongSecret<String, I>
where
    I: Strategy<String>,
{
    type Err = core::convert::Infallible;

    /// Wraps the borrowed string in a fresh [`StrongSecret`]; infallible by
    /// construction, mirroring the `Secret<String>` impl above.
    fn from_str(src: &str) -> Result<Self, Self::Err> {
        let owned = String::from(src);
        Ok(Self::new(owned))
    }
}
// File: crates/masking/src/bytes.rs
//! Optional `Secret` wrapper type for the `bytes::BytesMut` crate.
use core::fmt;
use bytes::BytesMut;
#[cfg(all(feature = "bytes", feature = "serde"))]
use serde::de::{self, Deserialize};
use super::{PeekInterface, ZeroizableSecret};
/// Instance of [`BytesMut`] protected by a type that impls the [`ExposeInterface`]
/// trait like `Secret<T>`.
///
/// Because of the nature of how the `BytesMut` type works, it needs some special
/// care in order to have a proper zeroizing drop handler.
#[derive(Clone)]
#[cfg_attr(docsrs, cfg(feature = "bytes"))]
// Tuple field is private: access only via `PeekInterface` below.
pub struct SecretBytesMut(BytesMut);
impl SecretBytesMut {
/// Wrap bytes in `SecretBytesMut`
pub fn new(bytes: impl Into<BytesMut>) -> Self {
Self(bytes.into())
}
}
impl PeekInterface<BytesMut> for SecretBytesMut {
// Read-only view of the protected buffer.
fn peek(&self) -> &BytesMut {
&self.0
}
// Mutable view, letting callers write payload bytes without cloning.
fn peek_mut(&mut self) -> &mut BytesMut {
&mut self.0
}
}
impl fmt::Debug for SecretBytesMut {
    /// Never reveals the wrapped bytes; always prints a fixed redaction marker.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("SecretBytesMut([REDACTED])")
    }
}
// Convenience conversion mirroring `SecretBytesMut::new`.
impl From<BytesMut> for SecretBytesMut {
fn from(bytes: BytesMut) -> Self {
Self::new(bytes)
}
}
impl Drop for SecretBytesMut {
// Best-effort zeroization of the buffer when the secret goes out of scope.
fn drop(&mut self) {
// Grow the buffer to its full capacity (new bytes are filled with 0) so
// that stale data sitting in the spare capacity is also covered by the
// zeroize pass below.
self.0.resize(self.0.capacity(), 0);
self.0.as_mut().zeroize();
// Debug-only sanity check that every byte really is zero now.
debug_assert!(self.0.as_ref().iter().all(|b| *b == 0));
}
}
#[cfg(all(feature = "bytes", feature = "serde"))]
impl<'de> Deserialize<'de> for SecretBytesMut {
fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
// Visitor accepting either a borrowed byte slice or a sequence of u8s.
struct SecretBytesVisitor;
impl<'de> de::Visitor<'de> for SecretBytesVisitor {
type Value = SecretBytesMut;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("byte array")
}
#[inline]
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where
E: de::Error,
{
let mut bytes = BytesMut::with_capacity(v.len());
bytes.extend_from_slice(v);
Ok(SecretBytesMut(bytes))
}
#[inline]
fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
where
V: de::SeqAccess<'de>,
{
// Cap the pre-allocation at 4096 (value inherited from upstream serde
// examples) so an untrusted size_hint cannot force a huge allocation.
let len = core::cmp::min(seq.size_hint().unwrap_or(0), 4096);
let mut bytes = BytesMut::with_capacity(len);
use bytes::BufMut;
while let Some(value) = seq.next_element()? {
bytes.put_u8(value);
}
Ok(SecretBytesMut(bytes))
}
}
deserializer.deserialize_bytes(SecretBytesVisitor)
}
}
// File: crates/masking/src/secret.rs
//! Structure describing secret.
use std::{fmt, marker::PhantomData};
use crate::{strategy::Strategy, PeekInterface, StrongSecret};
/// Secret thing.
///
/// To get access to value use method `expose()` of trait [`crate::ExposeInterface`].
///
/// ## Masking
/// Use the [`crate::strategy::Strategy`] trait to implement a masking strategy on a zero-variant
/// enum and pass this enum as a second generic parameter to [`Secret`] while defining it.
/// [`Secret`] will take care of applying the masking strategy on the inner secret when being
/// displayed.
///
/// ## Masking Example
///
/// ```
/// use masking::Strategy;
/// use masking::Secret;
/// use std::fmt;
///
/// enum MyStrategy {}
///
/// impl<T> Strategy<T> for MyStrategy
/// where
/// T: fmt::Display
/// {
/// fn fmt(val: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "{}", val.to_string().to_ascii_lowercase())
/// }
/// }
///
/// let my_secret: Secret<String, MyStrategy> = Secret::new("HELLO".to_string());
///
/// assert_eq!("hello", &format!("{:?}", my_secret));
/// ```
pub struct Secret<Secret, MaskingStrategy = crate::WithType>
where
MaskingStrategy: Strategy<Secret>,
{
// Crate-private wrapped value; external access goes through the interface traits.
pub(crate) inner_secret: Secret,
// Zero-sized marker tying the chosen masking strategy to the type.
pub(crate) masking_strategy: PhantomData<MaskingStrategy>,
}
impl<SecretValue, MaskingStrategy> Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Take ownership of a secret value
    pub fn new(secret: SecretValue) -> Self {
        Self {
            inner_secret: secret,
            masking_strategy: PhantomData,
        }
    }

    /// Zip 2 secrets with the same masking strategy into one
    pub fn zip<OtherSecretValue>(
        self,
        other: Secret<OtherSecretValue, MaskingStrategy>,
    ) -> Secret<(SecretValue, OtherSecretValue), MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue> + Strategy<(SecretValue, OtherSecretValue)>,
    {
        // Pair the two inner values without ever exposing either of them.
        Secret::new((self.inner_secret, other.inner_secret))
    }

    /// consume self and modify the inner value
    pub fn map<OtherSecretValue>(
        self,
        f: impl FnOnce(SecretValue) -> OtherSecretValue,
    ) -> Secret<OtherSecretValue, MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue>,
    {
        // The closure receives the raw value; the result is immediately re-wrapped.
        Secret::new(f(self.inner_secret))
    }

    /// Convert to [`StrongSecret`]
    pub fn into_strong(self) -> StrongSecret<SecretValue, MaskingStrategy>
    where
        SecretValue: zeroize::DefaultIsZeroes,
    {
        StrongSecret::new(self.inner_secret)
    }

    /// Convert to [`Secret`] with a reference to the inner secret
    pub fn as_ref(&self) -> Secret<&SecretValue, MaskingStrategy>
    where
        MaskingStrategy: for<'a> Strategy<&'a SecretValue>,
    {
        Secret::new(self.peek())
    }
}
impl<SecretValue, MaskingStrategy> PeekInterface<SecretValue>
for Secret<SecretValue, MaskingStrategy>
where
MaskingStrategy: Strategy<SecretValue>,
{
// Borrow the inner value without consuming the secret.
fn peek(&self) -> &SecretValue {
&self.inner_secret
}
// Mutable access to the inner value; the wrapper stays intact.
fn peek_mut(&mut self) -> &mut SecretValue {
&mut self.inner_secret
}
}
// Any value can be wrapped into a `Secret` with a matching strategy.
impl<SecretValue, MaskingStrategy> From<SecretValue> for Secret<SecretValue, MaskingStrategy>
where
MaskingStrategy: Strategy<SecretValue>,
{
fn from(secret: SecretValue) -> Self {
Self::new(secret)
}
}
impl<SecretValue, MaskingStrategy> Clone for Secret<SecretValue, MaskingStrategy>
where
SecretValue: Clone,
MaskingStrategy: Strategy<SecretValue>,
{
fn clone(&self) -> Self {
// Clone only the inner value; the strategy is a zero-sized marker.
Self {
inner_secret: self.inner_secret.clone(),
masking_strategy: PhantomData,
}
}
}
impl<SecretValue, MaskingStrategy> PartialEq for Secret<SecretValue, MaskingStrategy>
where
    Self: PeekInterface<SecretValue>,
    SecretValue: PartialEq,
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Two secrets compare equal exactly when their inner values do.
    fn eq(&self, other: &Self) -> bool {
        self.peek() == other.peek()
    }
}
// `Eq` is inherited from the inner value's `Eq`; no extra behavior.
impl<SecretValue, MaskingStrategy> Eq for Secret<SecretValue, MaskingStrategy>
where
Self: PeekInterface<SecretValue>,
SecretValue: Eq,
MaskingStrategy: Strategy<SecretValue>,
{
}
// `{:?}` output is fully delegated to the masking strategy, so the raw
// value never reaches logs through `Debug`.
impl<SecretValue, MaskingStrategy> fmt::Debug for Secret<SecretValue, MaskingStrategy>
where
MaskingStrategy: Strategy<SecretValue>,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
MaskingStrategy::fmt(&self.inner_secret, f)
}
}
impl<SecretValue, MaskingStrategy> Default for Secret<SecretValue, MaskingStrategy>
where
    SecretValue: Default,
    MaskingStrategy: Strategy<SecretValue>,
{
    /// A secret wrapping the inner type's default value.
    fn default() -> Self {
        Self::from(SecretValue::default())
    }
}
// Required by base64-serde to serialize Secret of Vec<u8> which contains the base64 decoded value
// NOTE(review): this exposes the raw bytes to any `AsRef<[u8]>` caller,
// bypassing masking — confirm this trade-off is intended beyond base64-serde.
impl AsRef<[u8]> for Secret<Vec<u8>> {
fn as_ref(&self) -> &[u8] {
self.peek().as_slice()
}
}
/// Strategy for masking JSON values
#[cfg(feature = "serde")]
pub enum JsonMaskStrategy {}
#[cfg(feature = "serde")]
impl Strategy<serde_json::Value> for JsonMaskStrategy {
    /// Recursively writes a masked rendering of `value`.
    ///
    /// Objects and arrays keep their structure (keys and nesting remain
    /// visible) while every leaf is replaced by a placeholder:
    /// - strings: `"f**<byte_len - 2>**l"` (first/last char), `"f**"` when
    ///   3..=6 bytes, `"**"` when <= 2 bytes
    /// - integers: one `*` per rendered digit; floats: `**.**`
    /// - booleans: `**true` / `**false`; nulls: printed as `null`
    fn fmt(value: &serde_json::Value, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match value {
            serde_json::Value::Object(map) => {
                write!(f, "{{")?;
                let mut first = true;
                for (key, val) in map {
                    if !first {
                        write!(f, ", ")?;
                    }
                    first = false;
                    write!(f, "\"{key}\":")?;
                    Self::fmt(val, f)?;
                }
                write!(f, "}}")
            }
            serde_json::Value::Array(arr) => {
                write!(f, "[")?;
                let mut first = true;
                for val in arr {
                    if !first {
                        write!(f, ", ")?;
                    }
                    first = false;
                    Self::fmt(val, f)?;
                }
                write!(f, "]")
            }
            serde_json::Value::String(s) => {
                // For strings, show a masked version hinting at the content.
                // Extract the first/last characters via the char iterator:
                // byte slicing (`&s[0..1]`, `&s[s.len() - 1..]`) panics on
                // multi-byte UTF-8 characters. Output for ASCII is unchanged.
                let first_char = s.chars().next().map(String::from).unwrap_or_default();
                let masked = if s.len() <= 2 {
                    "**".to_string()
                } else if s.len() <= 6 {
                    format!("{first_char}**")
                } else {
                    // For longer strings, show first and last character with
                    // the byte length (minus the two shown chars) in between.
                    let last_char = s.chars().last().map(String::from).unwrap_or_default();
                    format!("{}**{}**{}", first_char, s.len() - 2, last_char)
                };
                write!(f, "\"{masked}\"")
            }
            serde_json::Value::Number(n) => {
                // For numbers, we can show the order of magnitude
                if n.is_i64() || n.is_u64() {
                    let num_str = n.to_string();
                    let masked_num = "*".repeat(num_str.len());
                    write!(f, "{masked_num}")
                } else if n.is_f64() {
                    // For floats, just use a generic mask
                    write!(f, "**.**")
                } else {
                    write!(f, "0")
                }
            }
            serde_json::Value::Bool(b) => {
                // For booleans, we can show a hint about which one it is
                write!(f, "{}", if *b { "**true" } else { "**false" })
            }
            serde_json::Value::Null => write!(f, "null"),
        }
    }
}
// Prost (protobuf) support: a `Secret<T>` encodes/decodes exactly like its
// inner `T`, so wire compatibility is preserved.
#[cfg(feature = "proto_tonic")]
impl<T> prost::Message for Secret<T, crate::WithType>
where
T: prost::Message + Default + Clone,
{
fn encode_raw(&self, buf: &mut impl bytes::BufMut) {
self.peek().encode_raw(buf);
}
fn merge_field(
&mut self,
tag: u32,
wire_type: prost::encoding::WireType,
buf: &mut impl bytes::Buf,
ctx: prost::encoding::DecodeContext,
) -> Result<(), prost::DecodeError> {
// Only field tag 1 is merged into the inner message; every other tag is
// skipped. NOTE(review): this assumes the wrapper message declares its
// payload as field 1 — confirm against the .proto definition.
if tag == 1 {
self.peek_mut().merge_field(tag, wire_type, buf, ctx)
} else {
prost::encoding::skip_field(wire_type, tag, buf, ctx)
}
}
fn encoded_len(&self) -> usize {
self.peek().encoded_len()
}
fn clear(&mut self) {
self.peek_mut().clear();
}
}
#[cfg(test)]
#[cfg(feature = "serde")]
mod tests {
use serde_json::json;
use super::*;
// End-to-end check of `JsonMaskStrategy`: every leaf value must be masked
// according to its type, JSON structure must be preserved, and no original
// value may appear anywhere in the masked output.
#[test]
#[allow(clippy::expect_used)]
fn test_json_mask_strategy() {
// Create a sample JSON with different types for testing
let original = json!({ "user": { "name": "John Doe", "email": "[email protected]", "age": 35, "verified": true }, "card": { "number": "4242424242424242", "cvv": 123, "amount": 99.99 }, "tags": ["personal", "premium"], "null_value": null, "short": "hi" });
// Apply the JsonMaskStrategy
let secret = Secret::<_, JsonMaskStrategy>::new(original.clone());
let masked_str = format!("{secret:?}");
// Get specific values from original
let original_obj = original.as_object().expect("Original should be an object");
let user_obj = original_obj["user"]
.as_object()
.expect("User should be an object");
let name = user_obj["name"].as_str().expect("Name should be a string");
let email = user_obj["email"]
.as_str()
.expect("Email should be a string");
let age = user_obj["age"].as_i64().expect("Age should be a number");
let verified = user_obj["verified"]
.as_bool()
.expect("Verified should be a boolean");
let card_obj = original_obj["card"]
.as_object()
.expect("Card should be an object");
let card_number = card_obj["number"]
.as_str()
.expect("Card number should be a string");
let cvv = card_obj["cvv"].as_i64().expect("CVV should be a number");
let tags = original_obj["tags"]
.as_array()
.expect("Tags should be an array");
let tag1 = tags
.first()
.and_then(|v| v.as_str())
.expect("First tag should be a string");
// Now explicitly verify the masking patterns for each value type
// 1. String masking - pattern: first char + ** + length - 2 + ** + last char
let expected_name_mask = format!(
"\"{}**{}**{}\"",
&name[0..1],
name.len() - 2,
&name[name.len() - 1..]
);
let expected_email_mask = format!(
"\"{}**{}**{}\"",
&email[0..1],
email.len() - 2,
&email[email.len() - 1..]
);
let expected_card_mask = format!(
"\"{}**{}**{}\"",
&card_number[0..1],
card_number.len() - 2,
&card_number[card_number.len() - 1..]
);
let expected_tag1_mask = if tag1.len() <= 2 {
"\"**\"".to_string()
} else if tag1.len() <= 6 {
format!("\"{}**\"", &tag1[0..1])
} else {
format!(
"\"{}**{}**{}\"",
&tag1[0..1],
tag1.len() - 2,
&tag1[tag1.len() - 1..]
)
};
let expected_short_mask = "\"**\"".to_string(); // For "hi"
// 2. Number masking
let expected_age_mask = "*".repeat(age.to_string().len()); // Repeat * for the number of digits
let expected_cvv_mask = "*".repeat(cvv.to_string().len());
// 3. Boolean masking
let expected_verified_mask = if verified { "**true" } else { "**false" };
// Check that the masked output includes the expected masked patterns
assert!(
masked_str.contains(&expected_name_mask),
"Name not masked correctly. Expected: {expected_name_mask}"
);
assert!(
masked_str.contains(&expected_email_mask),
"Email not masked correctly. Expected: {expected_email_mask}",
);
assert!(
masked_str.contains(&expected_card_mask),
"Card number not masked correctly. Expected: {expected_card_mask}",
);
assert!(
masked_str.contains(&expected_tag1_mask),
"Tag not masked correctly. Expected: {expected_tag1_mask}",
);
assert!(
masked_str.contains(&expected_short_mask),
"Short string not masked correctly. Expected: {expected_short_mask}",
);
assert!(
masked_str.contains(&expected_age_mask),
"Age not masked correctly. Expected: {expected_age_mask}",
);
assert!(
masked_str.contains(&expected_cvv_mask),
"CVV not masked correctly. Expected: {expected_cvv_mask}",
);
assert!(
masked_str.contains(expected_verified_mask),
"Boolean not masked correctly. Expected: {expected_verified_mask}",
);
// Check structure preservation
assert!(
masked_str.contains("\"user\""),
"Structure not preserved - missing user object"
);
assert!(
masked_str.contains("\"card\""),
"Structure not preserved - missing card object"
);
assert!(
masked_str.contains("\"tags\""),
"Structure not preserved - missing tags array"
);
assert!(
masked_str.contains("\"null_value\":null"),
"Null value not preserved correctly"
);
// Additional security checks to ensure no original values are exposed
assert!(
!masked_str.contains(name),
"Original name value exposed in masked output"
);
assert!(
!masked_str.contains(email),
"Original email value exposed in masked output"
);
assert!(
!masked_str.contains(card_number),
"Original card number exposed in masked output"
);
assert!(
!masked_str.contains(&age.to_string()),
"Original age value exposed in masked output"
);
assert!(
!masked_str.contains(&cvv.to_string()),
"Original CVV value exposed in masked output"
);
assert!(
!masked_str.contains(tag1),
"Original tag value exposed in masked output"
);
assert!(
!masked_str.contains("hi"),
"Original short string value exposed in masked output"
);
}
}
</module>
|
{
"crate": "masking",
"file": null,
"files": [
"crates/masking/src/strategy.rs",
"crates/masking/src/abs.rs",
"crates/masking/src/strong_secret.rs",
"crates/masking/src/serde.rs",
"crates/masking/src/maskable.rs",
"crates/masking/src/lib.rs",
"crates/masking/src/boxed.rs",
"crates/masking/src/cassandra.rs",
"crates/masking/src/vec.rs",
"crates/masking/src/diesel.rs",
"crates/masking/src/string.rs",
"crates/masking/src/bytes.rs",
"crates/masking/src/secret.rs"
],
"module": "crates/masking/src",
"num_files": 13,
"token_count": 12422
}
|
module_7051012173636665100
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: injector
Module: crates/injector/src
Files: 5
</path>
<module>
// File: crates/injector/src/consts.rs
/// Header name for external vault metadata
///
/// Looked up on incoming request headers in `injector.rs` to detect vault
/// connection details (proxy URL / CA certificate) supplied in-band.
pub const EXTERNAL_VAULT_METADATA_HEADER: &str = "x-external-vault-metadata";
// File: crates/injector/src/injector.rs
pub mod core {
use std::collections::HashMap;
use async_trait::async_trait;
use common_utils::request::{Method, RequestBuilder, RequestContent};
use error_stack::{self, ResultExt};
use masking::{self, ExposeInterface};
use nom::{
bytes::complete::{tag, take_while1},
character::complete::{char, multispace0},
sequence::{delimited, preceded, terminated},
IResult,
};
use router_env::{instrument, logger, tracing};
use serde_json::Value;
use thiserror::Error;
use crate as injector_types;
use crate::{
types::{ContentType, InjectorRequest, InjectorResponse, IntoInjectorResponse},
vault_metadata::VaultMetadataExtractorExt,
};
// Bridge the injector's HTTP method enum to `common_utils::request::Method`
// so requests can be built with the shared request types.
impl From<injector_types::HttpMethod> for Method {
fn from(method: injector_types::HttpMethod) -> Self {
match method {
injector_types::HttpMethod::GET => Self::Get,
injector_types::HttpMethod::POST => Self::Post,
injector_types::HttpMethod::PUT => Self::Put,
injector_types::HttpMethod::PATCH => Self::Patch,
injector_types::HttpMethod::DELETE => Self::Delete,
}
}
}
/// Proxy configuration structure (copied from hyperswitch_interfaces to make injector standalone)
///
/// `#[serde(default)]` makes every missing field fall back to the `Default`
/// impl (notably a 90-second idle-pool connection timeout).
#[derive(Debug, serde::Deserialize, Clone)]
#[serde(default)]
pub struct Proxy {
/// The URL of the HTTP proxy server.
pub http_url: Option<String>,
/// The URL of the HTTPS proxy server.
pub https_url: Option<String>,
/// The timeout duration (in seconds) for idle connections in the proxy pool.
pub idle_pool_connection_timeout: Option<u64>,
/// A comma-separated list of hosts that should bypass the proxy.
pub bypass_proxy_hosts: Option<String>,
}
impl Default for Proxy {
    /// No proxies configured; idle pool connections time out after 90 seconds.
    fn default() -> Self {
        Self {
            http_url: None,
            https_url: None,
            idle_pool_connection_timeout: Some(90),
            bypass_proxy_hosts: None,
        }
    }
}
/// Create HTTP client using the proven external_services create_client logic
///
/// TLS material precedence:
/// 1. client certificate + key present -> mutual TLS (a CA certificate given
///    alongside is ignored, with a warning)
/// 2. only a CA certificate present    -> one-way TLS trusting that root
/// 3. no certificates                  -> plain client with just proxy settings
fn create_client(
proxy_config: &Proxy,
client_certificate: Option<masking::Secret<String>>,
client_certificate_key: Option<masking::Secret<String>>,
ca_certificate: Option<masking::Secret<String>>,
) -> error_stack::Result<reqwest::Client, InjectorError> {
logger::debug!(
has_client_cert = client_certificate.is_some(),
has_client_key = client_certificate_key.is_some(),
has_ca_cert = ca_certificate.is_some(),
"Creating HTTP client"
);
// Case 1: Mutual TLS with client certificate and key
if let (Some(encoded_certificate), Some(encoded_certificate_key)) =
(client_certificate.clone(), client_certificate_key.clone())
{
if ca_certificate.is_some() {
logger::warn!("All of client certificate, client key, and CA certificate are provided. CA certificate will be ignored in mutual TLS setup.");
}
let client_builder = get_client_builder(proxy_config)?;
let identity = create_identity_from_certificate_and_key(
encoded_certificate.clone(),
encoded_certificate_key,
)?;
// The client certificate is also installed as a root so self-signed
// setups verify; fold in case multiple certs are ever returned.
let certificate_list = create_certificate(encoded_certificate)?;
let client_builder = certificate_list
.into_iter()
.fold(client_builder, |client_builder, certificate| {
client_builder.add_root_certificate(certificate)
});
return client_builder
.identity(identity)
.use_rustls_tls()
.build()
.change_context(InjectorError::HttpRequestFailed)
.inspect_err(|e| {
logger::error!(
"Failed to construct client with certificate and certificate key: {:?}",
e
);
});
}
// Case 2: Use provided CA certificate for server authentication only (one-way TLS)
if let Some(ca_pem) = ca_certificate {
let pem = ca_pem.expose().replace("\\r\\n", "\n"); // Fix escaped newlines
let cert = reqwest::Certificate::from_pem(pem.as_bytes())
.change_context(InjectorError::HttpRequestFailed)
.inspect_err(|e| {
logger::error!("Failed to parse CA certificate PEM block: {:?}", e)
})?;
let client_builder = get_client_builder(proxy_config)?.add_root_certificate(cert);
return client_builder
.use_rustls_tls()
.build()
.change_context(InjectorError::HttpRequestFailed)
.inspect_err(|e| {
logger::error!("Failed to construct client with CA certificate: {:?}", e);
});
}
// Case 3: Default client (no certs)
get_base_client(proxy_config)
}
/// Builds the base `reqwest::ClientBuilder`, applying any HTTPS and HTTP
/// proxies from the configuration (helper mirrored from external_services).
fn get_client_builder(
    proxy_config: &Proxy,
) -> error_stack::Result<reqwest::ClientBuilder, InjectorError> {
    let mut client_builder = reqwest::Client::builder();
    // HTTPS proxy first, then HTTP, preserving the original application order.
    if let Some(proxy_url) = proxy_config.https_url.as_ref() {
        client_builder = client_builder.proxy(
            reqwest::Proxy::https(proxy_url)
                .change_context(InjectorError::HttpRequestFailed)
                .inspect_err(|e| {
                    logger::error!("Failed to configure HTTPS proxy: {:?}", e);
                })?,
        );
    }
    if let Some(proxy_url) = proxy_config.http_url.as_ref() {
        client_builder = client_builder.proxy(
            reqwest::Proxy::http(proxy_url)
                .change_context(InjectorError::HttpRequestFailed)
                .inspect_err(|e| {
                    logger::error!("Failed to configure HTTP proxy: {:?}", e);
                })?,
        );
    }
    Ok(client_builder)
}
/// Plain client (no TLS material) built on top of the shared proxy setup.
fn get_base_client(
    proxy_config: &Proxy,
) -> error_stack::Result<reqwest::Client, InjectorError> {
    get_client_builder(proxy_config)?
        .build()
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!("Failed to build default HTTP client: {:?}", e);
        })
}
/// Combines a client certificate and its private key into a reqwest identity.
fn create_identity_from_certificate_and_key(
    encoded_certificate: masking::Secret<String>,
    encoded_certificate_key: masking::Secret<String>,
) -> error_stack::Result<reqwest::Identity, InjectorError> {
    // reqwest expects a single PEM bundle containing both the certificate and
    // its key, so expose and concatenate the two blocks.
    let combined_pem = format!(
        "{}\n{}",
        encoded_certificate.expose(),
        encoded_certificate_key.expose()
    );
    reqwest::Identity::from_pem(combined_pem.as_bytes())
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!(
                "Failed to create identity from certificate and key: {:?}",
                e
            );
        })
}
/// Parses one PEM certificate; returned as a `Vec` so callers can fold
/// multiple root certificates uniformly.
fn create_certificate(
    encoded_certificate: masking::Secret<String>,
) -> error_stack::Result<Vec<reqwest::Certificate>, InjectorError> {
    reqwest::Certificate::from_pem(encoded_certificate.expose().as_bytes())
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!("Failed to create certificate from PEM: {:?}", e);
        })
        .map(|certificate| vec![certificate])
}
/// Generic function to log HTTP request errors with detailed error type information
fn log_and_convert_http_error(e: reqwest::Error, context: &str) -> InjectorError {
let error_msg = e.to_string();
logger::error!("HTTP request failed in {}: {}", context, error_msg);
// Log specific error types for debugging
if e.is_timeout() {
logger::error!("Request timed out in {}", context);
}
if e.is_connect() {
logger::error!("Connection error occurred in {}", context);
}
if e.is_request() {
logger::error!("Request construction error in {}", context);
}
if e.is_decode() {
logger::error!("Response decoding error in {}", context);
}
InjectorError::HttpRequestFailed
}
/// Attaches whichever pieces of TLS material the connection config carries to
/// the request builder, then finalizes it into a `Request`.
fn build_request_with_certificates(
    mut request_builder: RequestBuilder,
    config: &injector_types::ConnectionConfig,
) -> common_utils::request::Request {
    request_builder = match &config.client_cert {
        Some(cert_content) => request_builder.add_certificate(Some(cert_content.clone())),
        None => request_builder,
    };
    request_builder = match &config.client_key {
        Some(key_content) => request_builder.add_certificate_key(Some(key_content.clone())),
        None => request_builder,
    };
    request_builder = match &config.ca_cert {
        Some(ca_content) => request_builder.add_ca_certificate_pem(Some(ca_content.clone())),
        None => request_builder,
    };
    request_builder.build()
}
/// Simplified HTTP client for injector using the proven external_services create_client logic
///
/// Builds a fresh `reqwest::Client` per call (with the request's proxy/TLS
/// material), then translates the `common_utils` request into a reqwest one.
/// NOTE(review): `_option_timeout_secs` is accepted but unused — no timeout is
/// applied here; confirm whether callers rely on one.
#[instrument(skip_all)]
pub async fn send_request(
client_proxy: &Proxy,
request: common_utils::request::Request,
_option_timeout_secs: Option<u64>,
) -> error_stack::Result<reqwest::Response, InjectorError> {
logger::info!(
has_client_cert = request.certificate.is_some(),
has_client_key = request.certificate_key.is_some(),
has_ca_cert = request.ca_certificate.is_some(),
"Making HTTP request using standalone injector HTTP client with configuration"
);
// Create reqwest client using the proven create_client function
let client = create_client(
client_proxy,
request.certificate.clone(),
request.certificate_key.clone(),
request.ca_certificate.clone(),
)?;
// Build the request
let method = match request.method {
Method::Get => reqwest::Method::GET,
Method::Post => reqwest::Method::POST,
Method::Put => reqwest::Method::PUT,
Method::Patch => reqwest::Method::PATCH,
Method::Delete => reqwest::Method::DELETE,
};
let mut req_builder = client.request(method, &request.url);
// Add headers
for (key, value) in &request.headers {
// Masked header values are exposed only here, at the transport boundary.
let header_value = match value {
masking::Maskable::Masked(secret) => secret.clone().expose(),
masking::Maskable::Normal(normal) => normal.clone(),
};
req_builder = req_builder.header(key, header_value);
}
// Add body if present
if let Some(body) = request.body {
match body {
RequestContent::Json(payload) => {
req_builder = req_builder.json(&payload);
}
RequestContent::FormUrlEncoded(payload) => {
req_builder = req_builder.form(&payload);
}
RequestContent::RawBytes(payload) => {
req_builder = req_builder.body(payload);
}
_ => {
// NOTE(review): the log claims "using raw bytes", but no body is
// attached in this arm — the request goes out body-less. Confirm
// whether the remaining `RequestContent` variants should be sent.
logger::warn!("Unsupported request content type, using raw bytes");
}
}
}
// Send the request
let response = req_builder
.send()
.await
.map_err(|e| log_and_convert_http_error(e, "send_request"))?;
logger::info!(
status_code = response.status().as_u16(),
"HTTP request completed successfully"
);
Ok(response)
}
/// Error space for the injector pipeline; messages rendered via `thiserror`.
#[derive(Error, Debug)]
pub enum InjectorError {
/// A `{{$field}}` token could not be resolved against the vault data.
#[error("Token replacement failed: {0}")]
TokenReplacementFailed(String),
/// Client construction or the outbound call itself failed (details logged).
#[error("HTTP request failed")]
HttpRequestFailed,
#[error("Serialization error: {0}")]
SerializationError(String),
/// Template/endpoint validation failed before any request was made.
#[error("Invalid template: {0}")]
InvalidTemplate(String),
}
#[instrument(skip_all)]
/// Public entry point: delegates to the `TokenInjector` implementation on a
/// fresh, stateless `Injector` handle.
pub async fn injector_core(
    request: InjectorRequest,
) -> error_stack::Result<InjectorResponse, InjectorError> {
    logger::info!("Starting injector_core processing");
    Injector::new().injector_core(request).await
}
/// Represents a token reference found in a template string
///
/// Produced by `parse_token` / `find_all_tokens`.
#[derive(Debug)]
struct TokenReference {
/// The field name to be replaced (without the {{$}} wrapper)
pub field: String,
}
/// Parses a single token reference from a string using nom parser combinators
///
/// Expects tokens in the format `{{$field_name}}` where field_name contains
/// only alphanumeric characters and underscores.
/// Note: the `multispace0` combinators also tolerate whitespace inside the
/// braces, e.g. `{{ $field_name }}` parses successfully.
fn parse_token(input: &str) -> IResult<&str, TokenReference> {
let (input, field) = delimited(
tag("{{"),
preceded(
multispace0,
preceded(
char('$'),
terminated(
take_while1(|c: char| c.is_alphanumeric() || c == '_'),
multispace0,
),
),
),
tag("}}"),
)(input)?;
Ok((
input,
TokenReference {
field: field.to_string(),
},
))
}
/// Finds all token references in a string using nom parser
///
/// Scans through the entire input string and extracts all valid token references.
/// Returns a vector of TokenReference structs containing the field names.
fn find_all_tokens(input: &str) -> Vec<TokenReference> {
    let mut tokens = Vec::new();
    let mut current_input = input;
    while !current_input.is_empty() {
        if let Ok((remaining, token_ref)) = parse_token(current_input) {
            tokens.push(token_ref);
            current_input = remaining;
        } else {
            // No token starts here: advance by one full character. The previous
            // implementation advanced by one *byte* via `split_at_checked(1)`,
            // which returns `None` in the middle of a multi-byte UTF-8 character
            // and silently aborted the scan, dropping every token appearing
            // after non-ASCII text in the template.
            match current_input.chars().next() {
                Some(c) => current_input = &current_input[c.len_utf8()..],
                None => break,
            }
        }
    }
    tokens
}
/// Recursively searches for a field in vault data JSON structure
///
/// Depth-first: a direct hit on this object wins; otherwise nested objects are
/// searched in iteration order and the first match found is returned.
fn find_field_recursively_in_vault_data(
    obj: &serde_json::Map<String, Value>,
    field_name: &str,
) -> Option<Value> {
    if let Some(direct_hit) = obj.get(field_name) {
        return Some(direct_hit.clone());
    }
    obj.values().find_map(|nested| match nested {
        Value::Object(inner_obj) => find_field_recursively_in_vault_data(inner_obj, field_name),
        _ => None,
    })
}
// Internal seam for the injector entry point; `async_trait` enables the
// async method in trait position.
#[async_trait]
trait TokenInjector {
async fn injector_core(
&self,
request: InjectorRequest,
) -> error_stack::Result<InjectorResponse, InjectorError>;
}
// Stateless marker type whose methods drive token interpolation and dispatch.
pub struct Injector;
impl Injector {
/// Creates the stateless injector handle.
pub fn new() -> Self {
Self
}
/// Processes a string template and replaces token references with vault data
// Non-string extracted values are serialized to their JSON text form before
// being substituted into the template.
#[instrument(skip_all)]
fn interpolate_string_template_with_vault_data(
&self,
template: String,
vault_data: &Value,
vault_connector: &injector_types::VaultConnectors,
) -> error_stack::Result<String, InjectorError> {
// Find all tokens using nom parser
let tokens = find_all_tokens(&template);
let mut result = template;
for token_ref in tokens.into_iter() {
let extracted_field_value = self.extract_field_from_vault_data(
vault_data,
&token_ref.field,
vault_connector,
)?;
let token_str = match extracted_field_value {
Value::String(token_value) => token_value,
_ => serde_json::to_string(&extracted_field_value).unwrap_or_default(),
};
// Replace the token in the result string
// NOTE(review): `parse_token` tolerates whitespace (`{{ $field }}`), but
// this pattern is the exact no-whitespace form, so spaced tokens are
// found yet never replaced — confirm whether templates may contain them.
let token_pattern = format!("{{{{${}}}}}", token_ref.field);
result = result.replace(&token_pattern, &token_str);
}
Ok(result)
}
// Recursively walks a JSON value, template-processing every string leaf and
// descending into object values key by key.
// NOTE(review): arrays fall into the catch-all arm and are returned
// unchanged, so tokens inside array elements are never interpolated —
// confirm whether that is intended.
#[instrument(skip_all)]
fn interpolate_token_references_with_vault_data(
&self,
value: Value,
vault_data: &Value,
vault_connector: &injector_types::VaultConnectors,
) -> error_stack::Result<Value, InjectorError> {
match value {
Value::Object(obj) => {
let new_obj = obj
.into_iter()
.map(|(key, val)| {
self.interpolate_token_references_with_vault_data(
val,
vault_data,
vault_connector,
)
.map(|processed| (key, processed))
})
.collect::<error_stack::Result<serde_json::Map<_, _>, InjectorError>>()?;
Ok(Value::Object(new_obj))
}
Value::String(s) => {
let processed_string = self.interpolate_string_template_with_vault_data(
s,
vault_data,
vault_connector,
)?;
Ok(Value::String(processed_string))
}
_ => Ok(value),
}
}
// Looks up `field_name` anywhere in the object-shaped vault data (depth-first
// via `find_field_recursively_in_vault_data`), then applies the
// vault-connector-specific transformation to the raw value.
#[instrument(skip_all)]
fn extract_field_from_vault_data(
&self,
vault_data: &Value,
field_name: &str,
vault_connector: &injector_types::VaultConnectors,
) -> error_stack::Result<Value, InjectorError> {
logger::debug!(
"Extracting field '{}' from vault data using vault type {:?}",
field_name,
vault_connector
);
match vault_data {
Value::Object(obj) => {
let raw_value = find_field_recursively_in_vault_data(obj, field_name)
.ok_or_else(|| {
error_stack::Report::new(InjectorError::TokenReplacementFailed(
format!("Field '{field_name}' not found"),
))
})?;
// Apply vault-specific token transformation
self.apply_vault_specific_transformation(raw_value, vault_connector, field_name)
}
// Non-object vault payloads cannot be searched for fields.
_ => Err(error_stack::Report::new(
InjectorError::TokenReplacementFailed(
"Vault data is not a valid JSON object".to_string(),
),
)),
}
}
// Hook for vault-specific token post-processing. VGS is currently the only
// connector and uses its tokens verbatim, so this is a passthrough; new
// connectors get their own match arm here.
#[instrument(skip_all)]
fn apply_vault_specific_transformation(
&self,
extracted_field_value: Value,
vault_connector: &injector_types::VaultConnectors,
field_name: &str,
) -> error_stack::Result<Value, InjectorError> {
match vault_connector {
injector_types::VaultConnectors::VGS => {
logger::debug!(
"VGS vault: Using direct token replacement for field '{}'",
field_name
);
Ok(extracted_field_value)
}
}
}
#[instrument(skip_all)]
async fn make_http_request(
&self,
config: &injector_types::ConnectionConfig,
payload: &str,
content_type: &ContentType,
) -> error_stack::Result<InjectorResponse, InjectorError> {
logger::info!(
method = ?config.http_method,
endpoint = %config.endpoint,
content_type = ?content_type,
payload_length = payload.len(),
headers_count = config.headers.len(),
"Making HTTP request to connector"
);
// Validate inputs first
if config.endpoint.is_empty() {
logger::error!("Endpoint URL is empty");
Err(error_stack::Report::new(InjectorError::InvalidTemplate(
"Endpoint URL cannot be empty".to_string(),
)))?;
}
// Parse and validate the complete endpoint URL
let url = reqwest::Url::parse(&config.endpoint).map_err(|e| {
logger::error!("Failed to parse endpoint URL: {}", e);
error_stack::Report::new(InjectorError::InvalidTemplate(format!(
"Invalid endpoint URL: {e}"
)))
})?;
logger::debug!("Constructed URL: {}", url);
// Convert headers to common_utils Headers format safely
let headers: Vec<(String, masking::Maskable<String>)> = config
.headers
.clone()
.into_iter()
.map(|(k, v)| (k, masking::Maskable::new_normal(v.expose().clone())))
.collect();
// Determine method and request content
let method = Method::from(config.http_method);
// Determine request content based on content type with error handling
let request_content = match content_type {
ContentType::ApplicationJson => {
// Try to parse as JSON, fallback to raw string
match serde_json::from_str::<Value>(payload) {
Ok(json) => Some(RequestContent::Json(Box::new(json))),
Err(e) => {
logger::debug!(
"Failed to parse payload as JSON: {}, falling back to raw bytes",
e
);
Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
}
}
}
ContentType::ApplicationXWwwFormUrlencoded => {
// Parse form data safely
let form_data: HashMap<String, String> =
url::form_urlencoded::parse(payload.as_bytes())
.into_owned()
.collect();
Some(RequestContent::FormUrlEncoded(Box::new(form_data)))
}
ContentType::ApplicationXml | ContentType::TextXml => {
Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
}
ContentType::TextPlain => {
Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
}
};
// Extract vault metadata directly from headers using existing functions
let (vault_proxy_url, vault_ca_cert) = if config
.headers
.contains_key(crate::consts::EXTERNAL_VAULT_METADATA_HEADER)
{
let mut temp_config = injector_types::ConnectionConfig::new(
config.endpoint.clone(),
config.http_method,
);
// Use existing vault metadata extraction with fallback
if temp_config.extract_and_apply_vault_metadata_with_fallback(&config.headers) {
(temp_config.proxy_url, temp_config.ca_cert)
} else {
(None, None)
}
} else {
(None, None)
};
// Build request safely with certificate configuration
let mut request_builder = RequestBuilder::new()
.method(method)
.url(url.as_str())
.headers(headers);
if let Some(content) = request_content {
request_builder = request_builder.set_body(content);
}
// Create final config with vault CA certificate if available
let mut final_config = config.clone();
let has_vault_ca_cert = vault_ca_cert.is_some();
if has_vault_ca_cert {
final_config.ca_cert = vault_ca_cert;
}
// Log certificate configuration (but not the actual content)
logger::info!(
has_client_cert = final_config.client_cert.is_some(),
has_client_key = final_config.client_key.is_some(),
has_ca_cert = final_config.ca_cert.is_some(),
has_vault_ca_cert = has_vault_ca_cert,
insecure = final_config.insecure.unwrap_or(false),
cert_format = ?final_config.cert_format,
"Certificate configuration applied"
);
// Build request with certificate configuration applied
let request = build_request_with_certificates(request_builder, &final_config);
// Determine which proxy to use: vault metadata > backup > none
let final_proxy_url = vault_proxy_url.or_else(|| config.backup_proxy_url.clone());
let proxy = if let Some(proxy_url) = final_proxy_url {
let proxy_url_str = proxy_url.expose();
// Set proxy URL for both HTTP and HTTPS traffic
Proxy {
http_url: Some(proxy_url_str.clone()),
https_url: Some(proxy_url_str),
idle_pool_connection_timeout: Some(90),
bypass_proxy_hosts: None,
}
} else {
Proxy::default()
};
// Send request using local standalone http client
let response = send_request(&proxy, request, None).await?;
// Convert reqwest::Response to InjectorResponse using trait
response
.into_injector_response()
.await
.map_err(|e| error_stack::Report::new(e))
}
}
impl Default for Injector {
fn default() -> Self {
Self::new()
}
}
#[async_trait]
impl TokenInjector for Injector {
    /// End-to-end token injection: interpolates vault data into the payload
    /// template, then forwards the processed payload to the connector endpoint.
    ///
    /// # Errors
    /// Propagates [`InjectorError`] from template interpolation or the HTTP call.
    #[instrument(skip_all)]
    async fn injector_core(
        &self,
        request: InjectorRequest,
    ) -> error_stack::Result<InjectorResponse, InjectorError> {
        let start_time = std::time::Instant::now();
        // Extract token data from SecretSerdeValue for vault data lookup.
        // `expose()` consumes the secret and yields the owned value directly.
        let vault_data = request.token_data.specific_token_data.expose();
        logger::debug!(
            template_length = request.connector_payload.template.len(),
            vault_connector = ?request.token_data.vault_connector,
            "Processing token injection request"
        );
        // Process template string directly with vault-specific logic
        let processed_payload = self.interpolate_string_template_with_vault_data(
            request.connector_payload.template,
            &vault_data,
            &request.token_data.vault_connector,
        )?;
        logger::debug!(
            processed_payload_length = processed_payload.len(),
            "Token replacement completed"
        );
        // Determine content type from headers, defaulting to form-urlencoded.
        // HTTP header field names are case-insensitive (RFC 9110), so look the
        // "Content-Type" key up ignoring ASCII case.
        let content_type = request
            .connection_config
            .headers
            .iter()
            .find(|(name, _)| name.eq_ignore_ascii_case("Content-Type"))
            .and_then(|(_, ct)| match ct.clone().expose().as_str() {
                "application/json" => Some(ContentType::ApplicationJson),
                "application/x-www-form-urlencoded" => {
                    Some(ContentType::ApplicationXWwwFormUrlencoded)
                }
                "application/xml" => Some(ContentType::ApplicationXml),
                "text/xml" => Some(ContentType::TextXml),
                "text/plain" => Some(ContentType::TextPlain),
                _ => None,
            })
            .unwrap_or(ContentType::ApplicationXWwwFormUrlencoded);
        // Make HTTP request to connector and return enhanced response
        let response = self
            .make_http_request(
                &request.connection_config,
                &processed_payload,
                &content_type,
            )
            .await?;
        let elapsed = start_time.elapsed();
        logger::info!(
            duration_ms = elapsed.as_millis(),
            status_code = response.status_code,
            response_size = serde_json::to_string(&response.response)
                .map(|s| s.len())
                .unwrap_or(0),
            headers_count = response.headers.as_ref().map(|h| h.len()).unwrap_or(0),
            "Token injection completed successfully"
        );
        Ok(response)
    }
}
}
// Re-export all items
pub use core::*;
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use std::collections::HashMap;
    use router_env::logger;
    use crate::*;
    /// End-to-end smoke test: performs a real network call against the
    /// configured endpoint, so it is ignored by default and must be run
    /// explicitly.
    #[tokio::test]
    #[ignore = "Integration test that requires network access"]
    async fn test_injector_core_integration() {
        // Create test request
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            masking::Secret::new("application/x-www-form-urlencoded".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            masking::Secret::new("Bearer Test".to_string()),
        );
        let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({
            "card_number": "TEST_123",
            "cvv": "123",
            "exp_month": "12",
            "exp_year": "25"
        }));
        let request = InjectorRequest {
            connector_payload: ConnectorPayload {
                template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(),
            },
            token_data: TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data,
            },
            connection_config: ConnectionConfig {
                endpoint: "https://api.stripe.com/v1/payment_intents".to_string(),
                http_method: HttpMethod::POST,
                headers,
                proxy_url: None, // Remove proxy that was causing issues
                backup_proxy_url: None,
                // Certificate fields (None for basic test)
                client_cert: None,
                client_key: None,
                ca_cert: None, // Empty CA cert for testing
                insecure: None,
                cert_password: None,
                cert_format: None,
                max_response_size: None, // Use default
            },
        };
        // Test the core function - this makes a real HTTP request to the
        // endpoint configured above (api.stripe.com)
        let result = injector_core(request).await;
        // The request should succeed (the endpoint should be reachable)
        if let Err(ref e) = result {
            logger::info!("Error: {e:?}");
        }
        assert!(
            result.is_ok(),
            "injector_core should succeed with valid request: {result:?}"
        );
        let response = result.unwrap();
        // Print the actual response for demonstration
        logger::info!("=== HTTP RESPONSE FROM CONNECTOR ===");
        logger::info!(
            "{}",
            serde_json::to_string_pretty(&response).unwrap_or_default()
        );
        logger::info!("=======================================");
        // Response should have a proper status code and response data
        assert!(
            response.status_code >= 200 && response.status_code < 300,
            "Response should have successful status code: {}",
            response.status_code
        );
        assert!(
            response.response.is_object() || response.response.is_string(),
            "Response data should be JSON object or string"
        );
    }
    /// Exercises the default certificate configuration (no certs, no insecure
    /// flag) against a live echo endpoint. Network-bound, so ignored by
    /// default like the integration test above.
    #[tokio::test]
    #[ignore = "Integration test that requires network access"]
    async fn test_certificate_configuration() {
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            masking::Secret::new("application/x-www-form-urlencoded".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            masking::Secret::new("Bearer TEST".to_string()),
        );
        let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({
            "card_number": "4242429789164242",
            "cvv": "123",
            "exp_month": "12",
            "exp_year": "25"
        }));
        // Build a request with every certificate-related field unset
        let request = InjectorRequest {
            connector_payload: ConnectorPayload {
                template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(),
            },
            token_data: TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data,
            },
            connection_config: ConnectionConfig {
                endpoint: "https://httpbin.org/post".to_string(),
                http_method: HttpMethod::POST,
                headers,
                proxy_url: None, // Remove proxy to make test work reliably
                backup_proxy_url: None,
                // Test without certificates for basic functionality
                client_cert: None,
                client_key: None,
                ca_cert: None,
                insecure: None,
                cert_password: None,
                cert_format: None,
                max_response_size: None,
            },
        };
        let result = injector_core(request).await;
        // Should succeed with the default (certificate-free) configuration
        assert!(
            result.is_ok(),
            "Certificate test should succeed: {result:?}"
        );
        let response = result.unwrap();
        // Print the actual response for demonstration
        logger::info!("=== CERTIFICATE TEST RESPONSE ===");
        logger::info!(
            "{}",
            serde_json::to_string_pretty(&response).unwrap_or_default()
        );
        logger::info!("================================");
        // Should succeed with proper status code
        assert!(
            response.status_code >= 200 && response.status_code < 300,
            "Certificate test should have successful status code: {}",
            response.status_code
        );
        // Verify the tokens were replaced correctly in the form data
        // httpbin.org returns the request data in the 'form' field
        let response_str = serde_json::to_string(&response.response).unwrap_or_default();
        // Check that our test tokens were replaced with the actual values from vault data
        let tokens_replaced = response_str.contains("4242429789164242") && // card_number
            response_str.contains("123") && // cvv
            response_str.contains("12/25"); // expiry
        assert!(
            tokens_replaced,
            "Response should contain replaced tokens (card_number, cvv, expiry): {}",
            serde_json::to_string_pretty(&response.response).unwrap_or_default()
        );
    }
}
// File: crates/injector/src/types.rs
pub mod models {
use std::collections::HashMap;
use async_trait::async_trait;
use common_utils::pii::SecretSerdeValue;
use masking::Secret;
use router_env::logger;
use serde::{Deserialize, Serialize};
// Enums for the injector - making it standalone
/// Content types supported by the injector for HTTP requests
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ContentType {
ApplicationJson,
ApplicationXWwwFormUrlencoded,
ApplicationXml,
TextXml,
TextPlain,
}
/// HTTP methods supported by the injector
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum HttpMethod {
GET,
POST,
PUT,
PATCH,
DELETE,
}
/// Vault connectors supported by the injector for token management
///
/// Currently supports VGS as the primary vault connector. While only VGS is
/// implemented today, this enum structure is maintained for future extensibility
/// to support additional vault providers (e.g., Basis Theory, Skyflow, etc.)
/// without breaking API compatibility.
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum VaultConnectors {
/// VGS (Very Good Security) vault connector
VGS,
}
    /// Token data containing vault-specific information for token replacement
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct TokenData {
        /// The specific token data retrieved from the vault, keyed by the field
        /// names referenced from the payload template (e.g. `{{$card_number}}`)
        pub specific_token_data: SecretSerdeValue,
        /// The type of vault connector being used (e.g., VGS)
        pub vault_connector: VaultConnectors,
    }
    /// Connector payload containing the template to be processed
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct ConnectorPayload {
        /// Template string containing token references in the format
        /// {{$field_name}}; each reference is replaced with the matching field
        /// from the vault token data before the request is sent
        pub template: String,
    }
    /// Configuration for HTTP connection to the external connector
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct ConnectionConfig {
        /// Complete URL endpoint for the connector (e.g., "https://api.stripe.com/v1/payment_intents")
        pub endpoint: String,
        /// HTTP method to use for the request
        pub http_method: HttpMethod,
        /// HTTP headers to include in the request (may include the external
        /// vault metadata header, which is processed at request time)
        pub headers: HashMap<String, Secret<String>>,
        /// Optional proxy URL for routing the request through a proxy server
        /// (populated from vault metadata when present)
        pub proxy_url: Option<Secret<String>>,
        /// Optional backup proxy URL to use if vault metadata doesn't provide one
        #[serde(default)]
        pub backup_proxy_url: Option<Secret<String>>,
        /// Optional client certificate for mutual TLS authentication
        pub client_cert: Option<Secret<String>>,
        /// Optional client private key for mutual TLS authentication
        pub client_key: Option<Secret<String>>,
        /// Optional CA certificate for verifying the server certificate
        /// (a CA certificate carried in vault metadata takes precedence)
        pub ca_cert: Option<Secret<String>>,
        /// Whether to skip certificate verification (for testing only)
        pub insecure: Option<bool>,
        /// Optional password for encrypted client certificate
        pub cert_password: Option<Secret<String>>,
        /// Format of the client certificate (e.g., "PEM")
        pub cert_format: Option<String>,
        /// Maximum response size in bytes (defaults to 10MB if not specified)
        pub max_response_size: Option<usize>,
    }
    /// Complete request structure for the injector service: what to send
    /// (template + vault tokens) and where/how to send it
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct InjectorRequest {
        /// Token data from the vault
        pub token_data: TokenData,
        /// Payload template to process
        pub connector_payload: ConnectorPayload,
        /// HTTP connection configuration
        pub connection_config: ConnectionConfig,
    }
    /// Response from the injector including status code and response data
    #[derive(Clone, Debug, Deserialize, Serialize)]
    pub struct InjectorResponse {
        /// HTTP status code from the connector response
        pub status_code: u16,
        /// Response headers from the connector (None when no string-valued
        /// headers were present)
        pub headers: Option<HashMap<String, String>>,
        /// Response body from the connector: parsed JSON when possible,
        /// otherwise the raw body as a JSON string
        pub response: serde_json::Value,
    }
    /// Trait for converting HTTP responses to InjectorResponse
    #[async_trait]
    pub trait IntoInjectorResponse {
        /// Convert to InjectorResponse with proper error handling.
        /// Consumes the receiver, since reading an HTTP body is a one-shot
        /// operation.
        async fn into_injector_response(
            self,
        ) -> Result<InjectorResponse, crate::injector::core::InjectorError>;
    }
    #[async_trait]
    impl IntoInjectorResponse for reqwest::Response {
        /// Converts a raw `reqwest` response into an [`InjectorResponse`]:
        /// captures the status code, copies string-representable headers, and
        /// parses the body as JSON when possible (falling back to a string).
        async fn into_injector_response(
            self,
        ) -> Result<InjectorResponse, crate::injector::core::InjectorError> {
            let status_code = self.status().as_u16();
            logger::info!(
                status_code = status_code,
                "Converting reqwest::Response to InjectorResponse"
            );
            // Extract headers; values that are not valid visible-ASCII strings
            // are silently skipped.
            // NOTE(review): collecting into a HashMap keeps only one value per
            // header name, so repeated headers (e.g. multiple Set-Cookie lines)
            // collapse to the last one — confirm downstream consumers accept this.
            let headers: Option<HashMap<String, String>> = {
                let header_map: HashMap<String, String> = self
                    .headers()
                    .iter()
                    .filter_map(|(name, value)| {
                        value
                            .to_str()
                            .ok()
                            .map(|v| (name.to_string(), v.to_string()))
                    })
                    .collect();
                if header_map.is_empty() {
                    None
                } else {
                    Some(header_map)
                }
            };
            // Reading the body consumes the response; any read failure is
            // surfaced as a generic HTTP failure.
            let response_text = self
                .text()
                .await
                .map_err(|_| crate::injector::core::InjectorError::HttpRequestFailed)?;
            logger::debug!(
                response_length = response_text.len(),
                headers_count = headers.as_ref().map(|h| h.len()).unwrap_or(0),
                "Processing connector response"
            );
            // Prefer structured JSON; otherwise preserve the raw body verbatim
            let response_data = match serde_json::from_str::<serde_json::Value>(&response_text) {
                Ok(json) => json,
                Err(_e) => serde_json::Value::String(response_text),
            };
            Ok(InjectorResponse {
                status_code,
                headers,
                response: response_data,
            })
        }
    }
    impl InjectorRequest {
        /// Creates a new InjectorRequest
        ///
        /// The supplied `proxy_url` is stored as `backup_proxy_url`: when the
        /// request is sent, proxy/CA settings from the vault metadata header
        /// (if present in `headers`) take precedence and the backup is only a
        /// fallback.
        ///
        /// NOTE(review): vault metadata is deliberately NOT applied here — the
        /// metadata header is kept inside `headers` so `make_http_request` can
        /// process it at request time.
        #[allow(clippy::too_many_arguments)]
        pub fn new(
            endpoint: String,
            http_method: HttpMethod,
            template: String,
            token_data: TokenData,
            headers: Option<HashMap<String, Secret<String>>>,
            proxy_url: Option<Secret<String>>,
            client_cert: Option<Secret<String>>,
            client_key: Option<Secret<String>>,
            ca_cert: Option<Secret<String>>,
        ) -> Self {
            // A missing header map behaves like an empty one
            let headers = headers.unwrap_or_default();
            let mut connection_config = ConnectionConfig::new(endpoint, http_method);
            // Keep vault metadata header for processing in make_http_request
            // Store backup proxy for make_http_request to use as fallback
            connection_config.backup_proxy_url = proxy_url;
            // ConnectionConfig::new leaves these None, so the explicit
            // parameters (when provided) win here
            connection_config.client_cert = connection_config.client_cert.or(client_cert);
            connection_config.client_key = connection_config.client_key.or(client_key);
            connection_config.ca_cert = connection_config.ca_cert.or(ca_cert);
            connection_config.headers = headers;
            Self {
                token_data,
                connector_payload: ConnectorPayload { template },
                connection_config,
            }
        }
    }
impl ConnectionConfig {
/// Creates a new ConnectionConfig from basic parameters
pub fn new(endpoint: String, http_method: HttpMethod) -> Self {
Self {
endpoint,
http_method,
headers: HashMap::new(),
proxy_url: None,
backup_proxy_url: None,
client_cert: None,
client_key: None,
ca_cert: None,
insecure: None,
cert_password: None,
cert_format: None,
max_response_size: None,
}
}
}
}
pub use models::*;
// File: crates/injector/src/lib.rs
pub mod consts;
pub mod injector;
pub mod types;
pub mod vault_metadata;
// Re-export all functionality
pub use consts::*;
pub use injector::*;
pub use types::*;
pub use vault_metadata::*;
// File: crates/injector/src/vault_metadata.rs
use std::collections::HashMap;
use base64::Engine;
use masking::{ExposeInterface, Secret};
use router_env::logger;
use url::Url;
use crate::{consts::EXTERNAL_VAULT_METADATA_HEADER, types::ConnectionConfig, VaultConnectors};
/// Base64 engine (standard alphabet, with padding) used throughout this module
/// for decoding vault metadata headers and certificates.
const BASE64_ENGINE: base64::engine::GeneralPurpose = base64::engine::general_purpose::STANDARD;
/// Trait for different vault metadata processors
pub trait VaultMetadataProcessor: Send + Sync {
    /// Process vault metadata and apply the resulting settings (proxy URL,
    /// CA certificate, …) onto the given connection configuration in place
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError>;
    /// Get the vault connector type this metadata belongs to
    fn vault_connector(&self) -> VaultConnectors;
}
/// Comprehensive errors related to vault metadata processing
#[derive(Debug, thiserror::Error)]
pub enum VaultMetadataError {
    /// The header value was not valid base64
    #[error("Failed to decode base64 vault metadata: {0}")]
    Base64DecodingFailed(String),
    /// The decoded payload was not valid metadata JSON (or was too large)
    #[error("Failed to parse vault metadata JSON: {0}")]
    JsonParsingFailed(String),
    /// The metadata referenced a vault connector this build does not support
    #[error("Unsupported vault connector: {0}")]
    UnsupportedVaultConnector(String),
    /// A URL embedded in the metadata could not be parsed
    #[error("Invalid URL in vault metadata: {0}")]
    InvalidUrl(String),
    /// A required metadata field was absent
    #[error("Missing required field in vault metadata: {0}")]
    MissingRequiredField(String),
    /// The certificate was in an unexpected format
    #[error("Invalid certificate format: {0}")]
    InvalidCertificateFormat(String),
    /// The metadata header was present but blank or decoded to nothing
    #[error("Vault metadata header is empty or malformed")]
    EmptyOrMalformedHeader,
    /// A URL failed validation; carries the offending field, value and reason
    #[error("URL validation failed for {field}: {url} - {reason}")]
    UrlValidationFailed {
        field: String,
        url: String,
        reason: String,
    },
    /// Certificate content failed validation (e.g. bad base64 or non-UTF-8)
    #[error("Certificate validation failed: {0}")]
    CertificateValidationFailed(String),
    /// Catch-all for connector-specific processing failures
    #[error("Vault metadata processing failed for connector {connector}: {reason}")]
    ProcessingFailed { connector: String, reason: String },
}
impl VaultMetadataError {
    /// Convenience constructor for [`Self::UrlValidationFailed`] that owns its
    /// context strings.
    pub fn url_validation_failed(field: &str, url: &str, reason: impl Into<String>) -> Self {
        let (field, url) = (field.to_owned(), url.to_owned());
        Self::UrlValidationFailed {
            field,
            url,
            reason: reason.into(),
        }
    }
}
/// External vault proxy metadata (moved from external_services)
///
/// NOTE(review): `#[serde(untagged)]` with a single variant means any JSON
/// matching `VgsMetadata`'s shape deserializes successfully; future variants
/// will need distinguishable shapes or an explicit tag — confirm when adding.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(untagged)]
pub enum ExternalVaultProxyMetadata {
    /// VGS proxy data variant
    VgsMetadata(VgsMetadata),
}
/// VGS proxy data (moved from external_services)
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct VgsMetadata {
    /// External vault url that traffic should be proxied through
    pub proxy_url: Url,
    /// CA certificates to verify the vault server; accepted either as raw PEM
    /// or base64-encoded PEM (detected at processing time)
    pub certificate: Secret<String>,
}
impl VaultMetadataProcessor for VgsMetadata {
    /// Applies VGS metadata to the connection config: routes traffic through
    /// the VGS proxy and installs the bundled CA certificate.
    ///
    /// # Errors
    /// Returns [`VaultMetadataError::CertificateValidationFailed`] when the
    /// certificate is neither raw PEM nor valid base64-encoded UTF-8.
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        // Set proxy URL from VGS metadata (no extra clone needed — the string
        // is consumed here)
        let proxy_url_str = self.proxy_url.as_str().to_string();
        connection_config.proxy_url = Some(Secret::new(proxy_url_str));
        // Validate and decode certificate from VGS metadata.
        // The certificate may arrive as raw PEM or base64-encoded PEM; detect
        // by the PEM preamble and decode when necessary.
        let cert_content = self.certificate.clone().expose();
        let decoded_cert = if cert_content.starts_with("-----BEGIN") {
            cert_content
        } else {
            match BASE64_ENGINE.decode(&cert_content) {
                Ok(decoded_bytes) => String::from_utf8(decoded_bytes).map_err(|e| {
                    VaultMetadataError::CertificateValidationFailed(format!(
                        "Certificate is not valid UTF-8 after base64 decoding: {e}"
                    ))
                })?,
                Err(e) => {
                    logger::error!(
                        error = %e,
                        "Failed to decode base64 certificate"
                    );
                    return Err(VaultMetadataError::CertificateValidationFailed(format!(
                        "Failed to decode base64 certificate: {e}"
                    )));
                }
            }
        };
        // Consume the decoded certificate directly (the previous clone here
        // was redundant — the value is not used afterwards)
        connection_config.ca_cert = Some(Secret::new(decoded_cert));
        Ok(())
    }
    fn vault_connector(&self) -> VaultConnectors {
        VaultConnectors::VGS
    }
}
impl VaultMetadataProcessor for ExternalVaultProxyMetadata {
    /// Delegates processing to the concrete vault variant (currently VGS only)
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        match self {
            Self::VgsMetadata(vgs_metadata) => vgs_metadata.process_metadata(connection_config),
        }
    }
    /// Reports which vault connector the wrapped metadata belongs to
    fn vault_connector(&self) -> VaultConnectors {
        match self {
            Self::VgsMetadata(vgs_metadata) => vgs_metadata.vault_connector(),
        }
    }
}
/// Factory for creating vault metadata processors from different sources
/// (currently only the base64-encoded HTTP header form)
pub struct VaultMetadataFactory;
impl VaultMetadataFactory {
    /// Create a vault metadata processor from base64 encoded header value with
    /// comprehensive validation.
    ///
    /// # Errors
    /// - [`VaultMetadataError::EmptyOrMalformedHeader`] for blank input or an
    ///   empty decoded payload
    /// - [`VaultMetadataError::Base64DecodingFailed`] for invalid base64
    /// - [`VaultMetadataError::JsonParsingFailed`] for oversized payloads or
    ///   structurally invalid JSON
    pub fn from_base64_header(
        base64_value: &str,
    ) -> Result<Box<dyn VaultMetadataProcessor>, VaultMetadataError> {
        // Upper bound on the decoded metadata payload, to reject abusive input
        const MAX_DECODED_METADATA_BYTES: usize = 1_000_000;
        // Validate input
        if base64_value.trim().is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        // Decode base64 with detailed error context
        let decoded_bytes = BASE64_ENGINE.decode(base64_value.trim()).map_err(|e| {
            logger::error!(
                error = %e,
                "Failed to decode base64 vault metadata header"
            );
            VaultMetadataError::Base64DecodingFailed(format!("Invalid base64 encoding: {e}"))
        })?;
        // Validate decoded size
        if decoded_bytes.is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        if decoded_bytes.len() > MAX_DECODED_METADATA_BYTES {
            return Err(VaultMetadataError::JsonParsingFailed(
                "Decoded vault metadata is too large (>1MB)".to_string(),
            ));
        }
        // Parse JSON with detailed error context
        let metadata: ExternalVaultProxyMetadata =
            serde_json::from_slice(&decoded_bytes).map_err(|e| {
                logger::error!(
                    error = %e,
                    "Failed to parse vault metadata JSON"
                );
                VaultMetadataError::JsonParsingFailed(format!("Invalid JSON structure: {e}"))
            })?;
        logger::info!(
            vault_connector = ?metadata.vault_connector(),
            "Successfully parsed vault metadata from header"
        );
        Ok(Box::new(metadata))
    }
}
/// Trait for extracting vault metadata from various sources
pub trait VaultMetadataExtractor {
    /// Extract vault metadata from headers and apply it to the connection
    /// config in place; succeeds without changes when the header is absent
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError>;
}
impl VaultMetadataExtractor for ConnectionConfig {
    /// Looks for the external vault metadata header and, when present, decodes
    /// it and applies the resulting proxy/certificate settings to `self`.
    /// Absence of the header is not an error — the config is left untouched.
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError> {
        if let Some(vault_metadata_header) = headers.get(EXTERNAL_VAULT_METADATA_HEADER) {
            // Decode + parse the header into a connector-specific processor
            let processor =
                VaultMetadataFactory::from_base64_header(&vault_metadata_header.clone().expose())
                    .map_err(|e| {
                        logger::error!(
                            error = %e,
                            "Failed to create vault metadata processor from header"
                        );
                        e
                    })?;
            // Apply proxy/CA settings from the metadata onto this config
            processor.process_metadata(self).map_err(|e| {
                logger::error!(
                    error = %e,
                    vault_connector = ?processor.vault_connector(),
                    "Failed to apply vault metadata to connection config"
                );
                e
            })?;
            logger::info!(
                vault_connector = ?processor.vault_connector(),
                proxy_url_applied = self.proxy_url.is_some(),
                ca_cert_applied = self.ca_cert.is_some(),
                client_cert_applied = self.client_cert.is_some(),
                "Successfully applied vault metadata to connection configuration"
            );
        }
        Ok(())
    }
}
/// Extended trait for graceful fallback handling
pub trait VaultMetadataExtractorExt {
    /// Extract vault metadata with graceful fallback (doesn't fail the entire
    /// request); returns `true` when processing succeeded
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool;
    /// Extract vault metadata from a single header value with graceful
    /// fallback; returns `true` when processing succeeded
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool;
}
impl VaultMetadataExtractorExt for ConnectionConfig {
    /// Attempts to apply vault metadata from `headers`; on failure it logs a
    /// warning and reports `false` instead of propagating the error.
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool {
        let outcome = self.extract_and_apply_vault_metadata(headers);
        match outcome {
            Ok(()) => {
                logger::info!(
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    client_cert_set = self.client_cert.is_some(),
                    "Vault metadata processing completed successfully"
                );
                true
            }
            Err(error) => {
                logger::warn!(
                    error = %error,
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    "Vault metadata processing failed, continuing without vault configuration"
                );
                false
            }
        }
    }
    /// Wraps a single raw header value in a one-entry header map and delegates
    /// to the map-based fallback method above.
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool {
        let single_header = HashMap::from([(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(header_value.to_string()),
        )]);
        self.extract_and_apply_vault_metadata_with_fallback(&single_header)
    }
}
#[cfg(test)]
#[allow(clippy::expect_used)]
mod tests {
    use std::collections::HashMap;
    use base64::Engine;
    use common_utils::pii::SecretSerdeValue;
    use super::*;
    use crate::types::{HttpMethod, InjectorRequest, TokenData, VaultConnectors};
    /// `InjectorRequest::new` defers vault metadata processing to request time:
    /// the metadata header is preserved in the connection config and no proxy
    /// is applied at construction. Applying the metadata explicitly populates
    /// proxy/CA settings.
    #[test]
    fn test_vault_metadata_processing() {
        // Create test VGS metadata with a plain-text certificate
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);
        // Serialize and base64 encode (as it would come from the header)
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);
        // Create headers with vault metadata
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            Secret::new("application/json".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            Secret::new("Bearer token123".to_string()),
        );
        headers.insert(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(base64_metadata),
        );
        let injector_request = InjectorRequest::new(
            "https://api.example.com/v1/payments".to_string(),
            HttpMethod::POST,
            "amount={{$amount}}&currency={{$currency}}".to_string(),
            TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data: SecretSerdeValue::new(serde_json::json!({
                    "amount": "1000",
                    "currency": "USD"
                })),
            },
            Some(headers),
            None, // No fallback proxy
            None, // No fallback client cert
            None, // No fallback client key
            None, // No fallback CA cert
        );
        // The vault metadata header is kept for request-time processing and
        // no proxy is applied during construction
        assert!(injector_request
            .connection_config
            .headers
            .contains_key(EXTERNAL_VAULT_METADATA_HEADER));
        assert!(injector_request.connection_config.proxy_url.is_none());
        // Other headers are preserved alongside the metadata header
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Content-Type"));
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Authorization"));
        // Explicitly applying the metadata populates proxy and CA settings
        let mut connection_config = injector_request.connection_config.clone();
        let header_snapshot = connection_config.headers.clone();
        assert!(connection_config.extract_and_apply_vault_metadata_with_fallback(&header_snapshot));
        assert!(connection_config.ca_cert.is_some());
        assert_eq!(
            connection_config
                .proxy_url
                .as_ref()
                .expect("Proxy URL should be set")
                .clone()
                .expose(),
            "https://vgs-proxy.example.com:8443/"
        );
    }
    #[test]
    fn test_vault_metadata_factory() {
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);
        // Factory should decode, parse, and identify the VGS connector
        let processor = VaultMetadataFactory::from_base64_header(&base64_metadata)
            .expect("Base64 decoding should succeed");
        assert_eq!(processor.vault_connector(), VaultConnectors::VGS);
    }
}
</module>
|
{
"crate": "injector",
"file": null,
"files": [
"crates/injector/src/consts.rs",
"crates/injector/src/injector.rs",
"crates/injector/src/types.rs",
"crates/injector/src/lib.rs",
"crates/injector/src/vault_metadata.rs"
],
"module": "crates/injector/src",
"num_files": 5,
"token_count": 11627
}
|
module_3129485083144844556
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payments
Files: 6
</path>
<module>
// File: crates/analytics/src/payments/core.rs
#![allow(dead_code)]
use std::collections::{HashMap, HashSet};
use api_models::analytics::{
payments::{
MetricsBucketResponse, PaymentDimensions, PaymentDistributions, PaymentMetrics,
PaymentMetricsBucketIdentifier,
},
FilterValue, GetPaymentFiltersRequest, GetPaymentMetricRequest, PaymentFiltersResponse,
PaymentsAnalyticsMetadata, PaymentsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::errors::CustomResult;
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
instrument, logger,
tracing::{self, Instrument},
};
use super::{
distribution::PaymentDistributionRow,
filters::{get_payment_filter_for_dimension, PaymentFilterRow},
metrics::PaymentMetricRow,
PaymentMetricsAccumulator,
};
use crate::{
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics,
payments::{PaymentDistributionAccumulator, PaymentMetricAccumulator},
AnalyticsProvider,
};
/// Result payload produced by one spawned analytics query task.
///
/// Each spawned task resolves to either a metric result set or a distribution
/// result set, tagged with the metric/distribution it was computed for so the
/// collection loop can route it to the right accumulator.
#[derive(Debug)]
pub enum TaskType {
    /// Outcome of a payment-metric query: the requested metric plus the set of
    /// (bucket identifier, metric row) pairs, or the query error
    MetricTask(
        PaymentMetrics,
        CustomResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>, AnalyticsError>,
    ),
    /// Outcome of a payment-distribution query: the requested distribution plus
    /// the list of (bucket identifier, distribution row) pairs, or the error
    DistributionTask(
        PaymentDistributions,
        CustomResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>, AnalyticsError>,
    ),
}
/// Runs all requested payment metric queries (plus an optional distribution
/// query) concurrently and folds the resulting rows into a single response.
///
/// One task per requested metric is spawned on a `tokio::task::JoinSet`; as
/// tasks complete, their per-bucket rows are routed into per-bucket
/// `PaymentMetricsAccumulator`s keyed by `PaymentMetricsBucketIdentifier`.
/// After accumulation, grand totals are computed, and processed amounts /
/// debit-routing savings are additionally converted to USD when exchange
/// rates are supplied.
///
/// # Errors
/// Returns `AnalyticsError::UnknownError` when a spawned task panics/aborts
/// or when any underlying query fails.
#[instrument(skip_all)]
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    ex_rates: &Option<ExchangeRates>,
    auth: &AuthInfo,
    req: GetPaymentMetricRequest,
) -> AnalyticsResult<PaymentsMetricsResponse<MetricsBucketResponse>> {
    // One accumulator per bucket identifier (i.e. per group-by combination).
    let mut metrics_accumulator: HashMap<
        PaymentMetricsBucketIdentifier,
        PaymentMetricsAccumulator,
    > = HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    // Spawn one concurrent query task per requested metric.
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_payments_metrics_query",
            payment_metric = metric_type.as_ref()
        );
        // TODO: lifetime issues with joinset,
        // can be optimized away if joinset lifetime requirements are relaxed
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_payment_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::MetricTask(metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Spawn at most one distribution query task, if the request asked for one.
    if let Some(distribution) = req.clone().distribution {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_payments_distribution_query",
            payment_distribution = distribution.distribution_for.as_ref()
        );
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_payment_distribution(
                        &distribution,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::DistributionTask(distribution.distribution_for, data)
            }
            .instrument(task_span),
        );
    }
    // Drain finished tasks in completion order and fold their rows into the
    // per-bucket accumulators. `transpose` surfaces join errors (panics) as
    // `Err` before the query result itself is inspected.
    while let Some(task_type) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        match task_type {
            TaskType::MetricTask(metric, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("metric_type", metric.to_string()),
                    ("source", pool.to_string()),
                );
                // Record bucket count telemetry (best effort; skipped if the
                // length doesn't fit in u64, which cannot happen in practice).
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    // Route the row to the accumulator field for this metric;
                    // sessionized and plain variants share the same field.
                    match metric {
                        PaymentMetrics::PaymentSuccessRate
                        | PaymentMetrics::SessionizedPaymentSuccessRate => metrics_builder
                            .payment_success_rate
                            .add_metrics_bucket(&value),
                        PaymentMetrics::PaymentCount | PaymentMetrics::SessionizedPaymentCount => {
                            metrics_builder.payment_count.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::PaymentSuccessCount
                        | PaymentMetrics::SessionizedPaymentSuccessCount => {
                            metrics_builder.payment_success.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::PaymentProcessedAmount
                        | PaymentMetrics::SessionizedPaymentProcessedAmount => {
                            metrics_builder.processed_amount.add_metrics_bucket(&value)
                        }
                        PaymentMetrics::AvgTicketSize
                        | PaymentMetrics::SessionizedAvgTicketSize => {
                            metrics_builder.avg_ticket_size.add_metrics_bucket(&value)
                        }
                        // Retries feed two accumulators: the count and the
                        // retried amount processed.
                        PaymentMetrics::RetriesCount | PaymentMetrics::SessionizedRetriesCount => {
                            metrics_builder.retries_count.add_metrics_bucket(&value);
                            metrics_builder
                                .retries_amount_processed
                                .add_metrics_bucket(&value)
                        }
                        PaymentMetrics::ConnectorSuccessRate
                        | PaymentMetrics::SessionizedConnectorSuccessRate => {
                            metrics_builder
                                .connector_success_rate
                                .add_metrics_bucket(&value);
                        }
                        PaymentMetrics::DebitRouting | PaymentMetrics::SessionizedDebitRouting => {
                            metrics_builder.debit_routing.add_metrics_bucket(&value);
                        }
                        PaymentMetrics::PaymentsDistribution => {
                            metrics_builder
                                .payments_distribution
                                .add_metrics_bucket(&value);
                        }
                        PaymentMetrics::FailureReasons => {
                            metrics_builder
                                .failure_reasons_distribution
                                .add_metrics_bucket(&value);
                        }
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: metric: {}, results: {:#?}",
                    metric,
                    metrics_accumulator
                );
            }
            TaskType::DistributionTask(distribution, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("distribution_type", distribution.to_string()),
                    ("source", pool.to_string()),
                );
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}");
                    let metrics_accumulator = metrics_accumulator.entry(id).or_default();
                    match distribution {
                        PaymentDistributions::PaymentErrorMessage => metrics_accumulator
                            .payment_error_message
                            .add_distribution_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: distribution: {}, results: {:#?}",
                    distribution,
                    metrics_accumulator
                );
            }
        }
    }
    // Grand totals across all buckets; filled while collecting below.
    let mut total_payment_processed_amount = 0;
    let mut total_payment_processed_count = 0;
    let mut total_payment_processed_amount_without_smart_retries = 0;
    let mut total_payment_processed_count_without_smart_retries = 0;
    let mut total_failure_reasons_count = 0;
    let mut total_failure_reasons_count_without_smart_retries = 0;
    let mut total_payment_processed_amount_in_usd = 0;
    let mut total_payment_processed_amount_without_smart_retries_usd = 0;
    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let mut collected_values = val.collect();
            // Convert the processed amount to USD when exchange rates are
            // available; any conversion failure is logged and yields `None`.
            if let Some(amount) = collected_values.payment_processed_amount {
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        // NOTE(review): the x100 presumably rescales the
                        // converted value into minor (cent) units — confirm
                        // against `convert`'s contract.
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.payment_processed_amount_in_usd = amount_in_usd;
                total_payment_processed_amount += amount;
                total_payment_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.payment_processed_count {
                total_payment_processed_count += count;
            }
            // Same conversion, restricted to first-attempt (no smart retry)
            // amounts.
            if let Some(amount) = collected_values.payment_processed_amount_without_smart_retries {
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.payment_processed_amount_without_smart_retries_usd = amount_in_usd;
                total_payment_processed_amount_without_smart_retries += amount;
                total_payment_processed_amount_without_smart_retries_usd +=
                    amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.payment_processed_count_without_smart_retries {
                total_payment_processed_count_without_smart_retries += count;
            }
            if let Some(count) = collected_values.failure_reason_count {
                total_failure_reasons_count += count;
            }
            if let Some(count) = collected_values.failure_reason_count_without_smart_retries {
                total_failure_reasons_count_without_smart_retries += count;
            }
            // Debit-routing savings are converted per bucket only; there is no
            // corresponding grand total.
            if let Some(savings) = collected_values.debit_routing_savings {
                let savings_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(savings)
                                .inspect_err(|e| {
                                    logger::error!(
                                        "Debit Routing savings conversion error: {:?}",
                                        e
                                    )
                                })
                                .ok()
                                .and_then(|savings_i64| {
                                    convert(ex_rates, currency, Currency::USD, savings_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|savings| (savings * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.debit_routing_savings_in_usd = savings_in_usd;
            }
            MetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    // USD totals are only meaningful when exchange rates were supplied.
    Ok(PaymentsMetricsResponse {
        query_data,
        meta_data: [PaymentsAnalyticsMetadata {
            total_payment_processed_amount: Some(total_payment_processed_amount),
            total_payment_processed_amount_in_usd: if ex_rates.is_some() {
                Some(total_payment_processed_amount_in_usd)
            } else {
                None
            },
            total_payment_processed_amount_without_smart_retries: Some(
                total_payment_processed_amount_without_smart_retries,
            ),
            total_payment_processed_amount_without_smart_retries_usd: if ex_rates.is_some() {
                Some(total_payment_processed_amount_without_smart_retries_usd)
            } else {
                None
            },
            total_payment_processed_count: Some(total_payment_processed_count),
            total_payment_processed_count_without_smart_retries: Some(
                total_payment_processed_count_without_smart_retries,
            ),
            total_failure_reasons_count: Some(total_failure_reasons_count),
            total_failure_reasons_count_without_smart_retries: Some(
                total_failure_reasons_count_without_smart_retries,
            ),
        }],
    })
}
/// Fetches, for every requested dimension, the distinct values available for
/// filtering payments within the given time range and auth scope.
///
/// For the combined providers both backing stores are queried: a result
/// mismatch is logged, and the provider's primary source wins (ClickHouse for
/// `CombinedCkh`, Postgres for `CombinedSqlx`).
///
/// # Errors
/// Returns `AnalyticsError::UnknownError` when the underlying filter query
/// fails.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetPaymentFiltersRequest,
    auth: &AuthInfo,
) -> AnalyticsResult<PaymentFiltersResponse> {
    let mut res = PaymentFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_payment_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_payment_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_poll, ckh_pool) => {
                let ckh_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_poll,
                )
                .await;
                // Cross-check the two stores; ClickHouse is authoritative here.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_poll, ckh_pool) => {
                let ckh_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_payment_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_poll,
                )
                .await;
                // Cross-check the two stores; Postgres is authoritative here.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payments analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project each row onto the single column named by the dimension,
        // dropping rows where that column is NULL.
        .filter_map(|fil: PaymentFilterRow| match dim {
            PaymentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
            PaymentDimensions::PaymentStatus => fil.status.map(|i| i.as_ref().to_string()),
            PaymentDimensions::Connector => fil.connector,
            PaymentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()),
            PaymentDimensions::PaymentMethod => fil.payment_method,
            PaymentDimensions::PaymentMethodType => fil.payment_method_type,
            PaymentDimensions::ClientSource => fil.client_source,
            PaymentDimensions::ClientVersion => fil.client_version,
            PaymentDimensions::ProfileId => fil.profile_id,
            PaymentDimensions::CardNetwork => fil.card_network,
            PaymentDimensions::MerchantId => fil.merchant_id,
            PaymentDimensions::CardLast4 => fil.card_last_4,
            PaymentDimensions::CardIssuer => fil.card_issuer,
            PaymentDimensions::ErrorReason => fil.error_reason,
            PaymentDimensions::RoutingApproach => fil.routing_approach.map(|i| i.as_ref().to_string()),
            PaymentDimensions::SignatureNetwork => fil.signature_network,
            PaymentDimensions::IsIssuerRegulated => fil.is_issuer_regulated.map(|b| b.to_string()),
            PaymentDimensions::IsDebitRouted => fil.is_debit_routed.map(|b| b.to_string())
        })
        .collect::<Vec<String>>();
        res.query_data.push(FilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
// File: crates/analytics/src/payments/distribution.rs
use api_models::analytics::{
payments::{
PaymentDimensions, PaymentDistributions, PaymentFilters, PaymentMetricsBucketIdentifier,
},
Granularity, PaymentDistributionBody, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
mod payment_error_message;
use payment_error_message::PaymentErrorMessage;
/// One result row of a payment distribution query.
///
/// Every dimension column is optional: a column is populated only when the
/// query actually grouped by that dimension.
#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
pub struct PaymentDistributionRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub status: Option<DBEnumWrapper<storage_enums::AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    // `true` when the row covers first payment attempts (no retries).
    pub first_attempt: Option<bool>,
    // Aggregated amount for the bucket.
    pub total: Option<bigdecimal::BigDecimal>,
    // Row count for the bucket.
    pub count: Option<i64>,
    // The value being distributed over (e.g. the connector error message).
    pub error_message: Option<String>,
    pub routing_approach: Option<DBEnumWrapper<storage_enums::RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
    // Time-bucket boundaries, present when a granularity was requested.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: a data source that can deserialize [`PaymentDistributionRow`]s.
pub trait PaymentDistributionAnalytics: LoadRow<PaymentDistributionRow> {}
/// A payment distribution that can be loaded from a backing data source `T`.
#[async_trait::async_trait]
pub trait PaymentDistribution<T>
where
    T: AnalyticsDataSource + PaymentDistributionAnalytics,
{
    /// Executes the distribution query against `pool`, grouped by `dimensions`
    /// and restricted by `auth`, `filters` and `time_range`, returning one row
    /// per bucket identifier.
    #[allow(clippy::too_many_arguments)]
    async fn load_distribution(
        &self,
        distribution: &PaymentDistributionBody,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>>;
}
/// Dispatches each [`PaymentDistributions`] variant to its concrete query
/// implementation. The match is exhaustive so adding a new variant is a
/// compile-time reminder to wire up its implementation here.
#[async_trait::async_trait]
impl<T> PaymentDistribution<T> for PaymentDistributions
where
    T: AnalyticsDataSource + PaymentDistributionAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_distribution(
        &self,
        distribution: &PaymentDistributionBody,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(PaymentMetricsBucketIdentifier, PaymentDistributionRow)>> {
        match self {
            Self::PaymentErrorMessage => {
                PaymentErrorMessage
                    .load_distribution(
                        distribution,
                        dimensions,
                        auth,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
        }
    }
}
// File: crates/analytics/src/payments/types.rs
use api_models::analytics::payments::{PaymentDimensions, PaymentFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
/// Translates a [`PaymentFilters`] request into SQL `IN (...)` clauses on the
/// query builder. Each filter field is applied only when non-empty, so an
/// empty field means "no restriction" for that dimension.
impl<T> QueryFilter<T> for PaymentFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.currency.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::Currency, &self.currency)
                .attach_printable("Error adding currency filter")?;
        }
        if !self.status.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::PaymentStatus, &self.status)
                .attach_printable("Error adding payment status filter")?;
        }
        if !self.connector.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::Connector, &self.connector)
                .attach_printable("Error adding connector filter")?;
        }
        if !self.auth_type.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::AuthType, &self.auth_type)
                .attach_printable("Error adding auth type filter")?;
        }
        if !self.payment_method.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::PaymentMethod, &self.payment_method)
                .attach_printable("Error adding payment method filter")?;
        }
        if !self.payment_method_type.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::PaymentMethodType,
                    &self.payment_method_type,
                )
                .attach_printable("Error adding payment method type filter")?;
        }
        if !self.client_source.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ClientSource, &self.client_source)
                .attach_printable("Error adding client source filter")?;
        }
        if !self.client_version.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ClientVersion, &self.client_version)
                .attach_printable("Error adding client version filter")?;
        }
        if !self.profile_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ProfileId, &self.profile_id)
                .attach_printable("Error adding profile id filter")?;
        }
        if !self.card_network.is_empty() {
            // Card network values may be stored either raw or JSON-quoted
            // (and upper-cased), so match all three spellings of each value.
            let card_networks: Vec<String> = self
                .card_network
                .iter()
                .flat_map(|cn| {
                    [
                        format!("\"{cn}\""),
                        cn.to_string(),
                        format!("\"{cn}\"").to_uppercase(),
                    ]
                })
                .collect();
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::CardNetwork,
                    card_networks.as_slice(),
                )
                .attach_printable("Error adding card network filter")?;
        }
        if !self.merchant_id.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::MerchantId, &self.merchant_id)
                .attach_printable("Error adding merchant id filter")?;
        }
        if !self.card_last_4.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::CardLast4, &self.card_last_4)
                .attach_printable("Error adding card last 4 filter")?;
        }
        if !self.card_issuer.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::CardIssuer, &self.card_issuer)
                .attach_printable("Error adding card issuer filter")?;
        }
        if !self.error_reason.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::ErrorReason, &self.error_reason)
                .attach_printable("Error adding error reason filter")?;
        }
        if !self.first_attempt.is_empty() {
            // `first_attempt` is not a `PaymentDimensions` variant, so the raw
            // column name is used here.
            builder
                .add_filter_in_range_clause("first_attempt", &self.first_attempt)
                .attach_printable("Error adding first attempt filter")?;
        }
        if !self.routing_approach.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::RoutingApproach,
                    &self.routing_approach,
                )
                .attach_printable("Error adding routing approach filter")?;
        }
        if !self.signature_network.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::SignatureNetwork,
                    &self.signature_network,
                )
                .attach_printable("Error adding signature network filter")?;
        }
        if !self.is_issuer_regulated.is_empty() {
            builder
                .add_filter_in_range_clause(
                    PaymentDimensions::IsIssuerRegulated,
                    &self.is_issuer_regulated,
                )
                .attach_printable("Error adding is issuer regulated filter")?;
        }
        if !self.is_debit_routed.is_empty() {
            builder
                .add_filter_in_range_clause(PaymentDimensions::IsDebitRouted, &self.is_debit_routed)
                .attach_printable("Error adding is debit routed filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/payments/metrics.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetrics, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
mod avg_ticket_size;
mod connector_success_rate;
mod debit_routing;
mod payment_count;
mod payment_processed_amount;
mod payment_success_count;
mod retries_count;
mod sessionized_metrics;
mod success_rate;
use avg_ticket_size::AvgTicketSize;
use connector_success_rate::ConnectorSuccessRate;
use debit_routing::DebitRouting;
use payment_count::PaymentCount;
use payment_processed_amount::PaymentProcessedAmount;
use payment_success_count::PaymentSuccessCount;
use success_rate::PaymentSuccessRate;
use self::retries_count::RetriesCount;
/// One result row of a payment metric query.
///
/// Every dimension column is optional: a column is populated only when the
/// query actually grouped by that dimension.
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct PaymentMetricRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub status: Option<DBEnumWrapper<storage_enums::AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    // `true` when the row covers first payment attempts (no retries).
    pub first_attempt: Option<bool>,
    // Aggregated amount for the bucket.
    pub total: Option<bigdecimal::BigDecimal>,
    // Row count for the bucket.
    pub count: Option<i64>,
    pub routing_approach: Option<DBEnumWrapper<storage_enums::RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
    // Time-bucket boundaries, present when a granularity was requested.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: a data source that can deserialize [`PaymentMetricRow`]s.
pub trait PaymentMetricAnalytics: LoadRow<PaymentMetricRow> {}
/// A payment metric that can be loaded from a backing data source `T`.
#[async_trait::async_trait]
pub trait PaymentMetric<T>
where
    T: AnalyticsDataSource + PaymentMetricAnalytics,
{
    /// Executes the metric query against `pool`, grouped by `dimensions` and
    /// restricted by `auth`, `filters` and `time_range`, returning one row per
    /// bucket identifier.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>>;
}
/// Dispatches each [`PaymentMetrics`] variant to its concrete query
/// implementation. `Sessionized*` variants (and `PaymentsDistribution` /
/// `FailureReasons`) resolve to the `sessionized_metrics` implementations.
/// The match is exhaustive so a new variant is a compile-time reminder to
/// wire up its implementation here.
#[async_trait::async_trait]
impl<T> PaymentMetric<T> for PaymentMetrics
where
    T: AnalyticsDataSource + PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        match self {
            Self::PaymentSuccessRate => {
                PaymentSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentCount => {
                PaymentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentSuccessCount => {
                PaymentSuccessCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentProcessedAmount => {
                PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::AvgTicketSize => {
                AvgTicketSize
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::RetriesCount => {
                RetriesCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::ConnectorSuccessRate => {
                ConnectorSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::DebitRouting => {
                DebitRouting
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentSuccessRate => {
                sessionized_metrics::PaymentSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentCount => {
                sessionized_metrics::PaymentCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentSuccessCount => {
                sessionized_metrics::PaymentSuccessCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedPaymentProcessedAmount => {
                sessionized_metrics::PaymentProcessedAmount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedAvgTicketSize => {
                sessionized_metrics::AvgTicketSize
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRetriesCount => {
                sessionized_metrics::RetriesCount
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedConnectorSuccessRate => {
                sessionized_metrics::ConnectorSuccessRate
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::PaymentsDistribution => {
                sessionized_metrics::PaymentsDistribution
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::FailureReasons => {
                sessionized_metrics::FailureReasons
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedDebitRouting => {
                sessionized_metrics::DebitRouting
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
        }
    }
}
// File: crates/analytics/src/payments/filters.rs
use api_models::analytics::{payments::PaymentDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{AttemptStatus, AuthenticationType, Currency, RoutingApproach};
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait: a data source that can deserialize [`PaymentFilterRow`]s.
pub trait PaymentFilterAnalytics: LoadRow<PaymentFilterRow> {}
/// Returns the distinct values observed for a single payment `dimension`,
/// scoped to the caller's `auth` context and the requested `time_range`.
///
/// Effectively builds and runs:
/// `SELECT DISTINCT <dimension> FROM payments WHERE <time range> AND <auth scope>`.
///
/// # Errors
/// Returns `FiltersError::QueryBuildingError` when the query cannot be built
/// and `FiltersError::QueryExecutionFailure` when execution fails.
pub async fn get_payment_filter_for_dimension<T>(
    dimension: PaymentDimensions,
    auth: &AuthInfo,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<PaymentFilterRow>>
where
    T: AnalyticsDataSource + PaymentFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
    // Select only the requested dimension column.
    builder.add_select_column(dimension).switch()?;
    // Restrict to the requested window and to what the caller may see.
    time_range
        .set_filter_clause(&mut builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    auth.set_filter_clause(&mut builder).switch()?;
    // Deduplicate values server-side.
    builder.set_distinct();
    builder
        .execute_query::<PaymentFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row of a filter-values query: exactly one column is expected to be
/// populated — the one matching the dimension that was selected — but every
/// column is optional so the same row type serves all dimensions.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct PaymentFilterRow {
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub status: Option<DBEnumWrapper<AttemptStatus>>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub client_source: Option<String>,
    pub client_version: Option<String>,
    pub profile_id: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub first_attempt: Option<bool>,
    pub routing_approach: Option<DBEnumWrapper<RoutingApproach>>,
    pub signature_network: Option<String>,
    pub is_issuer_regulated: Option<bool>,
    pub is_debit_routed: Option<bool>,
}
// File: crates/analytics/src/payments/accumulator.rs
use api_models::analytics::payments::{ErrorResult, PaymentMetricsBucketValue};
use bigdecimal::ToPrimitive;
use diesel_models::enums as storage_enums;
use router_env::logger;
use super::{distribution::PaymentDistributionRow, metrics::PaymentMetricRow};
/// Per-bucket aggregation state: one field per payment metric family, each
/// folding the rows that `get_metrics` routes to it.
#[derive(Debug, Default)]
pub struct PaymentMetricsAccumulator {
    pub payment_success_rate: SuccessRateAccumulator,
    pub payment_count: CountAccumulator,
    pub payment_success: CountAccumulator,
    pub processed_amount: ProcessedAmountAccumulator,
    pub avg_ticket_size: AverageAccumulator,
    pub payment_error_message: ErrorDistributionAccumulator,
    pub retries_count: CountAccumulator,
    pub retries_amount_processed: RetriesAmountAccumulator,
    pub connector_success_rate: SuccessRateAccumulator,
    pub payments_distribution: PaymentsDistributionAccumulator,
    pub failure_reasons_distribution: FailureReasonsDistributionAccumulator,
    pub debit_routing: DebitRoutingAccumulator,
}
/// A single recorded error-message bucket: its row count, the bucket's total,
/// and the error message itself.
#[derive(Debug, Default)]
pub struct ErrorDistributionRow {
    pub count: i64,
    pub total: i64,
    pub error_message: String,
}
/// Collects error-message distribution rows for later ranking by count.
#[derive(Debug, Default)]
pub struct ErrorDistributionAccumulator {
    pub error_vec: Vec<ErrorDistributionRow>,
}
/// Failure-reason tallies: all attempts vs. first attempts only.
#[derive(Debug, Default)]
pub struct FailureReasonsDistributionAccumulator {
    pub count: u64,
    pub count_without_retries: u64,
}
/// Success/total attempt counters used to derive a success-rate percentage.
#[derive(Debug, Default)]
pub struct SuccessRateAccumulator {
    pub success: i64,
    pub total: i64,
}
/// Simple optional row-count sum.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Processed amount/count sums, tracked both with and without retries.
#[derive(Debug, Default)]
pub struct ProcessedAmountAccumulator {
    pub count_with_retries: Option<i64>,
    pub total_with_retries: Option<i64>,
    pub count_without_retries: Option<i64>,
    pub total_without_retries: Option<i64>,
}
/// Debit-routing transaction count and accumulated savings amount.
#[derive(Debug, Default)]
pub struct DebitRoutingAccumulator {
    pub transaction_count: u64,
    pub savings_amount: u64,
}
/// Running sum and count used to derive an average.
#[derive(Debug, Default)]
pub struct AverageAccumulator {
    pub total: u32,
    pub count: u32,
}
/// Optional sum of amounts processed on retried attempts.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct RetriesAmountAccumulator {
    pub total: Option<i64>,
}
/// Success/failed/total attempt tallies, each split into first-attempt-only
/// and retry-only buckets, used for the payments distribution metric.
#[derive(Debug, Default)]
pub struct PaymentsDistributionAccumulator {
    pub success: u32,
    pub failed: u32,
    pub total: u32,
    pub success_without_retries: u32,
    pub success_with_only_retries: u32,
    pub failed_without_retries: u32,
    pub failed_with_only_retries: u32,
    pub total_without_retries: u32,
    pub total_with_only_retries: u32,
}
/// Folds payment metric rows into aggregate state and yields a final value.
pub trait PaymentMetricAccumulator {
    /// The value produced once all rows have been folded in.
    type MetricOutput;
    /// Folds a single metric row into the accumulator.
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow);
    /// Consumes the accumulator, producing the final metric value.
    fn collect(self) -> Self::MetricOutput;
}
/// Folds payment distribution rows into aggregate state and yields a final
/// value.
pub trait PaymentDistributionAccumulator {
    /// The value produced once all rows have been folded in.
    type DistributionOutput;
    /// Folds a single distribution row into the accumulator.
    fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow);
    /// Consumes the accumulator, producing the final distribution value.
    fn collect(self) -> Self::DistributionOutput;
}
impl PaymentDistributionAccumulator for ErrorDistributionAccumulator {
    type DistributionOutput = Option<Vec<ErrorResult>>;
    /// Records one distribution row, defaulting missing counts/totals to zero
    /// and a missing error message to the empty string.
    fn add_distribution_bucket(&mut self, distribution: &PaymentDistributionRow) {
        let total = distribution
            .total
            .as_ref()
            .and_then(|amount| amount.to_i64())
            .unwrap_or_default();
        let row = ErrorDistributionRow {
            count: distribution.count.unwrap_or_default(),
            total,
            error_message: distribution.error_message.clone().unwrap_or_default(),
        };
        self.error_vec.push(row);
    }
    /// Ranks rows by descending count and converts each into an `ErrorResult`
    /// whose percentage is rounded to two decimal places.
    ///
    /// Returns `None` when no rows were recorded, or when any row's count or
    /// total does not fit in `u32`.
    fn collect(mut self) -> Self::DistributionOutput {
        if self.error_vec.is_empty() {
            return None;
        }
        self.error_vec.sort_by_key(|row| std::cmp::Reverse(row.count));
        self.error_vec
            .into_iter()
            .map(|row| {
                let count = f64::from(u32::try_from(row.count).ok()?);
                let total = f64::from(u32::try_from(row.total).ok()?);
                let percentage = ((count * 100.0 / total) * 100.0).round() / 100.0;
                Some(ErrorResult {
                    count: row.count,
                    reason: row.error_message,
                    percentage,
                })
            })
            .collect()
    }
}
impl PaymentMetricAccumulator for FailureReasonsDistributionAccumulator {
    /// `(total failure count, failure count over first attempts only)`.
    type MetricOutput = (Option<u64>, Option<u64>);
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Counts that are absent or negative (unrepresentable as u64) are
        // skipped entirely.
        if let Some(count) = metrics.count.and_then(|c| u64::try_from(c).ok()) {
            self.count += count;
            // Only first-attempt rows contribute to the no-retries tally.
            if metrics.first_attempt.unwrap_or(false) {
                self.count_without_retries += count;
            }
        }
    }
    fn collect(self) -> Self::MetricOutput {
        (Some(self.count), Some(self.count_without_retries))
    }
}
impl PaymentMetricAccumulator for SuccessRateAccumulator {
    type MetricOutput = Option<f64>;
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        let bucket_count = metrics.count.unwrap_or_default();
        // Only `Charged` attempts count as successes; every row contributes
        // to the denominator.
        let is_charged = matches!(
            metrics.status.as_ref(),
            Some(status) if status.as_ref() == &storage_enums::AttemptStatus::Charged
        );
        if is_charged {
            self.success += bucket_count;
        }
        self.total += bucket_count;
    }
    /// Success percentage, or `None` when there were no attempts (or the
    /// counters are out of `u32` range).
    fn collect(self) -> Self::MetricOutput {
        if self.total <= 0 {
            return None;
        }
        let success = u32::try_from(self.success).ok()?;
        let total = u32::try_from(self.total).ok()?;
        Some(f64::from(success) * 100.0 / f64::from(total))
    }
}
impl PaymentMetricAccumulator for DebitRoutingAccumulator {
    /// `(transaction count, savings amount, placeholder for savings in USD)`.
    type MetricOutput = (Option<u64>, Option<u64>, Option<u64>);
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // A negative count converts to 0, contributing nothing.
        if let Some(count) = metrics.count {
            self.transaction_count += u64::try_from(count).unwrap_or(0);
        }
        // `total` carries the aggregated savings amount for the bucket.
        if let Some(savings) = metrics.total.as_ref().and_then(|total| total.to_u64()) {
            self.savings_amount += savings;
        }
    }
    fn collect(self) -> Self::MetricOutput {
        // The USD figure is computed later by the caller; emit 0 here.
        (
            Some(self.transaction_count),
            Some(self.savings_amount),
            Some(0),
        )
    }
}
impl PaymentMetricAccumulator for PaymentsDistributionAccumulator {
type MetricOutput = (
Option<f64>,
Option<f64>,
Option<f64>,
Option<f64>,
Option<f64>,
Option<f64>,
);
fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
if let Some(ref status) = metrics.status {
if status.as_ref() == &storage_enums::AttemptStatus::Charged {
if let Some(success) = metrics
.count
.and_then(|success| u32::try_from(success).ok())
{
self.success += success;
if metrics.first_attempt.unwrap_or(false) {
self.success_without_retries += success;
} else {
self.success_with_only_retries += success;
}
}
}
if status.as_ref() == &storage_enums::AttemptStatus::Failure {
if let Some(failed) = metrics.count.and_then(|failed| u32::try_from(failed).ok()) {
self.failed += failed;
if metrics.first_attempt.unwrap_or(false) {
self.failed_without_retries += failed;
} else {
self.failed_with_only_retries += failed;
}
}
}
if status.as_ref() != &storage_enums::AttemptStatus::AuthenticationFailed
&& status.as_ref() != &storage_enums::AttemptStatus::PaymentMethodAwaited
&& status.as_ref() != &storage_enums::AttemptStatus::DeviceDataCollectionPending
&& status.as_ref() != &storage_enums::AttemptStatus::ConfirmationAwaited
&& status.as_ref() != &storage_enums::AttemptStatus::Unresolved
{
if let Some(total) = metrics.count.and_then(|total| u32::try_from(total).ok()) {
self.total += total;
if metrics.first_attempt.unwrap_or(false) {
self.total_without_retries += total;
} else {
self.total_with_only_retries += total;
}
}
}
}
}
fn collect(self) -> Self::MetricOutput {
if self.total == 0 {
(None, None, None, None, None, None)
} else {
let success = Some(self.success);
let success_without_retries = Some(self.success_without_retries);
let success_with_only_retries = Some(self.success_with_only_retries);
let failed = Some(self.failed);
let failed_with_only_retries = Some(self.failed_with_only_retries);
let failed_without_retries = Some(self.failed_without_retries);
let total = Some(self.total);
let total_without_retries = Some(self.total_without_retries);
let total_with_only_retries = Some(self.total_with_only_retries);
let success_rate = match (success, total) {
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let success_rate_without_retries =
match (success_without_retries, total_without_retries) {
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let success_rate_with_only_retries =
match (success_with_only_retries, total_with_only_retries) {
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let failed_rate = match (failed, total) {
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let failed_rate_without_retries = match (failed_without_retries, total_without_retries)
{
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let failed_rate_with_only_retries =
match (failed_with_only_retries, total_with_only_retries) {
(Some(s), Some(t)) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
(
success_rate,
success_rate_without_retries,
success_rate_with_only_retries,
failed_rate,
failed_rate_without_retries,
failed_rate_with_only_retries,
)
}
}
}
impl PaymentMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Stay None until the first bucket that carries a count, then sum.
        if let Some(incoming) = metrics.count {
            self.count = Some(self.count.unwrap_or(0) + incoming);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // A negative total cannot be represented and is reported as None.
        self.count.and_then(|i| u64::try_from(i).ok())
    }
}
impl PaymentMetricAccumulator for ProcessedAmountAccumulator {
    // (amount w/ retries, count w/ retries, amount w/o retries,
    //  count w/o retries, USD amount placeholders) — USD filled downstream.
    type MetricOutput = (
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
    );
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Option-aware addition: None + None = None; a missing side acts as
        // the identity; two values are summed.
        fn accumulate(current: Option<i64>, incoming: Option<i64>) -> Option<i64> {
            match (current, incoming) {
                (None, None) => None,
                (None, v @ Some(_)) | (v @ Some(_), None) => v,
                (Some(a), Some(b)) => Some(a + b),
            }
        }
        let amount = metrics.total.as_ref().and_then(ToPrimitive::to_i64);
        self.total_with_retries = accumulate(self.total_with_retries, amount);
        self.count_with_retries = accumulate(self.count_with_retries, metrics.count);
        // First attempts additionally feed the smart-retry-free series.
        if metrics.first_attempt.unwrap_or(false) {
            self.total_without_retries = accumulate(self.total_without_retries, amount);
            self.count_without_retries = accumulate(self.count_without_retries, metrics.count);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Totals default to 0 when never set; counts stay None in that case.
        let total_with_retries = u64::try_from(self.total_with_retries.unwrap_or(0)).ok();
        let count_with_retries = self.count_with_retries.and_then(|i| u64::try_from(i).ok());
        let total_without_retries = u64::try_from(self.total_without_retries.unwrap_or(0)).ok();
        let count_without_retries = self
            .count_without_retries
            .and_then(|i| u64::try_from(i).ok());
        (
            total_with_retries,
            count_with_retries,
            total_without_retries,
            count_without_retries,
            Some(0),
            Some(0),
        )
    }
}
impl PaymentMetricAccumulator for RetriesAmountAccumulator {
    type MetricOutput = Option<u64>;
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        // Fold this bucket's amount into the running total, staying None
        // until the first bucket that actually carries an amount.
        if let Some(amount) = metrics.total.as_ref().and_then(ToPrimitive::to_i64) {
            self.total = Some(self.total.unwrap_or(0) + amount);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // A never-set total collapses to 0; a negative one yields None.
        u64::try_from(self.total.unwrap_or(0)).ok()
    }
}
impl PaymentMetricAccumulator for AverageAccumulator {
    type MetricOutput = Option<f64>;
    fn add_metrics_bucket(&mut self, metrics: &PaymentMetricRow) {
        let total = metrics.total.as_ref().and_then(ToPrimitive::to_u32);
        let count = metrics.count.and_then(|total| u32::try_from(total).ok());
        // A bucket contributes only when both the amount and the count are
        // usable; otherwise the row is dropped and logged.
        if let (Some(total), Some(count)) = (total, count) {
            self.total += total;
            self.count += count;
        } else {
            logger::error!(message="Dropping metrics for average accumulator", metric=?metrics);
        }
    }
    fn collect(self) -> Self::MetricOutput {
        // Average amount per attempt; undefined when nothing accumulated.
        (self.count != 0).then(|| f64::from(self.total) / f64::from(self.count))
    }
}
impl PaymentMetricsAccumulator {
    /// Finalizes every sub-accumulator and assembles their outputs into the
    /// flat per-bucket value struct returned to the analytics API layer.
    pub fn collect(self) -> PaymentMetricsBucketValue {
        // Processed-amount accumulator yields amount/count pairs with and
        // without smart retries, plus the USD slots (filled downstream).
        let (
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_usd,
        ) = self.processed_amount.collect();
        // Success/failure rate distributions: overall, first-attempt-only,
        // and retries-only variants.
        let (
            payments_success_rate_distribution,
            payments_success_rate_distribution_without_smart_retries,
            payments_success_rate_distribution_with_only_retries,
            payments_failure_rate_distribution,
            payments_failure_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_with_only_retries,
        ) = self.payments_distribution.collect();
        let (failure_reason_count, failure_reason_count_without_smart_retries) =
            self.failure_reasons_distribution.collect();
        let (debit_routed_transaction_count, debit_routing_savings, debit_routing_savings_in_usd) =
            self.debit_routing.collect();
        PaymentMetricsBucketValue {
            payment_success_rate: self.payment_success_rate.collect(),
            payment_count: self.payment_count.collect(),
            payment_success_count: self.payment_success.collect(),
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            avg_ticket_size: self.avg_ticket_size.collect(),
            payment_error_message: self.payment_error_message.collect(),
            retries_count: self.retries_count.collect(),
            retries_amount_processed: self.retries_amount_processed.collect(),
            connector_success_rate: self.connector_success_rate.collect(),
            payments_success_rate_distribution,
            payments_success_rate_distribution_without_smart_retries,
            payments_success_rate_distribution_with_only_retries,
            payments_failure_rate_distribution,
            payments_failure_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_with_only_retries,
            failure_reason_count,
            failure_reason_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_usd,
            debit_routed_transaction_count,
            debit_routing_savings,
            debit_routing_savings_in_usd,
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payments/core.rs",
"crates/analytics/src/payments/distribution.rs",
"crates/analytics/src/payments/types.rs",
"crates/analytics/src/payments/metrics.rs",
"crates/analytics/src/payments/filters.rs",
"crates/analytics/src/payments/accumulator.rs"
],
"module": "crates/analytics/src/payments",
"num_files": 6,
"token_count": 11445
}
|
module_-1924808694605983855
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payments/metrics
Files: 9
</path>
<module>
// File: crates/analytics/src/payments/metrics/connector_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: success rate per connector.
///
/// Builds an aggregate query over the payment attempts table, grouped by the
/// requested dimensions plus `PaymentStatus`, restricted to rows where the
/// connector column is populated.
#[derive(Default)]
pub(super) struct ConnectorSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for ConnectorSuccessRate
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket identifier, row)` pair
    /// per group; the caller accumulates these into final metric values.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        // Status is always selected and grouped so the accumulator can tell
        // charged attempts apart from the rest.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // start/end buckets record the time window actually observed per group.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // Only attempts that were routed through some connector are relevant.
        query_builder
            .add_custom_filter_clause(PaymentDimensions::Connector, "NULL", FilterTypes::IsNotNull)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/retries_count.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: number (and total amount) of successful payment intents that
/// needed more than one attempt, i.e. payments that succeeded via retries.
///
/// Unlike the other metrics this one queries the payment *intent* table and
/// ignores the requested dimensions and filters.
#[derive(Default)]
pub(super) struct RetriesCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for RetriesCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query; dimensions/filters are intentionally unused.
    async fn load_metrics(
        &self,
        _dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        _filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Sum of intent amounts -> "amount processed via retries".
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // attempt_count > 1: the intent was retried at least once.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        // ...and it eventually succeeded.
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics.rs
mod avg_ticket_size;
mod connector_success_rate;
mod debit_routing;
mod failure_reasons;
mod payment_count;
mod payment_processed_amount;
mod payment_success_count;
mod payments_distribution;
mod retries_count;
mod success_rate;
pub(super) use avg_ticket_size::AvgTicketSize;
pub(super) use connector_success_rate::ConnectorSuccessRate;
pub(super) use debit_routing::DebitRouting;
pub(super) use failure_reasons::FailureReasons;
pub(super) use payment_count::PaymentCount;
pub(super) use payment_processed_amount::PaymentProcessedAmount;
pub(super) use payment_success_count::PaymentSuccessCount;
pub(super) use payments_distribution::PaymentsDistribution;
pub(super) use retries_count::RetriesCount;
pub(super) use success_rate::PaymentSuccessRate;
pub use super::{PaymentMetric, PaymentMetricAnalytics, PaymentMetricRow};
// File: crates/analytics/src/payments/metrics/success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: overall payment success rate.
///
/// Same query shape as the connector success rate, but without the
/// connector-not-null restriction: all attempts are counted.
#[derive(Default)]
pub(super) struct PaymentSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentSuccessRate
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket identifier, row)` pair
    /// per group produced by the query.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        // Status is always selected and grouped so the accumulator can
        // separate charged attempts from the rest.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // start/end buckets record the time window actually observed per group.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/avg_ticket_size.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::{PaymentMetric, PaymentMetricRow};
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: average ticket size (amount per successful attempt).
///
/// Selects both the summed amount and the attempt count, restricted to
/// `Charged` attempts; the accumulator divides them to get the average.
#[derive(Default)]
pub(super) struct AvgTicketSize;
#[async_trait::async_trait]
impl<T> PaymentMetric<T> for AvgTicketSize
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket identifier, row)` pair
    /// per group produced by the query.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // Numerator: total charged amount per group.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Denominator: number of attempts per group.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successfully charged attempts count towards ticket size.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        i.status.as_ref().map(|i| i.0),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/debit_routing.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: debit-routing savings.
///
/// Counts charged attempts and sums the `debit_routing_savings` column,
/// additionally grouped by currency so savings are not mixed across
/// currencies.
#[derive(Default)]
pub(super) struct DebitRouting;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for DebitRouting
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket identifier, row)` pair
    /// per group produced by the query.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // The `total` alias carries the summed savings for the group.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "debit_routing_savings",
                alias: Some("total"),
            })
            .switch()?;
        // Currency is selected (and grouped below) so savings amounts in
        // different currencies are never summed together.
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Savings are only meaningful for successfully charged attempts.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/payment_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: raw payment attempt count per requested dimension group.
///
/// Unlike the rate metrics, status is not force-added to the dimensions;
/// it flows into the bucket identifier only if the caller requested it.
#[derive(Default)]
pub(super) struct PaymentCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket identifier, row)` pair
    /// per group produced by the query.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // start/end buckets record the time window actually observed per group.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        i.status.as_ref().map(|i| i.0),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        TimeRange {
                            // Clip observed bucket bounds to the requested
                            // granularity; fall back to the query time range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/payment_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
#[derive(Default)]
pub(super) struct PaymentSuccessCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentSuccessCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts payment attempts with status `Charged` over the `Payment`
    /// collection, grouped by `dimensions` (and the optional time
    /// `granularity`), scoped by `auth`, `filters` and `time_range`.
    ///
    /// Returns one `(bucket identifier, row)` pair per result row. The
    /// status component of the identifier is fixed to `None` because the
    /// query itself pins the status to `Charged`.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        // Select every grouping dimension plus the aggregates:
        // COUNT(*) as "count" and MIN/MAX(created_at) as the bucket bounds.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successful (charged) attempts contribute to this metric.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Status is not a bucket key here: the query already
                        // filters on `Charged`.
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip the bucket bounds to the granularity grid when
                        // one is set; otherwise fall back to the caller's
                        // requested time range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/payment_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: total charged amount (`SUM(amount)`), additionally grouped by currency.
#[derive(Default)]
pub(super) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Sums the `amount` of `Charged` payment attempts (aliased "total"),
    /// grouped by `dimensions` plus `currency` (and the optional time
    /// `granularity`), scoped by `auth`, `filters` and `time_range`.
    ///
    /// Currency is grouped explicitly because summing amounts across
    /// currencies would be meaningless.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Payment);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // SUM(amount) as "total", plus currency and the bucket bounds.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only charged attempts count towards processed amount.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Status fixed to `Charged` by the filter above.
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket bounds to the granularity grid, falling
                        // back to the requested range when absent.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payments/metrics/connector_success_rate.rs",
"crates/analytics/src/payments/metrics/retries_count.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics.rs",
"crates/analytics/src/payments/metrics/success_rate.rs",
"crates/analytics/src/payments/metrics/avg_ticket_size.rs",
"crates/analytics/src/payments/metrics/debit_routing.rs",
"crates/analytics/src/payments/metrics/payment_count.rs",
"crates/analytics/src/payments/metrics/payment_success_count.rs",
"crates/analytics/src/payments/metrics/payment_processed_amount.rs"
],
"module": "crates/analytics/src/payments/metrics",
"num_files": 9,
"token_count": 7945
}
|
module_6677640450311853894
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payments/metrics/sessionized_metrics
Files: 10
</path>
<module>
// File: crates/analytics/src/payments/metrics/sessionized_metrics/failure_reasons.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket,
ToSql, Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): failed-attempt counts per error reason, with an
/// overall total computed by an embedded subquery.
#[derive(Default)]
pub(crate) struct FailureReasons;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for FailureReasons
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads per-error-reason failure counts from the sessionized payments
    /// collection.
    ///
    /// Two queries are composed:
    /// * an inner query computing `sum(sign_flag)` over all rows in the time
    ///   range with a non-null error reason, embedded as the "total" column;
    /// * an outer query counting `Failure` rows with a non-null error
    ///   reason, grouped by `dimensions` + `first_attempt`, ordered by count
    ///   descending and then by each non-error-reason dimension ascending.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        // NOTE(review): the inner query is scoped only by the time range and
        // the error-reason null check — `auth` and `filters` are not applied
        // to it, unlike the outer query. Confirm this is intended.
        let mut inner_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        inner_query_builder
            .add_select_column("sum(sign_flag)")
            .switch()?;
        inner_query_builder
            .add_custom_filter_clause(
                PaymentDimensions::ErrorReason,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        time_range
            .set_filter_clause(&mut inner_query_builder)
            .attach_printable("Error filtering time range for inner query")
            .switch()?;
        let inner_query_string = inner_query_builder
            .build_query()
            .attach_printable("Error building inner query")
            .change_context(MetricsError::QueryBuildingError)?;
        let mut outer_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        for dim in dimensions.iter() {
            outer_query_builder.add_select_column(dim).switch()?;
        }
        // sign_flag-weighted count plus the inner query's grand total.
        outer_query_builder
            .add_select_column("sum(sign_flag) AS count")
            .switch()?;
        outer_query_builder
            .add_select_column(format!("({inner_query_string}) AS total"))
            .switch()?;
        outer_query_builder
            .add_select_column("first_attempt")
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters
            .set_filter_clause(&mut outer_query_builder)
            .switch()?;
        auth.set_filter_clause(&mut outer_query_builder).switch()?;
        time_range
            .set_filter_clause(&mut outer_query_builder)
            .attach_printable("Error filtering time range for outer query")
            .switch()?;
        // Only failed attempts that carry an error reason are counted.
        outer_query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Failure,
            )
            .switch()?;
        outer_query_builder
            .add_custom_filter_clause(
                PaymentDimensions::ErrorReason,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        for dim in dimensions.iter() {
            outer_query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        outer_query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut outer_query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Most frequent reasons first; ties broken by the remaining
        // dimensions (excluding the error reason itself).
        outer_query_builder
            .add_order_by_clause("count", Order::Descending)
            .attach_printable("Error adding order by clause")
            .switch()?;
        let filtered_dimensions: Vec<&PaymentDimensions> = dimensions
            .iter()
            .filter(|&&dim| dim != PaymentDimensions::ErrorReason)
            .collect();
        for dim in &filtered_dimensions {
            outer_query_builder
                .add_order_by_clause(*dim, Order::Ascending)
                .attach_printable("Error adding order by clause")
                .switch()?;
        }
        outer_query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Status fixed to `Failure` by the filter above.
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket bounds to the granularity grid, falling
                        // back to the requested range when absent.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/connector_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): per-status attempt counts restricted to rows with a
/// non-null connector; the per-connector success rate is derived downstream.
#[derive(Default)]
pub(crate) struct ConnectorSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for ConnectorSuccessRate
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts sessionized payment attempts grouped by `dimensions` plus the
    /// payment status (appended below), excluding rows without a connector.
    /// The caller derives per-connector success rates from the per-status
    /// counts.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        // Status is always part of the grouping so counts can be split into
        // successes and failures downstream.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // Rows without a connector cannot be attributed and are excluded.
        query_builder
            .add_custom_filter_clause(PaymentDimensions::Connector, "NULL", FilterTypes::IsNotNull)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket bounds to the granularity grid, falling
                        // back to the requested range when absent.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/retries_count.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): count and summed amount of payment intents that
/// succeeded after more than one attempt.
#[derive(Default)]
pub(crate) struct RetriesCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for RetriesCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts intents that succeeded after more than one attempt, together
    /// with their summed amount, over the sessionized payment-intent
    /// collection. Dimensions and filters are ignored for this metric.
    async fn load_metrics(
        &self,
        _dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        _filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);

        // Aggregates: retry count, retried amount, and the bucket bounds.
        builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // Scope: caller's auth, retried (> 1 attempt) and ultimately
        // successful intents, within the requested time window.
        auth.set_filter_clause(&mut builder).switch()?;
        builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        if let Some(g) = granularity {
            g.set_group_by_clause(&mut builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|row| {
                // Clip the bucket bounds to the granularity grid; fall back
                // to the requested range when either side is absent.
                let start_time = match (granularity, row.start_bucket) {
                    (Some(g), Some(st)) => g.clip_to_start(st)?,
                    _ => time_range.start_time,
                };
                let end_time = match granularity.as_ref() {
                    Some(g) => row.end_bucket.map(|et| g.clip_to_end(et)).transpose()?,
                    None => time_range.end_time,
                };
                let id = PaymentMetricsBucketIdentifier::new(
                    row.currency.as_ref().map(|c| c.0),
                    None,
                    row.connector.clone(),
                    row.authentication_type.as_ref().map(|a| a.0),
                    row.payment_method.clone(),
                    row.payment_method_type.clone(),
                    row.client_source.clone(),
                    row.client_version.clone(),
                    row.profile_id.clone(),
                    row.card_network.clone(),
                    row.merchant_id.clone(),
                    row.card_last_4.clone(),
                    row.card_issuer.clone(),
                    row.error_reason.clone(),
                    row.routing_approach.as_ref().map(|r| r.0.clone()),
                    row.signature_network.clone(),
                    row.is_issuer_regulated,
                    row.is_debit_routed,
                    TimeRange {
                        start_time,
                        end_time,
                    },
                );
                Ok((id, row))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/payments_distribution.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): attempt counts split by payment status and by
/// whether the row was a first attempt.
#[derive(Default)]
pub(crate) struct PaymentsDistribution;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentsDistribution
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts sessionized payment attempts grouped by `dimensions`, the
    /// payment status (appended below) and the `first_attempt` flag, so the
    /// caller can build a status distribution split by first vs. retried
    /// attempts.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        // Status is always part of the grouping for this distribution.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder.add_select_column("first_attempt").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket bounds to the granularity grid, falling
                        // back to the requested range when absent.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): per-status attempt counts from which the overall
/// payment success rate is derived downstream.
#[derive(Default)]
pub(crate) struct PaymentSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentSuccessRate
where
T: AnalyticsDataSource + super::PaymentMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
dimensions: &[PaymentDimensions],
auth: &AuthInfo,
filters: &PaymentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
let mut dimensions = dimensions.to_vec();
dimensions.push(PaymentDimensions::PaymentStatus);
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Min {
field: "created_at",
alias: Some("start_bucket"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Max {
field: "created_at",
alias: Some("end_bucket"),
})
.switch()?;
filters.set_filter_clause(&mut query_builder).switch()?;
auth.set_filter_clause(&mut query_builder).switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(granularity) = granularity {
granularity
.set_group_by_clause(&mut query_builder)
.attach_printable("Error adding granularity")
.switch()?;
}
query_builder
.execute_query::<PaymentMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
PaymentMetricsBucketIdentifier::new(
i.currency.as_ref().map(|i| i.0),
None,
i.connector.clone(),
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
i.profile_id.clone(),
i.card_network.clone(),
i.merchant_id.clone(),
i.card_last_4.clone(),
i.card_issuer.clone(),
i.error_reason.clone(),
i.routing_approach.as_ref().map(|i| i.0.clone()),
i.signature_network.clone(),
i.is_issuer_regulated,
i.is_debit_routed,
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
_ => time_range.start_time,
},
end_time: granularity.as_ref().map_or_else(
|| Ok(time_range.end_time),
|g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
)?,
},
),
i,
))
})
.collect::<error_stack::Result<
HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)
}
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/avg_ticket_size.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::{PaymentMetric, PaymentMetricRow};
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): summed amount and count of `Charged` attempts, from
/// which the average ticket size is derived downstream.
#[derive(Default)]
pub(crate) struct AvgTicketSize;
#[async_trait::async_trait]
impl<T> PaymentMetric<T> for AvgTicketSize
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads SUM(amount) as "total" and COUNT(*) as "count" for `Charged`
    /// attempts from the sessionized payments collection, grouped by
    /// `dimensions` (and the optional time `granularity`); the average
    /// ticket size is computed downstream as total / count.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only charged attempts contribute to the ticket size.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // NOTE(review): unlike the sibling metrics in this
                        // module, the row status is passed here instead of
                        // `None`, even though the query filters on `Charged`
                        // and does not select a status column — confirm
                        // intended.
                        i.status.as_ref().map(|i| i.0),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket bounds to the granularity grid, falling
                        // back to the requested range when absent.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/debit_routing.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric (sessionized): attempt counts and summed `debit_routing_savings`
/// for charged payments, grouped by currency.
#[derive(Default)]
pub(crate) struct DebitRouting;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for DebitRouting
where
T: AnalyticsDataSource + super::PaymentMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
dimensions: &[PaymentDimensions],
auth: &AuthInfo,
filters: &PaymentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Sum {
field: "debit_routing_savings",
alias: Some("total"),
})
.switch()?;
query_builder.add_select_column("currency").switch()?;
query_builder
.add_select_column(Aggregate::Min {
field: "created_at",
alias: Some("start_bucket"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Max {
field: "created_at",
alias: Some("end_bucket"),
})
.switch()?;
filters.set_filter_clause(&mut query_builder).switch()?;
auth.set_filter_clause(&mut query_builder).switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
query_builder
.add_group_by_clause("currency")
.attach_printable("Error grouping by currency")
.switch()?;
if let Some(granularity) = granularity {
granularity
.set_group_by_clause(&mut query_builder)
.attach_printable("Error adding granularity")
.switch()?;
}
query_builder
.add_filter_clause(
PaymentDimensions::PaymentStatus,
storage_enums::AttemptStatus::Charged,
)
.switch()?;
query_builder
.execute_query::<PaymentMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
PaymentMetricsBucketIdentifier::new(
i.currency.as_ref().map(|i| i.0),
None,
i.connector.clone(),
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.client_source.clone(),
i.client_version.clone(),
i.profile_id.clone(),
i.card_network.clone(),
i.merchant_id.clone(),
i.card_last_4.clone(),
i.card_issuer.clone(),
i.error_reason.clone(),
i.routing_approach.as_ref().map(|i| i.0.clone()),
i.signature_network.clone(),
i.is_issuer_regulated,
i.is_debit_routed,
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
_ => time_range.start_time,
},
end_time: granularity.as_ref().map_or_else(
|| Ok(time_range.end_time),
|g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
)?,
},
),
i,
))
})
.collect::<error_stack::Result<
HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)
}
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/payment_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: number of payment attempts per bucket.
///
/// Counts sessionized payment attempts in every status (no `Charged` filter),
/// so the bucket identifier keeps the row's own status dimension.
#[derive(Default)]
pub(crate) struct PaymentCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        // Select every requested dimension so result rows can be bucketed by them.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max created_at bound each bucket's effective time range.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Apply the caller's filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        i.status.as_ref().map(|i| i.0),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket boundaries to the granularity grid; fall back
                        // to the requested range when no granularity/bucket exists.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/payment_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: number of successful payment attempts per bucket.
///
/// Identical to `PaymentCount` except the query is restricted to attempts in
/// the `Charged` status, and the bucket identifier therefore carries no
/// status dimension (it is implied).
#[derive(Default)]
pub(crate) struct PaymentSuccessCount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentSuccessCount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        // Select every requested dimension so result rows can be bucketed by them.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max created_at bound each bucket's effective time range.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Apply the caller's filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successful attempts count for this metric.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // Status is None: all rows here are `Charged` by construction.
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket boundaries to the granularity grid; fall back
                        // to the requested range when no granularity/bucket exists.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payments/metrics/sessionized_metrics/payment_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
payments::{PaymentDimensions, PaymentFilters, PaymentMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: total processed amount per bucket.
///
/// Sums the `amount` column over successful (`Charged`) sessionized payment
/// attempts. The status dimension is force-added to the group-by set, and
/// rows are additionally split by `first_attempt` and `currency`.
#[derive(Default)]
pub(crate) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[PaymentDimensions],
        auth: &AuthInfo,
        filters: &PaymentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentSessionized);
        // Always group by payment status in addition to the caller's dimensions
        // (the local `dimensions` copy shadows the parameter from here on).
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentDimensions::PaymentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // NOTE(review): `first_attempt` presumably distinguishes first attempts
        // from retries so retried amounts can be separated downstream — confirm.
        query_builder.add_select_column("first_attempt").switch()?;
        query_builder.add_select_column("currency").switch()?;
        // "total" carries the summed processed amount.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Min/max created_at bound each bucket's effective time range.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Apply the caller's filters, auth scoping, and the time window.
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only successful attempts contribute to the processed amount.
        query_builder
            .add_filter_clause(
                PaymentDimensions::PaymentStatus,
                storage_enums::AttemptStatus::Charged,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // Status is None: all rows here are `Charged` by construction.
                    PaymentMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.client_source.clone(),
                        i.client_version.clone(),
                        i.profile_id.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        i.routing_approach.as_ref().map(|i| i.0.clone()),
                        i.signature_network.clone(),
                        i.is_issuer_regulated,
                        i.is_debit_routed,
                        // Clip bucket boundaries to the granularity grid; fall back
                        // to the requested range when no granularity/bucket exists.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentMetricsBucketIdentifier, PaymentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payments/metrics/sessionized_metrics/failure_reasons.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/connector_success_rate.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/retries_count.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/payments_distribution.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/success_rate.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/avg_ticket_size.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/debit_routing.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/payment_count.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/payment_success_count.rs",
"crates/analytics/src/payments/metrics/sessionized_metrics/payment_processed_amount.rs"
],
"module": "crates/analytics/src/payments/metrics/sessionized_metrics",
"num_files": 10,
"token_count": 10234
}
|
module_7212990785836073318
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/api_event
Files: 5
</path>
<module>
// File: crates/analytics/src/api_event/core.rs
use std::collections::HashMap;
use api_models::analytics::{
api_event::{
ApiEventMetricsBucketIdentifier, ApiEventMetricsBucketValue, ApiLogsRequest,
ApiMetricsBucketResponse,
},
AnalyticsMetadata, ApiEventFiltersResponse, GetApiEventFiltersRequest,
GetApiEventMetricRequest, MetricsResponse,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use router_env::{
instrument, logger,
tracing::{self, Instrument},
};
use super::{
events::{get_api_event, ApiLogsResult},
metrics::ApiEventMetricRow,
};
use crate::{
errors::{AnalyticsError, AnalyticsResult},
metrics,
types::FiltersError,
AnalyticsProvider,
};
/// Fetch raw API-event log rows for the given merchant.
///
/// API events are only queryable from a Clickhouse pool; a plain sqlx
/// provider yields a `NotImplemented` error.
#[instrument(skip_all)]
pub async fn api_events_core(
    pool: &AnalyticsProvider,
    req: ApiLogsRequest,
    merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<Vec<ApiLogsResult>> {
    // Dispatch on the provider kind: every Clickhouse-capable variant runs the
    // query against its Clickhouse pool; sqlx-only providers cannot serve it.
    match pool {
        AnalyticsProvider::Clickhouse(ckh_pool)
        | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
        | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
            get_api_event(merchant_id, req, ckh_pool).await
        }
        AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented(
            "API Events not implemented for SQLX",
        ))
        .attach_printable("SQL Analytics is not implemented for API Events"),
    }
    .switch()
}
/// Collect the distinct filter values available for each requested API-event
/// dimension (status code, flow type, API flow) within the given time range.
///
/// Only Clickhouse-backed providers can serve this; a plain sqlx provider
/// yields a `NotImplemented` error.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetApiEventFiltersRequest,
    merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<ApiEventFiltersResponse> {
    use api_models::analytics::{api_event::ApiEventDimensions, ApiEventFilterValue};
    use super::filters::get_api_event_filter_for_dimension;
    use crate::api_event::filters::ApiEventFilter;
    let mut res = ApiEventFiltersResponse::default();
    // One distinct-values query per requested dimension.
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented(
                "API Events not implemented for SQLX",
            ))
            .attach_printable("SQL Analytics is not implemented for API Events"),
            AnalyticsProvider::Clickhouse(ckh_pool)
            | AnalyticsProvider::CombinedSqlx(_, ckh_pool)
            | AnalyticsProvider::CombinedCkh(_, ckh_pool) => {
                get_api_event_filter_for_dimension(dim, merchant_id, &req.time_range, ckh_pool)
                    .await
            }
        }
        .switch()?
        .into_iter()
        // Pull the value for the dimension being queried; rows where that
        // column is NULL are dropped.
        .filter_map(|fil: ApiEventFilter| match dim {
            ApiEventDimensions::StatusCode => fil.status_code.map(|i| i.to_string()),
            ApiEventDimensions::FlowType => fil.flow_type,
            ApiEventDimensions::ApiFlow => fil.api_flow,
        })
        .collect::<Vec<String>>();
        res.query_data.push(ApiEventFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
/// Run every requested API-event metric concurrently and merge the results
/// into one response keyed by bucket identifier.
///
/// Each metric query is spawned on a `JoinSet`; partial rows for the same
/// bucket are merged field-by-field as tasks complete.
#[instrument(skip_all)]
pub async fn get_api_event_metrics(
    pool: &AnalyticsProvider,
    merchant_id: &common_utils::id_type::MerchantId,
    req: GetApiEventMetricRequest,
) -> AnalyticsResult<MetricsResponse<ApiMetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<ApiEventMetricsBucketIdentifier, ApiEventMetricRow> =
        HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    // Spawn one task per requested metric; each owns clones of the request
    // and pool so it can outlive this stack frame.
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_api_metrics_query",
            api_event_metric = metric_type.as_ref()
        );
        // TODO: lifetime issues with joinset,
        // can be optimized away if joinset lifetime requirements are relaxed
        let merchant_id_scoped = merchant_id.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_api_event_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &merchant_id_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                (metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Drain task results as they finish (completion order is arbitrary).
    while let Some((metric, data)) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        let data = data?;
        let attributes = router_env::metric_attributes!(
            ("metric_type", metric.to_string()),
            ("source", pool.to_string()),
        );
        // Record how many buckets this metric produced (skipped if the row
        // count somehow exceeds u64, which cannot happen in practice).
        let value = u64::try_from(data.len());
        if let Ok(val) = value {
            metrics::BUCKETS_FETCHED.record(val, attributes);
            logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
        }
        for (id, value) in data {
            // Merge into the accumulator: each metric task fills a different
            // field, so `.or(...)` keeps the first non-None value per field.
            metrics_accumulator
                .entry(id)
                .and_modify(|data| {
                    data.api_count = data.api_count.or(value.api_count);
                    data.status_code_count = data.status_code_count.or(value.status_code_count);
                    data.latency = data.latency.or(value.latency);
                })
                .or_insert(value);
        }
    }
    // Flatten the accumulator into the response shape.
    let query_data: Vec<ApiMetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| ApiMetricsBucketResponse {
            values: ApiEventMetricsBucketValue {
                latency: val.latency,
                api_count: val.api_count,
                status_code_count: val.status_code_count,
            },
            dimensions: id,
        })
        .collect();
    Ok(MetricsResponse {
        query_data,
        meta_data: [AnalyticsMetadata {
            current_time_range: req.time_range,
        }],
    })
}
// File: crates/analytics/src/api_event/types.rs
use api_models::analytics::api_event::{ApiEventDimensions, ApiEventFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
// Translate the user-supplied API-event filters into WHERE ... IN (...)
// clauses. Empty filter lists add no clause (i.e. match everything).
impl<T> QueryFilter<T> for ApiEventFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.status_code.is_empty() {
            builder
                .add_filter_in_range_clause(ApiEventDimensions::StatusCode, &self.status_code)
                .attach_printable("Error adding status_code filter")?;
        }
        if !self.flow_type.is_empty() {
            builder
                .add_filter_in_range_clause(ApiEventDimensions::FlowType, &self.flow_type)
                .attach_printable("Error adding flow_type filter")?;
        }
        if !self.api_flow.is_empty() {
            builder
                .add_filter_in_range_clause(ApiEventDimensions::ApiFlow, &self.api_flow)
                .attach_printable("Error adding api_name filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/api_event/events.rs
use api_models::analytics::{
api_event::{ApiLogsRequest, QueryType},
Granularity,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use router_env::Flow;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
/// Marker trait: data sources able to deserialize [`ApiLogsResult`] rows.
pub trait ApiLogsFilterAnalytics: LoadRow<ApiLogsResult> {}
/// Fetch API-event log rows for a single payment, refund, or dispute.
///
/// Builds a `SELECT *` over the ApiEvents collection, scoped to the merchant
/// and to the entity referenced by `query_param`, restricting `api_flow` to
/// the flows relevant for that entity type.
pub async fn get_api_event<T>(
    merchant_id: &common_utils::id_type::MerchantId,
    query_param: ApiLogsRequest,
    pool: &T,
) -> FiltersResult<Vec<ApiLogsResult>>
where
    T: AnalyticsDataSource + ApiLogsFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
    query_builder.add_select_column("*").switch()?;
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    match query_param.query_param {
        // Payment view: every flow that can touch a payment, including its
        // refunds, disputes, and incoming webhooks.
        QueryType::Payment { payment_id } => {
            query_builder
                .add_filter_clause("payment_id", &payment_id)
                .switch()?;
            query_builder
                .add_filter_in_range_clause(
                    "api_flow",
                    &[
                        Flow::PaymentsCancel,
                        Flow::PaymentsCapture,
                        Flow::PaymentsConfirm,
                        Flow::PaymentsCreate,
                        Flow::PaymentsStart,
                        Flow::PaymentsUpdate,
                        Flow::RefundsCreate,
                        Flow::RefundsUpdate,
                        Flow::DisputesEvidenceSubmit,
                        Flow::AttachDisputeEvidence,
                        Flow::RetrieveDisputeEvidence,
                        Flow::IncomingWebhookReceive,
                    ],
                )
                .switch()?;
        }
        // Refund view: scoped to one refund of one payment.
        QueryType::Refund {
            payment_id,
            refund_id,
        } => {
            query_builder
                .add_filter_clause("payment_id", &payment_id)
                .switch()?;
            query_builder
                .add_filter_clause("refund_id", refund_id)
                .switch()?;
            query_builder
                .add_filter_in_range_clause("api_flow", &[Flow::RefundsCreate, Flow::RefundsUpdate])
                .switch()?;
        }
        // Dispute view: scoped to one dispute of one payment.
        QueryType::Dispute {
            payment_id,
            dispute_id,
        } => {
            query_builder
                .add_filter_clause("payment_id", &payment_id)
                .switch()?;
            query_builder
                .add_filter_clause("dispute_id", dispute_id)
                .switch()?;
            query_builder
                .add_filter_in_range_clause(
                    "api_flow",
                    &[
                        Flow::DisputesEvidenceSubmit,
                        Flow::AttachDisputeEvidence,
                        Flow::RetrieveDisputeEvidence,
                    ],
                )
                .switch()?;
        }
    }
    //TODO!: update the execute_query function to return reports instead of plain errors...
    query_builder
        .execute_query::<ApiLogsResult, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One API-event log row as returned by the ApiEvents collection.
///
/// Field meanings mirror the stored event columns; most are optional because
/// not every flow populates them.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct ApiLogsResult {
    pub merchant_id: common_utils::id_type::MerchantId,
    pub payment_id: Option<common_utils::id_type::PaymentId>,
    pub refund_id: Option<String>,
    pub payment_method_id: Option<String>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub customer_id: Option<String>,
    pub user_id: Option<String>,
    pub connector: Option<String>,
    pub request_id: Option<String>,
    pub flow_type: String,
    pub api_flow: String,
    pub api_auth_type: Option<String>,
    // Raw request/response payloads as stored (response absent on failures
    // or fire-and-forget flows).
    pub request: String,
    pub response: Option<String>,
    pub error: Option<String>,
    pub authentication_data: Option<String>,
    pub status_code: u16,
    // NOTE(review): latency units are not visible here — presumably
    // milliseconds; confirm against the event producer.
    pub latency: Option<u128>,
    pub user_agent: Option<String>,
    pub hs_latency: Option<u128>,
    pub ip_addr: Option<String>,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created_at: PrimitiveDateTime,
    pub http_method: Option<String>,
    pub url_path: Option<String>,
}
// File: crates/analytics/src/api_event/metrics.rs
use api_models::analytics::{
api_event::{
ApiEventDimensions, ApiEventFilters, ApiEventMetrics, ApiEventMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
};
mod api_count;
pub mod latency;
mod status_code_count;
use std::collections::HashSet;
use api_count::ApiCount;
use latency::MaxLatency;
use status_code_count::StatusCodeCount;
use self::latency::LatencyAvg;
/// One aggregated metric row; each metric query fills only its own field,
/// leaving the others `None` (they are merged later per bucket).
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct ApiEventMetricRow {
    pub latency: Option<u64>,
    pub api_count: Option<u64>,
    pub status_code_count: Option<u64>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: data sources able to deserialize both metric row shapes.
pub trait ApiEventMetricAnalytics: LoadRow<ApiEventMetricRow> + LoadRow<LatencyAvg> {}
/// Common interface implemented by every API-event metric query.
#[async_trait::async_trait]
pub trait ApiEventMetric<T>
where
    T: AnalyticsDataSource + ApiEventMetricAnalytics,
{
    async fn load_metrics(
        &self,
        dimensions: &[ApiEventDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &ApiEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>>;
}
// Dispatch from the metric enum to the concrete metric implementation; each
// variant simply forwards the full argument set unchanged.
#[async_trait::async_trait]
impl<T> ApiEventMetric<T> for ApiEventMetrics
where
    T: AnalyticsDataSource + ApiEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[ApiEventDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &ApiEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(ApiEventMetricsBucketIdentifier, ApiEventMetricRow)>> {
        match self {
            Self::Latency => {
                MaxLatency
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::ApiCount => {
                ApiCount
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::StatusCodeCount => {
                StatusCodeCount
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
        }
    }
}
// File: crates/analytics/src/api_event/filters.rs
use api_models::analytics::{api_event::ApiEventDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
/// Marker trait: data sources able to deserialize [`ApiEventFilter`] rows.
pub trait ApiEventFilterAnalytics: LoadRow<ApiEventFilter> {}
/// List the distinct values one API-event dimension takes for a merchant
/// within the given time range (SELECT DISTINCT <dimension> ...).
pub async fn get_api_event_filter_for_dimension<T>(
    dimension: ApiEventDimensions,
    merchant_id: &common_utils::id_type::MerchantId,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<ApiEventFilter>>
where
    T: AnalyticsDataSource + ApiEventFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::ApiEvents);
    // Only the requested dimension is selected, so in each result row every
    // other ApiEventFilter field deserializes as None.
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    query_builder.set_distinct();
    query_builder
        .execute_query::<ApiEventFilter, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// Sparse row holding at most one populated dimension value (see above).
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct ApiEventFilter {
    pub status_code: Option<i32>,
    pub flow_type: Option<String>,
    pub api_flow: Option<String>,
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/api_event/core.rs",
"crates/analytics/src/api_event/types.rs",
"crates/analytics/src/api_event/events.rs",
"crates/analytics/src/api_event/metrics.rs",
"crates/analytics/src/api_event/filters.rs"
],
"module": "crates/analytics/src/api_event",
"num_files": 5,
"token_count": 3569
}
|
module_5974521983138180557
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payment_intents
Files: 6
</path>
<module>
// File: crates/analytics/src/payment_intents/core.rs
#![allow(dead_code)]
use std::collections::{HashMap, HashSet};
use api_models::analytics::{
payment_intents::{
MetricsBucketResponse, PaymentIntentDimensions, PaymentIntentMetrics,
PaymentIntentMetricsBucketIdentifier,
},
GetPaymentIntentFiltersRequest, GetPaymentIntentMetricRequest, PaymentIntentFilterValue,
PaymentIntentFiltersResponse, PaymentIntentsAnalyticsMetadata, PaymentIntentsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::{errors::CustomResult, types::TimeRange};
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
instrument, logger,
tracing::{self, Instrument},
};
use super::{
filters::{get_payment_intent_filter_for_dimension, PaymentIntentFilterRow},
metrics::PaymentIntentMetricRow,
sankey::{get_sankey_data, SankeyRow},
PaymentIntentMetricsAccumulator,
};
use crate::{
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics,
payment_intents::PaymentIntentMetricAccumulator,
AnalyticsProvider,
};
/// Result envelope for a spawned metric task: the metric that was queried
/// together with its (possibly failed) bucket rows.
#[derive(Debug)]
pub enum TaskType {
    MetricTask(
        PaymentIntentMetrics,
        CustomResult<
            HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
            AnalyticsError,
        >,
    ),
}
/// Build sankey (payment funnel) rows for the requested time range.
///
/// Only Clickhouse-backed providers can serve this query; a plain sqlx
/// provider yields a `NotImplemented` error.
#[instrument(skip_all)]
pub async fn get_sankey(
    pool: &AnalyticsProvider,
    auth: &AuthInfo,
    req: TimeRange,
) -> AnalyticsResult<Vec<SankeyRow>> {
    // Resolve the Clickhouse pool first, bailing out for sqlx-only providers.
    let ckh_pool = match pool {
        AnalyticsProvider::Clickhouse(ckh_pool)
        | AnalyticsProvider::CombinedCkh(_, ckh_pool)
        | AnalyticsProvider::CombinedSqlx(_, ckh_pool) => ckh_pool,
        AnalyticsProvider::Sqlx(_) => Err(AnalyticsError::NotImplemented(
            "Sankey not implemented for sqlx",
        ))?,
    };
    get_sankey_data(ckh_pool, auth, &req)
        .await
        .change_context(AnalyticsError::UnknownError)
}
#[instrument(skip_all)]
pub async fn get_metrics(
pool: &AnalyticsProvider,
ex_rates: &Option<ExchangeRates>,
auth: &AuthInfo,
req: GetPaymentIntentMetricRequest,
) -> AnalyticsResult<PaymentIntentsMetricsResponse<MetricsBucketResponse>> {
let mut metrics_accumulator: HashMap<
PaymentIntentMetricsBucketIdentifier,
PaymentIntentMetricsAccumulator,
> = HashMap::new();
let mut set = tokio::task::JoinSet::new();
for metric_type in req.metrics.iter().cloned() {
let req = req.clone();
let pool = pool.clone();
let task_span = tracing::debug_span!(
"analytics_payment_intents_metrics_query",
payment_metric = metric_type.as_ref()
);
// TODO: lifetime issues with joinset,
// can be optimized away if joinset lifetime requirements are relaxed
let auth_scoped = auth.to_owned();
set.spawn(
async move {
let data = pool
.get_payment_intent_metrics(
&metric_type,
&req.group_by_names.clone(),
&auth_scoped,
&req.filters,
req.time_series.map(|t| t.granularity),
&req.time_range,
)
.await
.change_context(AnalyticsError::UnknownError);
TaskType::MetricTask(metric_type, data)
}
.instrument(task_span),
);
}
while let Some(task_type) = set
.join_next()
.await
.transpose()
.change_context(AnalyticsError::UnknownError)?
{
match task_type {
TaskType::MetricTask(metric, data) => {
let data = data?;
let attributes = router_env::metric_attributes!(
("metric_type", metric.to_string()),
("source", pool.to_string()),
);
let value = u64::try_from(data.len());
if let Ok(val) = value {
metrics::BUCKETS_FETCHED.record(val, attributes);
logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
}
for (id, value) in data {
logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
let metrics_builder = metrics_accumulator.entry(id).or_default();
match metric {
PaymentIntentMetrics::SuccessfulSmartRetries
| PaymentIntentMetrics::SessionizedSuccessfulSmartRetries => {
metrics_builder
.successful_smart_retries
.add_metrics_bucket(&value)
}
PaymentIntentMetrics::TotalSmartRetries
| PaymentIntentMetrics::SessionizedTotalSmartRetries => metrics_builder
.total_smart_retries
.add_metrics_bucket(&value),
PaymentIntentMetrics::SmartRetriedAmount
| PaymentIntentMetrics::SessionizedSmartRetriedAmount => metrics_builder
.smart_retried_amount
.add_metrics_bucket(&value),
PaymentIntentMetrics::PaymentIntentCount
| PaymentIntentMetrics::SessionizedPaymentIntentCount => metrics_builder
.payment_intent_count
.add_metrics_bucket(&value),
PaymentIntentMetrics::PaymentsSuccessRate
| PaymentIntentMetrics::SessionizedPaymentsSuccessRate => metrics_builder
.payments_success_rate
.add_metrics_bucket(&value),
PaymentIntentMetrics::SessionizedPaymentProcessedAmount
| PaymentIntentMetrics::PaymentProcessedAmount => metrics_builder
.payment_processed_amount
.add_metrics_bucket(&value),
PaymentIntentMetrics::SessionizedPaymentsDistribution => metrics_builder
.payments_distribution
.add_metrics_bucket(&value),
}
}
logger::debug!(
"Analytics Accumulated Results: metric: {}, results: {:#?}",
metric,
metrics_accumulator
);
}
}
}
let mut success = 0;
let mut success_without_smart_retries = 0;
let mut total_smart_retried_amount = 0;
let mut total_smart_retried_amount_in_usd = 0;
let mut total_smart_retried_amount_without_smart_retries = 0;
let mut total_smart_retried_amount_without_smart_retries_in_usd = 0;
let mut total = 0;
let mut total_payment_processed_amount = 0;
let mut total_payment_processed_amount_in_usd = 0;
let mut total_payment_processed_count = 0;
let mut total_payment_processed_amount_without_smart_retries = 0;
let mut total_payment_processed_amount_without_smart_retries_in_usd = 0;
let mut total_payment_processed_count_without_smart_retries = 0;
let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
.into_iter()
.map(|(id, val)| {
let mut collected_values = val.collect();
if let Some(success_count) = collected_values.successful_payments {
success += success_count;
}
if let Some(success_count) = collected_values.successful_payments_without_smart_retries
{
success_without_smart_retries += success_count;
}
if let Some(total_count) = collected_values.total_payments {
total += total_count;
}
if let Some(retried_amount) = collected_values.smart_retried_amount {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(retried_amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.smart_retried_amount_in_usd = amount_in_usd;
total_smart_retried_amount += retried_amount;
total_smart_retried_amount_in_usd += amount_in_usd.unwrap_or(0);
}
if let Some(retried_amount) =
collected_values.smart_retried_amount_without_smart_retries
{
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(retried_amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.smart_retried_amount_without_smart_retries_in_usd = amount_in_usd;
total_smart_retried_amount_without_smart_retries += retried_amount;
total_smart_retried_amount_without_smart_retries_in_usd +=
amount_in_usd.unwrap_or(0);
}
if let Some(amount) = collected_values.payment_processed_amount {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.payment_processed_amount_in_usd = amount_in_usd;
total_payment_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
total_payment_processed_amount += amount;
}
if let Some(count) = collected_values.payment_processed_count {
total_payment_processed_count += count;
}
if let Some(amount) = collected_values.payment_processed_amount_without_smart_retries {
let amount_in_usd = if let Some(ex_rates) = ex_rates {
id.currency
.and_then(|currency| {
i64::try_from(amount)
.inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
.ok()
.and_then(|amount_i64| {
convert(ex_rates, currency, Currency::USD, amount_i64)
.inspect_err(|e| {
logger::error!("Currency conversion error: {:?}", e)
})
.ok()
})
})
.map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
.unwrap_or_default()
} else {
None
};
collected_values.payment_processed_amount_without_smart_retries_in_usd =
amount_in_usd;
total_payment_processed_amount_without_smart_retries_in_usd +=
amount_in_usd.unwrap_or(0);
total_payment_processed_amount_without_smart_retries += amount;
}
if let Some(count) = collected_values.payment_processed_count_without_smart_retries {
total_payment_processed_count_without_smart_retries += count;
}
MetricsBucketResponse {
values: collected_values,
dimensions: id,
}
})
.collect();
let total_success_rate = match (success, total) {
(s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
let total_success_rate_without_smart_retries = match (success_without_smart_retries, total) {
(s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
_ => None,
};
Ok(PaymentIntentsMetricsResponse {
query_data,
meta_data: [PaymentIntentsAnalyticsMetadata {
total_success_rate,
total_success_rate_without_smart_retries,
total_smart_retried_amount: Some(total_smart_retried_amount),
total_smart_retried_amount_without_smart_retries: Some(
total_smart_retried_amount_without_smart_retries,
),
total_payment_processed_amount: Some(total_payment_processed_amount),
total_payment_processed_amount_without_smart_retries: Some(
total_payment_processed_amount_without_smart_retries,
),
total_smart_retried_amount_in_usd: if ex_rates.is_some() {
Some(total_smart_retried_amount_in_usd)
} else {
None
},
total_smart_retried_amount_without_smart_retries_in_usd: if ex_rates.is_some() {
Some(total_smart_retried_amount_without_smart_retries_in_usd)
} else {
None
},
total_payment_processed_amount_in_usd: if ex_rates.is_some() {
Some(total_payment_processed_amount_in_usd)
} else {
None
},
total_payment_processed_amount_without_smart_retries_in_usd: if ex_rates.is_some() {
Some(total_payment_processed_amount_without_smart_retries_in_usd)
} else {
None
},
total_payment_processed_count: Some(total_payment_processed_count),
total_payment_processed_count_without_smart_retries: Some(
total_payment_processed_count_without_smart_retries,
),
}],
})
}
pub async fn get_filters(
pool: &AnalyticsProvider,
req: GetPaymentIntentFiltersRequest,
merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<PaymentIntentFiltersResponse> {
let mut res = PaymentIntentFiltersResponse::default();
for dim in req.group_by_names {
let values = match pool {
AnalyticsProvider::Sqlx(pool) => {
get_payment_intent_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
.await
}
AnalyticsProvider::Clickhouse(pool) => {
get_payment_intent_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
.await
}
AnalyticsProvider::CombinedCkh(sqlx_poll, ckh_pool) => {
let ckh_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
ckh_pool,
)
.await;
let sqlx_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
sqlx_poll,
)
.await;
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics filters")
},
_ => {}
};
ckh_result
}
AnalyticsProvider::CombinedSqlx(sqlx_poll, ckh_pool) => {
let ckh_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
ckh_pool,
)
.await;
let sqlx_result = get_payment_intent_filter_for_dimension(
dim,
merchant_id,
&req.time_range,
sqlx_poll,
)
.await;
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres payment intents analytics filters")
},
_ => {}
};
sqlx_result
}
}
.change_context(AnalyticsError::UnknownError)?
.into_iter()
.filter_map(|fil: PaymentIntentFilterRow| match dim {
PaymentIntentDimensions::PaymentIntentStatus => fil.status.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::ProfileId => fil.profile_id,
PaymentIntentDimensions::Connector => fil.connector,
PaymentIntentDimensions::AuthType => fil.authentication_type.map(|i| i.as_ref().to_string()),
PaymentIntentDimensions::PaymentMethod => fil.payment_method,
PaymentIntentDimensions::PaymentMethodType => fil.payment_method_type,
PaymentIntentDimensions::CardNetwork => fil.card_network,
PaymentIntentDimensions::MerchantId => fil.merchant_id,
PaymentIntentDimensions::CardLast4 => fil.card_last_4,
PaymentIntentDimensions::CardIssuer => fil.card_issuer,
PaymentIntentDimensions::ErrorReason => fil.error_reason,
})
.collect::<Vec<String>>();
res.query_data.push(PaymentIntentFilterValue {
dimension: dim,
values,
})
}
Ok(res)
}
// File: crates/analytics/src/payment_intents/types.rs
use api_models::analytics::payment_intents::{PaymentIntentDimensions, PaymentIntentFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for PaymentIntentFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Appends an `IN (...)` clause to `builder` for every non-empty filter
    /// list on this struct; empty lists add no constraint. Clauses are added
    /// in declaration order.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        // Local helper: adds a range filter for `$lhs` when `$values` is
        // non-empty, attaching `$context` to any builder error.
        macro_rules! add_in_range {
            ($lhs:expr, $values:expr, $context:literal) => {
                if !$values.is_empty() {
                    builder
                        .add_filter_in_range_clause($lhs, $values)
                        .attach_printable($context)?;
                }
            };
        }
        add_in_range!(
            PaymentIntentDimensions::PaymentIntentStatus,
            &self.status,
            "Error adding payment intent status filter"
        );
        add_in_range!(
            PaymentIntentDimensions::Currency,
            &self.currency,
            "Error adding currency filter"
        );
        add_in_range!(
            PaymentIntentDimensions::ProfileId,
            &self.profile_id,
            "Error adding profile id filter"
        );
        add_in_range!(
            PaymentIntentDimensions::Connector,
            &self.connector,
            "Error adding connector filter"
        );
        add_in_range!(
            PaymentIntentDimensions::AuthType,
            &self.auth_type,
            "Error adding auth type filter"
        );
        add_in_range!(
            PaymentIntentDimensions::PaymentMethod,
            &self.payment_method,
            "Error adding payment method filter"
        );
        add_in_range!(
            PaymentIntentDimensions::PaymentMethodType,
            &self.payment_method_type,
            "Error adding payment method type filter"
        );
        add_in_range!(
            PaymentIntentDimensions::CardNetwork,
            &self.card_network,
            "Error adding card network filter"
        );
        add_in_range!(
            PaymentIntentDimensions::MerchantId,
            &self.merchant_id,
            "Error adding merchant id filter"
        );
        add_in_range!(
            PaymentIntentDimensions::CardLast4,
            &self.card_last_4,
            "Error adding card last 4 filter"
        );
        add_in_range!(
            PaymentIntentDimensions::CardIssuer,
            &self.card_issuer,
            "Error adding card issuer filter"
        );
        add_in_range!(
            PaymentIntentDimensions::ErrorReason,
            &self.error_reason,
            "Error adding error reason filter"
        );
        // customer_id has no dimension enum variant; filter on the raw column.
        add_in_range!(
            "customer_id",
            &self.customer_id,
            "Error adding customer id filter"
        );
        Ok(())
    }
}
// File: crates/analytics/src/payment_intents/metrics.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetrics,
PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
mod payment_intent_count;
mod payment_processed_amount;
mod payments_success_rate;
mod sessionized_metrics;
mod smart_retried_amount;
mod successful_smart_retries;
mod total_smart_retries;
use payment_intent_count::PaymentIntentCount;
use payment_processed_amount::PaymentProcessedAmount;
use payments_success_rate::PaymentsSuccessRate;
use smart_retried_amount::SmartRetriedAmount;
use successful_smart_retries::SuccessfulSmartRetries;
use total_smart_retries::TotalSmartRetries;
/// One row returned by a payment-intent metrics query. The dimension columns
/// are optional because only the dimensions included in the query's GROUP BY
/// are populated.
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct PaymentIntentMetricRow {
    pub status: Option<DBEnumWrapper<storage_enums::IntentStatus>>,
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub profile_id: Option<String>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    // Accumulators treat 1 as "first attempt / no retry" rows — presumably a
    // `(attempt_count = 1)` style select; TODO confirm against metric queries.
    pub first_attempt: Option<i64>,
    // Aggregated amount for the bucket.
    pub total: Option<bigdecimal::BigDecimal>,
    // Aggregated row count for the bucket.
    pub count: Option<i64>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: a data source capable of loading [`PaymentIntentMetricRow`]s.
pub trait PaymentIntentMetricAnalytics: LoadRow<PaymentIntentMetricRow> {}
/// A payment-intent metric that can be computed against any supported
/// analytics data source.
#[async_trait::async_trait]
pub trait PaymentIntentMetric<T>
where
    T: AnalyticsDataSource + PaymentIntentMetricAnalytics,
{
    /// Runs the metric query and returns one row per dimension bucket.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> PaymentIntentMetric<T> for PaymentIntentMetrics
where
    T: AnalyticsDataSource + PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete metric implementation selected by `self`.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        // Every variant forwards the identical argument list; this keeps the
        // dispatch table to one line per metric.
        macro_rules! query {
            ($metric:expr) => {
                $metric
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            };
        }
        match self {
            Self::SuccessfulSmartRetries => query!(SuccessfulSmartRetries),
            Self::TotalSmartRetries => query!(TotalSmartRetries),
            Self::SmartRetriedAmount => query!(SmartRetriedAmount),
            Self::PaymentIntentCount => query!(PaymentIntentCount),
            Self::PaymentsSuccessRate => query!(PaymentsSuccessRate),
            Self::PaymentProcessedAmount => query!(PaymentProcessedAmount),
            Self::SessionizedSuccessfulSmartRetries => {
                query!(sessionized_metrics::SuccessfulSmartRetries)
            }
            Self::SessionizedTotalSmartRetries => query!(sessionized_metrics::TotalSmartRetries),
            Self::SessionizedSmartRetriedAmount => query!(sessionized_metrics::SmartRetriedAmount),
            Self::SessionizedPaymentIntentCount => query!(sessionized_metrics::PaymentIntentCount),
            Self::SessionizedPaymentsSuccessRate => {
                query!(sessionized_metrics::PaymentsSuccessRate)
            }
            Self::SessionizedPaymentProcessedAmount => {
                query!(sessionized_metrics::PaymentProcessedAmount)
            }
            Self::SessionizedPaymentsDistribution => {
                query!(sessionized_metrics::PaymentsDistribution)
            }
        }
    }
}
// File: crates/analytics/src/payment_intents/sankey.rs
use common_enums::enums;
use common_utils::{
errors::ParsingError,
types::{authentication::AuthInfo, TimeRange},
};
use error_stack::ResultExt;
use router_env::logger;
use crate::{
clickhouse::ClickhouseClient,
query::{Aggregate, QueryBuilder, QueryFilter},
types::{AnalyticsCollection, DBEnumWrapper, MetricsError, MetricsResult},
};
/// Refund state of a sessionized payment intent in the sankey source data.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    Eq,
    Hash,
    PartialEq,
    serde::Deserialize,
    serde::Serialize,
    strum::Display,
    strum::EnumIter,
    strum::EnumString,
)]
#[serde(rename_all = "snake_case")]
pub enum SessionizerRefundStatus {
    /// The full amount was refunded.
    FullRefunded,
    /// No refund recorded.
    #[default]
    NotRefunded,
    /// Only part of the amount was refunded.
    PartialRefunded,
}
/// Dispute state of a sessionized payment intent in the sankey source data.
#[derive(
    Clone,
    Copy,
    Debug,
    Default,
    Eq,
    Hash,
    PartialEq,
    serde::Deserialize,
    serde::Serialize,
    strum::Display,
    strum::EnumIter,
    strum::EnumString,
)]
#[serde(rename_all = "snake_case")]
pub enum SessionizerDisputeStatus {
    /// A dispute exists against the intent.
    DisputePresent,
    /// No dispute recorded.
    #[default]
    NotDisputed,
}
/// One aggregated row of the payment-intent sankey query: a count of intents
/// per (status, refund status, dispute status, first-attempt) combination.
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct SankeyRow {
    // Number of intents in this combination.
    pub count: i64,
    pub status: DBEnumWrapper<enums::IntentStatus>,
    #[serde(default)]
    pub refunds_status: Option<DBEnumWrapper<SessionizerRefundStatus>>,
    #[serde(default)]
    pub dispute_status: Option<DBEnumWrapper<SessionizerDisputeStatus>>,
    // 1 when the source row satisfied `attempt_count = 1`, else 0 — see the
    // select clause in `get_sankey_data`.
    pub first_attempt: i64,
}
impl TryInto<SankeyRow> for serde_json::Value {
type Error = error_stack::Report<ParsingError>;
fn try_into(self) -> Result<SankeyRow, Self::Error> {
logger::debug!("Parsing SankeyRow from {:?}", self);
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse Sankey in clickhouse results",
))
}
}
/// Builds and runs the sankey aggregation query against the sessionized
/// payment-intent table: counts intents grouped by status, refund status,
/// dispute status, and whether the intent completed on its first attempt.
///
/// # Errors
/// Returns `MetricsError::QueryBuildingError` for any builder failure and
/// `MetricsError::QueryExecutionFailure` when the query itself fails.
pub async fn get_sankey_data(
    clickhouse_client: &ClickhouseClient,
    auth: &AuthInfo,
    time_range: &TimeRange,
) -> MetricsResult<Vec<SankeyRow>> {
    let mut query_builder =
        QueryBuilder::<ClickhouseClient>::new(AnalyticsCollection::PaymentIntentSessionized);
    query_builder
        .add_select_column(Aggregate::<String>::Count {
            field: None,
            alias: Some("count"),
        })
        .change_context(MetricsError::QueryBuildingError)?;
    // The remaining select columns mirror the group-by dimensions below;
    // `first_attempt` is derived from the attempt count.
    for column in [
        "status",
        "refunds_status",
        "dispute_status",
        "(attempt_count = 1) as first_attempt",
    ] {
        query_builder
            .add_select_column(column)
            .attach_printable("Error adding select clause")
            .change_context(MetricsError::QueryBuildingError)?;
    }
    auth.set_filter_clause(&mut query_builder)
        .change_context(MetricsError::QueryBuildingError)?;
    time_range
        .set_filter_clause(&mut query_builder)
        .change_context(MetricsError::QueryBuildingError)?;
    for column in ["status", "refunds_status", "dispute_status", "first_attempt"] {
        query_builder
            .add_group_by_clause(column)
            .attach_printable("Error adding group by clause")
            .change_context(MetricsError::QueryBuildingError)?;
    }
    // `execute_query` returns a nested result: outer = query building,
    // inner = query execution. The row vector is returned as-is (the previous
    // `.into_iter().map(Ok).collect()` re-wrap was a no-op).
    query_builder
        .execute_query::<SankeyRow, _>(clickhouse_client)
        .await
        .change_context(MetricsError::QueryBuildingError)?
        .change_context(MetricsError::QueryExecutionFailure)
}
// File: crates/analytics/src/payment_intents/filters.rs
use api_models::analytics::{payment_intents::PaymentIntentDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{AuthenticationType, Currency, IntentStatus};
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait: a data source capable of loading [`PaymentIntentFilterRow`]s.
pub trait PaymentIntentFilterAnalytics: LoadRow<PaymentIntentFilterRow> {}
/// Queries the distinct values of a single payment-intent dimension for one
/// merchant inside the given time range.
///
/// # Errors
/// `FiltersError::QueryBuildingError` for builder failures,
/// `FiltersError::QueryExecutionFailure` when the query itself fails.
pub async fn get_payment_intent_filter_for_dimension<T>(
    dimension: PaymentIntentDimensions,
    merchant_id: &common_utils::id_type::MerchantId,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<PaymentIntentFilterRow>>
where
    T: AnalyticsDataSource + PaymentIntentFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    // SELECT DISTINCT <dimension> FROM payment_intent
    // WHERE merchant_id = ? AND <time range>
    let mut builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::PaymentIntent);
    builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    builder.set_distinct();
    builder
        .execute_query::<PaymentIntentFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// A single distinct-value row from a filter query. Every column is optional
/// because only the column matching the queried dimension is selected;
/// presumably all others deserialize as `None` — see
/// `get_payment_intent_filter_for_dimension`.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct PaymentIntentFilterRow {
    pub status: Option<DBEnumWrapper<IntentStatus>>,
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub profile_id: Option<String>,
    pub connector: Option<String>,
    pub authentication_type: Option<DBEnumWrapper<AuthenticationType>>,
    pub payment_method: Option<String>,
    pub payment_method_type: Option<String>,
    pub card_network: Option<String>,
    pub merchant_id: Option<String>,
    pub card_last_4: Option<String>,
    pub card_issuer: Option<String>,
    pub error_reason: Option<String>,
    pub customer_id: Option<String>,
}
// File: crates/analytics/src/payment_intents/accumulator.rs
use api_models::analytics::payment_intents::PaymentIntentMetricsBucketValue;
use bigdecimal::ToPrimitive;
use diesel_models::enums as storage_enums;
use super::metrics::PaymentIntentMetricRow;
/// All metric accumulators for one dimension bucket; `get_metrics` keeps one
/// instance per bucket identifier and folds every metric row into it.
#[derive(Debug, Default)]
pub struct PaymentIntentMetricsAccumulator {
    pub successful_smart_retries: CountAccumulator,
    pub total_smart_retries: CountAccumulator,
    pub smart_retried_amount: SmartRetriedAmountAccumulator,
    pub payment_intent_count: CountAccumulator,
    pub payments_success_rate: PaymentsSuccessRateAccumulator,
    pub payment_processed_amount: ProcessedAmountAccumulator,
    pub payments_distribution: PaymentsDistributionAccumulator,
}
/// One (error message, count, total) entry of an error distribution.
#[derive(Debug, Default)]
pub struct ErrorDistributionRow {
    pub count: i64,
    pub total: i64,
    pub error_message: String,
}
/// Collects [`ErrorDistributionRow`]s across buckets.
#[derive(Debug, Default)]
pub struct ErrorDistributionAccumulator {
    pub error_vec: Vec<ErrorDistributionRow>,
}
/// Sums the `count` column across rows; stays `None` until a row contributes.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Folds metric rows for a single bucket and produces the bucket's final value.
pub trait PaymentIntentMetricAccumulator {
    /// Shape of the collected output; differs per metric.
    type MetricOutput;
    /// Merges one database row into the running accumulator state.
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow);
    /// Consumes the accumulator and returns the final value(s).
    fn collect(self) -> Self::MetricOutput;
}
/// Running sums of smart-retried amounts, overall and for first-attempt rows.
#[derive(Debug, Default)]
pub struct SmartRetriedAmountAccumulator {
    pub amount: Option<i64>,
    pub amount_without_retries: Option<i64>,
}
/// Counters backing the payments success-rate metric.
#[derive(Debug, Default)]
pub struct PaymentsSuccessRateAccumulator {
    pub success: u32,
    pub success_without_retries: u32,
    pub total: u32,
}
/// Amount/count sums for processed payments, with and without smart retries.
#[derive(Debug, Default)]
pub struct ProcessedAmountAccumulator {
    pub count_with_retries: Option<i64>,
    pub total_with_retries: Option<i64>,
    pub count_without_retries: Option<i64>,
    pub total_without_retries: Option<i64>,
}
/// Counters for the success/failure distribution of first-attempt payments.
#[derive(Debug, Default)]
pub struct PaymentsDistributionAccumulator {
    pub success_without_retries: u32,
    pub failed_without_retries: u32,
    pub total: u32,
}
impl PaymentIntentMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;
    /// Adds the row's count: `None` acts as the identity, two `Some`s sum.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        self.count = match (self.count, metrics.count) {
            (Some(acc), Some(row)) => Some(acc + row),
            (acc, row) => acc.or(row),
        }
    }
    /// Converts the signed total to `u64`; negative totals collapse to `None`.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.count.and_then(|total| u64::try_from(total).ok())
    }
}
impl PaymentIntentMetricAccumulator for SmartRetriedAmountAccumulator {
    // (amount, amount_without_retries, usd placeholder, usd placeholder) —
    // the USD slots are overwritten later by the metrics aggregation step.
    type MetricOutput = (Option<u64>, Option<u64>, Option<u64>, Option<u64>);
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Option-aware sum: None + x = x, Some(a) + Some(b) = Some(a + b).
        self.amount = match (
            self.amount,
            metrics.total.as_ref().and_then(ToPrimitive::to_i64),
        ) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            (Some(a), Some(b)) => Some(a + b),
        };
        if metrics.first_attempt.unwrap_or(0) == 1 {
            // First-attempt rows also contribute to the retry-free sum.
            self.amount_without_retries = match (
                self.amount_without_retries,
                metrics.total.as_ref().and_then(ToPrimitive::to_i64),
            ) {
                (None, None) => None,
                (None, i @ Some(_)) | (i @ Some(_), None) => i,
                (Some(a), Some(b)) => Some(a + b),
            }
        } else {
            // NOTE(review): this overwrites any previously accumulated
            // first-attempt amount with 0 whenever a retry row arrives,
            // instead of leaving the running sum untouched — looks
            // unintentional; confirm before relying on this value.
            self.amount_without_retries = Some(0);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Negative sums collapse to None via try_from, then default to 0.
        let with_retries = self.amount.and_then(|i| u64::try_from(i).ok()).or(Some(0));
        let without_retries = self
            .amount_without_retries
            .and_then(|i| u64::try_from(i).ok())
            .or(Some(0));
        (with_retries, without_retries, Some(0), Some(0))
    }
}
impl PaymentIntentMetricAccumulator for PaymentsSuccessRateAccumulator {
    // (success, success_without_retries, total, rate, rate_without_retries)
    type MetricOutput = (
        Option<u32>,
        Option<u32>,
        Option<u32>,
        Option<f64>,
        Option<f64>,
    );
    /// Folds one row into the success/total counters. Rows still awaiting a
    /// customer/merchant action or confirmation are excluded from the total.
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        let Some(ref status) = metrics.status else {
            return;
        };
        // Rows whose count is missing or out of u32 range contribute nothing.
        let row_count = metrics.count.and_then(|count| u32::try_from(count).ok());
        if let Some(count) = row_count {
            if status.as_ref() == &storage_enums::IntentStatus::Succeeded {
                self.success += count;
                if metrics.first_attempt.unwrap_or(0) == 1 {
                    self.success_without_retries += count;
                }
            }
            let in_progress = matches!(
                status.as_ref(),
                storage_enums::IntentStatus::RequiresCustomerAction
                    | storage_enums::IntentStatus::RequiresPaymentMethod
                    | storage_enums::IntentStatus::RequiresMerchantAction
                    | storage_enums::IntentStatus::RequiresConfirmation
            );
            if !in_progress {
                self.total += count;
            }
        }
    }
    fn collect(self) -> Self::MetricOutput {
        if self.total == 0 {
            return (None, None, None, None, None);
        }
        // `self.total > 0` here, so the rates are always well-defined.
        let rate_of = |part: u32| Some(f64::from(part) * 100.0 / f64::from(self.total));
        (
            Some(self.success),
            Some(self.success_without_retries),
            Some(self.total),
            rate_of(self.success),
            rate_of(self.success_without_retries),
        )
    }
}
impl PaymentIntentMetricAccumulator for ProcessedAmountAccumulator {
    // (total, count, total_without_retries, count_without_retries,
    //  usd placeholder, usd placeholder) — USD slots are filled in later by
    // the metrics aggregation step.
    type MetricOutput = (
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
        Option<u64>,
    );
    /// Accumulates processed amount and count, both overall and restricted to
    /// first-attempt (retry-free) rows.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        // Option-aware addition: `None` acts as the identity element.
        fn add_opt(acc: Option<i64>, rhs: Option<i64>) -> Option<i64> {
            match (acc, rhs) {
                (Some(a), Some(b)) => Some(a + b),
                (a, b) => a.or(b),
            }
        }
        let row_total = metrics.total.as_ref().and_then(ToPrimitive::to_i64);
        self.total_with_retries = add_opt(self.total_with_retries, row_total);
        self.count_with_retries = add_opt(self.count_with_retries, metrics.count);
        if metrics.first_attempt.unwrap_or(0) == 1 {
            self.total_without_retries = add_opt(self.total_without_retries, row_total);
            self.count_without_retries = add_opt(self.count_without_retries, metrics.count);
        }
    }
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        // Counts stay `None` when unset; totals default to 0. Negative values
        // collapse to `None` through the u64 conversion.
        let count_to_u64 = |count: Option<i64>| count.and_then(|value| u64::try_from(value).ok());
        let total_to_u64 = |total: Option<i64>| u64::try_from(total.unwrap_or(0)).ok();
        (
            total_to_u64(self.total_with_retries),
            count_to_u64(self.count_with_retries),
            total_to_u64(self.total_without_retries),
            count_to_u64(self.count_without_retries),
            Some(0),
            Some(0),
        )
    }
}
impl PaymentIntentMetricAccumulator for PaymentsDistributionAccumulator {
    // (success rate without retries, failure rate without retries)
    type MetricOutput = (Option<f64>, Option<f64>);
    fn add_metrics_bucket(&mut self, metrics: &PaymentIntentMetricRow) {
        let first_attempt = metrics.first_attempt.unwrap_or(0);
        let Some(ref status) = metrics.status else {
            return;
        };
        // Rows whose count is missing or out of u32 range contribute nothing.
        let row_count = metrics.count.and_then(|count| u32::try_from(count).ok());
        if let Some(count) = row_count {
            if status.as_ref() == &storage_enums::IntentStatus::Succeeded && first_attempt == 1 {
                self.success_without_retries += count;
            }
            // "Failed without retries" counts retry rows as well as failed
            // first attempts.
            if first_attempt == 0
                || (first_attempt == 1 && status.as_ref() == &storage_enums::IntentStatus::Failed)
            {
                self.failed_without_retries += count;
            }
            // Rows still awaiting customer/merchant action are excluded from
            // the total.
            if !matches!(
                status.as_ref(),
                storage_enums::IntentStatus::RequiresCustomerAction
                    | storage_enums::IntentStatus::RequiresPaymentMethod
                    | storage_enums::IntentStatus::RequiresMerchantAction
                    | storage_enums::IntentStatus::RequiresConfirmation
            ) {
                self.total += count;
            }
        }
    }
    fn collect(self) -> Self::MetricOutput {
        if self.total == 0 {
            return (None, None);
        }
        // `self.total > 0` here, so the rates are always well-defined.
        let pct = |part: u32| Some(f64::from(part) * 100.0 / f64::from(self.total));
        (
            pct(self.success_without_retries),
            pct(self.failed_without_retries),
        )
    }
}
impl PaymentIntentMetricsAccumulator {
    /// Drains every per-metric sub-accumulator and assembles the final
    /// `PaymentIntentMetricsBucketValue` for a single bucket.
    ///
    /// Each `collect()` consumes its accumulator and returns a tuple; the
    /// destructuring patterns below must stay in the exact order of the
    /// corresponding `MetricOutput` tuple definitions.
    pub fn collect(self) -> PaymentIntentMetricsBucketValue {
        // (successful, successful w/o smart retries, total, rate, rate w/o smart retries)
        let (
            successful_payments,
            successful_payments_without_smart_retries,
            total_payments,
            payments_success_rate,
            payments_success_rate_without_smart_retries,
        ) = self.payments_success_rate.collect();
        // Retried amounts, with and without smart retries, plus USD conversions.
        let (
            smart_retried_amount,
            smart_retried_amount_without_smart_retries,
            smart_retried_amount_in_usd,
            smart_retried_amount_without_smart_retries_in_usd,
        ) = self.smart_retried_amount.collect();
        // Processed amounts/counts, with and without smart retries, plus USD.
        let (
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_in_usd,
        ) = self.payment_processed_amount.collect();
        // Success/failure rate distribution (percentages), excluding retries.
        let (
            payments_success_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_without_smart_retries,
        ) = self.payments_distribution.collect();
        PaymentIntentMetricsBucketValue {
            successful_smart_retries: self.successful_smart_retries.collect(),
            total_smart_retries: self.total_smart_retries.collect(),
            smart_retried_amount,
            smart_retried_amount_in_usd,
            smart_retried_amount_without_smart_retries,
            smart_retried_amount_without_smart_retries_in_usd,
            payment_intent_count: self.payment_intent_count.collect(),
            successful_payments,
            successful_payments_without_smart_retries,
            total_payments,
            payments_success_rate,
            payments_success_rate_without_smart_retries,
            payment_processed_amount,
            payment_processed_count,
            payment_processed_amount_without_smart_retries,
            payment_processed_count_without_smart_retries,
            payments_success_rate_distribution_without_smart_retries,
            payments_failure_rate_distribution_without_smart_retries,
            payment_processed_amount_in_usd,
            payment_processed_amount_without_smart_retries_in_usd,
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payment_intents/core.rs",
"crates/analytics/src/payment_intents/types.rs",
"crates/analytics/src/payment_intents/metrics.rs",
"crates/analytics/src/payment_intents/sankey.rs",
"crates/analytics/src/payment_intents/filters.rs",
"crates/analytics/src/payment_intents/accumulator.rs"
],
"module": "crates/analytics/src/payment_intents",
"num_files": 6,
"token_count": 10237
}
|
module_8330796506709996214
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payment_intents/metrics
Files: 7
</path>
<module>
// File: crates/analytics/src/payment_intents/metrics/payments_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Payments success-rate metric over the (non-sessionized) `payment_intent`
/// collection. Emits one row per dimension/status group; the rate itself is
/// computed downstream by the accumulator.
#[derive(Default)]
pub(super) struct PaymentsSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsSuccessRate
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT-per-group query, keyed by the requested
    /// dimensions plus intent status, bounded by `time_range` and `filters`.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        // Always group by intent status too, so the accumulator can split
        // successful vs. total counts within each bucket.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/Max of created_at bound each bucket's effective time window.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is deliberately omitted from the bucket id:
                        // rows differing only by status collapse into a single
                        // bucket when the success rate is accumulated.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/total_smart_retries.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Total smart-retry count over the (non-sessionized) `payment_intent`
/// collection: counts intents that were attempted more than once.
#[derive(Default)]
pub(super) struct TotalSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for TotalSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT query restricted to retried intents
    /// (`attempt_count > 1`), grouped by the requested dimensions.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/Max of created_at bound each bucket's effective time window.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // A "smart retry" is any intent with more than one attempt.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics.rs
mod payment_intent_count;
mod payment_processed_amount;
mod payments_distribution;
mod payments_success_rate;
mod smart_retried_amount;
mod successful_smart_retries;
mod total_smart_retries;
pub(super) use payment_intent_count::PaymentIntentCount;
pub(super) use payment_processed_amount::PaymentProcessedAmount;
pub(super) use payments_distribution::PaymentsDistribution;
pub(super) use payments_success_rate::PaymentsSuccessRate;
pub(super) use smart_retried_amount::SmartRetriedAmount;
pub(super) use successful_smart_retries::SuccessfulSmartRetries;
pub(super) use total_smart_retries::TotalSmartRetries;
pub use super::{PaymentIntentMetric, PaymentIntentMetricAnalytics, PaymentIntentMetricRow};
// File: crates/analytics/src/payment_intents/metrics/payment_intent_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Raw payment-intent count over the (non-sessionized) `payment_intent`
/// collection, grouped by the requested dimensions.
#[derive(Default)]
pub(super) struct PaymentIntentCount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentIntentCount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs an unconditional COUNT query bounded by `time_range`,
    /// `filters`, and the caller's auth scope.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/Max of created_at bound each bucket's effective time window.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/successful_smart_retries.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Successful smart-retry count over the (non-sessionized) `payment_intent`
/// collection: retried intents (`attempt_count > 1`) that ended succeeded.
#[derive(Default)]
pub(super) struct SuccessfulSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SuccessfulSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT query filtered to retried AND succeeded
    /// intents, grouped by the requested dimensions.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/Max of created_at bound each bucket's effective time window.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // Retried (more than one attempt) ...
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        // ... and ultimately successful.
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/smart_retried_amount.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Amount recovered via smart retries over the (non-sessionized)
/// `payment_intent` collection: SUM(amount) of retried, succeeded intents,
/// broken out per currency.
#[derive(Default)]
pub(super) struct SmartRetriedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SmartRetriedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a SUM(amount) query filtered to retried, succeeded
    /// intents, grouped by the requested dimensions and currency.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Amounts must never be summed across currencies, so currency is both
        // selected and grouped by below.
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // Retried (more than one attempt) and ultimately successful.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/payment_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Processed amount metric over the (non-sessionized) `payment_intent`
/// collection: COUNT and SUM(amount) of succeeded intents, per currency.
#[derive(Default)]
pub(super) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT + SUM(amount) query restricted to succeeded
    /// intents, grouped by the requested dimensions and currency.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntent);
        // Status is appended as an extra dimension so it can be filtered on
        // via `add_filter_clause` further below.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Amounts must never be summed across currencies, so currency is both
        // selected and grouped by below.
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only succeeded intents contribute to processed amount.
        query_builder
            .add_filter_clause(
                PaymentIntentDimensions::PaymentIntentStatus,
                storage_enums::IntentStatus::Succeeded,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is constant (Succeeded) for every row, so it
                        // is omitted from the bucket id.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payment_intents/metrics/payments_success_rate.rs",
"crates/analytics/src/payment_intents/metrics/total_smart_retries.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics.rs",
"crates/analytics/src/payment_intents/metrics/payment_intent_count.rs",
"crates/analytics/src/payment_intents/metrics/successful_smart_retries.rs",
"crates/analytics/src/payment_intents/metrics/smart_retried_amount.rs",
"crates/analytics/src/payment_intents/metrics/payment_processed_amount.rs"
],
"module": "crates/analytics/src/payment_intents/metrics",
"num_files": 7,
"token_count": 5902
}
|
module_-5362820497374066814
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/payment_intents/metrics/sessionized_metrics
Files: 7
</path>
<module>
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized payments success-rate metric, sourced from the
/// `PaymentIntentSessionized` collection. Also projects a computed
/// `first_attempt` flag so the accumulator can separate retried payments.
#[derive(Default)]
pub(crate) struct PaymentsSuccessRate;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsSuccessRate
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT-per-group query keyed by the requested
    /// dimensions, intent status, and the computed `first_attempt` flag.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        // Always group by intent status too, so the accumulator can split
        // successful vs. total counts within each bucket.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Raw SQL expression: 1 when the intent had exactly one attempt
        // (i.e. was not retried), 0 otherwise.
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt".to_string())
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is deliberately omitted from the bucket id:
                        // rows differing only by status collapse into a single
                        // bucket when the success rate is accumulated.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/total_smart_retries.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized total smart-retry count: counts intents in the
/// `PaymentIntentSessionized` collection that were attempted more than once.
#[derive(Default)]
pub(crate) struct TotalSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for TotalSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs a COUNT query restricted to retried intents
    /// (`attempt_count > 1`), grouped by the requested dimensions.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/Max of created_at bound each bucket's effective time window.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // A "smart retry" is any intent with more than one attempt.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Clip observed bucket boundaries to the requested
                            // granularity; fall back to the query's own range.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_distribution.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric type: distribution of sessionized payment intents across intent
/// statuses and a first-attempt/retried split.
#[derive(Default)]
pub(crate) struct PaymentsDistribution;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentsDistribution
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts sessionized payment intents grouped by the requested dimensions,
    /// the intent status, and a derived `first_attempt` flag, yielding one row
    /// per (dimensions, status, first_attempt, time-bucket) combination.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        // The distribution is always broken down by intent status, even when
        // the caller did not request that dimension explicitly.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Marks intents with exactly one attempt (i.e. no retries).
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        // Min/max of created_at bound each granularity bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is deliberately excluded from the bucket
                        // identifier; the per-status value stays on the row.
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Snap bucket edges to the granularity grid; fall
                            // back to the requested range when unavailable.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_intent_count.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric type: plain count of sessionized payment intents per bucket.
#[derive(Default)]
pub(crate) struct PaymentIntentCount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentIntentCount
where
T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
PrimitiveDateTime: ToSql<T>,
AnalyticsCollection: ToSql<T>,
Granularity: GroupByClause<T>,
Aggregate<&'static str>: ToSql<T>,
Window<&'static str>: ToSql<T>,
{
async fn load_metrics(
&self,
dimensions: &[PaymentIntentDimensions],
auth: &AuthInfo,
filters: &PaymentIntentFilters,
granularity: Option<Granularity>,
time_range: &TimeRange,
pool: &T,
) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
{
let mut query_builder: QueryBuilder<T> =
QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
for dim in dimensions.iter() {
query_builder.add_select_column(dim).switch()?;
}
query_builder
.add_select_column(Aggregate::Count {
field: None,
alias: Some("count"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Min {
field: "created_at",
alias: Some("start_bucket"),
})
.switch()?;
query_builder
.add_select_column(Aggregate::Max {
field: "created_at",
alias: Some("end_bucket"),
})
.switch()?;
filters.set_filter_clause(&mut query_builder).switch()?;
auth.set_filter_clause(&mut query_builder).switch()?;
time_range
.set_filter_clause(&mut query_builder)
.attach_printable("Error filtering time range")
.switch()?;
for dim in dimensions.iter() {
query_builder
.add_group_by_clause(dim)
.attach_printable("Error grouping by dimensions")
.switch()?;
}
if let Some(granularity) = granularity {
granularity
.set_group_by_clause(&mut query_builder)
.attach_printable("Error adding granularity")
.switch()?;
}
query_builder
.execute_query::<PaymentIntentMetricRow, _>(pool)
.await
.change_context(MetricsError::QueryBuildingError)?
.change_context(MetricsError::QueryExecutionFailure)?
.into_iter()
.map(|i| {
Ok((
PaymentIntentMetricsBucketIdentifier::new(
i.status.as_ref().map(|i| i.0),
i.currency.as_ref().map(|i| i.0),
i.profile_id.clone(),
i.connector.clone(),
i.authentication_type.as_ref().map(|i| i.0),
i.payment_method.clone(),
i.payment_method_type.clone(),
i.card_network.clone(),
i.merchant_id.clone(),
i.card_last_4.clone(),
i.card_issuer.clone(),
i.error_reason.clone(),
TimeRange {
start_time: match (granularity, i.start_bucket) {
(Some(g), Some(st)) => g.clip_to_start(st)?,
_ => time_range.start_time,
},
end_time: granularity.as_ref().map_or_else(
|| Ok(time_range.end_time),
|g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
)?,
},
),
i,
))
})
.collect::<error_stack::Result<
HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
crate::query::PostProcessingError,
>>()
.change_context(MetricsError::PostProcessingFailure)
}
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/successful_smart_retries.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric type: count of intents that succeeded with more than one attempt.
#[derive(Default)]
pub(crate) struct SuccessfulSmartRetries;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SuccessfulSmartRetries
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts sessionized payment intents that succeeded after retries,
    /// i.e. rows with `attempt_count > 1` and `status = succeeded`.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of created_at bound each granularity bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // "Smart retry" definition: more than one attempt...
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        // ...and the intent eventually succeeded.
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Snap bucket edges to the granularity grid; fall
                            // back to the requested range when unavailable.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/smart_retried_amount.rs
use std::collections::HashSet;
use api_models::{
analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
},
enums::IntentStatus,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric type: summed amount of intents that succeeded after retries.
#[derive(Default)]
pub(crate) struct SmartRetriedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for SmartRetriedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Sums the `amount` of sessionized intents that succeeded after retries
    /// (`attempt_count > 1`, `status = succeeded`), additionally grouped by
    /// `first_attempt` and `currency` so amounts are not mixed across currencies.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // SUM(amount) is the metric value ("total").
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Marks intents with exactly one attempt (i.e. no retries).
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        // Min/max of created_at bound each granularity bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        // "Smart retry" definition: more than one attempt and a succeeded intent.
        query_builder
            .add_custom_filter_clause("attempt_count", "1", FilterTypes::Gt)
            .switch()?;
        query_builder
            .add_custom_filter_clause("status", IntentStatus::Succeeded, FilterTypes::Equal)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        i.status.as_ref().map(|i| i.0),
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Snap bucket edges to the granularity grid; fall
                            // back to the requested range when unavailable.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
payment_intents::{
PaymentIntentDimensions, PaymentIntentFilters, PaymentIntentMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::PaymentIntentMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric type: processed (succeeded) amount of sessionized payment intents.
#[derive(Default)]
pub(crate) struct PaymentProcessedAmount;
#[async_trait::async_trait]
impl<T> super::PaymentIntentMetric<T> for PaymentProcessedAmount
where
    T: AnalyticsDataSource + super::PaymentIntentMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Sums the `amount` (and counts) of succeeded sessionized payment
    /// intents, grouped by the requested dimensions plus `first_attempt`
    /// and `currency` so amounts are not mixed across currencies.
    async fn load_metrics(
        &self,
        dimensions: &[PaymentIntentDimensions],
        auth: &AuthInfo,
        filters: &PaymentIntentFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>>
    {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::PaymentIntentSessionized);
        // Status is always selected/grouped here, even though it is later
        // pinned to `Succeeded` by the filter below.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(PaymentIntentDimensions::PaymentIntentStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Marks intents with exactly one attempt (i.e. no retries).
        query_builder
            .add_select_column("(attempt_count = 1) as first_attempt")
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        // SUM(amount) is the metric value ("total").
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "amount",
                alias: Some("total"),
            })
            .switch()?;
        // Min/max of created_at bound each granularity bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        query_builder
            .add_group_by_clause("first_attempt")
            .attach_printable("Error grouping by first_attempt")
            .switch()?;
        query_builder
            .add_group_by_clause("currency")
            .attach_printable("Error grouping by currency")
            .switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Only succeeded intents count toward processed amount. NOTE(review):
        // this filter is added after the group-by calls; presumably the
        // builder places clauses by kind at build time, not call order.
        query_builder
            .add_filter_clause(
                PaymentIntentDimensions::PaymentIntentStatus,
                storage_enums::IntentStatus::Succeeded,
            )
            .switch()?;
        query_builder
            .execute_query::<PaymentIntentMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    PaymentIntentMetricsBucketIdentifier::new(
                        // Status is deliberately excluded from the bucket
                        // identifier (all rows are succeeded anyway).
                        None,
                        i.currency.as_ref().map(|i| i.0),
                        i.profile_id.clone(),
                        i.connector.clone(),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.payment_method.clone(),
                        i.payment_method_type.clone(),
                        i.card_network.clone(),
                        i.merchant_id.clone(),
                        i.card_last_4.clone(),
                        i.card_issuer.clone(),
                        i.error_reason.clone(),
                        TimeRange {
                            // Snap bucket edges to the granularity grid; fall
                            // back to the requested range when unavailable.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(PaymentIntentMetricsBucketIdentifier, PaymentIntentMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_success_rate.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/total_smart_retries.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/payments_distribution.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_intent_count.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/successful_smart_retries.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/smart_retried_amount.rs",
"crates/analytics/src/payment_intents/metrics/sessionized_metrics/payment_processed_amount.rs"
],
"module": "crates/analytics/src/payment_intents/metrics/sessionized_metrics",
"num_files": 7,
"token_count": 6897
}
|
module_-1249627163671030958
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/sdk_events
Files: 6
</path>
<module>
// File: crates/analytics/src/sdk_events/core.rs
use std::collections::HashMap;
use api_models::analytics::{
sdk_events::{
MetricsBucketResponse, SdkEventMetrics, SdkEventMetricsBucketIdentifier, SdkEventsRequest,
},
AnalyticsMetadata, GetSdkEventFiltersRequest, GetSdkEventMetricRequest, MetricsResponse,
SdkEventFiltersResponse,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use router_env::{instrument, logger, tracing};
use super::{
events::{get_sdk_event, SdkEventsResult},
SdkEventMetricsAccumulator,
};
use crate::{
errors::{AnalyticsError, AnalyticsResult},
sdk_events::SdkEventMetricAccumulator,
types::FiltersError,
AnalyticsProvider,
};
/// Returns the raw SDK events for the payment referenced in `req`.
///
/// SDK events are stored only in Clickhouse: the pure-SQLX provider returns
/// `FiltersError::NotImplemented`, and the combined providers route to their
/// Clickhouse half.
#[instrument(skip_all)]
pub async fn sdk_events_core(
    pool: &AnalyticsProvider,
    req: SdkEventsRequest,
    publishable_key: &String,
) -> AnalyticsResult<Vec<SdkEventsResult>> {
    match pool {
        AnalyticsProvider::Sqlx(_) => Err(FiltersError::NotImplemented(
            "SDK Events not implemented for SQLX",
        ))
        .attach_printable("SQL Analytics is not implemented for Sdk Events"),
        AnalyticsProvider::Clickhouse(pool) => get_sdk_event(publishable_key, req, pool).await,
        AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
        | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
            get_sdk_event(publishable_key, req, ckh_pool).await
        }
    }
    // Convert FiltersError into the AnalyticsError result type.
    .switch()
}
/// Loads every requested SDK-event metric concurrently and folds the results
/// into per-bucket accumulators.
///
/// One task is spawned per metric via a `JoinSet`; each task returns
/// `(metric_type, rows)`. Rows sharing a bucket identifier are merged into a
/// single `MetricsBucketResponse`.
#[instrument(skip_all)]
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    publishable_key: &String,
    req: GetSdkEventMetricRequest,
) -> AnalyticsResult<MetricsResponse<MetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<
        SdkEventMetricsBucketIdentifier,
        SdkEventMetricsAccumulator,
    > = HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    for metric_type in req.metrics.iter().cloned() {
        // Each task needs owned copies since it may outlive this iteration.
        let req = req.clone();
        let publishable_key_scoped = publishable_key.to_owned();
        let pool = pool.clone();
        set.spawn(async move {
            let data = pool
                .get_sdk_event_metrics(
                    &metric_type,
                    &req.group_by_names,
                    &publishable_key_scoped,
                    &req.filters,
                    req.time_series.map(|t| t.granularity),
                    &req.time_range,
                )
                .await
                .change_context(AnalyticsError::UnknownError);
            (metric_type, data)
        });
    }
    // Drain tasks as they complete; a join error (panic/cancel) aborts the call.
    while let Some((metric, data)) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        logger::info!("Logging Result {:?}", data);
        // Route each row into the accumulator field matching its metric.
        for (id, value) in data? {
            let metrics_builder = metrics_accumulator.entry(id).or_default();
            match metric {
                SdkEventMetrics::PaymentAttempts => {
                    metrics_builder.payment_attempts.add_metrics_bucket(&value)
                }
                SdkEventMetrics::PaymentMethodsCallCount => metrics_builder
                    .payment_methods_call_count
                    .add_metrics_bucket(&value),
                SdkEventMetrics::SdkRenderedCount => metrics_builder
                    .sdk_rendered_count
                    .add_metrics_bucket(&value),
                SdkEventMetrics::SdkInitiatedCount => metrics_builder
                    .sdk_initiated_count
                    .add_metrics_bucket(&value),
                SdkEventMetrics::PaymentMethodSelectedCount => metrics_builder
                    .payment_method_selected_count
                    .add_metrics_bucket(&value),
                SdkEventMetrics::PaymentDataFilledCount => metrics_builder
                    .payment_data_filled_count
                    .add_metrics_bucket(&value),
                SdkEventMetrics::AveragePaymentTime => metrics_builder
                    .average_payment_time
                    .add_metrics_bucket(&value),
                SdkEventMetrics::LoadTime => metrics_builder.load_time.add_metrics_bucket(&value),
            }
        }
        logger::debug!(
            "Analytics Accumulated Results: metric: {}, results: {:#?}",
            metric,
            metrics_accumulator
        );
    }
    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| MetricsBucketResponse {
            values: val.collect(),
            dimensions: id,
        })
        .collect();
    Ok(MetricsResponse {
        query_data,
        meta_data: [AnalyticsMetadata {
            current_time_range: req.time_range,
        }],
    })
}
/// Returns the distinct values available for each requested SDK-event
/// dimension, for populating filter dropdowns.
///
/// Like the other SDK-event queries, this is Clickhouse-only: the pure-SQLX
/// provider yields `FiltersError::NotImplemented`.
#[allow(dead_code)]
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetSdkEventFiltersRequest,
    publishable_key: &String,
) -> AnalyticsResult<SdkEventFiltersResponse> {
    use api_models::analytics::{sdk_events::SdkEventDimensions, SdkEventFilterValue};
    use super::filters::get_sdk_event_filter_for_dimension;
    use crate::sdk_events::filters::SdkEventFilter;
    let mut res = SdkEventFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(_pool) => Err(FiltersError::NotImplemented(
                "SDK Events not implemented for SQLX",
            ))
            .attach_printable("SQL Analytics is not implemented for SDK Events"),
            AnalyticsProvider::Clickhouse(pool) => {
                get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedSqlx(_sqlx_pool, ckh_pool)
            | AnalyticsProvider::CombinedCkh(_sqlx_pool, ckh_pool) => {
                get_sdk_event_filter_for_dimension(dim, publishable_key, &req.time_range, ckh_pool)
                    .await
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project each row onto the field matching the queried dimension,
        // dropping rows where that field is absent.
        .filter_map(|fil: SdkEventFilter| match dim {
            SdkEventDimensions::PaymentMethod => fil.payment_method,
            SdkEventDimensions::Platform => fil.platform,
            SdkEventDimensions::BrowserName => fil.browser_name,
            SdkEventDimensions::Source => fil.source,
            SdkEventDimensions::Component => fil.component,
            SdkEventDimensions::PaymentExperience => fil.payment_experience,
        })
        .collect::<Vec<String>>();
        res.query_data.push(SdkEventFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
// File: crates/analytics/src/sdk_events/types.rs
use api_models::analytics::sdk_events::{SdkEventDimensions, SdkEventFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for SdkEventFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Adds an `IN (...)` range clause per non-empty filter list; empty lists
    /// impose no constraint on their dimension.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.payment_method.is_empty() {
            builder
                .add_filter_in_range_clause(SdkEventDimensions::PaymentMethod, &self.payment_method)
                .attach_printable("Error adding payment method filter")?;
        }
        if !self.platform.is_empty() {
            builder
                .add_filter_in_range_clause(SdkEventDimensions::Platform, &self.platform)
                .attach_printable("Error adding platform filter")?;
        }
        if !self.browser_name.is_empty() {
            builder
                .add_filter_in_range_clause(SdkEventDimensions::BrowserName, &self.browser_name)
                .attach_printable("Error adding browser name filter")?;
        }
        if !self.source.is_empty() {
            builder
                .add_filter_in_range_clause(SdkEventDimensions::Source, &self.source)
                .attach_printable("Error adding source filter")?;
        }
        if !self.component.is_empty() {
            builder
                .add_filter_in_range_clause(SdkEventDimensions::Component, &self.component)
                .attach_printable("Error adding component filter")?;
        }
        if !self.payment_experience.is_empty() {
            builder
                .add_filter_in_range_clause(
                    SdkEventDimensions::PaymentExperience,
                    &self.payment_experience,
                )
                .attach_printable("Error adding payment experience filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/sdk_events/events.rs
use api_models::analytics::{
sdk_events::{SdkEventNames, SdkEventsRequest},
Granularity,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use strum::IntoEnumIterator;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
/// Marker trait: a data source able to deserialize rows into [`SdkEventsResult`].
pub trait SdkEventsFilterAnalytics: LoadRow<SdkEventsResult> {}
/// Fetches raw SDK events for a single payment, restricted to the known
/// [`SdkEventNames`] set and the requested time range.
///
/// # Errors
/// Returns `FiltersError::QueryBuildingError` / `QueryExecutionFailure` when
/// building or running the query fails.
pub async fn get_sdk_event<T>(
    publishable_key: &String,
    request: SdkEventsRequest,
    pool: &T,
) -> FiltersResult<Vec<SdkEventsResult>>
where
    T: AnalyticsDataSource + SdkEventsFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    // Quote every known event name so the list can be embedded in an IN clause.
    let static_event_list = SdkEventNames::iter()
        .map(|i| format!("'{}'", i.as_ref()))
        .collect::<Vec<String>>()
        .join(",");
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::SdkEvents);
    query_builder.add_select_column("*").switch()?;
    // SDK events are keyed by publishable key in the merchant_id column.
    query_builder
        .add_filter_clause("merchant_id", publishable_key)
        .switch()?;
    query_builder
        .add_filter_clause("payment_id", &request.payment_id)
        .switch()?;
    query_builder
        .add_custom_filter_clause("event_name", static_event_list, FilterTypes::In)
        .switch()?;
    // `?` already propagates the error; the former `let _ = &...` wrapper
    // around this statement was dead and has been removed.
    request
        .time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    //TODO!: update the execute_query function to return reports instead of plain errors...
    query_builder
        .execute_query::<SdkEventsResult, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One raw SDK event row as returned by the events query.
#[derive(Debug, serde::Serialize, serde::Deserialize)]
pub struct SdkEventsResult {
    // Holds the publishable key for SDK events (see the merchant_id filter
    // in `get_sdk_event`).
    pub merchant_id: common_utils::id_type::MerchantId,
    pub payment_id: common_utils::id_type::PaymentId,
    pub event_name: Option<String>,
    pub log_type: Option<String>,
    pub first_event: bool,
    pub browser_name: Option<String>,
    pub browser_version: Option<String>,
    pub source: Option<String>,
    pub category: Option<String>,
    pub version: Option<String>,
    pub value: Option<String>,
    pub platform: Option<String>,
    pub component: Option<String>,
    pub payment_method: Option<String>,
    pub payment_experience: Option<String>,
    // Units not specified here — presumably milliseconds; confirm upstream.
    pub latency: Option<u64>,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created_at_precise: PrimitiveDateTime,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created_at: PrimitiveDateTime,
}
// File: crates/analytics/src/sdk_events/metrics.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetrics, SdkEventMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, LoadRow, MetricsResult},
};
mod average_payment_time;
mod load_time;
mod payment_attempts;
mod payment_data_filled_count;
mod payment_method_selected_count;
mod payment_methods_call_count;
mod sdk_initiated_count;
mod sdk_rendered_count;
use average_payment_time::AveragePaymentTime;
use load_time::LoadTime;
use payment_attempts::PaymentAttempts;
use payment_data_filled_count::PaymentDataFilledCount;
use payment_method_selected_count::PaymentMethodSelectedCount;
use payment_methods_call_count::PaymentMethodsCallCount;
use sdk_initiated_count::SdkInitiatedCount;
use sdk_rendered_count::SdkRenderedCount;
/// One aggregated row produced by an SDK-event metric query: the aggregate
/// values (`total`, `count`) plus the dimension columns it was grouped by.
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct SdkEventMetricRow {
    pub total: Option<bigdecimal::BigDecimal>,
    pub count: Option<i64>,
    pub time_bucket: Option<String>,
    pub payment_method: Option<String>,
    pub platform: Option<String>,
    pub browser_name: Option<String>,
    pub source: Option<String>,
    pub component: Option<String>,
    pub payment_experience: Option<String>,
}
/// Marker trait: a data source able to deserialize rows into [`SdkEventMetricRow`].
pub trait SdkEventMetricAnalytics: LoadRow<SdkEventMetricRow> {}
/// Common interface every SDK-event metric implements: run its query against
/// `pool` and return rows keyed by bucket identifier.
#[async_trait::async_trait]
pub trait SdkEventMetric<T>
where
    T: AnalyticsDataSource + SdkEventMetricAnalytics,
{
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> SdkEventMetric<T> for SdkEventMetrics
where
    T: AnalyticsDataSource + SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete metric type matching this enum variant,
    /// forwarding every argument unchanged.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        // Local macro: every arm makes the identical call, only the receiver
        // differs, so spell the argument list out once.
        macro_rules! delegate {
            ($metric:expr) => {
                $metric
                    .load_metrics(
                        dimensions,
                        publishable_key,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            };
        }
        match self {
            Self::PaymentAttempts => delegate!(PaymentAttempts),
            Self::PaymentMethodsCallCount => delegate!(PaymentMethodsCallCount),
            Self::SdkRenderedCount => delegate!(SdkRenderedCount),
            Self::SdkInitiatedCount => delegate!(SdkInitiatedCount),
            Self::PaymentMethodSelectedCount => delegate!(PaymentMethodSelectedCount),
            Self::PaymentDataFilledCount => delegate!(PaymentDataFilledCount),
            Self::AveragePaymentTime => delegate!(AveragePaymentTime),
            Self::LoadTime => delegate!(LoadTime),
        }
    }
}
// File: crates/analytics/src/sdk_events/filters.rs
use api_models::analytics::{sdk_events::SdkEventDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, FiltersError, FiltersResult, LoadRow},
};
/// Marker trait: a data source that can load `SdkEventFilter` rows.
pub trait SdkEventFilterAnalytics: LoadRow<SdkEventFilter> {}
/// Fetches the distinct values available for a single SDK-event `dimension`
/// (e.g. all platforms seen) for the merchant identified by
/// `publishable_key`, restricted to `time_range`.
///
/// # Errors
/// Returns a `FiltersError` when building or executing the query fails.
pub async fn get_sdk_event_filter_for_dimension<T>(
    dimension: SdkEventDimensions,
    // Takes `&str` (not `&String`) per clippy::ptr_arg; callers holding a
    // `String` coerce transparently, and it matches the metric signatures.
    publishable_key: &str,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<SdkEventFilter>>
where
    T: AnalyticsDataSource + SdkEventFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> =
        QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
    // Select only the requested dimension column.
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    // SDK events store the publishable key in the merchant_id column.
    query_builder
        .add_filter_clause("merchant_id", publishable_key)
        .switch()?;
    // DISTINCT so each filter value appears once.
    query_builder.set_distinct();
    query_builder
        .execute_query::<SdkEventFilter, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// A single distinct-filter-value row; only the column matching the queried
/// dimension is populated, the rest deserialize as `None`.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct SdkEventFilter {
    pub payment_method: Option<String>,
    pub platform: Option<String>,
    pub browser_name: Option<String>,
    pub source: Option<String>,
    pub component: Option<String>,
    pub payment_experience: Option<String>,
}
// File: crates/analytics/src/sdk_events/accumulator.rs
use api_models::analytics::sdk_events::SdkEventMetricsBucketValue;
use router_env::logger;
use super::metrics::SdkEventMetricRow;
/// Per-bucket accumulator that folds the rows of every SDK-event metric for
/// one `SdkEventMetricsBucketIdentifier` into a single response value.
#[derive(Debug, Default)]
pub struct SdkEventMetricsAccumulator {
    pub payment_attempts: CountAccumulator,
    pub payment_methods_call_count: CountAccumulator,
    // NOTE(review): latency-style metrics are folded with CountAccumulator
    // (not AverageAccumulator) because their percentile is aliased "count".
    pub average_payment_time: CountAccumulator,
    pub load_time: CountAccumulator,
    pub sdk_initiated_count: CountAccumulator,
    pub sdk_rendered_count: CountAccumulator,
    pub payment_method_selected_count: CountAccumulator,
    pub payment_data_filled_count: CountAccumulator,
}
/// Sums `count` values across metric buckets; `None` until the first
/// non-empty bucket arrives.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Accumulates a running sum and sample count so `collect` can produce a
/// weighted average across buckets.
#[derive(Debug, Default)]
pub struct AverageAccumulator {
    pub total: u32,
    pub count: u32,
}
/// Folds `SdkEventMetricRow`s into a final per-metric output value.
pub trait SdkEventMetricAccumulator {
    /// Final value type produced once all buckets have been added.
    type MetricOutput;
    /// Incorporates one metric row into the running state.
    fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow);
    /// Consumes the accumulator and yields the aggregated result.
    fn collect(self) -> Self::MetricOutput;
}
impl SdkEventMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;
    /// Adds this row's count to the running total; a one-sided `None`
    /// keeps the present value.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow) {
        self.count = match (self.count, metrics.count) {
            (None, None) => None,
            (None, i @ Some(_)) | (i @ Some(_), None) => i,
            // saturating_add: a plain `a + b` would panic on overflow in
            // debug builds; saturation keeps the fold total-function.
            (Some(a), Some(b)) => Some(a.saturating_add(b)),
        }
    }
    /// Converts the signed total to `u64`; a negative total (should not
    /// occur for counts) collapses to `None` instead of panicking.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.count.and_then(|i| u64::try_from(i).ok())
    }
}
impl SdkEventMetricAccumulator for AverageAccumulator {
    type MetricOutput = Option<f64>;
    /// Adds one row's (total, count) pair; rows where either side does not
    /// fit in `u32` are dropped (logged) rather than skewing the average.
    fn add_metrics_bucket(&mut self, metrics: &SdkEventMetricRow) {
        let total = metrics
            .total
            .as_ref()
            .and_then(bigdecimal::ToPrimitive::to_u32);
        // Fix: the original closure shadowed the name `total` here even
        // though this converts the row *count* — renamed for clarity.
        let count = metrics.count.and_then(|c| u32::try_from(c).ok());
        match (total, count) {
            (Some(total), Some(count)) => {
                // saturating_add avoids a debug-build overflow panic when
                // many large buckets are folded together.
                self.total = self.total.saturating_add(total);
                self.count = self.count.saturating_add(count);
            }
            _ => {
                logger::error!(message="Dropping metrics for average accumulator", metric=?metrics);
            }
        }
    }
    /// Weighted average of all accepted rows; `None` when nothing was
    /// accumulated (avoids division by zero).
    fn collect(self) -> Self::MetricOutput {
        if self.count == 0 {
            None
        } else {
            Some(f64::from(self.total) / f64::from(self.count))
        }
    }
}
impl SdkEventMetricsAccumulator {
    /// Finalizes every per-metric accumulator into the response payload for
    /// one bucket.
    // dead_code allow: presumably invoked only from the core module behind a
    // feature/backend split — TODO confirm before removing.
    #[allow(dead_code)]
    pub fn collect(self) -> SdkEventMetricsBucketValue {
        SdkEventMetricsBucketValue {
            payment_attempts: self.payment_attempts.collect(),
            payment_methods_call_count: self.payment_methods_call_count.collect(),
            average_payment_time: self.average_payment_time.collect(),
            load_time: self.load_time.collect(),
            sdk_initiated_count: self.sdk_initiated_count.collect(),
            sdk_rendered_count: self.sdk_rendered_count.collect(),
            payment_method_selected_count: self.payment_method_selected_count.collect(),
            payment_data_filled_count: self.payment_data_filled_count.collect(),
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/sdk_events/core.rs",
"crates/analytics/src/sdk_events/types.rs",
"crates/analytics/src/sdk_events/events.rs",
"crates/analytics/src/sdk_events/metrics.rs",
"crates/analytics/src/sdk_events/filters.rs",
"crates/analytics/src/sdk_events/accumulator.rs"
],
"module": "crates/analytics/src/sdk_events",
"num_files": 6,
"token_count": 4571
}
|
module_1391805673184214332
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/sdk_events/metrics
Files: 8
</path>
<module>
// File: crates/analytics/src/sdk_events/metrics/payment_method_selected_count.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts `PaymentMethodChanged` first-events, i.e. payment-method
/// selections in the SDK.
#[derive(Default)]
pub(super) struct PaymentMethodSelectedCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for PaymentMethodSelectedCount
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::PaymentMethodChanged)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/payment_data_filled_count.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts `PaymentDataFilled` first-events, i.e. completed payment-form
/// entries in the SDK.
#[derive(Default)]
pub(super) struct PaymentDataFilledCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for PaymentDataFilledCount
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::PaymentDataFilled)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/load_time.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Median (p50) SDK load latency, taken from `AppRendered` first-events
/// with a positive latency.
#[derive(Default)]
pub(super) struct LoadTime;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for LoadTime
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the percentile query grouped by the requested
    /// dimensions (plus time bucket when a granularity is set).
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Percentile {
                field: "latency",
                // Aliased "count" so the p50 value lands in
                // `SdkEventMetricRow::count`.
                alias: Some("count"),
                percentile: Some(&50),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::AppRendered)
            .switch()?;
        // Drop zero/unset latencies so they don't distort the percentile.
        query_builder
            .add_custom_filter_clause("latency", 0, FilterTypes::Gt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/sdk_rendered_count.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts `AppRendered` first-events, i.e. successful SDK renders.
#[derive(Default)]
pub(super) struct SdkRenderedCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for SdkRenderedCount
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::AppRendered)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/payment_methods_call_count.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts successful `PaymentMethodsCall` API first-events
/// (log_type INFO, category API).
#[derive(Default)]
pub(super) struct PaymentMethodsCallCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for PaymentMethodsCallCount
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::PaymentMethodsCall)
            .switch()?;
        // Restrict to successful API-call log lines.
        query_builder
            .add_filter_clause("log_type", "INFO")
            .switch()?;
        query_builder
            .add_filter_clause("category", "API")
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/average_payment_time.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Median (p50) time-to-payment, taken from `PaymentAttempt` first-events
/// with a positive latency.
#[derive(Default)]
pub(super) struct AveragePaymentTime;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for AveragePaymentTime
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the percentile query grouped by the requested
    /// dimensions (plus time bucket when a granularity is set).
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Percentile {
                field: "latency",
                // Aliased "count" so the p50 value lands in
                // `SdkEventMetricRow::count`.
                alias: Some("count"),
                percentile: Some(&50),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::PaymentAttempt)
            .switch()?;
        // Drop zero/unset latencies so they don't distort the percentile.
        query_builder
            .add_custom_filter_clause("latency", 0, FilterTypes::Gt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/sdk_initiated_count.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts `OrcaElementsCalled` first-events, i.e. SDK initiations.
#[derive(Default)]
pub(super) struct SdkInitiatedCount;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for SdkInitiatedCount
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::OrcaElementsCalled)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/sdk_events/metrics/payment_attempts.rs
use std::collections::HashSet;
use api_models::analytics::{
sdk_events::{
SdkEventDimensions, SdkEventFilters, SdkEventMetricsBucketIdentifier, SdkEventNames,
},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::SdkEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Counts `PaymentAttempt` first-events.
#[derive(Default)]
pub(super) struct PaymentAttempts;
#[async_trait::async_trait]
impl<T> super::SdkEventMetric<T> for PaymentAttempts
where
    T: AnalyticsDataSource + super::SdkEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and runs the COUNT query grouped by the requested dimensions
    /// (plus time bucket when a granularity is set), keyed per bucket.
    async fn load_metrics(
        &self,
        dimensions: &[SdkEventDimensions],
        publishable_key: &str,
        filters: &SdkEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::SdkEventsAnalytics);
        // Iterate the borrowed slice directly; the previous `to_vec()` copy
        // was never needed since the dimensions are only read.
        for dim in dimensions {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        if let Some(granularity) = granularity {
            query_builder
                .add_granularity_in_mins(granularity)
                .switch()?;
        }
        filters.set_filter_clause(&mut query_builder).switch()?;
        query_builder
            .add_filter_clause("merchant_id", publishable_key)
            .switch()?;
        // Only the first occurrence per session/payment is counted.
        query_builder
            .add_bool_filter_clause("first_event", 1)
            .switch()?;
        query_builder
            .add_filter_clause("event_name", SdkEventNames::PaymentAttempt)
            .switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Mirror the time-bucket select with a matching GROUP BY.
        if granularity.is_some() {
            query_builder
                .add_group_by_clause("time_bucket")
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<SdkEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    SdkEventMetricsBucketIdentifier::new(
                        i.payment_method.clone(),
                        i.platform.clone(),
                        i.browser_name.clone(),
                        i.source.clone(),
                        i.component.clone(),
                        i.payment_experience.clone(),
                        i.time_bucket.clone(),
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(SdkEventMetricsBucketIdentifier, SdkEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/sdk_events/metrics/payment_method_selected_count.rs",
"crates/analytics/src/sdk_events/metrics/payment_data_filled_count.rs",
"crates/analytics/src/sdk_events/metrics/load_time.rs",
"crates/analytics/src/sdk_events/metrics/sdk_rendered_count.rs",
"crates/analytics/src/sdk_events/metrics/payment_methods_call_count.rs",
"crates/analytics/src/sdk_events/metrics/average_payment_time.rs",
"crates/analytics/src/sdk_events/metrics/sdk_initiated_count.rs",
"crates/analytics/src/sdk_events/metrics/payment_attempts.rs"
],
"module": "crates/analytics/src/sdk_events/metrics",
"num_files": 8,
"token_count": 6374
}
|
module_6146081276211142147
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/disputes
Files: 5
</path>
<module>
// File: crates/analytics/src/disputes/core.rs
use std::collections::HashMap;
use api_models::analytics::{
disputes::{
DisputeDimensions, DisputeMetrics, DisputeMetricsBucketIdentifier,
DisputeMetricsBucketResponse,
},
DisputeFilterValue, DisputeFiltersResponse, DisputesAnalyticsMetadata, DisputesMetricsResponse,
GetDisputeFilterRequest, GetDisputeMetricRequest,
};
use error_stack::ResultExt;
use router_env::{
logger,
tracing::{self, Instrument},
};
use super::{
filters::{get_dispute_filter_for_dimension, DisputeFilterRow},
DisputeMetricsAccumulator,
};
use crate::{
disputes::DisputeMetricAccumulator,
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics, AnalyticsProvider,
};
/// Computes the requested dispute metrics, grouped by the requested
/// dimensions, and returns one response bucket per dimension combination.
///
/// Each metric in `req.metrics` is queried concurrently on a
/// `tokio::task::JoinSet`; completed results are merged sequentially into
/// per-bucket accumulators. `disputed_amount` / `dispute_lost_amount` are also
/// summed across all buckets into the response metadata.
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    auth: &AuthInfo,
    req: GetDisputeMetricRequest,
) -> AnalyticsResult<DisputesMetricsResponse<DisputeMetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<
        DisputeMetricsBucketIdentifier,
        DisputeMetricsAccumulator,
    > = HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        // NOTE(review): the span field is named `refund_metric` inside a
        // dispute query — looks like a copy-paste from the refunds module;
        // confirm before renaming (only affects tracing output).
        let task_span = tracing::debug_span!(
            "analytics_dispute_query",
            refund_metric = metric_type.as_ref()
        );
        // Currently JoinSet works with only static lifetime references even if the task pool does not outlive the given reference
        // We can optimize away this clone once that is fixed
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_dispute_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                // Return the metric type alongside the data so the merge loop
                // below knows which accumulator field to route rows into.
                (metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Drain tasks as they complete (completion order, not submission order).
    while let Some((metric, data)) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        let data = data?;
        let attributes = router_env::metric_attributes!(
            ("metric_type", metric.to_string()),
            ("source", pool.to_string()),
        );
        // Record how many buckets this metric produced (skipped if the row
        // count somehow doesn't fit in u64).
        let value = u64::try_from(data.len());
        if let Ok(val) = value {
            metrics::BUCKETS_FETCHED.record(val, attributes);
            logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
        }
        for (id, value) in data {
            logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
            let metrics_builder = metrics_accumulator.entry(id).or_default();
            // Sessionized and non-sessionized variants feed the same
            // accumulator field.
            match metric {
                DisputeMetrics::DisputeStatusMetric
                | DisputeMetrics::SessionizedDisputeStatusMetric => metrics_builder
                    .disputes_status_rate
                    .add_metrics_bucket(&value),
                DisputeMetrics::TotalAmountDisputed
                | DisputeMetrics::SessionizedTotalAmountDisputed => {
                    metrics_builder.disputed_amount.add_metrics_bucket(&value)
                }
                DisputeMetrics::TotalDisputeLostAmount
                | DisputeMetrics::SessionizedTotalDisputeLostAmount => metrics_builder
                    .dispute_lost_amount
                    .add_metrics_bucket(&value),
            }
        }
        logger::debug!(
            "Analytics Accumulated Results: metric: {}, results: {:#?}",
            metric,
            metrics_accumulator
        );
    }
    // Finalize each bucket and sum the amount totals for the metadata block.
    let mut total_disputed_amount = 0;
    let mut total_dispute_lost_amount = 0;
    let query_data: Vec<DisputeMetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let collected_values = val.collect();
            if let Some(amount) = collected_values.disputed_amount {
                total_disputed_amount += amount;
            }
            if let Some(amount) = collected_values.dispute_lost_amount {
                total_dispute_lost_amount += amount;
            }
            DisputeMetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    Ok(DisputesMetricsResponse {
        query_data,
        meta_data: [DisputesAnalyticsMetadata {
            total_disputed_amount: Some(total_disputed_amount),
            total_dispute_lost_amount: Some(total_dispute_lost_amount),
        }],
    })
}
/// Returns the distinct filter values available for each requested dispute
/// dimension within `req.time_range`.
///
/// For the `Combined*` providers both stores are queried and any divergence is
/// logged; `CombinedCkh` then serves the Clickhouse result while
/// `CombinedSqlx` serves the Postgres result.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetDisputeFilterRequest,
    auth: &AuthInfo,
) -> AnalyticsResult<DisputeFiltersResponse> {
    let mut res = DisputeFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_dispute_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_dispute_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) => {
                let ckh_result = get_dispute_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_dispute_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check the two stores; a mismatch is logged but not an
                // error — the Clickhouse result is still returned.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres disputes analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => {
                let ckh_result = get_dispute_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_dispute_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Same cross-check, but the Postgres result is served.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres disputes analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project the single column matching the requested dimension; rows
        // with a NULL value for it are dropped.
        .filter_map(|fil: DisputeFilterRow| match dim {
            DisputeDimensions::DisputeStage => fil.dispute_stage,
            DisputeDimensions::Connector => fil.connector,
            DisputeDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
        })
        .collect::<Vec<String>>();
        res.query_data.push(DisputeFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
// File: crates/analytics/src/disputes/types.rs
use api_models::analytics::disputes::{DisputeDimensions, DisputeFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for DisputeFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Appends one `IN (...)` clause to `builder` per non-empty filter list,
    /// in a fixed order: connector, dispute stage, currency.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.connector.is_empty() {
            let clause =
                builder.add_filter_in_range_clause(DisputeDimensions::Connector, &self.connector);
            clause.attach_printable("Error adding connector filter")?;
        }
        if !self.dispute_stage.is_empty() {
            let clause = builder
                .add_filter_in_range_clause(DisputeDimensions::DisputeStage, &self.dispute_stage);
            clause.attach_printable("Error adding dispute stage filter")?;
        }
        if !self.currency.is_empty() {
            let clause =
                builder.add_filter_in_range_clause(DisputeDimensions::Currency, &self.currency);
            clause.attach_printable("Error adding currency filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/disputes/metrics.rs
mod dispute_status_metric;
mod sessionized_metrics;
mod total_amount_disputed;
mod total_dispute_lost_amount;
use std::collections::HashSet;
use api_models::analytics::{
disputes::{DisputeDimensions, DisputeFilters, DisputeMetrics, DisputeMetricsBucketIdentifier},
Granularity,
};
use common_utils::types::TimeRange;
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use self::{
dispute_status_metric::DisputeStatusMetric, total_amount_disputed::TotalAmountDisputed,
total_dispute_lost_amount::TotalDisputeLostAmount,
};
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
/// One row of a dispute metrics query: the optional group-by dimension values
/// plus the aggregates (`total`, `count`) and the time-bucket boundaries.
#[derive(Debug, Eq, PartialEq, serde::Deserialize, Hash)]
pub struct DisputeMetricRow {
    pub dispute_stage: Option<DBEnumWrapper<storage_enums::DisputeStage>>,
    pub dispute_status: Option<DBEnumWrapper<storage_enums::DisputeStatus>>,
    pub connector: Option<String>,
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    // Aggregated amount for the bucket (NULL when the metric is count-based).
    pub total: Option<bigdecimal::BigDecimal>,
    pub count: Option<i64>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: a data source that can deserialize `DisputeMetricRow`s.
pub trait DisputeMetricAnalytics: LoadRow<DisputeMetricRow> {}
/// Uniform interface for loading one dispute metric from an analytics data
/// source, grouped by `dimensions` and bucketed within `time_range`.
#[async_trait::async_trait]
pub trait DisputeMetric<T>
where
    T: AnalyticsDataSource + DisputeMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket id, row)` pair per
    /// distinct dimension/time-bucket combination.
    async fn load_metrics(
        &self,
        dimensions: &[DisputeDimensions],
        auth: &AuthInfo,
        filters: &DisputeFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(DisputeMetricsBucketIdentifier, DisputeMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> DisputeMetric<T> for DisputeMetrics
where
    T: AnalyticsDataSource + DisputeMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete metric implementation for this enum variant,
    /// forwarding every query parameter unchanged.
    async fn load_metrics(
        &self,
        dimensions: &[DisputeDimensions],
        auth: &AuthInfo,
        filters: &DisputeFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(DisputeMetricsBucketIdentifier, DisputeMetricRow)>> {
        match self {
            Self::TotalAmountDisputed => {
                TotalAmountDisputed::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::DisputeStatusMetric => {
                DisputeStatusMetric::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::TotalDisputeLostAmount => {
                TotalDisputeLostAmount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            // Sessionized variants query the sessionizer-backed tables.
            Self::SessionizedTotalAmountDisputed => {
                sessionized_metrics::TotalAmountDisputed::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedDisputeStatusMetric => {
                sessionized_metrics::DisputeStatusMetric::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedTotalDisputeLostAmount => {
                sessionized_metrics::TotalDisputeLostAmount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
        }
    }
}
// File: crates/analytics/src/disputes/filters.rs
use api_models::analytics::{disputes::DisputeDimensions, Granularity, TimeRange};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::Currency;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait: a data source that can deserialize `DisputeFilterRow`s.
pub trait DisputeFilterAnalytics: LoadRow<DisputeFilterRow> {}
/// Queries the distinct values of `dimension` from the dispute table, scoped
/// by `auth` and restricted to `time_range` (used to populate filter options).
pub async fn get_dispute_filter_for_dimension<T>(
    dimension: DisputeDimensions,
    auth: &AuthInfo,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<DisputeFilterRow>>
where
    T: AnalyticsDataSource + DisputeFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Dispute);
    // SELECT <dimension> ... WHERE <time range> AND <auth scope> with DISTINCT.
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    auth.set_filter_clause(&mut query_builder).switch()?;
    query_builder.set_distinct();
    query_builder
        .execute_query::<DisputeFilterRow, _>(pool)
        .await
        // Outer error: building the query; inner error: executing it.
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row of a dispute filter query; only the column matching the requested
/// dimension is expected to be populated.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct DisputeFilterRow {
    pub connector: Option<String>,
    pub dispute_status: Option<String>,
    pub connector_status: Option<String>,
    pub dispute_stage: Option<String>,
    pub currency: Option<DBEnumWrapper<Currency>>,
}
// File: crates/analytics/src/disputes/accumulators.rs
use api_models::analytics::disputes::DisputeMetricsBucketValue;
use diesel_models::enums as storage_enums;
use super::metrics::DisputeMetricRow;
/// Per-bucket collection of accumulators, one per dispute metric family.
#[derive(Debug, Default)]
pub struct DisputeMetricsAccumulator {
    pub disputes_status_rate: RateAccumulator,
    pub disputed_amount: DisputedAmountAccumulator,
    pub dispute_lost_amount: DisputedAmountAccumulator,
}
/// Tallies dispute rows by outcome status (won / challenged / lost) plus an
/// overall total, from which rate counts are derived.
#[derive(Debug, Default)]
pub struct RateAccumulator {
    pub won_count: i64,
    pub challenged_count: i64,
    pub lost_count: i64,
    pub total: i64,
}
/// Sums a disputed amount; stays `None` until at least one row contributes.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct DisputedAmountAccumulator {
    pub total: Option<i64>,
}
/// Folds `DisputeMetricRow`s into an accumulator and finalizes its output.
pub trait DisputeMetricAccumulator {
    type MetricOutput;
    fn add_metrics_bucket(&mut self, metrics: &DisputeMetricRow);
    fn collect(self) -> Self::MetricOutput;
}
impl DisputeMetricAccumulator for DisputedAmountAccumulator {
    type MetricOutput = Option<u64>;
    /// Adds this row's `total` (BigDecimal → i64) into the running sum;
    /// a missing side leaves the other untouched.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &DisputeMetricRow) {
        let bucket_total = metrics
            .total
            .as_ref()
            .and_then(bigdecimal::ToPrimitive::to_i64);
        self.total = match (self.total, bucket_total) {
            (Some(acc), Some(extra)) => Some(acc + extra),
            (acc, extra) => acc.or(extra),
        }
    }
    /// Finalizes as `u64`; a negative (or absent) total yields `None`.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.total.and_then(|amount| u64::try_from(amount).ok())
    }
}
impl DisputeMetricAccumulator for RateAccumulator {
    type MetricOutput = Option<(Option<u64>, Option<u64>, Option<u64>, Option<u64>)>;
    /// Routes this row's count to the counter matching its dispute status and
    /// always adds it to the overall total.
    fn add_metrics_bucket(&mut self, metrics: &DisputeMetricRow) {
        let row_count = metrics.count.unwrap_or_default();
        if let Some(status) = metrics.dispute_status.as_ref() {
            match status.as_ref() {
                storage_enums::DisputeStatus::DisputeChallenged => {
                    self.challenged_count += row_count
                }
                storage_enums::DisputeStatus::DisputeWon => self.won_count += row_count,
                storage_enums::DisputeStatus::DisputeLost => self.lost_count += row_count,
                _ => {}
            }
        }
        self.total += row_count;
    }
    /// Returns `(challenged, won, lost, total)`; an empty bucket reports
    /// all-`None` rather than zeroes.
    fn collect(self) -> Self::MetricOutput {
        if self.total <= 0 {
            return Some((None, None, None, None));
        }
        let to_u64 = |count: i64| u64::try_from(count).ok();
        Some((
            to_u64(self.challenged_count),
            to_u64(self.won_count),
            to_u64(self.lost_count),
            to_u64(self.total),
        ))
    }
}
impl DisputeMetricsAccumulator {
    /// Drains every sub-accumulator into the API-facing bucket value.
    pub fn collect(self) -> DisputeMetricsBucketValue {
        let (disputes_challenged, disputes_won, disputes_lost, total_dispute) =
            self.disputes_status_rate.collect().unwrap_or_default();
        DisputeMetricsBucketValue {
            disputes_challenged,
            disputes_won,
            disputes_lost,
            disputed_amount: self.disputed_amount.collect(),
            dispute_lost_amount: self.dispute_lost_amount.collect(),
            total_dispute,
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/disputes/core.rs",
"crates/analytics/src/disputes/types.rs",
"crates/analytics/src/disputes/metrics.rs",
"crates/analytics/src/disputes/filters.rs",
"crates/analytics/src/disputes/accumulators.rs"
],
"module": "crates/analytics/src/disputes",
"num_files": 5,
"token_count": 3988
}
|
module_-6887977712386786347
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/frm
Files: 5
</path>
<module>
// File: crates/analytics/src/frm/core.rs
#![allow(dead_code)]
use std::collections::HashMap;
use api_models::analytics::{
frm::{FrmDimensions, FrmMetrics, FrmMetricsBucketIdentifier, FrmMetricsBucketResponse},
AnalyticsMetadata, FrmFilterValue, FrmFiltersResponse, GetFrmFilterRequest,
GetFrmMetricRequest, MetricsResponse,
};
use error_stack::ResultExt;
use router_env::{
logger,
tracing::{self, Instrument},
};
use super::{
filters::{get_frm_filter_for_dimension, FrmFilterRow},
FrmMetricsAccumulator,
};
use crate::{
errors::{AnalyticsError, AnalyticsResult},
frm::FrmMetricAccumulator,
metrics, AnalyticsProvider,
};
/// Computes the requested FRM metrics for `merchant_id`, grouped by the
/// requested dimensions, returning one response bucket per combination.
///
/// Each metric is queried concurrently on a `tokio::task::JoinSet` and the
/// results are merged sequentially into per-bucket accumulators.
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    merchant_id: &common_utils::id_type::MerchantId,
    req: GetFrmMetricRequest,
) -> AnalyticsResult<MetricsResponse<FrmMetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<FrmMetricsBucketIdentifier, FrmMetricsAccumulator> =
        HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span =
            tracing::debug_span!("analytics_frm_query", frm_metric = metric_type.as_ref());
        // Currently JoinSet works with only static lifetime references even if the task pool does not outlive the given reference
        // We can optimize away this clone once that is fixed
        let merchant_id_scoped = merchant_id.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_frm_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &merchant_id_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                // Pair the data with its metric type for the merge loop below.
                (metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Drain tasks as they complete (completion order, not submission order).
    while let Some((metric, data)) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        let data = data?;
        let attributes = router_env::metric_attributes!(
            ("metric_type", metric.to_string()),
            ("source", pool.to_string()),
        );
        let value = u64::try_from(data.len());
        if let Ok(val) = value {
            metrics::BUCKETS_FETCHED.record(val, attributes);
            logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
        }
        for (id, value) in data {
            logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
            let metrics_builder = metrics_accumulator.entry(id).or_default();
            // Route each row to the accumulator field for its metric type.
            match metric {
                FrmMetrics::FrmBlockedRate => {
                    metrics_builder.frm_blocked_rate.add_metrics_bucket(&value)
                }
                FrmMetrics::FrmTriggeredAttempts => metrics_builder
                    .frm_triggered_attempts
                    .add_metrics_bucket(&value),
            }
        }
        logger::debug!(
            "Analytics Accumulated Results: metric: {}, results: {:#?}",
            metric,
            metrics_accumulator
        );
    }
    let query_data: Vec<FrmMetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| FrmMetricsBucketResponse {
            values: val.collect(),
            dimensions: id,
        })
        .collect();
    Ok(MetricsResponse {
        query_data,
        meta_data: [AnalyticsMetadata {
            current_time_range: req.time_range,
        }],
    })
}
/// Returns the distinct filter values available for each requested FRM
/// dimension within `req.time_range`, scoped to `merchant_id`.
///
/// `Combined*` providers query both stores and log divergences; `CombinedCkh`
/// serves the Clickhouse result, `CombinedSqlx` the Postgres result.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetFrmFilterRequest,
    merchant_id: &common_utils::id_type::MerchantId,
) -> AnalyticsResult<FrmFiltersResponse> {
    let mut res = FrmFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_frm_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_frm_filter_for_dimension(dim, merchant_id, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) => {
                let ckh_result = get_frm_filter_for_dimension(
                    dim,
                    merchant_id,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_frm_filter_for_dimension(
                    dim,
                    merchant_id,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check only; mismatch is logged, Clickhouse wins.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => {
                let ckh_result = get_frm_filter_for_dimension(
                    dim,
                    merchant_id,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_frm_filter_for_dimension(
                    dim,
                    merchant_id,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check only; mismatch is logged, Postgres wins.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres frm analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project the column matching the requested dimension; NULLs dropped.
        .filter_map(|fil: FrmFilterRow| match dim {
            FrmDimensions::FrmStatus => fil.frm_status.map(|i| i.as_ref().to_string()),
            FrmDimensions::FrmName => fil.frm_name,
            FrmDimensions::FrmTransactionType => {
                fil.frm_transaction_type.map(|i| i.as_ref().to_string())
            }
        })
        .collect::<Vec<String>>();
        res.query_data.push(FrmFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
// File: crates/analytics/src/frm/types.rs
use api_models::analytics::frm::{FrmDimensions, FrmFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for FrmFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Appends one `IN (...)` clause to `builder` per non-empty filter list,
    /// in a fixed order: status, name, transaction type.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.frm_status.is_empty() {
            let clause =
                builder.add_filter_in_range_clause(FrmDimensions::FrmStatus, &self.frm_status);
            clause.attach_printable("Error adding frm status filter")?;
        }
        if !self.frm_name.is_empty() {
            let clause =
                builder.add_filter_in_range_clause(FrmDimensions::FrmName, &self.frm_name);
            clause.attach_printable("Error adding frm name filter")?;
        }
        if !self.frm_transaction_type.is_empty() {
            let clause = builder.add_filter_in_range_clause(
                FrmDimensions::FrmTransactionType,
                &self.frm_transaction_type,
            );
            clause.attach_printable("Error adding frm transaction type filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/frm/metrics.rs
use api_models::analytics::{
frm::{FrmDimensions, FrmFilters, FrmMetrics, FrmMetricsBucketIdentifier, FrmTransactionType},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
mod frm_blocked_rate;
mod frm_triggered_attempts;
use frm_blocked_rate::FrmBlockedRate;
use frm_triggered_attempts::FrmTriggeredAttempts;
use crate::{
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
/// One row of an FRM metrics query: optional group-by dimension values plus
/// the aggregates (`total`, `count`) and the time-bucket boundaries.
#[derive(Debug, Eq, PartialEq, serde::Deserialize)]
pub struct FrmMetricRow {
    pub frm_name: Option<String>,
    pub frm_status: Option<DBEnumWrapper<storage_enums::FraudCheckStatus>>,
    pub frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>>,
    // Aggregated amount for the bucket (NULL when the metric is count-based).
    pub total: Option<bigdecimal::BigDecimal>,
    pub count: Option<i64>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait: a data source that can deserialize `FrmMetricRow`s.
pub trait FrmMetricAnalytics: LoadRow<FrmMetricRow> {}
/// Uniform interface for loading one FRM metric from an analytics data
/// source, grouped by `dimensions` and bucketed within `time_range`.
#[async_trait::async_trait]
pub trait FrmMetric<T>
where
    T: AnalyticsDataSource + FrmMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the metric query and returns one `(bucket id, row)` pair per
    /// dimension/time-bucket combination.
    async fn load_metrics(
        &self,
        dimensions: &[FrmDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &FrmFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(FrmMetricsBucketIdentifier, FrmMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> FrmMetric<T> for FrmMetrics
where
    T: AnalyticsDataSource + FrmMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete metric implementation for this enum variant,
    /// forwarding every query parameter unchanged.
    async fn load_metrics(
        &self,
        dimensions: &[FrmDimensions],
        merchant_id: &common_utils::id_type::MerchantId,
        filters: &FrmFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(FrmMetricsBucketIdentifier, FrmMetricRow)>> {
        match self {
            Self::FrmTriggeredAttempts => {
                FrmTriggeredAttempts::default()
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::FrmBlockedRate => {
                FrmBlockedRate::default()
                    .load_metrics(
                        dimensions,
                        merchant_id,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
        }
    }
}
// File: crates/analytics/src/frm/filters.rs
use api_models::analytics::{
frm::{FrmDimensions, FrmTransactionType},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::FraudCheckStatus;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait: a data source that can deserialize `FrmFilterRow`s.
pub trait FrmFilterAnalytics: LoadRow<FrmFilterRow> {}
/// Queries the distinct values of `dimension` from the fraud-check table for
/// `merchant_id` within `time_range` (used to populate filter options).
pub async fn get_frm_filter_for_dimension<T>(
    dimension: FrmDimensions,
    merchant_id: &common_utils::id_type::MerchantId,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<FrmFilterRow>>
where
    T: AnalyticsDataSource + FrmFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::FraudCheck);
    // SELECT <dimension> ... WHERE <time range> AND merchant_id with DISTINCT.
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    query_builder
        .add_filter_clause("merchant_id", merchant_id)
        .switch()?;
    query_builder.set_distinct();
    query_builder
        .execute_query::<FrmFilterRow, _>(pool)
        .await
        // Outer error: building the query; inner error: executing it.
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row of an FRM filter query; only the column matching the requested
/// dimension is expected to be populated.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct FrmFilterRow {
    pub frm_status: Option<DBEnumWrapper<FraudCheckStatus>>,
    pub frm_transaction_type: Option<DBEnumWrapper<FrmTransactionType>>,
    pub frm_name: Option<String>,
}
// File: crates/analytics/src/frm/accumulator.rs
use api_models::analytics::frm::FrmMetricsBucketValue;
use common_enums::enums as storage_enums;
use super::metrics::FrmMetricRow;
/// Per-bucket collection of accumulators, one per FRM metric.
#[derive(Debug, Default)]
pub struct FrmMetricsAccumulator {
    pub frm_triggered_attempts: TriggeredAttemptsAccumulator,
    pub frm_blocked_rate: BlockedRateAccumulator,
}
/// Sums triggered-attempt counts; stays `None` until a row contributes.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct TriggeredAttemptsAccumulator {
    pub count: Option<i64>,
}
/// Counts fraud-flagged rows versus all rows to derive a blocked-rate
/// percentage.
#[derive(Debug, Default)]
pub struct BlockedRateAccumulator {
    pub fraud: i64,
    pub total: i64,
}
/// Folds `FrmMetricRow`s into an accumulator and finalizes its output.
pub trait FrmMetricAccumulator {
    type MetricOutput;
    fn add_metrics_bucket(&mut self, metrics: &FrmMetricRow);
    fn collect(self) -> Self::MetricOutput;
}
impl FrmMetricAccumulator for TriggeredAttemptsAccumulator {
    type MetricOutput = Option<u64>;
    /// Adds this row's count into the running sum; a missing side leaves the
    /// other untouched.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &FrmMetricRow) {
        self.count = match (self.count, metrics.count) {
            (Some(acc), Some(extra)) => Some(acc + extra),
            (acc, extra) => acc.or(extra),
        }
    }
    /// Finalizes as `u64`; a negative (or absent) count yields `None`.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.count.and_then(|total| u64::try_from(total).ok())
    }
}
impl FrmMetricAccumulator for BlockedRateAccumulator {
    type MetricOutput = Option<f64>;
    /// Adds this row's count to the fraud tally when its status is `Fraud`,
    /// and always to the overall total.
    fn add_metrics_bucket(&mut self, metrics: &FrmMetricRow) {
        if let Some(ref frm_status) = metrics.frm_status {
            if frm_status.as_ref() == &storage_enums::FraudCheckStatus::Fraud {
                self.fraud += metrics.count.unwrap_or_default();
            }
        };
        self.total += metrics.count.unwrap_or_default();
    }
    /// Returns the percentage of fraud-flagged rows, or `None` when the bucket
    /// saw no rows (or a counter is negative).
    fn collect(self) -> Self::MetricOutput {
        if self.total <= 0 {
            None
        } else {
            // Widen through u64 instead of the previous u32: counts above
            // u32::MAX used to make the whole rate silently collapse to None.
            // The negative-value guard of try_from is preserved; i64→f64 `as`
            // conversion is exact for any realistic count (< 2^53).
            let fraud = u64::try_from(self.fraud).ok()?;
            let total = u64::try_from(self.total).ok()?;
            Some(fraud as f64 * 100.0 / total as f64)
        }
    }
}
impl FrmMetricsAccumulator {
    /// Finalizes the per-bucket accumulators into the response value.
    pub fn collect(self) -> FrmMetricsBucketValue {
        let frm_triggered_attempts = self.frm_triggered_attempts.collect();
        let frm_blocked_rate = self.frm_blocked_rate.collect();
        FrmMetricsBucketValue {
            frm_blocked_rate,
            frm_triggered_attempts,
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/frm/core.rs",
"crates/analytics/src/frm/types.rs",
"crates/analytics/src/frm/metrics.rs",
"crates/analytics/src/frm/filters.rs",
"crates/analytics/src/frm/accumulator.rs"
],
"module": "crates/analytics/src/frm",
"num_files": 5,
"token_count": 3359
}
|
module_1305532238204912838
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/auth_events
Files: 6
</path>
<module>
// File: crates/analytics/src/auth_events/core.rs
use std::collections::HashMap;
use api_models::analytics::{
auth_events::{
AuthEventDimensions, AuthEventMetrics, AuthEventMetricsBucketIdentifier,
MetricsBucketResponse,
},
AuthEventFilterValue, AuthEventFiltersResponse, AuthEventMetricsResponse,
AuthEventsAnalyticsMetadata, GetAuthEventFilterRequest, GetAuthEventMetricRequest,
};
use common_utils::types::TimeRange;
use error_stack::{report, ResultExt};
use router_env::{instrument, tracing};
use super::{
filters::{get_auth_events_filter_for_dimension, AuthEventFilterRow},
sankey::{get_sankey_data, SankeyRow},
AuthEventMetricsAccumulator,
};
use crate::{
auth_events::AuthEventMetricAccumulator,
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
AnalyticsProvider,
};
/// Computes the requested authentication-event metrics, grouped by the
/// requested dimensions, and returns one response bucket per combination.
///
/// Each metric is queried concurrently on a `tokio::task::JoinSet`; results
/// are merged sequentially. `error_message_count` values are also summed
/// across buckets into the response metadata.
#[instrument(skip_all)]
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    auth: &AuthInfo,
    req: GetAuthEventMetricRequest,
) -> AnalyticsResult<AuthEventMetricsResponse<MetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<
        AuthEventMetricsBucketIdentifier,
        AuthEventMetricsAccumulator,
    > = HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        // JoinSet tasks need 'static data, hence the owned copies.
        let auth_scoped = auth.to_owned();
        let pool = pool.clone();
        set.spawn(async move {
            let data = pool
                .get_auth_event_metrics(
                    &metric_type,
                    &req.group_by_names.clone(),
                    &auth_scoped,
                    &req.filters,
                    req.time_series.map(|t| t.granularity),
                    &req.time_range,
                )
                .await
                .change_context(AnalyticsError::UnknownError);
            // Pair the data with its metric type for the merge loop below.
            (metric_type, data)
        });
    }
    // Drain tasks as they complete and route each row to the accumulator
    // field matching its metric type.
    while let Some((metric, data)) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        for (id, value) in data? {
            let metrics_builder = metrics_accumulator.entry(id).or_default();
            match metric {
                AuthEventMetrics::AuthenticationCount => metrics_builder
                    .authentication_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationAttemptCount => metrics_builder
                    .authentication_attempt_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationSuccessCount => metrics_builder
                    .authentication_success_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::ChallengeFlowCount => metrics_builder
                    .challenge_flow_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::ChallengeAttemptCount => metrics_builder
                    .challenge_attempt_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::ChallengeSuccessCount => metrics_builder
                    .challenge_success_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::FrictionlessFlowCount => metrics_builder
                    .frictionless_flow_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::FrictionlessSuccessCount => metrics_builder
                    .frictionless_success_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationErrorMessage => metrics_builder
                    .authentication_error_message
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationFunnel => metrics_builder
                    .authentication_funnel
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationExemptionApprovedCount => metrics_builder
                    .authentication_exemption_approved_count
                    .add_metrics_bucket(&value),
                AuthEventMetrics::AuthenticationExemptionRequestedCount => metrics_builder
                    .authentication_exemption_requested_count
                    .add_metrics_bucket(&value),
            }
        }
    }
    // Finalize each bucket and sum error-message counts for the metadata.
    let mut total_error_message_count = 0;
    let query_data: Vec<MetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let collected_values = val.collect();
            if let Some(count) = collected_values.error_message_count {
                total_error_message_count += count;
            }
            MetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    Ok(AuthEventMetricsResponse {
        query_data,
        meta_data: [AuthEventsAnalyticsMetadata {
            total_error_message_count: Some(total_error_message_count),
        }],
    })
}
pub async fn get_filters(
pool: &AnalyticsProvider,
req: GetAuthEventFilterRequest,
auth: &AuthInfo,
) -> AnalyticsResult<AuthEventFiltersResponse> {
let mut res = AuthEventFiltersResponse::default();
for dim in req.group_by_names {
let values = match pool {
AnalyticsProvider::Sqlx(_pool) => {
Err(report!(AnalyticsError::UnknownError))
}
AnalyticsProvider::Clickhouse(pool) => {
get_auth_events_filter_for_dimension(dim, auth, &req.time_range, pool)
.await
.map_err(|e| e.change_context(AnalyticsError::UnknownError))
}
AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) | AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => {
let ckh_result = get_auth_events_filter_for_dimension(
dim,
auth,
&req.time_range,
ckh_pool,
)
.await
.map_err(|e| e.change_context(AnalyticsError::UnknownError));
let sqlx_result = get_auth_events_filter_for_dimension(
dim,
auth,
&req.time_range,
sqlx_pool,
)
.await
.map_err(|e| e.change_context(AnalyticsError::UnknownError));
match (&sqlx_result, &ckh_result) {
(Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters")
},
_ => {}
};
ckh_result
}
}
.change_context(AnalyticsError::UnknownError)?
.into_iter()
.filter_map(|fil: AuthEventFilterRow| match dim {
AuthEventDimensions::AuthenticationStatus => fil.authentication_status.map(|i| i.as_ref().to_string()),
AuthEventDimensions::TransactionStatus => fil.trans_status.map(|i| i.as_ref().to_string()),
AuthEventDimensions::AuthenticationType => fil.authentication_type.map(|i| i.as_ref().to_string()),
AuthEventDimensions::ErrorMessage => fil.error_message,
AuthEventDimensions::AuthenticationConnector => fil.authentication_connector.map(|i| i.as_ref().to_string()),
AuthEventDimensions::MessageVersion => fil.message_version,
AuthEventDimensions::AcsReferenceNumber => fil.acs_reference_number,
AuthEventDimensions::Platform => fil.platform,
AuthEventDimensions::Mcc => fil.mcc,
AuthEventDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
AuthEventDimensions::MerchantCountry => fil.merchant_country,
AuthEventDimensions::BillingCountry => fil.billing_country,
AuthEventDimensions::ShippingCountry => fil.shipping_country,
AuthEventDimensions::IssuerCountry => fil.issuer_country,
AuthEventDimensions::EarliestSupportedVersion => fil.earliest_supported_version,
AuthEventDimensions::LatestSupportedVersion => fil.latest_supported_version,
AuthEventDimensions::WhitelistDecision => fil.whitelist_decision.map(|i| i.to_string()),
AuthEventDimensions::DeviceManufacturer => fil.device_manufacturer,
AuthEventDimensions::DeviceType => fil.device_type,
AuthEventDimensions::DeviceBrand => fil.device_brand,
AuthEventDimensions::DeviceOs => fil.device_os,
AuthEventDimensions::DeviceDisplay => fil.device_display,
AuthEventDimensions::BrowserName => fil.browser_name,
AuthEventDimensions::BrowserVersion => fil.browser_version,
AuthEventDimensions::IssuerId => fil.issuer_id,
AuthEventDimensions::SchemeName => fil.scheme_name,
AuthEventDimensions::ExemptionRequested => fil.exemption_requested.map(|i| i.to_string()),
AuthEventDimensions::ExemptionAccepted => fil.exemption_accepted.map(|i| i.to_string()),
})
.collect::<Vec<String>>();
res.query_data.push(AuthEventFilterValue {
dimension: dim,
values,
})
}
Ok(res)
}
#[instrument(skip_all)]
/// Loads the sankey rows for the authentication funnel over the given time
/// range. Sankey data is only available from Clickhouse-backed providers.
pub async fn get_sankey(
    pool: &AnalyticsProvider,
    auth: &AuthInfo,
    req: TimeRange,
) -> AnalyticsResult<Vec<SankeyRow>> {
    // Resolve the Clickhouse pool up front; the plain SQLx provider cannot
    // serve this query.
    let ckh_pool = match pool {
        AnalyticsProvider::Sqlx(_) => Err(AnalyticsError::NotImplemented(
            "Sankey not implemented for sqlx",
        ))?,
        AnalyticsProvider::Clickhouse(ckh_pool)
        | AnalyticsProvider::CombinedCkh(_, ckh_pool)
        | AnalyticsProvider::CombinedSqlx(_, ckh_pool) => ckh_pool,
    };
    get_sankey_data(ckh_pool, auth, &req)
        .await
        .change_context(AnalyticsError::UnknownError)
}
// File: crates/analytics/src/auth_events/types.rs
use api_models::analytics::auth_events::{AuthEventDimensions, AuthEventFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for AuthEventFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Adds one IN-range clause per non-empty filter list on this struct,
    /// attaching a printable context string for error diagnostics.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        // Every filter follows the same shape (skip when empty, add an
        // in-range clause, attach context), so the repetition is expressed
        // once as a local macro. Clause order matches the original listing.
        macro_rules! add_in_range {
            ($field:ident, $dimension:expr, $context:expr) => {
                if !self.$field.is_empty() {
                    builder
                        .add_filter_in_range_clause($dimension, &self.$field)
                        .attach_printable($context)?;
                }
            };
        }
        add_in_range!(
            authentication_status,
            AuthEventDimensions::AuthenticationStatus,
            "Error adding authentication status filter"
        );
        add_in_range!(
            trans_status,
            AuthEventDimensions::TransactionStatus,
            "Error adding transaction status filter"
        );
        add_in_range!(
            error_message,
            AuthEventDimensions::ErrorMessage,
            "Error adding error message filter"
        );
        add_in_range!(
            authentication_connector,
            AuthEventDimensions::AuthenticationConnector,
            "Error adding authentication connector filter"
        );
        add_in_range!(
            message_version,
            AuthEventDimensions::MessageVersion,
            "Error adding message version filter"
        );
        add_in_range!(
            platform,
            AuthEventDimensions::Platform,
            "Error adding platform filter"
        );
        add_in_range!(
            acs_reference_number,
            AuthEventDimensions::AcsReferenceNumber,
            "Error adding acs reference number filter"
        );
        add_in_range!(mcc, AuthEventDimensions::Mcc, "Failed to add MCC filter");
        add_in_range!(
            currency,
            AuthEventDimensions::Currency,
            "Failed to add currency filter"
        );
        add_in_range!(
            merchant_country,
            AuthEventDimensions::MerchantCountry,
            "Failed to add merchant country filter"
        );
        add_in_range!(
            billing_country,
            AuthEventDimensions::BillingCountry,
            "Failed to add billing country filter"
        );
        add_in_range!(
            shipping_country,
            AuthEventDimensions::ShippingCountry,
            "Failed to add shipping country filter"
        );
        add_in_range!(
            issuer_country,
            AuthEventDimensions::IssuerCountry,
            "Failed to add issuer country filter"
        );
        add_in_range!(
            earliest_supported_version,
            AuthEventDimensions::EarliestSupportedVersion,
            "Failed to add earliest supported version filter"
        );
        add_in_range!(
            latest_supported_version,
            AuthEventDimensions::LatestSupportedVersion,
            "Failed to add latest supported version filter"
        );
        add_in_range!(
            whitelist_decision,
            AuthEventDimensions::WhitelistDecision,
            "Failed to add whitelist decision filter"
        );
        add_in_range!(
            device_manufacturer,
            AuthEventDimensions::DeviceManufacturer,
            "Failed to add device manufacturer filter"
        );
        add_in_range!(
            device_type,
            AuthEventDimensions::DeviceType,
            "Failed to add device type filter"
        );
        add_in_range!(
            device_brand,
            AuthEventDimensions::DeviceBrand,
            "Failed to add device brand filter"
        );
        add_in_range!(
            device_os,
            AuthEventDimensions::DeviceOs,
            "Failed to add device OS filter"
        );
        add_in_range!(
            device_display,
            AuthEventDimensions::DeviceDisplay,
            "Failed to add device display filter"
        );
        add_in_range!(
            browser_name,
            AuthEventDimensions::BrowserName,
            "Failed to add browser name filter"
        );
        add_in_range!(
            browser_version,
            AuthEventDimensions::BrowserVersion,
            "Failed to add browser version filter"
        );
        add_in_range!(
            issuer_id,
            AuthEventDimensions::IssuerId,
            "Failed to add issuer ID filter"
        );
        add_in_range!(
            scheme_name,
            AuthEventDimensions::SchemeName,
            "Failed to add scheme name filter"
        );
        add_in_range!(
            exemption_requested,
            AuthEventDimensions::ExemptionRequested,
            "Failed to add exemption requested filter"
        );
        add_in_range!(
            exemption_accepted,
            AuthEventDimensions::ExemptionAccepted,
            "Failed to add exemption accepted filter"
        );
        Ok(())
    }
}
// File: crates/analytics/src/auth_events/metrics.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{
AuthEventDimensions, AuthEventFilters, AuthEventMetrics, AuthEventMetricsBucketIdentifier,
},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
AuthInfo,
};
mod authentication_attempt_count;
mod authentication_count;
mod authentication_error_message;
mod authentication_exemption_approved_count;
mod authentication_exemption_requested_count;
mod authentication_funnel;
mod authentication_success_count;
mod challenge_attempt_count;
mod challenge_flow_count;
mod challenge_success_count;
mod frictionless_flow_count;
mod frictionless_success_count;
use authentication_attempt_count::AuthenticationAttemptCount;
use authentication_count::AuthenticationCount;
use authentication_error_message::AuthenticationErrorMessage;
use authentication_exemption_approved_count::AuthenticationExemptionApprovedCount;
use authentication_exemption_requested_count::AuthenticationExemptionRequestedCount;
use authentication_funnel::AuthenticationFunnel;
use authentication_success_count::AuthenticationSuccessCount;
use challenge_attempt_count::ChallengeAttemptCount;
use challenge_flow_count::ChallengeFlowCount;
use challenge_success_count::ChallengeSuccessCount;
use frictionless_flow_count::FrictionlessFlowCount;
use frictionless_success_count::FrictionlessSuccessCount;
/// One deserialized result row of an auth-event metrics query.
///
/// Every dimension field is an `Option` because a query only selects the
/// dimensions that were requested. NOTE(review): unselected columns are
/// presumably deserialized as `None` — confirm against the row loader.
#[derive(Debug, PartialEq, Eq, serde::Deserialize, Hash)]
pub struct AuthEventMetricRow {
    // Aggregate value, aliased as "count" by the metric queries.
    pub count: Option<i64>,
    // Enum-valued dimension columns, wrapped in `DBEnumWrapper` for
    // database (de)serialization.
    pub authentication_status: Option<DBEnumWrapper<storage_enums::AuthenticationStatus>>,
    pub trans_status: Option<DBEnumWrapper<storage_enums::TransactionStatus>>,
    pub authentication_type: Option<DBEnumWrapper<storage_enums::DecoupledAuthenticationType>>,
    pub error_message: Option<String>,
    pub authentication_connector: Option<DBEnumWrapper<storage_enums::AuthenticationConnectors>>,
    // Free-form string dimension columns.
    pub message_version: Option<String>,
    pub acs_reference_number: Option<String>,
    pub platform: Option<String>,
    pub mcc: Option<String>,
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub merchant_country: Option<String>,
    pub billing_country: Option<String>,
    pub shipping_country: Option<String>,
    pub issuer_country: Option<String>,
    pub earliest_supported_version: Option<String>,
    pub latest_supported_version: Option<String>,
    pub whitelist_decision: Option<bool>,
    pub device_manufacturer: Option<String>,
    pub device_type: Option<String>,
    pub device_brand: Option<String>,
    pub device_os: Option<String>,
    pub device_display: Option<String>,
    pub browser_name: Option<String>,
    pub browser_version: Option<String>,
    pub issuer_id: Option<String>,
    pub scheme_name: Option<String>,
    pub exemption_requested: Option<bool>,
    pub exemption_accepted: Option<bool>,
    // Time-bucket bounds: MIN/MAX of `created_at`, aliased "start_bucket"
    // and "end_bucket" by the metric queries; ISO 8601 on the wire.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait for data sources that can load [`AuthEventMetricRow`]s.
pub trait AuthEventMetricAnalytics: LoadRow<AuthEventMetricRow> {}
/// Loads one auth-event metric from the backing data source.
#[async_trait::async_trait]
pub trait AuthEventMetric<T>
where
    T: AnalyticsDataSource + AuthEventMetricAnalytics,
{
    /// Executes the metric query, returning one `(bucket identifier, row)`
    /// pair per combination of the requested `dimensions` (and time bucket,
    /// when `granularity` is supplied), constrained by `filters`,
    /// `time_range`, and the caller's `auth` scope.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> AuthEventMetric<T> for AuthEventMetrics
where
    T: AnalyticsDataSource + AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete zero-sized metric type matching this
    /// enum variant and forwards all query parameters unchanged.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        // Every arm forwards the identical argument list, so the delegation
        // is captured once in a local macro.
        macro_rules! delegate {
            ($metric:expr) => {
                $metric
                    .load_metrics(auth, dimensions, filters, granularity, time_range, pool)
                    .await
            };
        }
        match self {
            Self::AuthenticationCount => delegate!(AuthenticationCount),
            Self::AuthenticationAttemptCount => delegate!(AuthenticationAttemptCount),
            Self::AuthenticationSuccessCount => delegate!(AuthenticationSuccessCount),
            Self::ChallengeFlowCount => delegate!(ChallengeFlowCount),
            Self::ChallengeAttemptCount => delegate!(ChallengeAttemptCount),
            Self::ChallengeSuccessCount => delegate!(ChallengeSuccessCount),
            Self::FrictionlessFlowCount => delegate!(FrictionlessFlowCount),
            Self::FrictionlessSuccessCount => delegate!(FrictionlessSuccessCount),
            Self::AuthenticationErrorMessage => delegate!(AuthenticationErrorMessage),
            Self::AuthenticationFunnel => delegate!(AuthenticationFunnel),
            Self::AuthenticationExemptionApprovedCount => {
                delegate!(AuthenticationExemptionApprovedCount)
            }
            Self::AuthenticationExemptionRequestedCount => {
                delegate!(AuthenticationExemptionRequestedCount)
            }
        }
    }
}
// File: crates/analytics/src/auth_events/sankey.rs
use common_enums::AuthenticationStatus;
use common_utils::{
errors::ParsingError,
types::{authentication::AuthInfo, TimeRange},
};
use error_stack::ResultExt;
use router_env::logger;
use crate::{
clickhouse::ClickhouseClient,
query::{Aggregate, QueryBuilder, QueryFilter},
types::{AnalyticsCollection, MetricsError, MetricsResult},
};
/// One aggregated row of the authentication sankey query: a COUNT(*) grouped
/// by (exemption_requested, exemption_accepted, authentication_status).
#[derive(Debug, serde::Deserialize, serde::Serialize)]
pub struct SankeyRow {
    pub count: i64,
    pub authentication_status: Option<AuthenticationStatus>,
    pub exemption_requested: Option<bool>,
    pub exemption_accepted: Option<bool>,
}
impl TryInto<SankeyRow> for serde_json::Value {
type Error = error_stack::Report<ParsingError>;
fn try_into(self) -> Result<SankeyRow, Self::Error> {
logger::debug!("Parsing SankeyRow from {:?}", self);
serde_json::from_value(self).change_context(ParsingError::StructParseFailure(
"Failed to parse Sankey in clickhouse results",
))
}
}
/// Builds and executes the sankey aggregation query against Clickhouse:
/// COUNT(*) over authentications, grouped by exemption_requested,
/// exemption_accepted and authentication_status, scoped to `auth` and
/// `time_range`.
///
/// # Errors
/// Returns [`MetricsError::QueryBuildingError`] if the query cannot be
/// constructed and [`MetricsError::QueryExecutionFailure`] if it fails to run.
pub async fn get_sankey_data(
    clickhouse_client: &ClickhouseClient,
    auth: &AuthInfo,
    time_range: &TimeRange,
) -> MetricsResult<Vec<SankeyRow>> {
    // The same three columns are both selected and grouped by.
    const GROUP_COLUMNS: [&str; 3] = [
        "exemption_requested",
        "exemption_accepted",
        "authentication_status",
    ];
    let mut query_builder =
        QueryBuilder::<ClickhouseClient>::new(AnalyticsCollection::Authentications);
    query_builder
        .add_select_column(Aggregate::<String>::Count {
            field: None,
            alias: Some("count"),
        })
        .change_context(MetricsError::QueryBuildingError)?;
    for column in GROUP_COLUMNS {
        query_builder
            .add_select_column(column)
            .change_context(MetricsError::QueryBuildingError)?;
    }
    auth.set_filter_clause(&mut query_builder)
        .change_context(MetricsError::QueryBuildingError)?;
    time_range
        .set_filter_clause(&mut query_builder)
        .change_context(MetricsError::QueryBuildingError)?;
    for column in GROUP_COLUMNS {
        query_builder
            .add_group_by_clause(column)
            .change_context(MetricsError::QueryBuildingError)?;
    }
    // `execute_query` yields a nested Result: the outer layer is query
    // building, the inner layer is query execution.
    let rows = query_builder
        .execute_query::<SankeyRow, _>(clickhouse_client)
        .await
        .change_context(MetricsError::QueryBuildingError)?
        .change_context(MetricsError::QueryExecutionFailure)?;
    // The previous `.into_iter().map(Ok).collect()` re-collection was a
    // no-op wrapper; the rows are already the success value.
    Ok(rows)
}
// File: crates/analytics/src/auth_events/filters.rs
use api_models::analytics::{auth_events::AuthEventDimensions, Granularity, TimeRange};
use common_enums::{Currency, DecoupledAuthenticationType};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{AuthenticationConnectors, AuthenticationStatus, TransactionStatus};
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
/// Marker trait for data sources that can load [`AuthEventFilterRow`]s.
pub trait AuthEventFilterAnalytics: LoadRow<AuthEventFilterRow> {}
/// Runs a `SELECT DISTINCT` over the authentications collection for a single
/// `dimension`, restricted to `time_range` and the caller's `auth` scope,
/// returning one row per distinct value.
pub async fn get_auth_events_filter_for_dimension<T>(
    dimension: AuthEventDimensions,
    auth: &AuthInfo,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<AuthEventFilterRow>>
where
    T: AnalyticsDataSource + AuthEventFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Authentications);
    // Only the requested dimension's column is selected.
    builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    builder.set_distinct();
    auth.set_filter_clause(&mut builder).switch()?;
    // Nested Result: outer layer is query building, inner is execution.
    builder
        .execute_query::<AuthEventFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row returned by [`get_auth_events_filter_for_dimension`].
///
/// That query selects a single dimension column, so only the field matching
/// the queried dimension is expected to carry a value. NOTE(review): the
/// other columns presumably deserialize as `None` — confirm against the
/// row loader.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct AuthEventFilterRow {
    pub authentication_status: Option<DBEnumWrapper<AuthenticationStatus>>,
    pub trans_status: Option<DBEnumWrapper<TransactionStatus>>,
    pub authentication_type: Option<DBEnumWrapper<DecoupledAuthenticationType>>,
    pub error_message: Option<String>,
    pub authentication_connector: Option<DBEnumWrapper<AuthenticationConnectors>>,
    pub message_version: Option<String>,
    pub acs_reference_number: Option<String>,
    pub platform: Option<String>,
    pub mcc: Option<String>,
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub merchant_country: Option<String>,
    pub billing_country: Option<String>,
    pub shipping_country: Option<String>,
    pub issuer_country: Option<String>,
    pub earliest_supported_version: Option<String>,
    pub latest_supported_version: Option<String>,
    pub whitelist_decision: Option<bool>,
    pub device_manufacturer: Option<String>,
    pub device_type: Option<String>,
    pub device_brand: Option<String>,
    pub device_os: Option<String>,
    pub device_display: Option<String>,
    pub browser_name: Option<String>,
    pub browser_version: Option<String>,
    pub issuer_id: Option<String>,
    pub scheme_name: Option<String>,
    pub exemption_requested: Option<bool>,
    pub exemption_accepted: Option<bool>,
}
// File: crates/analytics/src/auth_events/accumulator.rs
use api_models::analytics::auth_events::AuthEventMetricsBucketValue;
use super::metrics::AuthEventMetricRow;
/// Per-bucket collection of accumulators, one per auth-event metric.
/// Metric rows are folded in via the individual accumulators and drained
/// with `collect` into an `AuthEventMetricsBucketValue`.
#[derive(Debug, Default)]
pub struct AuthEventMetricsAccumulator {
    pub authentication_count: CountAccumulator,
    pub authentication_attempt_count: CountAccumulator,
    pub authentication_error_message: AuthenticationErrorMessageAccumulator,
    pub authentication_success_count: CountAccumulator,
    pub challenge_flow_count: CountAccumulator,
    pub challenge_attempt_count: CountAccumulator,
    pub challenge_success_count: CountAccumulator,
    pub frictionless_flow_count: CountAccumulator,
    pub frictionless_success_count: CountAccumulator,
    pub authentication_funnel: CountAccumulator,
    pub authentication_exemption_approved_count: CountAccumulator,
    pub authentication_exemption_requested_count: CountAccumulator,
}
/// Sums the optional `count` column across metric rows.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Accumulator for the authentication error-message metric; its fold and
/// collect behavior is currently identical to [`CountAccumulator`].
#[derive(Debug, Default)]
pub struct AuthenticationErrorMessageAccumulator {
    pub count: Option<i64>,
}
/// Folds metric rows into a single per-bucket output value.
pub trait AuthEventMetricAccumulator {
    // Final value produced when the accumulator is drained.
    type MetricOutput;
    // Folds one metric row into the running state.
    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow);
    // Consumes the accumulator, yielding the bucket's output value.
    fn collect(self) -> Self::MetricOutput;
}
impl AuthEventMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;
    /// Folds the row's count into the running total; a missing count on
    /// either side leaves the other side untouched.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow) {
        if let Some(incoming) = metrics.count {
            self.count = Some(self.count.unwrap_or(0) + incoming);
        }
    }
    /// Converts the signed total to `u64`; negative totals (and absent
    /// totals) collapse to `None`.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        match self.count {
            Some(total) => u64::try_from(total).ok(),
            None => None,
        }
    }
}
impl AuthEventMetricAccumulator for AuthenticationErrorMessageAccumulator {
    type MetricOutput = Option<u64>;
    /// Adds the row's count to the running total, treating an absent
    /// running total as zero when a new count arrives.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &AuthEventMetricRow) {
        self.count = match metrics.count {
            Some(incoming) => Some(self.count.unwrap_or(0) + incoming),
            None => self.count,
        }
    }
    /// Yields the total as `u64`; negative or absent totals become `None`.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        self.count.and_then(|total| u64::try_from(total).ok())
    }
}
impl AuthEventMetricsAccumulator {
    /// Drains every per-metric accumulator into the API response bucket
    /// value. Destructuring `self` guarantees (at compile time) that no
    /// accumulator field is forgotten.
    pub fn collect(self) -> AuthEventMetricsBucketValue {
        let Self {
            authentication_count,
            authentication_attempt_count,
            authentication_error_message,
            authentication_success_count,
            challenge_flow_count,
            challenge_attempt_count,
            challenge_success_count,
            frictionless_flow_count,
            frictionless_success_count,
            authentication_funnel,
            authentication_exemption_approved_count,
            authentication_exemption_requested_count,
        } = self;
        AuthEventMetricsBucketValue {
            authentication_count: authentication_count.collect(),
            authentication_attempt_count: authentication_attempt_count.collect(),
            authentication_success_count: authentication_success_count.collect(),
            challenge_flow_count: challenge_flow_count.collect(),
            challenge_attempt_count: challenge_attempt_count.collect(),
            challenge_success_count: challenge_success_count.collect(),
            frictionless_flow_count: frictionless_flow_count.collect(),
            frictionless_success_count: frictionless_success_count.collect(),
            // Note: the response field is named `error_message_count` while
            // the accumulator is `authentication_error_message`.
            error_message_count: authentication_error_message.collect(),
            authentication_funnel: authentication_funnel.collect(),
            authentication_exemption_approved_count: authentication_exemption_approved_count
                .collect(),
            authentication_exemption_requested_count: authentication_exemption_requested_count
                .collect(),
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/auth_events/core.rs",
"crates/analytics/src/auth_events/types.rs",
"crates/analytics/src/auth_events/metrics.rs",
"crates/analytics/src/auth_events/sankey.rs",
"crates/analytics/src/auth_events/filters.rs",
"crates/analytics/src/auth_events/accumulator.rs"
],
"module": "crates/analytics/src/auth_events",
"num_files": 6,
"token_count": 6872
}
|
module_-8781102932275271432
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/auth_events/metrics
Files: 12
</path>
<module>
// File: crates/analytics/src/auth_events/metrics/authentication_exemption_approved_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications whose exemption was approved
/// (`exemption_accepted = true`), bucketed by the requested dimensions.
#[derive(Default)]
pub(super) struct AuthenticationExemptionApprovedCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationExemptionApprovedCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Select every requested dimension so rows can be bucketed by them.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) plus MIN/MAX(created_at) to delimit each time bucket.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // The defining filter for this metric: approved exemptions only.
        query_builder
            .add_filter_clause(AuthEventDimensions::ExemptionAccepted, true)
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Granularity, when requested, adds the time-bucket GROUP BY.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // `execute_query` yields a nested Result: outer layer is query
        // building, inner layer is query execution.
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // Positional constructor — argument order must match
                    // `AuthEventMetricsBucketIdentifier::new` exactly.
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the bucket bounds to the granularity grid,
                        // falling back to the query's overall time range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::DecoupledAuthenticationType;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications that went through the frictionless flow
/// (`authentication_type = Frictionless`), bucketed by the requested
/// dimensions.
#[derive(Default)]
pub(super) struct FrictionlessFlowCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for FrictionlessFlowCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Select every requested dimension so rows can be bucketed by them.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) plus MIN/MAX(created_at) to delimit each time bucket.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // The defining filter for this metric: frictionless flow only.
        query_builder
            .add_filter_clause(
                "authentication_type",
                DecoupledAuthenticationType::Frictionless,
            )
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Granularity, when requested, adds the time-bucket GROUP BY.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // `execute_query` yields a nested Result: outer layer is query
        // building, inner layer is query execution.
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    // Positional constructor — argument order must match
                    // `AuthEventMetricsBucketIdentifier::new` exactly.
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the bucket bounds to the granularity grid,
                        // falling back to the query's overall time range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/frictionless_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::{AuthenticationStatus, DecoupledAuthenticationType};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications that went through the frictionless (no-challenge)
/// flow and ended with a successful authentication status.
#[derive(Default)]
pub(super) struct FrictionlessSuccessCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for FrictionlessSuccessCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Selects the requested `dimensions` plus a `count` aggregate and the
    /// min/max `created_at` bucket bounds, restricts rows to frictionless
    /// authentications that succeeded, applies the caller's `filters`,
    /// `time_range` and `auth` scoping, groups by the dimensions (and the
    /// time bucket when `granularity` is given), then maps every result row
    /// into a `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filters: frictionless flow that ended in success.
        query_builder
            .add_filter_clause(
                "authentication_type",
                DecoupledAuthenticationType::Frictionless,
            )
            .switch()?;
        query_builder
            .add_filter_clause("authentication_status", AuthenticationStatus::Success)
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/challenge_flow_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::DecoupledAuthenticationType;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications that were routed through the challenge flow,
/// regardless of their final authentication status.
#[derive(Default)]
pub(super) struct ChallengeFlowCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for ChallengeFlowCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Selects the requested `dimensions` plus a `count` aggregate and the
    /// min/max `created_at` bucket bounds, restricts rows to challenge-flow
    /// authentications, applies the caller's `filters`, `time_range` and
    /// `auth` scoping, groups by the dimensions (and the time bucket when
    /// `granularity` is given), then maps every result row into a
    /// `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filter: only challenge-flow authentications.
        query_builder
            .add_filter_clause(
                "authentication_type",
                DecoupledAuthenticationType::Challenge,
            )
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_exemption_requested_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications for which an exemption was requested
/// (`exemption_requested = true`).
#[derive(Default)]
pub(super) struct AuthenticationExemptionRequestedCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationExemptionRequestedCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Selects the requested `dimensions` plus a `count` aggregate and the
    /// min/max `created_at` bucket bounds, restricts rows to authentications
    /// where an exemption was requested, applies the caller's `filters`,
    /// `time_range` and `auth` scoping, groups by the dimensions (and the
    /// time bucket when `granularity` is given), then maps every result row
    /// into a `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filter: only rows where an exemption was requested.
        query_builder
            .add_filter_clause(AuthEventDimensions::ExemptionRequested, true)
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts all authentication records matching the supplied filters, bucketed
/// by the requested dimensions and (optionally) a time granularity.
#[derive(Default)]
pub(super) struct AuthenticationCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the aggregate query and converts each result row into a
    /// `(bucket identifier, row)` pair keyed by the selected dimensions and
    /// the (granularity-clipped) time bucket.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);

        // Project every requested dimension, then the aggregate columns.
        for dim in dimensions {
            builder.add_select_column(dim).switch()?;
        }
        builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut builder).switch()?;
        time_range
            .set_filter_clause(&mut builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut builder).switch()?;

        // Group by each dimension, plus the time bucket when a granularity
        // was requested.
        for dim in dimensions {
            builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(g) = granularity {
            g.set_group_by_clause(&mut builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        let rows = builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?;

        rows.into_iter()
            .map(|row| {
                // Clip the observed bucket bounds to the granularity; with no
                // granularity, fall back to the query's own range.
                let bucket_range = TimeRange {
                    start_time: if let (Some(g), Some(st)) = (granularity, row.start_bucket) {
                        g.clip_to_start(st)?
                    } else {
                        time_range.start_time
                    },
                    end_time: match granularity {
                        Some(g) => row.end_bucket.map(|et| g.clip_to_end(et)).transpose()?,
                        None => time_range.end_time,
                    },
                };
                let id = AuthEventMetricsBucketIdentifier::new(
                    row.authentication_status.as_ref().map(|s| s.0),
                    row.trans_status.as_ref().map(|s| s.0.clone()),
                    row.authentication_type.as_ref().map(|t| t.0),
                    row.error_message.clone(),
                    row.authentication_connector.as_ref().map(|c| c.0),
                    row.message_version.clone(),
                    row.acs_reference_number.clone(),
                    row.mcc.clone(),
                    row.currency.as_ref().map(|c| c.0),
                    row.merchant_country.clone(),
                    row.billing_country.clone(),
                    row.shipping_country.clone(),
                    row.issuer_country.clone(),
                    row.earliest_supported_version.clone(),
                    row.latest_supported_version.clone(),
                    row.whitelist_decision,
                    row.device_manufacturer.clone(),
                    row.device_type.clone(),
                    row.device_brand.clone(),
                    row.device_os.clone(),
                    row.device_display.clone(),
                    row.browser_name.clone(),
                    row.browser_version.clone(),
                    row.issuer_id.clone(),
                    row.scheme_name.clone(),
                    row.exemption_requested,
                    row.exemption_accepted,
                    bucket_range,
                );
                Ok((id, row))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::{AuthenticationStatus, DecoupledAuthenticationType};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts challenge-flow authentications that completed with either a
/// success or a failure status (i.e. attempts that ran to completion).
#[derive(Default)]
pub(super) struct ChallengeAttemptCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for ChallengeAttemptCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Selects the requested `dimensions` plus a `count` aggregate and the
    /// min/max `created_at` bucket bounds, restricts rows to challenge-flow
    /// authentications whose status is success or failure, applies the
    /// caller's `filters`, `time_range` and `auth` scoping, groups by the
    /// dimensions (and the time bucket when `granularity` is given), then
    /// maps every result row into a `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filters: challenge flow, with a final status of
        // either success or failure.
        query_builder
            .add_filter_clause(
                "authentication_type",
                DecoupledAuthenticationType::Challenge,
            )
            .switch()?;
        query_builder
            .add_filter_in_range_clause(
                "authentication_status",
                &[AuthenticationStatus::Success, AuthenticationStatus::Failed],
            )
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::AuthenticationStatus;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications (any flow) that completed with either a success or
/// a failure status.
#[derive(Default)]
pub(super) struct AuthenticationAttemptCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationAttemptCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Selects the requested `dimensions` plus a `count` aggregate and the
    /// min/max `created_at` bucket bounds, restricts rows to authentications
    /// whose status is success or failure, applies the caller's `filters`,
    /// `time_range` and `auth` scoping, groups by the dimensions (and the
    /// time bucket when `granularity` is given), then maps every result row
    /// into a `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filter: final status of either success or failure.
        query_builder
            .add_filter_in_range_clause(
                "authentication_status",
                &[AuthenticationStatus::Success, AuthenticationStatus::Failed],
            )
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::AuthenticationStatus;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Counts authentications whose final status is `Success`, bucketed by the
/// requested dimensions and (optionally) a time granularity.
#[derive(Default)]
pub(super) struct AuthenticationSuccessCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationSuccessCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Runs the aggregate query (restricted to successful authentications)
    /// and converts each result row into a `(bucket identifier, row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);

        // Project every requested dimension, then the aggregate columns.
        for dim in dimensions {
            builder.add_select_column(dim).switch()?;
        }
        builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;

        // Metric-specific filter: only successful authentications.
        builder
            .add_filter_clause("authentication_status", AuthenticationStatus::Success)
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut builder).switch()?;
        time_range
            .set_filter_clause(&mut builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut builder).switch()?;

        // Group by each dimension, plus the time bucket when a granularity
        // was requested.
        for dim in dimensions {
            builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(g) = granularity {
            g.set_group_by_clause(&mut builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }

        let rows = builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?;

        rows.into_iter()
            .map(|row| {
                // Clip the observed bucket bounds to the granularity; with no
                // granularity, fall back to the query's own range.
                let bucket_range = TimeRange {
                    start_time: if let (Some(g), Some(st)) = (granularity, row.start_bucket) {
                        g.clip_to_start(st)?
                    } else {
                        time_range.start_time
                    },
                    end_time: match granularity {
                        Some(g) => row.end_bucket.map(|et| g.clip_to_end(et)).transpose()?,
                        None => time_range.end_time,
                    },
                };
                let id = AuthEventMetricsBucketIdentifier::new(
                    row.authentication_status.as_ref().map(|s| s.0),
                    row.trans_status.as_ref().map(|s| s.0.clone()),
                    row.authentication_type.as_ref().map(|t| t.0),
                    row.error_message.clone(),
                    row.authentication_connector.as_ref().map(|c| c.0),
                    row.message_version.clone(),
                    row.acs_reference_number.clone(),
                    row.mcc.clone(),
                    row.currency.as_ref().map(|c| c.0),
                    row.merchant_country.clone(),
                    row.billing_country.clone(),
                    row.shipping_country.clone(),
                    row.issuer_country.clone(),
                    row.earliest_supported_version.clone(),
                    row.latest_supported_version.clone(),
                    row.whitelist_decision,
                    row.device_manufacturer.clone(),
                    row.device_type.clone(),
                    row.device_brand.clone(),
                    row.device_os.clone(),
                    row.device_display.clone(),
                    row.browser_name.clone(),
                    row.browser_version.clone(),
                    row.issuer_id.clone(),
                    row.scheme_name.clone(),
                    row.exemption_requested,
                    row.exemption_accepted,
                    bucket_range,
                );
                Ok((id, row))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_error_message.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::AuthenticationStatus;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{
Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket,
ToSql, Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Aggregates failed authentications by error message (and any other
/// requested dimensions), ordered by descending count.
#[derive(Default)]
pub(super) struct AuthenticationErrorMessage;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationErrorMessage
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the aggregate query for this metric.
    ///
    /// Unlike the plain count metrics, the count column here is a raw
    /// `sum(sign_flag)` expression — presumably to net out collapsed /
    /// versioned rows in the backing store (NOTE(review): confirm against the
    /// table engine). Rows are restricted to failed authentications with a
    /// non-null error message, grouped by the dimensions (and the time bucket
    /// when `granularity` is given), ordered by descending count, then mapped
    /// into `(bucket identifier, row)` pairs.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Each requested dimension is selected (and grouped below), so every
        // output row corresponds to one combination of dimension values.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // Raw SQL expression instead of Aggregate::Count (see doc above).
        query_builder
            .add_select_column("sum(sign_flag) AS count")
            .switch()?;
        // Min/max of `created_at` give the observed time span of each bucket;
        // they are clipped against the granularity in the mapping step below.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Metric-specific filters: failed authentications that carry an
        // error message.
        query_builder
            .add_filter_clause("authentication_status", AuthenticationStatus::Failed)
            .switch()?;
        query_builder
            .add_custom_filter_clause(
                AuthEventDimensions::ErrorMessage,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        // Caller-supplied filters, time window and auth scoping.
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        // Most frequent error messages first.
        query_builder
            .add_order_by_clause("count", Order::Descending)
            .attach_printable("Error adding order by clause")
            .switch()?;
        // When a granularity is requested, also group by its time bucket.
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the observed bucket bounds to the granularity;
                        // without one, fall back to the query's own range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/authentication_funnel.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{
Aggregate, FilterTypes, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql,
Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Funnel metric over the `Authentications` collection: buckets every
/// authentication row whose `transaction_status` column is non-null by the
/// requested dimensions and (optional) time granularity.
#[derive(Default)]
pub(super) struct AuthenticationFunnel;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for AuthenticationFunnel
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the funnel query, mapping every result row into a
    /// `(bucket identifier, raw row)` pair.
    ///
    /// Selected columns: each requested dimension, a `count` aggregate, and
    /// `min`/`max` of `created_at` (aliased `start_bucket` / `end_bucket`)
    /// which are later clipped to the granularity boundaries.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Project every requested dimension so rows can be grouped by them below.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Funnel condition: only rows with a recorded transaction status.
        // NOTE(review): the "NULL" value string appears to be a placeholder for
        // the `IsNotNull` filter type — confirm against `add_custom_filter_clause`.
        query_builder
            .add_custom_filter_clause(
                AuthEventDimensions::TransactionStatus,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        // Restrict the query to the caller's auth scope.
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the bucket's time range to the granularity
                        // boundaries; without a granularity fall back to the
                        // originally requested range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/auth_events/metrics/challenge_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
auth_events::{AuthEventDimensions, AuthEventFilters, AuthEventMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_enums::{AuthenticationStatus, DecoupledAuthenticationType};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::AuthEventMetricRow;
use crate::{
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
AuthInfo,
};
/// Metric counting successful challenge-flow authentications: rows with
/// `authentication_status = Success` and `authentication_type = Challenge`,
/// bucketed by the requested dimensions and (optional) time granularity.
#[derive(Default)]
pub(super) struct ChallengeSuccessCount;
#[async_trait::async_trait]
impl<T> super::AuthEventMetric<T> for ChallengeSuccessCount
where
    T: AnalyticsDataSource + super::AuthEventMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds and executes the challenge-success query, mapping every result
    /// row into a `(bucket identifier, raw row)` pair.
    async fn load_metrics(
        &self,
        auth: &AuthInfo,
        dimensions: &[AuthEventDimensions],
        filters: &AuthEventFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::Authentications);
        // Project every requested dimension so rows can be grouped by them below.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        // Only successful authentications...
        query_builder
            .add_filter_clause("authentication_status", AuthenticationStatus::Success)
            .switch()?;
        // ...that went through the challenge flow.
        query_builder
            .add_filter_clause(
                "authentication_type",
                DecoupledAuthenticationType::Challenge,
            )
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        // Restrict the query to the caller's auth scope.
        auth.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<AuthEventMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    AuthEventMetricsBucketIdentifier::new(
                        i.authentication_status.as_ref().map(|i| i.0),
                        i.trans_status.as_ref().map(|i| i.0.clone()),
                        i.authentication_type.as_ref().map(|i| i.0),
                        i.error_message.clone(),
                        i.authentication_connector.as_ref().map(|i| i.0),
                        i.message_version.clone(),
                        i.acs_reference_number.clone(),
                        i.mcc.clone(),
                        i.currency.as_ref().map(|i| i.0),
                        i.merchant_country.clone(),
                        i.billing_country.clone(),
                        i.shipping_country.clone(),
                        i.issuer_country.clone(),
                        i.earliest_supported_version.clone(),
                        i.latest_supported_version.clone(),
                        i.whitelist_decision,
                        i.device_manufacturer.clone(),
                        i.device_type.clone(),
                        i.device_brand.clone(),
                        i.device_os.clone(),
                        i.device_display.clone(),
                        i.browser_name.clone(),
                        i.browser_version.clone(),
                        i.issuer_id.clone(),
                        i.scheme_name.clone(),
                        i.exemption_requested,
                        i.exemption_accepted,
                        // Clip the bucket's time range to the granularity
                        // boundaries; without a granularity fall back to the
                        // originally requested range.
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(AuthEventMetricsBucketIdentifier, AuthEventMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/auth_events/metrics/authentication_exemption_approved_count.rs",
"crates/analytics/src/auth_events/metrics/frictionless_flow_count.rs",
"crates/analytics/src/auth_events/metrics/frictionless_success_count.rs",
"crates/analytics/src/auth_events/metrics/challenge_flow_count.rs",
"crates/analytics/src/auth_events/metrics/authentication_exemption_requested_count.rs",
"crates/analytics/src/auth_events/metrics/authentication_count.rs",
"crates/analytics/src/auth_events/metrics/challenge_attempt_count.rs",
"crates/analytics/src/auth_events/metrics/authentication_attempt_count.rs",
"crates/analytics/src/auth_events/metrics/authentication_success_count.rs",
"crates/analytics/src/auth_events/metrics/authentication_error_message.rs",
"crates/analytics/src/auth_events/metrics/authentication_funnel.rs",
"crates/analytics/src/auth_events/metrics/challenge_success_count.rs"
],
"module": "crates/analytics/src/auth_events/metrics",
"num_files": 12,
"token_count": 12680
}
|
module_-1994298943052100916
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/refunds
Files: 6
</path>
<module>
// File: crates/analytics/src/refunds/core.rs
#![allow(dead_code)]
use std::collections::{HashMap, HashSet};
use api_models::analytics::{
refunds::{
RefundDimensions, RefundDistributions, RefundMetrics, RefundMetricsBucketIdentifier,
RefundMetricsBucketResponse,
},
GetRefundFilterRequest, GetRefundMetricRequest, RefundFilterValue, RefundFiltersResponse,
RefundsAnalyticsMetadata, RefundsMetricsResponse,
};
use bigdecimal::ToPrimitive;
use common_enums::Currency;
use common_utils::errors::CustomResult;
use currency_conversion::{conversion::convert, types::ExchangeRates};
use error_stack::ResultExt;
use router_env::{
logger,
tracing::{self, Instrument},
};
use super::{
distribution::RefundDistributionRow,
filters::{get_refund_filter_for_dimension, RefundFilterRow},
metrics::RefundMetricRow,
RefundMetricsAccumulator,
};
use crate::{
enums::AuthInfo,
errors::{AnalyticsError, AnalyticsResult},
metrics,
refunds::{accumulator::RefundDistributionAccumulator, RefundMetricAccumulator},
AnalyticsProvider,
};
/// Result of one spawned refunds-analytics task: either a metric query or a
/// distribution query, paired with the outcome of running it.
#[derive(Debug)]
pub enum TaskType {
    /// Outcome of a metric query, keyed by the metric that produced it.
    MetricTask(
        RefundMetrics,
        CustomResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>, AnalyticsError>,
    ),
    /// Outcome of a distribution query, keyed by the distribution that produced it.
    DistributionTask(
        RefundDistributions,
        CustomResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>, AnalyticsError>,
    ),
}
/// Runs every requested refund metric (and the optional distribution) as a
/// concurrent task, merges the per-bucket rows into accumulators, and folds
/// them into the final [`RefundsMetricsResponse`], including overall totals
/// and the USD-converted processed amount when exchange rates are supplied.
pub async fn get_metrics(
    pool: &AnalyticsProvider,
    ex_rates: &Option<ExchangeRates>,
    auth: &AuthInfo,
    req: GetRefundMetricRequest,
) -> AnalyticsResult<RefundsMetricsResponse<RefundMetricsBucketResponse>> {
    let mut metrics_accumulator: HashMap<RefundMetricsBucketIdentifier, RefundMetricsAccumulator> =
        HashMap::new();
    let mut set = tokio::task::JoinSet::new();
    // One concurrent task per requested metric.
    for metric_type in req.metrics.iter().cloned() {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_refund_query",
            refund_metric = metric_type.as_ref()
        );
        // Currently JoinSet works with only static lifetime references even if the task pool does not outlive the given reference
        // We can optimize away this clone once that is fixed
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_refund_metrics(
                        &metric_type,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::MetricTask(metric_type, data)
            }
            .instrument(task_span),
        );
    }
    // Optionally spawn the distribution query as one more task.
    if let Some(distribution) = req.clone().distribution {
        let req = req.clone();
        let pool = pool.clone();
        let task_span = tracing::debug_span!(
            "analytics_refunds_distribution_query",
            refund_distribution = distribution.distribution_for.as_ref()
        );
        let auth_scoped = auth.to_owned();
        set.spawn(
            async move {
                let data = pool
                    .get_refund_distribution(
                        &distribution,
                        &req.group_by_names.clone(),
                        &auth_scoped,
                        &req.filters,
                        &req.time_series.map(|t| t.granularity),
                        &req.time_range,
                    )
                    .await
                    .change_context(AnalyticsError::UnknownError);
                TaskType::DistributionTask(distribution.distribution_for, data)
            }
            .instrument(task_span),
        );
    }
    // Drain the task set as tasks complete, routing each result into the
    // accumulator field that matches its metric/distribution variant, keyed
    // by the bucket identifier.
    while let Some(task_type) = set
        .join_next()
        .await
        .transpose()
        .change_context(AnalyticsError::UnknownError)?
    {
        match task_type {
            TaskType::MetricTask(metric, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("metric_type", metric.to_string()),
                    ("source", pool.to_string()),
                );
                // Record how many buckets the query produced (best effort).
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for metric {metric}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    match metric {
                        RefundMetrics::RefundSuccessRate
                        | RefundMetrics::SessionizedRefundSuccessRate => metrics_builder
                            .refund_success_rate
                            .add_metrics_bucket(&value),
                        RefundMetrics::RefundCount | RefundMetrics::SessionizedRefundCount => {
                            metrics_builder.refund_count.add_metrics_bucket(&value)
                        }
                        RefundMetrics::RefundSuccessCount
                        | RefundMetrics::SessionizedRefundSuccessCount => {
                            metrics_builder.refund_success.add_metrics_bucket(&value)
                        }
                        RefundMetrics::RefundProcessedAmount
                        | RefundMetrics::SessionizedRefundProcessedAmount => {
                            metrics_builder.processed_amount.add_metrics_bucket(&value)
                        }
                        RefundMetrics::SessionizedRefundReason => {
                            metrics_builder.refund_reason.add_metrics_bucket(&value)
                        }
                        RefundMetrics::SessionizedRefundErrorMessage => metrics_builder
                            .refund_error_message
                            .add_metrics_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: metric: {}, results: {:#?}",
                    metric,
                    metrics_accumulator
                );
            }
            TaskType::DistributionTask(distribution, data) => {
                let data = data?;
                let attributes = router_env::metric_attributes!(
                    ("distribution_type", distribution.to_string()),
                    ("source", pool.to_string()),
                );
                let value = u64::try_from(data.len());
                if let Ok(val) = value {
                    metrics::BUCKETS_FETCHED.record(val, attributes);
                    logger::debug!("Attributes: {:?}, Buckets fetched: {}", attributes, val);
                }
                for (id, value) in data {
                    logger::debug!(bucket_id=?id, bucket_value=?value, "Bucket row for distribution {distribution}");
                    let metrics_builder = metrics_accumulator.entry(id).or_default();
                    match distribution {
                        RefundDistributions::SessionizedRefundReason => metrics_builder
                            .refund_reason_distribution
                            .add_distribution_bucket(&value),
                        RefundDistributions::SessionizedRefundErrorMessage => metrics_builder
                            .refund_error_message_distribution
                            .add_distribution_bucket(&value),
                    }
                }
                logger::debug!(
                    "Analytics Accumulated Results: distribution: {}, results: {:#?}",
                    distribution,
                    metrics_accumulator
                );
            }
        }
    }
    // Fold the accumulators into response buckets, tracking response-wide
    // totals as we go.
    let mut success = 0;
    let mut total = 0;
    let mut total_refund_processed_amount = 0;
    let mut total_refund_processed_amount_in_usd = 0;
    let mut total_refund_processed_count = 0;
    let mut total_refund_reason_count = 0;
    let mut total_refund_error_message_count = 0;
    let query_data: Vec<RefundMetricsBucketResponse> = metrics_accumulator
        .into_iter()
        .map(|(id, val)| {
            let mut collected_values = val.collect();
            if let Some(success_count) = collected_values.successful_refunds {
                success += success_count;
            }
            if let Some(total_count) = collected_values.total_refunds {
                total += total_count;
            }
            if let Some(amount) = collected_values.refund_processed_amount {
                // Convert the bucket's processed amount to USD when exchange
                // rates were supplied; any conversion failure is logged and
                // treated as an absent value.
                let amount_in_usd = if let Some(ex_rates) = ex_rates {
                    id.currency
                        .and_then(|currency| {
                            i64::try_from(amount)
                                .inspect_err(|e| logger::error!("Amount conversion error: {:?}", e))
                                .ok()
                                .and_then(|amount_i64| {
                                    convert(ex_rates, currency, Currency::USD, amount_i64)
                                        .inspect_err(|e| {
                                            logger::error!("Currency conversion error: {:?}", e)
                                        })
                                        .ok()
                                })
                        })
                        .map(|amount| (amount * rust_decimal::Decimal::new(100, 0)).to_u64())
                        .unwrap_or_default()
                } else {
                    None
                };
                collected_values.refund_processed_amount_in_usd = amount_in_usd;
                total_refund_processed_amount += amount;
                total_refund_processed_amount_in_usd += amount_in_usd.unwrap_or(0);
            }
            if let Some(count) = collected_values.refund_processed_count {
                total_refund_processed_count += count;
            }
            if let Some(total_count) = collected_values.refund_reason_count {
                total_refund_reason_count += total_count;
            }
            if let Some(total_count) = collected_values.refund_error_message_count {
                total_refund_error_message_count += total_count;
            }
            RefundMetricsBucketResponse {
                values: collected_values,
                dimensions: id,
            }
        })
        .collect();
    // The overall success rate is only defined when at least one refund was seen.
    let total_refund_success_rate = match (success, total) {
        (s, t) if t > 0 => Some(f64::from(s) * 100.0 / f64::from(t)),
        _ => None,
    };
    Ok(RefundsMetricsResponse {
        query_data,
        meta_data: [RefundsAnalyticsMetadata {
            total_refund_success_rate,
            total_refund_processed_amount: Some(total_refund_processed_amount),
            total_refund_processed_amount_in_usd: if ex_rates.is_some() {
                Some(total_refund_processed_amount_in_usd)
            } else {
                None
            },
            total_refund_processed_count: Some(total_refund_processed_count),
            total_refund_reason_count: Some(total_refund_reason_count),
            total_refund_error_message_count: Some(total_refund_error_message_count),
        }],
    })
}
/// Fetches the distinct filter values for every requested refund dimension.
///
/// For combined providers, the query runs against both ClickHouse and
/// Postgres; mismatching results are logged, and the variant (`CombinedCkh`
/// vs `CombinedSqlx`) decides which backend's result is returned.
pub async fn get_filters(
    pool: &AnalyticsProvider,
    req: GetRefundFilterRequest,
    auth: &AuthInfo,
) -> AnalyticsResult<RefundFiltersResponse> {
    let mut res = RefundFiltersResponse::default();
    for dim in req.group_by_names {
        let values = match pool {
            AnalyticsProvider::Sqlx(pool) => {
                get_refund_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::Clickhouse(pool) => {
                get_refund_filter_for_dimension(dim, auth, &req.time_range, pool)
                    .await
            }
            AnalyticsProvider::CombinedCkh(sqlx_pool, ckh_pool) => {
                let ckh_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check the two backends; prefer ClickHouse's answer.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters")
                    },
                    _ => {}
                };
                ckh_result
            }
            AnalyticsProvider::CombinedSqlx(sqlx_pool, ckh_pool) => {
                let ckh_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    ckh_pool,
                )
                .await;
                let sqlx_result = get_refund_filter_for_dimension(
                    dim,
                    auth,
                    &req.time_range,
                    sqlx_pool,
                )
                .await;
                // Cross-check the two backends; prefer Postgres's answer.
                match (&sqlx_result, &ckh_result) {
                    (Ok(ref sqlx_res), Ok(ref ckh_res)) if sqlx_res != ckh_res => {
                        router_env::logger::error!(clickhouse_result=?ckh_res, postgres_result=?sqlx_res, "Mismatch between clickhouse & postgres refunds analytics filters")
                    },
                    _ => {}
                };
                sqlx_result
            }
        }
        .change_context(AnalyticsError::UnknownError)?
        .into_iter()
        // Project the row onto the single column matching this dimension,
        // dropping rows where that column is null.
        .filter_map(|fil: RefundFilterRow| match dim {
            RefundDimensions::Currency => fil.currency.map(|i| i.as_ref().to_string()),
            RefundDimensions::RefundStatus => fil.refund_status.map(|i| i.as_ref().to_string()),
            RefundDimensions::Connector => fil.connector,
            RefundDimensions::RefundType => fil.refund_type.map(|i| i.as_ref().to_string()),
            RefundDimensions::ProfileId => fil.profile_id,
            RefundDimensions::RefundReason => fil.refund_reason,
            RefundDimensions::RefundErrorMessage => fil.refund_error_message,
        })
        .collect::<Vec<String>>();
        res.query_data.push(RefundFilterValue {
            dimension: dim,
            values,
        })
    }
    Ok(res)
}
// File: crates/analytics/src/refunds/distribution.rs
use api_models::analytics::{
refunds::{
RefundDimensions, RefundDistributions, RefundFilters, RefundMetricsBucketIdentifier,
RefundType,
},
Granularity, RefundDistributionBody, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
mod sessionized_distribution;
/// One raw row deserialized from a refund distribution query: the selected
/// dimension columns plus the aggregates and time-bucket boundaries.
#[derive(Debug, PartialEq, Eq, serde::Deserialize)]
pub struct RefundDistributionRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub refund_status: Option<DBEnumWrapper<storage_enums::RefundStatus>>,
    pub connector: Option<String>,
    pub refund_type: Option<DBEnumWrapper<RefundType>>,
    pub profile_id: Option<String>,
    /// Aggregate total for the bucket (arbitrary-precision to survive sums).
    pub total: Option<bigdecimal::BigDecimal>,
    /// Row count for the bucket.
    pub count: Option<i64>,
    pub refund_reason: Option<String>,
    pub refund_error_message: Option<String>,
    /// Earliest `created_at` in the bucket.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    /// Latest `created_at` in the bucket.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait for data sources that can deserialize [`RefundDistributionRow`]s.
pub trait RefundDistributionAnalytics: LoadRow<RefundDistributionRow> {}
/// A refund distribution that can be loaded from a data source `T`.
#[async_trait::async_trait]
pub trait RefundDistribution<T>
where
    T: AnalyticsDataSource + RefundDistributionAnalytics,
{
    /// Loads the distribution rows for the given dimensions, filters,
    /// granularity and time range, paired with their bucket identifiers.
    #[allow(clippy::too_many_arguments)]
    async fn load_distribution(
        &self,
        distribution: &RefundDistributionBody,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: &Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>>;
}
#[async_trait::async_trait]
impl<T> RefundDistribution<T> for RefundDistributions
where
    T: AnalyticsDataSource + RefundDistributionAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete sessionized distribution implementation
    /// matching this `RefundDistributions` variant.
    async fn load_distribution(
        &self,
        distribution: &RefundDistributionBody,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: &Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<Vec<(RefundMetricsBucketIdentifier, RefundDistributionRow)>> {
        match self {
            Self::SessionizedRefundReason => {
                sessionized_distribution::RefundReason
                    .load_distribution(
                        distribution,
                        dimensions,
                        auth,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
            Self::SessionizedRefundErrorMessage => {
                sessionized_distribution::RefundErrorMessage
                    .load_distribution(
                        distribution,
                        dimensions,
                        auth,
                        filters,
                        granularity,
                        time_range,
                        pool,
                    )
                    .await
            }
        }
    }
}
// File: crates/analytics/src/refunds/types.rs
use api_models::analytics::refunds::{RefundDimensions, RefundFilters};
use error_stack::ResultExt;
use crate::{
query::{QueryBuilder, QueryFilter, QueryResult, ToSql},
types::{AnalyticsCollection, AnalyticsDataSource},
};
impl<T> QueryFilter<T> for RefundFilters
where
    T: AnalyticsDataSource,
    AnalyticsCollection: ToSql<T>,
{
    /// Applies every non-empty refund filter as an `IN (...)` range clause on
    /// the query being built; empty filter lists add no clause.
    ///
    /// # Errors
    /// Propagates any failure from the underlying query builder, annotated
    /// with which filter was being added.
    fn set_filter_clause(&self, builder: &mut QueryBuilder<T>) -> QueryResult<()> {
        if !self.currency.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::Currency, &self.currency)
                .attach_printable("Error adding currency filter")?;
        }
        if !self.refund_status.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::RefundStatus, &self.refund_status)
                .attach_printable("Error adding refund status filter")?;
        }
        if !self.connector.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::Connector, &self.connector)
                .attach_printable("Error adding connector filter")?;
        }
        if !self.refund_type.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::RefundType, &self.refund_type)
                // Fixed copy-paste: this diagnostic previously said "auth type".
                .attach_printable("Error adding refund type filter")?;
        }
        if !self.profile_id.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::ProfileId, &self.profile_id)
                .attach_printable("Error adding profile id filter")?;
        }
        if !self.refund_reason.is_empty() {
            builder
                .add_filter_in_range_clause(RefundDimensions::RefundReason, &self.refund_reason)
                .attach_printable("Error adding refund reason filter")?;
        }
        if !self.refund_error_message.is_empty() {
            builder
                .add_filter_in_range_clause(
                    RefundDimensions::RefundErrorMessage,
                    &self.refund_error_message,
                )
                .attach_printable("Error adding refund error message filter")?;
        }
        Ok(())
    }
}
// File: crates/analytics/src/refunds/metrics.rs
use api_models::analytics::{
refunds::{
RefundDimensions, RefundFilters, RefundMetrics, RefundMetricsBucketIdentifier, RefundType,
},
Granularity, TimeRange,
};
use diesel_models::enums as storage_enums;
use time::PrimitiveDateTime;
mod refund_count;
mod refund_processed_amount;
mod refund_success_count;
mod refund_success_rate;
mod sessionized_metrics;
use std::collections::HashSet;
use refund_count::RefundCount;
use refund_processed_amount::RefundProcessedAmount;
use refund_success_count::RefundSuccessCount;
use refund_success_rate::RefundSuccessRate;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, LoadRow, MetricsResult},
};
/// One raw row deserialized from a refund metric query: the selected
/// dimension columns plus the aggregates and time-bucket boundaries.
#[derive(Debug, Eq, PartialEq, serde::Deserialize, Hash)]
pub struct RefundMetricRow {
    pub currency: Option<DBEnumWrapper<storage_enums::Currency>>,
    pub refund_status: Option<DBEnumWrapper<storage_enums::RefundStatus>>,
    pub connector: Option<String>,
    pub refund_type: Option<DBEnumWrapper<RefundType>>,
    pub profile_id: Option<String>,
    pub refund_reason: Option<String>,
    pub refund_error_message: Option<String>,
    /// Aggregate total for the bucket (arbitrary-precision to survive sums).
    pub total: Option<bigdecimal::BigDecimal>,
    /// Row count for the bucket.
    pub count: Option<i64>,
    /// Earliest `created_at` in the bucket.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub start_bucket: Option<PrimitiveDateTime>,
    /// Latest `created_at` in the bucket.
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub end_bucket: Option<PrimitiveDateTime>,
}
/// Marker trait for data sources that can deserialize [`RefundMetricRow`]s.
pub trait RefundMetricAnalytics: LoadRow<RefundMetricRow> {}
/// A refund metric that can be loaded from a data source `T`.
#[async_trait::async_trait]
pub trait RefundMetric<T>
where
    T: AnalyticsDataSource + RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Loads the metric rows for the given dimensions, filters, granularity
    /// and time range, paired with their bucket identifiers.
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>;
}
#[async_trait::async_trait]
impl<T> RefundMetric<T> for RefundMetrics
where
    T: AnalyticsDataSource + RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Dispatches to the concrete metric implementation matching this
    /// `RefundMetrics` variant; `Sessionized*` variants route to the
    /// sessionized implementations.
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
        match self {
            Self::RefundSuccessRate => {
                RefundSuccessRate::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::RefundCount => {
                RefundCount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::RefundSuccessCount => {
                RefundSuccessCount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::RefundProcessedAmount => {
                RefundProcessedAmount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundSuccessRate => {
                sessionized_metrics::RefundSuccessRate::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundCount => {
                sessionized_metrics::RefundCount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundSuccessCount => {
                sessionized_metrics::RefundSuccessCount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundProcessedAmount => {
                sessionized_metrics::RefundProcessedAmount::default()
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundReason => {
                sessionized_metrics::RefundReason
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
            Self::SessionizedRefundErrorMessage => {
                sessionized_metrics::RefundErrorMessage
                    .load_metrics(dimensions, auth, filters, granularity, time_range, pool)
                    .await
            }
        }
    }
}
// File: crates/analytics/src/refunds/filters.rs
use api_models::analytics::{
refunds::{RefundDimensions, RefundType},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums::{Currency, RefundStatus};
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, ToSql, Window},
types::{
AnalyticsCollection, AnalyticsDataSource, DBEnumWrapper, FiltersError, FiltersResult,
LoadRow,
},
};
pub trait RefundFilterAnalytics: LoadRow<RefundFilterRow> {}
/// Fetches the distinct values present for `dimension` in the refund
/// collection, scoped to `auth` and restricted to `time_range`.
///
/// # Errors
/// Returns `FiltersError::QueryBuildingError` if the query cannot be built
/// and `FiltersError::QueryExecutionFailure` if it fails to run.
pub async fn get_refund_filter_for_dimension<T>(
    dimension: RefundDimensions,
    auth: &AuthInfo,
    time_range: &TimeRange,
    pool: &T,
) -> FiltersResult<Vec<RefundFilterRow>>
where
    T: AnalyticsDataSource + RefundFilterAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Refund);
    // Select only the requested dimension column.
    query_builder.add_select_column(dimension).switch()?;
    time_range
        .set_filter_clause(&mut query_builder)
        .attach_printable("Error filtering time range")
        .switch()?;
    auth.set_filter_clause(&mut query_builder).switch()?;
    // DISTINCT so each value appears once.
    query_builder.set_distinct();
    query_builder
        .execute_query::<RefundFilterRow, _>(pool)
        .await
        .change_context(FiltersError::QueryBuildingError)?
        .change_context(FiltersError::QueryExecutionFailure)
}
/// One row of a distinct-values filter query; only the column matching the
/// queried dimension is populated, the rest deserialize as `None`.
#[derive(Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct RefundFilterRow {
    pub currency: Option<DBEnumWrapper<Currency>>,
    pub refund_status: Option<DBEnumWrapper<RefundStatus>>,
    pub connector: Option<String>,
    pub refund_type: Option<DBEnumWrapper<RefundType>>,
    pub profile_id: Option<String>,
    pub refund_reason: Option<String>,
    pub refund_error_message: Option<String>,
}
// File: crates/analytics/src/refunds/accumulator.rs
use api_models::analytics::refunds::{
ErrorMessagesResult, ReasonsResult, RefundMetricsBucketValue,
};
use bigdecimal::ToPrimitive;
use diesel_models::enums as storage_enums;
use super::{distribution::RefundDistributionRow, metrics::RefundMetricRow};
/// Per-bucket collection of all refund metric/distribution accumulators; one
/// instance exists per `RefundMetricsBucketIdentifier`.
#[derive(Debug, Default)]
pub struct RefundMetricsAccumulator {
    pub refund_success_rate: SuccessRateAccumulator,
    pub refund_count: CountAccumulator,
    pub refund_success: CountAccumulator,
    pub processed_amount: RefundProcessedAmountAccumulator,
    pub refund_reason: RefundReasonAccumulator,
    pub refund_reason_distribution: RefundReasonDistributionAccumulator,
    pub refund_error_message: RefundReasonAccumulator,
    pub refund_error_message_distribution: RefundErrorMessageDistributionAccumulator,
}
/// One recorded (reason, count, total) triple for the reason distribution.
#[derive(Debug, Default)]
pub struct RefundReasonDistributionRow {
    pub count: i64,
    pub total: i64,
    pub refund_reason: String,
}
/// Accumulates reason-distribution rows across buckets.
#[derive(Debug, Default)]
pub struct RefundReasonDistributionAccumulator {
    pub refund_reason_vec: Vec<RefundReasonDistributionRow>,
}
/// One recorded (error message, count, total) triple for the error-message
/// distribution.
#[derive(Debug, Default)]
pub struct RefundErrorMessageDistributionRow {
    pub count: i64,
    pub total: i64,
    pub refund_error_message: String,
}
/// Accumulates error-message-distribution rows across buckets.
#[derive(Debug, Default)]
pub struct RefundErrorMessageDistributionAccumulator {
    pub refund_error_message_vec: Vec<RefundErrorMessageDistributionRow>,
}
/// Simple running count used for the reason / error-message metrics.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct RefundReasonAccumulator {
    pub count: u64,
}
/// Running success/total tally used to compute a success-rate percentage.
#[derive(Debug, Default)]
pub struct SuccessRateAccumulator {
    pub success: u32,
    pub total: u32,
}
/// Optional running count; `None` until the first bucket contributes one.
#[derive(Debug, Default)]
#[repr(transparent)]
pub struct CountAccumulator {
    pub count: Option<i64>,
}
/// Running processed-amount total and count; `None` until first contribution.
#[derive(Debug, Default)]
pub struct RefundProcessedAmountAccumulator {
    pub count: Option<i64>,
    pub total: Option<i64>,
}
/// Folds metric rows bucket-by-bucket and emits a final aggregated value.
pub trait RefundMetricAccumulator {
    /// Aggregated value produced by [`Self::collect`].
    type MetricOutput;
    /// Incorporates one metric row into the running aggregate.
    fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow);
    /// Consumes the accumulator and returns the aggregated value.
    fn collect(self) -> Self::MetricOutput;
}
/// Folds distribution rows bucket-by-bucket and emits a final aggregated value.
pub trait RefundDistributionAccumulator {
    /// Aggregated value produced by [`Self::collect`].
    type DistributionOutput;
    /// Incorporates one distribution row into the running aggregate.
    fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow);
    /// Consumes the accumulator and returns the aggregated value.
    fn collect(self) -> Self::DistributionOutput;
}
impl RefundDistributionAccumulator for RefundReasonDistributionAccumulator {
type DistributionOutput = Option<Vec<ReasonsResult>>;
fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow) {
self.refund_reason_vec.push(RefundReasonDistributionRow {
count: distribution.count.unwrap_or_default(),
total: distribution
.total
.clone()
.map(|i| i.to_i64().unwrap_or_default())
.unwrap_or_default(),
refund_reason: distribution.refund_reason.clone().unwrap_or_default(),
})
}
fn collect(mut self) -> Self::DistributionOutput {
if self.refund_reason_vec.is_empty() {
None
} else {
self.refund_reason_vec.sort_by(|a, b| b.count.cmp(&a.count));
let mut res: Vec<ReasonsResult> = Vec::new();
for val in self.refund_reason_vec.into_iter() {
let perc = f64::from(u32::try_from(val.count).ok()?) * 100.0
/ f64::from(u32::try_from(val.total).ok()?);
res.push(ReasonsResult {
reason: val.refund_reason,
count: val.count,
percentage: (perc * 100.0).round() / 100.0,
})
}
Some(res)
}
}
}
impl RefundDistributionAccumulator for RefundErrorMessageDistributionAccumulator {
    type DistributionOutput = Option<Vec<ErrorMessagesResult>>;

    /// Records one distribution row, defaulting missing counts/totals/messages
    /// to their zero values.
    fn add_distribution_bucket(&mut self, distribution: &RefundDistributionRow) {
        let row = RefundErrorMessageDistributionRow {
            count: distribution.count.unwrap_or_default(),
            total: distribution
                .total
                .clone()
                .map(|amount| amount.to_i64().unwrap_or_default())
                .unwrap_or_default(),
            refund_error_message: distribution
                .refund_error_message
                .clone()
                .unwrap_or_default(),
        };
        self.refund_error_message_vec.push(row);
    }

    /// Sorts error messages by descending count and converts each row into a
    /// result carrying a two-decimal percentage. Returns `None` when nothing
    /// was accumulated, or when any count/total does not fit into `u32`.
    fn collect(mut self) -> Self::DistributionOutput {
        if self.refund_error_message_vec.is_empty() {
            return None;
        }
        self.refund_error_message_vec
            .sort_by(|left, right| right.count.cmp(&left.count));
        self.refund_error_message_vec
            .into_iter()
            .map(|row| {
                let count = f64::from(u32::try_from(row.count).ok()?);
                let total = f64::from(u32::try_from(row.total).ok()?);
                // perc * 100, rounded, / 100 => two decimal places.
                let percentage = (count * 100.0 / total * 100.0).round() / 100.0;
                Some(ErrorMessagesResult {
                    error_message: row.refund_error_message,
                    count: row.count,
                    percentage,
                })
            })
            .collect()
    }
}
impl RefundMetricAccumulator for CountAccumulator {
    type MetricOutput = Option<u64>;

    /// Adds this bucket's count to the running sum; a `None` bucket count
    /// leaves the accumulator untouched (so `None` still means "no data").
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) {
        if let Some(bucket_count) = metrics.count {
            self.count = Some(self.count.unwrap_or(0) + bucket_count);
        }
    }

    /// Final count as `u64`; `None` when nothing was seen or the sum is negative.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        match self.count {
            Some(total) => u64::try_from(total).ok(),
            None => None,
        }
    }
}
impl RefundMetricAccumulator for RefundProcessedAmountAccumulator {
    type MetricOutput = (Option<u64>, Option<u64>, Option<u64>);

    /// Adds this bucket's refund amount and row count into the running sums;
    /// `None` contributions leave the corresponding field untouched.
    #[inline]
    fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) {
        if let Some(amount) = metrics.total.as_ref().and_then(ToPrimitive::to_i64) {
            self.total = Some(self.total.unwrap_or(0) + amount);
        }
        if let Some(bucket_count) = metrics.count {
            self.count = Some(self.count.unwrap_or(0) + bucket_count);
        }
    }

    /// Returns `(processed_amount, processed_count, usd_placeholder)`.
    /// A missing amount collapses to `Some(0)`; the third slot is always
    /// `Some(0)` here.
    #[inline]
    fn collect(self) -> Self::MetricOutput {
        let amount = u64::try_from(self.total.unwrap_or(0)).ok();
        let count = match self.count {
            Some(rows) => u64::try_from(rows).ok(),
            None => None,
        };
        (amount, count, Some(0))
    }
}
impl RefundMetricAccumulator for SuccessRateAccumulator {
    type MetricOutput = (Option<u32>, Option<u32>, Option<f64>);

    /// Adds this bucket's row count to `total`, and additionally to `success`
    /// when the bucket's refund status is `Success`. Counts that do not fit
    /// into `u32` are silently skipped.
    fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) {
        if let Some(ref refund_status) = metrics.refund_status {
            if refund_status.as_ref() == &storage_enums::RefundStatus::Success {
                if let Some(success) = metrics
                    .count
                    .and_then(|success| u32::try_from(success).ok())
                {
                    self.success += success;
                }
            }
        };
        if let Some(total) = metrics.count.and_then(|total| u32::try_from(total).ok()) {
            self.total += total;
        }
    }

    /// Returns `(success_count, total_count, success_rate_percent)`, or all
    /// `None` when no refunds were observed.
    fn collect(self) -> Self::MetricOutput {
        if self.total == 0 {
            (None, None, None)
        } else {
            // `self.total > 0` is guaranteed by the branch above, so the rate
            // is always well-defined here. (The previous nested match on
            // freshly-wrapped `Some` values with a `t > 0` guard was dead code.)
            let success_rate = f64::from(self.success) * 100.0 / f64::from(self.total);
            (Some(self.success), Some(self.total), Some(success_rate))
        }
    }
}
impl RefundMetricAccumulator for RefundReasonAccumulator {
    type MetricOutput = Option<u64>;

    /// Adds this bucket's row count; missing or negative counts contribute
    /// nothing.
    fn add_metrics_bucket(&mut self, metrics: &RefundMetricRow) {
        let delta = metrics
            .count
            .and_then(|bucket_count| u64::try_from(bucket_count).ok())
            .unwrap_or(0);
        self.count += delta;
    }

    /// Total accumulated count; always `Some`, zero when nothing was seen.
    fn collect(self) -> Self::MetricOutput {
        Some(self.count)
    }
}
impl RefundMetricsAccumulator {
    /// Collapses every per-metric accumulator into the bucket value returned
    /// to the API layer.
    pub fn collect(self) -> RefundMetricsBucketValue {
        // Success-rate accumulator yields the counts plus the derived percentage.
        let (successful_refunds, total_refunds, refund_success_rate) =
            self.refund_success_rate.collect();
        // Processed-amount accumulator yields (amount, count, Some(0) USD slot).
        let (refund_processed_amount, refund_processed_count, refund_processed_amount_in_usd) =
            self.processed_amount.collect();
        RefundMetricsBucketValue {
            successful_refunds,
            total_refunds,
            refund_success_rate,
            refund_count: self.refund_count.collect(),
            refund_success_count: self.refund_success.collect(),
            refund_processed_amount,
            refund_processed_amount_in_usd,
            refund_processed_count,
            refund_reason_distribution: self.refund_reason_distribution.collect(),
            refund_error_message_distribution: self.refund_error_message_distribution.collect(),
            refund_reason_count: self.refund_reason.collect(),
            refund_error_message_count: self.refund_error_message.collect(),
        }
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/refunds/core.rs",
"crates/analytics/src/refunds/distribution.rs",
"crates/analytics/src/refunds/types.rs",
"crates/analytics/src/refunds/metrics.rs",
"crates/analytics/src/refunds/filters.rs",
"crates/analytics/src/refunds/accumulator.rs"
],
"module": "crates/analytics/src/refunds",
"num_files": 6,
"token_count": 7740
}
|
module_-5471253082564447426
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/refunds/metrics
Files: 5
</path>
<module>
// File: crates/analytics/src/refunds/metrics/refund_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: total refund amount processed for successful refunds, grouped by
/// the requested dimensions plus currency.
#[derive(Default)]
pub(super) struct RefundProcessedAmount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundProcessedAmount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Refund);
        // Select every requested dimension so rows can be bucketed by it.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // SUM(refund_amount) AS total — the processed amount per bucket.
        // NOTE(review): unlike the sessionized variant, no COUNT aggregate is
        // selected here, so `RefundMetricRow.count` will be empty for this
        // metric — confirm this is intentional.
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "refund_amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        // start_bucket/end_bucket capture each bucket's actual time span.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        query_builder.add_group_by_clause("currency").switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        // Only successful refunds contribute to the processed amount.
        query_builder
            .add_filter_clause(
                RefundDimensions::RefundStatus,
                storage_enums::RefundStatus::Success,
            )
            .switch()?;
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            // Pair each row with a bucket identifier; bucket bounds are clipped
            // to the granularity when one is supplied, otherwise the request's
            // full time range is used.
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/refund_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: refund success rate. Rows are additionally grouped by refund
/// status; the downstream accumulator derives the percentage from the
/// per-status counts.
#[derive(Default)]
pub(super) struct RefundSuccessRate {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundSuccessRate
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund);
        // Force grouping by refund status so success and failure rows can be
        // told apart when computing the rate.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(RefundDimensions::RefundStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) AS count, plus each bucket's actual time span.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            // Status slot of the identifier is None: the status grouping is
            // internal to this metric, not part of the reported bucket key.
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics.rs
// Sessionized variants of the refund metrics: one submodule per metric, each
// querying the sessionized refund collection.
mod refund_count;
mod refund_error_message;
mod refund_processed_amount;
mod refund_reason;
mod refund_success_count;
mod refund_success_rate;
pub(super) use refund_count::RefundCount;
pub(super) use refund_error_message::RefundErrorMessage;
pub(super) use refund_processed_amount::RefundProcessedAmount;
pub(super) use refund_reason::RefundReason;
pub(super) use refund_success_count::RefundSuccessCount;
pub(super) use refund_success_rate::RefundSuccessRate;
// Re-export the parent's trait and row types for the submodules' `super::` paths.
pub use super::{RefundMetric, RefundMetricAnalytics, RefundMetricRow};
// File: crates/analytics/src/refunds/metrics/refund_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: number of successful refunds per bucket.
#[derive(Default)]
pub(super) struct RefundSuccessCount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundSuccessCount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        let mut query_builder = QueryBuilder::new(AnalyticsCollection::Refund);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) AS count, plus each bucket's actual time span.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        // Restrict to successful refunds only.
        query_builder
            .add_filter_clause(
                RefundDimensions::RefundStatus,
                storage_enums::RefundStatus::Success,
            )
            .switch()?;
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/refund_count.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric: total number of refunds per bucket, any status.
#[derive(Default)]
pub(super) struct RefundCount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundCount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
        let mut query_builder: QueryBuilder<T> = QueryBuilder::new(AnalyticsCollection::Refund);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) AS count, plus each bucket's actual time span.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            // Unlike the other metrics, the refund status is part of the
            // reported bucket identifier here.
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        i.refund_status.as_ref().map(|i| i.0.to_string()),
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/refunds/metrics/refund_processed_amount.rs",
"crates/analytics/src/refunds/metrics/refund_success_rate.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics.rs",
"crates/analytics/src/refunds/metrics/refund_success_count.rs",
"crates/analytics/src/refunds/metrics/refund_count.rs"
],
"module": "crates/analytics/src/refunds/metrics",
"num_files": 5,
"token_count": 3583
}
|
module_-933909590859131937
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: analytics
Module: crates/analytics/src/refunds/metrics/sessionized_metrics
Files: 6
</path>
<module>
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized metric: processed refund amount and row count for successful
/// refunds, grouped by the requested dimensions plus currency.
#[derive(Default)]
pub(crate) struct RefundProcessedAmount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundProcessedAmount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        // Queries the sessionized refund collection, not the raw one.
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // Both COUNT(*) AS count and SUM(refund_amount) AS total are selected,
        // feeding the (amount, count) pair in the accumulator.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Sum {
                field: "refund_amount",
                alias: Some("total"),
            })
            .switch()?;
        query_builder.add_select_column("currency").switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        query_builder.add_group_by_clause("currency").switch()?;
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        // Only successful refunds contribute to the processed amount.
        query_builder
            .add_filter_clause(
                RefundDimensions::RefundStatus,
                storage_enums::RefundStatus::Success,
            )
            .switch()?;
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized metric: refund success rate. Rows are additionally grouped by
/// refund status; the downstream accumulator derives the percentage.
#[derive(Default)]
pub(crate) struct RefundSuccessRate {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundSuccessRate
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        let mut query_builder = QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        // Force grouping by refund status so success and failure rows can be
        // told apart when computing the rate.
        let mut dimensions = dimensions.to_vec();
        dimensions.push(RefundDimensions::RefundStatus);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            // Status slot of the identifier is None: the status grouping is
            // internal to this metric, not part of the reported bucket key.
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket,
ToSql, Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized metric: failed-refund counts per error message, with the
/// overall total of error-message rows attached to every row so the caller
/// can compute ratios.
#[derive(Default)]
pub(crate) struct RefundErrorMessage;
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundErrorMessage
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
        // Inner scalar subquery: sum(sign_flag) over all rows in the time
        // range that carry a non-null error message; embedded below as the
        // `total` denominator column.
        let mut inner_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        inner_query_builder
            .add_select_column("sum(sign_flag)")
            .switch()?;
        inner_query_builder
            .add_custom_filter_clause(
                RefundDimensions::RefundErrorMessage,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        time_range
            .set_filter_clause(&mut inner_query_builder)
            .attach_printable("Error filtering time range for inner query")
            .switch()?;
        let inner_query_string = inner_query_builder
            .build_query()
            .attach_printable("Error building inner query")
            .change_context(MetricsError::QueryBuildingError)?;
        // Outer query: per-dimension counts of failed refunds with an error
        // message, embedding the inner query as `total`.
        let mut outer_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        for dim in dimensions.iter() {
            outer_query_builder.add_select_column(dim).switch()?;
        }
        outer_query_builder
            .add_select_column("sum(sign_flag) AS count")
            .switch()?;
        outer_query_builder
            .add_select_column(format!("({inner_query_string}) AS total"))
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters
            .set_filter_clause(&mut outer_query_builder)
            .switch()?;
        auth.set_filter_clause(&mut outer_query_builder).switch()?;
        time_range
            .set_filter_clause(&mut outer_query_builder)
            .attach_printable("Error filtering time range for outer query")
            .switch()?;
        // Only failed refunds with a recorded error message contribute.
        outer_query_builder
            .add_filter_clause(
                RefundDimensions::RefundStatus,
                storage_enums::RefundStatus::Failure,
            )
            .switch()?;
        outer_query_builder
            .add_custom_filter_clause(
                RefundDimensions::RefundErrorMessage,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        for dim in dimensions.iter() {
            outer_query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut outer_query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Most frequent error messages first, then a deterministic order over
        // the remaining (non-error-message) dimensions.
        outer_query_builder
            .add_order_by_clause("count", Order::Descending)
            .attach_printable("Error adding order by clause")
            .switch()?;
        let filtered_dimensions: Vec<&RefundDimensions> = dimensions
            .iter()
            .filter(|&&dim| dim != RefundDimensions::RefundErrorMessage)
            .collect();
        for dim in &filtered_dimensions {
            outer_query_builder
                .add_order_by_clause(*dim, Order::Ascending)
                .attach_printable("Error adding order by clause")
                .switch()?;
        }
        outer_query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Sessionized metric: number of successful refunds per bucket.
#[derive(Default)]
pub(crate) struct RefundSuccessCount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundSuccessCount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>>
    where
        T: AnalyticsDataSource + super::RefundMetricAnalytics,
    {
        let mut query_builder = QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        // COUNT(*) AS count, plus each bucket's actual time span.
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range.set_filter_clause(&mut query_builder).switch()?;
        for dim in dimensions.iter() {
            query_builder.add_group_by_clause(dim).switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .switch()?;
        }
        // Restrict to successful refunds only.
        query_builder
            .add_filter_clause(
                RefundDimensions::RefundStatus,
                storage_enums::RefundStatus::Success,
            )
            .switch()?;
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{
Aggregate, FilterTypes, GroupByClause, Order, QueryBuilder, QueryFilter, SeriesBucket,
ToSql, Window,
},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric that reports refund counts per group of dimensions together with an
/// overall `total` of refunds carrying a non-null refund reason in the range.
#[derive(Default)]
pub(crate) struct RefundReason;
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundReason
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Builds a two-level query over the sessionized refunds collection:
    /// an inner sub-query computing the overall total of refunds with a
    /// non-null reason in `time_range`, embedded as a `total` column of an
    /// outer query that groups signed counts by the requested `dimensions`,
    /// ordered by count (descending) then the remaining dimensions.
    ///
    /// The builder-call order below determines the generated SQL; do not
    /// reorder without verifying the resulting query.
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
        // Inner query: total signed count of refunds that have a reason,
        // limited only by the time range (not by the other filters).
        let mut inner_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        inner_query_builder
            .add_select_column("sum(sign_flag)")
            .switch()?;
        inner_query_builder
            .add_custom_filter_clause(
                RefundDimensions::RefundReason,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        time_range
            .set_filter_clause(&mut inner_query_builder)
            .attach_printable("Error filtering time range for inner query")
            .switch()?;
        // Render the inner query to a string so it can be embedded as a
        // scalar sub-select in the outer query's projection.
        let inner_query_string = inner_query_builder
            .build_query()
            .attach_printable("Error building inner query")
            .change_context(MetricsError::QueryBuildingError)?;
        let mut outer_query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        for dim in dimensions.iter() {
            outer_query_builder.add_select_column(dim).switch()?;
        }
        // Signed sum rather than plain COUNT: sign_flag accounts for
        // insert/delete row versions in the sessionized collection.
        outer_query_builder
            .add_select_column("sum(sign_flag) AS count")
            .switch()?;
        outer_query_builder
            .add_select_column(format!("({inner_query_string}) AS total"))
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        outer_query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters
            .set_filter_clause(&mut outer_query_builder)
            .switch()?;
        auth.set_filter_clause(&mut outer_query_builder).switch()?;
        time_range
            .set_filter_clause(&mut outer_query_builder)
            .attach_printable("Error filtering time range for outer query")
            .switch()?;
        // Rows without a refund reason are excluded from the breakdown.
        outer_query_builder
            .add_custom_filter_clause(
                RefundDimensions::RefundReason,
                "NULL",
                FilterTypes::IsNotNull,
            )
            .switch()?;
        for dim in dimensions.iter() {
            outer_query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut outer_query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        // Primary ordering: highest refund count first.
        outer_query_builder
            .add_order_by_clause("count", Order::Descending)
            .attach_printable("Error adding order by clause")
            .switch()?;
        // Secondary ordering: every dimension except refund reason itself,
        // ascending, to make result ordering deterministic.
        let filtered_dimensions: Vec<&RefundDimensions> = dimensions
            .iter()
            .filter(|&&dim| dim != RefundDimensions::RefundReason)
            .collect();
        for dim in &filtered_dimensions {
            outer_query_builder
                .add_order_by_clause(*dim, Order::Ascending)
                .attach_printable("Error adding order by clause")
                .switch()?;
        }
        outer_query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Refund status is not part of this metric's identity.
                        None,
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            // Clip bucket boundaries to the granularity window,
                            // defaulting to the requested range when absent.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<
                HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>,
                crate::query::PostProcessingError,
            >>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
// File: crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs
use std::collections::HashSet;
use api_models::analytics::{
refunds::{RefundDimensions, RefundFilters, RefundMetricsBucketIdentifier},
Granularity, TimeRange,
};
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
use time::PrimitiveDateTime;
use super::RefundMetricRow;
use crate::{
enums::AuthInfo,
query::{Aggregate, GroupByClause, QueryBuilder, QueryFilter, SeriesBucket, ToSql, Window},
types::{AnalyticsCollection, AnalyticsDataSource, MetricsError, MetricsResult},
};
/// Metric that counts refund rows in the sessionized refunds collection,
/// keyed by refund status among the other bucket identifiers.
#[derive(Default)]
pub(crate) struct RefundCount {}
#[async_trait::async_trait]
impl<T> super::RefundMetric<T> for RefundCount
where
    T: AnalyticsDataSource + super::RefundMetricAnalytics,
    PrimitiveDateTime: ToSql<T>,
    AnalyticsCollection: ToSql<T>,
    Granularity: GroupByClause<T>,
    Aggregate<&'static str>: ToSql<T>,
    Window<&'static str>: ToSql<T>,
{
    /// Counts all refunds (no status filter) in the sessionized refunds
    /// collection, grouped by the requested `dimensions` (plus a time bucket
    /// when a `granularity` is supplied) and restricted by `filters`, `auth`
    /// and `time_range`.
    ///
    /// The builder-call order below determines the generated SQL; do not
    /// reorder without verifying the resulting query.
    async fn load_metrics(
        &self,
        dimensions: &[RefundDimensions],
        auth: &AuthInfo,
        filters: &RefundFilters,
        granularity: Option<Granularity>,
        time_range: &TimeRange,
        pool: &T,
    ) -> MetricsResult<HashSet<(RefundMetricsBucketIdentifier, RefundMetricRow)>> {
        let mut query_builder: QueryBuilder<T> =
            QueryBuilder::new(AnalyticsCollection::RefundSessionized);
        // Select every requested dimension so rows can be grouped on them below.
        for dim in dimensions.iter() {
            query_builder.add_select_column(dim).switch()?;
        }
        query_builder
            .add_select_column(Aggregate::Count {
                field: None,
                alias: Some("count"),
            })
            .switch()?;
        // Earliest/latest refund timestamps per group; used below to clip the
        // reported time range to the data actually present in the bucket.
        query_builder
            .add_select_column(Aggregate::Min {
                field: "created_at",
                alias: Some("start_bucket"),
            })
            .switch()?;
        query_builder
            .add_select_column(Aggregate::Max {
                field: "created_at",
                alias: Some("end_bucket"),
            })
            .switch()?;
        filters.set_filter_clause(&mut query_builder).switch()?;
        auth.set_filter_clause(&mut query_builder).switch()?;
        time_range
            .set_filter_clause(&mut query_builder)
            .attach_printable("Error filtering time range")
            .switch()?;
        for dim in dimensions.iter() {
            query_builder
                .add_group_by_clause(dim)
                .attach_printable("Error grouping by dimensions")
                .switch()?;
        }
        if let Some(granularity) = granularity {
            granularity
                .set_group_by_clause(&mut query_builder)
                .attach_printable("Error adding granularity")
                .switch()?;
        }
        query_builder
            .execute_query::<RefundMetricRow, _>(pool)
            .await
            .change_context(MetricsError::QueryBuildingError)?
            .change_context(MetricsError::QueryExecutionFailure)?
            .into_iter()
            .map(|i| {
                Ok((
                    RefundMetricsBucketIdentifier::new(
                        i.currency.as_ref().map(|i| i.0),
                        // Unlike the success-count metric, refund status is
                        // part of the bucket identity here.
                        i.refund_status.as_ref().map(|i| i.0.to_string()),
                        i.connector.clone(),
                        i.refund_type.as_ref().map(|i| i.0.to_string()),
                        i.profile_id.clone(),
                        i.refund_reason.clone(),
                        i.refund_error_message.clone(),
                        TimeRange {
                            // Clip bucket boundaries to the granularity window,
                            // defaulting to the requested range when absent.
                            start_time: match (granularity, i.start_bucket) {
                                (Some(g), Some(st)) => g.clip_to_start(st)?,
                                _ => time_range.start_time,
                            },
                            end_time: granularity.as_ref().map_or_else(
                                || Ok(time_range.end_time),
                                |g| i.end_bucket.map(|et| g.clip_to_end(et)).transpose(),
                            )?,
                        },
                    ),
                    i,
                ))
            })
            .collect::<error_stack::Result<HashSet<_>, crate::query::PostProcessingError>>()
            .change_context(MetricsError::PostProcessingFailure)
    }
}
</module>
|
{
"crate": "analytics",
"file": null,
"files": [
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_processed_amount.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_rate.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_error_message.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_success_count.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_reason.rs",
"crates/analytics/src/refunds/metrics/sessionized_metrics/refund_count.rs"
],
"module": "crates/analytics/src/refunds/metrics/sessionized_metrics",
"num_files": 6,
"token_count": 5976
}
|
module_1615159993063498882
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: hyperswitch_interfaces
Module: crates/hyperswitch_interfaces/src/api
Files: 20
</path>
<module>
// File: crates/hyperswitch_interfaces/src/api/fraud_check_v2.rs
//! FRM V2 interface
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::FrmFlowData,
router_flow_types::{Checkout, Fulfillment, RecordReturn, Sale, Transaction},
router_request_types::fraud_check::{
FraudCheckCheckoutData, FraudCheckFulfillmentData, FraudCheckRecordReturnData,
FraudCheckSaleData, FraudCheckTransactionData,
},
router_response_types::fraud_check::FraudCheckResponseData,
};
use crate::api::ConnectorIntegrationV2;
/// Connector integration for the fraud-check `Sale` flow (V2 API).
pub trait FraudCheckSaleV2:
    ConnectorIntegrationV2<Sale, FrmFlowData, FraudCheckSaleData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Checkout` flow (V2 API).
pub trait FraudCheckCheckoutV2:
    ConnectorIntegrationV2<Checkout, FrmFlowData, FraudCheckCheckoutData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Transaction` flow (V2 API).
pub trait FraudCheckTransactionV2:
    ConnectorIntegrationV2<Transaction, FrmFlowData, FraudCheckTransactionData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Fulfillment` flow (V2 API).
pub trait FraudCheckFulfillmentV2:
    ConnectorIntegrationV2<Fulfillment, FrmFlowData, FraudCheckFulfillmentData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `RecordReturn` flow (V2 API).
pub trait FraudCheckRecordReturnV2:
    ConnectorIntegrationV2<
    RecordReturn,
    FrmFlowData,
    FraudCheckRecordReturnData,
    FraudCheckResponseData,
>
{
}
/// Umbrella trait for a connector supporting every fraud-check flow (V2 API).
pub trait FraudCheckV2:
    super::ConnectorCommon
    + FraudCheckSaleV2
    + FraudCheckTransactionV2
    + FraudCheckCheckoutV2
    + FraudCheckFulfillmentV2
    + FraudCheckRecordReturnV2
{
}
// File: crates/hyperswitch_interfaces/src/api/payouts_v2.rs
//! Payouts V2 interface
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::PayoutFlowData,
router_flow_types::payouts::{
PoCancel, PoCreate, PoEligibility, PoFulfill, PoQuote, PoRecipient, PoRecipientAccount,
PoSync,
},
router_request_types::PayoutsData,
router_response_types::PayoutsResponseData,
};
use super::ConnectorCommon;
use crate::api::ConnectorIntegrationV2;
/// Connector integration for cancelling a payout (V2 API).
pub trait PayoutCancelV2:
    ConnectorIntegrationV2<PoCancel, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for creating a payout (V2 API).
pub trait PayoutCreateV2:
    ConnectorIntegrationV2<PoCreate, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for checking payout eligibility (V2 API).
pub trait PayoutEligibilityV2:
    ConnectorIntegrationV2<PoEligibility, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for fulfilling a payout (V2 API).
pub trait PayoutFulfillV2:
    ConnectorIntegrationV2<PoFulfill, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for fetching a payout quote (V2 API).
pub trait PayoutQuoteV2:
    ConnectorIntegrationV2<PoQuote, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for creating a payout recipient (V2 API).
pub trait PayoutRecipientV2:
    ConnectorIntegrationV2<PoRecipient, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for creating a payout recipient account (V2 API).
pub trait PayoutRecipientAccountV2:
    ConnectorIntegrationV2<PoRecipientAccount, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for syncing payout status (V2 API).
pub trait PayoutSyncV2:
    ConnectorIntegrationV2<PoSync, PayoutFlowData, PayoutsData, PayoutsResponseData>
{
}
#[cfg(feature = "payouts")]
/// Umbrella trait for a connector supporting every payout flow (V2 API).
pub trait PayoutsV2:
    ConnectorCommon
    + PayoutCancelV2
    + PayoutCreateV2
    + PayoutEligibilityV2
    + PayoutFulfillV2
    + PayoutQuoteV2
    + PayoutRecipientV2
    + PayoutRecipientAccountV2
    + PayoutSyncV2
{
}
/// Empty trait for when payouts feature is disabled
#[cfg(not(feature = "payouts"))]
pub trait PayoutsV2 {}
// File: crates/hyperswitch_interfaces/src/api/refunds.rs
//! Refunds interface
use hyperswitch_domain_models::{
router_flow_types::{Execute, RSync},
router_request_types::RefundsData,
router_response_types::RefundsResponseData,
};
use crate::api::{self, ConnectorCommon};
/// Connector integration for executing a refund.
pub trait RefundExecute:
    api::ConnectorIntegration<Execute, RefundsData, RefundsResponseData>
{
}
/// Connector integration for syncing refund status.
pub trait RefundSync: api::ConnectorIntegration<RSync, RefundsData, RefundsResponseData> {}
/// Umbrella trait for a connector supporting both refund flows.
pub trait Refund: ConnectorCommon + RefundExecute + RefundSync {}
// File: crates/hyperswitch_interfaces/src/api/subscriptions.rs
//! Subscriptions Interface for V1
use hyperswitch_domain_models::{
router_flow_types::{
subscriptions::{
GetSubscriptionEstimate, GetSubscriptionPlanPrices, GetSubscriptionPlans,
SubscriptionCreate as SubscriptionCreateFlow,
},
InvoiceRecordBack,
},
router_request_types::{
revenue_recovery::InvoiceRecordBackRequest,
subscriptions::{
GetSubscriptionEstimateRequest, GetSubscriptionPlanPricesRequest,
GetSubscriptionPlansRequest, SubscriptionCreateRequest,
},
},
router_response_types::{
revenue_recovery::InvoiceRecordBackResponse,
subscriptions::{
GetSubscriptionEstimateResponse, GetSubscriptionPlanPricesResponse,
GetSubscriptionPlansResponse, SubscriptionCreateResponse,
},
},
};
use super::{
payments::ConnectorCustomer as PaymentsConnectorCustomer, ConnectorCommon, ConnectorIntegration,
};
/// Connector integration for fetching subscription plans (V1 API).
pub trait GetSubscriptionPlansFlow:
    ConnectorIntegration<
    GetSubscriptionPlans,
    GetSubscriptionPlansRequest,
    GetSubscriptionPlansResponse,
>
{
}
/// Connector integration for recording an invoice back to the connector (V1 API).
pub trait SubscriptionRecordBackFlow:
    ConnectorIntegration<InvoiceRecordBack, InvoiceRecordBackRequest, InvoiceRecordBackResponse>
{
}
/// Connector integration for fetching subscription plan prices (V1 API).
pub trait GetSubscriptionPlanPricesFlow:
    ConnectorIntegration<
    GetSubscriptionPlanPrices,
    GetSubscriptionPlanPricesRequest,
    GetSubscriptionPlanPricesResponse,
>
{
}
/// Connector integration for creating a subscription (V1 API).
pub trait SubscriptionCreate:
    ConnectorIntegration<SubscriptionCreateFlow, SubscriptionCreateRequest, SubscriptionCreateResponse>
{
}
/// Connector integration for fetching a subscription estimate (V1 API).
pub trait GetSubscriptionEstimateFlow:
    ConnectorIntegration<
    GetSubscriptionEstimate,
    GetSubscriptionEstimateRequest,
    GetSubscriptionEstimateResponse,
>
{
}
/// Umbrella trait for a connector supporting every subscription flow (V1 API).
pub trait Subscriptions:
    ConnectorCommon
    + GetSubscriptionPlansFlow
    + GetSubscriptionPlanPricesFlow
    + SubscriptionCreate
    + PaymentsConnectorCustomer
    + SubscriptionRecordBackFlow
    + GetSubscriptionEstimateFlow
{
}
// File: crates/hyperswitch_interfaces/src/api/payouts.rs
//! Payouts interface
use hyperswitch_domain_models::{
router_flow_types::payouts::{
PoCancel, PoCreate, PoEligibility, PoFulfill, PoQuote, PoRecipient, PoRecipientAccount,
PoSync,
},
router_request_types::PayoutsData,
router_response_types::PayoutsResponseData,
};
use super::ConnectorCommon;
use crate::api::ConnectorIntegration;
/// Connector integration for cancelling a payout.
pub trait PayoutCancel: ConnectorIntegration<PoCancel, PayoutsData, PayoutsResponseData> {}
/// Connector integration for creating a payout.
pub trait PayoutCreate: ConnectorIntegration<PoCreate, PayoutsData, PayoutsResponseData> {}
/// Connector integration for checking payout eligibility.
pub trait PayoutEligibility:
    ConnectorIntegration<PoEligibility, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for fulfilling a payout.
pub trait PayoutFulfill: ConnectorIntegration<PoFulfill, PayoutsData, PayoutsResponseData> {}
/// Connector integration for fetching a payout quote.
pub trait PayoutQuote: ConnectorIntegration<PoQuote, PayoutsData, PayoutsResponseData> {}
/// Connector integration for creating a payout recipient.
pub trait PayoutRecipient:
    ConnectorIntegration<PoRecipient, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for creating a payout recipient account.
pub trait PayoutRecipientAccount:
    ConnectorIntegration<PoRecipientAccount, PayoutsData, PayoutsResponseData>
{
}
/// Connector integration for syncing payout status.
pub trait PayoutSync: ConnectorIntegration<PoSync, PayoutsData, PayoutsResponseData> {}
#[cfg(feature = "payouts")]
/// Umbrella trait for a connector supporting every payout flow.
pub trait Payouts:
    ConnectorCommon
    + PayoutCancel
    + PayoutCreate
    + PayoutEligibility
    + PayoutFulfill
    + PayoutQuote
    + PayoutRecipient
    + PayoutRecipientAccount
    + PayoutSync
{
}
/// Empty trait for when payouts feature is disabled
#[cfg(not(feature = "payouts"))]
pub trait Payouts {}
// File: crates/hyperswitch_interfaces/src/api/refunds_v2.rs
//! Refunds V2 interface
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::RefundFlowData,
router_flow_types::refunds::{Execute, RSync},
router_request_types::RefundsData,
router_response_types::RefundsResponseData,
};
use crate::api::{ConnectorCommon, ConnectorIntegrationV2};
/// Connector integration for executing a refund (V2 API).
pub trait RefundExecuteV2:
    ConnectorIntegrationV2<Execute, RefundFlowData, RefundsData, RefundsResponseData>
{
}
/// Connector integration for syncing refund status (V2 API).
pub trait RefundSyncV2:
    ConnectorIntegrationV2<RSync, RefundFlowData, RefundsData, RefundsResponseData>
{
}
/// Umbrella trait for a connector supporting both refund flows (V2 API).
pub trait RefundV2: ConnectorCommon + RefundExecuteV2 + RefundSyncV2 {}
// File: crates/hyperswitch_interfaces/src/api/fraud_check.rs
//! FRM interface
use hyperswitch_domain_models::{
router_flow_types::{Checkout, Fulfillment, RecordReturn, Sale, Transaction},
router_request_types::fraud_check::{
FraudCheckCheckoutData, FraudCheckFulfillmentData, FraudCheckRecordReturnData,
FraudCheckSaleData, FraudCheckTransactionData,
},
router_response_types::fraud_check::FraudCheckResponseData,
};
use crate::api::ConnectorIntegration;
/// Connector integration for the fraud-check `Sale` flow.
pub trait FraudCheckSale:
    ConnectorIntegration<Sale, FraudCheckSaleData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Checkout` flow.
pub trait FraudCheckCheckout:
    ConnectorIntegration<Checkout, FraudCheckCheckoutData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Transaction` flow.
pub trait FraudCheckTransaction:
    ConnectorIntegration<Transaction, FraudCheckTransactionData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `Fulfillment` flow.
pub trait FraudCheckFulfillment:
    ConnectorIntegration<Fulfillment, FraudCheckFulfillmentData, FraudCheckResponseData>
{
}
/// Connector integration for the fraud-check `RecordReturn` flow.
pub trait FraudCheckRecordReturn:
    ConnectorIntegration<RecordReturn, FraudCheckRecordReturnData, FraudCheckResponseData>
{
}
/// Umbrella trait for a connector supporting every fraud-check flow.
pub trait FraudCheck:
    super::ConnectorCommon
    + FraudCheckSale
    + FraudCheckTransaction
    + FraudCheckCheckout
    + FraudCheckFulfillment
    + FraudCheckRecordReturn
{
}
// File: crates/hyperswitch_interfaces/src/api/subscriptions_v2.rs
//! SubscriptionsV2
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::{
GetSubscriptionEstimateData, GetSubscriptionPlanPricesData, GetSubscriptionPlansData,
InvoiceRecordBackData, SubscriptionCreateData, SubscriptionCustomerData,
},
router_flow_types::{
revenue_recovery::InvoiceRecordBack,
subscriptions::{
GetSubscriptionEstimate, GetSubscriptionPlanPrices, GetSubscriptionPlans,
SubscriptionCreate,
},
CreateConnectorCustomer,
},
router_request_types::{
revenue_recovery::InvoiceRecordBackRequest,
subscriptions::{
GetSubscriptionEstimateRequest, GetSubscriptionPlanPricesRequest,
GetSubscriptionPlansRequest, SubscriptionCreateRequest,
},
ConnectorCustomerData,
},
router_response_types::{
revenue_recovery::InvoiceRecordBackResponse,
subscriptions::{
GetSubscriptionEstimateResponse, GetSubscriptionPlanPricesResponse,
GetSubscriptionPlansResponse, SubscriptionCreateResponse,
},
PaymentsResponseData,
},
};
use crate::connector_integration_v2::ConnectorIntegrationV2;
/// Umbrella trait for a connector supporting every subscription flow (V2 API).
pub trait SubscriptionsV2:
    GetSubscriptionPlansV2
    + SubscriptionsCreateV2
    + SubscriptionConnectorCustomerV2
    + GetSubscriptionPlanPricesV2
    + SubscriptionRecordBackV2
    + GetSubscriptionEstimateV2
{
}
/// Connector integration for fetching subscription plans (V2 API).
pub trait GetSubscriptionPlansV2:
    ConnectorIntegrationV2<
    GetSubscriptionPlans,
    GetSubscriptionPlansData,
    GetSubscriptionPlansRequest,
    GetSubscriptionPlansResponse,
>
{
}
/// Connector integration for recording an invoice back to the connector (V2 API).
pub trait SubscriptionRecordBackV2:
    ConnectorIntegrationV2<
    InvoiceRecordBack,
    InvoiceRecordBackData,
    InvoiceRecordBackRequest,
    InvoiceRecordBackResponse,
>
{
}
/// Connector integration for fetching subscription plan prices (V2 API).
pub trait GetSubscriptionPlanPricesV2:
    ConnectorIntegrationV2<
    GetSubscriptionPlanPrices,
    GetSubscriptionPlanPricesData,
    GetSubscriptionPlanPricesRequest,
    GetSubscriptionPlanPricesResponse,
>
{
}
/// Connector integration for creating a subscription (V2 API).
pub trait SubscriptionsCreateV2:
    ConnectorIntegrationV2<
    SubscriptionCreate,
    SubscriptionCreateData,
    SubscriptionCreateRequest,
    SubscriptionCreateResponse,
>
{
}
/// Connector integration for creating a connector-side customer for
/// subscriptions (V2 API).
pub trait SubscriptionConnectorCustomerV2:
    ConnectorIntegrationV2<
    CreateConnectorCustomer,
    SubscriptionCustomerData,
    ConnectorCustomerData,
    PaymentsResponseData,
>
{
}
/// Connector integration for fetching a subscription estimate (V2 API).
pub trait GetSubscriptionEstimateV2:
    ConnectorIntegrationV2<
    GetSubscriptionEstimate,
    GetSubscriptionEstimateData,
    GetSubscriptionEstimateRequest,
    GetSubscriptionEstimateResponse,
>
{
}
// File: crates/hyperswitch_interfaces/src/api/payments.rs
//! Payments interface
use hyperswitch_domain_models::{
router_flow_types::{
payments::{
Approve, Authorize, AuthorizeSessionToken, CalculateTax, Capture, CompleteAuthorize,
CreateConnectorCustomer, ExtendAuthorization, IncrementalAuthorization, PSync,
PaymentMethodToken, PostCaptureVoid, PostProcessing, PostSessionTokens, PreProcessing,
Reject, SdkSessionUpdate, Session, SetupMandate, UpdateMetadata, Void,
},
Authenticate, CreateOrder, ExternalVaultProxy, GiftCardBalanceCheck, PostAuthenticate,
PreAuthenticate,
},
router_request_types::{
AuthorizeSessionTokenData, CompleteAuthorizeData, ConnectorCustomerData,
CreateOrderRequestData, ExternalVaultProxyPaymentsData, GiftCardBalanceCheckRequestData,
PaymentMethodTokenizationData, PaymentsApproveData, PaymentsAuthenticateData,
PaymentsAuthorizeData, PaymentsCancelData, PaymentsCancelPostCaptureData,
PaymentsCaptureData, PaymentsExtendAuthorizationData, PaymentsIncrementalAuthorizationData,
PaymentsPostAuthenticateData, PaymentsPostProcessingData, PaymentsPostSessionTokensData,
PaymentsPreAuthenticateData, PaymentsPreProcessingData, PaymentsRejectData,
PaymentsSessionData, PaymentsSyncData, PaymentsTaxCalculationData,
PaymentsUpdateMetadataData, SdkPaymentsSessionUpdateData, SetupMandateRequestData,
},
router_response_types::{
GiftCardBalanceCheckResponseData, PaymentsResponseData, TaxCalculationResponseData,
},
};
use crate::api;
/// Umbrella trait for a connector supporting the full payments surface;
/// note that some flows below (tax calculation, authenticate flows) are
/// defined in this module but are not supertraits of `Payment`.
pub trait Payment:
    api::ConnectorCommon
    + api::ConnectorSpecifications
    + api::ConnectorValidation
    + PaymentAuthorize
    + PaymentAuthorizeSessionToken
    + PaymentsCompleteAuthorize
    + PaymentSync
    + PaymentCapture
    + PaymentVoid
    + PaymentPostCaptureVoid
    + PaymentApprove
    + PaymentReject
    + MandateSetup
    + PaymentSession
    + PaymentToken
    + PaymentsPreProcessing
    + PaymentsPostProcessing
    + ConnectorCustomer
    + PaymentIncrementalAuthorization
    + PaymentExtendAuthorization
    + PaymentSessionUpdate
    + PaymentPostSessionTokens
    + PaymentUpdateMetadata
    + PaymentsCreateOrder
    + ExternalVaultProxyPaymentsCreateV1
    + PaymentsGiftCardBalanceCheck
{
}
/// Connector integration for the `Session` flow.
pub trait PaymentSession:
    api::ConnectorIntegration<Session, PaymentsSessionData, PaymentsResponseData>
{
}
/// Connector integration for setting up a mandate.
pub trait MandateSetup:
    api::ConnectorIntegration<SetupMandate, SetupMandateRequestData, PaymentsResponseData>
{
}
/// Connector integration for authorizing a payment.
pub trait PaymentAuthorize:
    api::ConnectorIntegration<Authorize, PaymentsAuthorizeData, PaymentsResponseData>
{
}
/// Connector integration for capturing a payment.
pub trait PaymentCapture:
    api::ConnectorIntegration<Capture, PaymentsCaptureData, PaymentsResponseData>
{
}
/// Connector integration for syncing payment status.
pub trait PaymentSync:
    api::ConnectorIntegration<PSync, PaymentsSyncData, PaymentsResponseData>
{
}
/// Connector integration for voiding (cancelling) a payment.
pub trait PaymentVoid:
    api::ConnectorIntegration<Void, PaymentsCancelData, PaymentsResponseData>
{
}
/// Connector integration for voiding a payment after capture.
pub trait PaymentPostCaptureVoid:
    api::ConnectorIntegration<PostCaptureVoid, PaymentsCancelPostCaptureData, PaymentsResponseData>
{
}
/// Connector integration for extending an authorization.
pub trait PaymentExtendAuthorization:
    api::ConnectorIntegration<
    ExtendAuthorization,
    PaymentsExtendAuthorizationData,
    PaymentsResponseData,
>
{
}
/// Connector integration for approving a payment.
pub trait PaymentApprove:
    api::ConnectorIntegration<Approve, PaymentsApproveData, PaymentsResponseData>
{
}
/// Connector integration for rejecting a payment.
pub trait PaymentReject:
    api::ConnectorIntegration<Reject, PaymentsRejectData, PaymentsResponseData>
{
}
/// Connector integration for tokenizing a payment method.
pub trait PaymentToken:
    api::ConnectorIntegration<PaymentMethodToken, PaymentMethodTokenizationData, PaymentsResponseData>
{
}
/// Connector integration for obtaining an authorize-session token.
pub trait PaymentAuthorizeSessionToken:
    api::ConnectorIntegration<AuthorizeSessionToken, AuthorizeSessionTokenData, PaymentsResponseData>
{
}
/// Connector integration for incrementally authorizing a payment.
pub trait PaymentIncrementalAuthorization:
    api::ConnectorIntegration<
    IncrementalAuthorization,
    PaymentsIncrementalAuthorizationData,
    PaymentsResponseData,
>
{
}
/// Connector integration for calculating tax on a payment.
pub trait TaxCalculation:
    api::ConnectorIntegration<CalculateTax, PaymentsTaxCalculationData, TaxCalculationResponseData>
{
}
/// Connector integration for updating an SDK session.
pub trait PaymentSessionUpdate:
    api::ConnectorIntegration<SdkSessionUpdate, SdkPaymentsSessionUpdateData, PaymentsResponseData>
{
}
/// Connector integration for creating post-session tokens.
pub trait PaymentPostSessionTokens:
    api::ConnectorIntegration<PostSessionTokens, PaymentsPostSessionTokensData, PaymentsResponseData>
{
}
/// Connector integration for updating payment metadata.
pub trait PaymentUpdateMetadata:
    api::ConnectorIntegration<UpdateMetadata, PaymentsUpdateMetadataData, PaymentsResponseData>
{
}
/// Connector integration for completing authorization (e.g. after redirect).
pub trait PaymentsCompleteAuthorize:
    api::ConnectorIntegration<CompleteAuthorize, CompleteAuthorizeData, PaymentsResponseData>
{
}
/// Connector integration for creating a connector-side customer.
pub trait ConnectorCustomer:
    api::ConnectorIntegration<CreateConnectorCustomer, ConnectorCustomerData, PaymentsResponseData>
{
}
/// Connector integration for pre-processing a payment.
pub trait PaymentsPreProcessing:
    api::ConnectorIntegration<PreProcessing, PaymentsPreProcessingData, PaymentsResponseData>
{
}
/// Connector integration for the pre-authenticate step.
pub trait PaymentsPreAuthenticate:
    api::ConnectorIntegration<PreAuthenticate, PaymentsPreAuthenticateData, PaymentsResponseData>
{
}
/// Connector integration for the authenticate step.
pub trait PaymentsAuthenticate:
    api::ConnectorIntegration<Authenticate, PaymentsAuthenticateData, PaymentsResponseData>
{
}
/// Connector integration for the post-authenticate step.
pub trait PaymentsPostAuthenticate:
    api::ConnectorIntegration<PostAuthenticate, PaymentsPostAuthenticateData, PaymentsResponseData>
{
}
/// Connector integration for post-processing a payment.
pub trait PaymentsPostProcessing:
    api::ConnectorIntegration<PostProcessing, PaymentsPostProcessingData, PaymentsResponseData>
{
}
/// Connector integration for creating an order on the connector.
pub trait PaymentsCreateOrder:
    api::ConnectorIntegration<CreateOrder, CreateOrderRequestData, PaymentsResponseData>
{
}
/// Connector integration for payments routed through an external vault proxy.
pub trait ExternalVaultProxyPaymentsCreateV1:
    api::ConnectorIntegration<ExternalVaultProxy, ExternalVaultProxyPaymentsData, PaymentsResponseData>
{
}
/// Connector integration for checking a gift-card balance.
pub trait PaymentsGiftCardBalanceCheck:
    api::ConnectorIntegration<
    GiftCardBalanceCheck,
    GiftCardBalanceCheckRequestData,
    GiftCardBalanceCheckResponseData,
>
{
}
// File: crates/hyperswitch_interfaces/src/api/disputes.rs
//! Disputes interface
use hyperswitch_domain_models::{
router_flow_types::dispute::{Accept, Defend, Dsync, Evidence, Fetch},
router_request_types::{
AcceptDisputeRequestData, DefendDisputeRequestData, DisputeSyncData,
FetchDisputesRequestData, SubmitEvidenceRequestData,
},
router_response_types::{
AcceptDisputeResponse, DefendDisputeResponse, DisputeSyncResponse, FetchDisputesResponse,
SubmitEvidenceResponse,
},
};
use crate::api::ConnectorIntegration;
/// Connector integration for accepting a dispute.
pub trait AcceptDispute:
    ConnectorIntegration<Accept, AcceptDisputeRequestData, AcceptDisputeResponse>
{
}
/// Connector integration for submitting dispute evidence.
pub trait SubmitEvidence:
    ConnectorIntegration<Evidence, SubmitEvidenceRequestData, SubmitEvidenceResponse>
{
}
/// Connector integration for defending a dispute.
pub trait DefendDispute:
    ConnectorIntegration<Defend, DefendDisputeRequestData, DefendDisputeResponse>
{
}
/// Umbrella trait for a connector supporting every dispute flow.
pub trait Dispute:
    super::ConnectorCommon
    + AcceptDispute
    + SubmitEvidence
    + DefendDispute
    + FetchDisputes
    + DisputeSync
{
}
/// Connector integration for fetching disputes.
pub trait FetchDisputes:
    ConnectorIntegration<Fetch, FetchDisputesRequestData, FetchDisputesResponse>
{
}
/// Connector integration for syncing a single dispute.
pub trait DisputeSync: ConnectorIntegration<Dsync, DisputeSyncData, DisputeSyncResponse> {}
// File: crates/hyperswitch_interfaces/src/api/disputes_v2.rs
//! Disputes V2 interface
use hyperswitch_domain_models::{
router_data_v2::DisputesFlowData,
router_flow_types::dispute::{Accept, Defend, Dsync, Evidence, Fetch},
router_request_types::{
AcceptDisputeRequestData, DefendDisputeRequestData, DisputeSyncData,
FetchDisputesRequestData, SubmitEvidenceRequestData,
},
router_response_types::{
AcceptDisputeResponse, DefendDisputeResponse, DisputeSyncResponse, FetchDisputesResponse,
SubmitEvidenceResponse,
},
};
use crate::api::ConnectorIntegrationV2;
/// Connector integration for accepting a dispute (V2 API).
pub trait AcceptDisputeV2:
    ConnectorIntegrationV2<Accept, DisputesFlowData, AcceptDisputeRequestData, AcceptDisputeResponse>
{
}
/// Connector integration for submitting dispute evidence (V2 API).
pub trait SubmitEvidenceV2:
    ConnectorIntegrationV2<
    Evidence,
    DisputesFlowData,
    SubmitEvidenceRequestData,
    SubmitEvidenceResponse,
>
{
}
/// Connector integration for defending a dispute (V2 API).
pub trait DefendDisputeV2:
    ConnectorIntegrationV2<Defend, DisputesFlowData, DefendDisputeRequestData, DefendDisputeResponse>
{
}
/// Umbrella trait for a connector supporting every dispute flow (V2 API).
pub trait DisputeV2:
    super::ConnectorCommon
    + AcceptDisputeV2
    + SubmitEvidenceV2
    + DefendDisputeV2
    + FetchDisputesV2
    + DisputeSyncV2
{
}
/// Connector integration for fetching disputes (V2 API).
pub trait FetchDisputesV2:
    ConnectorIntegrationV2<Fetch, DisputesFlowData, FetchDisputesRequestData, FetchDisputesResponse>
{
}
/// Connector integration for syncing a single dispute (V2 API).
pub trait DisputeSyncV2:
    ConnectorIntegrationV2<Dsync, DisputesFlowData, DisputeSyncData, DisputeSyncResponse>
{
}
// File: crates/hyperswitch_interfaces/src/api/revenue_recovery_v2.rs
//! Revenue Recovery Interface V2
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::{
BillingConnectorInvoiceSyncFlowData, BillingConnectorPaymentsSyncFlowData,
InvoiceRecordBackData,
},
router_flow_types::{
BillingConnectorInvoiceSync, BillingConnectorPaymentsSync, InvoiceRecordBack,
},
router_request_types::revenue_recovery::{
BillingConnectorInvoiceSyncRequest, BillingConnectorPaymentsSyncRequest,
InvoiceRecordBackRequest,
},
router_response_types::revenue_recovery::{
BillingConnectorInvoiceSyncResponse, BillingConnectorPaymentsSyncResponse,
InvoiceRecordBackResponse,
},
};
use crate::connector_integration_v2::ConnectorIntegrationV2;
#[cfg(all(feature = "v2", feature = "revenue_recovery"))]
/// Aggregate revenue-recovery capability (V2): billing-connector payments
/// sync, invoice record-back and invoice sync.
pub trait RevenueRecoveryV2:
    BillingConnectorPaymentsSyncIntegrationV2
    + RevenueRecoveryRecordBackV2
    + BillingConnectorInvoiceSyncIntegrationV2
{
}
#[cfg(not(all(feature = "v2", feature = "revenue_recovery")))]
/// Empty stand-in used when the `v2` + `revenue_recovery` features are off.
pub trait RevenueRecoveryV2 {}
/// Connector support for the `BillingConnectorPaymentsSync` flow (V2).
pub trait BillingConnectorPaymentsSyncIntegrationV2:
    ConnectorIntegrationV2<
        BillingConnectorPaymentsSync,
        BillingConnectorPaymentsSyncFlowData,
        BillingConnectorPaymentsSyncRequest,
        BillingConnectorPaymentsSyncResponse,
    >
{
}
/// Connector support for the `InvoiceRecordBack` flow (V2).
pub trait RevenueRecoveryRecordBackV2:
    ConnectorIntegrationV2<
        InvoiceRecordBack,
        InvoiceRecordBackData,
        InvoiceRecordBackRequest,
        InvoiceRecordBackResponse,
    >
{
}
/// Connector support for the `BillingConnectorInvoiceSync` flow (V2).
pub trait BillingConnectorInvoiceSyncIntegrationV2:
    ConnectorIntegrationV2<
        BillingConnectorInvoiceSync,
        BillingConnectorInvoiceSyncFlowData,
        BillingConnectorInvoiceSyncRequest,
        BillingConnectorInvoiceSyncResponse,
    >
{
}
// File: crates/hyperswitch_interfaces/src/api/files.rs
//! Files interface
use hyperswitch_domain_models::{
router_flow_types::files::{Retrieve, Upload},
router_request_types::{RetrieveFileRequestData, UploadFileRequestData},
router_response_types::{RetrieveFileResponse, UploadFileResponse},
};
use crate::{
api::{ConnectorCommon, ConnectorIntegration},
errors,
};
/// Purpose for which a file is uploaded to a connector.
/// Serialized in `snake_case` for both serde and strum display.
#[derive(Debug, serde::Deserialize, strum::Display, Clone, serde::Serialize)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum FilePurpose {
    /// The file is evidence attached to a dispute.
    DisputeEvidence,
}
/// Connector support for the file `Upload` flow.
pub trait UploadFile:
    ConnectorIntegration<Upload, UploadFileRequestData, UploadFileResponse>
{
}
/// Connector support for the file `Retrieve` flow.
pub trait RetrieveFile:
    ConnectorIntegration<Retrieve, RetrieveFileRequestData, RetrieveFileResponse>
{
}
/// File upload capability for a connector: bundles upload and retrieve
/// support together with a pre-upload validation hook.
pub trait FileUpload: ConnectorCommon + Sync + UploadFile + RetrieveFile {
    /// Validates a file before upload based on its purpose, size and MIME type.
    ///
    /// The default implementation rejects every file; connectors that support
    /// file uploads must override this with connector-specific rules.
    fn validate_file_upload(
        &self,
        _purpose: FilePurpose,
        _file_size: i32,
        _file_type: mime::Mime,
    ) -> common_utils::errors::CustomResult<(), errors::ConnectorError> {
        // The previous default failed with an empty `reason`, which surfaced as
        // an uninformative error downstream; state explicitly why it failed.
        Err(errors::ConnectorError::FileValidationFailed {
            reason: "file upload validation is not implemented for this connector".to_owned(),
        }
        .into())
    }
}
// File: crates/hyperswitch_interfaces/src/api/authentication_v2.rs
use hyperswitch_domain_models::{
router_data_v2::ExternalAuthenticationFlowData,
router_flow_types::authentication::{
Authentication, PostAuthentication, PreAuthentication, PreAuthenticationVersionCall,
},
router_request_types::authentication::{
ConnectorAuthenticationRequestData, ConnectorPostAuthenticationRequestData,
PreAuthNRequestData,
},
router_response_types::AuthenticationResponseData,
};
use crate::api::ConnectorIntegrationV2;
/// Connector support for the external `Authentication` flow (V2).
pub trait ConnectorAuthenticationV2:
    ConnectorIntegrationV2<
        Authentication,
        ExternalAuthenticationFlowData,
        ConnectorAuthenticationRequestData,
        AuthenticationResponseData,
    >
{
}
/// Connector support for the external `PreAuthentication` flow (V2).
pub trait ConnectorPreAuthenticationV2:
    ConnectorIntegrationV2<
        PreAuthentication,
        ExternalAuthenticationFlowData,
        PreAuthNRequestData,
        AuthenticationResponseData,
    >
{
}
/// Connector support for the `PreAuthenticationVersionCall` flow (V2).
pub trait ConnectorPreAuthenticationVersionCallV2:
    ConnectorIntegrationV2<
        PreAuthenticationVersionCall,
        ExternalAuthenticationFlowData,
        PreAuthNRequestData,
        AuthenticationResponseData,
    >
{
}
/// Connector support for the external `PostAuthentication` flow (V2).
pub trait ConnectorPostAuthenticationV2:
    ConnectorIntegrationV2<
        PostAuthentication,
        ExternalAuthenticationFlowData,
        ConnectorPostAuthenticationRequestData,
        AuthenticationResponseData,
    >
{
}
/// Aggregate external-authentication capability (V2): pre-auth, version call,
/// authentication and post-auth.
pub trait ExternalAuthenticationV2:
    super::ConnectorCommon
    + ConnectorAuthenticationV2
    + ConnectorPreAuthenticationV2
    + ConnectorPreAuthenticationVersionCallV2
    + ConnectorPostAuthenticationV2
{
}
// File: crates/hyperswitch_interfaces/src/api/payments_v2.rs
//! Payments V2 interface
use hyperswitch_domain_models::{
router_data_v2::{flow_common_types::GiftCardBalanceCheckFlowData, PaymentFlowData},
router_flow_types::{
payments::{
Approve, Authorize, AuthorizeSessionToken, CalculateTax, Capture, CompleteAuthorize,
CreateConnectorCustomer, CreateOrder, ExtendAuthorization, ExternalVaultProxy,
IncrementalAuthorization, PSync, PaymentMethodToken, PostCaptureVoid, PostProcessing,
PostSessionTokens, PreProcessing, Reject, SdkSessionUpdate, Session, SetupMandate,
UpdateMetadata, Void,
},
Authenticate, GiftCardBalanceCheck, PostAuthenticate, PreAuthenticate,
},
router_request_types::{
AuthorizeSessionTokenData, CompleteAuthorizeData, ConnectorCustomerData,
CreateOrderRequestData, ExternalVaultProxyPaymentsData, GiftCardBalanceCheckRequestData,
PaymentMethodTokenizationData, PaymentsApproveData, PaymentsAuthenticateData,
PaymentsAuthorizeData, PaymentsCancelData, PaymentsCancelPostCaptureData,
PaymentsCaptureData, PaymentsExtendAuthorizationData, PaymentsIncrementalAuthorizationData,
PaymentsPostAuthenticateData, PaymentsPostProcessingData, PaymentsPostSessionTokensData,
PaymentsPreAuthenticateData, PaymentsPreProcessingData, PaymentsRejectData,
PaymentsSessionData, PaymentsSyncData, PaymentsTaxCalculationData,
PaymentsUpdateMetadataData, SdkPaymentsSessionUpdateData, SetupMandateRequestData,
},
router_response_types::{
GiftCardBalanceCheckResponseData, PaymentsResponseData, TaxCalculationResponseData,
},
};
use crate::api::{
ConnectorCommon, ConnectorIntegrationV2, ConnectorSpecifications, ConnectorValidation,
};
/// Connector support for the payment `Authorize` flow (V2).
pub trait PaymentAuthorizeV2:
    ConnectorIntegrationV2<Authorize, PaymentFlowData, PaymentsAuthorizeData, PaymentsResponseData>
{
}
/// Connector support for the `AuthorizeSessionToken` flow (V2).
pub trait PaymentAuthorizeSessionTokenV2:
    ConnectorIntegrationV2<
        AuthorizeSessionToken,
        PaymentFlowData,
        AuthorizeSessionTokenData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PSync` (payment sync) flow (V2).
pub trait PaymentSyncV2:
    ConnectorIntegrationV2<PSync, PaymentFlowData, PaymentsSyncData, PaymentsResponseData>
{
}
/// Connector support for the payment `Void` flow (V2).
pub trait PaymentVoidV2:
    ConnectorIntegrationV2<Void, PaymentFlowData, PaymentsCancelData, PaymentsResponseData>
{
}
/// Connector support for the `PostCaptureVoid` flow (V2).
pub trait PaymentPostCaptureVoidV2:
    ConnectorIntegrationV2<
        PostCaptureVoid,
        PaymentFlowData,
        PaymentsCancelPostCaptureData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the payment `Approve` flow (V2).
pub trait PaymentApproveV2:
    ConnectorIntegrationV2<Approve, PaymentFlowData, PaymentsApproveData, PaymentsResponseData>
{
}
/// Connector support for the payment `Reject` flow (V2).
pub trait PaymentRejectV2:
    ConnectorIntegrationV2<Reject, PaymentFlowData, PaymentsRejectData, PaymentsResponseData>
{
}
/// Connector support for the payment `Capture` flow (V2).
pub trait PaymentCaptureV2:
    ConnectorIntegrationV2<Capture, PaymentFlowData, PaymentsCaptureData, PaymentsResponseData>
{
}
/// Connector support for the payment `Session` flow (V2).
pub trait PaymentSessionV2:
    ConnectorIntegrationV2<Session, PaymentFlowData, PaymentsSessionData, PaymentsResponseData>
{
}
/// Connector support for the `SetupMandate` flow (V2).
pub trait MandateSetupV2:
    ConnectorIntegrationV2<SetupMandate, PaymentFlowData, SetupMandateRequestData, PaymentsResponseData>
{
}
/// Connector support for the `IncrementalAuthorization` flow (V2).
pub trait PaymentIncrementalAuthorizationV2:
    ConnectorIntegrationV2<
        IncrementalAuthorization,
        PaymentFlowData,
        PaymentsIncrementalAuthorizationData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `ExtendAuthorization` flow (V2).
pub trait PaymentExtendAuthorizationV2:
    ConnectorIntegrationV2<
        ExtendAuthorization,
        PaymentFlowData,
        PaymentsExtendAuthorizationData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `CalculateTax` flow (V2).
pub trait TaxCalculationV2:
    ConnectorIntegrationV2<
        CalculateTax,
        PaymentFlowData,
        PaymentsTaxCalculationData,
        TaxCalculationResponseData,
    >
{
}
/// Connector support for the `SdkSessionUpdate` flow (V2).
pub trait PaymentSessionUpdateV2:
    ConnectorIntegrationV2<
        SdkSessionUpdate,
        PaymentFlowData,
        SdkPaymentsSessionUpdateData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PostSessionTokens` flow (V2).
pub trait PaymentPostSessionTokensV2:
    ConnectorIntegrationV2<
        PostSessionTokens,
        PaymentFlowData,
        PaymentsPostSessionTokensData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `CreateOrder` flow (V2).
pub trait PaymentCreateOrderV2:
    ConnectorIntegrationV2<CreateOrder, PaymentFlowData, CreateOrderRequestData, PaymentsResponseData>
{
}
/// Connector support for the `UpdateMetadata` flow (V2).
pub trait PaymentUpdateMetadataV2:
    ConnectorIntegrationV2<
        UpdateMetadata,
        PaymentFlowData,
        PaymentsUpdateMetadataData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `CompleteAuthorize` flow (V2).
pub trait PaymentsCompleteAuthorizeV2:
    ConnectorIntegrationV2<
        CompleteAuthorize,
        PaymentFlowData,
        CompleteAuthorizeData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PaymentMethodToken` (tokenization) flow (V2).
pub trait PaymentTokenV2:
    ConnectorIntegrationV2<
        PaymentMethodToken,
        PaymentFlowData,
        PaymentMethodTokenizationData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `CreateConnectorCustomer` flow (V2).
pub trait ConnectorCustomerV2:
    ConnectorIntegrationV2<
        CreateConnectorCustomer,
        PaymentFlowData,
        ConnectorCustomerData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PreProcessing` flow (V2).
pub trait PaymentsPreProcessingV2:
    ConnectorIntegrationV2<
        PreProcessing,
        PaymentFlowData,
        PaymentsPreProcessingData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `GiftCardBalanceCheck` flow (V2).
pub trait PaymentsGiftCardBalanceCheckV2:
    ConnectorIntegrationV2<
        GiftCardBalanceCheck,
        GiftCardBalanceCheckFlowData,
        GiftCardBalanceCheckRequestData,
        GiftCardBalanceCheckResponseData,
    >
{
}
/// Connector support for the `PreAuthenticate` flow (V2).
pub trait PaymentsPreAuthenticateV2:
    ConnectorIntegrationV2<
        PreAuthenticate,
        PaymentFlowData,
        PaymentsPreAuthenticateData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `Authenticate` flow (V2).
pub trait PaymentsAuthenticateV2:
    ConnectorIntegrationV2<
        Authenticate,
        PaymentFlowData,
        PaymentsAuthenticateData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PostAuthenticate` flow (V2).
pub trait PaymentsPostAuthenticateV2:
    ConnectorIntegrationV2<
        PostAuthenticate,
        PaymentFlowData,
        PaymentsPostAuthenticateData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `PostProcessing` flow (V2).
pub trait PaymentsPostProcessingV2:
    ConnectorIntegrationV2<
        PostProcessing,
        PaymentFlowData,
        PaymentsPostProcessingData,
        PaymentsResponseData,
    >
{
}
/// Connector support for the `ExternalVaultProxy` payments flow (V2).
pub trait ExternalVaultProxyPaymentsCreate:
    ConnectorIntegrationV2<
        ExternalVaultProxy,
        PaymentFlowData,
        ExternalVaultProxyPaymentsData,
        PaymentsResponseData,
    >
{
}
/// Aggregate payment capability for a connector (V2): every payment-related
/// flow trait above plus common connector behaviour and validation.
pub trait PaymentV2:
    ConnectorCommon
    + ConnectorSpecifications
    + ConnectorValidation
    + PaymentAuthorizeV2
    + PaymentAuthorizeSessionTokenV2
    + PaymentsCompleteAuthorizeV2
    + PaymentSyncV2
    + PaymentCaptureV2
    + PaymentVoidV2
    + PaymentPostCaptureVoidV2
    + PaymentApproveV2
    + PaymentRejectV2
    + MandateSetupV2
    + PaymentSessionV2
    + PaymentTokenV2
    + PaymentsPreProcessingV2
    + PaymentsPostProcessingV2
    + ConnectorCustomerV2
    + PaymentIncrementalAuthorizationV2
    + PaymentExtendAuthorizationV2
    + TaxCalculationV2
    + PaymentSessionUpdateV2
    + PaymentPostSessionTokensV2
    + PaymentUpdateMetadataV2
    + PaymentCreateOrderV2
    + ExternalVaultProxyPaymentsCreate
    + PaymentsGiftCardBalanceCheckV2
{
}
// File: crates/hyperswitch_interfaces/src/api/revenue_recovery.rs
//! Revenue Recovery Interface
use hyperswitch_domain_models::{
router_flow_types::{
BillingConnectorInvoiceSync, BillingConnectorPaymentsSync, InvoiceRecordBack,
},
router_request_types::revenue_recovery::{
BillingConnectorInvoiceSyncRequest, BillingConnectorPaymentsSyncRequest,
InvoiceRecordBackRequest,
},
router_response_types::revenue_recovery::{
BillingConnectorInvoiceSyncResponse, BillingConnectorPaymentsSyncResponse,
InvoiceRecordBackResponse,
},
};
#[cfg(all(feature = "v2", feature = "revenue_recovery"))]
use super::ConnectorCommon;
use super::ConnectorIntegration;
/// Aggregate revenue-recovery capability: billing-connector payments sync,
/// invoice record-back and invoice sync.
#[cfg(all(feature = "v2", feature = "revenue_recovery"))]
pub trait RevenueRecovery:
    ConnectorCommon
    + BillingConnectorPaymentsSyncIntegration
    + RevenueRecoveryRecordBack
    + BillingConnectorInvoiceSyncIntegration
{
}
/// Connector support for the `BillingConnectorPaymentsSync` flow.
pub trait BillingConnectorPaymentsSyncIntegration:
    ConnectorIntegration<
        BillingConnectorPaymentsSync,
        BillingConnectorPaymentsSyncRequest,
        BillingConnectorPaymentsSyncResponse,
    >
{
}
/// Connector support for the `InvoiceRecordBack` flow.
pub trait RevenueRecoveryRecordBack:
    ConnectorIntegration<InvoiceRecordBack, InvoiceRecordBackRequest, InvoiceRecordBackResponse>
{
}
/// Connector support for the `BillingConnectorInvoiceSync` flow.
pub trait BillingConnectorInvoiceSyncIntegration:
    ConnectorIntegration<
        BillingConnectorInvoiceSync,
        BillingConnectorInvoiceSyncRequest,
        BillingConnectorInvoiceSyncResponse,
    >
{
}
#[cfg(not(all(feature = "v2", feature = "revenue_recovery")))]
/// Empty stand-in used when the `v2` + `revenue_recovery` features are off.
pub trait RevenueRecovery {}
// File: crates/hyperswitch_interfaces/src/api/authentication.rs
use hyperswitch_domain_models::{
router_flow_types::authentication::{
Authentication, PostAuthentication, PreAuthentication, PreAuthenticationVersionCall,
},
router_request_types::authentication::{
ConnectorAuthenticationRequestData, ConnectorPostAuthenticationRequestData,
PreAuthNRequestData,
},
router_response_types::AuthenticationResponseData,
};
use crate::api::ConnectorIntegration;
/// Connector support for the external `Authentication` flow.
pub trait ConnectorAuthentication:
    ConnectorIntegration<Authentication, ConnectorAuthenticationRequestData, AuthenticationResponseData>
{
}
/// Connector support for the external `PreAuthentication` flow.
pub trait ConnectorPreAuthentication:
    ConnectorIntegration<PreAuthentication, PreAuthNRequestData, AuthenticationResponseData>
{
}
/// Connector support for the `PreAuthenticationVersionCall` flow.
pub trait ConnectorPreAuthenticationVersionCall:
    ConnectorIntegration<PreAuthenticationVersionCall, PreAuthNRequestData, AuthenticationResponseData>
{
}
/// Connector support for the external `PostAuthentication` flow.
pub trait ConnectorPostAuthentication:
    ConnectorIntegration<
        PostAuthentication,
        ConnectorPostAuthenticationRequestData,
        AuthenticationResponseData,
    >
{
}
/// Aggregate external-authentication capability: pre-auth, version call,
/// authentication and post-auth.
pub trait ExternalAuthentication:
    super::ConnectorCommon
    + ConnectorAuthentication
    + ConnectorPreAuthentication
    + ConnectorPreAuthenticationVersionCall
    + ConnectorPostAuthentication
{
}
// File: crates/hyperswitch_interfaces/src/api/files_v2.rs
//! Files V2 interface
use hyperswitch_domain_models::{
router_data_v2::FilesFlowData,
router_flow_types::{Retrieve, Upload},
router_request_types::{RetrieveFileRequestData, UploadFileRequestData},
router_response_types::{RetrieveFileResponse, UploadFileResponse},
};
use crate::api::{errors, files::FilePurpose, ConnectorCommon, ConnectorIntegrationV2};
/// Connector support for the file `Upload` flow (V2).
pub trait UploadFileV2:
    ConnectorIntegrationV2<Upload, FilesFlowData, UploadFileRequestData, UploadFileResponse>
{
}
/// Connector support for the file `Retrieve` flow (V2).
pub trait RetrieveFileV2:
    ConnectorIntegrationV2<Retrieve, FilesFlowData, RetrieveFileRequestData, RetrieveFileResponse>
{
}
/// File upload capability for a connector (V2): bundles upload and retrieve
/// support together with a pre-upload validation hook.
pub trait FileUploadV2: ConnectorCommon + Sync + UploadFileV2 + RetrieveFileV2 {
    /// Validates a file before upload based on its purpose, size and MIME type.
    ///
    /// The default implementation rejects every file; connectors that support
    /// file uploads must override this with connector-specific rules.
    fn validate_file_upload_v2(
        &self,
        _purpose: FilePurpose,
        _file_size: i32,
        _file_type: mime::Mime,
    ) -> common_utils::errors::CustomResult<(), errors::ConnectorError> {
        // The previous default failed with an empty `reason`, which surfaced as
        // an uninformative error downstream; state explicitly why it failed.
        Err(errors::ConnectorError::FileValidationFailed {
            reason: "file upload validation is not implemented for this connector".to_owned(),
        }
        .into())
    }
}
// File: crates/hyperswitch_interfaces/src/api/vault.rs
//! Vault interface
use hyperswitch_domain_models::{
router_flow_types::vault::{
ExternalVaultCreateFlow, ExternalVaultDeleteFlow, ExternalVaultInsertFlow,
ExternalVaultRetrieveFlow,
},
router_request_types::VaultRequestData,
router_response_types::VaultResponseData,
};
use super::ConnectorCommon;
use crate::api::ConnectorIntegration;
/// Connector support for the `ExternalVaultInsertFlow`.
pub trait ExternalVaultInsert:
    ConnectorIntegration<ExternalVaultInsertFlow, VaultRequestData, VaultResponseData>
{
}
/// Connector support for the `ExternalVaultRetrieveFlow`.
pub trait ExternalVaultRetrieve:
    ConnectorIntegration<ExternalVaultRetrieveFlow, VaultRequestData, VaultResponseData>
{
}
/// Connector support for the `ExternalVaultDeleteFlow`.
pub trait ExternalVaultDelete:
    ConnectorIntegration<ExternalVaultDeleteFlow, VaultRequestData, VaultResponseData>
{
}
/// Connector support for the `ExternalVaultCreateFlow`.
pub trait ExternalVaultCreate:
    ConnectorIntegration<ExternalVaultCreateFlow, VaultRequestData, VaultResponseData>
{
}
/// Aggregate external-vault capability: insert, retrieve, delete and create.
pub trait ExternalVault:
    ConnectorCommon
    + ExternalVaultInsert
    + ExternalVaultRetrieve
    + ExternalVaultDelete
    + ExternalVaultCreate
{
}
// File: crates/hyperswitch_interfaces/src/api/vault_v2.rs
//! Vault V2 interface
use hyperswitch_domain_models::{
router_data_v2::flow_common_types::VaultConnectorFlowData,
router_flow_types::vault::{
ExternalVaultCreateFlow, ExternalVaultDeleteFlow, ExternalVaultInsertFlow,
ExternalVaultRetrieveFlow,
},
router_request_types::VaultRequestData,
router_response_types::VaultResponseData,
};
use super::ConnectorCommon;
use crate::api::ConnectorIntegrationV2;
/// Connector support for the `ExternalVaultInsertFlow` (V2).
pub trait ExternalVaultInsertV2:
    ConnectorIntegrationV2<
        ExternalVaultInsertFlow,
        VaultConnectorFlowData,
        VaultRequestData,
        VaultResponseData,
    >
{
}
/// Connector support for the `ExternalVaultRetrieveFlow` (V2).
pub trait ExternalVaultRetrieveV2:
    ConnectorIntegrationV2<
        ExternalVaultRetrieveFlow,
        VaultConnectorFlowData,
        VaultRequestData,
        VaultResponseData,
    >
{
}
/// Connector support for the `ExternalVaultDeleteFlow` (V2).
pub trait ExternalVaultDeleteV2:
    ConnectorIntegrationV2<
        ExternalVaultDeleteFlow,
        VaultConnectorFlowData,
        VaultRequestData,
        VaultResponseData,
    >
{
}
/// Connector support for the `ExternalVaultCreateFlow` (V2).
pub trait ExternalVaultCreateV2:
    ConnectorIntegrationV2<
        ExternalVaultCreateFlow,
        VaultConnectorFlowData,
        VaultRequestData,
        VaultResponseData,
    >
{
}
/// Aggregate external-vault capability (V2): insert, retrieve, delete, create.
pub trait ExternalVaultV2:
    ConnectorCommon
    + ExternalVaultInsertV2
    + ExternalVaultRetrieveV2
    + ExternalVaultDeleteV2
    + ExternalVaultCreateV2
{
}
</module>
|
{
"crate": "hyperswitch_interfaces",
"file": null,
"files": [
"crates/hyperswitch_interfaces/src/api/fraud_check_v2.rs",
"crates/hyperswitch_interfaces/src/api/payouts_v2.rs",
"crates/hyperswitch_interfaces/src/api/refunds.rs",
"crates/hyperswitch_interfaces/src/api/subscriptions.rs",
"crates/hyperswitch_interfaces/src/api/payouts.rs",
"crates/hyperswitch_interfaces/src/api/refunds_v2.rs",
"crates/hyperswitch_interfaces/src/api/fraud_check.rs",
"crates/hyperswitch_interfaces/src/api/subscriptions_v2.rs",
"crates/hyperswitch_interfaces/src/api/payments.rs",
"crates/hyperswitch_interfaces/src/api/disputes.rs",
"crates/hyperswitch_interfaces/src/api/disputes_v2.rs",
"crates/hyperswitch_interfaces/src/api/revenue_recovery_v2.rs",
"crates/hyperswitch_interfaces/src/api/files.rs",
"crates/hyperswitch_interfaces/src/api/authentication_v2.rs",
"crates/hyperswitch_interfaces/src/api/payments_v2.rs",
"crates/hyperswitch_interfaces/src/api/revenue_recovery.rs",
"crates/hyperswitch_interfaces/src/api/authentication.rs",
"crates/hyperswitch_interfaces/src/api/files_v2.rs",
"crates/hyperswitch_interfaces/src/api/vault.rs",
"crates/hyperswitch_interfaces/src/api/vault_v2.rs"
],
"module": "crates/hyperswitch_interfaces/src/api",
"num_files": 20,
"token_count": 9343
}
|
module_7325050378032752355
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/compatibility/stripe/payment_intents
Files: 1
</path>
<module>
// File: crates/router/src/compatibility/stripe/payment_intents/types.rs
use std::str::FromStr;
use api_models::payments;
use common_types::payments as common_payments_types;
use common_utils::{
crypto::Encryptable,
date_time,
ext_traits::StringExt,
id_type,
pii::{IpAddress, SecretSerdeValue, UpiVpaMaskingStrategy},
types::MinorUnit,
};
use error_stack::ResultExt;
use serde::{Deserialize, Serialize};
use time::PrimitiveDateTime;
use crate::{
compatibility::stripe::refunds::types as stripe_refunds,
connector::utils::AddressData,
consts,
core::errors,
pii::{Email, PeekInterface},
types::{
api::{admin, enums as api_enums},
transformers::{ForeignFrom, ForeignTryFrom},
},
};
/// Billing details accepted by the Stripe-compatible payment intent API.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct StripeBillingDetails {
    /// Billing address.
    pub address: Option<AddressDetails>,
    /// Billing email address.
    pub email: Option<Email>,
    /// Name of the billing contact.
    pub name: Option<String>,
    /// Billing phone number.
    pub phone: Option<masking::Secret<String>>,
}
impl From<StripeBillingDetails> for payments::Address {
    /// Maps Stripe-style billing details onto the internal address type.
    /// The phone country code is derived from the billing address country.
    fn from(billing: StripeBillingDetails) -> Self {
        let country_code = billing
            .address
            .as_ref()
            .and_then(|addr| addr.country.as_ref())
            .map(|country| country.to_string());
        let address = billing.address.map(|addr| payments::AddressDetails {
            city: addr.city,
            country: addr.country,
            line1: addr.line1,
            line2: addr.line2,
            line3: None,
            zip: addr.postal_code,
            state: addr.state,
            first_name: None,
            last_name: None,
            origin_zip: None,
        });
        Self {
            address,
            email: billing.email,
            phone: Some(payments::PhoneDetails {
                number: billing.phone,
                country_code,
            }),
        }
    }
}
/// Card details accepted by the Stripe-compatible payment intent API.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct StripeCard {
    /// Card number (PAN).
    pub number: cards::CardNumber,
    /// Expiry month.
    pub exp_month: masking::Secret<String>,
    /// Expiry year.
    pub exp_year: masking::Secret<String>,
    /// Card verification code.
    pub cvc: masking::Secret<String>,
    /// Name of the card holder.
    pub holder_name: Option<masking::Secret<String>>,
}
// NOTE: an ApplePay wallet param is not documented in the Stripe API docs;
// this variant is an extension of the Stripe-compatible surface.
#[derive(Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum StripeWallet {
    /// Apple Pay wallet payment data.
    ApplePay(payments::ApplePayWalletData),
}
/// UPI payment details accepted by the Stripe-compatible API.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct StripeUpi {
    /// UPI virtual payment address (masked when logged).
    pub vpa_id: masking::Secret<String, UpiVpaMaskingStrategy>,
}
/// Payment method type accepted by the Stripe-compatible API
/// (serialized in `snake_case`); defaults to `Card`.
#[derive(Debug, Default, Serialize, PartialEq, Eq, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentMethodType {
    #[default]
    Card,
    Wallet,
    Upi,
    BankRedirect,
    RealTimePayment,
}
impl From<StripePaymentMethodType> for api_enums::PaymentMethod {
    /// One-to-one mapping from the Stripe-compatible payment method type to
    /// the internal payment method enum.
    fn from(method_type: StripePaymentMethodType) -> Self {
        // Local alias keeps the match arms compact.
        type S = StripePaymentMethodType;
        match method_type {
            S::Card => Self::Card,
            S::Wallet => Self::Wallet,
            S::Upi => Self::Upi,
            S::BankRedirect => Self::BankRedirect,
            S::RealTimePayment => Self::RealTimePayment,
        }
    }
}
/// Payment method payload of a Stripe-compatible payment intent request.
#[derive(Default, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct StripePaymentMethodData {
    /// Payment method type; deserialized from the wire field `type`.
    #[serde(rename = "type")]
    pub stype: StripePaymentMethodType,
    /// Billing details attached to the payment method.
    pub billing_details: Option<StripeBillingDetails>,
    // Flattened so the variant's fields sit directly on the request object.
    #[serde(flatten)]
    pub payment_method_details: Option<StripePaymentMethodDetails>, // enum
    /// Free-form metadata supplied by the caller.
    pub metadata: Option<SecretSerdeValue>,
}
/// Concrete payment method details, keyed by `snake_case` variant name.
#[derive(PartialEq, Eq, Deserialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentMethodDetails {
    Card(StripeCard),
    Wallet(StripeWallet),
    Upi(StripeUpi),
}
impl From<StripeCard> for payments::Card {
    /// Converts Stripe card details into the internal card representation.
    /// Fields the Stripe request does not carry (issuer, network, type,
    /// issuing country, bank code, nickname) are left unset.
    fn from(value: StripeCard) -> Self {
        Self {
            card_number: value.number,
            card_exp_month: value.exp_month,
            card_exp_year: value.exp_year,
            card_cvc: value.cvc,
            card_holder_name: value.holder_name,
            card_issuer: None,
            card_network: None,
            card_type: None,
            card_issuing_country: None,
            bank_code: None,
            nick_name: None,
        }
    }
}
impl From<StripeWallet> for payments::WalletData {
    /// Unwraps the (currently single-variant) Stripe wallet into the
    /// internal wallet data type.
    fn from(value: StripeWallet) -> Self {
        // Irrefutable pattern: `StripeWallet` has exactly one variant.
        let StripeWallet::ApplePay(apple_pay) = value;
        Self::ApplePay(apple_pay)
    }
}
impl From<StripeUpi> for payments::UpiData {
fn from(upi_data: StripeUpi) -> Self {
Self::UpiCollect(payments::UpiCollectData {
vpa_id: Some(upi_data.vpa_id),
})
}
}
impl From<StripePaymentMethodDetails> for payments::PaymentMethodData {
    /// Dispatches each Stripe payment-method variant to its internal
    /// counterpart via the per-variant `From` impls.
    fn from(details: StripePaymentMethodDetails) -> Self {
        match details {
            StripePaymentMethodDetails::Card(card) => Self::Card(card.into()),
            StripePaymentMethodDetails::Wallet(wallet) => Self::Wallet(wallet.into()),
            StripePaymentMethodDetails::Upi(upi) => Self::Upi(upi.into()),
        }
    }
}
/// Shipping details accepted by the Stripe-compatible payment intent API.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct Shipping {
    /// Shipping destination address.
    pub address: AddressDetails,
    /// Name of the recipient.
    pub name: Option<masking::Secret<String>>,
    /// Shipping carrier.
    pub carrier: Option<String>,
    /// Recipient phone number.
    pub phone: Option<masking::Secret<String>>,
    /// Carrier tracking number.
    pub tracking_number: Option<masking::Secret<String>>,
}
/// Postal address fields shared by billing and shipping details.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct AddressDetails {
    pub city: Option<String>,
    /// Two-letter ISO country code.
    pub country: Option<api_enums::CountryAlpha2>,
    pub line1: Option<masking::Secret<String>>,
    pub line2: Option<masking::Secret<String>>,
    pub postal_code: Option<masking::Secret<String>>,
    pub state: Option<masking::Secret<String>>,
}
impl From<Shipping> for payments::Address {
    /// Maps Stripe shipping details onto the internal address type. The
    /// recipient name is carried over as the address first name; the phone
    /// country code is taken from the shipping address country.
    fn from(shipping: Shipping) -> Self {
        let addr = shipping.address;
        Self {
            phone: Some(payments::PhoneDetails {
                number: shipping.phone,
                country_code: addr.country.map(|country| country.to_string()),
            }),
            email: None,
            address: Some(payments::AddressDetails {
                city: addr.city,
                country: addr.country,
                line1: addr.line1,
                line2: addr.line2,
                line3: None,
                zip: addr.postal_code,
                state: addr.state,
                first_name: shipping.name,
                last_name: None,
                origin_zip: None,
            }),
        }
    }
}
/// Mandate details accepted by the Stripe-compatible payment intent API.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct MandateData {
    /// How and when the customer accepted the mandate.
    pub customer_acceptance: CustomerAcceptance,
    /// Kind of mandate requested.
    pub mandate_type: Option<StripeMandateType>,
    /// Mandate amount in minor units.
    pub amount: Option<i64>,
    // Wire format is a unix timestamp; deserialized via the custom serde helper.
    #[serde(default, with = "common_utils::custom_serde::timestamp::option")]
    pub start_date: Option<PrimitiveDateTime>,
    #[serde(default, with = "common_utils::custom_serde::timestamp::option")]
    pub end_date: Option<PrimitiveDateTime>,
}
/// Record of the customer accepting a mandate.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone, Debug)]
pub struct CustomerAcceptance {
    /// Acceptance channel; deserialized from the wire field `type`.
    #[serde(rename = "type")]
    pub acceptance_type: Option<AcceptanceType>,
    /// When the mandate was accepted.
    pub accepted_at: Option<PrimitiveDateTime>,
    /// Online-acceptance evidence (IP address and user agent).
    pub online: Option<OnlineMandate>,
}
/// Channel through which a mandate was accepted; defaults to `Offline`.
#[derive(Default, Debug, serde::Deserialize, serde::Serialize, PartialEq, Eq, Clone)]
#[serde(rename_all = "lowercase")]
pub enum AcceptanceType {
    Online,
    #[default]
    Offline,
}
/// Evidence for an online mandate acceptance. Unknown fields are rejected
/// during deserialization (`deny_unknown_fields`).
#[derive(Default, Eq, PartialEq, Debug, serde::Deserialize, serde::Serialize, Clone)]
#[serde(deny_unknown_fields)]
pub struct OnlineMandate {
    /// IP address from which the customer accepted (masked when logged).
    pub ip_address: masking::Secret<String, IpAddress>,
    /// Browser user agent at acceptance time.
    pub user_agent: String,
}
/// Incoming Stripe-compatible payment intent request, as deserialized from
/// the wire. Converted into the internal `payments::PaymentsRequest` via
/// `TryFrom`.
#[derive(Deserialize, Clone, Debug)]
pub struct StripePaymentIntentRequest {
    pub id: Option<id_type::PaymentId>,
    pub amount: Option<i64>, // amount in cents, hence passed as integer
    /// Connectors to route to; only the first entry is used for routing.
    pub connector: Option<Vec<api_enums::RoutableConnectors>>,
    pub currency: Option<String>,
    #[serde(rename = "amount_to_capture")]
    pub amount_capturable: Option<i64>,
    pub confirm: Option<bool>,
    pub capture_method: Option<api_enums::CaptureMethod>,
    pub customer: Option<id_type::CustomerId>,
    pub description: Option<String>,
    pub payment_method_data: Option<StripePaymentMethodData>,
    pub receipt_email: Option<Email>,
    pub return_url: Option<url::Url>,
    pub setup_future_usage: Option<api_enums::FutureUsage>,
    pub shipping: Option<Shipping>,
    pub statement_descriptor: Option<String>,
    pub statement_descriptor_suffix: Option<String>,
    pub metadata: Option<serde_json::Value>,
    pub client_secret: Option<masking::Secret<String>>,
    pub payment_method_options: Option<StripePaymentMethodOptions>,
    pub merchant_connector_details: Option<admin::MerchantConnectorDetailsWrap>,
    pub mandate: Option<String>,
    pub off_session: Option<bool>,
    pub payment_method_types: Option<api_enums::PaymentMethodType>,
    /// Client IP; must parse as an IPv4/IPv6 literal.
    pub receipt_ipaddress: Option<String>,
    pub user_agent: Option<String>,
    pub mandate_data: Option<MandateData>,
    // The following fields are accepted for Stripe API compatibility but
    // are not consumed by the conversion below.
    pub automatic_payment_methods: Option<SecretSerdeValue>, // not used
    pub payment_method: Option<String>,                      // not used
    pub confirmation_method: Option<String>,                 // not used
    pub error_on_requires_action: Option<String>,            // not used
    pub radar_options: Option<SecretSerdeValue>,             // not used
    pub connector_metadata: Option<payments::ConnectorMetadata>,
}
impl TryFrom<StripePaymentIntentRequest> for payments::PaymentsRequest {
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// Translates a Stripe-compatible payment intent request into the
    /// internal payments request.
    ///
    /// Errors with `InvalidDataFormat` when `receipt_ipaddress` is not a
    /// valid IP literal, `InvalidDataValue` when `currency` does not parse,
    /// and `InternalServerError` when routing or browser info fail to
    /// serialize.
    fn try_from(item: StripePaymentIntentRequest) -> errors::RouterResult<Self> {
        // Only the first connector in the list is honoured for routing.
        let routable_connector: Option<api_enums::RoutableConnectors> =
            item.connector.and_then(|v| v.into_iter().next());
        // Wrap the chosen connector in a single-connector routing algorithm,
        // serialized to JSON as the internal request expects.
        let routing = routable_connector
            .map(|connector| {
                api_models::routing::StaticRoutingAlgorithm::Single(Box::new(
                    api_models::routing::RoutableConnectorChoice {
                        choice_kind: api_models::routing::RoutableChoiceKind::FullStruct,
                        connector,
                        merchant_connector_id: None,
                    },
                ))
            })
            .map(|r| {
                serde_json::to_value(r)
                    .change_context(errors::ApiErrorResponse::InternalServerError)
                    .attach_printable("converting to routing failed")
            })
            .transpose()?;
        // Reject any receipt IP that is not a valid IPv4/IPv6 literal.
        let ip_address = item
            .receipt_ipaddress
            .map(|ip| std::net::IpAddr::from_str(ip.as_str()))
            .transpose()
            .change_context(errors::ApiErrorResponse::InvalidDataFormat {
                field_name: "receipt_ipaddress".to_string(),
                expected_format: "127.0.0.1".to_string(),
            })?;
        // Stripe amounts arrive as integer minor units (e.g. cents).
        let amount = item.amount.map(|amount| MinorUnit::new(amount).into());
        // Billing details ride along with the payment method data. When no
        // explicit method details were sent, derive a payment method payload
        // from the declared payment method type instead.
        let payment_method_data = item.payment_method_data.as_ref().map(|pmd| {
            let billing = pmd.billing_details.clone().map(payments::Address::from);
            let payment_method_data = match pmd.payment_method_details.as_ref() {
                Some(spmd) => Some(payments::PaymentMethodData::from(spmd.to_owned())),
                None => get_pmd_based_on_payment_method_type(
                    item.payment_method_types,
                    billing.clone().map(From::from),
                ),
            };
            payments::PaymentMethodDataRequest {
                payment_method_data,
                billing,
            }
        });
        let request = Ok(Self {
            payment_id: item.id.map(payments::PaymentIdType::PaymentIntentId),
            amount,
            currency: item
                .currency
                .as_ref()
                .map(|c| c.to_uppercase().parse_enum("currency"))
                .transpose()
                .change_context(errors::ApiErrorResponse::InvalidDataValue {
                    field_name: "currency",
                })?,
            capture_method: item.capture_method,
            amount_to_capture: item.amount_capturable.map(MinorUnit::new),
            confirm: item.confirm,
            customer_id: item.customer,
            email: item.receipt_email,
            phone: item.shipping.as_ref().and_then(|s| s.phone.clone()),
            description: item.description,
            return_url: item.return_url,
            payment_method_data,
            payment_method: item
                .payment_method_data
                .as_ref()
                .map(|pmd| api_enums::PaymentMethod::from(pmd.stype.to_owned())),
            shipping: item
                .shipping
                .as_ref()
                .map(|s| payments::Address::from(s.to_owned())),
            billing: item
                .payment_method_data
                .and_then(|pmd| pmd.billing_details.map(payments::Address::from)),
            statement_descriptor_name: item.statement_descriptor,
            statement_descriptor_suffix: item.statement_descriptor_suffix,
            metadata: item.metadata,
            client_secret: item.client_secret.map(|s| s.peek().clone()),
            // `StripePaymentMethodOptions` only has the `Card` variant, so
            // destructuring it directly is irrefutable.
            authentication_type: match item.payment_method_options {
                Some(pmo) => {
                    let StripePaymentMethodOptions::Card {
                        request_three_d_secure,
                    }: StripePaymentMethodOptions = pmo;
                    Some(api_enums::AuthenticationType::foreign_from(
                        request_three_d_secure,
                    ))
                }
                None => None,
            },
            mandate_data: ForeignTryFrom::foreign_try_from((
                item.mandate_data,
                item.currency.to_owned(),
            ))?,
            merchant_connector_details: item.merchant_connector_details,
            setup_future_usage: item.setup_future_usage,
            mandate_id: item.mandate,
            off_session: item.off_session,
            payment_method_type: item.payment_method_types,
            routing,
            // Pack the parsed IP and user agent into browser info; the
            // remaining browser fields stay at their defaults.
            browser_info: Some(
                serde_json::to_value(crate::types::BrowserInformation {
                    ip_address,
                    user_agent: item.user_agent,
                    ..Default::default()
                })
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("convert to browser info failed")?,
            ),
            connector_metadata: item.connector_metadata,
            ..Self::default()
        });
        request
    }
}
/// Stripe-compatible payment status surfaced in responses; defaults to
/// `Processing`.
#[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize, Debug)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentStatus {
    Succeeded,
    Canceled,
    #[default]
    Processing,
    RequiresAction,
    RequiresPaymentMethod,
    RequiresConfirmation,
    RequiresCapture,
}
impl From<api_enums::IntentStatus> for StripePaymentStatus {
    /// Collapses the internal intent statuses onto the smaller set of
    /// Stripe-compatible statuses.
    fn from(item: api_enums::IntentStatus) -> Self {
        match item {
            // A partial capture is still reported as success to Stripe clients.
            api_enums::IntentStatus::Succeeded | api_enums::IntentStatus::PartiallyCaptured => {
                Self::Succeeded
            }
            // Stripe has no distinct failed/expired status; both surface as canceled.
            api_enums::IntentStatus::Failed | api_enums::IntentStatus::Expired => Self::Canceled,
            api_enums::IntentStatus::Processing => Self::Processing,
            api_enums::IntentStatus::RequiresCustomerAction
            | api_enums::IntentStatus::RequiresMerchantAction
            | api_enums::IntentStatus::Conflicted => Self::RequiresAction,
            api_enums::IntentStatus::RequiresPaymentMethod => Self::RequiresPaymentMethod,
            api_enums::IntentStatus::RequiresConfirmation => Self::RequiresConfirmation,
            api_enums::IntentStatus::RequiresCapture
            | api_enums::IntentStatus::PartiallyCapturedAndCapturable
            | api_enums::IntentStatus::PartiallyAuthorizedAndRequiresCapture => {
                Self::RequiresCapture
            }
            api_enums::IntentStatus::Cancelled | api_enums::IntentStatus::CancelledPostCapture => {
                Self::Canceled
            }
        }
    }
}
/// Cancellation reasons accepted by Stripe's payment-cancel endpoint.
/// Serialized as snake_case both via serde and via `Display` (strum).
#[derive(Debug, Serialize, Deserialize, Copy, Clone, strum::Display)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum CancellationReason {
    Duplicate,
    Fraudulent,
    RequestedByCustomer,
    Abandoned,
}
/// Request body of the Stripe-compatible payment-cancel endpoint.
#[derive(Debug, Deserialize, Serialize, Copy, Clone)]
pub struct StripePaymentCancelRequest {
    // Optional reason, forwarded to the core as its snake_case string form.
    cancellation_reason: Option<CancellationReason>,
}
impl From<StripePaymentCancelRequest> for payments::PaymentsCancelRequest {
    /// Maps the Stripe cancel payload onto the internal cancel request,
    /// leaving every other field at its default value.
    fn from(item: StripePaymentCancelRequest) -> Self {
        let mut request = Self::default();
        request.cancellation_reason = item
            .cancellation_reason
            .as_ref()
            .map(ToString::to_string);
        request
    }
}
/// Response shape of the Stripe-compatible payment-intent endpoints,
/// assembled from the internal [`payments::PaymentsResponse`].
#[derive(Default, Eq, PartialEq, Serialize, Debug)]
pub struct StripePaymentIntentResponse {
    pub id: id_type::PaymentId,
    // Always the literal "payment_intent" (set in the `From` impl below).
    pub object: &'static str,
    pub amount: i64,
    pub amount_received: Option<i64>,
    pub amount_capturable: i64,
    pub currency: String,
    pub status: StripePaymentStatus,
    pub client_secret: Option<masking::Secret<String>>,
    // Unix timestamp (seconds) of creation.
    pub created: Option<i64>,
    pub customer: Option<id_type::CustomerId>,
    pub refunds: Option<Vec<stripe_refunds::StripeRefundResponse>>,
    pub mandate: Option<String>,
    pub metadata: Option<serde_json::Value>,
    // Placeholder empty list; Hyperswitch does not expose Stripe charges.
    pub charges: Charges,
    pub connector: Option<String>,
    pub description: Option<String>,
    pub mandate_data: Option<payments::MandateData>,
    pub setup_future_usage: Option<api_models::enums::FutureUsage>,
    pub off_session: Option<bool>,
    pub authentication_type: Option<api_models::enums::AuthenticationType>,
    pub next_action: Option<StripeNextAction>,
    pub cancellation_reason: Option<String>,
    pub payment_method: Option<api_models::enums::PaymentMethod>,
    pub payment_method_data: Option<payments::PaymentMethodDataResponse>,
    pub shipping: Option<payments::Address>,
    pub billing: Option<payments::Address>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub capture_on: Option<PrimitiveDateTime>,
    pub payment_token: Option<String>,
    pub email: Option<Email>,
    pub phone: Option<masking::Secret<String>>,
    pub statement_descriptor_suffix: Option<String>,
    pub statement_descriptor_name: Option<String>,
    pub capture_method: Option<api_models::enums::CaptureMethod>,
    pub name: Option<masking::Secret<String>>,
    pub last_payment_error: Option<LastPaymentError>,
    pub connector_transaction_id: Option<String>,
}
/// Stripe-style `last_payment_error` object, populated when the internal
/// response carries an error code.
#[derive(Default, Eq, PartialEq, Serialize, Debug)]
pub struct LastPaymentError {
    charge: Option<String>,
    code: Option<String>,
    decline_code: Option<String>,
    message: String,
    param: Option<String>,
    payment_method: StripePaymentMethod,
    // Serialized as "type" to match Stripe's field name.
    #[serde(rename = "type")]
    error_type: String,
}
impl From<payments::PaymentsResponse> for StripePaymentIntentResponse {
    /// Projects the internal payments response onto the Stripe-compatible
    /// payment-intent shape. Pure field mapping; no fallible conversions.
    fn from(resp: payments::PaymentsResponse) -> Self {
        Self {
            object: "payment_intent",
            id: resp.payment_id,
            status: StripePaymentStatus::from(resp.status),
            amount: resp.amount.get_amount_as_i64(),
            amount_capturable: resp.amount_capturable.get_amount_as_i64(),
            amount_received: resp.amount_received.map(|amt| amt.get_amount_as_i64()),
            connector: resp.connector,
            client_secret: resp.client_secret,
            // Stripe reports creation time as a unix timestamp.
            created: resp.created.map(|t| t.assume_utc().unix_timestamp()),
            // Stripe uses lowercase currency codes.
            currency: resp.currency.to_lowercase(),
            customer: resp.customer_id,
            description: resp.description,
            refunds: resp
                .refunds
                .map(|a| a.into_iter().map(Into::into).collect()),
            mandate: resp.mandate_id,
            mandate_data: resp.mandate_data,
            setup_future_usage: resp.setup_future_usage,
            off_session: resp.off_session,
            capture_on: resp.capture_on,
            capture_method: resp.capture_method,
            payment_method: resp.payment_method,
            payment_method_data: resp
                .payment_method_data
                .and_then(|pmd| pmd.payment_method_data),
            payment_token: resp.payment_token,
            shipping: resp.shipping,
            billing: resp.billing,
            email: resp.email.map(|inner| inner.into()),
            name: resp.name.map(Encryptable::into_inner),
            phone: resp.phone.map(Encryptable::into_inner),
            authentication_type: resp.authentication_type,
            statement_descriptor_name: resp.statement_descriptor_name,
            statement_descriptor_suffix: resp.statement_descriptor_suffix,
            next_action: into_stripe_next_action(resp.next_action, resp.return_url),
            cancellation_reason: resp.cancellation_reason,
            metadata: resp.metadata,
            // Hyperswitch does not expose charges; emit an empty list object.
            charges: Charges::new(),
            // Synthesize a Stripe-style error object with placeholder
            // payment-method details when an error code is present.
            last_payment_error: resp.error_code.map(|code| LastPaymentError {
                charge: None,
                code: Some(code.to_owned()),
                decline_code: None,
                message: resp
                    .error_message
                    .unwrap_or_else(|| consts::NO_ERROR_MESSAGE.to_string()),
                param: None,
                payment_method: StripePaymentMethod {
                    payment_method_id: "place_holder_id".to_string(),
                    object: "payment_method",
                    card: None,
                    created: u64::try_from(date_time::now().assume_utc().unix_timestamp())
                        .unwrap_or_default(),
                    method_type: "card".to_string(),
                    livemode: false,
                },
                error_type: code,
            }),
            connector_transaction_id: resp.connector_transaction_id,
        }
    }
}
/// Minimal Stripe-style payment-method object, used only as a placeholder
/// inside [`LastPaymentError`].
#[derive(Default, Eq, PartialEq, Serialize, Debug)]
pub struct StripePaymentMethod {
    #[serde(rename = "id")]
    payment_method_id: String,
    object: &'static str,
    card: Option<StripeCard>,
    // Unix timestamp (seconds) of creation.
    created: u64,
    #[serde(rename = "type")]
    method_type: String,
    livemode: bool,
}
/// Stripe-style `list` object for charges. Always empty — see
/// [`Charges::new`] for the placeholder values.
#[derive(Default, Eq, PartialEq, Serialize, Debug)]
pub struct Charges {
    object: &'static str,
    data: Vec<String>,
    has_more: bool,
    total_count: i32,
    url: String,
}
impl Charges {
    /// Builds an empty Stripe-style `list` object with a placeholder URL;
    /// Hyperswitch does not expose individual charges.
    pub fn new() -> Self {
        Self {
            object: "list",
            url: String::from("http://placeholder"),
            data: Vec::new(),
            has_more: false,
            total_count: 0,
        }
    }
}
/// Query parameters of the Stripe-compatible payment-list endpoint.
/// Unknown fields are rejected to match Stripe's strictness.
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(deny_unknown_fields)]
pub struct StripePaymentListConstraints {
    pub customer: Option<id_type::CustomerId>,
    pub starting_after: Option<id_type::PaymentId>,
    pub ending_before: Option<id_type::PaymentId>,
    #[serde(default = "default_limit")]
    pub limit: u32,
    // All `created*` filters are unix timestamps (seconds), using Stripe's
    // bracketed query syntax (e.g. `created[lt]`).
    pub created: Option<i64>,
    #[serde(rename = "created[lt]")]
    pub created_lt: Option<i64>,
    #[serde(rename = "created[gt]")]
    pub created_gt: Option<i64>,
    #[serde(rename = "created[lte]")]
    pub created_lte: Option<i64>,
    #[serde(rename = "created[gte]")]
    pub created_gte: Option<i64>,
}
/// Serde default for [`StripePaymentListConstraints::limit`] (page size
/// used when the caller omits `limit`).
fn default_limit() -> u32 {
    const DEFAULT_LIST_LIMIT: u32 = 10;
    DEFAULT_LIST_LIMIT
}
impl TryFrom<StripePaymentListConstraints> for payments::PaymentListConstraints {
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// Maps the Stripe-style list query onto the internal constraints,
    /// converting every unix-timestamp filter to a `PrimitiveDateTime`.
    ///
    /// # Errors
    /// Propagates `InvalidRequestData` when any timestamp is out of range.
    fn try_from(item: StripePaymentListConstraints) -> Result<Self, Self::Error> {
        Ok(Self {
            customer_id: item.customer,
            starting_after: item.starting_after,
            ending_before: item.ending_before,
            limit: item.limit,
            created: from_timestamp_to_datetime(item.created)?,
            created_lt: from_timestamp_to_datetime(item.created_lt)?,
            created_gt: from_timestamp_to_datetime(item.created_gt)?,
            created_lte: from_timestamp_to_datetime(item.created_lte)?,
            created_gte: from_timestamp_to_datetime(item.created_gte)?,
        })
    }
}
#[inline]
/// Converts an optional unix timestamp (seconds) into an optional
/// `PrimitiveDateTime`, reporting `InvalidRequestData` for out-of-range
/// values. `None` passes through unchanged.
fn from_timestamp_to_datetime(
    time: Option<i64>,
) -> Result<Option<PrimitiveDateTime>, errors::ApiErrorResponse> {
    time.map(|unix_ts| {
        time::OffsetDateTime::from_unix_timestamp(unix_ts)
            .map(|odt| PrimitiveDateTime::new(odt.date(), odt.time()))
            .map_err(|_| errors::ApiErrorResponse::InvalidRequestData {
                message: "Error while converting timestamp".to_string(),
            })
    })
    .transpose()
}
/// Stripe-style `list` envelope for payment intents.
#[derive(Default, Eq, PartialEq, Serialize)]
pub struct StripePaymentIntentListResponse {
    pub object: String,
    pub url: String,
    pub has_more: bool,
    pub data: Vec<StripePaymentIntentResponse>,
}
impl From<payments::PaymentListResponse> for StripePaymentIntentListResponse {
    /// Wraps the internal list response in a Stripe-style `list` envelope.
    /// `has_more` is always `false`; pagination state is not forwarded.
    fn from(it: payments::PaymentListResponse) -> Self {
        let data = it.data.into_iter().map(Into::into).collect();
        Self {
            object: String::from("list"),
            url: String::from("/v1/payment_intents"),
            has_more: false,
            data,
        }
    }
}
/// Stripe's `payment_method_options` object; only card options (with the
/// 3DS preference) are supported.
#[derive(PartialEq, Eq, Deserialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentMethodOptions {
    Card {
        request_three_d_secure: Option<Request3DS>,
    },
}
/// Whether a mandate authorizes a single payment or repeated payments.
#[derive(Eq, PartialEq, Debug, serde::Deserialize, serde::Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum StripeMandateType {
    SingleUse,
    MultiUse,
}
/// Mandate parameters accepted on the Stripe-compatible request.
/// All timestamps are deserialized from unix seconds.
#[derive(PartialEq, Eq, Clone, Default, Deserialize, Serialize, Debug)]
pub struct MandateOption {
    #[serde(default, with = "common_utils::custom_serde::timestamp::option")]
    pub accepted_at: Option<PrimitiveDateTime>,
    pub user_agent: Option<String>,
    pub ip_address: Option<masking::Secret<String, IpAddress>>,
    pub mandate_type: Option<StripeMandateType>,
    // Amount in the minor unit of the request currency.
    pub amount: Option<i64>,
    #[serde(default, with = "common_utils::custom_serde::timestamp::option")]
    pub start_date: Option<PrimitiveDateTime>,
    #[serde(default, with = "common_utils::custom_serde::timestamp::option")]
    pub end_date: Option<PrimitiveDateTime>,
}
impl ForeignTryFrom<(Option<MandateData>, Option<String>)> for Option<payments::MandateData> {
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// Converts a Stripe-style mandate payload (plus the request currency)
    /// into the internal [`payments::MandateData`] representation.
    ///
    /// # Errors
    /// - `MissingRequiredField` when no currency is supplied.
    /// - `InvalidDataValue` when the currency string cannot be parsed.
    fn foreign_try_from(
        (mandate_data, currency): (Option<MandateData>, Option<String>),
    ) -> errors::RouterResult<Self> {
        // `ok_or_else` defers building the error report to the failure path;
        // the previous `ok_or(..into())` constructed it even when the
        // currency was present.
        let currency = currency
            .ok_or_else(|| {
                errors::ApiErrorResponse::MissingRequiredField {
                    field_name: "currency",
                }
                .into()
            })
            .and_then(|c| {
                c.to_uppercase().parse_enum("currency").change_context(
                    errors::ApiErrorResponse::InvalidDataValue {
                        field_name: "currency",
                    },
                )
            })?;
        let mandate_data = mandate_data.map(|mandate| {
            // The amount data is identical for every mandate type; build it once
            // instead of repeating the literal in each branch.
            let amount_data = payments::MandateAmountData {
                amount: MinorUnit::new(mandate.amount.unwrap_or_default()),
                currency,
                start_date: mandate.start_date,
                end_date: mandate.end_date,
                metadata: None,
            };
            payments::MandateData {
                mandate_type: Some(match mandate.mandate_type {
                    Some(StripeMandateType::SingleUse) => {
                        payments::MandateType::SingleUse(amount_data)
                    }
                    // An absent mandate type defaults to multi-use, preserving
                    // the original behavior.
                    Some(StripeMandateType::MultiUse) | None => {
                        payments::MandateType::MultiUse(Some(amount_data))
                    }
                }),
                customer_acceptance: Some(common_payments_types::CustomerAcceptance {
                    acceptance_type: common_payments_types::AcceptanceType::Online,
                    accepted_at: mandate.customer_acceptance.accepted_at,
                    online: mandate.customer_acceptance.online.map(|online| {
                        common_payments_types::OnlineMandate {
                            ip_address: Some(online.ip_address),
                            user_agent: online.user_agent,
                        }
                    }),
                }),
                update_mandate_id: None,
            }
        });
        Ok(mandate_data)
    }
}
/// Stripe's `request_three_d_secure` preference for card payments.
#[derive(Default, Eq, PartialEq, Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum Request3DS {
    /// Default: let the platform decide (mapped to no-3DS internally).
    #[default]
    Automatic,
    Any,
}
impl ForeignFrom<Option<Request3DS>> for api_models::enums::AuthenticationType {
    /// Maps the Stripe 3DS preference onto the internal authentication type;
    /// a missing preference behaves like `Automatic` (no 3DS).
    fn foreign_from(item: Option<Request3DS>) -> Self {
        match item {
            Some(Request3DS::Any) => Self::ThreeDs,
            Some(Request3DS::Automatic) | None => Self::NoThreeDs,
        }
    }
}
/// Redirect target of a `redirect_to_url` next action.
#[derive(Default, Eq, PartialEq, Serialize, Debug)]
pub struct RedirectUrl {
    pub return_url: Option<String>,
    pub url: Option<String>,
}
/// Stripe-style `next_action` payload; the variant name is serialized into
/// a `type` tag (snake_case).
#[derive(Eq, PartialEq, serde::Serialize, Debug)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum StripeNextAction {
    RedirectToUrl {
        redirect_to_url: RedirectUrl,
    },
    DisplayBankTransferInformation {
        bank_transfer_steps_and_charges_details: payments::BankTransferNextStepsData,
    },
    ThirdPartySdkSessionToken {
        session_token: Option<payments::SessionToken>,
    },
    QrCodeInformation {
        image_data_url: Option<url::Url>,
        display_to_timestamp: Option<i64>,
        qr_code_url: Option<url::Url>,
        border_color: Option<String>,
        display_text: Option<String>,
    },
    FetchQrCodeInformation {
        qr_code_fetch_url: url::Url,
    },
    DisplayVoucherInformation {
        voucher_details: payments::VoucherNextStepData,
    },
    WaitScreenInformation {
        display_from_timestamp: i128,
        display_to_timestamp: Option<i128>,
        poll_config: Option<payments::PollConfig>,
    },
    InvokeSdkClient {
        next_action_data: payments::SdkNextActionData,
    },
    CollectOtp {
        consent_data_required: payments::MobilePaymentConsent,
    },
    InvokeHiddenIframe {
        iframe_data: payments::IframeData,
    },
    SdkUpiIntentInformation {
        sdk_uri: url::Url,
    },
}
/// Translates the internal `NextActionData` into the Stripe-compatible
/// `next_action` shape. The caller's `return_url` is attached to redirect
/// variants so clients know where control returns after the redirect.
pub(crate) fn into_stripe_next_action(
    next_action: Option<payments::NextActionData>,
    return_url: Option<String>,
) -> Option<StripeNextAction> {
    next_action.map(|next_action_data| match next_action_data {
        payments::NextActionData::RedirectToUrl { redirect_to_url } => {
            StripeNextAction::RedirectToUrl {
                redirect_to_url: RedirectUrl {
                    return_url,
                    url: Some(redirect_to_url),
                },
            }
        }
        // Popup redirects are flattened to a plain redirect for Stripe clients.
        payments::NextActionData::RedirectInsidePopup { popup_url, .. } => {
            StripeNextAction::RedirectToUrl {
                redirect_to_url: RedirectUrl {
                    return_url,
                    url: Some(popup_url),
                },
            }
        }
        payments::NextActionData::DisplayBankTransferInformation {
            bank_transfer_steps_and_charges_details,
        } => StripeNextAction::DisplayBankTransferInformation {
            bank_transfer_steps_and_charges_details,
        },
        payments::NextActionData::ThirdPartySdkSessionToken { session_token } => {
            StripeNextAction::ThirdPartySdkSessionToken { session_token }
        }
        payments::NextActionData::QrCodeInformation {
            image_data_url,
            display_to_timestamp,
            qr_code_url,
            border_color,
            display_text,
        } => StripeNextAction::QrCodeInformation {
            image_data_url,
            display_to_timestamp,
            qr_code_url,
            border_color,
            display_text,
        },
        payments::NextActionData::FetchQrCodeInformation { qr_code_fetch_url } => {
            StripeNextAction::FetchQrCodeInformation { qr_code_fetch_url }
        }
        payments::NextActionData::DisplayVoucherInformation { voucher_details } => {
            StripeNextAction::DisplayVoucherInformation { voucher_details }
        }
        // Poll configuration is intentionally dropped for Stripe clients.
        payments::NextActionData::WaitScreenInformation {
            display_from_timestamp,
            display_to_timestamp,
            poll_config: _,
        } => StripeNextAction::WaitScreenInformation {
            display_from_timestamp,
            display_to_timestamp,
            poll_config: None,
        },
        // 3DS invoke has no Stripe equivalent; emit an empty redirect object.
        payments::NextActionData::ThreeDsInvoke { .. } => StripeNextAction::RedirectToUrl {
            redirect_to_url: RedirectUrl {
                return_url: None,
                url: None,
            },
        },
        payments::NextActionData::InvokeSdkClient { next_action_data } => {
            StripeNextAction::InvokeSdkClient { next_action_data }
        }
        payments::NextActionData::CollectOtp {
            consent_data_required,
        } => StripeNextAction::CollectOtp {
            consent_data_required,
        },
        payments::NextActionData::InvokeHiddenIframe { iframe_data } => {
            StripeNextAction::InvokeHiddenIframe { iframe_data }
        }
        payments::NextActionData::SdkUpiIntentInformation { sdk_uri } => {
            StripeNextAction::SdkUpiIntentInformation { sdk_uri }
        }
    })
}
/// Body of the Stripe-compatible payment-retrieve call; carries only the
/// optional client secret used for authentication.
#[derive(Deserialize, Clone)]
pub struct StripePaymentRetrieveBody {
    pub client_secret: Option<String>,
}
// To handle payment types that have empty payment method data
/// Synthesizes payment-method data for payment-method types whose Stripe
/// request carries no explicit data (UPI intent, real-time payments, some
/// bank redirects). Returns `None` for every other type.
fn get_pmd_based_on_payment_method_type(
    payment_method_type: Option<api_enums::PaymentMethodType>,
    billing_details: Option<hyperswitch_domain_models::address::Address>,
) -> Option<payments::PaymentMethodData> {
    match payment_method_type {
        Some(api_enums::PaymentMethodType::UpiIntent) => Some(payments::PaymentMethodData::Upi(
            payments::UpiData::UpiIntent(payments::UpiIntentData {}),
        )),
        Some(api_enums::PaymentMethodType::Fps) => {
            Some(payments::PaymentMethodData::RealTimePayment(Box::new(
                payments::RealTimePaymentData::Fps {},
            )))
        }
        Some(api_enums::PaymentMethodType::DuitNow) => {
            Some(payments::PaymentMethodData::RealTimePayment(Box::new(
                payments::RealTimePaymentData::DuitNow {},
            )))
        }
        Some(api_enums::PaymentMethodType::PromptPay) => {
            Some(payments::PaymentMethodData::RealTimePayment(Box::new(
                payments::RealTimePaymentData::PromptPay {},
            )))
        }
        Some(api_enums::PaymentMethodType::VietQr) => {
            Some(payments::PaymentMethodData::RealTimePayment(Box::new(
                payments::RealTimePaymentData::VietQr {},
            )))
        }
        // Ideal additionally derives name/email/country from the billing
        // address when one is available.
        Some(api_enums::PaymentMethodType::Ideal) => Some(
            payments::PaymentMethodData::BankRedirect(payments::BankRedirectData::Ideal {
                billing_details: billing_details.as_ref().map(|billing_data| {
                    payments::BankRedirectBilling {
                        billing_name: billing_data.get_optional_full_name(),
                        email: billing_data.email.clone(),
                    }
                }),
                bank_name: None,
                country: billing_details
                    .as_ref()
                    .and_then(|billing_data| billing_data.get_optional_country()),
            }),
        ),
        Some(api_enums::PaymentMethodType::LocalBankRedirect) => {
            Some(payments::PaymentMethodData::BankRedirect(
                payments::BankRedirectData::LocalBankRedirect {},
            ))
        }
        _ => None,
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/compatibility/stripe/payment_intents/types.rs"
],
"module": "crates/router/src/compatibility/stripe/payment_intents",
"num_files": 1,
"token_count": 7820
}
|
module_3492569150289619232
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/compatibility/stripe/setup_intents
Files: 1
</path>
<module>
// File: crates/router/src/compatibility/stripe/setup_intents/types.rs
use std::str::FromStr;
use api_models::payments;
use common_utils::{date_time, ext_traits::StringExt, id_type, pii as secret};
use error_stack::ResultExt;
use router_env::logger;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use crate::{
compatibility::stripe::{
payment_intents::types as payment_intent, refunds::types as stripe_refunds,
},
consts,
core::errors,
pii::{self, PeekInterface},
types::{
api::{self as api_types, admin, enums as api_enums},
transformers::{ForeignFrom, ForeignTryFrom},
},
utils::OptionExt,
};
/// Stripe-style billing details attached to a setup-intent request.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone)]
pub struct StripeBillingDetails {
    pub address: Option<payments::AddressDetails>,
    pub email: Option<pii::Email>,
    pub name: Option<String>,
    pub phone: Option<pii::Secret<String>>,
}
impl From<StripeBillingDetails> for payments::Address {
    /// Converts Stripe billing details into the internal address type.
    /// Stripe carries no phone country code, so it is left unset.
    fn from(details: StripeBillingDetails) -> Self {
        let phone = payments::PhoneDetails {
            number: details.phone,
            country_code: None,
        };
        Self {
            address: details.address,
            email: details.email,
            phone: Some(phone),
        }
    }
}
/// Raw card details as supplied on the Stripe-compatible request.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone)]
pub struct StripeCard {
    pub number: cards::CardNumber,
    pub exp_month: pii::Secret<String>,
    pub exp_year: pii::Secret<String>,
    pub cvc: pii::Secret<String>,
}
// ApplePay wallet param is not available in stripe Docs
/// Wallet data accepted on the Stripe-compatible request; only Apple Pay
/// is supported.
#[derive(Serialize, PartialEq, Eq, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum StripeWallet {
    ApplePay(payments::ApplePayWalletData),
}
/// Payment-method kinds accepted on the Stripe-compatible request.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentMethodType {
    #[default]
    Card,
    Wallet,
}
impl From<StripePaymentMethodType> for api_enums::PaymentMethod {
    /// One-to-one mapping from the Stripe-facing payment-method kind to the
    /// internal enum.
    fn from(item: StripePaymentMethodType) -> Self {
        match item {
            StripePaymentMethodType::Wallet => Self::Wallet,
            StripePaymentMethodType::Card => Self::Card,
        }
    }
}
/// Payment-method section of a Stripe-compatible setup-intent request.
#[derive(Default, PartialEq, Eq, Deserialize, Clone)]
pub struct StripePaymentMethodData {
    #[serde(rename = "type")]
    pub stype: StripePaymentMethodType,
    pub billing_details: Option<StripeBillingDetails>,
    // Flattened, so card/wallet fields appear at this level in the JSON.
    #[serde(flatten)]
    pub payment_method_details: Option<StripePaymentMethodDetails>, // enum
    pub metadata: Option<Value>,
}
/// Concrete payment-method payload: either card details or wallet data.
#[derive(PartialEq, Eq, Deserialize, Clone)]
#[serde(rename_all = "snake_case")]
pub enum StripePaymentMethodDetails {
    Card(StripeCard),
    Wallet(StripeWallet),
}
impl From<StripeCard> for payments::Card {
    /// Maps the Stripe card payload to the internal card type. Stripe's
    /// payload has no holder name, so a placeholder is used; all issuer/
    /// network metadata is left unset.
    fn from(card: StripeCard) -> Self {
        Self {
            card_number: card.number,
            card_exp_month: card.exp_month,
            card_exp_year: card.exp_year,
            // Placeholder: the Stripe request carries no cardholder name.
            card_holder_name: Some(masking::Secret::new("stripe_cust".to_owned())),
            card_cvc: card.cvc,
            card_issuer: None,
            card_network: None,
            bank_code: None,
            card_issuing_country: None,
            card_type: None,
            nick_name: None,
        }
    }
}
impl From<StripeWallet> for payments::WalletData {
    /// Unwraps the single supported wallet variant (Apple Pay) into the
    /// internal wallet type.
    fn from(wallet: StripeWallet) -> Self {
        // Irrefutable: `StripeWallet` has exactly one variant.
        let StripeWallet::ApplePay(apple_pay_data) = wallet;
        Self::ApplePay(apple_pay_data)
    }
}
impl From<StripePaymentMethodDetails> for payments::PaymentMethodData {
    /// Dispatches the Stripe payload to the matching internal
    /// payment-method-data variant.
    fn from(item: StripePaymentMethodDetails) -> Self {
        match item {
            StripePaymentMethodDetails::Card(card_details) => {
                Self::Card(payments::Card::from(card_details))
            }
            StripePaymentMethodDetails::Wallet(wallet_details) => {
                Self::Wallet(payments::WalletData::from(wallet_details))
            }
        }
    }
}
/// Stripe-style shipping details attached to a setup-intent request.
#[derive(Default, Serialize, PartialEq, Eq, Deserialize, Clone)]
pub struct Shipping {
    pub address: Option<payments::AddressDetails>,
    pub name: Option<String>,
    pub carrier: Option<String>,
    pub phone: Option<pii::Secret<String>>,
    pub tracking_number: Option<pii::Secret<String>>,
}
impl From<Shipping> for payments::Address {
    /// Converts Stripe shipping details into the internal address type.
    /// Shipping carries no email and no phone country code.
    fn from(details: Shipping) -> Self {
        let phone = payments::PhoneDetails {
            number: details.phone,
            country_code: None,
        };
        Self {
            address: details.address,
            email: None,
            phone: Some(phone),
        }
    }
}
/// Request body of the Stripe-compatible setup-intent endpoint.
#[derive(Default, Deserialize, Clone)]
pub struct StripeSetupIntentRequest {
    pub confirm: Option<bool>,
    pub customer: Option<id_type::CustomerId>,
    // Only the first connector is used for routing (see the TryFrom impl).
    pub connector: Option<Vec<api_enums::RoutableConnectors>>,
    pub description: Option<String>,
    pub currency: Option<String>,
    pub payment_method_data: Option<StripePaymentMethodData>,
    pub receipt_email: Option<pii::Email>,
    pub return_url: Option<url::Url>,
    pub setup_future_usage: Option<api_enums::FutureUsage>,
    pub shipping: Option<Shipping>,
    pub billing_details: Option<StripeBillingDetails>,
    pub statement_descriptor: Option<String>,
    pub statement_descriptor_suffix: Option<String>,
    pub metadata: Option<secret::SecretSerdeValue>,
    pub client_secret: Option<pii::Secret<String>>,
    pub payment_method_options: Option<payment_intent::StripePaymentMethodOptions>,
    pub payment_method: Option<String>,
    pub merchant_connector_details: Option<admin::MerchantConnectorDetailsWrap>,
    // Forwarded as browser-info IP address after parsing.
    pub receipt_ipaddress: Option<String>,
    pub user_agent: Option<String>,
    pub mandate_data: Option<payment_intent::MandateData>,
    pub connector_metadata: Option<payments::ConnectorMetadata>,
}
impl TryFrom<StripeSetupIntentRequest> for payments::PaymentsRequest {
    type Error = error_stack::Report<errors::ApiErrorResponse>;
    /// Translates a Stripe-style setup-intent request into an internal
    /// zero-amount payments request.
    ///
    /// # Errors
    /// Returns `InvalidDataFormat`/`InvalidDataValue` when the IP address,
    /// currency, or metadata cannot be parsed, and `InternalServerError`
    /// when routing or browser info cannot be serialized.
    fn try_from(item: StripeSetupIntentRequest) -> errors::RouterResult<Self> {
        // Only the first connector in the list is honored for routing.
        let routable_connector: Option<api_enums::RoutableConnectors> =
            item.connector.and_then(|v| v.into_iter().next());
        let routing = routable_connector
            .map(|connector| {
                api_models::routing::StaticRoutingAlgorithm::Single(Box::new(
                    api_models::routing::RoutableConnectorChoice {
                        choice_kind: api_models::routing::RoutableChoiceKind::FullStruct,
                        connector,
                        merchant_connector_id: None,
                    },
                ))
            })
            .map(|r| {
                serde_json::to_value(r)
                    .change_context(errors::ApiErrorResponse::InternalServerError)
                    .attach_printable("converting to routing failed")
            })
            .transpose()?;
        let ip_address = item
            .receipt_ipaddress
            .map(|ip| std::net::IpAddr::from_str(ip.as_str()))
            .transpose()
            .change_context(errors::ApiErrorResponse::InvalidDataFormat {
                field_name: "receipt_ipaddress".to_string(),
                expected_format: "127.0.0.1".to_string(),
            })?;
        let metadata_object = item
            .metadata
            .clone()
            .parse_value("metadata")
            .change_context(errors::ApiErrorResponse::InvalidDataValue {
                field_name: "metadata mapping failed",
            })?;
        let request = Ok(Self {
            // Setup intents never move funds: zero amount, no capture.
            amount: Some(api_types::Amount::Zero),
            capture_method: None,
            amount_to_capture: None,
            confirm: item.confirm,
            customer_id: item.customer,
            currency: item
                .currency
                .as_ref()
                .map(|c| c.to_uppercase().parse_enum("currency"))
                .transpose()
                .change_context(errors::ApiErrorResponse::InvalidDataValue {
                    field_name: "currency",
                })?,
            email: item.receipt_email,
            name: item
                .billing_details
                .as_ref()
                .and_then(|b| b.name.as_ref().map(|x| masking::Secret::new(x.to_owned()))),
            phone: item.shipping.as_ref().and_then(|s| s.phone.clone()),
            description: item.description,
            return_url: item.return_url,
            payment_method_data: item.payment_method_data.as_ref().and_then(|pmd| {
                pmd.payment_method_details
                    .as_ref()
                    .map(|spmd| payments::PaymentMethodDataRequest {
                        payment_method_data: Some(payments::PaymentMethodData::from(
                            spmd.to_owned(),
                        )),
                        billing: pmd.billing_details.clone().map(payments::Address::from),
                    })
            }),
            payment_method: item
                .payment_method_data
                .as_ref()
                .map(|pmd| api_enums::PaymentMethod::from(pmd.stype.to_owned())),
            shipping: item
                .shipping
                .as_ref()
                .map(|s| payments::Address::from(s.to_owned())),
            billing: item
                .billing_details
                .as_ref()
                .map(|b| payments::Address::from(b.to_owned())),
            statement_descriptor_name: item.statement_descriptor,
            statement_descriptor_suffix: item.statement_descriptor_suffix,
            metadata: metadata_object,
            client_secret: item.client_secret.map(|s| s.peek().clone()),
            setup_future_usage: item.setup_future_usage,
            merchant_connector_details: item.merchant_connector_details,
            routing,
            authentication_type: match item.payment_method_options {
                Some(pmo) => {
                    // Card is currently the only payment-method-options variant.
                    let payment_intent::StripePaymentMethodOptions::Card {
                        request_three_d_secure,
                    }: payment_intent::StripePaymentMethodOptions = pmo;
                    Some(api_enums::AuthenticationType::foreign_from(
                        request_three_d_secure,
                    ))
                }
                None => None,
            },
            mandate_data: ForeignTryFrom::foreign_try_from((
                item.mandate_data,
                item.currency.to_owned(),
            ))?,
            browser_info: Some(
                serde_json::to_value(crate::types::BrowserInformation {
                    ip_address,
                    user_agent: item.user_agent,
                    ..Default::default()
                })
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("convert to browser info failed")?,
            ),
            connector_metadata: item.connector_metadata,
            ..Default::default()
        });
        request
    }
}
/// Setup-intent statuses exposed through the Stripe-compatibility layer,
/// mirroring Stripe's `SetupIntent.status` values.
#[derive(Clone, Default, Eq, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum StripeSetupStatus {
    Succeeded,
    Canceled,
    /// Default status for a setup intent that has not yet resolved.
    #[default]
    Processing,
    RequiresAction,
    RequiresPaymentMethod,
    RequiresConfirmation,
}
impl From<api_enums::IntentStatus> for StripeSetupStatus {
    /// Collapses the internal intent statuses onto the Stripe setup-intent
    /// status set. Capture-related statuses are invalid for a setup intent
    /// (it never moves funds), so they are logged and reported as canceled.
    fn from(item: api_enums::IntentStatus) -> Self {
        match item {
            api_enums::IntentStatus::Succeeded | api_enums::IntentStatus::PartiallyCaptured => {
                Self::Succeeded
            }
            api_enums::IntentStatus::Failed | api_enums::IntentStatus::Expired => Self::Canceled,
            api_enums::IntentStatus::Processing
            | api_enums::IntentStatus::PartiallyAuthorizedAndRequiresCapture => Self::Processing,
            api_enums::IntentStatus::RequiresCustomerAction => Self::RequiresAction,
            api_enums::IntentStatus::RequiresMerchantAction
            | api_enums::IntentStatus::Conflicted => Self::RequiresAction,
            api_enums::IntentStatus::RequiresPaymentMethod => Self::RequiresPaymentMethod,
            api_enums::IntentStatus::RequiresConfirmation => Self::RequiresConfirmation,
            api_enums::IntentStatus::RequiresCapture
            | api_enums::IntentStatus::PartiallyCapturedAndCapturable => {
                logger::error!("Invalid status change");
                Self::Canceled
            }
            api_enums::IntentStatus::Cancelled | api_enums::IntentStatus::CancelledPostCapture => {
                Self::Canceled
            }
        }
    }
}
/// Cancellation reasons accepted by Stripe's cancel endpoint. Serialized
/// as snake_case both via serde and via `Display` (strum).
#[derive(Debug, Serialize, Deserialize, Copy, Clone, strum::Display)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum CancellationReason {
    Duplicate,
    Fraudulent,
    RequestedByCustomer,
    Abandoned,
}
/// Request body of the Stripe-compatible cancel endpoint.
#[derive(Debug, Deserialize, Serialize, Copy, Clone)]
pub struct StripePaymentCancelRequest {
    // Optional reason, forwarded to the core as its snake_case string form.
    cancellation_reason: Option<CancellationReason>,
}
impl From<StripePaymentCancelRequest> for payments::PaymentsCancelRequest {
    /// Maps the Stripe cancel payload onto the internal cancel request,
    /// leaving every other field at its default value.
    fn from(item: StripePaymentCancelRequest) -> Self {
        let mut request = Self::default();
        request.cancellation_reason = item
            .cancellation_reason
            .as_ref()
            .map(ToString::to_string);
        request
    }
}
/// Redirect target of a `redirect_to_url` next action.
#[derive(Default, Eq, PartialEq, Serialize)]
pub struct RedirectUrl {
    pub return_url: Option<String>,
    pub url: Option<String>,
}
/// Stripe-style `next_action` payload; the variant name is serialized into
/// a `type` tag (snake_case).
#[derive(Eq, PartialEq, serde::Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum StripeNextAction {
    RedirectToUrl {
        redirect_to_url: RedirectUrl,
    },
    DisplayBankTransferInformation {
        bank_transfer_steps_and_charges_details: payments::BankTransferNextStepsData,
    },
    ThirdPartySdkSessionToken {
        session_token: Option<payments::SessionToken>,
    },
    QrCodeInformation {
        image_data_url: Option<url::Url>,
        display_to_timestamp: Option<i64>,
        qr_code_url: Option<url::Url>,
        border_color: Option<String>,
        display_text: Option<String>,
    },
    FetchQrCodeInformation {
        qr_code_fetch_url: url::Url,
    },
    DisplayVoucherInformation {
        voucher_details: payments::VoucherNextStepData,
    },
    WaitScreenInformation {
        display_from_timestamp: i128,
        display_to_timestamp: Option<i128>,
        poll_config: Option<payments::PollConfig>,
    },
    InvokeSdkClient {
        next_action_data: payments::SdkNextActionData,
    },
    CollectOtp {
        consent_data_required: payments::MobilePaymentConsent,
    },
    InvokeHiddenIframe {
        iframe_data: payments::IframeData,
    },
    SdkUpiIntentInformation {
        sdk_uri: url::Url,
    },
}
/// Translates the internal `NextActionData` into the Stripe-compatible
/// `next_action` shape. The caller's `return_url` is attached to redirect
/// variants so clients know where control returns after the redirect.
pub(crate) fn into_stripe_next_action(
    next_action: Option<payments::NextActionData>,
    return_url: Option<String>,
) -> Option<StripeNextAction> {
    next_action.map(|next_action_data| match next_action_data {
        payments::NextActionData::RedirectToUrl { redirect_to_url } => {
            StripeNextAction::RedirectToUrl {
                redirect_to_url: RedirectUrl {
                    return_url,
                    url: Some(redirect_to_url),
                },
            }
        }
        // Popup redirects are flattened to a plain redirect for Stripe clients.
        payments::NextActionData::RedirectInsidePopup { popup_url, .. } => {
            StripeNextAction::RedirectToUrl {
                redirect_to_url: RedirectUrl {
                    return_url,
                    url: Some(popup_url),
                },
            }
        }
        payments::NextActionData::DisplayBankTransferInformation {
            bank_transfer_steps_and_charges_details,
        } => StripeNextAction::DisplayBankTransferInformation {
            bank_transfer_steps_and_charges_details,
        },
        payments::NextActionData::ThirdPartySdkSessionToken { session_token } => {
            StripeNextAction::ThirdPartySdkSessionToken { session_token }
        }
        payments::NextActionData::QrCodeInformation {
            image_data_url,
            display_to_timestamp,
            qr_code_url,
            display_text,
            border_color,
        } => StripeNextAction::QrCodeInformation {
            image_data_url,
            display_to_timestamp,
            qr_code_url,
            display_text,
            border_color,
        },
        payments::NextActionData::FetchQrCodeInformation { qr_code_fetch_url } => {
            StripeNextAction::FetchQrCodeInformation { qr_code_fetch_url }
        }
        payments::NextActionData::DisplayVoucherInformation { voucher_details } => {
            StripeNextAction::DisplayVoucherInformation { voucher_details }
        }
        // Poll configuration is intentionally dropped for Stripe clients.
        payments::NextActionData::WaitScreenInformation {
            display_from_timestamp,
            display_to_timestamp,
            poll_config: _,
        } => StripeNextAction::WaitScreenInformation {
            display_from_timestamp,
            display_to_timestamp,
            poll_config: None,
        },
        // 3DS invoke has no Stripe equivalent; emit an empty redirect object.
        payments::NextActionData::ThreeDsInvoke { .. } => StripeNextAction::RedirectToUrl {
            redirect_to_url: RedirectUrl {
                return_url: None,
                url: None,
            },
        },
        payments::NextActionData::InvokeSdkClient { next_action_data } => {
            StripeNextAction::InvokeSdkClient { next_action_data }
        }
        payments::NextActionData::CollectOtp {
            consent_data_required,
        } => StripeNextAction::CollectOtp {
            consent_data_required,
        },
        payments::NextActionData::InvokeHiddenIframe { iframe_data } => {
            StripeNextAction::InvokeHiddenIframe { iframe_data }
        }
        payments::NextActionData::SdkUpiIntentInformation { sdk_uri } => {
            StripeNextAction::SdkUpiIntentInformation { sdk_uri }
        }
    })
}
/// Response shape of the Stripe-compatible setup-intent endpoints,
/// assembled from the internal [`payments::PaymentsResponse`].
#[derive(Default, Eq, PartialEq, Serialize)]
pub struct StripeSetupIntentResponse {
    pub id: id_type::PaymentId,
    // Always the literal "setup_intent" (set in the `From` impl below).
    pub object: String,
    pub status: StripeSetupStatus,
    pub client_secret: Option<masking::Secret<String>>,
    pub metadata: Option<Value>,
    #[serde(with = "common_utils::custom_serde::iso8601::option")]
    pub created: Option<time::PrimitiveDateTime>,
    pub customer: Option<id_type::CustomerId>,
    pub refunds: Option<Vec<stripe_refunds::StripeRefundResponse>>,
    pub mandate_id: Option<String>,
    pub next_action: Option<StripeNextAction>,
    pub last_payment_error: Option<LastPaymentError>,
    // Placeholder empty list; Hyperswitch does not expose Stripe charges.
    pub charges: payment_intent::Charges,
    pub connector_transaction_id: Option<String>,
}
/// Stripe-style `last_payment_error` object, populated when the internal
/// response carries an error code.
#[derive(Default, Eq, PartialEq, Serialize)]
pub struct LastPaymentError {
    charge: Option<String>,
    code: Option<String>,
    decline_code: Option<String>,
    message: String,
    param: Option<String>,
    payment_method: StripePaymentMethod,
    // Serialized as "type" to match Stripe's field name.
    #[serde(rename = "type")]
    error_type: String,
}
/// Minimal Stripe-style payment-method object, used only as a placeholder
/// inside [`LastPaymentError`].
#[derive(Default, Eq, PartialEq, Serialize)]
pub struct StripePaymentMethod {
    #[serde(rename = "id")]
    payment_method_id: String,
    object: &'static str,
    card: Option<StripeCard>,
    // Unix timestamp (seconds) of creation.
    created: u64,
    #[serde(rename = "type")]
    method_type: String,
    livemode: bool,
}
impl From<payments::PaymentsResponse> for StripeSetupIntentResponse {
    /// Maps the internal payments response onto the Stripe-compatible
    /// setup-intent wire shape. An error object is attached only when the
    /// response carries an `error_code`.
    fn from(resp: payments::PaymentsResponse) -> Self {
        // Build the error object up front so the struct literal stays flat.
        let last_payment_error = resp.error_code.map(|code| LastPaymentError {
            charge: None,
            code: Some(code.clone()),
            decline_code: None,
            message: resp
                .error_message
                .unwrap_or_else(|| consts::NO_ERROR_MESSAGE.to_string()),
            param: None,
            // Placeholder payment-method object; this layer does not carry
            // the real payment-method details.
            payment_method: StripePaymentMethod {
                payment_method_id: "place_holder_id".to_string(),
                object: "payment_method",
                card: None,
                created: u64::try_from(date_time::now().assume_utc().unix_timestamp())
                    .unwrap_or_default(),
                method_type: "card".to_string(),
                livemode: false,
            },
            error_type: code,
        });
        Self {
            id: resp.payment_id,
            object: "setup_intent".to_owned(),
            status: StripeSetupStatus::from(resp.status),
            client_secret: resp.client_secret,
            metadata: resp.metadata,
            created: resp.created,
            customer: resp.customer_id,
            refunds: resp
                .refunds
                .map(|refunds| refunds.into_iter().map(Into::into).collect()),
            mandate_id: resp.mandate_id,
            next_action: into_stripe_next_action(resp.next_action, resp.return_url),
            last_payment_error,
            charges: payment_intent::Charges::new(),
            connector_transaction_id: resp.connector_transaction_id,
        }
    }
}
/// Query parameters accepted by the Stripe-compatible payment-list
/// endpoint. Unknown query keys are rejected (`deny_unknown_fields`).
#[derive(Clone, Debug, serde::Deserialize)]
#[serde(deny_unknown_fields)]
pub struct StripePaymentListConstraints {
    pub customer: Option<id_type::CustomerId>,
    pub starting_after: Option<id_type::PaymentId>,
    pub ending_before: Option<id_type::PaymentId>,
    // Page size; falls back to `default_limit` (10) when absent.
    #[serde(default = "default_limit")]
    pub limit: u32,
    /// Unix-timestamp filters mirroring Stripe's `created[...]` operators.
    pub created: Option<i64>,
    #[serde(rename = "created[lt]")]
    pub created_lt: Option<i64>,
    #[serde(rename = "created[gt]")]
    pub created_gt: Option<i64>,
    #[serde(rename = "created[lte]")]
    pub created_lte: Option<i64>,
    #[serde(rename = "created[gte]")]
    pub created_gte: Option<i64>,
}
/// Page size applied when the caller omits the `limit` query parameter.
fn default_limit() -> u32 {
    const DEFAULT_PAGE_SIZE: u32 = 10;
    DEFAULT_PAGE_SIZE
}
impl TryFrom<StripePaymentListConstraints> for payments::PaymentListConstraints {
type Error = error_stack::Report<errors::ApiErrorResponse>;
fn try_from(item: StripePaymentListConstraints) -> Result<Self, Self::Error> {
Ok(Self {
customer_id: item.customer,
starting_after: item.starting_after,
ending_before: item.ending_before,
limit: item.limit,
created: from_timestamp_to_datetime(item.created)?,
created_lt: from_timestamp_to_datetime(item.created_lt)?,
created_gt: from_timestamp_to_datetime(item.created_gt)?,
created_lte: from_timestamp_to_datetime(item.created_lte)?,
created_gte: from_timestamp_to_datetime(item.created_gte)?,
})
}
}
#[inline]
/// Converts an optional unix timestamp (seconds) into an optional
/// `PrimitiveDateTime`, logging and returning `InvalidRequestData` when
/// the timestamp is outside the representable range.
fn from_timestamp_to_datetime(
    time: Option<i64>,
) -> Result<Option<time::PrimitiveDateTime>, errors::ApiErrorResponse> {
    time.map(|ts| {
        time::OffsetDateTime::from_unix_timestamp(ts)
            .map_err(|err| {
                logger::error!("Error: from_unix_timestamp: {}", err);
                errors::ApiErrorResponse::InvalidRequestData {
                    message: "Error while converting timestamp".to_string(),
                }
            })
            // Drop the (zero) UTC offset and keep the wall-clock value.
            .map(|odt| time::PrimitiveDateTime::new(odt.date(), odt.time()))
    })
    .transpose()
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/compatibility/stripe/setup_intents/types.rs"
],
"module": "crates/router/src/compatibility/stripe/setup_intents",
"num_files": 1,
"token_count": 4751
}
|
module_-3804926417901470008
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/types/domain/user
Files: 3
</path>
<module>
// File: crates/router/src/types/domain/user/dashboard_metadata.rs
use api_models::user::dashboard_metadata as api;
use diesel_models::enums::DashboardMetadata as DBEnum;
use masking::Secret;
use time::PrimitiveDateTime;
/// Typed payloads for each dashboard-metadata key; each variant pairs a
/// [`DBEnum`] key (see the `From` impl below) with its value shape.
pub enum MetaData {
    ProductionAgreement(ProductionAgreementValue),
    SetupProcessor(api::SetupProcessor),
    ConfigureEndpoint(bool),
    SetupComplete(bool),
    FirstProcessorConnected(api::ProcessorConnected),
    SecondProcessorConnected(api::ProcessorConnected),
    ConfiguredRouting(api::ConfiguredRouting),
    TestPayment(api::TestPayment),
    IntegrationMethod(api::IntegrationMethod),
    ConfigurationType(api::ConfigurationType),
    IntegrationCompleted(bool),
    StripeConnected(api::ProcessorConnected),
    PaypalConnected(api::ProcessorConnected),
    SPRoutingConfigured(api::ConfiguredRouting),
    Feedback(api::Feedback),
    ProdIntent(api::ProdIntent),
    SPTestPayment(bool),
    DownloadWoocom(bool),
    ConfigureWoocom(bool),
    SetupWoocomWebhook(bool),
    IsMultipleConfiguration(bool),
    IsChangePasswordRequired(bool),
    OnboardingSurvey(api::OnboardingSurvey),
    ReconStatus(api::ReconStatus),
}
/// Maps each payload variant to its database key, discarding the value.
/// Exhaustive on purpose: adding a `MetaData` variant forces a mapping here.
impl From<&MetaData> for DBEnum {
    fn from(value: &MetaData) -> Self {
        match value {
            MetaData::ProductionAgreement(_) => Self::ProductionAgreement,
            MetaData::SetupProcessor(_) => Self::SetupProcessor,
            MetaData::ConfigureEndpoint(_) => Self::ConfigureEndpoint,
            MetaData::SetupComplete(_) => Self::SetupComplete,
            MetaData::FirstProcessorConnected(_) => Self::FirstProcessorConnected,
            MetaData::SecondProcessorConnected(_) => Self::SecondProcessorConnected,
            MetaData::ConfiguredRouting(_) => Self::ConfiguredRouting,
            MetaData::TestPayment(_) => Self::TestPayment,
            MetaData::IntegrationMethod(_) => Self::IntegrationMethod,
            MetaData::ConfigurationType(_) => Self::ConfigurationType,
            MetaData::IntegrationCompleted(_) => Self::IntegrationCompleted,
            MetaData::StripeConnected(_) => Self::StripeConnected,
            MetaData::PaypalConnected(_) => Self::PaypalConnected,
            // Note the casing difference: SP… variants map to Sp… DB keys.
            MetaData::SPRoutingConfigured(_) => Self::SpRoutingConfigured,
            MetaData::Feedback(_) => Self::Feedback,
            MetaData::ProdIntent(_) => Self::ProdIntent,
            MetaData::SPTestPayment(_) => Self::SpTestPayment,
            MetaData::DownloadWoocom(_) => Self::DownloadWoocom,
            MetaData::ConfigureWoocom(_) => Self::ConfigureWoocom,
            MetaData::SetupWoocomWebhook(_) => Self::SetupWoocomWebhook,
            MetaData::IsMultipleConfiguration(_) => Self::IsMultipleConfiguration,
            MetaData::IsChangePasswordRequired(_) => Self::IsChangePasswordRequired,
            MetaData::OnboardingSurvey(_) => Self::OnboardingSurvey,
            MetaData::ReconStatus(_) => Self::ReconStatus,
        }
    }
}
/// Record of a user's acceptance of the production agreement: the agreed
/// version, the accepting client's IP, and when it was accepted.
#[derive(Debug, serde::Serialize)]
pub struct ProductionAgreementValue {
    pub version: String,
    pub ip_address: Secret<String, common_utils::pii::IpAddress>,
    pub timestamp: PrimitiveDateTime,
}
// File: crates/router/src/types/domain/user/decision_manager.rs
use common_enums::TokenPurpose;
use common_utils::{id_type, types::user::LineageContext};
use diesel_models::{
enums::{UserRoleVersion, UserStatus},
user_role::UserRole,
};
use error_stack::ResultExt;
use masking::Secret;
use router_env::logger;
use super::UserFromStorage;
use crate::{
core::errors::{UserErrors, UserResult},
db::user_role::ListUserRolesByUserIdPayload,
routes::SessionState,
services::authentication as auth,
utils,
};
/// A single step in an authentication flow: either one that issues a
/// single-purpose token (SPT) or the terminal step that issues the JWT.
#[derive(Eq, PartialEq, Clone, Copy)]
pub enum UserFlow {
    SPTFlow(SPTFlow),
    JWTFlow(JWTFlow),
}
impl UserFlow {
    /// Delegates the "is this step still required?" decision to the
    /// concrete flow variant; JWT steps ignore the path and tenant.
    async fn is_required(
        &self,
        user: &UserFromStorage,
        path: &[TokenPurpose],
        state: &SessionState,
        user_tenant_id: &id_type::TenantId,
    ) -> UserResult<bool> {
        match self {
            Self::SPTFlow(flow) => flow.is_required(user, path, state, user_tenant_id).await,
            Self::JWTFlow(flow) => flow.is_required(user, state).await,
        }
    }
}
/// Intermediate authentication steps, each completed by presenting a
/// single-purpose token minted for that step.
#[derive(Eq, PartialEq, Clone, Copy)]
pub enum SPTFlow {
    AuthSelect,
    SSO,
    TOTP,
    VerifyEmail,
    AcceptInvitationFromEmail,
    ForceSetPassword,
    MerchantSelect,
    ResetPassword,
}
impl SPTFlow {
    /// Decides whether this step must still be performed for `user`.
    ///
    /// Most steps are unconditionally required; TOTP and ForceSetPassword
    /// are skipped for SSO-authenticated sessions, and MerchantSelect is
    /// required only when the user has no active role in the tenant.
    async fn is_required(
        &self,
        user: &UserFromStorage,
        path: &[TokenPurpose],
        state: &SessionState,
        user_tenant_id: &id_type::TenantId,
    ) -> UserResult<bool> {
        match self {
            // Auth
            Self::AuthSelect => Ok(true),
            Self::SSO => Ok(true),
            // TOTP
            Self::TOTP => Ok(!path.contains(&TokenPurpose::SSO)),
            // Main email APIs
            Self::AcceptInvitationFromEmail | Self::ResetPassword => Ok(true),
            Self::VerifyEmail => Ok(true),
            // Final Checks
            Self::ForceSetPassword => user
                .is_password_rotate_required(state)
                .map(|rotate_required| rotate_required && !path.contains(&TokenPurpose::SSO)),
            Self::MerchantSelect => Ok(state
                .global_store
                .list_user_roles_by_user_id(ListUserRolesByUserIdPayload {
                    user_id: user.get_user_id(),
                    tenant_id: user_tenant_id,
                    org_id: None,
                    merchant_id: None,
                    profile_id: None,
                    entity_id: None,
                    version: None,
                    status: Some(UserStatus::Active),
                    limit: Some(1),
                })
                .await
                .change_context(UserErrors::InternalServerError)?
                .is_empty()),
        }
    }
    /// Mints the single-purpose token that authorizes this step, carrying
    /// the flow's origin, the purposes completed so far, and the tenant.
    pub async fn generate_spt(
        self,
        state: &SessionState,
        next_flow: &NextFlow,
    ) -> UserResult<Secret<String>> {
        auth::SinglePurposeToken::new_token(
            next_flow.user.get_user_id().to_string(),
            self.into(),
            next_flow.origin.clone(),
            &state.conf,
            next_flow.path.to_vec(),
            Some(state.tenant.tenant_id.clone()),
        )
        .await
        .map(|token| token.into())
    }
}
/// Terminal flow step: issuing the session JWT.
#[derive(Eq, PartialEq, Clone, Copy)]
pub enum JWTFlow {
    UserInfo,
}
impl JWTFlow {
    /// A JWT-issuing step is always required once the flow reaches it.
    async fn is_required(
        &self,
        _user: &UserFromStorage,
        _state: &SessionState,
    ) -> UserResult<bool> {
        Ok(true)
    }
    /// Issues the final auth JWT for the user.
    ///
    /// The lineage (org/merchant/profile/role/tenant) embedded in the token
    /// comes from the cached `lineage_context` on the user row when that
    /// context still matches an existing V2 or V1 user role; otherwise it
    /// is re-derived from `user_role`. The resolved context is written back
    /// to the DB asynchronously (fire-and-forget) before the token is minted.
    pub async fn generate_jwt(
        self,
        state: &SessionState,
        next_flow: &NextFlow,
        user_role: &UserRole,
    ) -> UserResult<Secret<String>> {
        let user_id = next_flow.user.get_user_id();
        // Fetch lineage context from DB; a lookup failure is logged and
        // treated the same as "no cached context".
        let lineage_context_from_db = state
            .global_store
            .find_user_by_id(user_id)
            .await
            .inspect_err(|e| {
                logger::error!(
                    "Failed to fetch lineage context from DB for user {}: {:?}",
                    user_id,
                    e
                )
            })
            .ok()
            .and_then(|user| user.lineage_context);
        let new_lineage_context = match lineage_context_from_db {
            Some(ctx) => {
                let tenant_id = ctx.tenant_id.clone();
                // Validate the cached context against a V2 role first.
                let user_role_match_v2 = state
                    .global_store
                    .find_user_role_by_user_id_and_lineage(
                        &ctx.user_id,
                        &tenant_id,
                        &ctx.org_id,
                        &ctx.merchant_id,
                        &ctx.profile_id,
                        UserRoleVersion::V2,
                    )
                    .await
                    .inspect_err(|e| {
                        logger::error!("Failed to validate V2 role: {e:?}");
                    })
                    .map(|role| role.role_id == ctx.role_id)
                    .unwrap_or_default();
                if user_role_match_v2 {
                    ctx
                } else {
                    // No V2 match — try the legacy V1 role table.
                    let user_role_match_v1 = state
                        .global_store
                        .find_user_role_by_user_id_and_lineage(
                            &ctx.user_id,
                            &tenant_id,
                            &ctx.org_id,
                            &ctx.merchant_id,
                            &ctx.profile_id,
                            UserRoleVersion::V1,
                        )
                        .await
                        .inspect_err(|e| {
                            logger::error!("Failed to validate V1 role: {e:?}");
                        })
                        .map(|role| role.role_id == ctx.role_id)
                        .unwrap_or_default();
                    if user_role_match_v1 {
                        ctx
                    } else {
                        // fallback to default lineage if cached context is invalid
                        Self::resolve_lineage_from_user_role(state, user_role, user_id).await?
                    }
                }
            }
            None =>
            // no cached context found
            {
                Self::resolve_lineage_from_user_role(state, user_role, user_id).await?
            }
        };
        // Persist the (possibly refreshed) context without blocking token
        // issuance on the write.
        utils::user::spawn_async_lineage_context_update_to_db(
            state,
            user_id,
            new_lineage_context.clone(),
        );
        auth::AuthToken::new_token(
            new_lineage_context.user_id,
            new_lineage_context.merchant_id,
            new_lineage_context.role_id,
            &state.conf,
            new_lineage_context.org_id,
            new_lineage_context.profile_id,
            Some(new_lineage_context.tenant_id),
        )
        .await
        .map(|token| token.into())
    }
    /// Derives a lineage context from `user_role` by resolving the single
    /// org → merchant → profile chain the role grants access to.
    pub async fn resolve_lineage_from_user_role(
        state: &SessionState,
        user_role: &UserRole,
        user_id: &str,
    ) -> UserResult<LineageContext> {
        let org_id = utils::user_role::get_single_org_id(state, user_role).await?;
        let merchant_id =
            utils::user_role::get_single_merchant_id(state, user_role, &org_id).await?;
        let profile_id =
            utils::user_role::get_single_profile_id(state, user_role, &merchant_id).await?;
        Ok(LineageContext {
            user_id: user_id.to_string(),
            org_id,
            merchant_id,
            profile_id,
            role_id: user_role.role_id.clone(),
            tenant_id: user_role.tenant_id.clone(),
        })
    }
}
/// Where an authentication flow started; each origin selects a fixed
/// ordered sequence of [`UserFlow`] steps (see `get_flows`).
#[derive(serde::Serialize, serde::Deserialize, Clone, Debug)]
#[serde(rename_all = "snake_case")]
pub enum Origin {
    #[serde(rename = "sign_in_with_sso")]
    SignInWithSSO,
    SignIn,
    SignUp,
    MagicLink,
    VerifyEmail,
    AcceptInvitationFromEmail,
    ResetPassword,
}
impl Origin {
    /// Returns the ordered step sequence for this origin; flows are walked
    /// front to back, skipping steps whose `is_required` returns false.
    fn get_flows(&self) -> &'static [UserFlow] {
        match self {
            Self::SignInWithSSO => &SIGNIN_WITH_SSO_FLOW,
            Self::SignIn => &SIGNIN_FLOW,
            Self::SignUp => &SIGNUP_FLOW,
            Self::VerifyEmail => &VERIFY_EMAIL_FLOW,
            Self::MagicLink => &MAGIC_LINK_FLOW,
            Self::AcceptInvitationFromEmail => &ACCEPT_INVITATION_FROM_EMAIL_FLOW,
            Self::ResetPassword => &RESET_PASSWORD_FLOW,
        }
    }
}
// Ordered step sequences per origin. Every sequence that ends in a session
// terminates with the JWT-issuing UserInfo step.
/// SSO sign-in: merchant selection then JWT.
const SIGNIN_WITH_SSO_FLOW: [UserFlow; 2] = [
    UserFlow::SPTFlow(SPTFlow::MerchantSelect),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Password sign-in: TOTP, optional forced password rotation, merchant
/// selection, then JWT.
const SIGNIN_FLOW: [UserFlow; 4] = [
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::ForceSetPassword),
    UserFlow::SPTFlow(SPTFlow::MerchantSelect),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Sign-up mirrors sign-in (same four steps).
const SIGNUP_FLOW: [UserFlow; 4] = [
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::ForceSetPassword),
    UserFlow::SPTFlow(SPTFlow::MerchantSelect),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Magic-link login additionally verifies the email address.
const MAGIC_LINK_FLOW: [UserFlow; 5] = [
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::VerifyEmail),
    UserFlow::SPTFlow(SPTFlow::ForceSetPassword),
    UserFlow::SPTFlow(SPTFlow::MerchantSelect),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Email verification shares the magic-link sequence.
const VERIFY_EMAIL_FLOW: [UserFlow; 5] = [
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::VerifyEmail),
    UserFlow::SPTFlow(SPTFlow::ForceSetPassword),
    UserFlow::SPTFlow(SPTFlow::MerchantSelect),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Invitation acceptance: choose auth method, optionally SSO, TOTP,
/// accept the invite, optional password rotation, then JWT.
const ACCEPT_INVITATION_FROM_EMAIL_FLOW: [UserFlow; 6] = [
    UserFlow::SPTFlow(SPTFlow::AuthSelect),
    UserFlow::SPTFlow(SPTFlow::SSO),
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::AcceptInvitationFromEmail),
    UserFlow::SPTFlow(SPTFlow::ForceSetPassword),
    UserFlow::JWTFlow(JWTFlow::UserInfo),
];
/// Password reset ends after the reset step; no session is issued.
const RESET_PASSWORD_FLOW: [UserFlow; 2] = [
    UserFlow::SPTFlow(SPTFlow::TOTP),
    UserFlow::SPTFlow(SPTFlow::ResetPassword),
];
/// Position within an origin's flow sequence, reconstructed from a
/// single-purpose token: which step index we are at and which token
/// purposes have already been completed.
pub struct CurrentFlow {
    origin: Origin,
    current_flow_index: usize,
    path: Vec<TokenPurpose>,
    tenant_id: Option<id_type::TenantId>,
}
impl CurrentFlow {
    /// Locates `current_flow` in the token origin's step sequence and
    /// records the traversal state (index + completed purposes) there.
    ///
    /// Errors with `InternalServerError` when the flow is not part of the
    /// origin's sequence, which indicates an inconsistent token.
    pub fn new(
        token: auth::UserFromSinglePurposeToken,
        current_flow: UserFlow,
    ) -> UserResult<Self> {
        let flows = token.origin.get_flows();
        let index = flows
            .iter()
            // Fixed: the comparand had been corrupted to `¤t_flow`
            // (HTML-entity mangling of `&current_flow`).
            .position(|flow| flow == &current_flow)
            .ok_or(UserErrors::InternalServerError)?;
        let mut path = token.path;
        path.push(current_flow.into());
        Ok(Self {
            origin: token.origin,
            current_flow_index: index,
            path,
            tenant_id: token.tenant_id,
        })
    }
    /// Walks forward from the current step and returns the first remaining
    /// step that is still required for `user`; errors if the sequence is
    /// exhausted (every flow is expected to end in a required step).
    pub async fn next(self, user: UserFromStorage, state: &SessionState) -> UserResult<NextFlow> {
        let flows = self.origin.get_flows();
        let remaining_flows = flows.iter().skip(self.current_flow_index + 1);
        for flow in remaining_flows {
            if flow
                .is_required(
                    &user,
                    &self.path,
                    state,
                    self.tenant_id.as_ref().unwrap_or(&state.tenant.tenant_id),
                )
                .await?
            {
                return Ok(NextFlow {
                    origin: self.origin.clone(),
                    next_flow: *flow,
                    user,
                    path: self.path,
                    tenant_id: self.tenant_id,
                });
            }
        }
        Err(UserErrors::InternalServerError.into())
    }
}
/// The next step a user must complete, together with the user record and
/// the purposes already accumulated along the way.
pub struct NextFlow {
    origin: Origin,
    next_flow: UserFlow,
    user: UserFromStorage,
    path: Vec<TokenPurpose>,
    tenant_id: Option<id_type::TenantId>,
}
impl NextFlow {
    /// Starts a fresh traversal of `origin`'s sequence and returns the
    /// first step required for `user`; errors if none is required.
    pub async fn from_origin(
        origin: Origin,
        user: UserFromStorage,
        state: &SessionState,
    ) -> UserResult<Self> {
        let flows = origin.get_flows();
        let path = vec![];
        for flow in flows {
            if flow
                .is_required(&user, &path, state, &state.tenant.tenant_id)
                .await?
            {
                return Ok(Self {
                    origin,
                    next_flow: *flow,
                    user,
                    path,
                    tenant_id: Some(state.tenant.tenant_id.clone()),
                });
            }
        }
        Err(UserErrors::InternalServerError.into())
    }
    /// The step this object represents.
    pub fn get_flow(&self) -> UserFlow {
        self.next_flow
    }
    /// Produces the token for this step: an SPT for intermediate steps, or
    /// the session JWT (looking up one active user role) for the terminal
    /// step.
    pub async fn get_token(&self, state: &SessionState) -> UserResult<Secret<String>> {
        match self.next_flow {
            UserFlow::SPTFlow(spt_flow) => spt_flow.generate_spt(state, self).await,
            UserFlow::JWTFlow(jwt_flow) => {
                #[cfg(feature = "email")]
                {
                    self.user.get_verification_days_left(state)?;
                }
                // Any single active role suffices to anchor the JWT.
                let user_role = state
                    .global_store
                    .list_user_roles_by_user_id(ListUserRolesByUserIdPayload {
                        user_id: self.user.get_user_id(),
                        tenant_id: self.tenant_id.as_ref().unwrap_or(&state.tenant.tenant_id),
                        org_id: None,
                        merchant_id: None,
                        profile_id: None,
                        entity_id: None,
                        version: None,
                        status: Some(UserStatus::Active),
                        limit: Some(1),
                    })
                    .await
                    .change_context(UserErrors::InternalServerError)?
                    .pop()
                    .ok_or(UserErrors::InternalServerError)?;
                utils::user_role::set_role_info_in_cache_by_user_role(state, &user_role).await;
                jwt_flow.generate_jwt(state, self, &user_role).await
            }
        }
    }
    /// Same as `get_token`, but the caller supplies the role to anchor a
    /// JWT to instead of it being looked up.
    pub async fn get_token_with_user_role(
        &self,
        state: &SessionState,
        user_role: &UserRole,
    ) -> UserResult<Secret<String>> {
        match self.next_flow {
            UserFlow::SPTFlow(spt_flow) => spt_flow.generate_spt(state, self).await,
            UserFlow::JWTFlow(jwt_flow) => {
                #[cfg(feature = "email")]
                {
                    self.user.get_verification_days_left(state)?;
                }
                utils::user_role::set_role_info_in_cache_by_user_role(state, user_role).await;
                jwt_flow.generate_jwt(state, self, user_role).await
            }
        }
    }
    /// Skips the current step and advances to the next required one;
    /// errors when the sequence is exhausted.
    pub async fn skip(self, user: UserFromStorage, state: &SessionState) -> UserResult<Self> {
        let flows = self.origin.get_flows();
        let index = flows
            .iter()
            .position(|flow| flow == &self.get_flow())
            .ok_or(UserErrors::InternalServerError)?;
        let remaining_flows = flows.iter().skip(index + 1);
        for flow in remaining_flows {
            if flow
                .is_required(&user, &self.path, state, &state.tenant.tenant_id)
                .await?
            {
                return Ok(Self {
                    origin: self.origin.clone(),
                    next_flow: *flow,
                    user,
                    path: self.path,
                    tenant_id: Some(state.tenant.tenant_id.clone()),
                });
            }
        }
        Err(UserErrors::InternalServerError.into())
    }
}
/// Delegates to the purpose mapping of the concrete flow variant.
impl From<UserFlow> for TokenPurpose {
    fn from(value: UserFlow) -> Self {
        match value {
            UserFlow::SPTFlow(flow) => flow.into(),
            UserFlow::JWTFlow(flow) => flow.into(),
        }
    }
}
/// Maps each SPT step to the token purpose recorded on its token.
impl From<SPTFlow> for TokenPurpose {
    fn from(value: SPTFlow) -> Self {
        match value {
            SPTFlow::AuthSelect => Self::AuthSelect,
            SPTFlow::SSO => Self::SSO,
            SPTFlow::TOTP => Self::TOTP,
            SPTFlow::VerifyEmail => Self::VerifyEmail,
            SPTFlow::AcceptInvitationFromEmail => Self::AcceptInvitationFromEmail,
            // MerchantSelect intentionally reuses the AcceptInvite purpose.
            SPTFlow::MerchantSelect => Self::AcceptInvite,
            SPTFlow::ResetPassword => Self::ResetPassword,
            SPTFlow::ForceSetPassword => Self::ForceSetPassword,
        }
    }
}
/// Maps the terminal JWT step to its token purpose.
impl From<JWTFlow> for TokenPurpose {
    fn from(value: JWTFlow) -> Self {
        match value {
            JWTFlow::UserInfo => Self::UserInfo,
        }
    }
}
/// Wraps an SPT step as a generic flow step.
impl From<SPTFlow> for UserFlow {
    fn from(value: SPTFlow) -> Self {
        Self::SPTFlow(value)
    }
}
/// Wraps a JWT step as a generic flow step.
impl From<JWTFlow> for UserFlow {
    fn from(value: JWTFlow) -> Self {
        Self::JWTFlow(value)
    }
}
// File: crates/router/src/types/domain/user/user_authentication_method.rs
use std::sync::LazyLock;
use common_enums::{Owner, UserAuthType};
use diesel_models::UserAuthenticationMethod;
/// Fallback authentication method used when no tenant-specific method is
/// configured: plain password auth owned by the default "hyperswitch"
/// tenant, with sign-up allowed. Initialized lazily on first access.
pub static DEFAULT_USER_AUTH_METHOD: LazyLock<UserAuthenticationMethod> =
    LazyLock::new(|| UserAuthenticationMethod {
        id: String::from("hyperswitch_default"),
        auth_id: String::from("hyperswitch"),
        owner_id: String::from("hyperswitch"),
        owner_type: Owner::Tenant,
        auth_type: UserAuthType::Password,
        private_config: None,
        public_config: None,
        allow_signup: true,
        created_at: common_utils::date_time::now(),
        last_modified_at: common_utils::date_time::now(),
        email_domain: String::from("hyperswitch"),
    });
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/types/domain/user/dashboard_metadata.rs",
"crates/router/src/types/domain/user/decision_manager.rs",
"crates/router/src/types/domain/user/user_authentication_method.rs"
],
"module": "crates/router/src/types/domain/user",
"num_files": 3,
"token_count": 4468
}
|
module_8039225252050957342
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/fraud_check/operation
Files: 2
</path>
<module>
// File: crates/router/src/core/fraud_check/operation/fraud_check_pre.rs
use async_trait::async_trait;
use common_enums::FrmSuggestion;
use common_utils::ext_traits::Encode;
use diesel_models::enums::FraudCheckLastStep;
use router_env::{instrument, tracing};
use uuid::Uuid;
use super::{Domain, FraudCheckOperation, GetTracker, UpdateTracker};
use crate::{
core::{
errors::RouterResult,
fraud_check::{
self as frm_core,
types::{FrmData, PaymentDetails, PaymentToFrmData},
ConnectorDetailsCore,
},
payments,
},
errors,
routes::app::ReqState,
types::{
api::fraud_check as frm_api,
domain,
fraud_check::{
FraudCheckCheckoutData, FraudCheckResponseData, FraudCheckTransactionData, FrmRequest,
FrmResponse, FrmRouterData,
},
storage::{
enums::{FraudCheckStatus, FraudCheckType},
fraud_check::{FraudCheckNew, FraudCheckUpdate},
},
ResponseId,
},
SessionState,
};
/// Marker operation for the pre-authorization fraud check (checkout-time
/// screening before the payment is sent to the connector).
#[derive(Debug, Clone, Copy)]
pub struct FraudCheckPre;
/// Lets a shared reference to the operation be used wherever the trait is
/// expected; each accessor reborrows `self` as the corresponding role.
impl<F, D> FraudCheckOperation<F, D> for &FraudCheckPre
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F> + Send + Sync + Clone,
{
    fn to_get_tracker(&self) -> RouterResult<&(dyn GetTracker<PaymentToFrmData> + Send + Sync)> {
        Ok(*self)
    }
    fn to_domain(&self) -> RouterResult<&dyn Domain<F, D>> {
        Ok(*self)
    }
    fn to_update_tracker(&self) -> RouterResult<&(dyn UpdateTracker<FrmData, F, D> + Send + Sync)> {
        Ok(*self)
    }
}
/// The operation acts as its own tracker/domain/updater; each accessor
/// simply returns `self` coerced to the requested trait object.
impl<F, D> FraudCheckOperation<F, D> for FraudCheckPre
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F> + Send + Sync + Clone,
{
    fn to_get_tracker(&self) -> RouterResult<&(dyn GetTracker<PaymentToFrmData> + Send + Sync)> {
        Ok(self)
    }
    fn to_domain(&self) -> RouterResult<&dyn Domain<F, D>> {
        Ok(self)
    }
    fn to_update_tracker(&self) -> RouterResult<&(dyn UpdateTracker<FrmData, F, D> + Send + Sync)> {
        Ok(self)
    }
}
#[async_trait]
impl GetTracker<PaymentToFrmData> for FraudCheckPre {
    /// v2 trackers are not implemented yet.
    #[cfg(feature = "v2")]
    #[instrument(skip_all)]
    async fn get_trackers<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: PaymentToFrmData,
        frm_connector_details: ConnectorDetailsCore,
    ) -> RouterResult<Option<FrmData>> {
        todo!()
    }
    /// Loads the existing fraud-check row for this payment, or inserts a
    /// fresh `Pending` pre-FRM row (UUID frm_id) when none exists, then
    /// assembles the `FrmData` tracker from it.
    ///
    /// On insert failure this logs and returns `Ok(None)` so the fraud
    /// check is skipped instead of failing the payment.
    #[cfg(feature = "v1")]
    #[instrument(skip_all)]
    async fn get_trackers<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: PaymentToFrmData,
        frm_connector_details: ConnectorDetailsCore,
    ) -> RouterResult<Option<FrmData>> {
        let db = &*state.store;
        // Snapshot of payment details persisted alongside the check row.
        let payment_details: Option<serde_json::Value> = PaymentDetails::from(payment_data.clone())
            .encode_to_value()
            .ok();
        let existing_fraud_check = db
            .find_fraud_check_by_payment_id_if_present(
                payment_data.payment_intent.get_id().to_owned(),
                payment_data.merchant_account.get_id().clone(),
            )
            .await
            .ok();
        let fraud_check = match existing_fraud_check {
            Some(Some(fraud_check)) => Ok(fraud_check),
            _ => {
                db.insert_fraud_check_response(FraudCheckNew {
                    frm_id: Uuid::new_v4().simple().to_string(),
                    payment_id: payment_data.payment_intent.get_id().to_owned(),
                    merchant_id: payment_data.merchant_account.get_id().clone(),
                    attempt_id: payment_data.payment_attempt.attempt_id.clone(),
                    created_at: common_utils::date_time::now(),
                    frm_name: frm_connector_details.connector_name,
                    frm_transaction_id: None,
                    frm_transaction_type: FraudCheckType::PreFrm,
                    frm_status: FraudCheckStatus::Pending,
                    frm_score: None,
                    frm_reason: None,
                    frm_error: None,
                    payment_details,
                    metadata: None,
                    modified_at: common_utils::date_time::now(),
                    last_step: FraudCheckLastStep::Processing,
                    payment_capture_method: payment_data.payment_attempt.capture_method,
                })
                .await
            }
        };
        match fraud_check {
            Ok(fraud_check_value) => {
                let frm_data = FrmData {
                    payment_intent: payment_data.payment_intent,
                    payment_attempt: payment_data.payment_attempt,
                    merchant_account: payment_data.merchant_account,
                    address: payment_data.address,
                    fraud_check: fraud_check_value,
                    connector_details: payment_data.connector_details,
                    order_details: payment_data.order_details,
                    refund: None,
                    frm_metadata: payment_data.frm_metadata,
                };
                Ok(Some(frm_data))
            }
            Err(error) => {
                router_env::logger::error!("inserting into fraud_check table failed {error:?}");
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl<F, D> Domain<F, D> for FraudCheckPre
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F> + Send + Sync + Clone,
{
    /// v2 post-payment FRM is not implemented yet.
    #[cfg(feature = "v2")]
    #[instrument(skip_all)]
    async fn post_payment_frm<'a>(
        &'a self,
        _state: &'a SessionState,
        _req_state: ReqState,
        _payment_data: &mut D,
        _frm_data: &mut FrmData,
        _merchant_context: &domain::MerchantContext,
        _customer: &Option<domain::Customer>,
    ) -> RouterResult<Option<FrmRouterData>> {
        todo!()
    }
    /// Calls the FRM connector's `Transaction` flow after payment,
    /// advances `last_step` to `TransactionOrRecordRefund`, and returns the
    /// router data for the tracker update.
    #[cfg(feature = "v1")]
    #[instrument(skip_all)]
    async fn post_payment_frm<'a>(
        &'a self,
        state: &'a SessionState,
        _req_state: ReqState,
        payment_data: &mut D,
        frm_data: &mut FrmData,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
    ) -> RouterResult<Option<FrmRouterData>> {
        let router_data = frm_core::call_frm_service::<F, frm_api::Transaction, _, D>(
            state,
            payment_data,
            // The service gets a copy; `frm_data` itself is only advanced
            // to the next step below.
            &mut frm_data.to_owned(),
            merchant_context,
            customer,
        )
        .await?;
        frm_data.fraud_check.last_step = FraudCheckLastStep::TransactionOrRecordRefund;
        Ok(Some(FrmRouterData {
            merchant_id: router_data.merchant_id,
            connector: router_data.connector,
            payment_id: router_data.payment_id.clone(),
            attempt_id: router_data.attempt_id,
            request: FrmRequest::Transaction(FraudCheckTransactionData {
                amount: router_data.request.amount,
                order_details: router_data.request.order_details,
                currency: router_data.request.currency,
                payment_method: Some(router_data.payment_method),
                error_code: router_data.request.error_code,
                error_message: router_data.request.error_message,
                connector_transaction_id: router_data.request.connector_transaction_id,
                connector: router_data.request.connector,
            }),
            response: FrmResponse::Transaction(router_data.response),
        }))
    }
    /// Calls the FRM connector's `Checkout` flow before payment and
    /// advances `last_step` to `CheckoutOrSale`.
    async fn pre_payment_frm<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: &mut D,
        frm_data: &mut FrmData,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
    ) -> RouterResult<FrmRouterData> {
        let router_data = frm_core::call_frm_service::<F, frm_api::Checkout, _, D>(
            state,
            payment_data,
            &mut frm_data.to_owned(),
            merchant_context,
            customer,
        )
        .await?;
        frm_data.fraud_check.last_step = FraudCheckLastStep::CheckoutOrSale;
        Ok(FrmRouterData {
            merchant_id: router_data.merchant_id,
            connector: router_data.connector,
            payment_id: router_data.payment_id.clone(),
            attempt_id: router_data.attempt_id,
            request: FrmRequest::Checkout(Box::new(FraudCheckCheckoutData {
                amount: router_data.request.amount,
                order_details: router_data.request.order_details,
                currency: router_data.request.currency,
                browser_info: router_data.request.browser_info,
                payment_method_data: router_data.request.payment_method_data,
                email: router_data.request.email,
                gateway: router_data.request.gateway,
            })),
            response: FrmResponse::Checkout(router_data.response),
        })
    }
}
#[async_trait]
impl<F, D> UpdateTracker<FrmData, F, D> for FraudCheckPre
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F> + Send + Sync + Clone,
{
    /// Translates the FRM connector response into a `FraudCheckUpdate` and
    /// persists it on the fraud-check row.
    ///
    /// Checkout/Transaction responses map to a `ResponseUpdate` (or an
    /// `ErrorUpdate` on failure); fulfillment responses are ignored;
    /// Sale/Fulfillment/RecordReturn top-level responses are unexpected in
    /// this (pre) operation and recorded as errors.
    async fn update_tracker<'b>(
        &'b self,
        state: &SessionState,
        _key_store: &domain::MerchantKeyStore,
        mut frm_data: FrmData,
        payment_data: &mut D,
        _frm_suggestion: Option<FrmSuggestion>,
        frm_router_data: FrmRouterData,
    ) -> RouterResult<FrmData> {
        let frm_check_update = match frm_router_data.response {
            FrmResponse::Checkout(response) => match response {
                Err(err) => Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(err.message)),
                }),
                Ok(payments_response) => match payments_response {
                    FraudCheckResponseData::TransactionResponse {
                        resource_id,
                        connector_metadata,
                        status,
                        reason,
                        score,
                    } => {
                        let connector_transaction_id = match resource_id {
                            ResponseId::NoResponseId => None,
                            ResponseId::ConnectorTransactionId(id) => Some(id),
                            ResponseId::EncodedData(id) => Some(id),
                        };
                        let fraud_check_update = FraudCheckUpdate::ResponseUpdate {
                            frm_status: status,
                            frm_transaction_id: connector_transaction_id,
                            frm_reason: reason,
                            frm_score: score,
                            metadata: connector_metadata,
                            modified_at: common_utils::date_time::now(),
                            last_step: frm_data.fraud_check.last_step,
                            payment_capture_method: frm_data.fraud_check.payment_capture_method,
                        };
                        Some(fraud_check_update)
                    }
                    // Fulfillment carries nothing to persist at this stage.
                    FraudCheckResponseData::FulfillmentResponse {
                        order_id: _,
                        shipment_ids: _,
                    } => None,
                    FraudCheckResponseData::RecordReturnResponse {
                        resource_id: _,
                        connector_metadata: _,
                        return_id: _,
                    } => Some(FraudCheckUpdate::ErrorUpdate {
                        status: FraudCheckStatus::TransactionFailure,
                        error_message: Some(Some(
                            "Error: Got Record Return Response response in current Checkout flow"
                                .to_string(),
                        )),
                    }),
                },
            },
            FrmResponse::Transaction(response) => match response {
                Err(err) => Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(err.message)),
                }),
                Ok(payments_response) => match payments_response {
                    FraudCheckResponseData::TransactionResponse {
                        resource_id,
                        connector_metadata,
                        status,
                        reason,
                        score,
                    } => {
                        let connector_transaction_id = match resource_id {
                            ResponseId::NoResponseId => None,
                            ResponseId::ConnectorTransactionId(id) => Some(id),
                            ResponseId::EncodedData(id) => Some(id),
                        };
                        // A status already present on the session's FRM
                        // message takes precedence over the connector's.
                        let frm_status = payment_data
                            .get_frm_message()
                            .as_ref()
                            .map_or(status, |frm_data| frm_data.frm_status);
                        let fraud_check_update = FraudCheckUpdate::ResponseUpdate {
                            frm_status,
                            frm_transaction_id: connector_transaction_id,
                            frm_reason: reason,
                            frm_score: score,
                            metadata: connector_metadata,
                            modified_at: common_utils::date_time::now(),
                            last_step: frm_data.fraud_check.last_step,
                            payment_capture_method: None,
                        };
                        Some(fraud_check_update)
                    }
                    FraudCheckResponseData::FulfillmentResponse {
                        order_id: _,
                        shipment_ids: _,
                    } => None,
                    FraudCheckResponseData::RecordReturnResponse {
                        resource_id: _,
                        connector_metadata: _,
                        return_id: _,
                    } => Some(FraudCheckUpdate::ErrorUpdate {
                        status: FraudCheckStatus::TransactionFailure,
                        // NOTE(review): message says "Checkout flow" but this
                        // is the Transaction branch — looks copy-pasted;
                        // confirm the intended wording before changing it.
                        error_message: Some(Some(
                            "Error: Got Record Return Response response in current Checkout flow"
                                .to_string(),
                        )),
                    }),
                },
            },
            FrmResponse::Sale(_response)
            | FrmResponse::Fulfillment(_response)
            | FrmResponse::RecordReturn(_response) => Some(FraudCheckUpdate::ErrorUpdate {
                status: FraudCheckStatus::TransactionFailure,
                error_message: Some(Some(
                    "Error: Got Pre(Sale) flow response in current post flow".to_string(),
                )),
            }),
        };
        let db = &*state.store;
        frm_data.fraud_check = match frm_check_update {
            Some(fraud_check_update) => db
                .update_fraud_check_response_with_attempt_id(
                    frm_data.clone().fraud_check,
                    fraud_check_update,
                )
                .await
                .map_err(|error| error.change_context(errors::ApiErrorResponse::PaymentNotFound))?,
            None => frm_data.clone().fraud_check,
        };
        Ok(frm_data)
    }
}
// File: crates/router/src/core/fraud_check/operation/fraud_check_post.rs
use async_trait::async_trait;
use common_enums::{CaptureMethod, FrmSuggestion};
use common_utils::ext_traits::Encode;
use hyperswitch_domain_models::payments::{
payment_attempt::PaymentAttemptUpdate, payment_intent::PaymentIntentUpdate, HeaderPayload,
};
use router_env::{instrument, logger, tracing};
use super::{Domain, FraudCheckOperation, GetTracker, UpdateTracker};
use crate::{
consts,
core::{
errors::{RouterResult, StorageErrorExt},
fraud_check::{
self as frm_core,
types::{FrmData, PaymentDetails, PaymentToFrmData, CANCEL_INITIATED},
ConnectorDetailsCore, FrmConfigsObject,
},
payments,
},
errors,
routes::app::ReqState,
services::{self, api},
types::{
api::{
enums::{AttemptStatus, IntentStatus},
fraud_check as frm_api, payments as payment_types, Capture, Void,
},
domain,
fraud_check::{
FraudCheckResponseData, FraudCheckSaleData, FrmRequest, FrmResponse, FrmRouterData,
},
storage::{
enums::{FraudCheckLastStep, FraudCheckStatus, FraudCheckType, MerchantDecision},
fraud_check::{FraudCheckNew, FraudCheckUpdate},
},
ResponseId,
},
utils, SessionState,
};
/// Marker operation for the post-authorization fraud check (screening
/// after the connector call, with capture/void follow-up).
#[derive(Debug, Clone, Copy)]
pub struct FraudCheckPost;
/// Lets a shared reference to the operation be used wherever the trait is
/// expected; each accessor reborrows `self` as the corresponding role.
impl<F, D> FraudCheckOperation<F, D> for &FraudCheckPost
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F>
        + payments::OperationSessionSetters<F>
        + Send
        + Sync
        + Clone,
{
    fn to_get_tracker(&self) -> RouterResult<&(dyn GetTracker<PaymentToFrmData> + Send + Sync)> {
        Ok(*self)
    }
    fn to_domain(&self) -> RouterResult<&dyn Domain<F, D>> {
        Ok(*self)
    }
    fn to_update_tracker(&self) -> RouterResult<&(dyn UpdateTracker<FrmData, F, D> + Send + Sync)> {
        Ok(*self)
    }
}
/// The operation acts as its own tracker/domain/updater; each accessor
/// returns `self` coerced to the requested trait object.
impl<F, D> FraudCheckOperation<F, D> for FraudCheckPost
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F>
        + payments::OperationSessionSetters<F>
        + Send
        + Sync
        + Clone,
{
    fn to_get_tracker(&self) -> RouterResult<&(dyn GetTracker<PaymentToFrmData> + Send + Sync)> {
        Ok(self)
    }
    fn to_domain(&self) -> RouterResult<&dyn Domain<F, D>> {
        Ok(self)
    }
    fn to_update_tracker(&self) -> RouterResult<&(dyn UpdateTracker<FrmData, F, D> + Send + Sync)> {
        Ok(self)
    }
}
#[async_trait]
impl GetTracker<PaymentToFrmData> for FraudCheckPost {
    /// v2 trackers are not implemented yet.
    #[cfg(feature = "v2")]
    async fn get_trackers<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: PaymentToFrmData,
        frm_connector_details: ConnectorDetailsCore,
    ) -> RouterResult<Option<FrmData>> {
        todo!()
    }
    /// Loads the existing fraud-check row for this payment, or inserts a
    /// fresh `Pending` post-FRM row (prefixed "frm" id, unlike the UUID
    /// used by the pre operation), then assembles the `FrmData` tracker.
    ///
    /// On insert failure this logs and returns `Ok(None)` so the fraud
    /// check is skipped instead of failing the payment.
    #[cfg(feature = "v1")]
    #[instrument(skip_all)]
    async fn get_trackers<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: PaymentToFrmData,
        frm_connector_details: ConnectorDetailsCore,
    ) -> RouterResult<Option<FrmData>> {
        let db = &*state.store;
        // Snapshot of payment details persisted alongside the check row.
        let payment_details: Option<serde_json::Value> = PaymentDetails::from(payment_data.clone())
            .encode_to_value()
            .ok();
        let existing_fraud_check = db
            .find_fraud_check_by_payment_id_if_present(
                payment_data.payment_intent.get_id().to_owned(),
                payment_data.merchant_account.get_id().clone(),
            )
            .await
            .ok();
        let fraud_check = match existing_fraud_check {
            Some(Some(fraud_check)) => Ok(fraud_check),
            _ => {
                db.insert_fraud_check_response(FraudCheckNew {
                    frm_id: utils::generate_id(consts::ID_LENGTH, "frm"),
                    payment_id: payment_data.payment_intent.get_id().to_owned(),
                    merchant_id: payment_data.merchant_account.get_id().clone(),
                    attempt_id: payment_data.payment_attempt.attempt_id.clone(),
                    created_at: common_utils::date_time::now(),
                    frm_name: frm_connector_details.connector_name,
                    frm_transaction_id: None,
                    frm_transaction_type: FraudCheckType::PostFrm,
                    frm_status: FraudCheckStatus::Pending,
                    frm_score: None,
                    frm_reason: None,
                    frm_error: None,
                    payment_details,
                    metadata: None,
                    modified_at: common_utils::date_time::now(),
                    last_step: FraudCheckLastStep::Processing,
                    payment_capture_method: payment_data.payment_attempt.capture_method,
                })
                .await
            }
        };
        match fraud_check {
            Ok(fraud_check_value) => {
                let frm_data = FrmData {
                    payment_intent: payment_data.payment_intent,
                    payment_attempt: payment_data.payment_attempt,
                    merchant_account: payment_data.merchant_account,
                    address: payment_data.address,
                    fraud_check: fraud_check_value,
                    connector_details: payment_data.connector_details,
                    order_details: payment_data.order_details,
                    refund: None,
                    frm_metadata: payment_data.frm_metadata,
                };
                Ok(Some(frm_data))
            }
            Err(error) => {
                router_env::logger::error!("inserting into fraud_check table failed {error:?}");
                Ok(None)
            }
        }
    }
}
#[async_trait]
impl<F, D> Domain<F, D> for FraudCheckPost
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F>
        + payments::OperationSessionSetters<F>
        + Send
        + Sync
        + Clone,
{
    /// Performs the post-payment `Sale` call against the FRM connector and
    /// advances the tracker to the `CheckoutOrSale` step.
    ///
    /// Returns `Ok(None)` (skipped) unless the tracker is still at the
    /// `Processing` step, so the Sale call runs at most once per payment.
    #[instrument(skip_all)]
    async fn post_payment_frm<'a>(
        &'a self,
        state: &'a SessionState,
        _req_state: ReqState,
        payment_data: &mut D,
        frm_data: &mut FrmData,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
    ) -> RouterResult<Option<FrmRouterData>> {
        if frm_data.fraud_check.last_step != FraudCheckLastStep::Processing {
            logger::debug!("post_flow::Sale Skipped");
            return Ok(None);
        }
        let router_data = frm_core::call_frm_service::<F, frm_api::Sale, _, D>(
            state,
            payment_data,
            &mut frm_data.to_owned(),
            merchant_context,
            customer,
        )
        .await?;
        frm_data.fraud_check.last_step = FraudCheckLastStep::CheckoutOrSale;
        Ok(Some(FrmRouterData {
            merchant_id: router_data.merchant_id,
            connector: router_data.connector,
            payment_id: router_data.payment_id.clone(),
            attempt_id: router_data.attempt_id,
            request: FrmRequest::Sale(FraudCheckSaleData {
                amount: router_data.request.amount,
                order_details: router_data.request.order_details,
                currency: router_data.request.currency,
                email: router_data.request.email,
            }),
            response: FrmResponse::Sale(router_data.response),
        }))
    }
    /// API v2 variant — not yet implemented.
    #[cfg(feature = "v2")]
    #[instrument(skip_all)]
    async fn execute_post_tasks(
        &self,
        _state: &SessionState,
        _req_state: ReqState,
        _frm_data: &mut FrmData,
        _merchant_context: &domain::MerchantContext,
        _frm_configs: FrmConfigsObject,
        _frm_suggestion: &mut Option<FrmSuggestion>,
        _payment_data: &mut D,
        _customer: &Option<domain::Customer>,
        _should_continue_capture: &mut bool,
    ) -> RouterResult<Option<FrmData>> {
        todo!()
    }
    /// Applies the FRM verdict after the connector call:
    /// - `Fraud` (while still at `CheckoutOrSale`): suggest cancellation,
    ///   void the payment via the payments core, report a `RecordReturn` to
    ///   the FRM connector, and advance the tracker step.
    /// - `ManualReview`: surface an `FrmManualReview` suggestion only.
    /// - `Legit` with an automatic capture method: capture the payment that
    ///   was held pending the FRM decision.
    #[cfg(feature = "v1")]
    #[instrument(skip_all)]
    async fn execute_post_tasks(
        &self,
        state: &SessionState,
        req_state: ReqState,
        frm_data: &mut FrmData,
        merchant_context: &domain::MerchantContext,
        _frm_configs: FrmConfigsObject,
        frm_suggestion: &mut Option<FrmSuggestion>,
        payment_data: &mut D,
        customer: &Option<domain::Customer>,
        _should_continue_capture: &mut bool,
    ) -> RouterResult<Option<FrmData>> {
        if matches!(frm_data.fraud_check.frm_status, FraudCheckStatus::Fraud)
            && matches!(
                frm_data.fraud_check.last_step,
                FraudCheckLastStep::CheckoutOrSale
            )
        {
            *frm_suggestion = Some(FrmSuggestion::FrmCancelTransaction);
            // Void the fraudulent payment through the regular payments core.
            let cancel_req = api_models::payments::PaymentsCancelRequest {
                payment_id: frm_data.payment_intent.get_id().to_owned(),
                cancellation_reason: frm_data.fraud_check.frm_error.clone(),
                merchant_connector_details: None,
            };
            let cancel_res = Box::pin(payments::payments_core::<
                Void,
                payment_types::PaymentsResponse,
                _,
                _,
                _,
                payments::PaymentData<Void>,
            >(
                state.clone(),
                req_state.clone(),
                merchant_context.clone(),
                None,
                payments::PaymentCancel,
                cancel_req,
                api::AuthFlow::Merchant,
                payments::CallConnectorAction::Trigger,
                None,
                HeaderPayload::default(),
            ))
            .await?;
            logger::debug!("payment_id : {:?} has been cancelled since it has been found fraudulent by configured frm connector",payment_data.get_payment_attempt().payment_id);
            if let services::ApplicationResponse::JsonWithHeaders((payments_response, _)) =
                cancel_res
            {
                payment_data.set_payment_intent_status(payments_response.status);
            }
            // Report the cancellation back to the FRM connector; the response
            // itself is not inspected here.
            let _router_data = frm_core::call_frm_service::<F, frm_api::RecordReturn, _, D>(
                state,
                payment_data,
                &mut frm_data.to_owned(),
                merchant_context,
                customer,
            )
            .await?;
            frm_data.fraud_check.last_step = FraudCheckLastStep::TransactionOrRecordRefund;
        } else if matches!(
            frm_data.fraud_check.frm_status,
            FraudCheckStatus::ManualReview
        ) {
            *frm_suggestion = Some(FrmSuggestion::FrmManualReview);
        } else if matches!(frm_data.fraud_check.frm_status, FraudCheckStatus::Legit)
            && matches!(
                frm_data.fraud_check.payment_capture_method,
                Some(CaptureMethod::Automatic) | Some(CaptureMethod::SequentialAutomatic)
            )
        {
            // The payment was held for the FRM verdict; capture it now that
            // the transaction is deemed legitimate.
            let capture_request = api_models::payments::PaymentsCaptureRequest {
                payment_id: frm_data.payment_intent.get_id().to_owned(),
                merchant_id: None,
                amount_to_capture: None,
                refund_uncaptured_amount: None,
                statement_descriptor_suffix: None,
                statement_descriptor_prefix: None,
                merchant_connector_details: None,
            };
            let capture_response = Box::pin(payments::payments_core::<
                Capture,
                payment_types::PaymentsResponse,
                _,
                _,
                _,
                payments::PaymentData<Capture>,
            >(
                state.clone(),
                req_state.clone(),
                merchant_context.clone(),
                None,
                payments::PaymentCapture,
                capture_request,
                api::AuthFlow::Merchant,
                payments::CallConnectorAction::Trigger,
                None,
                HeaderPayload::default(),
            ))
            .await?;
            logger::debug!("payment_id : {:?} has been captured since it has been found legit by configured frm connector",payment_data.get_payment_attempt().payment_id);
            if let services::ApplicationResponse::JsonWithHeaders((payments_response, _)) =
                capture_response
            {
                payment_data.set_payment_intent_status(payments_response.status);
            }
        };
        return Ok(Some(frm_data.to_owned()));
    }
    /// Performs a pre-payment `Sale` call against the FRM connector and wraps
    /// the request/response pair into an [`FrmRouterData`]. Unlike
    /// [`Self::post_payment_frm`] this has no last-step guard and does not
    /// advance the tracker.
    #[instrument(skip_all)]
    async fn pre_payment_frm<'a>(
        &'a self,
        state: &'a SessionState,
        payment_data: &mut D,
        frm_data: &mut FrmData,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
    ) -> RouterResult<FrmRouterData> {
        let router_data = frm_core::call_frm_service::<F, frm_api::Sale, _, D>(
            state,
            payment_data,
            &mut frm_data.to_owned(),
            merchant_context,
            customer,
        )
        .await?;
        Ok(FrmRouterData {
            merchant_id: router_data.merchant_id,
            connector: router_data.connector,
            payment_id: router_data.payment_id,
            attempt_id: router_data.attempt_id,
            request: FrmRequest::Sale(FraudCheckSaleData {
                amount: router_data.request.amount,
                order_details: router_data.request.order_details,
                currency: router_data.request.currency,
                email: router_data.request.email,
            }),
            response: FrmResponse::Sale(router_data.response),
        })
    }
}
#[async_trait]
impl<F, D> UpdateTracker<FrmData, F, D> for FraudCheckPost
where
    F: Clone + Send,
    D: payments::OperationSessionGetters<F>
        + payments::OperationSessionSetters<F>
        + Send
        + Sync
        + Clone,
{
    /// API v2 variant — not yet implemented.
    #[cfg(feature = "v2")]
    async fn update_tracker<'b>(
        &'b self,
        state: &SessionState,
        key_store: &domain::MerchantKeyStore,
        mut frm_data: FrmData,
        payment_data: &mut D,
        frm_suggestion: Option<FrmSuggestion>,
        frm_router_data: FrmRouterData,
    ) -> RouterResult<FrmData> {
        todo!()
    }
    /// Persists the outcome of a post-flow FRM call:
    /// 1. Maps `frm_router_data.response` onto a [`FraudCheckUpdate`]
    ///    (responses that don't belong to the post flow become
    ///    `TransactionFailure` error updates; fulfillment responses are
    ///    ignored).
    /// 2. If a suggestion was produced, rewrites the payment attempt and
    ///    payment intent statuses (cancel / manual review / authorize).
    /// 3. Writes the fraud-check update to storage and returns the refreshed
    ///    `FrmData`.
    #[cfg(feature = "v1")]
    async fn update_tracker<'b>(
        &'b self,
        state: &SessionState,
        key_store: &domain::MerchantKeyStore,
        mut frm_data: FrmData,
        payment_data: &mut D,
        frm_suggestion: Option<FrmSuggestion>,
        frm_router_data: FrmRouterData,
    ) -> RouterResult<FrmData> {
        let db = &*state.store;
        let key_manager_state = &state.into();
        let frm_check_update = match frm_router_data.response {
            // Sale is the expected post-flow response.
            FrmResponse::Sale(response) => match response {
                Err(err) => Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(err.message)),
                }),
                Ok(payments_response) => match payments_response {
                    FraudCheckResponseData::TransactionResponse {
                        resource_id,
                        connector_metadata,
                        status,
                        reason,
                        score,
                    } => {
                        let connector_transaction_id = match resource_id {
                            ResponseId::NoResponseId => None,
                            ResponseId::ConnectorTransactionId(id) => Some(id),
                            ResponseId::EncodedData(id) => Some(id),
                        };
                        let fraud_check_update = FraudCheckUpdate::ResponseUpdate {
                            frm_status: status,
                            frm_transaction_id: connector_transaction_id,
                            frm_reason: reason,
                            frm_score: score,
                            metadata: connector_metadata,
                            modified_at: common_utils::date_time::now(),
                            last_step: frm_data.fraud_check.last_step,
                            payment_capture_method: frm_data.fraud_check.payment_capture_method,
                        };
                        Some(fraud_check_update)
                    },
                    FraudCheckResponseData::RecordReturnResponse { resource_id: _, connector_metadata: _, return_id: _ } => {
                        Some(FraudCheckUpdate::ErrorUpdate {
                            status: FraudCheckStatus::TransactionFailure,
                            error_message: Some(Some(
                                "Error: Got Record Return Response response in current Sale flow".to_string(),
                            )),
                        })
                    }
                    FraudCheckResponseData::FulfillmentResponse {
                        order_id: _,
                        shipment_ids: _,
                    } => None,
                },
            },
            FrmResponse::Fulfillment(response) => match response {
                Err(err) => Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(err.message)),
                }),
                Ok(payments_response) => match payments_response {
                    FraudCheckResponseData::TransactionResponse {
                        resource_id,
                        connector_metadata,
                        status,
                        reason,
                        score,
                    } => {
                        let connector_transaction_id = match resource_id {
                            ResponseId::NoResponseId => None,
                            ResponseId::ConnectorTransactionId(id) => Some(id),
                            ResponseId::EncodedData(id) => Some(id),
                        };
                        let fraud_check_update = FraudCheckUpdate::ResponseUpdate {
                            frm_status: status,
                            frm_transaction_id: connector_transaction_id,
                            frm_reason: reason,
                            frm_score: score,
                            metadata: connector_metadata,
                            modified_at: common_utils::date_time::now(),
                            last_step: frm_data.fraud_check.last_step,
                            payment_capture_method: frm_data.fraud_check.payment_capture_method,
                        };
                        Some(fraud_check_update)
                    }
                    FraudCheckResponseData::FulfillmentResponse {
                        order_id: _,
                        shipment_ids: _,
                    } => None,
                    FraudCheckResponseData::RecordReturnResponse { resource_id: _, connector_metadata: _, return_id: _ } => None,
                },
            },
            FrmResponse::RecordReturn(response) => match response {
                Err(err) => Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(err.message)),
                }),
                Ok(payments_response) => match payments_response {
                    FraudCheckResponseData::TransactionResponse {
                        resource_id: _,
                        connector_metadata: _,
                        status: _,
                        reason: _,
                        score: _,
                    } => {
                        Some(FraudCheckUpdate::ErrorUpdate {
                            status: FraudCheckStatus::TransactionFailure,
                            error_message: Some(Some(
                                "Error: Got Transaction Response response in current Record Return flow".to_string(),
                            )),
                        })
                    },
                    FraudCheckResponseData::FulfillmentResponse {order_id: _, shipment_ids: _ } => {
                        None
                    },
                    // Record-return keeps the existing verdict fields and only
                    // refreshes the transaction id / metadata from the response.
                    FraudCheckResponseData::RecordReturnResponse { resource_id, connector_metadata, return_id: _ } => {
                        let connector_transaction_id = match resource_id {
                            ResponseId::NoResponseId => None,
                            ResponseId::ConnectorTransactionId(id) => Some(id),
                            ResponseId::EncodedData(id) => Some(id),
                        };
                        let fraud_check_update = FraudCheckUpdate::ResponseUpdate {
                            frm_status: frm_data.fraud_check.frm_status,
                            frm_transaction_id: connector_transaction_id,
                            frm_reason: frm_data.fraud_check.frm_reason.clone(),
                            frm_score: frm_data.fraud_check.frm_score,
                            metadata: connector_metadata,
                            modified_at: common_utils::date_time::now(),
                            last_step: frm_data.fraud_check.last_step,
                            payment_capture_method: frm_data.fraud_check.payment_capture_method,
                        };
                        Some(fraud_check_update)
                    }
                },
            },
            // Pre-flow responses are invalid here; record them as failures.
            FrmResponse::Checkout(_) | FrmResponse::Transaction(_) => {
                Some(FraudCheckUpdate::ErrorUpdate {
                    status: FraudCheckStatus::TransactionFailure,
                    error_message: Some(Some(
                        "Error: Got Pre(Sale) flow response in current post flow".to_string(),
                    )),
                })
            }
        };
        if let Some(frm_suggestion) = frm_suggestion {
            // Translate the suggestion into attempt/intent status targets.
            let (payment_attempt_status, payment_intent_status, merchant_decision, error_message) =
                match frm_suggestion {
                    FrmSuggestion::FrmCancelTransaction => (
                        AttemptStatus::Failure,
                        IntentStatus::Failed,
                        Some(MerchantDecision::Rejected.to_string()),
                        Some(Some(CANCEL_INITIATED.to_string())),
                    ),
                    FrmSuggestion::FrmManualReview => (
                        AttemptStatus::Unresolved,
                        IntentStatus::RequiresMerchantAction,
                        None,
                        None,
                    ),
                    FrmSuggestion::FrmAuthorizeTransaction => (
                        AttemptStatus::Authorized,
                        IntentStatus::RequiresCapture,
                        None,
                        None,
                    ),
                };
            let payment_attempt_update = PaymentAttemptUpdate::RejectUpdate {
                status: payment_attempt_status,
                error_code: Some(Some(frm_data.fraud_check.frm_status.to_string())),
                error_message,
                updated_by: frm_data.merchant_account.storage_scheme.to_string(),
            };
            #[cfg(feature = "v1")]
            let payment_attempt = db
                .update_payment_attempt_with_attempt_id(
                    payment_data.get_payment_attempt().clone(),
                    payment_attempt_update,
                    frm_data.merchant_account.storage_scheme,
                )
                .await
                .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)?;
            #[cfg(feature = "v2")]
            let payment_attempt = db
                .update_payment_attempt_with_attempt_id(
                    key_manager_state,
                    key_store,
                    payment_data.get_payment_attempt().clone(),
                    payment_attempt_update,
                    frm_data.merchant_account.storage_scheme,
                )
                .await
                .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)?;
            payment_data.set_payment_attempt(payment_attempt);
            let payment_intent = db
                .update_payment_intent(
                    key_manager_state,
                    payment_data.get_payment_intent().clone(),
                    PaymentIntentUpdate::RejectUpdate {
                        status: payment_intent_status,
                        merchant_decision,
                        updated_by: frm_data.merchant_account.storage_scheme.to_string(),
                    },
                    key_store,
                    frm_data.merchant_account.storage_scheme,
                )
                .await
                .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)?;
            payment_data.set_payment_intent(payment_intent);
        }
        // Persist the computed update (if any); otherwise keep the row as-is.
        frm_data.fraud_check = match frm_check_update {
            Some(fraud_check_update) => db
                .update_fraud_check_response_with_attempt_id(
                    frm_data.fraud_check.clone(),
                    fraud_check_update,
                )
                .await
                .map_err(|error| error.change_context(errors::ApiErrorResponse::PaymentNotFound))?,
            None => frm_data.fraud_check.clone(),
        };
        Ok(frm_data)
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/fraud_check/operation/fraud_check_pre.rs",
"crates/router/src/core/fraud_check/operation/fraud_check_post.rs"
],
"module": "crates/router/src/core/fraud_check/operation",
"num_files": 2,
"token_count": 7607
}
|
module_4200152465959617123
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/fraud_check/flows
Files: 5
</path>
<module>
// File: crates/router/src/core/fraud_check/flows/checkout_flow.rs
use async_trait::async_trait;
use common_utils::{ext_traits::ValueExt, pii::Email};
use error_stack::ResultExt;
use masking::ExposeInterface;
use super::{ConstructFlowSpecificData, FeatureFrm};
use crate::{
core::{
errors::{ConnectorErrorExt, RouterResult},
fraud_check::types::FrmData,
payments::{self, helpers},
},
errors, services,
types::{
api::fraud_check::{self as frm_api, FraudCheckConnectorData},
domain,
fraud_check::{FraudCheckCheckoutData, FraudCheckResponseData, FrmCheckoutRouterData},
storage::enums as storage_enums,
BrowserInformation, ConnectorAuthType, MerchantRecipientData, ResponseId, RouterData,
},
SessionState,
};
#[async_trait]
impl ConstructFlowSpecificData<frm_api::Checkout, FraudCheckCheckoutData, FraudCheckResponseData>
    for FrmData
{
    /// API v2 variant — not yet implemented.
    #[cfg(feature = "v2")]
    async fn construct_router_data<'a>(
        &self,
        _state: &SessionState,
        _connector_id: &str,
        _merchant_context: &domain::MerchantContext,
        _customer: &Option<domain::Customer>,
        _merchant_connector_account: &domain::MerchantConnectorAccountTypeDetails,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        _header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
    ) -> RouterResult<RouterData<frm_api::Checkout, FraudCheckCheckoutData, FraudCheckResponseData>>
    {
        todo!()
    }
    /// Builds the [`RouterData`] for the FRM `Checkout` call: parses the
    /// connector auth details from the merchant connector account, gathers
    /// amount / order / browser / additional-payment-method data from the
    /// stored attempt, and seeds the response with a placeholder `Pending`
    /// transaction result to be overwritten by the connector call.
    ///
    /// # Errors
    /// - `MerchantConnectorAccountNotFound` if the auth details fail to parse.
    /// - `PaymentMethodNotFound` if the attempt has no payment method.
    /// - `InvalidDataValue` if the customer e-mail cannot be converted.
    #[cfg(feature = "v1")]
    async fn construct_router_data<'a>(
        &self,
        state: &SessionState,
        connector_id: &str,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
        merchant_connector_account: &helpers::MerchantConnectorAccountType,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
        _payment_method: Option<common_enums::PaymentMethod>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
    ) -> RouterResult<RouterData<frm_api::Checkout, FraudCheckCheckoutData, FraudCheckResponseData>>
    {
        use crate::connector::utils::PaymentsAttemptData;
        let status = storage_enums::AttemptStatus::Pending;
        let auth_type: ConnectorAuthType = merchant_connector_account
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
                id: "ConnectorAuthType".to_string(),
            })?;
        // Browser info is optional; parse failures are silently dropped.
        let browser_info: Option<BrowserInformation> = self.payment_attempt.get_browser_info().ok();
        let customer_id = customer.to_owned().map(|customer| customer.customer_id);
        let router_data = RouterData {
            flow: std::marker::PhantomData,
            merchant_id: merchant_context.get_merchant_account().get_id().clone(),
            customer_id,
            tenant_id: state.tenant.tenant_id.clone(),
            connector: connector_id.to_string(),
            payment_id: self.payment_intent.payment_id.get_string_repr().to_owned(),
            attempt_id: self.payment_attempt.attempt_id.clone(),
            status,
            payment_method: self
                .payment_attempt
                .payment_method
                .ok_or(errors::ApiErrorResponse::PaymentMethodNotFound)?,
            payment_method_type: self.payment_attempt.payment_method_type,
            connector_auth_type: auth_type,
            description: None,
            payment_method_status: None,
            address: self.address.clone(),
            auth_type: storage_enums::AuthenticationType::NoThreeDs,
            connector_meta_data: None,
            connector_wallets_details: None,
            amount_captured: None,
            minor_amount_captured: None,
            request: FraudCheckCheckoutData {
                amount: self
                    .payment_attempt
                    .net_amount
                    .get_total_amount()
                    .get_amount_as_i64(),
                order_details: self.order_details.clone(),
                currency: self.payment_attempt.currency,
                browser_info,
                payment_method_data: self
                    .payment_attempt
                    .payment_method_data
                    .as_ref()
                    .map(|pm_data| {
                        pm_data
                            .clone()
                            .parse_value::<api_models::payments::AdditionalPaymentData>(
                                "AdditionalPaymentData",
                            )
                    })
                    .transpose()
                    .unwrap_or_default(),
                email: customer
                    .clone()
                    .and_then(|customer_data| {
                        customer_data
                            .email
                            .map(|email| Email::try_from(email.into_inner().expose()))
                    })
                    .transpose()
                    .change_context(errors::ApiErrorResponse::InvalidDataValue {
                        field_name: "customer.customer_data.email",
                    })?,
                gateway: self.payment_attempt.connector.clone(),
            }, // self.order_details
            // Placeholder; replaced by the actual connector response.
            response: Ok(FraudCheckResponseData::TransactionResponse {
                resource_id: ResponseId::ConnectorTransactionId("".to_string()),
                connector_metadata: None,
                status: storage_enums::FraudCheckStatus::Pending,
                score: None,
                reason: None,
            }),
            access_token: None,
            session_token: None,
            reference_id: None,
            payment_method_token: None,
            connector_customer: None,
            preprocessing_id: None,
            connector_request_reference_id: uuid::Uuid::new_v4().to_string(),
            test_mode: None,
            recurring_mandate_payment_data: None,
            #[cfg(feature = "payouts")]
            payout_method_data: None,
            #[cfg(feature = "payouts")]
            quote_id: None,
            payment_method_balance: None,
            connector_http_status_code: None,
            external_latency: None,
            connector_api_version: None,
            apple_pay_flow: None,
            frm_metadata: self.frm_metadata.clone(),
            refund_id: None,
            dispute_id: None,
            connector_response: None,
            integrity_check: Ok(()),
            additional_merchant_data: None,
            header_payload,
            connector_mandate_request_reference_id: None,
            authentication_id: None,
            psd2_sca_exemption_type: None,
            raw_connector_response: None,
            is_payment_id_from_merchant: None,
            l2_l3_data: None,
            minor_amount_capturable: None,
            authorized_amount: None,
        };
        Ok(router_data)
    }
}
#[async_trait]
impl FeatureFrm<frm_api::Checkout, FraudCheckCheckoutData> for FrmCheckoutRouterData {
    /// Thin adapter: forwards to the free function [`decide_frm_flow`],
    /// passing `self` by mutable reference and returning the updated data.
    async fn decide_frm_flows<'a>(
        mut self,
        state: &SessionState,
        connector: &FraudCheckConnectorData,
        call_connector_action: payments::CallConnectorAction,
        merchant_context: &domain::MerchantContext,
    ) -> RouterResult<Self> {
        decide_frm_flow(
            &mut self,
            state,
            connector,
            call_connector_action,
            merchant_context,
        )
        .await
    }
}
/// Executes the `Checkout` fraud-check step against the configured FRM
/// connector.
///
/// Resolves the connector's boxed `Checkout` integration, runs the generic
/// connector processing step with the requested action, and converts any
/// connector-level failure into a payment-failed response.
pub async fn decide_frm_flow(
    router_data: &mut FrmCheckoutRouterData,
    state: &SessionState,
    connector: &FraudCheckConnectorData,
    call_connector_action: payments::CallConnectorAction,
    _merchant_context: &domain::MerchantContext,
) -> RouterResult<FrmCheckoutRouterData> {
    // Boxed integration object for the Checkout flow of this connector.
    let integration: services::BoxedFrmConnectorIntegrationInterface<
        frm_api::Checkout,
        FraudCheckCheckoutData,
        FraudCheckResponseData,
    > = connector.connector.get_connector_integration();
    let updated_router_data = services::execute_connector_processing_step(
        state,
        integration,
        router_data,
        call_connector_action,
        None,
        None,
    )
    .await
    .to_payment_failed_response()?;
    Ok(updated_router_data)
}
// File: crates/router/src/core/fraud_check/flows/transaction_flow.rs
use async_trait::async_trait;
use common_utils::ext_traits::ValueExt;
use error_stack::ResultExt;
use crate::{
core::{
errors::{ConnectorErrorExt, RouterResult},
fraud_check::{FeatureFrm, FrmData},
payments::{self, flows::ConstructFlowSpecificData, helpers},
},
errors, services,
types::{
api::fraud_check as frm_api,
domain,
fraud_check::{
FraudCheckResponseData, FraudCheckTransactionData, FrmTransactionRouterData,
},
storage::enums as storage_enums,
ConnectorAuthType, MerchantRecipientData, ResponseId, RouterData,
},
SessionState,
};
#[async_trait]
impl
    ConstructFlowSpecificData<
        frm_api::Transaction,
        FraudCheckTransactionData,
        FraudCheckResponseData,
    > for FrmData
{
    /// API v2 variant — not yet implemented.
    #[cfg(feature = "v2")]
    async fn construct_router_data<'a>(
        &self,
        _state: &SessionState,
        _connector_id: &str,
        _merchant_context: &domain::MerchantContext,
        _customer: &Option<domain::Customer>,
        _merchant_connector_account: &domain::MerchantConnectorAccountTypeDetails,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        _header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
    ) -> RouterResult<
        RouterData<frm_api::Transaction, FraudCheckTransactionData, FraudCheckResponseData>,
    > {
        todo!()
    }
    /// Builds the [`RouterData`] for the FRM `Transaction` call: parses the
    /// connector auth details, copies amount / order / error / connector
    /// transaction details from the stored attempt, and seeds the response
    /// with a placeholder `Pending` transaction result.
    ///
    /// # Errors
    /// - `MerchantConnectorAccountNotFound` if the auth details fail to parse.
    /// - `PaymentMethodNotFound` if the attempt has no payment method.
    #[cfg(feature = "v1")]
    async fn construct_router_data<'a>(
        &self,
        state: &SessionState,
        connector_id: &str,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
        merchant_connector_account: &helpers::MerchantConnectorAccountType,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
        _payment_method: Option<common_enums::PaymentMethod>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
    ) -> RouterResult<
        RouterData<frm_api::Transaction, FraudCheckTransactionData, FraudCheckResponseData>,
    > {
        let status = storage_enums::AttemptStatus::Pending;
        let auth_type: ConnectorAuthType = merchant_connector_account
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
                id: "ConnectorAuthType".to_string(),
            })?;
        let customer_id = customer.to_owned().map(|customer| customer.customer_id);
        let payment_method = self.payment_attempt.payment_method;
        let currency = self.payment_attempt.currency;
        let router_data = RouterData {
            flow: std::marker::PhantomData,
            merchant_id: merchant_context.get_merchant_account().get_id().clone(),
            tenant_id: state.tenant.tenant_id.clone(),
            customer_id,
            connector: connector_id.to_string(),
            payment_id: self.payment_intent.payment_id.get_string_repr().to_owned(),
            attempt_id: self.payment_attempt.attempt_id.clone(),
            status,
            payment_method: self
                .payment_attempt
                .payment_method
                .ok_or(errors::ApiErrorResponse::PaymentMethodNotFound)?,
            payment_method_type: self.payment_attempt.payment_method_type,
            connector_auth_type: auth_type,
            description: None,
            address: self.address.clone(),
            auth_type: storage_enums::AuthenticationType::NoThreeDs,
            connector_meta_data: None,
            connector_wallets_details: None,
            amount_captured: None,
            minor_amount_captured: None,
            request: FraudCheckTransactionData {
                amount: self
                    .payment_attempt
                    .net_amount
                    .get_total_amount()
                    .get_amount_as_i64(),
                order_details: self.order_details.clone(),
                currency,
                payment_method,
                error_code: self.payment_attempt.error_code.clone(),
                error_message: self.payment_attempt.error_message.clone(),
                connector_transaction_id: self
                    .payment_attempt
                    .get_connector_payment_id()
                    .map(ToString::to_string),
                connector: self.payment_attempt.connector.clone(),
            }, // self.order_details
            // Placeholder; replaced by the actual connector response.
            response: Ok(FraudCheckResponseData::TransactionResponse {
                resource_id: ResponseId::ConnectorTransactionId("".to_string()),
                connector_metadata: None,
                status: storage_enums::FraudCheckStatus::Pending,
                score: None,
                reason: None,
            }),
            access_token: None,
            session_token: None,
            reference_id: None,
            payment_method_token: None,
            connector_customer: None,
            preprocessing_id: None,
            connector_request_reference_id: uuid::Uuid::new_v4().to_string(),
            test_mode: None,
            recurring_mandate_payment_data: None,
            #[cfg(feature = "payouts")]
            payout_method_data: None,
            #[cfg(feature = "payouts")]
            quote_id: None,
            payment_method_balance: None,
            connector_http_status_code: None,
            external_latency: None,
            connector_api_version: None,
            payment_method_status: None,
            apple_pay_flow: None,
            frm_metadata: self.frm_metadata.clone(),
            refund_id: None,
            dispute_id: None,
            connector_response: None,
            integrity_check: Ok(()),
            additional_merchant_data: None,
            header_payload,
            connector_mandate_request_reference_id: None,
            authentication_id: None,
            psd2_sca_exemption_type: None,
            raw_connector_response: None,
            is_payment_id_from_merchant: None,
            l2_l3_data: None,
            minor_amount_capturable: None,
            authorized_amount: None,
        };
        Ok(router_data)
    }
}
#[async_trait]
impl FeatureFrm<frm_api::Transaction, FraudCheckTransactionData> for FrmTransactionRouterData {
    /// Thin adapter: forwards to the free function [`decide_frm_flow`],
    /// passing `self` by mutable reference and returning the updated data.
    async fn decide_frm_flows<'a>(
        mut self,
        state: &SessionState,
        connector: &frm_api::FraudCheckConnectorData,
        call_connector_action: payments::CallConnectorAction,
        merchant_context: &domain::MerchantContext,
    ) -> RouterResult<Self> {
        decide_frm_flow(
            &mut self,
            state,
            connector,
            call_connector_action,
            merchant_context,
        )
        .await
    }
}
/// Executes the `Transaction` fraud-check step against the configured FRM
/// connector.
///
/// Resolves the connector's boxed `Transaction` integration, runs the generic
/// connector processing step with the requested action, and converts any
/// connector-level failure into a payment-failed response.
pub async fn decide_frm_flow(
    router_data: &mut FrmTransactionRouterData,
    state: &SessionState,
    connector: &frm_api::FraudCheckConnectorData,
    call_connector_action: payments::CallConnectorAction,
    _merchant_context: &domain::MerchantContext,
) -> RouterResult<FrmTransactionRouterData> {
    // Boxed integration object for the Transaction flow of this connector.
    let integration: services::BoxedFrmConnectorIntegrationInterface<
        frm_api::Transaction,
        FraudCheckTransactionData,
        FraudCheckResponseData,
    > = connector.connector.get_connector_integration();
    let updated_router_data = services::execute_connector_processing_step(
        state,
        integration,
        router_data,
        call_connector_action,
        None,
        None,
    )
    .await
    .to_payment_failed_response()?;
    Ok(updated_router_data)
}
// File: crates/router/src/core/fraud_check/flows/sale_flow.rs
use async_trait::async_trait;
use common_utils::{ext_traits::ValueExt, pii::Email};
use error_stack::ResultExt;
use masking::ExposeInterface;
use crate::{
core::{
errors::{ConnectorErrorExt, RouterResult},
fraud_check::{FeatureFrm, FraudCheckConnectorData, FrmData},
payments::{self, flows::ConstructFlowSpecificData, helpers},
},
errors, services,
types::{
api::fraud_check as frm_api,
domain,
fraud_check::{FraudCheckResponseData, FraudCheckSaleData, FrmSaleRouterData},
storage::enums as storage_enums,
ConnectorAuthType, MerchantRecipientData, ResponseId, RouterData,
},
SessionState,
};
#[async_trait]
impl ConstructFlowSpecificData<frm_api::Sale, FraudCheckSaleData, FraudCheckResponseData>
    for FrmData
{
    /// API v2 variant — not yet implemented.
    #[cfg(feature = "v2")]
    async fn construct_router_data<'a>(
        &self,
        _state: &SessionState,
        _connector_id: &str,
        _merchant_context: &domain::MerchantContext,
        _customer: &Option<domain::Customer>,
        _merchant_connector_account: &domain::MerchantConnectorAccountTypeDetails,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        _header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
    ) -> RouterResult<RouterData<frm_api::Sale, FraudCheckSaleData, FraudCheckResponseData>> {
        todo!()
    }
    /// Builds the [`RouterData`] for the FRM `Sale` call: parses the
    /// connector auth details, copies amount / order / currency / customer
    /// e-mail from the stored payment, and seeds the response with a
    /// placeholder `Pending` transaction result.
    ///
    /// # Errors
    /// - `MerchantConnectorAccountNotFound` if the auth details fail to parse.
    /// - `PaymentMethodNotFound` if the attempt has no payment method.
    /// - `InvalidDataValue` if the customer e-mail cannot be converted.
    #[cfg(feature = "v1")]
    async fn construct_router_data<'a>(
        &self,
        state: &SessionState,
        connector_id: &str,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
        merchant_connector_account: &helpers::MerchantConnectorAccountType,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
        _payment_method: Option<common_enums::PaymentMethod>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
    ) -> RouterResult<RouterData<frm_api::Sale, FraudCheckSaleData, FraudCheckResponseData>> {
        let status = storage_enums::AttemptStatus::Pending;
        let auth_type: ConnectorAuthType = merchant_connector_account
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
                id: "ConnectorAuthType".to_string(),
            })?;
        let customer_id = customer.to_owned().map(|customer| customer.customer_id);
        let router_data = RouterData {
            flow: std::marker::PhantomData,
            merchant_id: merchant_context.get_merchant_account().get_id().clone(),
            customer_id,
            connector: connector_id.to_string(),
            payment_id: self.payment_intent.payment_id.get_string_repr().to_owned(),
            attempt_id: self.payment_attempt.attempt_id.clone(),
            tenant_id: state.tenant.tenant_id.clone(),
            status,
            payment_method: self
                .payment_attempt
                .payment_method
                .ok_or(errors::ApiErrorResponse::PaymentMethodNotFound)?,
            payment_method_type: self.payment_attempt.payment_method_type,
            connector_auth_type: auth_type,
            description: None,
            address: self.address.clone(),
            auth_type: storage_enums::AuthenticationType::NoThreeDs,
            connector_meta_data: None,
            connector_wallets_details: None,
            amount_captured: None,
            minor_amount_captured: None,
            request: FraudCheckSaleData {
                amount: self
                    .payment_attempt
                    .net_amount
                    .get_total_amount()
                    .get_amount_as_i64(),
                order_details: self.order_details.clone(),
                currency: self.payment_attempt.currency,
                email: customer
                    .clone()
                    .and_then(|customer_data| {
                        customer_data
                            .email
                            .map(|email| Email::try_from(email.into_inner().expose()))
                    })
                    .transpose()
                    .change_context(errors::ApiErrorResponse::InvalidDataValue {
                        field_name: "customer.customer_data.email",
                    })?,
            },
            // Placeholder; replaced by the actual connector response.
            response: Ok(FraudCheckResponseData::TransactionResponse {
                resource_id: ResponseId::ConnectorTransactionId("".to_string()),
                connector_metadata: None,
                status: storage_enums::FraudCheckStatus::Pending,
                score: None,
                reason: None,
            }),
            access_token: None,
            session_token: None,
            reference_id: None,
            payment_method_token: None,
            connector_customer: None,
            preprocessing_id: None,
            payment_method_status: None,
            connector_request_reference_id: uuid::Uuid::new_v4().to_string(),
            test_mode: None,
            recurring_mandate_payment_data: None,
            #[cfg(feature = "payouts")]
            payout_method_data: None,
            #[cfg(feature = "payouts")]
            quote_id: None,
            payment_method_balance: None,
            connector_http_status_code: None,
            external_latency: None,
            connector_api_version: None,
            apple_pay_flow: None,
            frm_metadata: self.frm_metadata.clone(),
            refund_id: None,
            dispute_id: None,
            connector_response: None,
            integrity_check: Ok(()),
            additional_merchant_data: None,
            header_payload,
            connector_mandate_request_reference_id: None,
            authentication_id: None,
            psd2_sca_exemption_type: None,
            raw_connector_response: None,
            is_payment_id_from_merchant: None,
            l2_l3_data: None,
            minor_amount_capturable: None,
            authorized_amount: None,
        };
        Ok(router_data)
    }
}
#[async_trait]
impl FeatureFrm<frm_api::Sale, FraudCheckSaleData> for FrmSaleRouterData {
    /// Thin adapter: forwards to the free function [`decide_frm_flow`],
    /// passing `self` by mutable reference and returning the updated data.
    async fn decide_frm_flows<'a>(
        mut self,
        state: &SessionState,
        connector: &FraudCheckConnectorData,
        call_connector_action: payments::CallConnectorAction,
        merchant_context: &domain::MerchantContext,
    ) -> RouterResult<Self> {
        decide_frm_flow(
            &mut self,
            state,
            connector,
            call_connector_action,
            merchant_context,
        )
        .await
    }
}
/// Executes the FRM Sale call against the configured fraud-check connector.
///
/// Resolves the connector's `Sale` integration, drives the connector
/// processing step with the given action, and maps connector failures to a
/// payment-failed response.
pub async fn decide_frm_flow(
    router_data: &mut FrmSaleRouterData,
    state: &SessionState,
    connector: &FraudCheckConnectorData,
    call_connector_action: payments::CallConnectorAction,
    _merchant_context: &domain::MerchantContext,
) -> RouterResult<FrmSaleRouterData> {
    // Look up the boxed `Sale` integration implemented by this connector.
    let integration: services::BoxedFrmConnectorIntegrationInterface<
        frm_api::Sale,
        FraudCheckSaleData,
        FraudCheckResponseData,
    > = connector.connector.get_connector_integration();
    // Drive the connector call and surface any error as a payment failure.
    services::execute_connector_processing_step(
        state,
        integration,
        router_data,
        call_connector_action,
        None,
        None,
    )
    .await
    .to_payment_failed_response()
}
// File: crates/router/src/core/fraud_check/flows/record_return.rs
use async_trait::async_trait;
use common_utils::ext_traits::ValueExt;
use error_stack::ResultExt;
use crate::{
connector::signifyd::transformers::RefundMethod,
core::{
errors::{ConnectorErrorExt, RouterResult},
fraud_check::{FeatureFrm, FraudCheckConnectorData, FrmData},
payments::{self, flows::ConstructFlowSpecificData, helpers},
},
errors, services,
types::{
api::RecordReturn,
domain,
fraud_check::{
FraudCheckRecordReturnData, FraudCheckResponseData, FrmRecordReturnRouterData,
},
storage::enums as storage_enums,
ConnectorAuthType, MerchantRecipientData, ResponseId, RouterData,
},
utils, SessionState,
};
#[async_trait]
impl ConstructFlowSpecificData<RecordReturn, FraudCheckRecordReturnData, FraudCheckResponseData>
    for FrmData
{
    /// v2 variant — not implemented yet; invoking this panics via `todo!()`.
    #[cfg(feature = "v2")]
    async fn construct_router_data<'a>(
        &self,
        _state: &SessionState,
        _connector_id: &str,
        _merchant_context: &domain::MerchantContext,
        _customer: &Option<domain::Customer>,
        _merchant_connector_account: &domain::MerchantConnectorAccountTypeDetails,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        _header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
    ) -> RouterResult<RouterData<RecordReturn, FraudCheckRecordReturnData, FraudCheckResponseData>>
    {
        todo!()
    }

    /// Builds the `RouterData` used to record a return (refund) with the
    /// fraud-check connector.
    ///
    /// Connector credentials are parsed out of the merchant connector
    /// account; amount, currency and refund id are copied from the payment
    /// attempt / refund held on `self`. The `response` field is pre-filled
    /// with a placeholder `RecordReturnResponse` that the subsequent
    /// connector call is expected to overwrite.
    #[cfg(feature = "v1")]
    async fn construct_router_data<'a>(
        &self,
        state: &SessionState,
        connector_id: &str,
        merchant_context: &domain::MerchantContext,
        customer: &Option<domain::Customer>,
        merchant_connector_account: &helpers::MerchantConnectorAccountType,
        _merchant_recipient_data: Option<MerchantRecipientData>,
        header_payload: Option<hyperswitch_domain_models::payments::HeaderPayload>,
        _payment_method: Option<common_enums::PaymentMethod>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
    ) -> RouterResult<RouterData<RecordReturn, FraudCheckRecordReturnData, FraudCheckResponseData>>
    {
        // Record-return attempts always start out as Pending.
        let status = storage_enums::AttemptStatus::Pending;
        // Parse the connector auth details stored on the merchant connector
        // account.
        let auth_type: ConnectorAuthType = merchant_connector_account
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
                id: "ConnectorAuthType".to_string(),
            })?;
        let customer_id = customer.to_owned().map(|customer| customer.customer_id);
        let currency = self.payment_attempt.clone().currency;
        let router_data = RouterData {
            flow: std::marker::PhantomData,
            merchant_id: merchant_context.get_merchant_account().get_id().clone(),
            tenant_id: state.tenant.tenant_id.clone(),
            customer_id,
            connector: connector_id.to_string(),
            payment_id: self.payment_intent.payment_id.get_string_repr().to_owned(),
            attempt_id: self.payment_attempt.attempt_id.clone(),
            status,
            payment_method: utils::OptionExt::get_required_value(
                self.payment_attempt.payment_method,
                "payment_method",
            )?,
            payment_method_type: self.payment_attempt.payment_method_type,
            connector_auth_type: auth_type,
            description: None,
            address: self.address.clone(),
            // This FRM flow does not run 3DS authentication.
            auth_type: storage_enums::AuthenticationType::NoThreeDs,
            connector_meta_data: None,
            connector_wallets_details: None,
            amount_captured: None,
            minor_amount_captured: None,
            request: FraudCheckRecordReturnData {
                amount: self
                    .payment_attempt
                    .net_amount
                    .get_total_amount()
                    .get_amount_as_i64(),
                // Not consumed by payments today, hence hardcoded.
                refund_method: RefundMethod::OriginalPaymentInstrument,
                currency,
                refund_transaction_id: self.refund.clone().map(|refund| refund.refund_id),
            },
            // Placeholder response; the actual connector call fills this in.
            response: Ok(FraudCheckResponseData::RecordReturnResponse {
                resource_id: ResponseId::ConnectorTransactionId("".to_string()),
                connector_metadata: None,
                return_id: None,
            }),
            access_token: None,
            session_token: None,
            reference_id: None,
            payment_method_token: None,
            connector_customer: None,
            preprocessing_id: None,
            payment_method_status: None,
            connector_request_reference_id: uuid::Uuid::new_v4().to_string(),
            test_mode: None,
            recurring_mandate_payment_data: None,
            #[cfg(feature = "payouts")]
            payout_method_data: None,
            #[cfg(feature = "payouts")]
            quote_id: None,
            payment_method_balance: None,
            connector_http_status_code: None,
            external_latency: None,
            connector_api_version: None,
            apple_pay_flow: None,
            frm_metadata: None,
            refund_id: None,
            dispute_id: None,
            connector_response: None,
            integrity_check: Ok(()),
            additional_merchant_data: None,
            header_payload,
            connector_mandate_request_reference_id: None,
            authentication_id: None,
            psd2_sca_exemption_type: None,
            raw_connector_response: None,
            is_payment_id_from_merchant: None,
            l2_l3_data: None,
            minor_amount_capturable: None,
            authorized_amount: None,
        };
        Ok(router_data)
    }
}
#[async_trait]
impl FeatureFrm<RecordReturn, FraudCheckRecordReturnData> for FrmRecordReturnRouterData {
    /// Runs the FRM record-return flow by delegating to the module-level
    /// [`decide_frm_flow`] helper.
    async fn decide_frm_flows<'a>(
        mut self,
        state: &SessionState,
        connector: &FraudCheckConnectorData,
        call_connector_action: payments::CallConnectorAction,
        merchant_context: &domain::MerchantContext,
    ) -> RouterResult<Self> {
        // `self` is taken by value and handed to the helper as `&mut`; the
        // helper returns the updated router data as the result.
        decide_frm_flow(
            &mut self,
            state,
            connector,
            call_connector_action,
            merchant_context,
        )
        .await
    }
}
/// Executes the FRM record-return call against the configured fraud-check
/// connector and maps connector failures to a payment-failed response.
pub async fn decide_frm_flow(
    router_data: &mut FrmRecordReturnRouterData,
    state: &SessionState,
    connector: &FraudCheckConnectorData,
    call_connector_action: payments::CallConnectorAction,
    _merchant_context: &domain::MerchantContext,
) -> RouterResult<FrmRecordReturnRouterData> {
    // Resolve the boxed `RecordReturn` integration for this connector.
    let integration: services::BoxedFrmConnectorIntegrationInterface<
        RecordReturn,
        FraudCheckRecordReturnData,
        FraudCheckResponseData,
    > = connector.connector.get_connector_integration();
    // Drive the connector call and surface any error as a payment failure.
    services::execute_connector_processing_step(
        state,
        integration,
        router_data,
        call_connector_action,
        None,
        None,
    )
    .await
    .to_payment_failed_response()
}
// File: crates/router/src/core/fraud_check/flows/fulfillment_flow.rs
use common_utils::ext_traits::{OptionExt, ValueExt};
use error_stack::ResultExt;
use router_env::tracing::{self, instrument};
use crate::{
core::{
errors::RouterResult, fraud_check::frm_core_types::FrmFulfillmentRequest,
payments::helpers, utils as core_utils,
},
errors,
types::{
domain,
fraud_check::{FraudCheckFulfillmentData, FrmFulfillmentRouterData},
storage, ConnectorAuthType, ErrorResponse, PaymentAddress, RouterData,
},
utils, SessionState,
};
#[cfg(feature = "v2")]
/// v2 variant of the fulfillment router-data constructor — not implemented
/// yet; calling this panics via `todo!()`.
pub async fn construct_fulfillment_router_data<'a>(
    _state: &'a SessionState,
    _payment_intent: &'a storage::PaymentIntent,
    _payment_attempt: &storage::PaymentAttempt,
    _merchant_context: &domain::MerchantContext,
    _connector: String,
    _fulfillment_request: FrmFulfillmentRequest,
) -> RouterResult<FrmFulfillmentRouterData> {
    todo!()
}
#[cfg(feature = "v1")]
#[instrument(skip_all)]
/// Builds the `RouterData` used to notify the fraud-check connector of an
/// order fulfillment.
///
/// Looks up the merchant connector account for the payment's profile,
/// parses the connector credentials from it, and assembles the request from
/// the payment intent/attempt plus the caller-supplied fulfillment request.
/// The `response` starts out as `Err(ErrorResponse::default())` until the
/// connector call populates it.
///
/// # Errors
/// Fails when the payment intent has no `profile_id`, when the merchant
/// connector account cannot be resolved, or when its auth details fail to
/// parse.
pub async fn construct_fulfillment_router_data<'a>(
    state: &'a SessionState,
    payment_intent: &'a storage::PaymentIntent,
    payment_attempt: &storage::PaymentAttempt,
    merchant_context: &domain::MerchantContext,
    connector: String,
    fulfillment_request: FrmFulfillmentRequest,
) -> RouterResult<FrmFulfillmentRouterData> {
    // The profile id is required to resolve the merchant connector account.
    let profile_id = payment_intent
        .profile_id
        .as_ref()
        .get_required_value("profile_id")
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("profile_id is not set in payment_intent")?
        .clone();
    let connector_id = connector.clone();
    let merchant_connector_account = helpers::get_merchant_connector_account(
        state,
        merchant_context.get_merchant_account().get_id(),
        None,
        merchant_context.get_merchant_key_store(),
        &profile_id,
        &connector,
        None,
    )
    .await?;
    let test_mode: Option<bool> = merchant_connector_account.is_test_mode_on();
    // Parse the connector auth details stored on the merchant connector
    // account.
    let auth_type: ConnectorAuthType = merchant_connector_account
        .get_connector_account_details()
        .parse_value("ConnectorAuthType")
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
    let payment_method =
        utils::OptionExt::get_required_value(payment_attempt.payment_method, "payment_method")?;
    let router_data = RouterData {
        flow: std::marker::PhantomData,
        merchant_id: merchant_context.get_merchant_account().get_id().clone(),
        tenant_id: state.tenant.tenant_id.clone(),
        connector,
        payment_id: payment_attempt.payment_id.get_string_repr().to_owned(),
        attempt_id: payment_attempt.attempt_id.clone(),
        status: payment_attempt.status,
        payment_method,
        payment_method_type: payment_attempt.payment_method_type,
        connector_auth_type: auth_type,
        description: None,
        address: PaymentAddress::default(),
        auth_type: payment_attempt.authentication_type.unwrap_or_default(),
        connector_meta_data: merchant_connector_account.get_metadata(),
        connector_wallets_details: merchant_connector_account.get_connector_wallets_details(),
        amount_captured: payment_intent
            .amount_captured
            .map(|amt| amt.get_amount_as_i64()),
        minor_amount_captured: payment_intent.amount_captured,
        payment_method_status: None,
        request: FraudCheckFulfillmentData {
            amount: payment_attempt
                .net_amount
                .get_total_amount()
                .get_amount_as_i64(),
            order_details: payment_intent.order_details.clone(),
            fulfillment_req: fulfillment_request,
        },
        // Default error response; the connector call overwrites this.
        response: Err(ErrorResponse::default()),
        access_token: None,
        session_token: None,
        reference_id: None,
        payment_method_token: None,
        connector_customer: None,
        customer_id: None,
        recurring_mandate_payment_data: None,
        preprocessing_id: None,
        payment_method_balance: None,
        connector_request_reference_id: core_utils::get_connector_request_reference_id(
            &state.conf,
            merchant_context.get_merchant_account().get_id(),
            payment_intent,
            payment_attempt,
            &connector_id,
        )?,
        #[cfg(feature = "payouts")]
        payout_method_data: None,
        #[cfg(feature = "payouts")]
        quote_id: None,
        test_mode,
        connector_api_version: None,
        connector_http_status_code: None,
        external_latency: None,
        apple_pay_flow: None,
        frm_metadata: None,
        refund_id: None,
        dispute_id: None,
        connector_response: None,
        integrity_check: Ok(()),
        additional_merchant_data: None,
        header_payload: None,
        connector_mandate_request_reference_id: None,
        authentication_id: None,
        psd2_sca_exemption_type: None,
        raw_connector_response: None,
        is_payment_id_from_merchant: None,
        l2_l3_data: None,
        minor_amount_capturable: None,
        authorized_amount: None,
    };
    Ok(router_data)
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/fraud_check/flows/checkout_flow.rs",
"crates/router/src/core/fraud_check/flows/transaction_flow.rs",
"crates/router/src/core/fraud_check/flows/sale_flow.rs",
"crates/router/src/core/fraud_check/flows/record_return.rs",
"crates/router/src/core/fraud_check/flows/fulfillment_flow.rs"
],
"module": "crates/router/src/core/fraud_check/flows",
"num_files": 5,
"token_count": 7204
}
|
module_-8679106623900050004
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/user_role
Files: 1
</path>
<module>
// File: crates/router/src/core/user_role/role.rs
use std::{cmp, collections::HashSet, ops::Not};
use api_models::user_role::role as role_api;
use common_enums::{EntityType, ParentGroup, PermissionGroup};
use common_utils::generate_id_with_default_len;
use diesel_models::role::{ListRolesByEntityPayload, RoleNew, RoleUpdate};
use error_stack::{report, ResultExt};
use crate::{
core::errors::{StorageErrorExt, UserErrors, UserResponse},
routes::{app::ReqState, SessionState},
services::{
authentication::{blacklist, UserFromToken},
authorization::{
permission_groups::{ParentGroupExt, PermissionGroupExt},
roles::{self, predefined_roles::PREDEFINED_ROLES},
},
ApplicationResponse,
},
types::domain::user::RoleName,
utils,
};
/// Returns the permission groups attached to the role carried in the
/// caller's JWT.
pub async fn get_role_from_token_with_groups(
    state: SessionState,
    user_from_token: UserFromToken,
) -> UserResponse<Vec<PermissionGroup>> {
    // Resolve the role referenced by the token; a miss means the JWT holds
    // a stale or invalid role_id.
    let role = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    Ok(ApplicationResponse::Json(
        role.get_permission_groups().to_vec(),
    ))
}
/// Returns both the permission groups of the caller's role and the
/// deduplicated set of resources those groups cover.
pub async fn get_groups_and_resources_for_role_from_token(
    state: SessionState,
    user_from_token: UserFromToken,
) -> UserResponse<role_api::GroupsAndResources> {
    let role_info = user_from_token.get_role_info_from_db(&state).await?;
    let groups: Vec<_> = role_info.get_permission_groups().into_iter().collect();
    // Deduplicate resources across all groups before returning them.
    let mut resource_set = HashSet::new();
    for group in &groups {
        resource_set.extend(group.resources());
    }
    Ok(ApplicationResponse::Json(role_api::GroupsAndResources {
        groups,
        resources: resource_set.into_iter().collect(),
    }))
}
/// Returns the caller's role permissions regrouped by parent group.
pub async fn get_parent_groups_info_for_role_from_token(
    state: SessionState,
    user_from_token: UserFromToken,
) -> UserResponse<Vec<role_api::ParentGroupInfo>> {
    let role_info = user_from_token.get_role_info_from_db(&state).await?;
    let permission_groups: Vec<_> = role_info.get_permission_groups().into_iter().collect();
    // Fold the flat permission groups into their parent-group structure,
    // scoped to the role's entity type.
    Ok(ApplicationResponse::Json(
        utils::user_role::permission_groups_to_parent_group_info(
            &permission_groups,
            role_info.get_entity_type(),
        ),
    ))
}
/// Creates a custom role owned by the caller's organization / merchant /
/// profile lineage.
///
/// # Errors
/// - `InvalidRoleOperation` when the caller attempts to create an
///   org-level role, when the requested scope is narrower than the role's
///   entity type, or when the caller's own entity level is below what the
///   request needs.
/// - `RoleNameAlreadyExists` when the role name collides within the
///   lineage.
pub async fn create_role(
    state: SessionState,
    user_from_token: UserFromToken,
    req: role_api::CreateRoleRequest,
    _req_state: ReqState,
) -> UserResponse<role_api::RoleInfoWithGroupsResponse> {
    let now = common_utils::date_time::now();
    let user_entity_type = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?
        .get_entity_type();
    // Custom roles default to merchant level when no entity type is given.
    let role_entity_type = req.entity_type.unwrap_or(EntityType::Merchant);
    if matches!(role_entity_type, EntityType::Organization) {
        return Err(report!(UserErrors::InvalidRoleOperation))
            .attach_printable("User trying to create org level custom role");
    }
    // The scope must be at least as broad as the entity type of the role
    // being created (EntityType ordering).
    let requestor_entity_from_role_scope = EntityType::from(req.role_scope);
    if requestor_entity_from_role_scope < role_entity_type {
        return Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
            "User is trying to create role of type {role_entity_type} and scope {requestor_entity_from_role_scope}",
        ));
    }
    // The caller must sit at or above both the requested scope and the
    // role's entity type.
    let max_from_scope_and_entity = cmp::max(requestor_entity_from_role_scope, role_entity_type);
    if user_entity_type < max_from_scope_and_entity {
        return Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
            "{user_entity_type} is trying to create of scope {requestor_entity_from_role_scope} and of type {role_entity_type}",
        ));
    }
    let role_name = RoleName::new(req.role_name)?;
    utils::user_role::validate_role_groups(&req.groups)?;
    // Role names must be unique within the caller's lineage for this
    // entity type.
    utils::user_role::validate_role_name(
        &state,
        &role_name,
        &user_from_token.merchant_id,
        &user_from_token.org_id,
        user_from_token
            .tenant_id
            .as_ref()
            .unwrap_or(&state.tenant.tenant_id),
        &user_from_token.profile_id,
        &role_entity_type,
    )
    .await?;
    // Narrower entity types carry more of the caller's lineage on the
    // stored role.
    let (org_id, merchant_id, profile_id) = match role_entity_type {
        EntityType::Organization | EntityType::Tenant => (user_from_token.org_id, None, None),
        EntityType::Merchant => (
            user_from_token.org_id,
            Some(user_from_token.merchant_id),
            None,
        ),
        EntityType::Profile => (
            user_from_token.org_id,
            Some(user_from_token.merchant_id),
            Some(user_from_token.profile_id),
        ),
    };
    let role = state
        .global_store
        .insert_role(RoleNew {
            role_id: generate_id_with_default_len("role"),
            role_name: role_name.get_role_name(),
            merchant_id,
            org_id,
            groups: req.groups,
            scope: req.role_scope,
            entity_type: role_entity_type,
            created_by: user_from_token.user_id.clone(),
            last_modified_by: user_from_token.user_id,
            created_at: now,
            last_modified_at: now,
            profile_id,
            tenant_id: user_from_token.tenant_id.unwrap_or(state.tenant.tenant_id),
        })
        .await
        .to_duplicate_response(UserErrors::RoleNameAlreadyExists)?;
    Ok(ApplicationResponse::Json(
        role_api::RoleInfoWithGroupsResponse {
            groups: role.groups,
            role_id: role.role_id,
            role_name: role.role_name,
            role_scope: role.scope,
            entity_type: role.entity_type,
        },
    ))
}
/// Creates a custom role from parent-group information and returns it with
/// parent-group descriptions (the v2 shape of [`create_role`]).
///
/// Performs the same entity/scope validations as [`create_role`]; the
/// requested parent groups are flattened into permission groups before
/// validation and persistence.
pub async fn create_role_v2(
    state: SessionState,
    user_from_token: UserFromToken,
    req: role_api::CreateRoleV2Request,
    _req_state: ReqState,
) -> UserResponse<role_api::RoleInfoResponseWithParentsGroup> {
    let now = common_utils::date_time::now();
    let user_entity_type = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?
        .get_entity_type();
    // Custom roles default to merchant level when no entity type is given.
    let role_entity_type = req.entity_type.unwrap_or(EntityType::Merchant);
    if matches!(role_entity_type, EntityType::Organization) {
        return Err(report!(UserErrors::InvalidRoleOperation))
            .attach_printable("User trying to create org level custom role");
    }
    // The scope must be at least as broad as the entity type of the role
    // being created (EntityType ordering).
    let requestor_entity_from_role_scope = EntityType::from(req.role_scope);
    if requestor_entity_from_role_scope < role_entity_type {
        return Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
            "User is trying to create role of type {role_entity_type} and scope {requestor_entity_from_role_scope}",
        ));
    }
    // The caller must sit at or above both the requested scope and the
    // role's entity type.
    let max_from_scope_and_entity = cmp::max(requestor_entity_from_role_scope, role_entity_type);
    if user_entity_type < max_from_scope_and_entity {
        return Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
            "{user_entity_type} is trying to create of scope {requestor_entity_from_role_scope} and of type {role_entity_type}",
        ));
    }
    let role_name = RoleName::new(req.role_name.clone())?;
    // v2 requests describe permissions as parent groups; flatten them into
    // permission groups before validation/persistence.
    let permission_groups =
        utils::user_role::parent_group_info_request_to_permission_groups(&req.parent_groups)?;
    utils::user_role::validate_role_groups(&permission_groups)?;
    utils::user_role::validate_role_name(
        &state,
        &role_name,
        &user_from_token.merchant_id,
        &user_from_token.org_id,
        user_from_token
            .tenant_id
            .as_ref()
            .unwrap_or(&state.tenant.tenant_id),
        &user_from_token.profile_id,
        &role_entity_type,
    )
    .await?;
    // Narrower entity types carry more of the caller's lineage on the
    // stored role.
    let (org_id, merchant_id, profile_id) = match role_entity_type {
        EntityType::Organization | EntityType::Tenant => (user_from_token.org_id, None, None),
        EntityType::Merchant => (
            user_from_token.org_id,
            Some(user_from_token.merchant_id),
            None,
        ),
        EntityType::Profile => (
            user_from_token.org_id,
            Some(user_from_token.merchant_id),
            Some(user_from_token.profile_id),
        ),
    };
    let role = state
        .global_store
        .insert_role(RoleNew {
            role_id: generate_id_with_default_len("role"),
            role_name: role_name.get_role_name(),
            merchant_id,
            org_id,
            groups: permission_groups,
            scope: req.role_scope,
            entity_type: role_entity_type,
            created_by: user_from_token.user_id.clone(),
            last_modified_by: user_from_token.user_id,
            created_at: now,
            last_modified_at: now,
            profile_id,
            tenant_id: user_from_token.tenant_id.unwrap_or(state.tenant.tenant_id),
        })
        .await
        .to_duplicate_response(UserErrors::RoleNameAlreadyExists)?;
    // Regroup the stored permission groups for the response, dropping any
    // parent group that yields no description.
    let parent_group_details =
        utils::user_role::permission_groups_to_parent_group_info(&role.groups, role.entity_type);
    let parent_group_descriptions: Vec<role_api::ParentGroupDescription> = parent_group_details
        .into_iter()
        .filter_map(|group_details| {
            let description = utils::user_role::resources_to_description(
                group_details.resources,
                role.entity_type,
            )?;
            Some(role_api::ParentGroupDescription {
                name: group_details.name,
                description,
                scopes: group_details.scopes,
            })
        })
        .collect();
    Ok(ApplicationResponse::Json(
        role_api::RoleInfoResponseWithParentsGroup {
            role_id: role.role_id,
            role_name: role.role_name,
            role_scope: role.scope,
            entity_type: role.entity_type,
            parent_groups: parent_group_descriptions,
        },
    ))
}
/// Fetches a role by id within the caller's org/tenant and returns it with
/// its permission groups.
///
/// Internal roles are hidden from this endpoint and reported as an invalid
/// role id.
pub async fn get_role_with_groups(
    state: SessionState,
    user_from_token: UserFromToken,
    role: role_api::GetRoleRequest,
) -> UserResponse<role_api::RoleInfoWithGroupsResponse> {
    let tenant_id = user_from_token
        .tenant_id
        .as_ref()
        .unwrap_or(&state.tenant.tenant_id);
    let role_info = roles::RoleInfo::from_role_id_org_id_tenant_id(
        &state,
        &role.role_id,
        &user_from_token.org_id,
        tenant_id,
    )
    .await
    .to_not_found_response(UserErrors::InvalidRoleId)?;
    // Internal roles must not be exposed through this API.
    if role_info.is_internal() {
        return Err(UserErrors::InvalidRoleId.into());
    }
    let response = role_api::RoleInfoWithGroupsResponse {
        groups: role_info.get_permission_groups().to_vec(),
        role_id: role.role_id,
        role_name: role_info.get_role_name().to_string(),
        role_scope: role_info.get_scope(),
        entity_type: role_info.get_entity_type(),
    };
    Ok(ApplicationResponse::Json(response))
}
/// Fetches a role by id and returns its permission groups regrouped by
/// parent group, with a description and the scope set for each parent.
///
/// Internal roles are reported as `InvalidRoleId`.
pub async fn get_parent_info_for_role(
    state: SessionState,
    user_from_token: UserFromToken,
    role: role_api::GetRoleRequest,
) -> UserResponse<role_api::RoleInfoWithParents> {
    let role_info = roles::RoleInfo::from_role_id_org_id_tenant_id(
        &state,
        &role.role_id,
        &user_from_token.org_id,
        user_from_token
            .tenant_id
            .as_ref()
            .unwrap_or(&state.tenant.tenant_id),
    )
    .await
    .to_not_found_response(UserErrors::InvalidRoleId)?;
    // Internal roles must not be exposed through this API.
    if role_info.is_internal() {
        return Err(UserErrors::InvalidRoleId.into());
    }
    // A missing description for the role's groups is treated as a server
    // error.
    let parent_groups = ParentGroup::get_descriptions_for_groups(
        role_info.get_entity_type(),
        role_info.get_permission_groups().to_vec(),
    )
    .ok_or(UserErrors::InternalServerError)
    .attach_printable(format!(
        "No group descriptions found for role_id: {}",
        role.role_id
    ))?
    .into_iter()
    .map(
        |(parent_group, description)| role_api::ParentGroupDescription {
            name: parent_group.clone(),
            description,
            // Collect the (deduplicated) scopes of every permission group
            // under this parent.
            scopes: role_info
                .get_permission_groups()
                .iter()
                .filter_map(|group| (group.parent() == parent_group).then_some(group.scope()))
                // TODO: Remove this hashset conversion when merchant access
                // and organization access groups are removed
                .collect::<HashSet<_>>()
                .into_iter()
                .collect(),
        },
    )
    .collect();
    Ok(ApplicationResponse::Json(role_api::RoleInfoWithParents {
        role_id: role.role_id,
        parent_groups,
        role_name: role_info.get_role_name().to_string(),
        role_scope: role_info.get_scope(),
    }))
}
/// Updates a custom role's name and/or permission groups in place.
///
/// The role must be reachable within the caller's lineage and the caller's
/// entity level must be at or above both the role's scope and its entity
/// type. A changed name is re-validated for uniqueness; changed groups are
/// re-validated for consistency.
pub async fn update_role(
    state: SessionState,
    user_from_token: UserFromToken,
    req: role_api::UpdateRoleRequest,
    role_id: &str,
) -> UserResponse<role_api::RoleInfoWithGroupsResponse> {
    let role_name = req.role_name.map(RoleName::new).transpose()?;
    let role_info = roles::RoleInfo::from_role_id_in_lineage(
        &state,
        role_id,
        &user_from_token.merchant_id,
        &user_from_token.org_id,
        &user_from_token.profile_id,
        user_from_token
            .tenant_id
            .as_ref()
            .unwrap_or(&state.tenant.tenant_id),
    )
    .await
    .to_not_found_response(UserErrors::InvalidRoleOperation)?;
    let user_role_info = user_from_token.get_role_info_from_db(&state).await?;
    // The caller must sit at or above both the role's scope and its entity
    // type to be allowed to modify it.
    let requested_entity_from_role_scope = EntityType::from(role_info.get_scope());
    let requested_role_entity_type = role_info.get_entity_type();
    let max_from_scope_and_entity =
        cmp::max(requested_entity_from_role_scope, requested_role_entity_type);
    if user_role_info.get_entity_type() < max_from_scope_and_entity {
        return Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
            "{} is trying to update of scope {} and of type {}",
            user_role_info.get_entity_type(),
            requested_entity_from_role_scope,
            requested_role_entity_type
        ));
    }
    // Only validate the name when it is actually being changed.
    if let Some(ref role_name) = role_name {
        utils::user_role::validate_role_name(
            &state,
            role_name,
            &user_from_token.merchant_id,
            &user_from_token.org_id,
            user_from_token
                .tenant_id
                .as_ref()
                .unwrap_or(&state.tenant.tenant_id),
            &user_from_token.profile_id,
            &role_info.get_entity_type(),
        )
        .await?;
    }
    if let Some(ref groups) = req.groups {
        utils::user_role::validate_role_groups(groups)?;
    }
    let updated_role = state
        .global_store
        .update_role_by_role_id(
            role_id,
            RoleUpdate::UpdateDetails {
                groups: req.groups,
                role_name: role_name.map(RoleName::get_role_name),
                last_modified_at: common_utils::date_time::now(),
                last_modified_by: user_from_token.user_id,
            },
        )
        .await
        .to_duplicate_response(UserErrors::RoleNameAlreadyExists)?;
    // Blacklist the role id after a successful update; presumably this
    // invalidates tokens issued against the old role definition — see the
    // `blacklist` module for the exact semantics.
    blacklist::insert_role_in_blacklist(&state, role_id).await?;
    Ok(ApplicationResponse::Json(
        role_api::RoleInfoWithGroupsResponse {
            groups: updated_role.groups,
            role_id: updated_role.role_id,
            role_name: updated_role.role_name,
            role_scope: updated_role.scope,
            entity_type: updated_role.entity_type,
        },
    ))
}
/// Lists every role visible to the caller together with role info.
///
/// The result is the union of non-internal predefined roles and custom
/// roles, where custom roles are fetched at the narrower of the caller's
/// entity level and the optional `entity_type` filter. Roles above the
/// caller's entity level are excluded from the response.
///
/// When `request.groups == Some(true)` the response carries parent-group
/// descriptions; otherwise the older flat-groups shape (to be deprecated)
/// is returned.
pub async fn list_roles_with_info(
    state: SessionState,
    user_from_token: UserFromToken,
    request: role_api::ListRolesQueryParams,
) -> UserResponse<role_api::ListRolesResponse> {
    let user_role_info = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    // Internal roles are not allowed to list roles.
    if user_role_info.is_internal() {
        return Err(UserErrors::InvalidRoleOperationWithMessage(
            "Internal roles are not allowed for this operation".to_string(),
        )
        .into());
    }
    // Seed the result with predefined roles, excluding internal ones.
    let mut role_info_vec = PREDEFINED_ROLES
        .values()
        .filter(|role| role.is_internal().not())
        .cloned()
        .collect::<Vec<_>>();
    let user_role_entity = user_role_info.get_entity_type();
    // Lineage data is only needed when no entity-type filter was supplied.
    let is_lineage_data_required = request.entity_type.is_none();
    let tenant_id = user_from_token
        .tenant_id
        .as_ref()
        .unwrap_or(&state.tenant.tenant_id)
        .to_owned();
    // Fetch custom roles at the narrower of the caller's entity level and
    // the requested filter.
    let custom_roles =
        match utils::user_role::get_min_entity(user_role_entity, request.entity_type)? {
            EntityType::Tenant | EntityType::Organization => state
                .global_store
                .generic_list_roles_by_entity_type(
                    ListRolesByEntityPayload::Organization,
                    is_lineage_data_required,
                    tenant_id,
                    user_from_token.org_id,
                )
                .await
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to get roles")?,
            EntityType::Merchant => state
                .global_store
                .generic_list_roles_by_entity_type(
                    ListRolesByEntityPayload::Merchant(user_from_token.merchant_id),
                    is_lineage_data_required,
                    tenant_id,
                    user_from_token.org_id,
                )
                .await
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to get roles")?,
            EntityType::Profile => state
                .global_store
                .generic_list_roles_by_entity_type(
                    ListRolesByEntityPayload::Profile(
                        user_from_token.merchant_id,
                        user_from_token.profile_id,
                    ),
                    is_lineage_data_required,
                    tenant_id,
                    user_from_token.org_id,
                )
                .await
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to get roles")?,
        };
    role_info_vec.extend(custom_roles.into_iter().map(roles::RoleInfo::from));
    if request.groups == Some(true) {
        let list_role_info_response = role_info_vec
            .into_iter()
            .filter_map(|role_info| {
                // Only expose roles at or below the caller's entity level,
                // optionally further restricted by the entity-type filter.
                let is_lower_entity = user_role_entity >= role_info.get_entity_type();
                let request_filter = request
                    .entity_type
                    .is_none_or(|entity_type| entity_type == role_info.get_entity_type());
                (is_lower_entity && request_filter).then_some({
                    let permission_groups = role_info.get_permission_groups();
                    // Regroup permission groups under their parent groups
                    // and attach per-parent descriptions; parents without a
                    // description are dropped.
                    let parent_group_details =
                        utils::user_role::permission_groups_to_parent_group_info(
                            &permission_groups,
                            role_info.get_entity_type(),
                        );
                    let parent_group_descriptions: Vec<role_api::ParentGroupDescription> =
                        parent_group_details
                            .into_iter()
                            .filter_map(|group_details| {
                                let description = utils::user_role::resources_to_description(
                                    group_details.resources,
                                    role_info.get_entity_type(),
                                )?;
                                Some(role_api::ParentGroupDescription {
                                    name: group_details.name,
                                    description,
                                    scopes: group_details.scopes,
                                })
                            })
                            .collect();
                    role_api::RoleInfoResponseWithParentsGroup {
                        role_id: role_info.get_role_id().to_string(),
                        role_name: role_info.get_role_name().to_string(),
                        entity_type: role_info.get_entity_type(),
                        parent_groups: parent_group_descriptions,
                        role_scope: role_info.get_scope(),
                    }
                })
            })
            .collect::<Vec<_>>();
        Ok(ApplicationResponse::Json(
            role_api::ListRolesResponse::WithParentGroups(list_role_info_response),
        ))
    }
    // TODO: To be deprecated
    else {
        let list_role_info_response = role_info_vec
            .into_iter()
            .filter_map(|role_info| {
                // Same visibility rules as above, but with the flat group
                // list in the response.
                let is_lower_entity = user_role_entity >= role_info.get_entity_type();
                let request_filter = request
                    .entity_type
                    .is_none_or(|entity_type| entity_type == role_info.get_entity_type());
                (is_lower_entity && request_filter).then_some(role_api::RoleInfoResponseNew {
                    role_id: role_info.get_role_id().to_string(),
                    role_name: role_info.get_role_name().to_string(),
                    groups: role_info.get_permission_groups().to_vec(),
                    entity_type: role_info.get_entity_type(),
                    scope: role_info.get_scope(),
                })
            })
            .collect::<Vec<_>>();
        Ok(ApplicationResponse::Json(
            role_api::ListRolesResponse::WithGroups(list_role_info_response),
        ))
    }
}
/// Lists minimal info (role id + name) for all roles at exactly
/// `req.entity_type`, restricted to roles that are invitable or updatable
/// depending on `check_type`.
///
/// Callers may only list roles at or below their own entity level.
pub async fn list_roles_at_entity_level(
    state: SessionState,
    user_from_token: UserFromToken,
    req: role_api::ListRolesAtEntityLevelRequest,
    check_type: role_api::RoleCheckType,
) -> UserResponse<Vec<role_api::MinimalRoleInfo>> {
    let user_entity_type = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?
        .get_entity_type();
    // Reject requests for an entity level above the caller's own.
    if req.entity_type > user_entity_type {
        return Err(UserErrors::InvalidRoleOperationWithMessage(
            "User is attempting to request list roles above the current entity level".to_string(),
        )
        .into());
    }
    // Start from all predefined roles (internal ones included here; they
    // are filtered out below via invitable/updatable + entity-type checks).
    let mut role_info_vec = PREDEFINED_ROLES.values().cloned().collect::<Vec<_>>();
    let tenant_id = user_from_token
        .tenant_id
        .as_ref()
        .unwrap_or(&state.tenant.tenant_id)
        .to_owned();
    let is_lineage_data_required = false;
    // Fetch custom roles stored at the requested entity level.
    let custom_roles = match req.entity_type {
        EntityType::Tenant | EntityType::Organization => state
            .global_store
            .generic_list_roles_by_entity_type(
                ListRolesByEntityPayload::Organization,
                is_lineage_data_required,
                tenant_id,
                user_from_token.org_id,
            )
            .await
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Failed to get roles")?,
        EntityType::Merchant => state
            .global_store
            .generic_list_roles_by_entity_type(
                ListRolesByEntityPayload::Merchant(user_from_token.merchant_id),
                is_lineage_data_required,
                tenant_id,
                user_from_token.org_id,
            )
            .await
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Failed to get roles")?,
        EntityType::Profile => state
            .global_store
            .generic_list_roles_by_entity_type(
                ListRolesByEntityPayload::Profile(
                    user_from_token.merchant_id,
                    user_from_token.profile_id,
                ),
                is_lineage_data_required,
                tenant_id,
                user_from_token.org_id,
            )
            .await
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Failed to get roles")?,
    };
    role_info_vec.extend(custom_roles.into_iter().map(roles::RoleInfo::from));
    let list_minimal_role_info = role_info_vec
        .into_iter()
        .filter_map(|role_info| {
            // NOTE: `check_type` is shadowed here by the boolean result of
            // applying the requested check to this role.
            let check_type = match check_type {
                role_api::RoleCheckType::Invite => role_info.is_invitable(),
                role_api::RoleCheckType::Update => role_info.is_updatable(),
            };
            if check_type && role_info.get_entity_type() == req.entity_type {
                Some(role_api::MinimalRoleInfo {
                    role_id: role_info.get_role_id().to_string(),
                    role_name: role_info.get_role_name().to_string(),
                })
            } else {
                None
            }
        })
        .collect::<Vec<_>>();
    Ok(ApplicationResponse::Json(list_minimal_role_info))
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/user_role/role.rs"
],
"module": "crates/router/src/core/user_role",
"num_files": 1,
"token_count": 5101
}
|
module_6161714491870601628
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/user
Files: 3
</path>
<module>
// File: crates/router/src/core/user/theme.rs
use api_models::user::theme as theme_api;
use common_enums::EntityType;
use common_utils::{
ext_traits::{ByteSliceExt, Encode},
types::user::ThemeLineage,
};
use diesel_models::user::theme::{ThemeNew, ThemeUpdate};
use error_stack::ResultExt;
use hyperswitch_domain_models::api::ApplicationResponse;
use masking::ExposeInterface;
use rdkafka::message::ToBytes;
use uuid::Uuid;
use crate::{
core::errors::{StorageErrorExt, UserErrors, UserResponse},
routes::SessionState,
services::authentication::UserFromToken,
utils::user::theme as theme_utils,
};
// TODO: To be deprecated
/// Looks up a theme by its lineage and returns its configuration together
/// with the theme-data file stored in the theme bucket.
pub async fn get_theme_using_lineage(
    state: SessionState,
    lineage: ThemeLineage,
) -> UserResponse<theme_api::GetThemeResponse> {
    let theme = state
        .store
        .find_theme_by_lineage(lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    // Pull the serialized theme data for this theme out of the bucket.
    let file_key = theme_utils::get_theme_file_key(&theme.theme_id);
    let file = theme_utils::retrieve_file_from_theme_bucket(&state, &file_key).await?;
    let theme_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: theme.email_config(),
        theme_id: theme.theme_id,
        theme_name: theme.theme_name,
        entity_type: theme.entity_type,
        tenant_id: theme.tenant_id,
        org_id: theme.org_id,
        merchant_id: theme.merchant_id,
        profile_id: theme.profile_id,
        theme_data,
    }))
}
// TODO: To be deprecated
/// Fetches a theme by its id and returns it together with its theme data
/// file parsed from the theme storage bucket.
pub async fn get_theme_using_theme_id(
    state: SessionState,
    theme_id: String,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Look the theme up first so a missing id surfaces as `ThemeNotFound`
    // before any storage access is attempted.
    let stored_theme = state
        .store
        .find_theme_by_theme_id(theme_id.clone())
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let theme_file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&theme_id),
    )
    .await?;

    let theme_data = theme_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: stored_theme.email_config(),
        theme_id: stored_theme.theme_id,
        theme_name: stored_theme.theme_name,
        entity_type: stored_theme.entity_type,
        tenant_id: stored_theme.tenant_id,
        org_id: stored_theme.org_id,
        merchant_id: stored_theme.merchant_id,
        profile_id: stored_theme.profile_id,
        theme_data,
    }))
}
// TODO: To be deprecated
/// Uploads a single asset file into the storage folder of an existing theme.
pub async fn upload_file_to_theme_storage(
    state: SessionState,
    theme_id: String,
    request: theme_api::UploadFileRequest,
) -> UserResponse<()> {
    // Resolve the theme first so uploads can never target a non-existent theme.
    let theme = state
        .store
        .find_theme_by_theme_id(theme_id)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let asset_key = theme_utils::get_specific_file_key(&theme.theme_id, &request.asset_name);
    theme_utils::upload_file_to_theme_bucket(&state, &asset_key, request.asset_data.expose())
        .await?;

    Ok(ApplicationResponse::StatusOk)
}
// TODO: To be deprecated
/// Creates a new theme under the given lineage and uploads its theme data
/// file to the theme storage bucket.
///
/// When the `email` feature is enabled the email config is mandatory;
/// otherwise the globally configured default is used as a fallback. After the
/// upload, the file is read back from the bucket so the response reflects
/// what was actually persisted.
pub async fn create_theme(
    state: SessionState,
    request: theme_api::CreateThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Reject requests whose lineage does not correspond to existing entities.
    theme_utils::validate_lineage(&state, &request.lineage).await?;
    let email_config = if cfg!(feature = "email") {
        request.email_config.ok_or(UserErrors::MissingEmailConfig)?
    } else {
        request
            .email_config
            .unwrap_or(state.conf.theme.email_config.clone())
    };
    let new_theme = ThemeNew::new(
        Uuid::new_v4().to_string(),
        request.theme_name,
        request.lineage,
        email_config,
    );
    // Insert the DB record first; a duplicate maps to `ThemeAlreadyExists`.
    let db_theme = state
        .store
        .insert_theme(new_theme)
        .await
        .to_duplicate_response(UserErrors::ThemeAlreadyExists)?;
    theme_utils::upload_file_to_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
        request
            .theme_data
            .encode_to_vec()
            .change_context(UserErrors::InternalServerError)?,
    )
    .await?;
    // Read the freshly uploaded file back so the response mirrors stored state.
    let file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
    )
    .await?;
    let parsed_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_name: db_theme.theme_name,
        theme_data: parsed_data,
    }))
}
// TODO: To be deprecated
/// Updates an existing theme's email config and/or theme data file.
///
/// The email config (when present) is persisted to the DB; the theme data
/// (when present) is re-uploaded to the theme storage bucket. The response
/// always re-reads the stored file so it reflects persisted state.
pub async fn update_theme(
    state: SessionState,
    theme_id: String,
    request: theme_api::UpdateThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Either update the email config (returning the updated row) or just
    // fetch the existing row — both paths confirm the theme exists.
    let db_theme = match request.email_config {
        Some(email_config) => {
            let theme_update = ThemeUpdate::EmailConfig { email_config };
            state
                .store
                .update_theme_by_theme_id(theme_id.clone(), theme_update)
                .await
                .to_not_found_response(UserErrors::ThemeNotFound)?
        }
        None => state
            .store
            .find_theme_by_theme_id(theme_id)
            .await
            .to_not_found_response(UserErrors::ThemeNotFound)?,
    };
    // Theme data is optional; when supplied it overwrites the stored file.
    if let Some(theme_data) = request.theme_data {
        theme_utils::upload_file_to_theme_bucket(
            &state,
            &theme_utils::get_theme_file_key(&db_theme.theme_id),
            theme_data
                .encode_to_vec()
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to parse ThemeData")?,
        )
        .await?;
    }
    // Read back whatever is now stored (updated or unchanged).
    let file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
    )
    .await?;
    let parsed_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_name: db_theme.theme_name,
        theme_data: parsed_data,
    }))
}
// TODO: To be deprecated
/// Deletes a theme's DB record by id.
///
/// The theme's files are intentionally left in storage (see TODO below).
pub async fn delete_theme(state: SessionState, theme_id: String) -> UserResponse<()> {
    state
        .store
        // `theme_id` is not used afterwards, so the previous `.clone()` here
        // was redundant (clippy::redundant_clone).
        .delete_theme_by_theme_id(theme_id)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    // TODO (#6717): Delete theme folder from the theme storage.
    // Currently there is no simple or easy way to delete a whole folder from S3.
    // So, we are not deleting the theme folder from the theme storage.
    Ok(ApplicationResponse::StatusOk)
}
/// Creates a new theme scoped to the lineage derived from the caller's token
/// and the requested entity type, then uploads its theme data file.
///
/// Mirrors `create_theme`, but derives the lineage from the authenticated
/// user instead of trusting a caller-supplied lineage.
pub async fn create_user_theme(
    state: SessionState,
    user_from_token: UserFromToken,
    request: theme_api::CreateUserThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Email config is mandatory when the `email` feature is on; otherwise
    // fall back to the configured default.
    let email_config = if cfg!(feature = "email") {
        request.email_config.ok_or(UserErrors::MissingEmailConfig)?
    } else {
        request
            .email_config
            .unwrap_or(state.conf.theme.email_config.clone())
    };
    // Lineage comes from the token, not the request body.
    let lineage = theme_utils::get_theme_lineage_from_user_token(
        &user_from_token,
        &state,
        &request.entity_type,
    )
    .await?;
    let new_theme = ThemeNew::new(
        Uuid::new_v4().to_string(),
        request.theme_name,
        lineage,
        email_config,
    );
    let db_theme = state
        .store
        .insert_theme(new_theme)
        .await
        .to_duplicate_response(UserErrors::ThemeAlreadyExists)?;
    theme_utils::upload_file_to_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
        request
            .theme_data
            .encode_to_vec()
            .change_context(UserErrors::InternalServerError)?,
    )
    .await?;
    // Read the uploaded file back so the response mirrors persisted state.
    let file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
    )
    .await?;
    let parsed_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_name: db_theme.theme_name,
        theme_data: parsed_data,
    }))
}
/// Deletes a theme after verifying the authenticated user's entity type is
/// allowed to access it.
pub async fn delete_user_theme(
    state: SessionState,
    user_from_token: UserFromToken,
    theme_id: String,
) -> UserResponse<()> {
    let db_theme = state
        .store
        .find_theme_by_theme_id(theme_id.clone())
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    let user_role_info = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    let user_entity_type = user_role_info.get_entity_type();
    // Authorization: the user's entity type must cover the theme's lineage.
    theme_utils::can_user_access_theme(&user_from_token, &user_entity_type, &db_theme).await?;
    state
        .store
        // Last use of `theme_id`: the previous `.clone()` here was redundant
        // (clippy::redundant_clone).
        .delete_theme_by_theme_id(theme_id)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    // TODO (#6717): Delete theme folder from the theme storage.
    // Currently there is no simple or easy way to delete a whole folder from S3.
    // So, we are not deleting the theme folder from the theme storage.
    Ok(ApplicationResponse::StatusOk)
}
/// Updates a theme's email config and/or theme data after verifying the
/// authenticated user's entity type is allowed to access it.
pub async fn update_user_theme(
    state: SessionState,
    theme_id: String,
    user_from_token: UserFromToken,
    request: theme_api::UpdateThemeRequest,
) -> UserResponse<theme_api::GetThemeResponse> {
    let db_theme = state
        .store
        .find_theme_by_theme_id(theme_id.clone())
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;
    let user_role_info = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    let user_entity_type = user_role_info.get_entity_type();
    // Authorization: access check happens before any mutation.
    theme_utils::can_user_access_theme(&user_from_token, &user_entity_type, &db_theme).await?;
    // Shadowing: replace the fetched row with the updated one when an email
    // config was supplied.
    let db_theme = match request.email_config {
        Some(email_config) => {
            let theme_update = ThemeUpdate::EmailConfig { email_config };
            state
                .store
                .update_theme_by_theme_id(theme_id.clone(), theme_update)
                .await
                // NOTE(review): `update_theme` maps this to `ThemeNotFound`;
                // here a not-found row becomes `InternalServerError` — verify
                // whether this divergence is intentional.
                .change_context(UserErrors::InternalServerError)?
        }
        None => db_theme,
    };
    if let Some(theme_data) = request.theme_data {
        theme_utils::upload_file_to_theme_bucket(
            &state,
            &theme_utils::get_theme_file_key(&db_theme.theme_id),
            theme_data
                .encode_to_vec()
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to parse ThemeData")?,
        )
        .await?;
    }
    // Read back the (possibly re-uploaded) theme data for the response.
    let file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&db_theme.theme_id),
    )
    .await?;
    let parsed_data = file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;
    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_name: db_theme.theme_name,
        theme_data: parsed_data,
    }))
}
/// Uploads a single asset into a theme's storage folder after verifying the
/// authenticated user's entity type is allowed to access that theme.
pub async fn upload_file_to_user_theme_storage(
    state: SessionState,
    theme_id: String,
    user_from_token: UserFromToken,
    request: theme_api::UploadFileRequest,
) -> UserResponse<()> {
    let db_theme = state
        .store
        .find_theme_by_theme_id(theme_id)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let user_role_info = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    let user_entity_type = user_role_info.get_entity_type();
    // Authorization check before touching storage.
    theme_utils::can_user_access_theme(&user_from_token, &user_entity_type, &db_theme).await?;

    let asset_key = theme_utils::get_specific_file_key(&db_theme.theme_id, &request.asset_name);
    theme_utils::upload_file_to_theme_bucket(&state, &asset_key, request.asset_data.expose())
        .await?;

    Ok(ApplicationResponse::StatusOk)
}
/// Lists every theme at and under the lineage derived from the caller's
/// token and the requested entity type, together with each theme's parsed
/// theme data file.
///
/// Any failure to retrieve *or* parse a theme's file aborts the whole
/// listing with `ErrorRetrievingFile` (all-or-nothing, as before).
pub async fn list_all_themes_in_lineage(
    state: SessionState,
    user: UserFromToken,
    entity_type: EntityType,
) -> UserResponse<Vec<theme_api::GetThemeResponse>> {
    let lineage =
        theme_utils::get_theme_lineage_from_user_token(&user, &state, &entity_type).await?;
    let db_themes = state
        .store
        .list_themes_at_and_under_lineage(lineage)
        .await
        .change_context(UserErrors::InternalServerError)?;
    let mut themes = Vec::with_capacity(db_themes.len());
    for theme in db_themes {
        // Flattened from two nested matches whose error arms were identical:
        // both a retrieval failure and a parse failure map to
        // `ErrorRetrievingFile`, exactly as in the original nested form.
        let file = theme_utils::retrieve_file_from_theme_bucket(
            &state,
            &theme_utils::get_theme_file_key(&theme.theme_id),
        )
        .await
        .map_err(|_| UserErrors::ErrorRetrievingFile)?;
        let parsed_data = file
            .to_bytes()
            .parse_struct("ThemeData")
            .map_err(|_| UserErrors::ErrorRetrievingFile)?;
        themes.push(theme_api::GetThemeResponse {
            email_config: theme.email_config(),
            theme_id: theme.theme_id,
            theme_name: theme.theme_name,
            entity_type: theme.entity_type,
            tenant_id: theme.tenant_id,
            org_id: theme.org_id,
            merchant_id: theme.merchant_id,
            profile_id: theme.profile_id,
            theme_data: parsed_data,
        });
    }
    Ok(ApplicationResponse::Json(themes))
}
/// Fetches a theme by id, enforcing that the authenticated user's entity
/// type grants access to it, and returns it with its parsed theme data.
pub async fn get_user_theme_using_theme_id(
    state: SessionState,
    user_from_token: UserFromToken,
    theme_id: String,
) -> UserResponse<theme_api::GetThemeResponse> {
    let db_theme = state
        .store
        .find_theme_by_theme_id(theme_id.clone())
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let user_role_info = user_from_token
        .get_role_info_from_db(&state)
        .await
        .attach_printable("Invalid role_id in JWT")?;
    let user_role_entity = user_role_info.get_entity_type();
    // Authorization check before reading from the bucket.
    theme_utils::can_user_access_theme(&user_from_token, &user_role_entity, &db_theme).await?;

    let theme_file = theme_utils::retrieve_file_from_theme_bucket(
        &state,
        &theme_utils::get_theme_file_key(&theme_id),
    )
    .await?;
    let theme_data = theme_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        theme_name: db_theme.theme_name,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_data,
    }))
}
/// Fetches the theme for the lineage derived from the caller's token and the
/// requested entity type, along with its parsed theme data file.
pub async fn get_user_theme_using_lineage(
    state: SessionState,
    user_from_token: UserFromToken,
    entity_type: EntityType,
) -> UserResponse<theme_api::GetThemeResponse> {
    // Lineage is derived from the token rather than supplied by the caller.
    let lineage =
        theme_utils::get_theme_lineage_from_user_token(&user_from_token, &state, &entity_type)
            .await?;
    let db_theme = state
        .store
        .find_theme_by_lineage(lineage)
        .await
        .to_not_found_response(UserErrors::ThemeNotFound)?;

    let file_key = theme_utils::get_theme_file_key(&db_theme.theme_id);
    let theme_file = theme_utils::retrieve_file_from_theme_bucket(&state, &file_key).await?;
    let theme_data = theme_file
        .to_bytes()
        .parse_struct("ThemeData")
        .change_context(UserErrors::InternalServerError)?;

    Ok(ApplicationResponse::Json(theme_api::GetThemeResponse {
        email_config: db_theme.email_config(),
        theme_id: db_theme.theme_id,
        theme_name: db_theme.theme_name,
        entity_type: db_theme.entity_type,
        tenant_id: db_theme.tenant_id,
        org_id: db_theme.org_id,
        merchant_id: db_theme.merchant_id,
        profile_id: db_theme.profile_id,
        theme_data,
    }))
}
// File: crates/router/src/core/user/dashboard_metadata.rs
use api_models::user::dashboard_metadata::{self as api, GetMultipleMetaDataPayload};
#[cfg(feature = "email")]
use common_enums::EntityType;
use diesel_models::{
enums::DashboardMetadata as DBEnum, user::dashboard_metadata::DashboardMetadata,
};
use error_stack::{report, ResultExt};
use hyperswitch_interfaces::crm::CrmPayload;
#[cfg(feature = "email")]
use masking::ExposeInterface;
use masking::{PeekInterface, Secret};
use router_env::logger;
use crate::{
core::errors::{UserErrors, UserResponse, UserResult},
routes::{app::ReqState, SessionState},
services::{authentication::UserFromToken, ApplicationResponse},
types::domain::{self, user::dashboard_metadata as types, MerchantKeyStore},
utils::user::{self as user_utils, dashboard_metadata as utils},
};
#[cfg(feature = "email")]
use crate::{services::email::types as email_types, utils::user::theme as theme_utils};
/// Parses the incoming request into a typed metadata value and persists it
/// under the matching dashboard-metadata key.
pub async fn set_metadata(
    state: SessionState,
    user: UserFromToken,
    request: api::SetMetaDataRequest,
    _req_state: ReqState,
) -> UserResponse<()> {
    let value = parse_set_request(request)?;
    // The DB key is derived directly from the typed value's variant.
    let key = DBEnum::from(&value);
    insert_metadata(&state, user, key, value).await?;
    Ok(ApplicationResponse::StatusOk)
}
/// Fetches multiple dashboard-metadata entries for the user, backfilling
/// processor-connection keys from merchant connector accounts when absent.
pub async fn get_multiple_metadata(
    state: SessionState,
    user: UserFromToken,
    request: GetMultipleMetaDataPayload,
    _req_state: ReqState,
) -> UserResponse<Vec<api::GetMetaDataResponse>> {
    let metadata_keys: Vec<DBEnum> = request.results.into_iter().map(parse_get_request).collect();
    let metadata = fetch_metadata(&state, &user, metadata_keys.clone()).await?;
    let mut response = Vec::with_capacity(metadata_keys.len());
    for key in metadata_keys {
        let data = metadata.iter().find(|ele| ele.data_key == key);
        // Deferred `let resp;` + branch assignments replaced by a single
        // expression-valued `if` (clippy::needless_late_init).
        let resp = if data.is_none() && utils::is_backfill_required(key) {
            // Key is absent but derivable from other state: backfill it.
            let backfill_data = backfill_metadata(&state, &user, &key).await?;
            into_response(backfill_data.as_ref(), key)?
        } else {
            into_response(data, key)?
        };
        response.push(resp);
    }
    Ok(ApplicationResponse::Json(response))
}
/// Converts an API `SetMetaDataRequest` into the internal typed `MetaData`
/// value that will be persisted.
///
/// Marker-style requests (no payload) are stored as `true` flags. The only
/// fallible case is `ProductionAgreement`, which requires the caller's IP
/// address to have been captured.
fn parse_set_request(data_enum: api::SetMetaDataRequest) -> UserResult<types::MetaData> {
    match data_enum {
        api::SetMetaDataRequest::ProductionAgreement(req) => {
            // IP address is expected to be injected upstream; its absence is
            // an internal error rather than a client error.
            let ip_address = req
                .ip_address
                .ok_or(report!(UserErrors::InternalServerError))
                .attach_printable("Error Getting Ip Address")?;
            Ok(types::MetaData::ProductionAgreement(
                types::ProductionAgreementValue {
                    version: req.version,
                    ip_address,
                    timestamp: common_utils::date_time::now(),
                },
            ))
        }
        api::SetMetaDataRequest::SetupProcessor(req) => Ok(types::MetaData::SetupProcessor(req)),
        api::SetMetaDataRequest::ConfigureEndpoint => Ok(types::MetaData::ConfigureEndpoint(true)),
        api::SetMetaDataRequest::SetupComplete => Ok(types::MetaData::SetupComplete(true)),
        api::SetMetaDataRequest::FirstProcessorConnected(req) => {
            Ok(types::MetaData::FirstProcessorConnected(req))
        }
        api::SetMetaDataRequest::SecondProcessorConnected(req) => {
            Ok(types::MetaData::SecondProcessorConnected(req))
        }
        api::SetMetaDataRequest::ConfiguredRouting(req) => {
            Ok(types::MetaData::ConfiguredRouting(req))
        }
        api::SetMetaDataRequest::TestPayment(req) => Ok(types::MetaData::TestPayment(req)),
        api::SetMetaDataRequest::IntegrationMethod(req) => {
            Ok(types::MetaData::IntegrationMethod(req))
        }
        api::SetMetaDataRequest::ConfigurationType(req) => {
            Ok(types::MetaData::ConfigurationType(req))
        }
        api::SetMetaDataRequest::IntegrationCompleted => {
            Ok(types::MetaData::IntegrationCompleted(true))
        }
        api::SetMetaDataRequest::SPRoutingConfigured(req) => {
            Ok(types::MetaData::SPRoutingConfigured(req))
        }
        api::SetMetaDataRequest::Feedback(req) => Ok(types::MetaData::Feedback(req)),
        api::SetMetaDataRequest::ProdIntent(req) => Ok(types::MetaData::ProdIntent(req)),
        api::SetMetaDataRequest::SPTestPayment => Ok(types::MetaData::SPTestPayment(true)),
        api::SetMetaDataRequest::DownloadWoocom => Ok(types::MetaData::DownloadWoocom(true)),
        api::SetMetaDataRequest::ConfigureWoocom => Ok(types::MetaData::ConfigureWoocom(true)),
        api::SetMetaDataRequest::SetupWoocomWebhook => {
            Ok(types::MetaData::SetupWoocomWebhook(true))
        }
        api::SetMetaDataRequest::IsMultipleConfiguration => {
            Ok(types::MetaData::IsMultipleConfiguration(true))
        }
        api::SetMetaDataRequest::IsChangePasswordRequired => {
            Ok(types::MetaData::IsChangePasswordRequired(true))
        }
        api::SetMetaDataRequest::OnboardingSurvey(req) => {
            Ok(types::MetaData::OnboardingSurvey(req))
        }
        api::SetMetaDataRequest::ReconStatus(req) => Ok(types::MetaData::ReconStatus(req)),
    }
}
/// Maps an API `GetMetaDataRequest` variant 1:1 onto its DB metadata key.
///
/// Infallible by construction; note the casing differences between the two
/// enums (e.g. `SPRoutingConfigured` -> `SpRoutingConfigured`).
fn parse_get_request(data_enum: api::GetMetaDataRequest) -> DBEnum {
    match data_enum {
        api::GetMetaDataRequest::ProductionAgreement => DBEnum::ProductionAgreement,
        api::GetMetaDataRequest::SetupProcessor => DBEnum::SetupProcessor,
        api::GetMetaDataRequest::ConfigureEndpoint => DBEnum::ConfigureEndpoint,
        api::GetMetaDataRequest::SetupComplete => DBEnum::SetupComplete,
        api::GetMetaDataRequest::FirstProcessorConnected => DBEnum::FirstProcessorConnected,
        api::GetMetaDataRequest::SecondProcessorConnected => DBEnum::SecondProcessorConnected,
        api::GetMetaDataRequest::ConfiguredRouting => DBEnum::ConfiguredRouting,
        api::GetMetaDataRequest::TestPayment => DBEnum::TestPayment,
        api::GetMetaDataRequest::IntegrationMethod => DBEnum::IntegrationMethod,
        api::GetMetaDataRequest::ConfigurationType => DBEnum::ConfigurationType,
        api::GetMetaDataRequest::IntegrationCompleted => DBEnum::IntegrationCompleted,
        api::GetMetaDataRequest::StripeConnected => DBEnum::StripeConnected,
        api::GetMetaDataRequest::PaypalConnected => DBEnum::PaypalConnected,
        api::GetMetaDataRequest::SPRoutingConfigured => DBEnum::SpRoutingConfigured,
        api::GetMetaDataRequest::Feedback => DBEnum::Feedback,
        api::GetMetaDataRequest::ProdIntent => DBEnum::ProdIntent,
        api::GetMetaDataRequest::SPTestPayment => DBEnum::SpTestPayment,
        api::GetMetaDataRequest::DownloadWoocom => DBEnum::DownloadWoocom,
        api::GetMetaDataRequest::ConfigureWoocom => DBEnum::ConfigureWoocom,
        api::GetMetaDataRequest::SetupWoocomWebhook => DBEnum::SetupWoocomWebhook,
        api::GetMetaDataRequest::IsMultipleConfiguration => DBEnum::IsMultipleConfiguration,
        api::GetMetaDataRequest::IsChangePasswordRequired => DBEnum::IsChangePasswordRequired,
        api::GetMetaDataRequest::OnboardingSurvey => DBEnum::OnboardingSurvey,
        api::GetMetaDataRequest::ReconStatus => DBEnum::ReconStatus,
    }
}
/// Builds the API response for one metadata key from its (optional) stored
/// row.
///
/// Boolean-flag keys report mere presence (`data.is_some()`); payload keys
/// deserialize the stored JSON via `utils::deserialize_to_response`, which is
/// the only fallible path here.
fn into_response(
    data: Option<&DashboardMetadata>,
    data_type: DBEnum,
) -> UserResult<api::GetMetaDataResponse> {
    match data_type {
        // Presence-only flag: the row existing at all means "agreed".
        DBEnum::ProductionAgreement => Ok(api::GetMetaDataResponse::ProductionAgreement(
            data.is_some(),
        )),
        DBEnum::SetupProcessor => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SetupProcessor(resp))
        }
        DBEnum::ConfigureEndpoint => {
            Ok(api::GetMetaDataResponse::ConfigureEndpoint(data.is_some()))
        }
        DBEnum::SetupComplete => Ok(api::GetMetaDataResponse::SetupComplete(data.is_some())),
        DBEnum::FirstProcessorConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::FirstProcessorConnected(resp))
        }
        DBEnum::SecondProcessorConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SecondProcessorConnected(resp))
        }
        DBEnum::ConfiguredRouting => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ConfiguredRouting(resp))
        }
        DBEnum::TestPayment => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::TestPayment(resp))
        }
        DBEnum::IntegrationMethod => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::IntegrationMethod(resp))
        }
        DBEnum::ConfigurationType => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ConfigurationType(resp))
        }
        DBEnum::IntegrationCompleted => Ok(api::GetMetaDataResponse::IntegrationCompleted(
            data.is_some(),
        )),
        DBEnum::StripeConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::StripeConnected(resp))
        }
        DBEnum::PaypalConnected => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::PaypalConnected(resp))
        }
        DBEnum::SpRoutingConfigured => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::SPRoutingConfigured(resp))
        }
        DBEnum::Feedback => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::Feedback(resp))
        }
        DBEnum::ProdIntent => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ProdIntent(resp))
        }
        DBEnum::SpTestPayment => Ok(api::GetMetaDataResponse::SPTestPayment(data.is_some())),
        DBEnum::DownloadWoocom => Ok(api::GetMetaDataResponse::DownloadWoocom(data.is_some())),
        DBEnum::ConfigureWoocom => Ok(api::GetMetaDataResponse::ConfigureWoocom(data.is_some())),
        DBEnum::SetupWoocomWebhook => {
            Ok(api::GetMetaDataResponse::SetupWoocomWebhook(data.is_some()))
        }
        DBEnum::IsMultipleConfiguration => Ok(api::GetMetaDataResponse::IsMultipleConfiguration(
            data.is_some(),
        )),
        DBEnum::IsChangePasswordRequired => Ok(api::GetMetaDataResponse::IsChangePasswordRequired(
            data.is_some(),
        )),
        DBEnum::OnboardingSurvey => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::OnboardingSurvey(resp))
        }
        DBEnum::ReconStatus => {
            let resp = utils::deserialize_to_response(data)?;
            Ok(api::GetMetaDataResponse::ReconStatus(resp))
        }
    }
}
/// Persists one typed metadata value under its DB key.
///
/// Most variants are write-once inserts scoped either to the merchant or to
/// the individual user. A few variants (IntegrationMethod, ConfigurationType,
/// Feedback, ProdIntent, ReconStatus) follow an insert-then-update pattern:
/// the insert is attempted first, and if `utils::is_update_required`
/// indicates the row already exists, an update is issued instead.
/// `ProdIntent` additionally triggers a notification email (behind the
/// `email` feature) and a best-effort CRM (Hubspot) sync.
async fn insert_metadata(
    state: &SessionState,
    user: UserFromToken,
    metadata_key: DBEnum,
    metadata_value: types::MetaData,
) -> UserResult<DashboardMetadata> {
    match metadata_value {
        types::MetaData::ProductionAgreement(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupProcessor(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfigureEndpoint(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupComplete(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::FirstProcessorConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SecondProcessorConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfiguredRouting(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::TestPayment(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // Updatable key: insert first, then fall back to an update if the
        // row already existed.
        types::MetaData::IntegrationMethod(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        // Updatable key (same insert-then-update pattern as above).
        types::MetaData::ConfigurationType(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        types::MetaData::IntegrationCompleted(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::StripeConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::PaypalConnected(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SPRoutingConfigured(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // User-scoped updatable key: feedback belongs to the individual user.
        types::MetaData::Feedback(data) => {
            let mut metadata = utils::insert_user_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_user_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            metadata
        }
        // Updatable key with side effects: persist, optionally email the biz
        // team, and sync the intent to the CRM.
        types::MetaData::ProdIntent(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id.clone(),
                    user.merchant_id.clone(),
                    user.org_id.clone(),
                    metadata_key,
                    data.clone(),
                )
                .await
                .change_context(UserErrors::InternalServerError);
            }
            #[cfg(feature = "email")]
            {
                let user_data = user.get_user_from_db(state).await?;
                let user_email = domain::UserEmail::from_pii_email(user_data.get_email())
                    .change_context(UserErrors::InternalServerError)?
                    .get_secret()
                    .expose();
                if utils::is_prod_email_required(&data, user_email) {
                    // Theme drives branding of the outgoing email; fall back
                    // to the configured default email config if none found.
                    let theme = theme_utils::get_most_specific_theme_using_token_and_min_entity(
                        state,
                        &user,
                        EntityType::Merchant,
                    )
                    .await?;
                    let email_contents = email_types::BizEmailProd::new(
                        state,
                        data.clone(),
                        theme.as_ref().map(|theme| theme.theme_id.clone()),
                        theme
                            .map(|theme| theme.email_config())
                            .unwrap_or(state.conf.theme.email_config.clone()),
                    )?;
                    // Email delivery is best-effort: the result is only logged.
                    let send_email_result = state
                        .email_client
                        .compose_and_send_email(
                            user_utils::get_base_url(state),
                            Box::new(email_contents),
                            state.conf.proxy.https_url.as_ref(),
                        )
                        .await;
                    logger::info!(prod_intent_email=?send_email_result);
                }
            }
            // Hubspot integration
            let hubspot_body = state
                .crm_client
                .make_body(CrmPayload {
                    legal_business_name: data.legal_business_name.map(|s| s.into_inner()),
                    business_label: data.business_label.map(|s| s.into_inner()),
                    business_location: data.business_location,
                    display_name: data.display_name.map(|s| s.into_inner()),
                    poc_email: data.poc_email.map(|s| Secret::new(s.peek().clone())),
                    business_type: data.business_type.map(|s| s.into_inner()),
                    business_identifier: data.business_identifier.map(|s| s.into_inner()),
                    business_website: data.business_website.map(|s| s.into_inner()),
                    poc_name: data
                        .poc_name
                        .map(|s| Secret::new(s.peek().clone().into_inner())),
                    poc_contact: data
                        .poc_contact
                        .map(|s| Secret::new(s.peek().clone().into_inner())),
                    comments: data.comments.map(|s| s.into_inner()),
                    is_completed: data.is_completed,
                    business_country_name: data.business_country_name.map(|s| s.into_inner()),
                })
                .await;
            let base_url = user_utils::get_base_url(state);
            let hubspot_request = state
                .crm_client
                .make_request(hubspot_body, base_url.to_string())
                .await;
            // CRM sync is best-effort: failures are logged, never propagated.
            let _ = state
                .crm_client
                .send_request(&state.conf.proxy, hubspot_request)
                .await
                .inspect_err(|err| {
                    logger::error!(
                        "An error occurred while sending data to hubspot for user_id {}: {:?}",
                        user.user_id,
                        err
                    );
                });
            metadata
        }
        types::MetaData::SPTestPayment(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::DownloadWoocom(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ConfigureWoocom(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::SetupWoocomWebhook(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::IsMultipleConfiguration(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        // User-scoped flag: password-change requirement is per user.
        types::MetaData::IsChangePasswordRequired(data) => {
            utils::insert_user_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::OnboardingSurvey(data) => {
            utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id,
                user.merchant_id,
                user.org_id,
                metadata_key,
                data,
            )
            .await
        }
        types::MetaData::ReconStatus(data) => {
            let mut metadata = utils::insert_merchant_scoped_metadata_to_db(
                state,
                user.user_id.clone(),
                user.merchant_id.clone(),
                user.org_id.clone(),
                metadata_key,
                data.clone(),
            )
            .await;
            if utils::is_update_required(&metadata) {
                // NOTE(review): unlike the other updatable keys, this update
                // result is not wrapped in `InternalServerError` — confirm
                // whether that asymmetry is intentional.
                metadata = utils::update_merchant_scoped_metadata(
                    state,
                    user.user_id,
                    user.merchant_id,
                    user.org_id,
                    metadata_key,
                    data,
                )
                .await;
            }
            metadata
        }
    }
}
/// Loads the requested metadata rows, splitting the keys by scope and
/// querying the merchant-scoped and user-scoped lookups separately.
async fn fetch_metadata(
    state: &SessionState,
    user: &UserFromToken,
    metadata_keys: Vec<DBEnum>,
) -> UserResult<Vec<DashboardMetadata>> {
    let (merchant_scoped_enums, user_scoped_enums) =
        utils::separate_metadata_type_based_on_scope(metadata_keys);
    let mut dashboard_metadata = Vec::new();
    if !merchant_scoped_enums.is_empty() {
        dashboard_metadata.extend(
            utils::get_merchant_scoped_metadata_from_db(
                state,
                user.merchant_id.to_owned(),
                user.org_id.to_owned(),
                merchant_scoped_enums,
            )
            .await?,
        );
    }
    if !user_scoped_enums.is_empty() {
        dashboard_metadata.extend(
            utils::get_user_scoped_metadata_from_db(
                state,
                user.user_id.to_owned(),
                user.merchant_id.to_owned(),
                user.org_id.to_owned(),
                user_scoped_enums,
            )
            .await?,
        );
    }
    Ok(dashboard_metadata)
}
/// Reconstructs a missing metadata row from the merchant's connector
/// accounts, for the keys where that is possible.
///
/// Only `StripeConnected` and `PaypalConnected` are backfillable: for each,
/// the live connector is checked first, then its test counterpart. When a
/// matching connector account is found, the corresponding metadata row is
/// inserted and returned; every other key yields `Ok(None)`.
pub async fn backfill_metadata(
    state: &SessionState,
    user: &UserFromToken,
    key: &DBEnum,
) -> UserResult<Option<DashboardMetadata>> {
    // Key store is needed to decrypt merchant connector account data.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            &state.into(),
            &user.merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(UserErrors::InternalServerError)?;
    match key {
        DBEnum::StripeConnected => {
            // Prefer the live Stripe connector; fall back to "stripe_test".
            let mca = if let Some(stripe_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                api_models::enums::RoutableConnectors::Stripe
                    .to_string()
                    .as_str(),
                &key_store,
            )
            .await?
            {
                stripe_connected
            } else if let Some(stripe_test_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                //TODO: Use Enum with proper feature flag
                "stripe_test",
                &key_store,
            )
            .await?
            {
                stripe_test_connected
            } else {
                // Nothing to backfill from.
                return Ok(None);
            };
            #[cfg(feature = "v1")]
            let processor_name = mca.connector_name.clone();
            #[cfg(feature = "v2")]
            let processor_name = mca.connector_name.to_string().clone();
            // Persist the reconstructed row and return it.
            Some(
                insert_metadata(
                    state,
                    user.to_owned(),
                    DBEnum::StripeConnected,
                    types::MetaData::StripeConnected(api::ProcessorConnected {
                        processor_id: mca.get_id(),
                        processor_name,
                    }),
                )
                .await,
            )
            .transpose()
        }
        DBEnum::PaypalConnected => {
            // Prefer the live Paypal connector; fall back to "paypal_test".
            let mca = if let Some(paypal_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                api_models::enums::RoutableConnectors::Paypal
                    .to_string()
                    .as_str(),
                &key_store,
            )
            .await?
            {
                paypal_connected
            } else if let Some(paypal_test_connected) = get_merchant_connector_account_by_name(
                state,
                &user.merchant_id,
                //TODO: Use Enum with proper feature flag
                "paypal_test",
                &key_store,
            )
            .await?
            {
                paypal_test_connected
            } else {
                return Ok(None);
            };
            #[cfg(feature = "v1")]
            let processor_name = mca.connector_name.clone();
            #[cfg(feature = "v2")]
            let processor_name = mca.connector_name.to_string().clone();
            Some(
                insert_metadata(
                    state,
                    user.to_owned(),
                    DBEnum::PaypalConnected,
                    types::MetaData::PaypalConnected(api::ProcessorConnected {
                        processor_id: mca.get_id(),
                        processor_name,
                    }),
                )
                .await,
            )
            .transpose()
        }
        // All other keys are not derivable from connector accounts.
        _ => Ok(None),
    }
}
/// Fetches the first merchant connector account configured under the given
/// connector name for this merchant, or `None` when none exists.
///
/// Storage errors are surfaced as `UserErrors::InternalServerError`.
pub async fn get_merchant_connector_account_by_name(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    connector_name: &str,
    key_store: &MerchantKeyStore,
) -> UserResult<Option<domain::MerchantConnectorAccount>> {
    #[cfg(feature = "v1")]
    {
        state
            .store
            .find_merchant_connector_account_by_merchant_id_connector_name(
                &state.into(),
                merchant_id,
                connector_name,
                key_store,
            )
            .await
            .map_err(|e| {
                e.change_context(UserErrors::InternalServerError)
                    .attach_printable("DB Error Fetching DashboardMetaData")
            })
            // Several accounts may exist for one connector name; only the
            // first returned account is used.
            .map(|data| data.first().cloned())
    }
    #[cfg(feature = "v2")]
    {
        // v2 lookup is not implemented yet; the bindings below silence
        // unused-parameter warnings until it is.
        let _ = state;
        let _ = merchant_id;
        let _ = connector_name;
        let _ = key_store;
        todo!()
    }
}
// File: crates/router/src/core/user/sample_data.rs
use api_models::user::sample_data::SampleDataRequest;
use common_utils::errors::ReportSwitchExt;
use diesel_models::{DisputeNew, RefundNew};
use error_stack::ResultExt;
use hyperswitch_domain_models::payments::PaymentIntent;
pub type SampleDataApiResponse<T> = SampleDataResult<ApplicationResponse<T>>;
use crate::{
core::errors::sample_data::{SampleDataError, SampleDataResult},
routes::{app::ReqState, SessionState},
services::{authentication::UserFromToken, ApplicationResponse},
utils,
};
/// Generates and persists sample payment data (intents, attempts, refunds,
/// disputes) for the merchant identified by the authenticated user's token.
#[cfg(feature = "v1")]
pub async fn generate_sample_data_for_user(
    state: SessionState,
    user_from_token: UserFromToken,
    req: SampleDataRequest,
    _req_state: ReqState,
) -> SampleDataApiResponse<()> {
    let sample_data = utils::user::sample_data::generate_sample_data(
        &state,
        req,
        &user_from_token.merchant_id,
        &user_from_token.org_id,
    )
    .await?;
    // Key store is needed to encrypt/decrypt payment intent data on insert.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            &(&state).into(),
            &user_from_token.merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(SampleDataError::InternalServerError)
        .attach_printable("Not able to fetch merchant key store")?; // If not able to fetch merchant key store for any reason, this should be an internal server error
    // Unzip the generated tuples into per-entity batches; refund and dispute
    // are optional per record and only collected when present.
    let (payment_intents, payment_attempts, refunds, disputes): (
        Vec<PaymentIntent>,
        Vec<diesel_models::user::sample_data::PaymentAttemptBatchNew>,
        Vec<RefundNew>,
        Vec<DisputeNew>,
    ) = sample_data.into_iter().fold(
        (Vec::new(), Vec::new(), Vec::new(), Vec::new()),
        |(mut pi, mut pa, mut rf, mut dp), (payment_intent, payment_attempt, refund, dispute)| {
            pi.push(payment_intent);
            pa.push(payment_attempt);
            if let Some(refund) = refund {
                rf.push(refund);
            }
            if let Some(dispute) = dispute {
                dp.push(dispute);
            }
            (pi, pa, rf, dp)
        },
    );
    // Insert batches with intents first, then the records referencing them.
    state
        .store
        .insert_payment_intents_batch_for_sample_data(&(&state).into(), payment_intents, &key_store)
        .await
        .switch()?;
    state
        .store
        .insert_payment_attempts_batch_for_sample_data(payment_attempts)
        .await
        .switch()?;
    state
        .store
        .insert_refunds_batch_for_sample_data(refunds)
        .await
        .switch()?;
    state
        .store
        .insert_disputes_batch_for_sample_data(disputes)
        .await
        .switch()?;
    Ok(ApplicationResponse::StatusOk)
}
/// Deletes all previously generated sample data (payment intents, attempts,
/// refunds, disputes) for the merchant in the authenticated user's token.
#[cfg(feature = "v1")]
pub async fn delete_sample_data_for_user(
    state: SessionState,
    user_from_token: UserFromToken,
    _req: SampleDataRequest,
    _req_state: ReqState,
) -> SampleDataApiResponse<()> {
    let merchant_id_del = user_from_token.merchant_id;
    let key_manager_state = &(&state).into();
    // Key store is required by the intent deletion path.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            key_manager_state,
            &merchant_id_del,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(SampleDataError::InternalServerError)
        .attach_printable("Not able to fetch merchant key store")?; // If not able to fetch merchant key store for any reason, this should be an internal server error
    // Delete per entity; each call removes only sample-data rows for the
    // merchant.
    state
        .store
        .delete_payment_intents_for_sample_data(key_manager_state, &merchant_id_del, &key_store)
        .await
        .switch()?;
    state
        .store
        .delete_payment_attempts_for_sample_data(&merchant_id_del)
        .await
        .switch()?;
    state
        .store
        .delete_refunds_for_sample_data(&merchant_id_del)
        .await
        .switch()?;
    state
        .store
        .delete_disputes_for_sample_data(&merchant_id_del)
        .await
        .switch()?;
    Ok(ApplicationResponse::StatusOk)
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/user/theme.rs",
"crates/router/src/core/user/dashboard_metadata.rs",
"crates/router/src/core/user/sample_data.rs"
],
"module": "crates/router/src/core/user",
"num_files": 3,
"token_count": 10729
}
|
module_-2318165162841447235
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/blocklist
Files: 2
</path>
<module>
// File: crates/router/src/core/blocklist/transformers.rs
use api_models::{blocklist, enums as api_enums};
use common_utils::{
ext_traits::{Encode, StringExt},
request::RequestContent,
};
use error_stack::ResultExt;
use josekit::jwe;
use masking::{PeekInterface, StrongSecret};
use router_env::{instrument, tracing};
use crate::{
configs::settings,
core::{
errors::{self, CustomResult},
payment_methods::transformers as payment_methods,
},
headers, routes,
services::{api as services, encryption, EncryptionAlgorithm},
types::{storage, transformers::ForeignFrom},
utils::ConnectorResponseExt,
};
/// Locker endpoint used to generate card fingerprints.
const LOCKER_FINGERPRINT_PATH: &str = "/cards/fingerprint";
impl ForeignFrom<storage::Blocklist> for blocklist::AddToBlocklistResponse {
    /// Maps a stored blocklist row to its API response representation.
    fn foreign_from(from: storage::Blocklist) -> Self {
        Self {
            fingerprint_id: from.fingerprint_id,
            data_kind: from.data_kind,
            created_at: from.created_at,
        }
    }
}
/// Builds the signed-and-encrypted HTTP request sent to the locker's
/// fingerprint endpoint: JSON-encode the payload, JWS-sign it with the vault
/// private key, wrap the JWS in a JWE for the chosen locker, and POST it.
async fn generate_fingerprint_request(
    jwekey: &settings::Jwekey,
    locker: &settings::Locker,
    payload: &blocklist::GenerateFingerprintRequest,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<services::Request, errors::VaultError> {
    let payload = payload
        .encode_to_vec()
        .change_context(errors::VaultError::RequestEncodingFailed)?;
    let private_key = jwekey.vault_private_key.peek().as_bytes();
    // Sign the serialized payload so the locker can verify its origin.
    let jws = encryption::jws_sign_payload(&payload, &locker.locker_signing_key_id, private_key)
        .await
        .change_context(errors::VaultError::RequestEncodingFailed)?;
    let jwe_payload = generate_jwe_payload_for_request(jwekey, &jws, locker_choice).await?;
    // Single-variant match today; keeps the site explicit if more lockers
    // are added later.
    let mut url = match locker_choice {
        api_enums::LockerChoice::HyperswitchCardVault => locker.host.to_owned(),
    };
    url.push_str(LOCKER_FINGERPRINT_PATH);
    let mut request = services::Request::new(services::Method::Post, &url);
    request.add_header(headers::CONTENT_TYPE, "application/json".into());
    request.set_body(RequestContent::Json(Box::new(jwe_payload)));
    Ok(request)
}
/// Wraps an existing JWS (compact form) into a structured `JwsBody`, encrypts
/// it with the target locker's public key, and repackages the resulting JWE
/// compact string into a structured `JweBody`.
async fn generate_jwe_payload_for_request(
    jwekey: &settings::Jwekey,
    jws: &str,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<encryption::JweBody, errors::VaultError> {
    // JWS compact serialization is: header.payload.signature
    let jws_payload: Vec<&str> = jws.split('.').collect();
    let generate_jws_body = |payload: Vec<&str>| -> Option<encryption::JwsBody> {
        Some(encryption::JwsBody {
            header: payload.first()?.to_string(),
            payload: payload.get(1)?.to_string(),
            signature: payload.get(2)?.to_string(),
        })
    };
    let jws_body =
        generate_jws_body(jws_payload).ok_or(errors::VaultError::GenerateFingerprintFailed)?;
    let payload = jws_body
        .encode_to_vec()
        .change_context(errors::VaultError::GenerateFingerprintFailed)?;
    // Encrypt with the public key of the chosen locker.
    let public_key = match locker_choice {
        api_enums::LockerChoice::HyperswitchCardVault => {
            jwekey.vault_encryption_key.peek().as_bytes()
        }
    };
    // NOTE(review): `SaveCardFailed` here looks copy-pasted from the card-save
    // flow; `GenerateFingerprintFailed` may be the intended variant — confirm.
    let jwe_encrypted =
        encryption::encrypt_jwe(&payload, public_key, EncryptionAlgorithm::A256GCM, None)
            .await
            .change_context(errors::VaultError::SaveCardFailed)
            .attach_printable("Error on jwe encrypt")?;
    // JWE compact serialization is: header.encrypted_key.iv.ciphertext.tag,
    // hence the index mapping below.
    let jwe_payload: Vec<&str> = jwe_encrypted.split('.').collect();
    let generate_jwe_body = |payload: Vec<&str>| -> Option<encryption::JweBody> {
        Some(encryption::JweBody {
            header: payload.first()?.to_string(),
            iv: payload.get(2)?.to_string(),
            encrypted_payload: payload.get(3)?.to_string(),
            tag: payload.get(4)?.to_string(),
            encrypted_key: payload.get(1)?.to_string(),
        })
    };
    let jwe_body =
        generate_jwe_body(jwe_payload).ok_or(errors::VaultError::GenerateFingerprintFailed)?;
    Ok(jwe_body)
}
/// Computes the locker-side fingerprint for a card number using the
/// merchant's hash key, delegating the call to
/// [`call_to_locker_for_fingerprint`].
#[instrument(skip_all)]
pub async fn generate_fingerprint(
    state: &routes::SessionState,
    card_number: StrongSecret<String>,
    hash_key: StrongSecret<String>,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<blocklist::GenerateFingerprintResponsePayload, errors::VaultError> {
    // Wrap the secrets in the locker request payload and forward it as-is.
    let request_payload = blocklist::GenerateFingerprintRequest {
        card: blocklist::Card { card_number },
        hash_key,
    };
    call_to_locker_for_fingerprint(state, &request_payload, locker_choice).await
}
/// Sends the fingerprint request to the locker, decrypts the JWE response,
/// and deserializes it into the fingerprint response payload.
#[instrument(skip_all)]
async fn call_to_locker_for_fingerprint(
    state: &routes::SessionState,
    payload: &blocklist::GenerateFingerprintRequest,
    locker_choice: api_enums::LockerChoice,
) -> CustomResult<blocklist::GenerateFingerprintResponsePayload, errors::VaultError> {
    let locker = &state.conf.locker;
    let jwekey = state.conf.jwekey.get_inner();
    let request = generate_fingerprint_request(jwekey, locker, payload, locker_choice).await?;
    let response = services::call_connector_api(state, request, "call_locker_to_get_fingerprint")
        .await
        .change_context(errors::VaultError::GenerateFingerprintFailed);
    // The locker responds with a JWE envelope that must be decrypted and
    // signature-verified before the payload can be parsed.
    let jwe_body: encryption::JweBody = response
        .get_response_inner("JweBody")
        .change_context(errors::VaultError::GenerateFingerprintFailed)?;
    let decrypted_payload = decrypt_generate_fingerprint_response_payload(
        jwekey,
        jwe_body,
        Some(locker_choice),
        locker.decryption_scheme.clone(),
    )
    .await
    .change_context(errors::VaultError::GenerateFingerprintFailed)
    .attach_printable("Error getting decrypted fingerprint response payload")?;
    let generate_fingerprint_response: blocklist::GenerateFingerprintResponsePayload =
        decrypted_payload
            .parse_struct("GenerateFingerprintResponse")
            .change_context(errors::VaultError::ResponseDeserializationFailed)?;
    Ok(generate_fingerprint_response)
}
/// Decrypts the locker's JWE-wrapped fingerprint response with the vault
/// private key and verifies the inner JWS signature against the locker's
/// public key, returning the verified plaintext payload.
async fn decrypt_generate_fingerprint_response_payload(
    jwekey: &settings::Jwekey,
    jwe_body: encryption::JweBody,
    locker_choice: Option<api_enums::LockerChoice>,
    decryption_scheme: settings::DecryptionScheme,
) -> CustomResult<String, errors::VaultError> {
    // Default to the Hyperswitch card vault when no locker is specified.
    let target_locker = locker_choice.unwrap_or(api_enums::LockerChoice::HyperswitchCardVault);
    let public_key = match target_locker {
        api_enums::LockerChoice::HyperswitchCardVault => {
            jwekey.vault_encryption_key.peek().as_bytes()
        }
    };
    let private_key = jwekey.vault_private_key.peek().as_bytes();
    // Reassemble the structured JWE body into compact (dotted) form.
    let jwt = payment_methods::get_dotted_jwe(jwe_body);
    let alg = match decryption_scheme {
        settings::DecryptionScheme::RsaOaep => jwe::RSA_OAEP,
        settings::DecryptionScheme::RsaOaep256 => jwe::RSA_OAEP_256,
    };
    // NOTE(review): `SaveCardFailed` in this fingerprint path looks
    // copy-pasted from the card-save flow — confirm the intended variant.
    let jwe_decrypted = encryption::decrypt_jwe(
        &jwt,
        encryption::KeyIdCheck::SkipKeyIdCheck,
        private_key,
        alg,
    )
    .await
    .change_context(errors::VaultError::SaveCardFailed)
    .attach_printable("Jwe Decryption failed for JweBody for vault")?;
    let jws = jwe_decrypted
        .parse_struct("JwsBody")
        .change_context(errors::VaultError::ResponseDeserializationFailed)?;
    let jws_body = payment_methods::get_dotted_jws(jws);
    // Signature verification also yields the verified payload string.
    encryption::verify_sign(jws_body, public_key)
        .change_context(errors::VaultError::SaveCardFailed)
        .attach_printable("Jws Decryption failed for JwsBody for vault")
}
// File: crates/router/src/core/blocklist/utils.rs
use api_models::blocklist as api_blocklist;
use common_enums::MerchantDecision;
use common_utils::errors::CustomResult;
use diesel_models::configs;
use error_stack::ResultExt;
use masking::StrongSecret;
use super::{errors, transformers::generate_fingerprint, SessionState};
use crate::{
consts,
core::{
errors::{RouterResult, StorageErrorExt},
payments::PaymentData,
},
logger,
types::{domain, storage, transformers::ForeignInto},
utils,
};
pub async fn delete_entry_from_blocklist(
state: &SessionState,
merchant_id: &common_utils::id_type::MerchantId,
request: api_blocklist::DeleteFromBlocklistRequest,
) -> RouterResult<api_blocklist::DeleteFromBlocklistResponse> {
let blocklist_entry = match request {
api_blocklist::DeleteFromBlocklistRequest::CardBin(bin) => {
delete_card_bin_blocklist_entry(state, &bin, merchant_id).await?
}
api_blocklist::DeleteFromBlocklistRequest::ExtendedCardBin(xbin) => {
delete_card_bin_blocklist_entry(state, &xbin, merchant_id).await?
}
api_blocklist::DeleteFromBlocklistRequest::Fingerprint(fingerprint_id) => state
.store
.delete_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, &fingerprint_id)
.await
.to_not_found_response(errors::ApiErrorResponse::GenericNotFoundError {
message: "no blocklist record for the given fingerprint id was found".to_string(),
})?,
};
Ok(blocklist_entry.foreign_into())
}
/// Enables or disables the blocklist guard for a merchant by upserting the
/// guard flag in the configs table, returning the resulting status string.
pub async fn toggle_blocklist_guard_for_merchant(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    query: api_blocklist::ToggleBlocklistQuery,
) -> CustomResult<api_blocklist::ToggleBlocklistResponse, errors::ApiErrorResponse> {
    let key = merchant_id.get_blocklist_guard_key();
    match state.store.find_config_by_key(&key).await {
        // Config already exists: update it in place.
        Ok(_config) => {
            let updated_config = configs::ConfigUpdate::Update {
                config: Some(query.status.to_string()),
            };
            state
                .store
                .update_config_by_key(&key, updated_config)
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
        // First toggle for this merchant: insert a fresh config. Building the
        // row only in this arm lets `key` be moved instead of cloned.
        Err(e) if e.current_context().is_db_not_found() => {
            let new_config = configs::ConfigNew {
                key,
                config: query.status.to_string(),
            };
            state
                .store
                .insert_config(new_config)
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
        // Any other storage failure is an internal error.
        Err(error) => {
            logger::error!(?error);
            Err(error)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Error enabling the blocklist guard")?;
        }
    };
    let guard_status = if query.status { "enabled" } else { "disabled" };
    Ok(api_blocklist::ToggleBlocklistResponse {
        blocklist_guard_status: guard_status.to_string(),
    })
}
/// Returns a paginated list of the merchant's blocklist entries of the
/// requested data kind, mapped into API response types.
pub async fn list_blocklist_entries_for_merchant(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    query: api_blocklist::ListBlocklistQuery,
) -> RouterResult<Vec<api_blocklist::BlocklistResponse>> {
    let entries = state
        .store
        .list_blocklist_entries_by_merchant_id_data_kind(
            merchant_id,
            query.data_kind,
            query.limit.into(),
            query.offset.into(),
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::GenericNotFoundError {
            message: "no blocklist records found".to_string(),
        })?;
    Ok(entries.into_iter().map(ForeignInto::foreign_into).collect())
}
/// Checks that `bin` consists of exactly `expected_len` ASCII digits,
/// reporting `expected_format` in the error otherwise. Shared by the BIN
/// validators below to keep their behavior consistent.
fn validate_bin_of_length(
    bin: &str,
    expected_len: usize,
    expected_format: &str,
) -> RouterResult<()> {
    if bin.len() == expected_len && bin.chars().all(|c| c.is_ascii_digit()) {
        Ok(())
    } else {
        Err(errors::ApiErrorResponse::InvalidDataFormat {
            field_name: "data".to_string(),
            expected_format: expected_format.to_string(),
        }
        .into())
    }
}
/// Validates a standard 6-digit card BIN.
fn validate_card_bin(bin: &str) -> RouterResult<()> {
    validate_bin_of_length(bin, 6, "a 6 digit number")
}
/// Validates an 8-digit extended card BIN.
fn validate_extended_card_bin(bin: &str) -> RouterResult<()> {
    validate_bin_of_length(bin, 8, "an 8 digit number")
}
/// Adds a card BIN, extended card BIN, or payment-method fingerprint to the
/// merchant's blocklist, refusing duplicates with a precondition failure.
pub async fn insert_entry_into_blocklist(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    to_block: api_blocklist::AddToBlocklistRequest,
) -> RouterResult<api_blocklist::AddToBlocklistResponse> {
    let blocklist_entry = match &to_block {
        api_blocklist::AddToBlocklistRequest::CardBin(bin) => {
            // BIN must be exactly 6 ASCII digits.
            validate_card_bin(bin)?;
            duplicate_check_insert_bin(
                bin,
                state,
                merchant_id,
                common_enums::BlocklistDataKind::CardBin,
            )
            .await?
        }
        api_blocklist::AddToBlocklistRequest::ExtendedCardBin(bin) => {
            // Extended BIN must be exactly 8 ASCII digits.
            validate_extended_card_bin(bin)?;
            duplicate_check_insert_bin(
                bin,
                state,
                merchant_id,
                common_enums::BlocklistDataKind::ExtendedCardBin,
            )
            .await?
        }
        api_blocklist::AddToBlocklistRequest::Fingerprint(fingerprint_id) => {
            // A pre-existing entry for this fingerprint is a client error,
            // not an insert.
            let blocklist_entry_result = state
                .store
                .find_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, fingerprint_id)
                .await;
            match blocklist_entry_result {
                Ok(_) => {
                    return Err(errors::ApiErrorResponse::PreconditionFailed {
                        message: "data associated with the given fingerprint is already blocked"
                            .to_string(),
                    }
                    .into());
                }
                // if it is a db not found error, we can proceed as normal
                Err(inner) if inner.current_context().is_db_not_found() => {}
                err @ Err(_) => {
                    err.change_context(errors::ApiErrorResponse::InternalServerError)
                        .attach_printable("error fetching blocklist entry from table")?;
                }
            }
            state
                .store
                .insert_blocklist_entry(storage::BlocklistNew {
                    merchant_id: merchant_id.to_owned(),
                    fingerprint_id: fingerprint_id.clone(),
                    // NOTE(review): the `enums::enums` double path segment
                    // looks accidental but compiles via re-export — confirm.
                    data_kind: api_models::enums::enums::BlocklistDataKind::PaymentMethod,
                    metadata: None,
                    created_at: common_utils::date_time::now(),
                })
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("failed to add fingerprint to blocklist")?
        }
    };
    Ok(blocklist_entry.foreign_into())
}
/// Fetches the merchant's fingerprint hashing secret from the configs table,
/// generating and persisting a fresh one on first use.
pub async fn get_merchant_fingerprint_secret(
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
) -> RouterResult<String> {
    let key = merchant_id.get_merchant_fingerprint_secret_key();
    match state.store.find_config_by_key(&key).await {
        Ok(config) => Ok(config.config),
        // First use for this merchant: create a secret and store it.
        Err(err) if err.current_context().is_db_not_found() => {
            let fresh_secret = utils::generate_id(consts::FINGERPRINT_SECRET_LENGTH, "fs");
            state
                .store
                .insert_config(storage::ConfigNew {
                    key,
                    config: fresh_secret.clone(),
                })
                .await
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("unable to create new fingerprint secret for merchant")?;
            Ok(fresh_secret)
        }
        Err(err) => Err(err)
            .change_context(errors::ApiErrorResponse::InternalServerError)
            .attach_printable("error fetching merchant fingerprint secret"),
    }
}
/// Inserts a card BIN into the merchant's blocklist after confirming it is
/// not already blocked; a pre-existing entry is reported as a precondition
/// failure.
async fn duplicate_check_insert_bin(
    bin: &str,
    state: &SessionState,
    merchant_id: &common_utils::id_type::MerchantId,
    data_kind: common_enums::BlocklistDataKind,
) -> RouterResult<storage::Blocklist> {
    let existing = state
        .store
        .find_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, bin)
        .await;
    match existing {
        // Entry already present: refuse the duplicate insert.
        Ok(_) => {
            return Err(errors::ApiErrorResponse::PreconditionFailed {
                message: "provided bin is already blocked".to_string(),
            }
            .into());
        }
        // Not-found is the expected path; fall through to insertion.
        Err(e) if e.current_context().is_db_not_found() => {}
        lookup_err @ Err(_) => {
            return lookup_err
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("unable to fetch blocklist entry");
        }
    }
    // Not blocked yet: persist a fresh entry keyed on the raw bin.
    let new_entry = storage::BlocklistNew {
        merchant_id: merchant_id.to_owned(),
        fingerprint_id: bin.to_string(),
        data_kind,
        metadata: None,
        created_at: common_utils::date_time::now(),
    };
    state
        .store
        .insert_blocklist_entry(new_entry)
        .await
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("error inserting pm blocklist item")
}
/// Deletes the blocklist entry whose fingerprint id equals the given bin,
/// mapping a missing row to a descriptive not-found error.
async fn delete_card_bin_blocklist_entry(
    state: &SessionState,
    bin: &str,
    merchant_id: &common_utils::id_type::MerchantId,
) -> RouterResult<storage::Blocklist> {
    let deletion_result = state
        .store
        .delete_blocklist_entry_by_merchant_id_fingerprint_id(merchant_id, bin)
        .await;
    deletion_result.to_not_found_response(errors::ApiErrorResponse::GenericNotFoundError {
        message: "could not find a blocklist entry for the given bin".to_string(),
    })
}
/// Decides whether a payment should be blocked for this merchant by checking
/// the card-number fingerprint, the card BIN (ISIN), and the extended card
/// BIN against the merchant's blocklist.
///
/// Individual blocklist lookup failures are logged and treated as
/// "not blocked", so a degraded blocklist store never blocks payments
/// spuriously.
pub async fn should_payment_be_blocked(
    state: &SessionState,
    merchant_context: &domain::MerchantContext,
    payment_method_data: &Option<domain::PaymentMethodData>,
) -> CustomResult<bool, errors::ApiErrorResponse> {
    let db = &state.store;
    let merchant_id = merchant_context.get_merchant_account().get_id();
    let merchant_fingerprint_secret = get_merchant_fingerprint_secret(state, merchant_id).await?;
    // Hashed fingerprint of the full card number; fingerprint-generation
    // errors are logged and treated as "no fingerprint".
    let card_number_fingerprint =
        if let Some(domain::PaymentMethodData::Card(card)) = payment_method_data {
            generate_fingerprint(
                state,
                StrongSecret::new(card.card_number.get_card_no()),
                // The secret's only use is here, so it is moved rather than
                // cloned (the previous `.clone()` was redundant).
                StrongSecret::new(merchant_fingerprint_secret),
                api_models::enums::LockerChoice::HyperswitchCardVault,
            )
            .await
            .attach_printable("error in pm fingerprint creation")
            .map_or_else(
                |error| {
                    logger::error!(?error);
                    None
                },
                Some,
            )
            .map(|payload| payload.card_fingerprint)
        } else {
            None
        };
    // Card BIN (ISIN) to check whether or not this payment should be blocked.
    let card_bin_fingerprint = payment_method_data
        .as_ref()
        .and_then(|pm_data| match pm_data {
            domain::PaymentMethodData::Card(card) => Some(card.card_number.get_card_isin()),
            _ => None,
        });
    // Extended card BIN to check whether or not this payment should be blocked.
    let extended_card_bin_fingerprint =
        payment_method_data
            .as_ref()
            .and_then(|pm_data| match pm_data {
                domain::PaymentMethodData::Card(card) => {
                    Some(card.card_number.get_extended_card_bin())
                }
                _ => None,
            });
    // Collect the blocklist lookups for every identifier we could derive and
    // run them concurrently.
    let mut blocklist_futures = Vec::new();
    if let Some(card_number_fingerprint) = card_number_fingerprint.as_ref() {
        blocklist_futures.push(db.find_blocklist_entry_by_merchant_id_fingerprint_id(
            merchant_id,
            card_number_fingerprint,
        ));
    }
    if let Some(card_bin_fingerprint) = card_bin_fingerprint.as_ref() {
        blocklist_futures.push(
            db.find_blocklist_entry_by_merchant_id_fingerprint_id(
                merchant_id,
                card_bin_fingerprint,
            ),
        );
    }
    if let Some(extended_card_bin_fingerprint) = extended_card_bin_fingerprint.as_ref() {
        blocklist_futures.push(db.find_blocklist_entry_by_merchant_id_fingerprint_id(
            merchant_id,
            extended_card_bin_fingerprint,
        ));
    }
    let blocklist_lookups = futures::future::join_all(blocklist_futures).await;
    // Any successful lookup means an entry exists, i.e. the payment is blocked.
    let mut should_payment_be_blocked = false;
    for lookup in blocklist_lookups {
        match lookup {
            Ok(_) => {
                should_payment_be_blocked = true;
            }
            Err(e) => {
                logger::error!(blocklist_db_error=?e, "failed db operations for blocklist");
            }
        }
    }
    Ok(should_payment_be_blocked)
}
/// Applies the blocklist check to an in-flight payment: if blocked, fails the
/// payment intent and attempt in storage and returns a `PaymentBlockedError`;
/// otherwise stamps the attempt with its payment fingerprint and returns
/// `Ok(false)`.
pub async fn validate_data_for_blocklist<F>(
    state: &SessionState,
    merchant_context: &domain::MerchantContext,
    payment_data: &mut PaymentData<F>,
) -> CustomResult<bool, errors::ApiErrorResponse>
where
    F: Send + Clone,
{
    let db = &state.store;
    let should_payment_be_blocked =
        should_payment_be_blocked(state, merchant_context, &payment_data.payment_method_data)
            .await?;
    if should_payment_be_blocked {
        // Update db for attempt and intent status.
        db.update_payment_intent(
            &state.into(),
            payment_data.payment_intent.clone(),
            storage::PaymentIntentUpdate::RejectUpdate {
                status: common_enums::IntentStatus::Failed,
                merchant_decision: Some(MerchantDecision::Rejected.to_string()),
                updated_by: merchant_context
                    .get_merchant_account()
                    .storage_scheme
                    .to_string(),
            },
            merchant_context.get_merchant_key_store(),
            merchant_context.get_merchant_account().storage_scheme,
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)
        .attach_printable(
            "Failed to update status in Payment Intent to failed due to it being blocklisted",
        )?;
        // If payment is blocked not showing connector details
        let attempt_update = storage::PaymentAttemptUpdate::BlocklistUpdate {
            status: common_enums::AttemptStatus::Failure,
            error_code: Some(Some("HE-03".to_string())),
            error_message: Some(Some("This payment method is blocked".to_string())),
            updated_by: merchant_context
                .get_merchant_account()
                .storage_scheme
                .to_string(),
        };
        db.update_payment_attempt_with_attempt_id(
            payment_data.payment_attempt.clone(),
            attempt_update,
            merchant_context.get_merchant_account().storage_scheme,
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::PaymentNotFound)
        .attach_printable(
            "Failed to update status in Payment Attempt to failed, due to it being blocklisted",
        )?;
        // Surface the block to the caller as a dedicated API error.
        Err(errors::ApiErrorResponse::PaymentBlockedError {
            code: 200,
            message: "This payment method is blocked".to_string(),
            status: "Failed".to_string(),
            reason: "Blocked".to_string(),
        }
        .into())
    } else {
        // Not blocked: record the fingerprint on the attempt for later use.
        payment_data.payment_attempt.fingerprint_id = generate_payment_fingerprint(
            state,
            payment_data.payment_attempt.merchant_id.clone(),
            payment_data.payment_method_data.clone(),
        )
        .await?;
        Ok(false)
    }
}
/// Computes the card fingerprint for a payment using the merchant's
/// fingerprint secret, returning `None` (after logging) when the payment
/// method is not a card or fingerprint generation fails.
pub async fn generate_payment_fingerprint(
    state: &SessionState,
    merchant_id: common_utils::id_type::MerchantId,
    payment_method_data: Option<domain::PaymentMethodData>,
) -> CustomResult<Option<String>, errors::ApiErrorResponse> {
    let merchant_fingerprint_secret = get_merchant_fingerprint_secret(state, &merchant_id).await?;
    let fingerprint = match payment_method_data.as_ref() {
        Some(domain::PaymentMethodData::Card(card)) => generate_fingerprint(
            state,
            StrongSecret::new(card.card_number.get_card_no()),
            StrongSecret::new(merchant_fingerprint_secret),
            api_models::enums::LockerChoice::HyperswitchCardVault,
        )
        .await
        .attach_printable("error in pm fingerprint creation")
        .map_or_else(
            // Fingerprint failures are logged, not propagated.
            |error| {
                logger::error!(?error);
                None
            },
            Some,
        )
        .map(|payload| payload.card_fingerprint),
        _ => {
            logger::error!("failed to retrieve card fingerprint");
            None
        }
    };
    Ok(fingerprint)
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/blocklist/transformers.rs",
"crates/router/src/core/blocklist/utils.rs"
],
"module": "crates/router/src/core/blocklist",
"num_files": 2,
"token_count": 5370
}
|
module_-5386856329561781717
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/unified_authentication_service
Files: 2
</path>
<module>
// File: crates/router/src/core/unified_authentication_service/types.rs
use api_models::payments;
use hyperswitch_domain_models::{
errors::api_error_response::{self as errors, NotImplementedMessage},
router_request_types::{
authentication::MessageCategory,
unified_authentication_service::{
UasAuthenticationRequestData, UasPostAuthenticationRequestData,
UasPreAuthenticationRequestData,
},
BrowserInformation,
},
};
use crate::{
core::{errors::RouterResult, payments::helpers::MerchantConnectorAccountType},
db::domain,
routes::SessionState,
};
/// Connector identifier for Mastercard's Click to Pay service.
pub const CTP_MASTERCARD: &str = "ctp_mastercard";
/// Service name used when routing through the unified authentication service.
pub const UNIFIED_AUTHENTICATION_SERVICE: &str = "unified_authentication_service";
/// Placeholder attempt id for authentication flows with no payment attempt.
pub const IRRELEVANT_ATTEMPT_ID_IN_AUTHENTICATION_FLOW: &str =
    "irrelevant_attempt_id_in_AUTHENTICATION_flow";
/// Placeholder connector request reference id for authentication-only flows.
pub const IRRELEVANT_CONNECTOR_REQUEST_REFERENCE_ID_IN_AUTHENTICATION_FLOW: &str =
    "irrelevant_connector_request_reference_id_in_AUTHENTICATION_flow";
// Marker type for Click to Pay authentication flows.
pub struct ClickToPay;
// Marker type for external (3DS) authentication flows.
pub struct ExternalAuthentication;
/// Common interface for authentication providers (e.g. Click to Pay, external
/// 3DS) integrated through the unified authentication service.
///
/// Every method has a `NotImplemented` default so implementors only override
/// the phases their flow supports. Each default's error message names the
/// method it belongs to.
#[async_trait::async_trait]
pub trait UnifiedAuthenticationService {
    /// Builds the connector request payload for the pre-authentication phase.
    #[allow(clippy::too_many_arguments)]
    fn get_pre_authentication_request_data(
        _payment_method_data: Option<&domain::PaymentMethodData>,
        _service_details: Option<payments::CtpServiceDetails>,
        _amount: common_utils::types::MinorUnit,
        _currency: Option<common_enums::Currency>,
        _merchant_details: Option<&hyperswitch_domain_models::router_request_types::unified_authentication_service::MerchantDetails>,
        _billing_address: Option<&hyperswitch_domain_models::address::Address>,
        _acquirer_bin: Option<String>,
        _acquirer_merchant_id: Option<String>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
    ) -> RouterResult<UasPreAuthenticationRequestData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            message: NotImplementedMessage::Reason(
                "get_pre_authentication_request_data".to_string(),
            ),
        }
        .into())
    }
    /// Executes the pre-authentication call against the connector.
    #[allow(clippy::too_many_arguments)]
    async fn pre_authentication(
        _state: &SessionState,
        _merchant_id: &common_utils::id_type::MerchantId,
        _payment_id: Option<&common_utils::id_type::PaymentId>,
        _payment_method_data: Option<&domain::PaymentMethodData>,
        _payment_method_type: Option<common_enums::PaymentMethodType>,
        _merchant_connector_account: &MerchantConnectorAccountType,
        _connector_name: &str,
        _authentication_id: &common_utils::id_type::AuthenticationId,
        _payment_method: common_enums::PaymentMethod,
        _amount: common_utils::types::MinorUnit,
        _currency: Option<common_enums::Currency>,
        _service_details: Option<payments::CtpServiceDetails>,
        _merchant_details: Option<&hyperswitch_domain_models::router_request_types::unified_authentication_service::MerchantDetails>,
        _billing_address: Option<&hyperswitch_domain_models::address::Address>,
        _acquirer_bin: Option<String>,
        _acquirer_merchant_id: Option<String>,
    ) -> RouterResult<hyperswitch_domain_models::types::UasPreAuthenticationRouterData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            message: NotImplementedMessage::Reason("pre_authentication".to_string()),
        }
        .into())
    }
    /// Builds the connector request payload for the authentication phase.
    #[allow(clippy::too_many_arguments)]
    fn get_authentication_request_data(
        _browser_details: Option<BrowserInformation>,
        _amount: Option<common_utils::types::MinorUnit>,
        _currency: Option<common_enums::Currency>,
        _message_category: MessageCategory,
        _device_channel: payments::DeviceChannel,
        _authentication: diesel_models::authentication::Authentication,
        _return_url: Option<String>,
        _sdk_information: Option<payments::SdkInformation>,
        _threeds_method_comp_ind: payments::ThreeDsCompletionIndicator,
        _email: Option<common_utils::pii::Email>,
        _webhook_url: String,
    ) -> RouterResult<UasAuthenticationRequestData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            // Fixed copy-paste: this previously reported
            // "get_pre_authentication_request_data".
            message: NotImplementedMessage::Reason(
                "get_authentication_request_data".to_string(),
            ),
        }
        .into())
    }
    /// Executes the authentication call against the connector.
    #[allow(clippy::too_many_arguments)]
    async fn authentication(
        _state: &SessionState,
        _business_profile: &domain::Profile,
        _payment_method: &common_enums::PaymentMethod,
        _browser_details: Option<BrowserInformation>,
        _amount: Option<common_utils::types::MinorUnit>,
        _currency: Option<common_enums::Currency>,
        _message_category: MessageCategory,
        _device_channel: payments::DeviceChannel,
        _authentication_data: diesel_models::authentication::Authentication,
        _return_url: Option<String>,
        _sdk_information: Option<payments::SdkInformation>,
        _threeds_method_comp_ind: payments::ThreeDsCompletionIndicator,
        _email: Option<common_utils::pii::Email>,
        _webhook_url: String,
        _merchant_connector_account: &MerchantConnectorAccountType,
        _connector_name: &str,
        _payment_id: Option<common_utils::id_type::PaymentId>,
    ) -> RouterResult<hyperswitch_domain_models::types::UasAuthenticationRouterData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            message: NotImplementedMessage::Reason("authentication".to_string()),
        }
        .into())
    }
    /// Builds the connector request payload for the post-authentication phase.
    fn get_post_authentication_request_data(
        _authentication: Option<diesel_models::authentication::Authentication>,
    ) -> RouterResult<UasPostAuthenticationRequestData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            // Fixed: previously reported "post_authentication", which
            // collided with the `post_authentication` method's message.
            message: NotImplementedMessage::Reason(
                "get_post_authentication_request_data".to_string(),
            ),
        }
        .into())
    }
    /// Executes the post-authentication call against the connector.
    #[allow(clippy::too_many_arguments)]
    async fn post_authentication(
        _state: &SessionState,
        _business_profile: &domain::Profile,
        _payment_id: Option<&common_utils::id_type::PaymentId>,
        _merchant_connector_account: &MerchantConnectorAccountType,
        _connector_name: &str,
        _authentication_id: &common_utils::id_type::AuthenticationId,
        _payment_method: common_enums::PaymentMethod,
        _merchant_id: &common_utils::id_type::MerchantId,
        _authentication: Option<&diesel_models::authentication::Authentication>,
    ) -> RouterResult<hyperswitch_domain_models::types::UasPostAuthenticationRouterData> {
        Err(errors::ApiErrorResponse::NotImplemented {
            message: NotImplementedMessage::Reason("post_authentication".to_string()),
        }
        .into())
    }
    /// Reports the final outcome of the authentication back to the connector.
    #[allow(clippy::too_many_arguments)]
    async fn confirmation(
        _state: &SessionState,
        _authentication_id: Option<&common_utils::id_type::AuthenticationId>,
        _currency: Option<common_enums::Currency>,
        _status: common_enums::AttemptStatus,
        _service_details: Option<payments::CtpServiceDetails>,
        _merchant_connector_account: &MerchantConnectorAccountType,
        _connector_name: &str,
        _payment_method: common_enums::PaymentMethod,
        _net_amount: common_utils::types::MinorUnit,
        _payment_id: Option<&common_utils::id_type::PaymentId>,
        _merchant_id: &common_utils::id_type::MerchantId,
    ) -> RouterResult<()> {
        Err(errors::ApiErrorResponse::NotImplemented {
            message: NotImplementedMessage::Reason("confirmation".to_string()),
        }
        .into())
    }
}
// File: crates/router/src/core/unified_authentication_service/utils.rs
use std::marker::PhantomData;
use common_enums::enums::PaymentMethod;
use common_utils::ext_traits::{AsyncExt, ValueExt};
use error_stack::{report, ResultExt};
use hyperswitch_domain_models::{
errors::api_error_response::ApiErrorResponse,
ext_traits::OptionExt,
payment_address::PaymentAddress,
router_data::{ConnectorAuthType, ErrorResponse, RouterData},
router_data_v2::UasFlowData,
router_request_types::unified_authentication_service::UasAuthenticationResponseData,
};
use masking::ExposeInterface;
use super::types::{
IRRELEVANT_ATTEMPT_ID_IN_AUTHENTICATION_FLOW,
IRRELEVANT_CONNECTOR_REQUEST_REFERENCE_ID_IN_AUTHENTICATION_FLOW,
};
use crate::{
consts::DEFAULT_SESSION_EXPIRY,
core::{
errors::{utils::ConnectorErrorExt, RouterResult},
payments,
},
services::{self, execute_connector_processing_step},
types::{api, transformers::ForeignFrom},
SessionState,
};
/// Resolves the authentication connector registered under the given name and
/// runs the flow's request through the standard connector-processing pipeline.
///
/// # Errors
/// Fails when the connector name is unknown, or when the connector call
/// itself fails (surfaced as a payment-failed response).
pub async fn do_auth_connector_call<F, Req, Res>(
    state: &SessionState,
    authentication_connector_name: String,
    router_data: RouterData<F, Req, Res>,
) -> RouterResult<RouterData<F, Req, Res>>
where
    Req: std::fmt::Debug + Clone + 'static,
    Res: std::fmt::Debug + Clone + 'static,
    F: std::fmt::Debug + Clone + 'static,
    dyn api::Connector + Sync: services::api::ConnectorIntegration<F, Req, Res>,
    dyn api::ConnectorV2 + Sync: services::api::ConnectorIntegrationV2<F, UasFlowData, Req, Res>,
{
    // Look up the connector implementation by name.
    let connector_data =
        api::AuthenticationConnectorData::get_connector_by_name(&authentication_connector_name)?;
    let integration: services::BoxedUnifiedAuthenticationServiceInterface<F, Req, Res> =
        connector_data.connector.get_connector_integration();
    // Trigger the call immediately (no deferred action) and return the
    // updated router data.
    execute_connector_processing_step(
        state,
        integration,
        &router_data,
        payments::CallConnectorAction::Trigger,
        None,
        None,
    )
    .await
    .to_payment_failed_response()
}
/// Builds the common `RouterData` envelope used by Unified Authentication
/// Service (UAS) connector flows.
///
/// The connector credentials are parsed out of the merchant connector
/// account; most other `RouterData` fields are irrelevant to UAS flows and
/// are filled with defaults or `None`.
///
/// # Errors
/// Returns `ApiErrorResponse::InternalServerError` when the stored connector
/// account details cannot be parsed into a `ConnectorAuthType`.
#[allow(clippy::too_many_arguments)]
pub fn construct_uas_router_data<F: Clone, Req, Res>(
    state: &SessionState,
    authentication_connector_name: String,
    payment_method: PaymentMethod,
    merchant_id: common_utils::id_type::MerchantId,
    address: Option<PaymentAddress>,
    request_data: Req,
    merchant_connector_account: &payments::helpers::MerchantConnectorAccountType,
    authentication_id: Option<common_utils::id_type::AuthenticationId>,
    payment_id: Option<common_utils::id_type::PaymentId>,
) -> RouterResult<RouterData<F, Req, Res>> {
    // Parse the connector credentials stored on the merchant connector
    // account.
    let auth_type: ConnectorAuthType = merchant_connector_account
        .get_connector_account_details()
        .parse_value("ConnectorAuthType")
        .change_context(ApiErrorResponse::InternalServerError)
        .attach_printable("Error while parsing ConnectorAuthType")?;
    Ok(RouterData {
        flow: PhantomData,
        merchant_id,
        customer_id: None,
        connector_customer: None,
        connector: authentication_connector_name,
        // UAS flows may run without a payment; an empty string stands in for
        // the missing payment id.
        payment_id: payment_id
            .map(|id| id.get_string_repr().to_owned())
            .unwrap_or_default(),
        tenant_id: state.tenant.tenant_id.clone(),
        attempt_id: IRRELEVANT_ATTEMPT_ID_IN_AUTHENTICATION_FLOW.to_owned(),
        status: common_enums::AttemptStatus::default(),
        payment_method,
        payment_method_type: None,
        connector_auth_type: auth_type,
        description: None,
        address: address.unwrap_or_default(),
        auth_type: common_enums::AuthenticationType::default(),
        connector_meta_data: merchant_connector_account.get_metadata().clone(),
        connector_wallets_details: merchant_connector_account.get_connector_wallets_details(),
        amount_captured: None,
        minor_amount_captured: None,
        access_token: None,
        session_token: None,
        reference_id: None,
        payment_method_token: None,
        recurring_mandate_payment_data: None,
        preprocessing_id: None,
        payment_method_balance: None,
        connector_api_version: None,
        request: request_data,
        // The response is populated after the connector call; start with a
        // default error placeholder.
        response: Err(ErrorResponse::default()),
        connector_request_reference_id:
            IRRELEVANT_CONNECTOR_REQUEST_REFERENCE_ID_IN_AUTHENTICATION_FLOW.to_owned(),
        #[cfg(feature = "payouts")]
        payout_method_data: None,
        #[cfg(feature = "payouts")]
        quote_id: None,
        test_mode: None,
        connector_http_status_code: None,
        external_latency: None,
        apple_pay_flow: None,
        frm_metadata: None,
        dispute_id: None,
        refund_id: None,
        payment_method_status: None,
        connector_response: None,
        integrity_check: Ok(()),
        additional_merchant_data: None,
        header_payload: None,
        connector_mandate_request_reference_id: None,
        authentication_id,
        psd2_sca_exemption_type: None,
        raw_connector_response: None,
        is_payment_id_from_merchant: None,
        l2_l3_data: None,
        minor_amount_capturable: None,
        authorized_amount: None,
    })
}
/// Maps a UAS authentication response (or connector error) onto an
/// `AuthenticationUpdate` and persists it on the given authentication record.
///
/// Response variants are handled as follows:
/// - `PreAuthentication`  -> `PreAuthenticationUpdate` (status `Pending`)
/// - `Authentication`     -> `AuthenticationUpdate` (status derived from the
///   transaction status; the authentication value, when present, is vaulted)
/// - `PostAuthentication` -> `PostAuthenticationUpdate` (the dynamic data
///   value, when present, is vaulted)
/// - `Confirmation`       -> internal error (unexpected in this flow)
/// - connector `Err`      -> `ErrorUpdate` with status `Failed`
///
/// # Errors
/// Fails when required fields are missing from the connector response, when
/// vaulting the authentication value fails, or when the database update fails.
#[allow(clippy::too_many_arguments)]
pub async fn external_authentication_update_trackers<F: Clone, Req>(
    state: &SessionState,
    router_data: RouterData<F, Req, UasAuthenticationResponseData>,
    authentication: diesel_models::authentication::Authentication,
    acquirer_details: Option<
        hyperswitch_domain_models::router_request_types::authentication::AcquirerDetails,
    >,
    merchant_key_store: &hyperswitch_domain_models::merchant_key_store::MerchantKeyStore,
    billing_address: Option<common_utils::encryption::Encryption>,
    shipping_address: Option<common_utils::encryption::Encryption>,
    email: Option<common_utils::encryption::Encryption>,
    browser_info: Option<serde_json::Value>,
) -> RouterResult<diesel_models::authentication::Authentication> {
    let authentication_update = match router_data.response {
        Ok(response) => match response {
            UasAuthenticationResponseData::PreAuthentication {
                authentication_details,
            } => Ok(
                // These fields are optional on the wire but mandatory for a
                // pre-authentication update, hence the explicit error per
                // missing field.
                diesel_models::authentication::AuthenticationUpdate::PreAuthenticationUpdate {
                    threeds_server_transaction_id: authentication_details
                        .threeds_server_transaction_id
                        .ok_or(ApiErrorResponse::InternalServerError)
                        .attach_printable(
                            "missing threeds_server_transaction_id in PreAuthentication Details",
                        )?,
                    maximum_supported_3ds_version: authentication_details
                        .maximum_supported_3ds_version
                        .ok_or(ApiErrorResponse::InternalServerError)
                        .attach_printable(
                            "missing maximum_supported_3ds_version in PreAuthentication Details",
                        )?,
                    connector_authentication_id: authentication_details
                        .connector_authentication_id
                        .ok_or(ApiErrorResponse::InternalServerError)
                        .attach_printable(
                            "missing connector_authentication_id in PreAuthentication Details",
                        )?,
                    three_ds_method_data: authentication_details.three_ds_method_data,
                    three_ds_method_url: authentication_details.three_ds_method_url,
                    message_version: authentication_details
                        .message_version
                        .ok_or(ApiErrorResponse::InternalServerError)
                        .attach_printable("missing message_version in PreAuthentication Details")?,
                    connector_metadata: authentication_details.connector_metadata,
                    authentication_status: common_enums::AuthenticationStatus::Pending,
                    acquirer_bin: acquirer_details
                        .as_ref()
                        .map(|acquirer_details| acquirer_details.acquirer_bin.clone()),
                    acquirer_merchant_id: acquirer_details
                        .as_ref()
                        .map(|acquirer_details| acquirer_details.acquirer_merchant_id.clone()),
                    acquirer_country_code: acquirer_details
                        .and_then(|acquirer_details| acquirer_details.acquirer_country_code),
                    directory_server_id: authentication_details.directory_server_id,
                    browser_info: Box::new(browser_info),
                    email,
                    billing_address,
                    shipping_address,
                },
            ),
            UasAuthenticationResponseData::Authentication {
                authentication_details,
            } => {
                let authentication_status = common_enums::AuthenticationStatus::foreign_from(
                    authentication_details.trans_status.clone(),
                );
                // Vault the authentication value (when present) keyed by the
                // authentication id; the tokenize result itself is not stored
                // on the update, only the failure is propagated.
                authentication_details
                    .authentication_value
                    .async_map(|auth_val| {
                        crate::core::payment_methods::vault::create_tokenize(
                            state,
                            auth_val.expose(),
                            None,
                            authentication
                                .authentication_id
                                .get_string_repr()
                                .to_string(),
                            merchant_key_store.key.get_inner(),
                        )
                    })
                    .await
                    .transpose()?;
                Ok(
                    diesel_models::authentication::AuthenticationUpdate::AuthenticationUpdate {
                        trans_status: authentication_details.trans_status,
                        acs_url: authentication_details.authn_flow_type.get_acs_url(),
                        challenge_request: authentication_details
                            .authn_flow_type
                            .get_challenge_request(),
                        challenge_request_key: authentication_details
                            .authn_flow_type
                            .get_challenge_request_key(),
                        acs_reference_number: authentication_details
                            .authn_flow_type
                            .get_acs_reference_number(),
                        acs_trans_id: authentication_details.authn_flow_type.get_acs_trans_id(),
                        acs_signed_content: authentication_details
                            .authn_flow_type
                            .get_acs_signed_content(),
                        authentication_type: authentication_details
                            .authn_flow_type
                            .get_decoupled_authentication_type(),
                        authentication_status,
                        connector_metadata: authentication_details.connector_metadata,
                        ds_trans_id: authentication_details.ds_trans_id,
                        eci: authentication_details.eci,
                        challenge_code: authentication_details.challenge_code,
                        challenge_cancel: authentication_details.challenge_cancel,
                        challenge_code_reason: authentication_details.challenge_code_reason,
                        message_extension: authentication_details.message_extension,
                    },
                )
            }
            UasAuthenticationResponseData::PostAuthentication {
                authentication_details,
            } => {
                let trans_status = authentication_details
                    .trans_status
                    .ok_or(ApiErrorResponse::InternalServerError)
                    .attach_printable("missing trans_status in PostAuthentication Details")?;
                // Vault the dynamic data value (when present), as above.
                authentication_details
                    .dynamic_data_details
                    .and_then(|details| details.dynamic_data_value)
                    .map(ExposeInterface::expose)
                    .async_map(|auth_val| {
                        crate::core::payment_methods::vault::create_tokenize(
                            state,
                            auth_val,
                            None,
                            authentication
                                .authentication_id
                                .get_string_repr()
                                .to_string(),
                            merchant_key_store.key.get_inner(),
                        )
                    })
                    .await
                    .transpose()?;
                Ok(
                    diesel_models::authentication::AuthenticationUpdate::PostAuthenticationUpdate {
                        authentication_status: common_enums::AuthenticationStatus::foreign_from(
                            trans_status.clone(),
                        ),
                        trans_status,
                        eci: authentication_details.eci,
                        challenge_cancel: authentication_details.challenge_cancel,
                        challenge_code_reason: authentication_details.challenge_code_reason,
                    },
                )
            }
            // Confirmation responses belong to a different flow; reaching
            // this arm indicates a programming error upstream.
            UasAuthenticationResponseData::Confirmation { .. } => Err(
                ApiErrorResponse::InternalServerError,
            )
            .attach_printable("unexpected api confirmation in external authentication flow."),
        },
        // Connector-side failure: record the error details and mark the
        // authentication as failed.
        Err(error) => Ok(
            diesel_models::authentication::AuthenticationUpdate::ErrorUpdate {
                connector_authentication_id: error.connector_transaction_id,
                authentication_status: common_enums::AuthenticationStatus::Failed,
                error_message: error
                    .reason
                    .map(|reason| format!("message: {}, reason: {}", error.message, reason))
                    .or(Some(error.message)),
                error_code: Some(error.code),
            },
        ),
    }?;
    state
        .store
        .update_authentication_by_merchant_id_authentication_id(
            authentication,
            authentication_update,
        )
        .await
        .change_context(ApiErrorResponse::InternalServerError)
        .attach_printable("Error while updating authentication")
}
/// Maps an attempt status to the checkout event status code and reason pair:
/// `Charged`/`Authorized` yield `"01"` (success), anything else `"03"`.
pub fn get_checkout_event_status_and_reason(
    attempt_status: common_enums::AttemptStatus,
) -> (Option<String>, Option<String>) {
    let is_successful = matches!(
        attempt_status,
        common_enums::AttemptStatus::Charged | common_enums::AttemptStatus::Authorized
    );
    if is_successful {
        (
            Some("01".to_string()),
            Some("The payment was successful".to_string()),
        )
    } else {
        (
            Some("03".to_string()),
            Some("The payment was not successful".to_string()),
        )
    }
}
/// Validates that the client secret supplied in the request matches the one
/// stored on the authentication record, and that the session has not expired.
///
/// # Errors
/// - `MissingRequiredField` when no client secret is stored on the record.
/// - `ClientSecretInvalid` when the supplied secret does not match.
/// - `ClientSecretExpired` when the record is older than
///   `DEFAULT_SESSION_EXPIRY` seconds.
pub fn authenticate_authentication_client_secret_and_check_expiry(
    // `&str` instead of `&String`: strictly more general (clippy `ptr_arg`);
    // existing `&String` call sites coerce transparently.
    req_client_secret: &str,
    authentication: &diesel_models::authentication::Authentication,
) -> RouterResult<()> {
    let stored_client_secret = authentication
        .authentication_client_secret
        .clone()
        .get_required_value("authentication_client_secret")
        .change_context(ApiErrorResponse::MissingRequiredField {
            field_name: "client_secret",
        })
        .attach_printable("client secret not found in db")?;
    // Guard clause: reject mismatched secrets before doing any expiry math.
    if req_client_secret != stored_client_secret {
        return Err(report!(ApiErrorResponse::ClientSecretInvalid));
    }
    let session_expiry = authentication
        .created_at
        .saturating_add(time::Duration::seconds(DEFAULT_SESSION_EXPIRY));
    if common_utils::date_time::now() > session_expiry {
        return Err(report!(ApiErrorResponse::ClientSecretExpired));
    }
    Ok(())
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/unified_authentication_service/types.rs",
"crates/router/src/core/unified_authentication_service/utils.rs"
],
"module": "crates/router/src/core/unified_authentication_service",
"num_files": 2,
"token_count": 4383
}
|
module_523556711915657129
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/unified_connector_service
Files: 1
</path>
<module>
// File: crates/router/src/core/unified_connector_service/transformers.rs
use std::collections::HashMap;
use common_enums::{AttemptStatus, AuthenticationType};
use common_utils::{ext_traits::Encode, request::Method};
use diesel_models::enums as storage_enums;
use error_stack::ResultExt;
use external_services::grpc_client::unified_connector_service::UnifiedConnectorServiceError;
use hyperswitch_domain_models::{
router_data::{ErrorResponse, RouterData},
router_flow_types::{
payments::{Authorize, PSync, SetupMandate},
ExternalVaultProxy,
},
router_request_types::{
AuthenticationData, ExternalVaultProxyPaymentsData, PaymentsAuthorizeData,
PaymentsSyncData, SetupMandateRequestData,
},
router_response_types::{PaymentsResponseData, RedirectForm},
};
pub use hyperswitch_interfaces::{
helpers::ForeignTryFrom,
unified_connector_service::{
transformers::convert_connector_service_status_code, WebhookTransformData,
},
};
use masking::{ExposeInterface, PeekInterface};
use router_env::tracing;
use unified_connector_service_client::payments::{
self as payments_grpc, Identifier, PaymentServiceTransformRequest,
PaymentServiceTransformResponse,
};
use url::Url;
use crate::{
core::{errors, unified_connector_service},
types::transformers,
};
impl transformers::ForeignTryFrom<&RouterData<PSync, PaymentsSyncData, PaymentsResponseData>>
    for payments_grpc::PaymentServiceGetRequest
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Maps a payment-sync `RouterData` onto the gRPC
    /// `PaymentServiceGetRequest`, preferring the connector transaction id
    /// and falling back to encoded data as the transaction identifier.
    fn foreign_try_from(
        router_data: &RouterData<PSync, PaymentsSyncData, PaymentsResponseData>,
    ) -> Result<Self, Self::Error> {
        // A missing transaction id is not fatal here; log it and fall back.
        let transaction_identifier = match router_data
            .request
            .connector_transaction_id
            .get_connector_transaction_id()
        {
            Ok(id) => Some(Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(id)),
            }),
            Err(error) => {
                tracing::debug!(
                    transaction_id_error=?error,
                    "Failed to extract connector transaction ID for UCS payment sync request"
                );
                None
            }
        };
        let encoded_identifier = router_data
            .request
            .encoded_data
            .as_ref()
            .map(|data| Identifier {
                id_type: Some(payments_grpc::identifier::IdType::EncodedData(
                    data.to_string(),
                )),
            });
        let reference_identifier = router_data
            .request
            .connector_reference_id
            .clone()
            .map(|id| Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(id)),
            });
        Ok(Self {
            transaction_id: transaction_identifier.or(encoded_identifier),
            request_ref_id: reference_identifier,
            access_token: None,
            capture_method: None,
            handle_response: None,
        })
    }
}
impl
    transformers::ForeignTryFrom<
        &RouterData<Authorize, PaymentsAuthorizeData, PaymentsResponseData>,
    > for payments_grpc::PaymentServiceAuthorizeRequest
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts an authorize-flow `RouterData` into the gRPC
    /// `PaymentServiceAuthorizeRequest` sent to the Unified Connector Service.
    ///
    /// # Errors
    /// Fails when any sub-conversion (currency, payment method, address,
    /// auth type, browser info, capture method, authentication data) fails.
    fn foreign_try_from(
        router_data: &RouterData<Authorize, PaymentsAuthorizeData, PaymentsResponseData>,
    ) -> Result<Self, Self::Error> {
        let currency = payments_grpc::Currency::foreign_try_from(router_data.request.currency)?;
        // The payment method is only buildable when a payment_method_type is
        // present; otherwise it is omitted from the request.
        let payment_method = router_data
            .request
            .payment_method_type
            .map(|payment_method_type| {
                unified_connector_service::build_unified_connector_service_payment_method(
                    router_data.request.payment_method_data.clone(),
                    payment_method_type,
                )
            })
            .transpose()?;
        let address = payments_grpc::PaymentAddress::foreign_try_from(router_data.address.clone())?;
        let auth_type = payments_grpc::AuthenticationType::foreign_try_from(router_data.auth_type)?;
        let browser_info = router_data
            .request
            .browser_info
            .clone()
            .map(payments_grpc::BrowserInformation::foreign_try_from)
            .transpose()?;
        let capture_method = router_data
            .request
            .capture_method
            .map(payments_grpc::CaptureMethod::foreign_try_from)
            .transpose()?;
        let authentication_data = router_data
            .request
            .authentication_data
            .clone()
            .map(payments_grpc::AuthenticationData::foreign_try_from)
            .transpose()?;
        Ok(Self {
            // NOTE(review): both `amount` and `minor_amount` are populated
            // from `request.amount` — confirm the expected unit of each.
            amount: router_data.request.amount,
            currency: currency.into(),
            payment_method,
            return_url: router_data.request.router_return_url.clone(),
            address: Some(address),
            auth_type: auth_type.into(),
            enrolled_for_3ds: router_data.request.enrolled_for_3ds,
            request_incremental_authorization: router_data
                .request
                .request_incremental_authorization,
            minor_amount: router_data.request.amount,
            // The email is double-wrapped, hence the two `expose()` calls.
            email: router_data
                .request
                .email
                .clone()
                .map(|e| e.expose().expose().into()),
            browser_info,
            access_token: None,
            session_token: None,
            order_tax_amount: router_data
                .request
                .order_tax_amount
                .map(|order_tax_amount| order_tax_amount.get_amount_as_i64()),
            customer_name: router_data
                .request
                .customer_name
                .clone()
                .map(|customer_name| customer_name.peek().to_owned()),
            capture_method: capture_method.map(|capture_method| capture_method.into()),
            webhook_url: router_data.request.webhook_url.clone(),
            complete_authorize_url: router_data.request.complete_authorize_url.clone(),
            setup_future_usage: None,
            off_session: None,
            customer_acceptance: None,
            order_category: router_data.request.order_category.clone(),
            payment_experience: None,
            authentication_data,
            request_extended_authorization: router_data
                .request
                .request_extended_authorization
                .map(|request_extended_authorization| request_extended_authorization.is_true()),
            merchant_order_reference_id: router_data.request.merchant_order_reference_id.clone(),
            shipping_cost: router_data
                .request
                .shipping_cost
                .map(|shipping_cost| shipping_cost.get_amount_as_i64()),
            request_ref_id: Some(Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(
                    router_data.connector_request_reference_id.clone(),
                )),
            }),
            connector_customer_id: router_data
                .request
                .customer_id
                .as_ref()
                .map(|id| id.get_string_repr().to_string()),
            // Only string-valued entries of the metadata JSON object survive
            // the conversion into the gRPC string map.
            metadata: router_data
                .request
                .metadata
                .as_ref()
                .and_then(|val| val.as_object())
                .map(|map| {
                    map.iter()
                        .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
                        .collect::<HashMap<String, String>>()
                })
                .unwrap_or_default(),
            test_mode: None,
        })
    }
}
impl
    transformers::ForeignTryFrom<
        &RouterData<ExternalVaultProxy, ExternalVaultProxyPaymentsData, PaymentsResponseData>,
    > for payments_grpc::PaymentServiceAuthorizeRequest
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts an external-vault-proxy `RouterData` into the gRPC
    /// `PaymentServiceAuthorizeRequest`. Mirrors the authorize-flow
    /// conversion, but builds the payment method through the external-proxy
    /// builder and reads the merchant order reference id as a typed value.
    ///
    /// # Errors
    /// Fails when any sub-conversion (currency, payment method, address,
    /// auth type, browser info, capture method, authentication data) fails.
    fn foreign_try_from(
        router_data: &RouterData<
            ExternalVaultProxy,
            ExternalVaultProxyPaymentsData,
            PaymentsResponseData,
        >,
    ) -> Result<Self, Self::Error> {
        let currency = payments_grpc::Currency::foreign_try_from(router_data.request.currency)?;
        // Built only when a payment_method_type is present.
        let payment_method = router_data
            .request
            .payment_method_type
            .map(|payment_method_type| {
                unified_connector_service::build_unified_connector_service_payment_method_for_external_proxy(
                    router_data.request.payment_method_data.clone(),
                    payment_method_type,
                )
            })
            .transpose()?;
        let address = payments_grpc::PaymentAddress::foreign_try_from(router_data.address.clone())?;
        let auth_type = payments_grpc::AuthenticationType::foreign_try_from(router_data.auth_type)?;
        let browser_info = router_data
            .request
            .browser_info
            .clone()
            .map(payments_grpc::BrowserInformation::foreign_try_from)
            .transpose()?;
        let capture_method = router_data
            .request
            .capture_method
            .map(payments_grpc::CaptureMethod::foreign_try_from)
            .transpose()?;
        let authentication_data = router_data
            .request
            .authentication_data
            .clone()
            .map(payments_grpc::AuthenticationData::foreign_try_from)
            .transpose()?;
        Ok(Self {
            // NOTE(review): both `amount` and `minor_amount` are populated
            // from `request.amount` — confirm the expected unit of each.
            amount: router_data.request.amount,
            currency: currency.into(),
            payment_method,
            return_url: router_data.request.router_return_url.clone(),
            address: Some(address),
            auth_type: auth_type.into(),
            enrolled_for_3ds: router_data.request.enrolled_for_3ds,
            request_incremental_authorization: router_data
                .request
                .request_incremental_authorization,
            minor_amount: router_data.request.amount,
            // The email is double-wrapped, hence the two `expose()` calls.
            email: router_data
                .request
                .email
                .clone()
                .map(|e| e.expose().expose().into()),
            browser_info,
            access_token: None,
            session_token: None,
            order_tax_amount: router_data
                .request
                .order_tax_amount
                .map(|order_tax_amount| order_tax_amount.get_amount_as_i64()),
            customer_name: router_data
                .request
                .customer_name
                .clone()
                .map(|customer_name| customer_name.peek().to_owned()),
            capture_method: capture_method.map(|capture_method| capture_method.into()),
            webhook_url: router_data.request.webhook_url.clone(),
            complete_authorize_url: router_data.request.complete_authorize_url.clone(),
            setup_future_usage: None,
            off_session: None,
            customer_acceptance: None,
            order_category: router_data.request.order_category.clone(),
            payment_experience: None,
            authentication_data,
            request_extended_authorization: router_data
                .request
                .request_extended_authorization
                .map(|request_extended_authorization| request_extended_authorization.is_true()),
            // Unlike the authorize flow, this request carries a typed id that
            // is rendered to its string representation.
            merchant_order_reference_id: router_data
                .request
                .merchant_order_reference_id
                .as_ref()
                .map(|merchant_order_reference_id| {
                    merchant_order_reference_id.get_string_repr().to_string()
                }),
            shipping_cost: router_data
                .request
                .shipping_cost
                .map(|shipping_cost| shipping_cost.get_amount_as_i64()),
            request_ref_id: Some(Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(
                    router_data.connector_request_reference_id.clone(),
                )),
            }),
            connector_customer_id: router_data
                .request
                .customer_id
                .as_ref()
                .map(|id| id.get_string_repr().to_string()),
            // Only string-valued entries of the metadata JSON object survive
            // the conversion into the gRPC string map.
            metadata: router_data
                .request
                .metadata
                .as_ref()
                .and_then(|val| val.as_object())
                .map(|map| {
                    map.iter()
                        .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
                        .collect::<HashMap<String, String>>()
                })
                .unwrap_or_default(),
            test_mode: None,
        })
    }
}
impl
    transformers::ForeignTryFrom<
        &RouterData<SetupMandate, SetupMandateRequestData, PaymentsResponseData>,
    > for payments_grpc::PaymentServiceRegisterRequest
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts a setup-mandate `RouterData` into the gRPC
    /// `PaymentServiceRegisterRequest` (mandate registration).
    ///
    /// # Errors
    /// Fails when any sub-conversion (currency, payment method, address,
    /// auth type, browser info, future usage, customer acceptance) fails.
    fn foreign_try_from(
        router_data: &RouterData<SetupMandate, SetupMandateRequestData, PaymentsResponseData>,
    ) -> Result<Self, Self::Error> {
        let currency = payments_grpc::Currency::foreign_try_from(router_data.request.currency)?;
        // Built only when a payment_method_type is present.
        let payment_method = router_data
            .request
            .payment_method_type
            .map(|payment_method_type| {
                unified_connector_service::build_unified_connector_service_payment_method(
                    router_data.request.payment_method_data.clone(),
                    payment_method_type,
                )
            })
            .transpose()?;
        let address = payments_grpc::PaymentAddress::foreign_try_from(router_data.address.clone())?;
        let auth_type = payments_grpc::AuthenticationType::foreign_try_from(router_data.auth_type)?;
        let browser_info = router_data
            .request
            .browser_info
            .clone()
            .map(payments_grpc::BrowserInformation::foreign_try_from)
            .transpose()?;
        let setup_future_usage = router_data
            .request
            .setup_future_usage
            .map(payments_grpc::FutureUsage::foreign_try_from)
            .transpose()?;
        let customer_acceptance = router_data
            .request
            .customer_acceptance
            .clone()
            .map(payments_grpc::CustomerAcceptance::foreign_try_from)
            .transpose()?;
        Ok(Self {
            request_ref_id: Some(Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(
                    router_data.connector_request_reference_id.clone(),
                )),
            }),
            currency: currency.into(),
            payment_method,
            minor_amount: router_data.request.amount,
            // The email is double-wrapped, hence the two `expose()` calls.
            email: router_data
                .request
                .email
                .clone()
                .map(|e| e.expose().expose().into()),
            customer_name: router_data
                .request
                .customer_name
                .clone()
                .map(|customer_name| customer_name.peek().to_owned()),
            connector_customer_id: router_data
                .request
                .customer_id
                .as_ref()
                .map(|id| id.get_string_repr().to_string()),
            address: Some(address),
            auth_type: auth_type.into(),
            enrolled_for_3ds: false,
            authentication_data: None,
            // Metadata here is a masked secret JSON value: peek it, then keep
            // only its string-valued entries for the gRPC string map.
            metadata: router_data
                .request
                .metadata
                .as_ref()
                .map(|secret| secret.peek())
                .and_then(|val| val.as_object())
                .map(|map| {
                    map.iter()
                        .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
                        .collect::<HashMap<String, String>>()
                })
                .unwrap_or_default(),
            return_url: router_data.request.router_return_url.clone(),
            webhook_url: router_data.request.webhook_url.clone(),
            complete_authorize_url: router_data.request.complete_authorize_url.clone(),
            access_token: None,
            session_token: None,
            order_tax_amount: None,
            order_category: None,
            merchant_order_reference_id: None,
            shipping_cost: router_data
                .request
                .shipping_cost
                .map(|cost| cost.get_amount_as_i64()),
            setup_future_usage: setup_future_usage.map(|s| s.into()),
            off_session: router_data.request.off_session,
            request_incremental_authorization: router_data
                .request
                .request_incremental_authorization,
            request_extended_authorization: None,
            customer_acceptance,
            browser_info,
            payment_experience: None,
        })
    }
}
impl
    transformers::ForeignTryFrom<
        &RouterData<Authorize, PaymentsAuthorizeData, PaymentsResponseData>,
    > for payments_grpc::PaymentServiceRepeatEverythingRequest
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts an authorize-flow `RouterData` into the gRPC
    /// `PaymentServiceRepeatEverythingRequest` used for repeat (MIT) payments
    /// against a previously stored connector mandate.
    ///
    /// # Errors
    /// Fails with `MissingRequiredField("connector_mandate_id")` when no
    /// mandate id (or no connector mandate reference) is present, or when a
    /// sub-conversion (currency, browser info, capture method) fails.
    fn foreign_try_from(
        router_data: &RouterData<Authorize, PaymentsAuthorizeData, PaymentsResponseData>,
    ) -> Result<Self, Self::Error> {
        let currency = payments_grpc::Currency::foreign_try_from(router_data.request.currency)?;
        let browser_info = router_data
            .request
            .browser_info
            .clone()
            .map(payments_grpc::BrowserInformation::foreign_try_from)
            .transpose()?;
        let capture_method = router_data
            .request
            .capture_method
            .map(payments_grpc::CaptureMethod::foreign_try_from)
            .transpose()?;
        // A connector mandate id is mandatory for a repeat payment; both the
        // "no mandate" and "wrong reference kind" cases yield the same error.
        let mandate_reference = match &router_data.request.mandate_id {
            Some(mandate) => match &mandate.mandate_reference_id {
                Some(api_models::payments::MandateReferenceId::ConnectorMandateId(
                    connector_mandate_id,
                )) => Some(payments_grpc::MandateReference {
                    mandate_id: connector_mandate_id.get_connector_mandate_id(),
                }),
                _ => {
                    return Err(UnifiedConnectorServiceError::MissingRequiredField {
                        field_name: "connector_mandate_id",
                    }
                    .into())
                }
            },
            None => {
                return Err(UnifiedConnectorServiceError::MissingRequiredField {
                    field_name: "connector_mandate_id",
                }
                .into())
            }
        };
        Ok(Self {
            request_ref_id: Some(Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(
                    router_data.connector_request_reference_id.clone(),
                )),
            }),
            mandate_reference,
            // NOTE(review): both `amount` and `minor_amount` are populated
            // from `request.amount` — confirm the expected unit of each.
            amount: router_data.request.amount,
            currency: currency.into(),
            minor_amount: router_data.request.amount,
            merchant_order_reference_id: router_data.request.merchant_order_reference_id.clone(),
            // Only string-valued entries of the metadata JSON object survive
            // the conversion into the gRPC string map.
            metadata: router_data
                .request
                .metadata
                .as_ref()
                .and_then(|val| val.as_object())
                .map(|map| {
                    map.iter()
                        .filter_map(|(k, v)| v.as_str().map(|s| (k.clone(), s.to_string())))
                        .collect::<HashMap<String, String>>()
                })
                .unwrap_or_default(),
            webhook_url: router_data.request.webhook_url.clone(),
            capture_method: capture_method.map(|capture_method| capture_method.into()),
            // The email is double-wrapped, hence the two `expose()` calls.
            email: router_data
                .request
                .email
                .clone()
                .map(|e| e.expose().expose().into()),
            browser_info,
            test_mode: None,
            payment_method_type: None,
            access_token: None,
        })
    }
}
impl transformers::ForeignTryFrom<payments_grpc::PaymentServiceAuthorizeResponse>
    for Result<(PaymentsResponseData, AttemptStatus), ErrorResponse>
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts a gRPC authorize response into either a successful
    /// `(PaymentsResponseData, AttemptStatus)` pair or an `ErrorResponse`
    /// (when the response carries an error code).
    ///
    /// # Errors
    /// Fails when the status code, attempt status, or redirection data cannot
    /// be converted, or when a redirection URI cannot be parsed.
    fn foreign_try_from(
        response: payments_grpc::PaymentServiceAuthorizeResponse,
    ) -> Result<Self, Self::Error> {
        // Flatten the response reference identifier to a plain string; a
        // "no response id" marker maps to None.
        let connector_response_reference_id =
            response.response_ref_id.as_ref().and_then(|identifier| {
                identifier
                    .id_type
                    .clone()
                    .and_then(|id_type| match id_type {
                        payments_grpc::identifier::IdType::Id(id) => Some(id),
                        payments_grpc::identifier::IdType::EncodedData(encoded_data) => {
                            Some(encoded_data)
                        }
                        payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
                    })
            });
        // Map the transaction identifier onto the domain ResponseId variants.
        let resource_id: hyperswitch_domain_models::router_request_types::ResponseId = match response.transaction_id.as_ref().and_then(|id| id.id_type.clone()) {
            Some(payments_grpc::identifier::IdType::Id(id)) => hyperswitch_domain_models::router_request_types::ResponseId::ConnectorTransactionId(id),
            Some(payments_grpc::identifier::IdType::EncodedData(encoded_data)) => hyperswitch_domain_models::router_request_types::ResponseId::EncodedData(encoded_data),
            Some(payments_grpc::identifier::IdType::NoResponseIdMarker(_)) | None => hyperswitch_domain_models::router_request_types::ResponseId::NoResponseId,
        };
        let (connector_metadata, redirection_data) = match response.redirection_data.clone() {
            Some(redirection_data) => match redirection_data.form_type {
                Some(ref form_type) => match form_type {
                    payments_grpc::redirect_form::FormType::Uri(uri) => {
                        // For UPI intent, store the URI in connector_metadata for SDK UPI intent pattern
                        let sdk_uri_info = api_models::payments::SdkUpiIntentInformation {
                            sdk_uri: Url::parse(&uri.uri)
                                .change_context(UnifiedConnectorServiceError::ParsingFailed)?,
                        };
                        (
                            Some(sdk_uri_info.encode_to_value())
                                .transpose()
                                .change_context(UnifiedConnectorServiceError::ParsingFailed)?,
                            None,
                        )
                    }
                    // Non-URI forms become a regular redirection form.
                    _ => (
                        None,
                        Some(RedirectForm::foreign_try_from(redirection_data)).transpose()?,
                    ),
                },
                None => (None, None),
            },
            None => (None, None),
        };
        let status_code = convert_connector_service_status_code(response.status_code)?;
        // An error code on the response means the attempt failed: build an
        // ErrorResponse instead of a transaction response.
        let response = if response.error_code.is_some() {
            // An unspecified gRPC status maps to `None` rather than failing
            // the conversion.
            let attempt_status = match response.status() {
                payments_grpc::PaymentStatus::AttemptStatusUnspecified => None,
                _ => Some(AttemptStatus::foreign_try_from(response.status())?),
            };
            Err(ErrorResponse {
                code: response.error_code().to_owned(),
                message: response.error_message().to_owned(),
                reason: Some(response.error_message().to_owned()),
                status_code,
                attempt_status,
                connector_transaction_id: connector_response_reference_id,
                network_decline_code: None,
                network_advice_code: None,
                network_error_message: None,
                connector_metadata: None,
            })
        } else {
            let status = AttemptStatus::foreign_try_from(response.status())?;
            Ok((
                PaymentsResponseData::TransactionResponse {
                    resource_id,
                    redirection_data: Box::new(redirection_data),
                    mandate_reference: Box::new(None),
                    connector_metadata,
                    network_txn_id: response.network_txn_id.clone(),
                    connector_response_reference_id,
                    incremental_authorization_allowed: response.incremental_authorization_allowed,
                    charges: None,
                },
                status,
            ))
        };
        Ok(response)
    }
}
impl transformers::ForeignTryFrom<payments_grpc::PaymentServiceRegisterResponse>
    for Result<(PaymentsResponseData, AttemptStatus), ErrorResponse>
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;
    /// Converts a gRPC register (setup-mandate) response into either a
    /// successful `(PaymentsResponseData, AttemptStatus)` pair or an
    /// `ErrorResponse` (when the response carries an error code).
    ///
    /// # Errors
    /// Fails when the status code, attempt status, or redirection data cannot
    /// be converted.
    fn foreign_try_from(
        response: payments_grpc::PaymentServiceRegisterResponse,
    ) -> Result<Self, Self::Error> {
        // Flatten the response reference identifier to a plain string; a
        // "no response id" marker maps to None.
        let connector_response_reference_id =
            response.response_ref_id.as_ref().and_then(|identifier| {
                identifier
                    .id_type
                    .clone()
                    .and_then(|id_type| match id_type {
                        payments_grpc::identifier::IdType::Id(id) => Some(id),
                        payments_grpc::identifier::IdType::EncodedData(encoded_data) => {
                            Some(encoded_data)
                        }
                        payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
                    })
            });
        let status_code = convert_connector_service_status_code(response.status_code)?;
        // An error code on the response means the attempt failed: build an
        // ErrorResponse instead of a transaction response.
        let response = if response.error_code.is_some() {
            // An unspecified gRPC status maps to `None` rather than failing
            // the conversion.
            let attempt_status = match response.status() {
                payments_grpc::PaymentStatus::AttemptStatusUnspecified => None,
                _ => Some(AttemptStatus::foreign_try_from(response.status())?),
            };
            Err(ErrorResponse {
                code: response.error_code().to_owned(),
                message: response.error_message().to_owned(),
                reason: Some(response.error_message().to_owned()),
                status_code,
                attempt_status,
                connector_transaction_id: connector_response_reference_id,
                network_decline_code: None,
                network_advice_code: None,
                network_error_message: None,
                connector_metadata: None,
            })
        } else {
            let status = AttemptStatus::foreign_try_from(response.status())?;
            Ok((PaymentsResponseData::TransactionResponse {
                // NOTE(review): EncodedData is mapped to
                // ConnectorTransactionId here, unlike the authorize-response
                // mapping which uses ResponseId::EncodedData — confirm this
                // divergence is intentional.
                resource_id: response.registration_id.as_ref().and_then(|identifier| {
                    identifier
                        .id_type
                        .clone()
                        .and_then(|id_type| match id_type {
                            payments_grpc::identifier::IdType::Id(id) => Some(
                                hyperswitch_domain_models::router_request_types::ResponseId::ConnectorTransactionId(id),
                            ),
                            payments_grpc::identifier::IdType::EncodedData(encoded_data) => Some(
                                hyperswitch_domain_models::router_request_types::ResponseId::ConnectorTransactionId(encoded_data),
                            ),
                            payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
                        })
                }).unwrap_or(hyperswitch_domain_models::router_request_types::ResponseId::NoResponseId),
                redirection_data: Box::new(
                    response
                        .redirection_data
                        .clone()
                        .map(RedirectForm::foreign_try_from)
                        .transpose()?
                ),
                // Carry over the connector mandate id; the remaining mandate
                // fields are not provided by the gRPC response.
                mandate_reference: Box::new(
                    response.mandate_reference.map(|grpc_mandate| {
                        hyperswitch_domain_models::router_response_types::MandateReference {
                            connector_mandate_id: grpc_mandate.mandate_id,
                            payment_method_id: None,
                            mandate_metadata: None,
                            connector_mandate_request_reference_id: None,
                        }
                    })
                ),
                connector_metadata: None,
                network_txn_id: response.network_txn_id,
                connector_response_reference_id,
                incremental_authorization_allowed: response.incremental_authorization_allowed,
                charges: None,
            }, status))
        };
        Ok(response)
    }
}
/// Converts a gRPC `PaymentServiceRepeatEverythingResponse` (repeat / MIT
/// payment flow) into either `(PaymentsResponseData, AttemptStatus)` or an
/// `ErrorResponse`, selected by the presence of an error code.
impl transformers::ForeignTryFrom<payments_grpc::PaymentServiceRepeatEverythingResponse>
    for Result<(PaymentsResponseData, AttemptStatus), ErrorResponse>
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(
        response: payments_grpc::PaymentServiceRepeatEverythingResponse,
    ) -> Result<Self, Self::Error> {
        // Reference id reported by the connector; `Id` and `EncodedData` both
        // yield a string, the "no response id" marker yields `None`.
        let connector_response_reference_id =
            response.response_ref_id.as_ref().and_then(|identifier| {
                identifier
                    .id_type
                    .clone()
                    .and_then(|id_type| match id_type {
                        payments_grpc::identifier::IdType::Id(id) => Some(id),
                        payments_grpc::identifier::IdType::EncodedData(encoded_data) => {
                            Some(encoded_data)
                        }
                        payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
                    })
            });
        // Transaction id extracted the same way; used both as the error-branch
        // connector transaction id and the success-branch resource id.
        let transaction_id = response.transaction_id.as_ref().and_then(|id| {
            id.id_type.clone().and_then(|id_type| match id_type {
                payments_grpc::identifier::IdType::Id(id) => Some(id),
                payments_grpc::identifier::IdType::EncodedData(encoded_data) => Some(encoded_data),
                payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
            })
        });
        let status_code = convert_connector_service_status_code(response.status_code)?;
        let response = if response.error_code.is_some() {
            // An unspecified attempt status is forwarded as `None` rather than
            // failing the conversion.
            let attempt_status = match response.status() {
                payments_grpc::PaymentStatus::AttemptStatusUnspecified => None,
                _ => Some(AttemptStatus::foreign_try_from(response.status())?),
            };
            Err(ErrorResponse {
                code: response.error_code().to_owned(),
                message: response.error_message().to_owned(),
                reason: Some(response.error_message().to_owned()),
                status_code,
                attempt_status,
                connector_transaction_id: transaction_id,
                network_decline_code: None,
                network_advice_code: None,
                network_error_message: None,
                connector_metadata: None,
            })
        } else {
            let status = AttemptStatus::foreign_try_from(response.status())?;
            Ok((PaymentsResponseData::TransactionResponse {
                resource_id: match transaction_id.as_ref() {
                    Some(transaction_id) => hyperswitch_domain_models::router_request_types::ResponseId::ConnectorTransactionId(transaction_id.clone()),
                    None => hyperswitch_domain_models::router_request_types::ResponseId::NoResponseId,
                },
                // Repeat payments carry no redirection or mandate data.
                redirection_data: Box::new(None),
                mandate_reference: Box::new(None),
                connector_metadata: None,
                network_txn_id: response.network_txn_id.clone(),
                connector_response_reference_id,
                incremental_authorization_allowed: None,
                charges: None,
            }, status))
        };
        Ok(response)
    }
}
/// Converts the internal currency enum into the gRPC `Currency` by matching on
/// its string representation (e.g. `"USD"`).
///
/// # Errors
/// Returns `RequestEncodingFailedWithReason` when the currency name has no
/// corresponding gRPC variant.
impl transformers::ForeignTryFrom<common_enums::Currency> for payments_grpc::Currency {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(currency: common_enums::Currency) -> Result<Self, Self::Error> {
        // Fix: the argument here had been garbled to `¤cy` (an HTML-entity
        // mangling of `&currency`), which does not compile. Restored the
        // intended borrow of the currency's string form.
        Self::from_str_name(&currency.to_string()).ok_or_else(|| {
            UnifiedConnectorServiceError::RequestEncodingFailedWithReason(
                "Failed to parse currency".to_string(),
            )
            .into()
        })
    }
}
/// Maps a supported card network onto its gRPC counterpart.
///
/// # Errors
/// Networks without a gRPC variant produce `RequestEncodingFailedWithReason`.
impl transformers::ForeignTryFrom<common_enums::CardNetwork> for payments_grpc::CardNetwork {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(card_network: common_enums::CardNetwork) -> Result<Self, Self::Error> {
        // Resolve the supported networks first; unsupported ones fall through
        // to `None` and are rejected below.
        let mapped = match card_network {
            common_enums::CardNetwork::AmericanExpress => Some(Self::Amex),
            common_enums::CardNetwork::CartesBancaires => Some(Self::CartesBancaires),
            common_enums::CardNetwork::DinersClub => Some(Self::Diners),
            common_enums::CardNetwork::Discover => Some(Self::Discover),
            common_enums::CardNetwork::JCB => Some(Self::Jcb),
            common_enums::CardNetwork::Maestro => Some(Self::Maestro),
            common_enums::CardNetwork::Mastercard => Some(Self::Mastercard),
            common_enums::CardNetwork::RuPay => Some(Self::Rupay),
            common_enums::CardNetwork::UnionPay => Some(Self::Unionpay),
            common_enums::CardNetwork::Visa => Some(Self::Visa),
            _ => None,
        };
        mapped.ok_or_else(|| {
            UnifiedConnectorServiceError::RequestEncodingFailedWithReason(
                "Card Network not supported".to_string(),
            )
            .into()
        })
    }
}
/// Converts the domain `PaymentAddress` into the gRPC `PaymentAddress`,
/// mapping the shipping address and picking a billing address with the
/// payment-method-level billing taking precedence over payment-level billing.
///
/// NOTE(review): the three address mappings below unwrap secret values
/// inconsistently — shipping uses `clone().expose()` (and `expose().expose()`
/// for email), while the billing mappings use `peek().to_string()`. The
/// resulting strings should be identical, but confirm against the secret
/// wrapper types before unifying.
impl transformers::ForeignTryFrom<hyperswitch_domain_models::payment_address::PaymentAddress>
    for payments_grpc::PaymentAddress
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(
        payment_address: hyperswitch_domain_models::payment_address::PaymentAddress,
    ) -> Result<Self, Self::Error> {
        // Shipping address: every field is optional and copied out of the
        // secret wrappers; the country is matched by its alpha-2 string name.
        let shipping = payment_address.get_shipping().map(|address| {
            let details = address.address.as_ref();
            let country = details.and_then(|details| {
                details
                    .country
                    .as_ref()
                    .and_then(|c| payments_grpc::CountryAlpha2::from_str_name(&c.to_string()))
                    .map(|country| country.into())
            });
            payments_grpc::Address {
                first_name: details
                    .and_then(|d| d.first_name.as_ref().map(|s| s.clone().expose().into())),
                last_name: details
                    .and_then(|d| d.last_name.as_ref().map(|s| s.clone().expose().into())),
                line1: details.and_then(|d| d.line1.as_ref().map(|s| s.clone().expose().into())),
                line2: details.and_then(|d| d.line2.as_ref().map(|s| s.clone().expose().into())),
                line3: details.and_then(|d| d.line3.as_ref().map(|s| s.clone().expose().into())),
                city: details.and_then(|d| d.city.as_ref().map(|s| s.clone().into())),
                state: details.and_then(|d| d.state.as_ref().map(|s| s.clone().expose().into())),
                zip_code: details.and_then(|d| d.zip.as_ref().map(|s| s.clone().expose().into())),
                country_alpha2_code: country,
                // Email is double-wrapped, hence the two `expose()` calls.
                email: address
                    .email
                    .as_ref()
                    .map(|e| e.clone().expose().expose().into()),
                phone_number: address
                    .phone
                    .as_ref()
                    .and_then(|phone| phone.number.as_ref().map(|n| n.clone().expose().into())),
                phone_country_code: address.phone.as_ref().and_then(|p| p.country_code.clone()),
            }
        });
        // Payment-level billing address (fallback if no payment-method billing).
        let billing = payment_address.get_payment_billing().map(|address| {
            let details = address.address.as_ref();
            let country = details.and_then(|details| {
                details
                    .country
                    .as_ref()
                    .and_then(|c| payments_grpc::CountryAlpha2::from_str_name(&c.to_string()))
                    .map(|country| country.into())
            });
            payments_grpc::Address {
                first_name: details
                    .and_then(|d| d.first_name.as_ref().map(|s| s.peek().to_string().into())),
                last_name: details
                    .and_then(|d| d.last_name.as_ref().map(|s| s.peek().to_string().into())),
                line1: details.and_then(|d| d.line1.as_ref().map(|s| s.peek().to_string().into())),
                line2: details.and_then(|d| d.line2.as_ref().map(|s| s.peek().to_string().into())),
                line3: details.and_then(|d| d.line3.as_ref().map(|s| s.peek().to_string().into())),
                city: details.and_then(|d| d.city.as_ref().map(|s| s.clone().into())),
                state: details.and_then(|d| d.state.as_ref().map(|s| s.peek().to_string().into())),
                zip_code: details.and_then(|d| d.zip.as_ref().map(|s| s.peek().to_string().into())),
                country_alpha2_code: country,
                email: address.email.as_ref().map(|e| e.peek().to_string().into()),
                phone_number: address
                    .phone
                    .as_ref()
                    .and_then(|phone| phone.number.as_ref().map(|n| n.peek().to_string().into())),
                phone_country_code: address.phone.as_ref().and_then(|p| p.country_code.clone()),
            }
        });
        // Payment-method-level billing address (preferred when present).
        let unified_payment_method_billing =
            payment_address.get_payment_method_billing().map(|address| {
                let details = address.address.as_ref();
                let country = details.and_then(|details| {
                    details
                        .country
                        .as_ref()
                        .and_then(|c| payments_grpc::CountryAlpha2::from_str_name(&c.to_string()))
                        .map(|country| country.into())
                });
                payments_grpc::Address {
                    first_name: details
                        .and_then(|d| d.first_name.as_ref().map(|s| s.peek().to_string().into())),
                    last_name: details
                        .and_then(|d| d.last_name.as_ref().map(|s| s.peek().to_string().into())),
                    line1: details
                        .and_then(|d| d.line1.as_ref().map(|s| s.peek().to_string().into())),
                    line2: details
                        .and_then(|d| d.line2.as_ref().map(|s| s.peek().to_string().into())),
                    line3: details
                        .and_then(|d| d.line3.as_ref().map(|s| s.peek().to_string().into())),
                    city: details.and_then(|d| d.city.as_ref().map(|s| s.clone().into())),
                    state: details
                        .and_then(|d| d.state.as_ref().map(|s| s.peek().to_string().into())),
                    zip_code: details
                        .and_then(|d| d.zip.as_ref().map(|s| s.peek().to_string().into())),
                    country_alpha2_code: country,
                    email: address
                        .email
                        .as_ref()
                        .map(|e| e.clone().expose().expose().into()),
                    phone_number: address
                        .phone
                        .as_ref()
                        .and_then(|phone| phone.number.as_ref().map(|n| n.clone().expose().into())),
                    phone_country_code: address.phone.as_ref().and_then(|p| p.country_code.clone()),
                }
            });
        Ok(Self {
            shipping_address: shipping,
            // Payment-method billing wins over payment-level billing.
            billing_address: unified_payment_method_billing.or(billing),
        })
    }
}
/// Converts the domain authentication type into its gRPC counterpart.
impl transformers::ForeignTryFrom<AuthenticationType> for payments_grpc::AuthenticationType {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    /// Infallible 1:1 mapping of the 3DS flag; the `Result` wrapper only
    /// exists to satisfy the `ForeignTryFrom` contract.
    fn foreign_try_from(auth_type: AuthenticationType) -> Result<Self, Self::Error> {
        let converted = match auth_type {
            AuthenticationType::NoThreeDs => Self::NoThreeDs,
            AuthenticationType::ThreeDs => Self::ThreeDs,
        };
        Ok(converted)
    }
}
/// Field-by-field translation of browser/device metadata into the gRPC
/// message. All fields are optional passthroughs; only the colour depth and
/// the IP address need an explicit conversion to the wire representation.
impl
    transformers::ForeignTryFrom<
        hyperswitch_domain_models::router_request_types::BrowserInformation,
    > for payments_grpc::BrowserInformation
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(
        browser_info: hyperswitch_domain_models::router_request_types::BrowserInformation,
    ) -> Result<Self, Self::Error> {
        // The only two fields that change representation on the wire.
        let color_depth = browser_info.color_depth.map(|v| v.into());
        let ip_address = browser_info.ip_address.map(|ip| ip.to_string());
        Ok(Self {
            accept_header: browser_info.accept_header,
            accept_language: browser_info.accept_language,
            color_depth,
            device_model: browser_info.device_model,
            ip_address,
            java_enabled: browser_info.java_enabled,
            java_script_enabled: browser_info.java_script_enabled,
            language: browser_info.language,
            os_type: browser_info.os_type,
            os_version: browser_info.os_version,
            referer: browser_info.referer,
            screen_height: browser_info.screen_height,
            screen_width: browser_info.screen_width,
            // Note the field rename: domain `time_zone` maps to the gRPC
            // `time_zone_offset_minutes`.
            time_zone_offset_minutes: browser_info.time_zone,
            user_agent: browser_info.user_agent,
        })
    }
}
/// Converts the storage capture method into the gRPC capture method.
impl transformers::ForeignTryFrom<storage_enums::CaptureMethod> for payments_grpc::CaptureMethod {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    /// The mapping is total over the enum, so this never actually errors.
    fn foreign_try_from(capture_method: storage_enums::CaptureMethod) -> Result<Self, Self::Error> {
        Ok(match capture_method {
            common_enums::CaptureMethod::Automatic => Self::Automatic,
            common_enums::CaptureMethod::Manual => Self::Manual,
            common_enums::CaptureMethod::ManualMultiple => Self::ManualMultiple,
            common_enums::CaptureMethod::Scheduled => Self::Scheduled,
            common_enums::CaptureMethod::SequentialAutomatic => Self::SequentialAutomatic,
        })
    }
}
/// Builds the gRPC authentication payload from domain 3DS data. The CAVV
/// secret is rendered as a plain string for the wire format, and the 3DS
/// server transaction id is wrapped in a gRPC `Identifier`.
impl transformers::ForeignTryFrom<AuthenticationData> for payments_grpc::AuthenticationData {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(authentication_data: AuthenticationData) -> Result<Self, Self::Error> {
        let cavv = authentication_data.cavv.peek().to_string();
        let threeds_server_transaction_id = authentication_data
            .threeds_server_transaction_id
            .map(|id| Identifier {
                id_type: Some(payments_grpc::identifier::IdType::Id(id)),
            });
        Ok(Self {
            eci: authentication_data.eci,
            cavv,
            threeds_server_transaction_id,
            // No message version is available on the domain side.
            message_version: None,
            ds_transaction_id: authentication_data.ds_trans_id,
        })
    }
}
/// Converts a gRPC redirect form into the domain `RedirectForm`.
///
/// # Errors
/// * `Uri` form type — not implemented on this path.
/// * Missing `form_type` — malformed gRPC response.
impl transformers::ForeignTryFrom<payments_grpc::RedirectForm> for RedirectForm {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(value: payments_grpc::RedirectForm) -> Result<Self, Self::Error> {
        match value.form_type {
            Some(payments_grpc::redirect_form::FormType::Form(form)) => {
                // Read the method via the prost getter (borrows `form`) before
                // moving the owned fields out — this removes the three
                // redundant `form.clone()` calls the original made.
                let method = Method::foreign_try_from(form.method())?;
                Ok(Self::Form {
                    endpoint: form.endpoint,
                    method,
                    form_fields: form.form_fields,
                })
            }
            Some(payments_grpc::redirect_form::FormType::Html(html)) => Ok(Self::Html {
                html_data: html.html_data,
            }),
            Some(payments_grpc::redirect_form::FormType::Uri(_)) => Err(
                UnifiedConnectorServiceError::RequestEncodingFailedWithReason(
                    "URI form type is not implemented".to_string(),
                )
                .into(),
            ),
            None => Err(
                UnifiedConnectorServiceError::RequestEncodingFailedWithReason(
                    "Missing form type".to_string(),
                )
                .into(),
            ),
        }
    }
}
/// Converts a gRPC HTTP method into the domain `Method`.
///
/// # Errors
/// `Unspecified` cannot be represented and fails deserialization.
impl transformers::ForeignTryFrom<payments_grpc::HttpMethod> for Method {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(value: payments_grpc::HttpMethod) -> Result<Self, Self::Error> {
        tracing::debug!("Converting gRPC HttpMethod: {:?}", value);
        match value {
            payments_grpc::HttpMethod::Unspecified => {
                Err(UnifiedConnectorServiceError::ResponseDeserializationFailed)
                    .attach_printable("Invalid Http Method")
            }
            payments_grpc::HttpMethod::Delete => Ok(Self::Delete),
            payments_grpc::HttpMethod::Get => Ok(Self::Get),
            payments_grpc::HttpMethod::Post => Ok(Self::Post),
            payments_grpc::HttpMethod::Put => Ok(Self::Put),
        }
    }
}
/// Converts the mandate future-usage flag into its gRPC counterpart.
impl transformers::ForeignTryFrom<storage_enums::FutureUsage> for payments_grpc::FutureUsage {
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    /// Total, infallible mapping; the `Result` only satisfies the trait.
    fn foreign_try_from(future_usage: storage_enums::FutureUsage) -> Result<Self, Self::Error> {
        Ok(match future_usage {
            storage_enums::FutureUsage::OffSession => Self::OffSession,
            storage_enums::FutureUsage::OnSession => Self::OnSession,
        })
    }
}
/// Translates customer-acceptance details (type, timestamp, optional online
/// mandate metadata) into the gRPC `CustomerAcceptance` message.
impl transformers::ForeignTryFrom<common_types::payments::CustomerAcceptance>
    for payments_grpc::CustomerAcceptance
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(
        customer_acceptance: common_types::payments::CustomerAcceptance,
    ) -> Result<Self, Self::Error> {
        let acceptance_type = match customer_acceptance.acceptance_type {
            common_types::payments::AcceptanceType::Offline => {
                payments_grpc::AcceptanceType::Offline
            }
            common_types::payments::AcceptanceType::Online => payments_grpc::AcceptanceType::Online,
        };
        // Unix timestamp of acceptance; 0 (the epoch) when none was given,
        // matching `unwrap_or_default` on an `i64`.
        let accepted_at = customer_acceptance
            .accepted_at
            .map(|dt| dt.assume_utc().unix_timestamp())
            .unwrap_or_default();
        let online_mandate_details =
            customer_acceptance
                .online
                .map(|online| payments_grpc::OnlineMandate {
                    ip_address: online.ip_address.map(|ip| ip.peek().to_string()),
                    user_agent: online.user_agent,
                });
        Ok(Self {
            acceptance_type: acceptance_type.into(),
            accepted_at,
            online_mandate_details,
        })
    }
}
/// Builds the gRPC `RequestDetails` for webhook forwarding from the incoming
/// HTTP request: headers are flattened to strings, the URI is taken from the
/// `x-forwarded-path` header, and the raw body/query string are passed through.
impl
    transformers::ForeignTryFrom<
        &hyperswitch_interfaces::webhooks::IncomingWebhookRequestDetails<'_>,
    > for payments_grpc::RequestDetails
{
    type Error = error_stack::Report<UnifiedConnectorServiceError>;

    fn foreign_try_from(
        request_details: &hyperswitch_interfaces::webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> Result<Self, Self::Error> {
        // Header values that are not valid UTF-8 degrade to an empty string
        // rather than failing the whole conversion.
        let headers_map = request_details
            .headers
            .iter()
            .map(|(key, value)| {
                let value_string = value.to_str().unwrap_or_default().to_string();
                (key.as_str().to_string(), value_string)
            })
            .collect();
        Ok(Self {
            // NOTE(review): hard-coded discriminant; per the comment this is
            // meant to be POST — confirm 1 == POST in the generated
            // `HttpMethod` enum and prefer the enum constant if available.
            method: 1, // POST method for webhooks
            uri: Some({
                // Take the original request path from `x-forwarded-path`; a
                // missing or non-UTF-8 header is logged and replaced with the
                // sentinel "/Unknown".
                let uri_result = request_details
                    .headers
                    .get("x-forwarded-path")
                    .and_then(|h| h.to_str().map_err(|e| {
                        tracing::warn!(
                            header_conversion_error=?e,
                            header_value=?h,
                            "Failed to convert x-forwarded-path header to string for webhook processing"
                        );
                        e
                    }).ok());
                uri_result.unwrap_or_else(|| {
                    tracing::debug!("x-forwarded-path header not found or invalid, using default '/Unknown'");
                    "/Unknown"
                }).to_string()
            }),
            body: request_details.body.to_vec(),
            headers: headers_map,
            query_params: Some(request_details.query_params.clone()),
        })
    }
}
/// Transform UCS webhook response into webhook event data
///
/// Maps the gRPC event type onto the API-model incoming-webhook event and
/// flattens the optional response reference identifier into a plain string.
pub fn transform_ucs_webhook_response(
    response: PaymentServiceTransformResponse,
) -> Result<WebhookTransformData, error_stack::Report<errors::ApiErrorResponse>> {
    // Both `Id` and `EncodedData` carry a usable reference string; the
    // explicit "no response id" marker collapses to `None`.
    let response_ref_id = response.response_ref_id.and_then(|identifier| {
        identifier.id_type.and_then(|id_type| match id_type {
            payments_grpc::identifier::IdType::NoResponseIdMarker(_) => None,
            payments_grpc::identifier::IdType::Id(id) => Some(id),
            payments_grpc::identifier::IdType::EncodedData(encoded_data) => Some(encoded_data),
        })
    });
    let event_type =
        api_models::webhooks::IncomingWebhookEvent::from_ucs_event_type(response.event_type);
    Ok(WebhookTransformData {
        event_type,
        source_verified: response.source_verified,
        webhook_content: response.content,
        response_ref_id,
    })
}
/// Build UCS webhook transform request from webhook components
///
/// The request reference id is synthesised from the merchant id, connector id
/// and the current unix timestamp; the HTTP request details are converted to
/// their gRPC form. The raw webhook body parameter is currently unused (the
/// body travels inside `request_details`).
pub fn build_webhook_transform_request(
    _webhook_body: &[u8],
    request_details: &hyperswitch_interfaces::webhooks::IncomingWebhookRequestDetails<'_>,
    webhook_secrets: Option<payments_grpc::WebhookSecrets>,
    merchant_id: &str,
    connector_id: &str,
) -> Result<PaymentServiceTransformRequest, error_stack::Report<errors::ApiErrorResponse>> {
    let request_ref_id = Identifier {
        id_type: Some(payments_grpc::identifier::IdType::Id(format!(
            "{}_{}_{}",
            merchant_id,
            connector_id,
            time::OffsetDateTime::now_utc().unix_timestamp()
        ))),
    };
    let grpc_request_details =
        <payments_grpc::RequestDetails as transformers::ForeignTryFrom<_>>::foreign_try_from(
            request_details,
        )
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to transform webhook request details to gRPC format")?;
    Ok(PaymentServiceTransformRequest {
        request_ref_id: Some(request_ref_id),
        request_details: Some(grpc_request_details),
        webhook_secrets,
        access_token: None,
    })
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/unified_connector_service/transformers.rs"
],
"module": "crates/router/src/core/unified_connector_service",
"num_files": 1,
"token_count": 9808
}
|
module_-1300094261476413945
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/revenue_recovery
Files: 3
</path>
<module>
// File: crates/router/src/core/revenue_recovery/types.rs
use std::{marker::PhantomData, str::FromStr};
use api_models::{
enums as api_enums,
payments::{
AmountDetails, PaymentRevenueRecoveryMetadata, PaymentsUpdateIntentRequest,
ProxyPaymentsRequest,
},
};
use common_utils::{
self,
ext_traits::{OptionExt, ValueExt},
id_type,
};
use diesel_models::{
enums, payment_intent, process_tracker::business_status, types as diesel_types,
};
use error_stack::{self, ResultExt};
use hyperswitch_domain_models::{
api::ApplicationResponse,
business_profile, merchant_connector_account,
merchant_context::{Context, MerchantContext},
payments::{
self as domain_payments, payment_attempt::PaymentAttempt, PaymentConfirmData,
PaymentIntent, PaymentIntentData, PaymentStatusData,
},
router_data_v2::{self, flow_common_types},
router_flow_types,
router_request_types::revenue_recovery as revenue_recovery_request,
router_response_types::revenue_recovery as revenue_recovery_response,
ApiModelToDieselModelConvertor,
};
use time::PrimitiveDateTime;
use super::errors::StorageErrorExt;
use crate::{
core::{
errors::{self, RouterResult},
payments::{self, helpers, operations::Operation, transformers::GenerateResponse},
revenue_recovery::{self as revenue_recovery_core, pcr, perform_calculate_workflow},
webhooks::{
create_event_and_trigger_outgoing_webhook, recovery_incoming as recovery_incoming_flow,
},
},
db::StorageInterface,
logger,
routes::SessionState,
services::{self, connector_integration_interface::RouterDataConversion},
types::{
self, api as api_types, api::payments as payments_types, domain, storage,
transformers::ForeignInto,
},
workflows::{
payment_sync,
revenue_recovery::{self, get_schedule_time_to_retry_mit_payments},
},
};
type RecoveryResult<T> = error_stack::Result<T, errors::RecoveryError>;
pub const REVENUE_RECOVERY: &str = "revenue_recovery";
/// The status of Passive Churn Payments
#[derive(Clone, Debug, serde::Deserialize, serde::Serialize)]
pub enum RevenueRecoveryPaymentsAttemptStatus {
Succeeded,
Failed,
Processing,
InvalidStatus(String),
// Cancelled,
}
impl RevenueRecoveryPaymentsAttemptStatus {
    /// Advances the process-tracker state of the *execute* task according to
    /// the attempt status: any terminal-or-pending status finishes the task
    /// (handing over to psync), while an unrecognised status parks the task
    /// in `Review` for manual inspection.
    pub(crate) async fn update_pt_status_based_on_attempt_status_for_execute_payment(
        &self,
        db: &dyn StorageInterface,
        execute_task_process: &storage::ProcessTracker,
    ) -> Result<(), errors::ProcessTrackerError> {
        logger::info!("Entering update_pt_status_based_on_attempt_status_for_execute_payment");
        match &self {
            Self::Succeeded | Self::Failed | Self::Processing => {
                // finish the current execute task
                db.finish_process_with_business_status(
                    execute_task_process.clone(),
                    business_status::EXECUTE_WORKFLOW_COMPLETE_FOR_PSYNC,
                )
                .await?;
            }
            Self::InvalidStatus(action) => {
                logger::debug!(
                    "Invalid Attempt Status for the Recovery Payment : {}",
                    action
                );
                let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                    status: enums::ProcessTrackerStatus::Review,
                    business_status: Some(String::from(business_status::EXECUTE_WORKFLOW_COMPLETE)),
                };
                // update the process tracker status as Review
                db.update_process(execute_task_process.clone(), pt_update)
                    .await?;
            }
        };
        Ok(())
    }

    /// Handles the outcome of a *payments sync* for a recovery payment.
    ///
    /// Per status:
    /// * `Succeeded` — finish the psync task, publish a Kafka recovery event,
    ///   clear the token error state and unlock the customer in Redis, emit an
    ///   outgoing `PaymentSucceeded` webhook, and record the success back to
    ///   the billing connector.
    /// * `Failed` — finish the psync task, publish the Kafka event, record the
    ///   error code / hard-decline flag on the token, unlock the customer, and
    ///   reopen the calculate workflow so a retry can be scheduled.
    /// * `Processing` — issue another psync call and delegate to its response
    ///   handler.
    /// * `InvalidStatus` — log only; no state transitions.
    #[allow(clippy::too_many_arguments)]
    pub(crate) async fn update_pt_status_based_on_attempt_status_for_payments_sync(
        &self,
        state: &SessionState,
        payment_intent: &PaymentIntent,
        process_tracker: storage::ProcessTracker,
        profile: &domain::Profile,
        merchant_context: domain::MerchantContext,
        revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
        payment_attempt: PaymentAttempt,
        revenue_recovery_metadata: &mut PaymentRevenueRecoveryMetadata,
    ) -> Result<(), errors::ProcessTrackerError> {
        let connector_customer_id = payment_intent
            .extract_connector_customer_id_from_payment_intent()
            .change_context(errors::RecoveryError::ValueNotFound)
            .attach_printable("Failed to extract customer ID from payment intent")?;
        let db = &*state.store;
        // Recovery-domain views of the intent/attempt, used for Kafka events.
        let recovery_payment_intent =
            hyperswitch_domain_models::revenue_recovery::RecoveryPaymentIntent::from(
                payment_intent,
            );
        let recovery_payment_attempt =
            hyperswitch_domain_models::revenue_recovery::RecoveryPaymentAttempt::from(
                &payment_attempt,
            );
        let recovery_payment_tuple = recovery_incoming_flow::RecoveryPaymentTuple::new(
            &recovery_payment_intent,
            &recovery_payment_attempt,
        );
        // Token that was used for this attempt; needed to update its Redis state.
        let used_token = get_payment_processor_token_id_from_payment_attempt(&payment_attempt);
        let retry_count = process_tracker.retry_count;
        let psync_response = revenue_recovery_payment_data
            .psync_data
            .as_ref()
            .ok_or(errors::RecoveryError::ValueNotFound)
            .attach_printable("Psync data not found in revenue recovery payment data")?;
        match self {
            Self::Succeeded => {
                // finish psync task as the payment was a success
                db.as_scheduler()
                    .finish_process_with_business_status(
                        process_tracker,
                        business_status::PSYNC_WORKFLOW_COMPLETE,
                    )
                    .await?;
                let event_status = common_enums::EventType::PaymentSucceeded;
                // publish events to kafka (best-effort: failure is only logged)
                if let Err(e) = recovery_incoming_flow::RecoveryPaymentTuple::publish_revenue_recovery_event_to_kafka(
                    state,
                    &recovery_payment_tuple,
                    Some(retry_count+1)
                )
                .await{
                    router_env::logger::error!(
                        "Failed to publish revenue recovery event to kafka: {:?}",
                        e
                    );
                };
                // update the status of token in redis
                let _update_error_code = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
                    state,
                    &connector_customer_id,
                    &None,
                    // Since this is a succeeded payment attempt, 'is_hard_decline' will be false.
                    &Some(false),
                    used_token.as_deref(),
                )
                .await;
                // unlocking the token
                let _unlock_the_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
                    state,
                    &connector_customer_id,
                )
                .await;
                let payments_response = psync_response
                    .clone()
                    .generate_response(state, None, None, None, &merchant_context, profile, None)
                    .change_context(errors::RecoveryError::PaymentsResponseGenerationFailed)
                    .attach_printable("Failed while generating response for payment")?;
                RevenueRecoveryOutgoingWebhook::send_outgoing_webhook_based_on_revenue_recovery_status(
                    state,
                    common_enums::EventClass::Payments,
                    event_status,
                    payment_intent,
                    &merchant_context,
                    profile,
                    recovery_payment_attempt
                        .attempt_id
                        .get_string_repr()
                        .to_string(),
                    payments_response
                )
                .await?;
                // Record a successful transaction back to Billing Connector
                // TODO: Add support for retrying failed outgoing recordback webhooks
                // NOTE(review): the printable below mentions the process
                // tracker although this is the record-back call — confirm.
                record_back_to_billing_connector(
                    state,
                    &payment_attempt,
                    payment_intent,
                    &revenue_recovery_payment_data.billing_mca,
                )
                .await
                .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
                .attach_printable("Failed to update the process tracker")?;
            }
            Self::Failed => {
                // finish psync task
                db.as_scheduler()
                    .finish_process_with_business_status(
                        process_tracker.clone(),
                        business_status::PSYNC_WORKFLOW_COMPLETE,
                    )
                    .await?;
                // publish events to kafka (best-effort: failure is only logged)
                if let Err(e) = recovery_incoming_flow::RecoveryPaymentTuple::publish_revenue_recovery_event_to_kafka(
                    state,
                    &recovery_payment_tuple,
                    Some(retry_count+1)
                )
                .await{
                    router_env::logger::error!(
                        "Failed to publish revenue recovery event to kafka : {:?}", e
                    );
                };
                let error_code = recovery_payment_attempt.error_code;
                // Hard-decline check is best-effort; an error leaves it `None`.
                let is_hard_decline = revenue_recovery::check_hard_decline(state, &payment_attempt)
                    .await
                    .ok();
                // update the status of token in redis
                let _update_error_code = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
                    state,
                    &connector_customer_id,
                    &error_code,
                    &is_hard_decline,
                    used_token.as_deref(),
                )
                .await;
                // unlocking the token
                let _unlock_the_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
                    state,
                    &connector_customer_id,
                )
                .await;
                // Reopen calculate workflow on payment failure
                Box::pin(reopen_calculate_workflow_on_payment_failure(
                    state,
                    &process_tracker,
                    profile,
                    merchant_context,
                    payment_intent,
                    revenue_recovery_payment_data,
                    psync_response.payment_attempt.get_id(),
                ))
                .await?;
            }
            Self::Processing => {
                // do a psync payment
                let action = Box::pin(Action::payment_sync_call(
                    state,
                    revenue_recovery_payment_data,
                    payment_intent,
                    &process_tracker,
                    profile,
                    merchant_context,
                    payment_attempt,
                ))
                .await?;
                //handle the response
                Box::pin(action.psync_response_handler(
                    state,
                    payment_intent,
                    &process_tracker,
                    revenue_recovery_metadata,
                    revenue_recovery_payment_data,
                ))
                .await?;
            }
            Self::InvalidStatus(status) => logger::debug!(
                "Invalid Attempt Status for the Recovery Payment : {}",
                status
            ),
        }
        Ok(())
    }
}
/// Decision taken for a recovery payment based on intent status, whether the
/// connector was already called, and whether an active attempt exists.
pub enum Decision {
    // No attempt in flight and the connector call failed: execute a payment.
    Execute,
    // An attempt is in flight: sync it, carrying its status and attempt id.
    Psync(enums::AttemptStatus, id_type::GlobalAttemptId),
    // Parameter combination not covered by the decision table.
    InvalidDecision,
    // Intent already succeeded; route to review.
    ReviewForSuccessfulPayment,
    // Failed attempt exists; review, carrying who triggered the attempt.
    ReviewForFailedPayment(enums::TriggeredBy),
}
impl Decision {
    /// Derives the next `Decision` from `(intent_status, called_connector,
    /// active_attempt_id)`:
    /// * `Failed` + connector call unsuccessful + no attempt → `Execute`.
    /// * `Processing` + connector call succeeded + attempt → psync the attempt
    ///   and return `Psync` with its status/id.
    /// * `Failed` + connector call unsuccessful + attempt → psync, then
    ///   `ReviewForFailedPayment` carrying who triggered the attempt.
    /// * `Succeeded` (any other params) → `ReviewForSuccessfulPayment`.
    /// * anything else → `InvalidDecision`.
    pub async fn get_decision_based_on_params(
        state: &SessionState,
        intent_status: enums::IntentStatus,
        called_connector: enums::PaymentConnectorTransmission,
        active_attempt_id: Option<id_type::GlobalAttemptId>,
        revenue_recovery_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
        payment_id: &id_type::GlobalPaymentId,
    ) -> RecoveryResult<Self> {
        logger::info!("Entering get_decision_based_on_params");
        Ok(match (intent_status, called_connector, active_attempt_id) {
            (
                enums::IntentStatus::Failed,
                enums::PaymentConnectorTransmission::ConnectorCallUnsuccessful,
                None,
            ) => Self::Execute,
            (
                enums::IntentStatus::Processing,
                enums::PaymentConnectorTransmission::ConnectorCallSucceeded,
                Some(_),
            ) => {
                // Attempt in flight: sync it to learn the latest status.
                let psync_data = revenue_recovery_core::api::call_psync_api(
                    state,
                    payment_id,
                    revenue_recovery_data,
                    true,
                    true,
                )
                .await
                .change_context(errors::RecoveryError::PaymentCallFailed)
                .attach_printable("Error while executing the Psync call")?;
                let payment_attempt = psync_data.payment_attempt;
                Self::Psync(payment_attempt.status, payment_attempt.get_id().clone())
            }
            (
                enums::IntentStatus::Failed,
                enums::PaymentConnectorTransmission::ConnectorCallUnsuccessful,
                Some(_),
            ) => {
                // Failed intent but an attempt exists: sync, then inspect the
                // attempt metadata to learn who triggered it before review.
                let psync_data = revenue_recovery_core::api::call_psync_api(
                    state,
                    payment_id,
                    revenue_recovery_data,
                    true,
                    true,
                )
                .await
                .change_context(errors::RecoveryError::PaymentCallFailed)
                .attach_printable("Error while executing the Psync call")?;
                let payment_attempt = psync_data.payment_attempt;
                let attempt_triggered_by = payment_attempt
                    .feature_metadata
                    .and_then(|metadata| {
                        metadata.revenue_recovery.map(|revenue_recovery_metadata| {
                            revenue_recovery_metadata.attempt_triggered_by
                        })
                    })
                    .get_required_value("Attempt Triggered By")
                    .change_context(errors::RecoveryError::ValueNotFound)?;
                Self::ReviewForFailedPayment(attempt_triggered_by)
            }
            (enums::IntentStatus::Succeeded, _, _) => Self::ReviewForSuccessfulPayment,
            _ => Self::InvalidDecision,
        })
    }
}
/// Next action for a recovery payment after an execute/psync outcome.
#[derive(Debug, Clone)]
pub enum Action {
    // Attempt still in flight; keep syncing it.
    SyncPayment(PaymentAttempt),
    // Schedule a retry at the given time.
    RetryPayment(PrimitiveDateTime),
    // Attempt failed with no further retries; carries the failed attempt.
    TerminalFailure(PaymentAttempt),
    // Attempt succeeded; carries the successful attempt.
    SuccessfulPayment(PaymentAttempt),
    // Route the payment to review.
    ReviewPayment,
    // Requires manual intervention.
    ManualReviewAction,
}
impl Action {
#[allow(clippy::too_many_arguments)]
pub async fn execute_payment(
state: &SessionState,
_merchant_id: &id_type::MerchantId,
payment_intent: &PaymentIntent,
process: &storage::ProcessTracker,
profile: &domain::Profile,
merchant_context: domain::MerchantContext,
revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
revenue_recovery_metadata: &PaymentRevenueRecoveryMetadata,
latest_attempt_id: &id_type::GlobalAttemptId,
) -> RecoveryResult<Self> {
let connector_customer_id = payment_intent
.extract_connector_customer_id_from_payment_intent()
.change_context(errors::RecoveryError::ValueNotFound)
.attach_printable("Failed to extract customer ID from payment intent")?;
let tracking_data: pcr::RevenueRecoveryWorkflowTrackingData =
serde_json::from_value(process.tracking_data.clone())
.change_context(errors::RecoveryError::ValueNotFound)
.attach_printable("Failed to deserialize the tracking data from process tracker")?;
let last_token_used = payment_intent
.feature_metadata
.as_ref()
.and_then(|fm| fm.payment_revenue_recovery_metadata.as_ref())
.map(|rr| {
rr.billing_connector_payment_details
.payment_processor_token
.clone()
});
let recovery_algorithm = tracking_data.revenue_recovery_retry;
let scheduled_token = match storage::revenue_recovery_redis_operation::RedisTokenManager::get_token_based_on_retry_type(
state,
&connector_customer_id,
recovery_algorithm,
last_token_used.as_deref(),
)
.await {
Ok(scheduled_token_opt) => scheduled_token_opt,
Err(e) => {
logger::error!(
error = ?e,
connector_customer_id = %connector_customer_id,
"Failed to get PSP token status"
);
None
}
};
match scheduled_token {
Some(scheduled_token) => {
let response = revenue_recovery_core::api::call_proxy_api(
state,
payment_intent,
revenue_recovery_payment_data,
revenue_recovery_metadata,
&scheduled_token
.payment_processor_token_details
.payment_processor_token,
)
.await;
let recovery_payment_intent =
hyperswitch_domain_models::revenue_recovery::RecoveryPaymentIntent::from(
payment_intent,
);
// handle proxy api's response
match response {
Ok(payment_data) => match payment_data.payment_attempt.status.foreign_into() {
RevenueRecoveryPaymentsAttemptStatus::Succeeded => {
let recovery_payment_attempt =
hyperswitch_domain_models::revenue_recovery::RecoveryPaymentAttempt::from(
&payment_data.payment_attempt,
);
let recovery_payment_tuple =
recovery_incoming_flow::RecoveryPaymentTuple::new(
&recovery_payment_intent,
&recovery_payment_attempt,
);
// publish events to kafka
if let Err(e) = recovery_incoming_flow::RecoveryPaymentTuple::publish_revenue_recovery_event_to_kafka(
state,
&recovery_payment_tuple,
Some(process.retry_count+1)
)
.await{
router_env::logger::error!(
"Failed to publish revenue recovery event to kafka: {:?}",
e
);
};
let is_hard_decline = revenue_recovery::check_hard_decline(
state,
&payment_data.payment_attempt,
)
.await
.ok();
// update the status of token in redis
let _update_error_code = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
state,
&connector_customer_id,
&None,
&is_hard_decline,
Some(&scheduled_token.payment_processor_token_details.payment_processor_token),
)
.await;
// unlocking the token
let _unlock_the_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
state,
&connector_customer_id,
)
.await;
let event_status = common_enums::EventType::PaymentSucceeded;
let payments_response = payment_data
.clone()
.generate_response(
state,
None,
None,
None,
&merchant_context,
profile,
None,
)
.change_context(
errors::RecoveryError::PaymentsResponseGenerationFailed,
)
.attach_printable("Failed while generating response for payment")?;
RevenueRecoveryOutgoingWebhook::send_outgoing_webhook_based_on_revenue_recovery_status(
state,
common_enums::EventClass::Payments,
event_status,
payment_intent,
&merchant_context,
profile,
payment_data.payment_attempt.id.get_string_repr().to_string(),
payments_response
)
.await?;
Ok(Self::SuccessfulPayment(
payment_data.payment_attempt.clone(),
))
}
RevenueRecoveryPaymentsAttemptStatus::Failed => {
let recovery_payment_attempt =
hyperswitch_domain_models::revenue_recovery::RecoveryPaymentAttempt::from(
&payment_data.payment_attempt,
);
let recovery_payment_tuple =
recovery_incoming_flow::RecoveryPaymentTuple::new(
&recovery_payment_intent,
&recovery_payment_attempt,
);
// publish events to kafka
if let Err(e) = recovery_incoming_flow::RecoveryPaymentTuple::publish_revenue_recovery_event_to_kafka(
state,
&recovery_payment_tuple,
Some(process.retry_count+1)
)
.await{
router_env::logger::error!(
"Failed to publish revenue recovery event to kafka: {:?}",
e
);
};
let error_code = payment_data
.payment_attempt
.clone()
.error
.map(|error| error.code);
let is_hard_decline = revenue_recovery::check_hard_decline(
state,
&payment_data.payment_attempt,
)
.await
.ok();
let _update_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
state,
&connector_customer_id,
&error_code,
&is_hard_decline,
Some(&scheduled_token
.payment_processor_token_details
.payment_processor_token)
,
)
.await;
// unlocking the token
let _unlock_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
state,
&connector_customer_id,
)
.await;
// Reopen calculate workflow on payment failure
Box::pin(reopen_calculate_workflow_on_payment_failure(
state,
process,
profile,
merchant_context,
payment_intent,
revenue_recovery_payment_data,
latest_attempt_id,
))
.await?;
// Return terminal failure to finish the current execute workflow
Ok(Self::TerminalFailure(payment_data.payment_attempt.clone()))
}
RevenueRecoveryPaymentsAttemptStatus::Processing => {
Ok(Self::SyncPayment(payment_data.payment_attempt.clone()))
}
RevenueRecoveryPaymentsAttemptStatus::InvalidStatus(action) => {
logger::info!(?action, "Invalid Payment Status For PCR Payment");
Ok(Self::ManualReviewAction)
}
},
Err(err) =>
// check for an active attempt being constructed or not
{
logger::error!(execute_payment_res=?err);
Ok(Self::ReviewPayment)
}
}
}
None => {
let response = revenue_recovery_core::api::call_psync_api(
state,
payment_intent.get_id(),
revenue_recovery_payment_data,
true,
true,
)
.await;
let payment_status_data = response
.change_context(errors::RecoveryError::PaymentCallFailed)
.attach_printable("Error while executing the Psync call")?;
let payment_attempt = payment_status_data.payment_attempt;
logger::info!(
process_id = %process.id,
connector_customer_id = %connector_customer_id,
"No token available, finishing CALCULATE_WORKFLOW"
);
state
.store
.as_scheduler()
.finish_process_with_business_status(
process.clone(),
business_status::CALCULATE_WORKFLOW_FINISH,
)
.await
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable("Failed to finish CALCULATE_WORKFLOW")?;
logger::info!(
process_id = %process.id,
connector_customer_id = %connector_customer_id,
"CALCULATE_WORKFLOW finished successfully"
);
Ok(Self::TerminalFailure(payment_attempt.clone()))
}
}
}
/// Applies the decided execute-task outcome (`self`) to the process tracker
/// and triggers the follow-up action for each variant.
///
/// Variant handling:
/// - `SyncPayment`: schedule a psync workflow task, then close this execute
///   task with the "complete for psync" status.
/// - `RetryPayment`: reschedule this execute task, mark the connector call
///   unsuccessful in the recovery metadata and unset the active attempt via
///   the payments update-intent API.
/// - `TerminalFailure`: finish this execute task with the failure status.
/// - `SuccessfulPayment`: finish this execute task and record the terminal
///   status back to the billing connector.
/// - `ReviewPayment`: requeue this execute task (API-error path).
/// - `ManualReviewAction`: move this execute task into `Review` status.
///
/// # Errors
/// Returns `ProcessTrackerError` when a process-tracker update, the payments
/// update-intent call, or the record-back call fails.
pub async fn execute_payment_task_response_handler(
    &self,
    state: &SessionState,
    payment_intent: &PaymentIntent,
    execute_task_process: &storage::ProcessTracker,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    revenue_recovery_metadata: &mut PaymentRevenueRecoveryMetadata,
) -> Result<(), errors::ProcessTrackerError> {
    logger::info!("Entering execute_payment_task_response_handler");
    let db = &*state.store;
    match self {
        // Payment still in progress: hand over to a psync workflow task and
        // close this execute task for psync follow-up.
        Self::SyncPayment(payment_attempt) => {
            revenue_recovery_core::insert_psync_pcr_task_to_pt(
                revenue_recovery_payment_data.billing_mca.get_id().clone(),
                db,
                revenue_recovery_payment_data
                    .merchant_account
                    .get_id()
                    .to_owned(),
                payment_intent.id.clone(),
                revenue_recovery_payment_data.profile.get_id().to_owned(),
                payment_attempt.id.clone(),
                storage::ProcessTrackerRunner::PassiveRecoveryWorkflow,
                revenue_recovery_payment_data.retry_algorithm,
            )
            .await
            .change_context(errors::RecoveryError::ProcessTrackerFailure)
            .attach_printable("Failed to create a psync workflow in the process tracker")?;
            db.as_scheduler()
                .finish_process_with_business_status(
                    execute_task_process.clone(),
                    business_status::EXECUTE_WORKFLOW_COMPLETE_FOR_PSYNC,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
        // Retry decided: reschedule this task and persist that the previous
        // connector call did not go through.
        Self::RetryPayment(schedule_time) => {
            db.as_scheduler()
                .retry_process(execute_task_process.clone(), *schedule_time)
                .await?;
            // update the connector payment transmission field to Unsuccessful and unset active attempt id
            revenue_recovery_metadata.set_payment_transmission_field_for_api_request(
                enums::PaymentConnectorTransmission::ConnectorCallUnsuccessful,
            );
            let payment_update_req =
                PaymentsUpdateIntentRequest::update_feature_metadata_and_active_attempt_with_api(
                    payment_intent
                        .feature_metadata
                        .clone()
                        .unwrap_or_default()
                        .convert_back()
                        .set_payment_revenue_recovery_metadata_using_api(
                            revenue_recovery_metadata.clone(),
                        ),
                    api_enums::UpdateActiveAttempt::Unset,
                );
            logger::info!(
                "Call made to payments update intent api , with the request body {:?}",
                payment_update_req
            );
            revenue_recovery_core::api::update_payment_intent_api(
                state,
                payment_intent.id.clone(),
                revenue_recovery_payment_data,
                payment_update_req,
            )
            .await
            .change_context(errors::RecoveryError::PaymentCallFailed)?;
            Ok(())
        }
        // No further retries possible: close the task as failed.
        Self::TerminalFailure(payment_attempt) => {
            db.as_scheduler()
                .finish_process_with_business_status(
                    execute_task_process.clone(),
                    business_status::EXECUTE_WORKFLOW_FAILURE,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            // TODO: Add support for retrying failed outgoing recordback webhooks
            Ok(())
        }
        // Payment recovered: close the task and report the terminal status
        // to the billing connector.
        Self::SuccessfulPayment(payment_attempt) => {
            db.as_scheduler()
                .finish_process_with_business_status(
                    execute_task_process.clone(),
                    business_status::EXECUTE_WORKFLOW_COMPLETE,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            // Record back to billing connector for terminal status
            // TODO: Add support for retrying failed outgoing recordback webhooks
            record_back_to_billing_connector(
                state,
                payment_attempt,
                payment_intent,
                &revenue_recovery_payment_data.billing_mca,
            )
            .await
            .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
            .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
        Self::ReviewPayment => {
            // requeue the process tracker in case of error response
            let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                status: enums::ProcessTrackerStatus::Pending,
                business_status: Some(String::from(business_status::EXECUTE_WORKFLOW_REQUEUE)),
            };
            db.as_scheduler()
                .update_process(execute_task_process.clone(), pt_update)
                .await?;
            Ok(())
        }
        Self::ManualReviewAction => {
            logger::debug!("Invalid Payment Status For PCR Payment");
            let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                status: enums::ProcessTrackerStatus::Review,
                business_status: Some(String::from(business_status::EXECUTE_WORKFLOW_COMPLETE)),
            };
            // update the process tracker status as Review
            db.as_scheduler()
                .update_process(execute_task_process.clone(), pt_update)
                .await?;
            Ok(())
        }
    }
}
/// Runs a payment sync (psync) for the given intent and maps the attempt
/// status onto the next workflow action.
///
/// On `Succeeded`/`Failed` the token error state is updated in Redis and the
/// connector-customer lock released; a failure additionally reopens the
/// calculate workflow. `Processing` keeps the psync loop going, and any
/// unexpected status is routed to manual review.
///
/// NOTE(review): the psync response payload is discarded (`Ok(_payment_data)`)
/// and the decision is based on the `payment_attempt` passed in by the
/// caller — confirm that attempt is fresh enough for this to be sound.
pub async fn payment_sync_call(
    state: &SessionState,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    payment_intent: &PaymentIntent,
    process: &storage::ProcessTracker,
    profile: &domain::Profile,
    merchant_context: domain::MerchantContext,
    payment_attempt: PaymentAttempt,
) -> RecoveryResult<Self> {
    logger::info!("Entering payment_sync_call");
    let response = revenue_recovery_core::api::call_psync_api(
        state,
        payment_intent.get_id(),
        revenue_recovery_payment_data,
        true,
        true,
    )
    .await;
    // Token (connector mandate id) used on this attempt, if any.
    let used_token = get_payment_processor_token_id_from_payment_attempt(&payment_attempt);
    match response {
        Ok(_payment_data) => match payment_attempt.status.foreign_into() {
            RevenueRecoveryPaymentsAttemptStatus::Succeeded => {
                let connector_customer_id = payment_intent
                    .extract_connector_customer_id_from_payment_intent()
                    .change_context(errors::RecoveryError::ValueNotFound)
                    .attach_printable("Failed to extract customer ID from payment intent")?;
                // update the status of token in redis
                let _update_error_code = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
                    state,
                    &connector_customer_id,
                    &None,
                    // Since this is succeeded, 'hard_decline' will be false.
                    &Some(false),
                    used_token.as_deref(),
                )
                .await;
                // unlocking the token
                let _unlock_the_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
                    state,
                    &connector_customer_id,
                )
                .await;
                Ok(Self::SuccessfulPayment(payment_attempt))
            }
            RevenueRecoveryPaymentsAttemptStatus::Failed => {
                let connector_customer_id = payment_intent
                    .extract_connector_customer_id_from_payment_intent()
                    .change_context(errors::RecoveryError::ValueNotFound)
                    .attach_printable("Failed to extract customer ID from payment intent")?;
                let error_code = payment_attempt.clone().error.map(|error| error.code);
                // Best-effort hard-decline classification; `ok()` swallows the
                // lookup error and leaves the flag undetermined.
                let is_hard_decline =
                    revenue_recovery::check_hard_decline(state, &payment_attempt)
                        .await
                        .ok();
                let _update_error_code = storage::revenue_recovery_redis_operation::RedisTokenManager::update_payment_processor_token_error_code_from_process_tracker(
                    state,
                    &connector_customer_id,
                    &error_code,
                    &is_hard_decline,
                    used_token.as_deref(),
                )
                .await;
                // unlocking the token
                let _unlock_connector_customer_id = storage::revenue_recovery_redis_operation::RedisTokenManager::unlock_connector_customer_status(
                    state,
                    &connector_customer_id,
                )
                .await;
                // Reopen calculate workflow on payment failure
                Box::pin(reopen_calculate_workflow_on_payment_failure(
                    state,
                    process,
                    profile,
                    merchant_context,
                    payment_intent,
                    revenue_recovery_payment_data,
                    payment_attempt.get_id(),
                ))
                .await?;
                Ok(Self::TerminalFailure(payment_attempt.clone()))
            }
            // Still in flight: keep the psync loop running.
            RevenueRecoveryPaymentsAttemptStatus::Processing => {
                Ok(Self::SyncPayment(payment_attempt))
            }
            RevenueRecoveryPaymentsAttemptStatus::InvalidStatus(action) => {
                logger::info!(?action, "Invalid Payment Status For PCR PSync Payment");
                Ok(Self::ManualReviewAction)
            }
        },
        Err(err) =>
        // if there is an error while psync we create a new Review Task
        {
            logger::error!(sync_payment_response=?err);
            Ok(Self::ReviewPayment)
        }
    }
}
/// Applies the decided psync outcome (`self`) to the process tracker.
///
/// Variant handling:
/// - `SyncPayment`: reschedule the psync task (or finish it when retries are
///   exhausted) via `recovery_retry_sync_task`.
/// - `RetryPayment`: finish the psync task and reschedule the matching
///   execute task at `schedule_time`.
/// - `TerminalFailure`: finish the psync task.
/// - `SuccessfulPayment`: finish the psync task and record the outcome back
///   to the billing connector.
/// - `ReviewPayment`: requeue the psync task (API-error path).
/// - `ManualReviewAction`: move the psync task into `Review` status.
///
/// # Errors
/// Returns `ProcessTrackerError` when a process-tracker operation or the
/// record-back call fails.
pub async fn psync_response_handler(
    &self,
    state: &SessionState,
    payment_intent: &PaymentIntent,
    psync_task_process: &storage::ProcessTracker,
    revenue_recovery_metadata: &mut PaymentRevenueRecoveryMetadata,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
) -> Result<(), errors::ProcessTrackerError> {
    logger::info!("Entering psync_response_handler");
    let db = &*state.store;
    // Connector customer id from the recovery metadata, when present; used
    // for scheduling the retry of the sync task.
    let connector_customer_id = payment_intent
        .feature_metadata
        .as_ref()
        .and_then(|fm| fm.payment_revenue_recovery_metadata.as_ref())
        .map(|rr| {
            rr.billing_connector_payment_details
                .connector_customer_id
                .clone()
        });
    match self {
        Self::SyncPayment(payment_attempt) => {
            // get a schedule time for psync
            // and retry the process if there is a schedule time
            // if None mark the pt status as Retries Exceeded and finish the task
            payment_sync::recovery_retry_sync_task(
                state,
                connector_customer_id,
                revenue_recovery_metadata.connector.to_string(),
                revenue_recovery_payment_data
                    .merchant_account
                    .get_id()
                    .clone(),
                psync_task_process.clone(),
            )
            .await?;
            Ok(())
        }
        Self::RetryPayment(schedule_time) => {
            // finish the psync task
            db.as_scheduler()
                .finish_process_with_business_status(
                    psync_task_process.clone(),
                    business_status::PSYNC_WORKFLOW_COMPLETE,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            // fetch the execute task
            let task = revenue_recovery_core::EXECUTE_WORKFLOW;
            let runner = storage::ProcessTrackerRunner::PassiveRecoveryWorkflow;
            let process_tracker_id = payment_intent
                .get_id()
                .get_execute_revenue_recovery_id(task, runner);
            let execute_task_process = db
                .as_scheduler()
                .find_process_by_id(&process_tracker_id)
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)?
                .get_required_value("Process Tracker")?;
            // retry the execute tasks
            db.as_scheduler()
                .retry_process(execute_task_process, *schedule_time)
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
        Self::TerminalFailure(payment_attempt) => {
            // TODO: Add support for retrying failed outgoing recordback webhooks
            // finish the current psync task
            db.as_scheduler()
                .finish_process_with_business_status(
                    psync_task_process.clone(),
                    business_status::PSYNC_WORKFLOW_COMPLETE,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
        Self::SuccessfulPayment(payment_attempt) => {
            // finish the current psync task
            db.as_scheduler()
                .finish_process_with_business_status(
                    psync_task_process.clone(),
                    business_status::PSYNC_WORKFLOW_COMPLETE,
                )
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            // Record a successful transaction back to Billing Connector
            // TODO: Add support for retrying failed outgoing recordback webhooks
            record_back_to_billing_connector(
                state,
                payment_attempt,
                payment_intent,
                &revenue_recovery_payment_data.billing_mca,
            )
            .await
            .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
            .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
        Self::ReviewPayment => {
            // requeue the process tracker task in case of psync api error
            let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                status: enums::ProcessTrackerStatus::Pending,
                business_status: Some(String::from(business_status::PSYNC_WORKFLOW_REQUEUE)),
            };
            db.as_scheduler()
                .update_process(psync_task_process.clone(), pt_update)
                .await?;
            Ok(())
        }
        Self::ManualReviewAction => {
            logger::debug!("Invalid Payment Status For PCR Payment");
            let pt_update = storage::ProcessTrackerUpdate::StatusUpdate {
                status: enums::ProcessTrackerStatus::Review,
                business_status: Some(String::from(business_status::PSYNC_WORKFLOW_COMPLETE)),
            };
            // update the process tracker status as Review
            db.as_scheduler()
                .update_process(psync_task_process.clone(), pt_update)
                .await
                .change_context(errors::RecoveryError::ProcessTrackerFailure)
                .attach_printable("Failed to update the process tracker")?;
            Ok(())
        }
    }
}
/// Decides whether a failed payment attempt should be retried or treated as
/// a terminal failure.
///
/// Looks up the GSM record for the attempt's error code/message to detect a
/// hard decline, then asks the retry scheduler for the next schedule time.
/// Returns `RetryPayment(schedule_time)` when a slot is available and
/// `TerminalFailure` otherwise.
///
/// # Errors
/// Fails when the payment attempt carries no connector name.
pub(crate) async fn decide_retry_failure_action(
    state: &SessionState,
    merchant_id: &id_type::MerchantId,
    pt: storage::ProcessTracker,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    payment_attempt: &PaymentAttempt,
    payment_intent: &PaymentIntent,
) -> RecoveryResult<Self> {
    // NOTE: the previous `let db = &*state.store;` binding was unused and has
    // been removed.
    let next_retry_count = pt.retry_count + 1;
    let error_message = payment_attempt
        .error
        .as_ref()
        .map(|details| details.message.clone());
    let error_code = payment_attempt
        .error
        .as_ref()
        .map(|details| details.code.clone());
    let connector_name = payment_attempt
        .connector
        .clone()
        .ok_or(errors::RecoveryError::ValueNotFound)
        .attach_printable("unable to derive payment connector from payment attempt")?;
    let gsm_record = helpers::get_gsm_record(
        state,
        error_code,
        error_message,
        connector_name,
        REVENUE_RECOVERY.to_string(),
    )
    .await;
    // A GSM record categorised as a hard decline means this failure is not
    // worth retrying; missing record/category defaults to retriable.
    let is_hard_decline = matches!(
        gsm_record.and_then(|record| record.error_category),
        Some(common_enums::ErrorCategory::HardDecline)
    );
    let schedule_time = revenue_recovery_payment_data
        .get_schedule_time_based_on_retry_type(
            state,
            merchant_id,
            next_retry_count,
            payment_attempt,
            payment_intent,
            is_hard_decline,
        )
        .await;
    match schedule_time {
        Some(schedule_time) => Ok(Self::RetryPayment(schedule_time)),
        None => Ok(Self::TerminalFailure(payment_attempt.clone())),
    }
}
}
/// Reopen calculate workflow when payment fails
pub async fn reopen_calculate_workflow_on_payment_failure(
state: &SessionState,
process: &storage::ProcessTracker,
profile: &domain::Profile,
merchant_context: domain::MerchantContext,
payment_intent: &PaymentIntent,
revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
latest_attempt_id: &id_type::GlobalAttemptId,
) -> RecoveryResult<()> {
let db = &*state.store;
let id = payment_intent.id.clone();
let task = revenue_recovery_core::CALCULATE_WORKFLOW;
let runner = storage::ProcessTrackerRunner::PassiveRecoveryWorkflow;
let old_tracking_data: pcr::RevenueRecoveryWorkflowTrackingData =
serde_json::from_value(process.tracking_data.clone())
.change_context(errors::RecoveryError::ValueNotFound)
.attach_printable("Failed to deserialize the tracking data from process tracker")?;
let new_tracking_data = pcr::RevenueRecoveryWorkflowTrackingData {
payment_attempt_id: latest_attempt_id.clone(),
..old_tracking_data
};
let tracking_data = serde_json::to_value(new_tracking_data)
.change_context(errors::RecoveryError::ValueNotFound)
.attach_printable("Failed to serialize the tracking data for process tracker")?;
// Construct the process tracker ID for CALCULATE_WORKFLOW
let process_tracker_id = format!("{}_{}_{}", runner, task, id.get_string_repr());
logger::info!(
payment_id = %id.get_string_repr(),
process_tracker_id = %process_tracker_id,
"Attempting to reopen CALCULATE_WORKFLOW on payment failure"
);
// Find the existing CALCULATE_WORKFLOW process tracker
let calculate_process = db
.find_process_by_id(&process_tracker_id)
.await
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable("Failed to find CALCULATE_WORKFLOW process tracker")?;
match calculate_process {
Some(process) => {
logger::info!(
payment_id = %id.get_string_repr(),
process_tracker_id = %process_tracker_id,
current_status = %process.business_status,
current_retry_count = process.retry_count,
"Found existing CALCULATE_WORKFLOW, updating status and retry count"
);
// Update the process tracker to reopen the calculate workflow
// 1. Change status from "finish" to "pending"
// 2. Increase retry count by 1
// 3. Set business status to QUEUED
// 4. Schedule for immediate execution
let new_retry_count = process.retry_count + 1;
let new_schedule_time = common_utils::date_time::now()
+ time::Duration::seconds(
state
.conf
.revenue_recovery
.recovery_timestamp
.reopen_workflow_buffer_time_in_seconds,
);
let pt_update = storage::ProcessTrackerUpdate::Update {
name: Some(task.to_string()),
retry_count: Some(new_retry_count),
schedule_time: Some(new_schedule_time),
tracking_data: Some(tracking_data),
business_status: Some(String::from(business_status::PENDING)),
status: Some(common_enums::ProcessTrackerStatus::Pending),
updated_at: Some(common_utils::date_time::now()),
};
db.update_process(process.clone(), pt_update)
.await
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable("Failed to update CALCULATE_WORKFLOW process tracker")?;
logger::info!(
payment_id = %id.get_string_repr(),
process_tracker_id = %process_tracker_id,
new_retry_count = new_retry_count,
new_schedule_time = %new_schedule_time,
"Successfully reopened CALCULATE_WORKFLOW with increased retry count"
);
}
None => {
logger::info!(
payment_id = %id.get_string_repr(),
process_tracker_id = %process_tracker_id,
"CALCULATE_WORKFLOW process tracker not found, creating new entry"
);
let task = "CALCULATE_WORKFLOW";
let db = &*state.store;
// Create process tracker ID in the format: CALCULATE_WORKFLOW_{payment_intent_id}
let process_tracker_id = format!("{runner}_{task}_{}", id.get_string_repr());
// Set scheduled time to current time + buffer time set in configuration
let schedule_time = common_utils::date_time::now()
+ time::Duration::seconds(
state
.conf
.revenue_recovery
.recovery_timestamp
.reopen_workflow_buffer_time_in_seconds,
);
let new_retry_count = process.retry_count + 1;
// Check if a process tracker entry already exists for this payment intent
let existing_entry = db
.as_scheduler()
.find_process_by_id(&process_tracker_id)
.await
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable(
"Failed to check for existing calculate workflow process tracker entry",
)?;
// No entry exists - create a new one
router_env::logger::info!(
"No existing CALCULATE_WORKFLOW task found for payment_intent_id: {}, creating new entry... ",
id.get_string_repr()
);
let tag = ["PCR"];
let runner = storage::ProcessTrackerRunner::PassiveRecoveryWorkflow;
let process_tracker_entry = storage::ProcessTrackerNew::new(
&process_tracker_id,
task,
runner,
tag,
process.tracking_data.clone(),
Some(new_retry_count),
schedule_time,
common_types::consts::API_VERSION,
)
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable("Failed to construct calculate workflow process tracker entry")?;
// Insert into process tracker with status New
db.as_scheduler()
.insert_process(process_tracker_entry)
.await
.change_context(errors::RecoveryError::ProcessTrackerFailure)
.attach_printable(
"Failed to enter calculate workflow process_tracker_entry in DB",
)?;
router_env::logger::info!(
"Successfully created new CALCULATE_WORKFLOW task for payment_intent_id: {}",
id.get_string_repr()
);
logger::info!(
payment_id = %id.get_string_repr(),
process_tracker_id = %process_tracker_id,
"Successfully created new CALCULATE_WORKFLOW entry using perform_calculate_workflow"
);
}
}
Ok(())
}
// TODO: Move these to impl based functions
/// Records a terminal payment outcome back to the billing connector.
///
/// Resolves the connector from the billing merchant connector account,
/// builds the invoice record-back router data, triggers the connector call,
/// and treats a connector-side error response as a record-back failure.
async fn record_back_to_billing_connector(
    state: &SessionState,
    payment_attempt: &PaymentAttempt,
    payment_intent: &PaymentIntent,
    billing_mca: &merchant_connector_account::MerchantConnectorAccount,
) -> RecoveryResult<()> {
    logger::info!("Entering record_back_to_billing_connector");
    let billing_connector_name = billing_mca.connector_name.to_string();
    let connector_data = api_types::ConnectorData::get_connector_by_name(
        &state.conf.connectors,
        &billing_connector_name,
        api_types::GetToken::Connector,
        Some(billing_mca.get_id()),
    )
    .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
    .attach_printable("invalid connector name received in billing merchant connector account")?;
    let connector_integration: services::BoxedRevenueRecoveryRecordBackInterface<
        router_flow_types::InvoiceRecordBack,
        revenue_recovery_request::InvoiceRecordBackRequest,
        revenue_recovery_response::InvoiceRecordBackResponse,
    > = connector_data.connector.get_connector_integration();
    let record_back_router_data = construct_invoice_record_back_router_data(
        state,
        billing_mca,
        payment_attempt,
        payment_intent,
    )?;
    let connector_response = services::execute_connector_processing_step(
        state,
        connector_integration,
        &record_back_router_data,
        payments::CallConnectorAction::Trigger,
        None,
        None,
    )
    .await
    .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
    .attach_printable("Failed while handling response of record back to billing connector")?;
    // A connector-side error response is surfaced as a record-back failure.
    if let error @ Err(_) = &connector_response.response {
        router_env::logger::error!(?error);
        return Err(errors::RecoveryError::RecordBackToBillingConnectorFailed)
            .attach_printable("Failed while recording back to billing connector");
    }
    Ok(())
}
/// Builds the `InvoiceRecordBackRouterData` used to report a terminal
/// payment outcome to the billing connector.
///
/// # Errors
/// Fails when the stored connector auth details cannot be parsed, the
/// merchant reference id is missing on the intent, or the connector
/// name/params cannot be resolved.
pub fn construct_invoice_record_back_router_data(
    state: &SessionState,
    billing_mca: &merchant_connector_account::MerchantConnectorAccount,
    payment_attempt: &PaymentAttempt,
    payment_intent: &PaymentIntent,
) -> RecoveryResult<hyperswitch_domain_models::types::InvoiceRecordBackRouterData> {
    logger::info!("Entering construct_invoice_record_back_router_data");
    // Parse the connector account details stored on the billing MCA into a
    // typed auth struct; the annotation drives `parse_value`.
    let auth_type: types::ConnectorAuthType =
        helpers::MerchantConnectorAccountType::DbVal(Box::new(billing_mca.clone()))
            .get_connector_account_details()
            .parse_value("ConnectorAuthType")
            .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)?;
    let merchant_reference_id = payment_intent
        .merchant_reference_id
        .clone()
        .ok_or(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable(
            "Merchant reference id not found while recording back to billing connector",
        )?;
    let connector_name = billing_mca.get_connector_name_as_string();
    let connector = common_enums::connector_enums::Connector::from_str(connector_name.as_str())
        .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable("Cannot find connector from the connector_name")?;
    let connector_params =
        hyperswitch_domain_models::connector_endpoints::Connectors::get_connector_params(
            &state.conf.connectors,
            connector,
        )
        .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable(format!(
            "cannot find connector params for this connector {connector} in this flow",
        ))?;
    let router_data = router_data_v2::RouterDataV2 {
        flow: PhantomData::<router_flow_types::InvoiceRecordBack>,
        tenant_id: state.tenant.tenant_id.clone(),
        resource_common_data: flow_common_types::InvoiceRecordBackData {
            connector_meta_data: None,
        },
        connector_auth_type: auth_type,
        request: revenue_recovery_request::InvoiceRecordBackRequest {
            merchant_reference_id,
            amount: payment_attempt.get_total_amount(),
            currency: payment_intent.amount_details.currency,
            payment_method_type: Some(payment_attempt.payment_method_subtype),
            attempt_status: payment_attempt.status,
            connector_transaction_id: payment_attempt
                .connector_payment_id
                .as_ref()
                .map(|id| common_utils::types::ConnectorTransactionId::TxnId(id.clone())),
            connector_params,
        },
        response: Err(types::ErrorResponse::default()),
    };
    // Bridge the v2 router data back into the legacy router-data shape used
    // by the connector integration layer.
    let old_router_data = flow_common_types::InvoiceRecordBackData::to_old_router_data(router_data)
        .change_context(errors::RecoveryError::RecordBackToBillingConnectorFailed)
        .attach_printable("Cannot construct record back router data")?;
    Ok(old_router_data)
}
/// Extracts the payment processor token (connector mandate id) that was used
/// on the given payment attempt, if any.
pub fn get_payment_processor_token_id_from_payment_attempt(
    payment_attempt: &PaymentAttempt,
) -> Option<String> {
    let token = match payment_attempt.connector_token_details.as_ref() {
        Some(details) => details.connector_mandate_id.clone(),
        None => None,
    };
    logger::info!("Used token in the payment attempt : {:?}", token);
    token
}
/// Marker type grouping the outgoing-webhook helpers used by the revenue
/// recovery flows.
pub struct RevenueRecoveryOutgoingWebhook;
impl RevenueRecoveryOutgoingWebhook {
    /// Sends an outgoing webhook for a revenue-recovery payment status
    /// change.
    ///
    /// Only `JsonWithHeaders` responses carry a payload that can be
    /// forwarded; any other `ApplicationResponse` variant is reported as an
    /// outgoing-webhook failure.
    #[allow(clippy::too_many_arguments)]
    pub async fn send_outgoing_webhook_based_on_revenue_recovery_status(
        state: &SessionState,
        event_class: common_enums::EventClass,
        event_status: common_enums::EventType,
        payment_intent: &PaymentIntent,
        merchant_context: &domain::MerchantContext,
        profile: &domain::Profile,
        payment_attempt_id: String,
        payments_response: ApplicationResponse<api_models::payments::PaymentsResponse>,
    ) -> RecoveryResult<()> {
        // Guard: anything other than a JSON payload cannot be forwarded.
        let ApplicationResponse::JsonWithHeaders((response, _headers)) = payments_response
        else {
            // Handle other successful response types if needed
            logger::warn!("Unexpected application response variant for outgoing webhook");
            return Err(errors::RecoveryError::RevenueRecoveryOutgoingWebhookFailed.into());
        };
        let outgoing_webhook_content =
            api_models::webhooks::OutgoingWebhookContent::PaymentDetails(Box::new(response));
        create_event_and_trigger_outgoing_webhook(
            state.clone(),
            profile.clone(),
            merchant_context.get_merchant_key_store(),
            event_status,
            event_class,
            payment_attempt_id,
            enums::EventObjectType::PaymentDetails,
            outgoing_webhook_content,
            payment_intent.created_at,
        )
        .await
        .change_context(errors::RecoveryError::InvalidTask)
        .attach_printable("Failed to send out going webhook")?;
        Ok(())
    }
}
// File: crates/router/src/core/revenue_recovery/transformers.rs
use common_enums::AttemptStatus;
use masking::PeekInterface;
use crate::{
core::revenue_recovery::types::RevenueRecoveryPaymentsAttemptStatus,
types::transformers::ForeignFrom,
};
impl ForeignFrom<AttemptStatus> for RevenueRecoveryPaymentsAttemptStatus {
    /// Collapses the fine-grained `AttemptStatus` into the coarse status set
    /// used by the recovery workflows: `Succeeded`, `Processing`, `Failed`,
    /// or `InvalidStatus` for states recovery cannot act on.
    fn foreign_from(s: AttemptStatus) -> Self {
        match s {
            // Authorization/capture (full or partial) went through.
            AttemptStatus::Authorized
            | AttemptStatus::Charged
            | AttemptStatus::AutoRefunded
            | AttemptStatus::PartiallyAuthorized
            | AttemptStatus::PartialCharged
            | AttemptStatus::PartialChargedAndChargeable => Self::Succeeded,
            // Attempt is still in flight; keep syncing.
            AttemptStatus::Started
            | AttemptStatus::AuthenticationSuccessful
            | AttemptStatus::Authorizing
            | AttemptStatus::CodInitiated
            | AttemptStatus::VoidInitiated
            | AttemptStatus::CaptureInitiated
            | AttemptStatus::Pending => Self::Processing,
            // Attempt terminally failed; eligible for retry decisioning.
            AttemptStatus::AuthenticationFailed
            | AttemptStatus::AuthorizationFailed
            | AttemptStatus::VoidFailed
            | AttemptStatus::RouterDeclined
            | AttemptStatus::CaptureFailed
            | AttemptStatus::Failure => Self::Failed,
            // States the recovery flow does not handle; surfaced with the
            // original status string for manual review.
            AttemptStatus::Voided
            | AttemptStatus::VoidedPostCharge
            | AttemptStatus::ConfirmationAwaited
            | AttemptStatus::PaymentMethodAwaited
            | AttemptStatus::AuthenticationPending
            | AttemptStatus::DeviceDataCollectionPending
            | AttemptStatus::Unresolved
            | AttemptStatus::IntegrityFailure
            | AttemptStatus::Expired => Self::InvalidStatus(s.to_string()),
        }
    }
}
impl ForeignFrom<api_models::payments::RecoveryPaymentsCreate>
    for hyperswitch_domain_models::revenue_recovery::RevenueRecoveryInvoiceData
{
    /// Maps an incoming recovery-payments-create request onto the invoice
    /// data consumed by the recovery flows.
    fn foreign_from(data: api_models::payments::RecoveryPaymentsCreate) -> Self {
        Self {
            amount: data.amount_details.order_amount().into(),
            currency: data.amount_details.currency(),
            merchant_reference_id: data.merchant_reference_id,
            billing_address: data.billing,
            // Not present on the create request, so left unset here.
            retry_count: None,
            next_billing_at: None,
            billing_started_at: data.billing_started_at,
            metadata: data.metadata,
            enable_partial_authorization: data.enable_partial_authorization,
        }
    }
}
impl ForeignFrom<&api_models::payments::RecoveryPaymentsCreate>
    for hyperswitch_domain_models::revenue_recovery::RevenueRecoveryAttemptData
{
    /// Maps an incoming recovery-payments-create request onto the attempt
    /// data used by the recovery flows, peeking masked fields (token,
    /// customer id) into plain strings.
    fn foreign_from(data: &api_models::payments::RecoveryPaymentsCreate) -> Self {
        Self {
            amount: data.amount_details.order_amount().into(),
            currency: data.amount_details.currency(),
            merchant_reference_id: data.merchant_reference_id.to_owned(),
            // Connector transaction id, when supplied, is wrapped as a plain
            // txn id.
            connector_transaction_id: data.connector_transaction_id.as_ref().map(|txn_id| {
                common_utils::types::ConnectorTransactionId::TxnId(txn_id.peek().to_string())
            }),
            error_code: data.error.as_ref().map(|error| error.code.clone()),
            error_message: data.error.as_ref().map(|error| error.message.clone()),
            processor_payment_method_token: data
                .payment_method_data
                .primary_processor_payment_method_token
                .peek()
                .to_string(),
            connector_customer_id: data.connector_customer_id.peek().to_string(),
            connector_account_reference_id: data
                .payment_merchant_connector_id
                .get_string_repr()
                .to_string(),
            transaction_created_at: data.transaction_created_at.to_owned(),
            status: data.attempt_status,
            payment_method_type: data.payment_method_type,
            payment_method_sub_type: data.payment_method_sub_type,
            // Network-level error details are optional on the request error
            // object.
            network_advice_code: data
                .error
                .as_ref()
                .and_then(|error| error.network_advice_code.clone()),
            network_decline_code: data
                .error
                .as_ref()
                .and_then(|error| error.network_decline_code.clone()),
            network_error_message: data
                .error
                .as_ref()
                .and_then(|error| error.network_error_message.clone()),
            // retry count will be updated whenever there is new attempt is created.
            retry_count: None,
            invoice_next_billing_time: None,
            invoice_billing_started_at_time: data.billing_started_at,
            card_info: data
                .payment_method_data
                .additional_payment_method_info
                .clone(),
            charge_id: None,
        }
    }
}
// File: crates/router/src/core/revenue_recovery/api.rs
use actix_web::{web, Responder};
use api_models::{payments as payments_api, payments as api_payments};
use common_utils::id_type;
use error_stack::{report, FutureExt, ResultExt};
use hyperswitch_domain_models::{
merchant_context::{Context, MerchantContext},
payments as payments_domain,
};
use crate::{
core::{
errors::{self, RouterResult},
payments::{self, operations::Operation},
webhooks::recovery_incoming,
},
db::{
errors::{RouterResponse, StorageErrorExt},
storage::revenue_recovery_redis_operation::RedisTokenManager,
},
logger,
routes::{app::ReqState, SessionState},
services,
types::{
api::payments as api_types,
domain,
storage::{self, revenue_recovery as revenue_recovery_types},
transformers::ForeignFrom,
},
};
/// Runs an internal payment-status sync (PSync) for the given global payment
/// id on behalf of the revenue-recovery flow.
///
/// Builds a `PaymentsRetrieveRequest`, assembles a `MerchantContext` from the
/// recovery payment data's merchant account and key store, resolves the
/// payment trackers, and then drives `payments_operation_core` directly
/// (bypassing the HTTP API handler).
///
/// # Errors
/// Propagates any error from tracker resolution or the payments operation
/// core.
pub async fn call_psync_api(
    state: &SessionState,
    global_payment_id: &id_type::GlobalPaymentId,
    revenue_recovery_data: &revenue_recovery_types::RevenueRecoveryPaymentData,
    force_sync_bool: bool,
    expand_attempts_bool: bool,
) -> RouterResult<payments_domain::PaymentStatusData<api_types::PSync>> {
    let operation = payments::operations::PaymentGet;
    let req = payments_api::PaymentsRetrieveRequest {
        force_sync: force_sync_bool,
        param: None,
        expand_attempts: expand_attempts_bool,
        return_raw_connector_response: None,
        merchant_connector_details: None,
    };
    let merchant_context_from_revenue_recovery_data =
        MerchantContext::NormalMerchant(Box::new(Context(
            revenue_recovery_data.merchant_account.clone(),
            revenue_recovery_data.key_store.clone(),
        )));
    // TODO : Use api handler instead of calling get_tracker and payments_operation_core
    // Get the tracker related information. This includes payment intent and payment attempt
    let get_tracker_response = operation
        .to_get_tracker()?
        .get_trackers(
            state,
            global_payment_id,
            &req,
            &merchant_context_from_revenue_recovery_data,
            &revenue_recovery_data.profile,
            &payments_domain::HeaderPayload::default(),
        )
        .await?;
    // Box::pin keeps this large future off the stack; only the payment data
    // from the returned tuple is needed here.
    let (payment_data, _req, _, _, _, _) = Box::pin(payments::payments_operation_core::<
        api_types::PSync,
        _,
        _,
        _,
        payments_domain::PaymentStatusData<api_types::PSync>,
    >(
        state,
        state.get_req_state(),
        merchant_context_from_revenue_recovery_data,
        &revenue_recovery_data.profile,
        operation,
        req,
        get_tracker_response,
        payments::CallConnectorAction::Trigger,
        payments_domain::HeaderPayload::default(),
    ))
    .await?;
    Ok(payment_data)
}
/// Triggers an internal proxy payment (Authorize) for a recovery retry using
/// a stored payment-processor token.
///
/// Builds a `ProxyPaymentsRequest` from the payment intent's amount details
/// and the recovery metadata's connector information, resolves the trackers,
/// and drives `proxy_for_payments_operation_core` directly (bypassing the
/// HTTP API handler).
///
/// # Errors
/// Propagates any error from tracker resolution or the proxy payments
/// operation core.
pub async fn call_proxy_api(
    state: &SessionState,
    payment_intent: &payments_domain::PaymentIntent,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    revenue_recovery: &payments_api::PaymentRevenueRecoveryMetadata,
    payment_processor_token: &str,
) -> RouterResult<payments_domain::PaymentConfirmData<api_types::Authorize>> {
    let operation = payments::operations::proxy_payments_intent::PaymentProxyIntent;
    // The proxy flow charges against a connector-side token rather than raw
    // payment method data.
    let recurring_details = api_models::mandates::ProcessorPaymentToken {
        processor_payment_token: payment_processor_token.to_string(),
        merchant_connector_id: Some(revenue_recovery.get_merchant_connector_id_for_api_request()),
    };
    let req = payments_api::ProxyPaymentsRequest {
        return_url: None,
        amount: payments_api::AmountDetails::new(payment_intent.amount_details.clone().into()),
        recurring_details,
        shipping: None,
        browser_info: None,
        connector: revenue_recovery.connector.to_string(),
        merchant_connector_id: revenue_recovery.get_merchant_connector_id_for_api_request(),
    };
    logger::info!(
        "Call made to payments proxy api , with the request body {:?}",
        req
    );
    let merchant_context_from_revenue_recovery_payment_data =
        MerchantContext::NormalMerchant(Box::new(Context(
            revenue_recovery_payment_data.merchant_account.clone(),
            revenue_recovery_payment_data.key_store.clone(),
        )));
    // TODO : Use api handler instead of calling get_tracker and payments_operation_core
    // Get the tracker related information. This includes payment intent and payment attempt
    let get_tracker_response = operation
        .to_get_tracker()?
        .get_trackers(
            state,
            payment_intent.get_id(),
            &req,
            &merchant_context_from_revenue_recovery_payment_data,
            &revenue_recovery_payment_data.profile,
            &payments_domain::HeaderPayload::default(),
        )
        .await?;
    let (payment_data, _req, _, _) = Box::pin(payments::proxy_for_payments_operation_core::<
        api_types::Authorize,
        _,
        _,
        _,
        payments_domain::PaymentConfirmData<api_types::Authorize>,
    >(
        state,
        state.get_req_state(),
        merchant_context_from_revenue_recovery_payment_data,
        revenue_recovery_payment_data.profile.clone(),
        operation,
        req,
        get_tracker_response,
        payments::CallConnectorAction::Trigger,
        payments_domain::HeaderPayload::default(),
        None,
    ))
    .await?;
    Ok(payment_data)
}
/// Applies a `PaymentsUpdateIntentRequest` to the given global payment id as
/// part of the revenue-recovery flow, calling
/// `payments_intent_operation_core` directly instead of the HTTP API handler.
///
/// # Errors
/// Propagates any error from the payments intent operation core.
pub async fn update_payment_intent_api(
    state: &SessionState,
    global_payment_id: id_type::GlobalPaymentId,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    update_req: payments_api::PaymentsUpdateIntentRequest,
) -> RouterResult<payments_domain::PaymentIntentData<api_types::PaymentUpdateIntent>> {
    // TODO : Use api handler instead of calling payments_intent_operation_core
    let operation = payments::operations::PaymentUpdateIntent;
    let merchant_context_from_revenue_recovery_payment_data =
        MerchantContext::NormalMerchant(Box::new(Context(
            revenue_recovery_payment_data.merchant_account.clone(),
            revenue_recovery_payment_data.key_store.clone(),
        )));
    let (payment_data, _req, _) = payments::payments_intent_operation_core::<
        api_types::PaymentUpdateIntent,
        _,
        _,
        payments_domain::PaymentIntentData<api_types::PaymentUpdateIntent>,
    >(
        state,
        state.get_req_state(),
        merchant_context_from_revenue_recovery_payment_data,
        revenue_recovery_payment_data.profile.clone(),
        operation,
        update_req,
        global_payment_id,
        payments_domain::HeaderPayload::default(),
    )
    .await?;
    Ok(payment_data)
}
/// Records an internally-triggered payment attempt against the given intent
/// for the revenue-recovery workflow.
///
/// Constructs the attempt payload from the recovery metadata, card info and
/// processor token, then calls `record_attempt_core` directly. Only a
/// `JsonWithHeaders` application response is considered success; any other
/// response shape or error is mapped to `PaymentNotFound`.
///
/// # Errors
/// Returns `GenericNotFoundError` when the attempt data or record request
/// cannot be built, and `PaymentNotFound` when recording fails.
pub async fn record_internal_attempt_api(
    state: &SessionState,
    payment_intent: &payments_domain::PaymentIntent,
    revenue_recovery_payment_data: &storage::revenue_recovery::RevenueRecoveryPaymentData,
    revenue_recovery_metadata: &payments_api::PaymentRevenueRecoveryMetadata,
    card_info: payments_api::AdditionalCardInfo,
    payment_processor_token: &str,
) -> RouterResult<payments_api::PaymentAttemptRecordResponse> {
    let revenue_recovery_attempt_data =
        recovery_incoming::RevenueRecoveryAttempt::get_revenue_recovery_attempt(
            payment_intent,
            revenue_recovery_metadata,
            &revenue_recovery_payment_data.billing_mca,
            card_info,
            payment_processor_token,
        )
        .change_context(errors::ApiErrorResponse::GenericNotFoundError {
            message: "get_revenue_recovery_attempt was not constructed".to_string(),
        })?;
    let request_payload = revenue_recovery_attempt_data
        .create_payment_record_request(
            state,
            &revenue_recovery_payment_data.billing_mca.id,
            Some(
                revenue_recovery_metadata
                    .active_attempt_payment_connector_id
                    .clone(),
            ),
            Some(revenue_recovery_metadata.connector),
            // Internally-triggered attempt (scheduler/retry), not a merchant call.
            common_enums::TriggeredBy::Internal,
        )
        .await
        .change_context(errors::ApiErrorResponse::GenericNotFoundError {
            message: "Cannot Create the payment record Request".to_string(),
        })?;
    let merchant_context_from_revenue_recovery_payment_data =
        MerchantContext::NormalMerchant(Box::new(Context(
            revenue_recovery_payment_data.merchant_account.clone(),
            revenue_recovery_payment_data.key_store.clone(),
        )));
    let attempt_response = Box::pin(payments::record_attempt_core(
        state.clone(),
        state.get_req_state(),
        merchant_context_from_revenue_recovery_payment_data,
        revenue_recovery_payment_data.profile.clone(),
        request_payload,
        payment_intent.id.clone(),
        hyperswitch_domain_models::payments::HeaderPayload::default(),
    ))
    .await;
    match attempt_response {
        Ok(services::ApplicationResponse::JsonWithHeaders((attempt_response, _))) => {
            Ok(attempt_response)
        }
        // Any non-JSON application response is unexpected for this core call.
        Ok(_) => Err(errors::ApiErrorResponse::PaymentNotFound)
            .attach_printable("Unexpected response from record attempt core"),
        error @ Err(_) => {
            router_env::logger::error!(?error);
            Err(errors::ApiErrorResponse::PaymentNotFound)
                .attach_printable("failed to record attempt for revenue recovery workflow")
        }
    }
}
/// Core handler for the custom revenue-recovery payments-create API.
///
/// Flow: resolve the payment and billing merchant connector accounts, get or
/// create the recovery intent, load/record the recovery attempt, then hand
/// both to the configured recovery action. Responds with the (possibly
/// updated) intent's id, status and merchant reference id.
///
/// # Errors
/// Returns `MerchantConnectorAccountNotFound` when either connector account
/// lookup fails, and `GenericNotFoundError` for failures in the intent,
/// attempt or action-handling steps.
pub async fn custom_revenue_recovery_core(
    state: SessionState,
    req_state: ReqState,
    merchant_context: MerchantContext,
    profile: domain::Profile,
    request: api_models::payments::RecoveryPaymentsCreate,
) -> RouterResponse<payments_api::RecoveryPaymentsResponse> {
    let store = state.store.as_ref();
    let key_manager_state = &(&state).into();
    let payment_merchant_connector_account_id = request.payment_merchant_connector_id.to_owned();
    // Find the payment & billing merchant connector id at the top level to avoid multiple DB calls.
    let payment_merchant_connector_account = store
        .find_merchant_connector_account_by_id(
            key_manager_state,
            &payment_merchant_connector_account_id,
            merchant_context.get_merchant_key_store(),
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
            id: payment_merchant_connector_account_id
                .clone()
                .get_string_repr()
                .to_string(),
        })?;
    let billing_connector_account = store
        .find_merchant_connector_account_by_id(
            key_manager_state,
            &request.billing_merchant_connector_id.clone(),
            merchant_context.get_merchant_key_store(),
        )
        .await
        .to_not_found_response(errors::ApiErrorResponse::MerchantConnectorAccountNotFound {
            id: request
                .billing_merchant_connector_id
                .clone()
                .get_string_repr()
                .to_string(),
        })?;
    // Idempotent on merchant_reference_id: reuses an existing recovery intent
    // when one already exists for this request.
    let recovery_intent =
        recovery_incoming::RevenueRecoveryInvoice::get_or_create_custom_recovery_intent(
            request.clone(),
            &state,
            &req_state,
            &merchant_context,
            &profile,
        )
        .await
        .change_context(errors::ApiErrorResponse::GenericNotFoundError {
            message: format!(
                "Failed to load recovery intent for merchant reference id : {:?}",
                request.merchant_reference_id.to_owned()
            )
            .to_string(),
        })?;
    let (revenue_recovery_attempt_data, updated_recovery_intent) =
        recovery_incoming::RevenueRecoveryAttempt::load_recovery_attempt_from_api(
            request.clone(),
            &state,
            &req_state,
            &merchant_context,
            &profile,
            recovery_intent.clone(),
            payment_merchant_connector_account,
        )
        .await
        .change_context(errors::ApiErrorResponse::GenericNotFoundError {
            message: format!(
                "Failed to load recovery attempt for merchant reference id : {:?}",
                request.merchant_reference_id.to_owned()
            )
            .to_string(),
        })?;
    // Retry count lives in the intent's feature metadata; its absence is an
    // invariant violation for recovery intents.
    let intent_retry_count = updated_recovery_intent
        .feature_metadata
        .as_ref()
        .and_then(|metadata| metadata.get_retry_count())
        .ok_or(report!(errors::ApiErrorResponse::GenericNotFoundError {
            message: "Failed to fetch retry count from intent feature metadata".to_string(),
        }))?;
    router_env::logger::info!("Intent retry count: {:?}", intent_retry_count);
    let recovery_action = recovery_incoming::RecoveryAction {
        action: request.action.to_owned(),
    };
    // The retry ceiling is configured per billing connector account.
    let mca_retry_threshold = billing_connector_account
        .get_retry_threshold()
        .ok_or(report!(errors::ApiErrorResponse::GenericNotFoundError {
            message: "Failed to fetch retry threshold from billing merchant connector account"
                .to_string(),
        }))?;
    recovery_action
        .handle_action(
            &state,
            &profile,
            &merchant_context,
            &billing_connector_account,
            mca_retry_threshold,
            intent_retry_count,
            &(
                Some(revenue_recovery_attempt_data),
                updated_recovery_intent.clone(),
            ),
        )
        .await
        .change_context(errors::ApiErrorResponse::GenericNotFoundError {
            message: "Unexpected response from recovery core".to_string(),
        })?;
    let response = api_models::payments::RecoveryPaymentsResponse {
        id: updated_recovery_intent.payment_id.to_owned(),
        intent_status: updated_recovery_intent.status.to_owned(),
        merchant_reference_id: updated_recovery_intent.merchant_reference_id.to_owned(),
    };
    Ok(hyperswitch_domain_models::api::ApplicationResponse::Json(
        response,
    ))
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/revenue_recovery/types.rs",
"crates/router/src/core/revenue_recovery/transformers.rs",
"crates/router/src/core/revenue_recovery/api.rs"
],
"module": "crates/router/src/core/revenue_recovery",
"num_files": 3,
"token_count": 14273
}
|
module_-4543002581799502785
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/errors
Files: 6
</path>
<module>
// File: crates/router/src/core/errors/customers_error_response.rs
use http::StatusCode;
#[derive(Debug, thiserror::Error)]
pub enum CustomersErrorResponse {
#[error("Customer has already been redacted")]
CustomerRedacted,
#[error("Something went wrong")]
InternalServerError,
#[error("Invalid request data: {message}")]
InvalidRequestData { message: String },
#[error("Customer has already been redacted")]
MandateActive,
#[error("Customer does not exist in our records")]
CustomerNotFound,
#[error("Customer with the given customer id already exists")]
CustomerAlreadyExists,
}
impl actix_web::ResponseError for CustomersErrorResponse {
    /// Delegates to the API error representation so the HTTP status always
    /// matches what `ErrorSwitch` would produce.
    fn status_code(&self) -> StatusCode {
        let api_error: api_models::errors::types::ApiErrorResponse =
            common_utils::errors::ErrorSwitch::switch(self);
        api_error.status_code()
    }

    /// Builds the HTTP response body from the switched API error.
    fn error_response(&self) -> actix_web::HttpResponse {
        let api_error: api_models::errors::types::ApiErrorResponse =
            common_utils::errors::ErrorSwitch::switch(self);
        api_error.error_response()
    }
}
// File: crates/router/src/core/errors/transformers.rs
use common_utils::errors::ErrorSwitch;
use hyperswitch_domain_models::errors::api_error_response::ApiErrorResponse;
use super::{CustomersErrorResponse, StorageError};
impl ErrorSwitch<api_models::errors::types::ApiErrorResponse> for CustomersErrorResponse {
    /// Maps each customers error onto its stable API error code
    /// ("IR" = invalid request, "HE" = hyperswitch error) and HTTP class.
    fn switch(&self) -> api_models::errors::types::ApiErrorResponse {
        use api_models::errors::types::{ApiError, ApiErrorResponse as AER};
        match self {
            Self::CustomerRedacted => AER::BadRequest(ApiError::new(
                "IR",
                11,
                "Customer has already been redacted",
                None,
            )),
            Self::InternalServerError => {
                AER::InternalServerError(ApiError::new("HE", 0, "Something went wrong", None))
            }
            // Fixed: added the missing space after the colon in the
            // user-facing message ("provided:{...}" -> "provided: {...}").
            Self::InvalidRequestData { message } => AER::BadRequest(ApiError::new(
                "IR",
                7,
                format!("Invalid value provided: {message}"),
                None,
            )),
            // Fixed user-facing typo: "subsciption" -> "subscription".
            Self::MandateActive => AER::BadRequest(ApiError::new(
                "IR",
                10,
                "Customer has active mandate/subscription",
                None,
            )),
            Self::CustomerNotFound => AER::NotFound(ApiError::new(
                "HE",
                2,
                "Customer does not exist in our records",
                None,
            )),
            Self::CustomerAlreadyExists => AER::BadRequest(ApiError::new(
                "IR",
                12,
                "Customer with the given `customer_id` already exists",
                None,
            )),
        }
    }
}
impl ErrorSwitch<CustomersErrorResponse> for StorageError {
fn switch(&self) -> CustomersErrorResponse {
use CustomersErrorResponse as CER;
match self {
err if err.is_db_not_found() => CER::CustomerNotFound,
Self::CustomerRedacted => CER::CustomerRedacted,
_ => CER::InternalServerError,
}
}
}
impl ErrorSwitch<CustomersErrorResponse> for common_utils::errors::CryptoError {
    /// Crypto failures carry no customer-actionable detail, so every
    /// variant is surfaced as an opaque internal server error.
    fn switch(&self) -> CustomersErrorResponse {
        CustomersErrorResponse::InternalServerError
    }
}
impl ErrorSwitch<CustomersErrorResponse> for ApiErrorResponse {
    /// Narrows the general API error set down to the customers error set.
    /// Only mandate and not-found states survive the mapping; every other
    /// variant (including `InternalServerError` itself) collapses to an
    /// internal server error.
    fn switch(&self) -> CustomersErrorResponse {
        use CustomersErrorResponse as CER;
        match self {
            Self::MandateActive => CER::MandateActive,
            Self::CustomerNotFound => CER::CustomerNotFound,
            _ => CER::InternalServerError,
        }
    }
}
// File: crates/router/src/core/errors/user.rs
use common_utils::errors::CustomResult;
use crate::services::ApplicationResponse;
pub type UserResult<T> = CustomResult<T, UserErrors>;
pub type UserResponse<T> = CustomResult<ApplicationResponse<T>, UserErrors>;
pub mod sample_data;
/// Errors produced by user-management flows (authentication, roles, 2FA,
/// themes, connector cloning, ...).
///
/// The `#[error]` strings here are terse internal identifiers; user-facing
/// text comes from `UserErrors::get_error_message`, and the `ErrorSwitch`
/// impl below assigns each variant a stable "UR" sub-code and HTTP class.
#[derive(Debug, thiserror::Error)]
pub enum UserErrors {
    #[error("User InternalServerError")]
    InternalServerError,
    #[error("InvalidCredentials")]
    InvalidCredentials,
    #[error("UserNotFound")]
    UserNotFound,
    #[error("UserExists")]
    UserExists,
    #[error("LinkInvalid")]
    LinkInvalid,
    #[error("UnverifiedUser")]
    UnverifiedUser,
    #[error("InvalidOldPassword")]
    InvalidOldPassword,
    #[error("EmailParsingError")]
    EmailParsingError,
    #[error("NameParsingError")]
    NameParsingError,
    #[error("PasswordParsingError")]
    PasswordParsingError,
    #[error("UserAlreadyVerified")]
    UserAlreadyVerified,
    #[error("CompanyNameParsingError")]
    CompanyNameParsingError,
    #[error("MerchantAccountCreationError: {0}")]
    MerchantAccountCreationError(String),
    #[error("InvalidEmailError")]
    InvalidEmailError,
    #[error("DuplicateOrganizationId")]
    DuplicateOrganizationId,
    #[error("MerchantIdNotFound")]
    MerchantIdNotFound,
    #[error("MetadataAlreadySet")]
    MetadataAlreadySet,
    #[error("InvalidRoleId")]
    InvalidRoleId,
    #[error("InvalidRoleOperation")]
    InvalidRoleOperation,
    #[error("IpAddressParsingFailed")]
    IpAddressParsingFailed,
    #[error("InvalidMetadataRequest")]
    InvalidMetadataRequest,
    #[error("MerchantIdParsingError")]
    MerchantIdParsingError,
    #[error("ChangePasswordError")]
    ChangePasswordError,
    #[error("InvalidDeleteOperation")]
    InvalidDeleteOperation,
    #[error("MaxInvitationsError")]
    MaxInvitationsError,
    #[error("RoleNotFound")]
    RoleNotFound,
    #[error("InvalidRoleOperationWithMessage")]
    InvalidRoleOperationWithMessage(String),
    #[error("RoleNameParsingError")]
    RoleNameParsingError,
    #[error("RoleNameAlreadyExists")]
    RoleNameAlreadyExists,
    #[error("TotpNotSetup")]
    TotpNotSetup,
    #[error("InvalidTotp")]
    InvalidTotp,
    #[error("TotpRequired")]
    TotpRequired,
    #[error("InvalidRecoveryCode")]
    InvalidRecoveryCode,
    #[error("TwoFactorAuthRequired")]
    TwoFactorAuthRequired,
    #[error("TwoFactorAuthNotSetup")]
    TwoFactorAuthNotSetup,
    #[error("TOTP secret not found")]
    TotpSecretNotFound,
    #[error("User auth method already exists")]
    UserAuthMethodAlreadyExists,
    #[error("Invalid user auth method operation")]
    InvalidUserAuthMethodOperation,
    #[error("Auth config parsing error")]
    AuthConfigParsingError,
    #[error("Invalid SSO request")]
    SSOFailed,
    #[error("profile_id missing in JWT")]
    JwtProfileIdMissing,
    #[error("Maximum attempts reached for TOTP")]
    MaxTotpAttemptsReached,
    #[error("Maximum attempts reached for Recovery Code")]
    MaxRecoveryCodeAttemptsReached,
    #[error("Forbidden tenant id")]
    ForbiddenTenantId,
    #[error("Error Uploading file to Theme Storage")]
    ErrorUploadingFile,
    #[error("Error Retrieving file from Theme Storage")]
    ErrorRetrievingFile,
    #[error("Theme not found")]
    ThemeNotFound,
    #[error("Theme with lineage already exists")]
    ThemeAlreadyExists,
    #[error("Invalid field: {0} in lineage")]
    InvalidThemeLineage(String),
    #[error("Missing required field: email_config")]
    MissingEmailConfig,
    #[error("Invalid Auth Method Operation: {0}")]
    InvalidAuthMethodOperationWithMessage(String),
    #[error("Invalid Clone Connector Operation: {0}")]
    InvalidCloneConnectorOperation(String),
    #[error("Error cloning connector: {0}")]
    ErrorCloningConnector(String),
}
impl common_utils::errors::ErrorSwitch<api_models::errors::types::ApiErrorResponse> for UserErrors {
    /// Maps each user error to an HTTP response class and a stable sub-code
    /// under the "UR" namespace (internal errors use "HE" 0).
    ///
    /// NOTE(review): the numbering has gaps (10, 12, 13, 17, 20, 25, 27, ...)
    /// — presumably codes of retired variants; do not reuse them. TODO confirm.
    fn switch(&self) -> api_models::errors::types::ApiErrorResponse {
        use api_models::errors::types::{ApiError, ApiErrorResponse as AER};
        let sub_code = "UR";
        match self {
            Self::InternalServerError => {
                AER::InternalServerError(ApiError::new("HE", 0, self.get_error_message(), None))
            }
            Self::InvalidCredentials => {
                AER::Unauthorized(ApiError::new(sub_code, 1, self.get_error_message(), None))
            }
            Self::UserNotFound => {
                AER::Unauthorized(ApiError::new(sub_code, 2, self.get_error_message(), None))
            }
            Self::UserExists => {
                AER::BadRequest(ApiError::new(sub_code, 3, self.get_error_message(), None))
            }
            Self::LinkInvalid => {
                AER::Unauthorized(ApiError::new(sub_code, 4, self.get_error_message(), None))
            }
            Self::UnverifiedUser => {
                AER::Unauthorized(ApiError::new(sub_code, 5, self.get_error_message(), None))
            }
            Self::InvalidOldPassword => {
                AER::BadRequest(ApiError::new(sub_code, 6, self.get_error_message(), None))
            }
            Self::EmailParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 7, self.get_error_message(), None))
            }
            Self::NameParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 8, self.get_error_message(), None))
            }
            Self::PasswordParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 9, self.get_error_message(), None))
            }
            Self::UserAlreadyVerified => {
                AER::Unauthorized(ApiError::new(sub_code, 11, self.get_error_message(), None))
            }
            Self::CompanyNameParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 14, self.get_error_message(), None))
            }
            Self::MerchantAccountCreationError(_) => AER::InternalServerError(ApiError::new(
                sub_code,
                15,
                self.get_error_message(),
                None,
            )),
            Self::InvalidEmailError => {
                AER::BadRequest(ApiError::new(sub_code, 16, self.get_error_message(), None))
            }
            Self::MerchantIdNotFound => {
                AER::BadRequest(ApiError::new(sub_code, 18, self.get_error_message(), None))
            }
            Self::MetadataAlreadySet => {
                AER::BadRequest(ApiError::new(sub_code, 19, self.get_error_message(), None))
            }
            Self::DuplicateOrganizationId => AER::InternalServerError(ApiError::new(
                sub_code,
                21,
                self.get_error_message(),
                None,
            )),
            Self::InvalidRoleId => {
                AER::BadRequest(ApiError::new(sub_code, 22, self.get_error_message(), None))
            }
            Self::InvalidRoleOperation => {
                AER::BadRequest(ApiError::new(sub_code, 23, self.get_error_message(), None))
            }
            Self::IpAddressParsingFailed => AER::InternalServerError(ApiError::new(
                sub_code,
                24,
                self.get_error_message(),
                None,
            )),
            Self::InvalidMetadataRequest => {
                AER::BadRequest(ApiError::new(sub_code, 26, self.get_error_message(), None))
            }
            Self::MerchantIdParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 28, self.get_error_message(), None))
            }
            Self::ChangePasswordError => {
                AER::BadRequest(ApiError::new(sub_code, 29, self.get_error_message(), None))
            }
            Self::InvalidDeleteOperation => {
                AER::BadRequest(ApiError::new(sub_code, 30, self.get_error_message(), None))
            }
            Self::MaxInvitationsError => {
                AER::BadRequest(ApiError::new(sub_code, 31, self.get_error_message(), None))
            }
            Self::RoleNotFound => {
                AER::BadRequest(ApiError::new(sub_code, 32, self.get_error_message(), None))
            }
            Self::InvalidRoleOperationWithMessage(_) => {
                AER::BadRequest(ApiError::new(sub_code, 33, self.get_error_message(), None))
            }
            Self::RoleNameParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 34, self.get_error_message(), None))
            }
            Self::RoleNameAlreadyExists => {
                AER::BadRequest(ApiError::new(sub_code, 35, self.get_error_message(), None))
            }
            Self::TotpNotSetup => {
                AER::BadRequest(ApiError::new(sub_code, 36, self.get_error_message(), None))
            }
            Self::InvalidTotp => {
                AER::BadRequest(ApiError::new(sub_code, 37, self.get_error_message(), None))
            }
            Self::TotpRequired => {
                AER::BadRequest(ApiError::new(sub_code, 38, self.get_error_message(), None))
            }
            Self::InvalidRecoveryCode => {
                AER::BadRequest(ApiError::new(sub_code, 39, self.get_error_message(), None))
            }
            Self::TwoFactorAuthRequired => {
                AER::BadRequest(ApiError::new(sub_code, 40, self.get_error_message(), None))
            }
            Self::TwoFactorAuthNotSetup => {
                AER::BadRequest(ApiError::new(sub_code, 41, self.get_error_message(), None))
            }
            Self::TotpSecretNotFound => {
                AER::BadRequest(ApiError::new(sub_code, 42, self.get_error_message(), None))
            }
            Self::UserAuthMethodAlreadyExists => {
                AER::BadRequest(ApiError::new(sub_code, 43, self.get_error_message(), None))
            }
            Self::InvalidUserAuthMethodOperation => {
                AER::BadRequest(ApiError::new(sub_code, 44, self.get_error_message(), None))
            }
            Self::AuthConfigParsingError => {
                AER::BadRequest(ApiError::new(sub_code, 45, self.get_error_message(), None))
            }
            Self::SSOFailed => {
                AER::BadRequest(ApiError::new(sub_code, 46, self.get_error_message(), None))
            }
            Self::JwtProfileIdMissing => {
                AER::Unauthorized(ApiError::new(sub_code, 47, self.get_error_message(), None))
            }
            Self::MaxTotpAttemptsReached => {
                AER::BadRequest(ApiError::new(sub_code, 48, self.get_error_message(), None))
            }
            Self::MaxRecoveryCodeAttemptsReached => {
                AER::BadRequest(ApiError::new(sub_code, 49, self.get_error_message(), None))
            }
            Self::ForbiddenTenantId => {
                AER::BadRequest(ApiError::new(sub_code, 50, self.get_error_message(), None))
            }
            Self::ErrorUploadingFile => AER::InternalServerError(ApiError::new(
                sub_code,
                51,
                self.get_error_message(),
                None,
            )),
            Self::ErrorRetrievingFile => AER::InternalServerError(ApiError::new(
                sub_code,
                52,
                self.get_error_message(),
                None,
            )),
            Self::ThemeNotFound => {
                AER::NotFound(ApiError::new(sub_code, 53, self.get_error_message(), None))
            }
            Self::ThemeAlreadyExists => {
                AER::BadRequest(ApiError::new(sub_code, 54, self.get_error_message(), None))
            }
            Self::InvalidThemeLineage(_) => {
                AER::BadRequest(ApiError::new(sub_code, 55, self.get_error_message(), None))
            }
            Self::MissingEmailConfig => {
                AER::BadRequest(ApiError::new(sub_code, 56, self.get_error_message(), None))
            }
            Self::InvalidAuthMethodOperationWithMessage(_) => {
                AER::BadRequest(ApiError::new(sub_code, 57, self.get_error_message(), None))
            }
            Self::InvalidCloneConnectorOperation(_) => {
                AER::BadRequest(ApiError::new(sub_code, 58, self.get_error_message(), None))
            }
            Self::ErrorCloningConnector(_) => AER::InternalServerError(ApiError::new(
                sub_code,
                59,
                self.get_error_message(),
                None,
            )),
        }
    }
}
impl UserErrors {
    /// Returns the user-facing message for each error variant. These strings
    /// are embedded in API error responses, so changing one is an observable
    /// API change.
    pub fn get_error_message(&self) -> String {
        match self {
            Self::InternalServerError => "Something went wrong".to_string(),
            Self::InvalidCredentials => "Incorrect email or password".to_string(),
            Self::UserNotFound => "Email doesn’t exist. Register".to_string(),
            Self::UserExists => "An account already exists with this email".to_string(),
            Self::LinkInvalid => "Invalid or expired link".to_string(),
            Self::UnverifiedUser => "Kindly verify your account".to_string(),
            Self::InvalidOldPassword => {
                "Old password incorrect. Please enter the correct password".to_string()
            }
            Self::EmailParsingError => "Invalid Email".to_string(),
            Self::NameParsingError => "Invalid Name".to_string(),
            Self::PasswordParsingError => "Invalid Password".to_string(),
            Self::UserAlreadyVerified => "User already verified".to_string(),
            Self::CompanyNameParsingError => "Invalid Company Name".to_string(),
            Self::MerchantAccountCreationError(error_message) => error_message.to_string(),
            Self::InvalidEmailError => "Invalid Email".to_string(),
            Self::MerchantIdNotFound => "Invalid Merchant ID".to_string(),
            Self::MetadataAlreadySet => "Metadata already set".to_string(),
            Self::DuplicateOrganizationId => {
                "An Organization with the id already exists".to_string()
            }
            Self::InvalidRoleId => "Invalid Role ID".to_string(),
            Self::InvalidRoleOperation => "User Role Operation Not Supported".to_string(),
            // Deliberately opaque: parsing failures of client IPs are not
            // actionable by the end user.
            Self::IpAddressParsingFailed => "Something went wrong".to_string(),
            Self::InvalidMetadataRequest => "Invalid Metadata Request".to_string(),
            Self::MerchantIdParsingError => "Invalid Merchant Id".to_string(),
            Self::ChangePasswordError => "Old and new password cannot be same".to_string(),
            Self::InvalidDeleteOperation => "Delete Operation Not Supported".to_string(),
            Self::MaxInvitationsError => "Maximum invite count per request exceeded".to_string(),
            Self::RoleNotFound => "Role Not Found".to_string(),
            Self::InvalidRoleOperationWithMessage(error_message) => error_message.to_string(),
            Self::RoleNameParsingError => "Invalid Role Name".to_string(),
            Self::RoleNameAlreadyExists => "Role name already exists".to_string(),
            Self::TotpNotSetup => "TOTP not setup".to_string(),
            Self::InvalidTotp => "Invalid TOTP".to_string(),
            Self::TotpRequired => "TOTP required".to_string(),
            Self::InvalidRecoveryCode => "Invalid Recovery Code".to_string(),
            Self::MaxTotpAttemptsReached => "Maximum attempts reached for TOTP".to_string(),
            Self::MaxRecoveryCodeAttemptsReached => {
                "Maximum attempts reached for Recovery Code".to_string()
            }
            Self::TwoFactorAuthRequired => "Two factor auth required".to_string(),
            Self::TwoFactorAuthNotSetup => "Two factor auth not setup".to_string(),
            Self::TotpSecretNotFound => "TOTP secret not found".to_string(),
            Self::UserAuthMethodAlreadyExists => "User auth method already exists".to_string(),
            Self::InvalidUserAuthMethodOperation => {
                "Invalid user auth method operation".to_string()
            }
            Self::AuthConfigParsingError => "Auth config parsing error".to_string(),
            Self::SSOFailed => "Invalid SSO request".to_string(),
            Self::JwtProfileIdMissing => "profile_id missing in JWT".to_string(),
            Self::ForbiddenTenantId => "Forbidden tenant id".to_string(),
            Self::ErrorUploadingFile => "Error Uploading file to Theme Storage".to_string(),
            Self::ErrorRetrievingFile => "Error Retrieving file from Theme Storage".to_string(),
            Self::ThemeNotFound => "Theme not found".to_string(),
            Self::ThemeAlreadyExists => "Theme with lineage already exists".to_string(),
            Self::InvalidThemeLineage(field_name) => {
                format!("Invalid field: {field_name} in lineage")
            }
            Self::MissingEmailConfig => "Missing required field: email_config".to_string(),
            Self::InvalidAuthMethodOperationWithMessage(operation) => {
                format!("Invalid Auth Method Operation: {operation}")
            }
            Self::InvalidCloneConnectorOperation(operation) => {
                format!("Invalid Clone Connector Operation: {operation}")
            }
            Self::ErrorCloningConnector(error_message) => {
                format!("Error cloning connector: {error_message}")
            }
        }
    }
}
// File: crates/router/src/core/errors/error_handlers.rs
use actix_web::{body, dev::ServiceResponse, middleware::ErrorHandlerResponse, ResponseError};
use http::StatusCode;
use super::ApiErrorResponse;
use crate::logger;
/// Actix middleware hook that rewrites framework-generated error responses
/// (404, 405, ...) into this service's JSON error format, while leaving
/// handler-produced errors untouched.
pub fn custom_error_handlers<B: body::MessageBody + 'static>(
    res: ServiceResponse<B>,
) -> actix_web::Result<ErrorHandlerResponse<B>> {
    // Choose the fallback body from the status being rewritten.
    let fallback = if res.status() == StatusCode::NOT_FOUND {
        ApiErrorResponse::InvalidRequestUrl
    } else if res.status() == StatusCode::METHOD_NOT_ALLOWED {
        ApiErrorResponse::InvalidHttpMethod
    } else {
        ApiErrorResponse::InternalServerError
    };
    let (request, response) = res.into_parts();
    logger::warn!(error_response = ?response);
    // A response that already carries an error came from a handler — keep it;
    // otherwise substitute our structured fallback.
    let body = if response.error().is_some() {
        response.map_into_boxed_body()
    } else {
        fallback.error_response()
    };
    let rebuilt = ServiceResponse::new(request, body)
        .map_into_boxed_body()
        .map_into_right_body();
    Ok(ErrorHandlerResponse::Response(rebuilt))
}
// Can be used as the `.default_service` of a `web::resource` to customize the
// default method-not-found (405) response, e.g.:
// use actix_web::dev::ServiceRequest
// pub async fn default_service_405<E>(req: ServiceRequest) -> Result<ServiceResponse, E> {
// Ok(req.into_response(ApiErrorResponse::InvalidHttpMethod.error_response()))
// }
// File: crates/router/src/core/errors/utils.rs
use common_utils::errors::CustomResult;
use crate::{core::errors, logger};
/// Convenience conversions from storage-layer errors to API-facing error
/// types. `#[track_caller]` keeps the reported location at the call site
/// rather than inside these helpers.
pub trait StorageErrorExt<T, E> {
    /// Maps a "not found" storage error to `not_found_response`; other
    /// storage errors become the target type's internal-server-error.
    #[track_caller]
    fn to_not_found_response(self, not_found_response: E) -> error_stack::Result<T, E>;
    /// Maps a unique-constraint violation to `duplicate_response`; other
    /// storage errors become the target type's internal-server-error.
    #[track_caller]
    fn to_duplicate_response(self, duplicate_response: E) -> error_stack::Result<T, E>;
}
impl<T> StorageErrorExt<T, errors::CustomersErrorResponse>
    for error_stack::Result<T, errors::StorageError>
{
    /// Converts storage errors into customers-API errors, substituting the
    /// caller-supplied response for any DB "not found".
    #[track_caller]
    fn to_not_found_response(
        self,
        not_found_response: errors::CustomersErrorResponse,
    ) -> error_stack::Result<T, errors::CustomersErrorResponse> {
        self.map_err(|err| match err.current_context() {
            error if error.is_db_not_found() => err.change_context(not_found_response),
            errors::StorageError::CustomerRedacted => {
                err.change_context(errors::CustomersErrorResponse::CustomerRedacted)
            }
            _ => err.change_context(errors::CustomersErrorResponse::InternalServerError),
        })
    }

    /// Converts a unique-violation storage error into the caller-supplied
    /// duplicate response; everything else becomes an internal error.
    // Fixed: `#[track_caller]` was missing here although the trait declares
    // it and the `ApiErrorResponse` impl has it — without it on this impl
    // item, error reports pointed inside this helper instead of the caller.
    #[track_caller]
    fn to_duplicate_response(
        self,
        duplicate_response: errors::CustomersErrorResponse,
    ) -> error_stack::Result<T, errors::CustomersErrorResponse> {
        self.map_err(|err| {
            if err.current_context().is_db_unique_violation() {
                err.change_context(duplicate_response)
            } else {
                err.change_context(errors::CustomersErrorResponse::InternalServerError)
            }
        })
    }
}
impl<T> StorageErrorExt<T, errors::ApiErrorResponse>
    for error_stack::Result<T, errors::StorageError>
{
    /// Converts storage errors into general API errors, substituting the
    /// caller-supplied response for `ValueNotFound`.
    #[track_caller]
    fn to_not_found_response(
        self,
        not_found_response: errors::ApiErrorResponse,
    ) -> error_stack::Result<T, errors::ApiErrorResponse> {
        // Match directly on the current context and change context per arm,
        // mirroring the `CustomersErrorResponse` impl above.
        self.map_err(|err| match err.current_context() {
            errors::StorageError::ValueNotFound(_) => err.change_context(not_found_response),
            errors::StorageError::CustomerRedacted => {
                err.change_context(errors::ApiErrorResponse::CustomerRedacted)
            }
            _ => err.change_context(errors::ApiErrorResponse::InternalServerError),
        })
    }

    /// Converts a duplicate-value storage error into the caller-supplied
    /// duplicate response; everything else becomes an internal error.
    #[track_caller]
    fn to_duplicate_response(
        self,
        duplicate_response: errors::ApiErrorResponse,
    ) -> error_stack::Result<T, errors::ApiErrorResponse> {
        self.map_err(|err| match err.current_context() {
            errors::StorageError::DuplicateValue { .. } => {
                err.change_context(duplicate_response)
            }
            _ => err.change_context(errors::ApiErrorResponse::InternalServerError),
        })
    }
}
/// Extension trait converting connector-layer failures
/// (`errors::ConnectorError`) into flow-specific `ApiErrorResponse`s.
pub trait ConnectorErrorExt<T> {
    /// Maps connector errors raised during a refund flow.
    #[track_caller]
    fn to_refund_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised during a payment authorization flow.
    #[track_caller]
    fn to_payment_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised while setting up a mandate.
    #[track_caller]
    fn to_setup_mandate_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised during a dispute flow.
    #[track_caller]
    fn to_dispute_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised while handling dispute evidence files.
    #[track_caller]
    fn to_files_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised during a payout flow.
    #[cfg(feature = "payouts")]
    #[track_caller]
    fn to_payout_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    /// Maps connector errors raised while talking to an external vault.
    #[track_caller]
    fn to_vault_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse>;
    // Validates if the result, is Ok(..) or WebhookEventTypeNotFound all the other error variants
    // are cascaded while these two event types are handled via `Option`
    #[track_caller]
    fn allow_webhook_event_type_not_found(
        self,
        enabled: bool,
    ) -> error_stack::Result<Option<T>, errors::ConnectorError>;
}
impl<T> ConnectorErrorExt<T> for error_stack::Result<T, errors::ConnectorError> {
/// Maps connector errors from a refund flow onto `ApiErrorResponse`.
///
/// A `ProcessingStepFailed` carrying a response body is surfaced as
/// `RefundFailed` with the connector's JSON payload attached (when it parses);
/// the remaining variants are matched exhaustively so adding a new
/// `ConnectorError` variant forces an explicit mapping decision here.
fn to_refund_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| match err.current_context() {
        errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
            // Try to decode the connector's raw response so the API error can
            // carry the upstream payload; decode failures are logged and the
            // payload is dropped rather than failing the mapping.
            let response_str = std::str::from_utf8(bytes);
            let data = match response_str {
                Ok(s) => serde_json::from_str(s)
                    .map_err(
                        |error| logger::error!(%error,"Failed to convert response to JSON"),
                    )
                    .ok(),
                Err(error) => {
                    logger::error!(%error,"Failed to convert response to UTF8 string");
                    None
                }
            };
            err.change_context(errors::ApiErrorResponse::RefundFailed { data })
        }
        errors::ConnectorError::NotImplemented(reason) => {
            errors::ApiErrorResponse::NotImplemented {
                message: errors::NotImplementedMessage::Reason(reason.to_string()),
            }
            .into()
        }
        errors::ConnectorError::NotSupported { message, connector } => {
            errors::ApiErrorResponse::NotSupported {
                message: format!("{message} is not supported by {connector}"),
            }
            .into()
        }
        errors::ConnectorError::CaptureMethodNotSupported => {
            errors::ApiErrorResponse::NotSupported {
                message: "Capture Method Not Supported".to_owned(),
            }
            .into()
        }
        // Every other connector failure is reported as a refund failure with
        // no connector-provided payload.
        errors::ConnectorError::FailedToObtainIntegrationUrl
        | errors::ConnectorError::RequestEncodingFailed
        | errors::ConnectorError::RequestEncodingFailedWithReason(_)
        | errors::ConnectorError::ParsingFailed
        | errors::ConnectorError::ResponseDeserializationFailed
        | errors::ConnectorError::UnexpectedResponseError(_)
        | errors::ConnectorError::RoutingRulesParsingError
        | errors::ConnectorError::FailedToObtainPreferredConnector
        | errors::ConnectorError::ProcessingStepFailed(_)
        | errors::ConnectorError::InvalidConnectorName
        | errors::ConnectorError::InvalidWallet
        | errors::ConnectorError::ResponseHandlingFailed
        | errors::ConnectorError::MissingRequiredField { .. }
        | errors::ConnectorError::MissingRequiredFields { .. }
        | errors::ConnectorError::FailedToObtainAuthType
        | errors::ConnectorError::FailedToObtainCertificate
        | errors::ConnectorError::NoConnectorMetaData
        | errors::ConnectorError::NoConnectorWalletDetails
        | errors::ConnectorError::FailedToObtainCertificateKey
        | errors::ConnectorError::MaxFieldLengthViolated { .. }
        | errors::ConnectorError::FlowNotSupported { .. }
        | errors::ConnectorError::MissingConnectorMandateID
        | errors::ConnectorError::MissingConnectorMandateMetadata
        | errors::ConnectorError::MissingConnectorTransactionID
        | errors::ConnectorError::MissingConnectorRefundID
        | errors::ConnectorError::MissingApplePayTokenData
        | errors::ConnectorError::WebhooksNotImplemented
        | errors::ConnectorError::WebhookBodyDecodingFailed
        | errors::ConnectorError::WebhookSignatureNotFound
        | errors::ConnectorError::WebhookSourceVerificationFailed
        | errors::ConnectorError::WebhookVerificationSecretNotFound
        | errors::ConnectorError::WebhookVerificationSecretInvalid
        | errors::ConnectorError::WebhookReferenceIdNotFound
        | errors::ConnectorError::WebhookEventTypeNotFound
        | errors::ConnectorError::WebhookResourceObjectNotFound
        | errors::ConnectorError::WebhookResponseEncodingFailed
        | errors::ConnectorError::InvalidDateFormat
        | errors::ConnectorError::DateFormattingFailed
        | errors::ConnectorError::InvalidDataFormat { .. }
        | errors::ConnectorError::MismatchedPaymentData
        | errors::ConnectorError::MandatePaymentDataMismatch { .. }
        | errors::ConnectorError::InvalidWalletToken { .. }
        | errors::ConnectorError::MissingConnectorRelatedTransactionID { .. }
        | errors::ConnectorError::FileValidationFailed { .. }
        | errors::ConnectorError::MissingConnectorRedirectionPayload { .. }
        | errors::ConnectorError::FailedAtConnector { .. }
        | errors::ConnectorError::MissingPaymentMethodType
        | errors::ConnectorError::InSufficientBalanceInPaymentMethod
        | errors::ConnectorError::RequestTimeoutReceived
        | errors::ConnectorError::CurrencyNotSupported { .. }
        | errors::ConnectorError::InvalidConnectorConfig { .. }
        | errors::ConnectorError::AmountConversionFailed
        | errors::ConnectorError::GenericError { .. } => {
            err.change_context(errors::ApiErrorResponse::RefundFailed { data: None })
        }
    })
}
/// Maps connector errors from a payment authorization flow onto
/// `ApiErrorResponse`.
///
/// Variants that carry user-actionable information (missing fields, data
/// mismatches, unsupported flows, bad connector configuration, …) are mapped
/// to dedicated API errors; everything else collapses to
/// `InternalServerError`.
fn to_payment_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let error = match err.current_context() {
            errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
                // Attach the connector's raw response (as JSON) to the
                // authorization failure when it can be decoded.
                let response_str = std::str::from_utf8(bytes);
                let data = match response_str {
                    Ok(s) => serde_json::from_str(s)
                        .map_err(
                            |error| logger::error!(%error,"Failed to convert response to JSON"),
                        )
                        .ok(),
                    Err(error) => {
                        logger::error!(%error,"Failed to convert response to UTF8 string");
                        None
                    }
                };
                errors::ApiErrorResponse::PaymentAuthorizationFailed { data }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::MissingRequiredFields { field_names } => {
                errors::ApiErrorResponse::MissingRequiredFields { field_names: field_names.to_vec() }
            }
            errors::ConnectorError::NotImplemented(reason) => {
                errors::ApiErrorResponse::NotImplemented {
                    message: errors::NotImplementedMessage::Reason(
                        reason.to_string(),
                    ),
                }
            }
            errors::ConnectorError::MismatchedPaymentData => {
                errors::ApiErrorResponse::InvalidDataValue {
                    field_name:
                        "payment_method_data, payment_method_type and payment_experience does not match",
                }
            },
            errors::ConnectorError::MandatePaymentDataMismatch {fields}=> {
                errors::ApiErrorResponse::MandatePaymentDataMismatch {
                    fields: fields.to_owned(),
                }
            },
            errors::ConnectorError::NotSupported { message, connector } => {
                errors::ApiErrorResponse::NotSupported { message: format!("{message} is not supported by {connector}") }
            },
            errors::ConnectorError::FlowNotSupported{ flow, connector } => {
                errors::ApiErrorResponse::FlowNotSupported { flow: flow.to_owned(), connector: connector.to_owned() }
            },
            errors::ConnectorError::MaxFieldLengthViolated{ connector, field_name, max_length, received_length} => {
                errors::ApiErrorResponse::MaxFieldLengthViolated { connector: connector.to_string(), field_name: field_name.to_string(), max_length: *max_length, received_length: *received_length }
            },
            errors::ConnectorError::InvalidDataFormat { field_name } => {
                errors::ApiErrorResponse::InvalidDataValue { field_name }
            },
            errors::ConnectorError::CaptureMethodNotSupported => {
                errors::ApiErrorResponse::NotSupported {
                    message: "Capture Method Not Supported".to_owned(),
                }
            }
            errors::ConnectorError::InvalidWalletToken {wallet_name} => errors::ApiErrorResponse::InvalidWalletToken {wallet_name: wallet_name.to_string()},
            errors::ConnectorError::CurrencyNotSupported { message, connector} => errors::ApiErrorResponse::CurrencyNotSupported { message: format!("Credentials for the currency {message} are not configured with the connector {connector}/hyperswitch") },
            errors::ConnectorError::FailedToObtainAuthType => errors::ApiErrorResponse::InvalidConnectorConfiguration {config: "connector_account_details".to_string()},
            errors::ConnectorError::InvalidConnectorConfig { config } => errors::ApiErrorResponse::InvalidConnectorConfiguration { config: config.to_string() },
            // Remaining variants carry no caller-actionable detail; report a
            // generic internal server error.
            errors::ConnectorError::FailedToObtainIntegrationUrl |
            errors::ConnectorError::RequestEncodingFailed |
            errors::ConnectorError::RequestEncodingFailedWithReason(_) |
            errors::ConnectorError::ParsingFailed |
            errors::ConnectorError::ResponseDeserializationFailed |
            errors::ConnectorError::UnexpectedResponseError(_) |
            errors::ConnectorError::RoutingRulesParsingError |
            errors::ConnectorError::FailedToObtainPreferredConnector |
            errors::ConnectorError::InvalidConnectorName |
            errors::ConnectorError::InvalidWallet |
            errors::ConnectorError::ResponseHandlingFailed |
            errors::ConnectorError::FailedToObtainCertificate |
            errors::ConnectorError::NoConnectorMetaData | errors::ConnectorError::NoConnectorWalletDetails |
            errors::ConnectorError::FailedToObtainCertificateKey |
            errors::ConnectorError::MissingConnectorMandateID |
            errors::ConnectorError::MissingConnectorMandateMetadata |
            errors::ConnectorError::MissingConnectorTransactionID |
            errors::ConnectorError::MissingConnectorRefundID |
            errors::ConnectorError::MissingApplePayTokenData |
            errors::ConnectorError::WebhooksNotImplemented |
            errors::ConnectorError::WebhookBodyDecodingFailed |
            errors::ConnectorError::WebhookSignatureNotFound |
            errors::ConnectorError::WebhookSourceVerificationFailed |
            errors::ConnectorError::WebhookVerificationSecretNotFound |
            errors::ConnectorError::WebhookVerificationSecretInvalid |
            errors::ConnectorError::WebhookReferenceIdNotFound |
            errors::ConnectorError::WebhookEventTypeNotFound |
            errors::ConnectorError::WebhookResourceObjectNotFound |
            errors::ConnectorError::WebhookResponseEncodingFailed |
            errors::ConnectorError::InvalidDateFormat |
            errors::ConnectorError::DateFormattingFailed |
            errors::ConnectorError::MissingConnectorRelatedTransactionID { .. } |
            errors::ConnectorError::FileValidationFailed { .. } |
            errors::ConnectorError::MissingConnectorRedirectionPayload { .. } |
            errors::ConnectorError::FailedAtConnector { .. } |
            errors::ConnectorError::MissingPaymentMethodType |
            errors::ConnectorError::InSufficientBalanceInPaymentMethod |
            errors::ConnectorError::RequestTimeoutReceived |
            errors::ConnectorError::ProcessingStepFailed(None)|
            errors::ConnectorError::GenericError {..} |
            errors::ConnectorError::AmountConversionFailed => errors::ApiErrorResponse::InternalServerError
        };
        err.change_context(error)
    })
}
/// Maps connector errors from a mandate-setup flow onto `ApiErrorResponse`.
///
/// Structurally similar to `to_payment_failed_response`: a processing failure
/// with a response body becomes `PaymentAuthorizationFailed` carrying the
/// connector payload; the catch-all arm also logs the failure before mapping.
fn to_setup_mandate_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let error = err.current_context();
        let data = match error {
            errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
                // Decode the connector's raw response to attach as error data;
                // decode failures are logged and the payload dropped.
                let response_str = std::str::from_utf8(bytes);
                let error_response = match response_str {
                    Ok(s) => serde_json::from_str(s)
                        .map_err(
                            |err| logger::error!(%err, "Failed to convert response to JSON"),
                        )
                        .ok(),
                    Err(err) => {
                        logger::error!(%err, "Failed to convert response to UTF8 string");
                        None
                    }
                };
                errors::ApiErrorResponse::PaymentAuthorizationFailed {
                    data: error_response,
                }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::FailedToObtainIntegrationUrl => {
                errors::ApiErrorResponse::InvalidConnectorConfiguration {
                    config: "connector_account_details".to_string(),
                }
            }
            errors::ConnectorError::InvalidConnectorConfig { config: field_name } => {
                errors::ApiErrorResponse::InvalidConnectorConfiguration {
                    config: field_name.to_string(),
                }
            }
            errors::ConnectorError::InvalidWalletToken { wallet_name } => {
                errors::ApiErrorResponse::InvalidWalletToken {
                    wallet_name: wallet_name.to_string(),
                }
            }
            errors::ConnectorError::CaptureMethodNotSupported => {
                errors::ApiErrorResponse::NotSupported {
                    message: "Capture Method Not Supported".to_owned(),
                }
            }
            // All remaining variants: log and report as authorization failure
            // without any connector payload.
            errors::ConnectorError::RequestEncodingFailed
            | errors::ConnectorError::RequestEncodingFailedWithReason(_)
            | errors::ConnectorError::ParsingFailed
            | errors::ConnectorError::ResponseDeserializationFailed
            | errors::ConnectorError::UnexpectedResponseError(_)
            | errors::ConnectorError::RoutingRulesParsingError
            | errors::ConnectorError::FailedToObtainPreferredConnector
            | errors::ConnectorError::InvalidConnectorName
            | errors::ConnectorError::InvalidWallet
            | errors::ConnectorError::ResponseHandlingFailed
            | errors::ConnectorError::MissingRequiredFields { .. }
            | errors::ConnectorError::FailedToObtainAuthType
            | errors::ConnectorError::FailedToObtainCertificate
            | errors::ConnectorError::NoConnectorMetaData
            | errors::ConnectorError::NoConnectorWalletDetails
            | errors::ConnectorError::FailedToObtainCertificateKey
            | errors::ConnectorError::NotImplemented(_)
            | errors::ConnectorError::NotSupported { .. }
            | errors::ConnectorError::MaxFieldLengthViolated { .. }
            | errors::ConnectorError::FlowNotSupported { .. }
            | errors::ConnectorError::MissingConnectorMandateID
            | errors::ConnectorError::MissingConnectorMandateMetadata
            | errors::ConnectorError::MissingConnectorTransactionID
            | errors::ConnectorError::MissingConnectorRefundID
            | errors::ConnectorError::MissingApplePayTokenData
            | errors::ConnectorError::WebhooksNotImplemented
            | errors::ConnectorError::WebhookBodyDecodingFailed
            | errors::ConnectorError::WebhookSignatureNotFound
            | errors::ConnectorError::WebhookSourceVerificationFailed
            | errors::ConnectorError::WebhookVerificationSecretNotFound
            | errors::ConnectorError::WebhookVerificationSecretInvalid
            | errors::ConnectorError::WebhookReferenceIdNotFound
            | errors::ConnectorError::WebhookEventTypeNotFound
            | errors::ConnectorError::WebhookResourceObjectNotFound
            | errors::ConnectorError::WebhookResponseEncodingFailed
            | errors::ConnectorError::InvalidDateFormat
            | errors::ConnectorError::DateFormattingFailed
            | errors::ConnectorError::InvalidDataFormat { .. }
            | errors::ConnectorError::MismatchedPaymentData
            | errors::ConnectorError::MandatePaymentDataMismatch { .. }
            | errors::ConnectorError::MissingConnectorRelatedTransactionID { .. }
            | errors::ConnectorError::FileValidationFailed { .. }
            | errors::ConnectorError::MissingConnectorRedirectionPayload { .. }
            | errors::ConnectorError::FailedAtConnector { .. }
            | errors::ConnectorError::MissingPaymentMethodType
            | errors::ConnectorError::InSufficientBalanceInPaymentMethod
            | errors::ConnectorError::RequestTimeoutReceived
            | errors::ConnectorError::CurrencyNotSupported { .. }
            | errors::ConnectorError::ProcessingStepFailed(None)
            | errors::ConnectorError::AmountConversionFailed
            | errors::ConnectorError::GenericError { .. } => {
                logger::error!(%error,"Setup Mandate flow failed");
                errors::ApiErrorResponse::PaymentAuthorizationFailed { data: None }
            }
        };
        err.change_context(data)
    })
}
/// Maps connector errors from a dispute flow onto `ApiErrorResponse`.
///
/// A processing failure with a body becomes `DisputeFailed` carrying the
/// connector's JSON payload; missing-field variants are surfaced directly and
/// everything else becomes `InternalServerError`.
fn to_dispute_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let error = match err.current_context() {
            errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
                // Decode the raw connector response for the error payload;
                // failures to decode are logged and ignored.
                let response_str = std::str::from_utf8(bytes);
                let data = match response_str {
                    Ok(s) => serde_json::from_str(s)
                        .map_err(
                            |error| logger::error!(%error,"Failed to convert response to JSON"),
                        )
                        .ok(),
                    Err(error) => {
                        logger::error!(%error,"Failed to convert response to UTF8 string");
                        None
                    }
                };
                errors::ApiErrorResponse::DisputeFailed { data }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::MissingRequiredFields { field_names } => {
                errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: field_names.to_vec(),
                }
            }
            _ => errors::ApiErrorResponse::InternalServerError,
        };
        err.change_context(error)
    })
}
/// Maps connector errors from the dispute-evidence files flow onto
/// `ApiErrorResponse`.
// NOTE(review): this method reports failures as `DisputeFailed`, identical to
// `to_dispute_failed_response` — presumably because file handling is part of
// the dispute flow, but it reads like a copy-paste; confirm a files-specific
// error variant isn't intended.
fn to_files_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let error = match err.current_context() {
            errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
                // Decode the raw connector response for the error payload;
                // failures to decode are logged and ignored.
                let response_str = std::str::from_utf8(bytes);
                let data = match response_str {
                    Ok(s) => serde_json::from_str(s)
                        .map_err(
                            |error| logger::error!(%error,"Failed to convert response to JSON"),
                        )
                        .ok(),
                    Err(error) => {
                        logger::error!(%error,"Failed to convert response to UTF8 string");
                        None
                    }
                };
                errors::ApiErrorResponse::DisputeFailed { data }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::MissingRequiredFields { field_names } => {
                errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: field_names.to_vec(),
                }
            }
            _ => errors::ApiErrorResponse::InternalServerError,
        };
        err.change_context(error)
    })
}
/// Maps connector errors from a payout flow onto `ApiErrorResponse`
/// (compiled only with the `payouts` feature).
#[cfg(feature = "payouts")]
fn to_payout_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let error = match err.current_context() {
            errors::ConnectorError::ProcessingStepFailed(Some(bytes)) => {
                // Decode the raw connector response for the error payload;
                // failures to decode are logged and ignored.
                let response_str = std::str::from_utf8(bytes);
                let data = match response_str {
                    Ok(s) => serde_json::from_str(s)
                        .map_err(
                            |error| logger::error!(%error,"Failed to convert response to JSON"),
                        )
                        .ok(),
                    Err(error) => {
                        logger::error!(%error,"Failed to convert response to UTF8 string");
                        None
                    }
                };
                errors::ApiErrorResponse::PayoutFailed { data }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::MissingRequiredFields { field_names } => {
                errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: field_names.to_vec(),
                }
            }
            errors::ConnectorError::NotSupported { message, connector } => {
                errors::ApiErrorResponse::NotSupported {
                    message: format!("{message} by {connector}"),
                }
            }
            errors::ConnectorError::NotImplemented(reason) => {
                errors::ApiErrorResponse::NotImplemented {
                    message: errors::NotImplementedMessage::Reason(reason.to_string()),
                }
            }
            errors::ConnectorError::InvalidConnectorConfig { config } => {
                errors::ApiErrorResponse::InvalidConnectorConfiguration {
                    config: config.to_string(),
                }
            }
            _ => errors::ApiErrorResponse::InternalServerError,
        };
        err.change_context(error)
    })
}
/// Maps connector errors from an external-vault flow onto `ApiErrorResponse`.
/// Any processing-step failure is reported as `ExternalVaultFailed`; variants
/// carrying caller-actionable detail map to their dedicated API errors.
fn to_vault_failed_response(self) -> error_stack::Result<T, errors::ApiErrorResponse> {
    self.map_err(|err| {
        let api_error = match err.current_context() {
            errors::ConnectorError::InvalidConnectorConfig { config } => {
                errors::ApiErrorResponse::InvalidConnectorConfiguration {
                    config: config.to_string(),
                }
            }
            errors::ConnectorError::MissingRequiredField { field_name } => {
                errors::ApiErrorResponse::MissingRequiredField { field_name }
            }
            errors::ConnectorError::MissingRequiredFields { field_names } => {
                errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: field_names.to_vec(),
                }
            }
            errors::ConnectorError::NotImplemented(reason) => {
                errors::ApiErrorResponse::NotImplemented {
                    message: errors::NotImplementedMessage::Reason(reason.to_string()),
                }
            }
            errors::ConnectorError::NotSupported { message, connector } => {
                errors::ApiErrorResponse::NotSupported {
                    message: format!("{message} by {connector}"),
                }
            }
            errors::ConnectorError::ProcessingStepFailed(_) => {
                errors::ApiErrorResponse::ExternalVaultFailed
            }
            _ => errors::ApiErrorResponse::InternalServerError,
        };
        err.change_context(api_error)
    })
}
/// Passes `Ok` values through as `Some`, and — when `enabled` — downgrades a
/// `WebhookEventTypeNotFound` error to `Ok(None)`. Every other error, and the
/// not-found error when the flag is off, is propagated unchanged.
fn allow_webhook_event_type_not_found(
    self,
    enabled: bool,
) -> CustomResult<Option<T>, errors::ConnectorError> {
    match self {
        Ok(event_type) => Ok(Some(event_type)),
        Err(error)
            if enabled
                && matches!(
                    error.current_context(),
                    errors::ConnectorError::WebhookEventTypeNotFound
                ) =>
        {
            Ok(None)
        }
        Err(error) => Err(error),
    }
}
}
/// Extension trait converting a Redis error report into a storage error
/// report, tagging it with the Redis `key` involved.
pub trait RedisErrorExt {
    #[track_caller]
    fn to_redis_failed_response(self, key: &str) -> error_stack::Report<errors::StorageError>;
}
impl RedisErrorExt for error_stack::Report<errors::RedisError> {
    /// Translates Redis failures into storage errors: a missing key becomes
    /// `ValueNotFound`, a failed `SETNX` becomes `DuplicateValue`, and any
    /// other Redis error is reported as a generic `KVError`.
    fn to_redis_failed_response(self, key: &str) -> error_stack::Report<errors::StorageError> {
        let storage_error = match self.current_context() {
            errors::RedisError::NotFound => {
                errors::StorageError::ValueNotFound(format!("Data does not exist for key {key}"))
            }
            errors::RedisError::SetNxFailed => errors::StorageError::DuplicateValue {
                entity: "redis",
                key: Some(key.to_string()),
            },
            _ => errors::StorageError::KVError,
        };
        self.change_context(storage_error)
    }
}
#[cfg(feature = "olap")]
impl<T> StorageErrorExt<T, errors::UserErrors> for error_stack::Result<T, errors::StorageError> {
    /// Maps DB not-found errors to `not_found_response`; everything else
    /// becomes a user-facing internal server error.
    #[track_caller]
    fn to_not_found_response(
        self,
        not_found_response: errors::UserErrors,
    ) -> error_stack::Result<T, errors::UserErrors> {
        self.map_err(|err| {
            let user_error = if err.current_context().is_db_not_found() {
                not_found_response
            } else {
                errors::UserErrors::InternalServerError
            };
            err.change_context(user_error)
        })
    }
    /// Maps DB unique-constraint violations to `duplicate_response`;
    /// everything else becomes a user-facing internal server error.
    #[track_caller]
    fn to_duplicate_response(
        self,
        duplicate_response: errors::UserErrors,
    ) -> error_stack::Result<T, errors::UserErrors> {
        self.map_err(|err| {
            let user_error = if err.current_context().is_db_unique_violation() {
                duplicate_response
            } else {
                errors::UserErrors::InternalServerError
            };
            err.change_context(user_error)
        })
    }
}
// File: crates/router/src/core/errors/chat.rs
/// Errors surfaced by the AI chat feature.
#[derive(Debug, thiserror::Error)]
pub enum ChatErrors {
    /// Catch-all for unexpected internal failures.
    #[error("User InternalServerError")]
    InternalServerError,
    /// The webhook URL configuration required by the chat flow is absent.
    #[error("Missing Config error")]
    MissingConfigError,
    /// The chat service's response could not be deserialized.
    #[error("Chat response deserialization failed")]
    ChatResponseDeserializationFailed,
    /// The caller is not permitted to access the chat resource.
    #[error("Unauthorized access")]
    UnauthorizedAccess,
}
/// Converts a [`ChatErrors`] variant into the API error representation using
/// the chat-specific "AI" sub-code (internal errors keep the generic "HE" code).
impl common_utils::errors::ErrorSwitch<api_models::errors::types::ApiErrorResponse> for ChatErrors {
    fn switch(&self) -> api_models::errors::types::ApiErrorResponse {
        use api_models::errors::types::{ApiError, ApiErrorResponse as AER};
        let sub_code = "AI";
        let message = self.get_error_message();
        match self {
            // Internal server errors deliberately use the shared "HE" sub-code.
            Self::InternalServerError => {
                AER::InternalServerError(ApiError::new("HE", 0, message, None))
            }
            Self::MissingConfigError => {
                AER::InternalServerError(ApiError::new(sub_code, 1, message, None))
            }
            Self::ChatResponseDeserializationFailed => {
                AER::BadRequest(ApiError::new(sub_code, 2, message, None))
            }
            Self::UnauthorizedAccess => {
                AER::Unauthorized(ApiError::new(sub_code, 3, message, None))
            }
        }
    }
}
impl ChatErrors {
    /// Returns the human-readable description used when rendering this error.
    pub fn get_error_message(&self) -> String {
        let message = match self {
            Self::InternalServerError => "Something went wrong",
            Self::MissingConfigError => "Missing webhook url",
            Self::ChatResponseDeserializationFailed => "Failed to parse chat response",
            Self::UnauthorizedAccess => "Not authorized to access the resource",
        };
        message.to_string()
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/errors/customers_error_response.rs",
"crates/router/src/core/errors/transformers.rs",
"crates/router/src/core/errors/user.rs",
"crates/router/src/core/errors/error_handlers.rs",
"crates/router/src/core/errors/utils.rs",
"crates/router/src/core/errors/chat.rs"
],
"module": "crates/router/src/core/errors",
"num_files": 6,
"token_count": 10797
}
|
module_-793738616337090594
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/payment_methods/tokenize
Files: 2
</path>
<module>
// File: crates/router/src/core/payment_methods/tokenize/card_executor.rs
use std::str::FromStr;
use ::payment_methods::{controller::PaymentMethodsController, core::migration};
use api_models::{enums as api_enums, payment_methods as payment_methods_api};
use common_utils::{
consts,
ext_traits::OptionExt,
generate_customer_id_of_default_length, id_type,
pii::Email,
type_name,
types::keymanager::{Identifier, KeyManagerState, ToEncryptable},
};
use error_stack::{report, ResultExt};
use hyperswitch_domain_models::type_encryption::{crypto_operation, CryptoOperation};
use masking::{ExposeInterface, PeekInterface, SwitchStrategy};
use router_env::logger;
use super::{
CardNetworkTokenizeExecutor, NetworkTokenizationBuilder, NetworkTokenizationProcess,
NetworkTokenizationResponse, State, StoreLockerResponse, TransitionTo,
};
use crate::{
core::payment_methods::{
cards::{add_card_to_hs_locker, PmCards},
transformers as pm_transformers,
},
errors::{self, RouterResult},
types::{api, domain},
utils,
};
// Available states for card tokenization
//
// Each unit struct below is a zero-sized marker used as the type-state
// parameter of `NetworkTokenizationBuilder`; the `TransitionTo` impls further
// down fix the legal order of transitions at compile time.
pub struct TokenizeWithCard;
pub struct CardRequestValidated;
pub struct CardDetailsAssigned;
pub struct CustomerAssigned;
pub struct CardTokenized;
pub struct CardStored;
pub struct CardTokenStored;
pub struct PaymentMethodCreated;
// Mark every stage as a valid builder state.
impl State for TokenizeWithCard {}
impl State for CustomerAssigned {}
impl State for CardRequestValidated {}
impl State for CardDetailsAssigned {}
impl State for CardTokenized {}
impl State for CardStored {}
impl State for CardTokenStored {}
impl State for PaymentMethodCreated {}
// State transitions for card tokenization
// (linear pipeline: validate -> card details -> customer -> tokenize ->
// store token -> create payment method)
impl TransitionTo<CardRequestValidated> for TokenizeWithCard {}
impl TransitionTo<CardDetailsAssigned> for CardRequestValidated {}
impl TransitionTo<CustomerAssigned> for CardDetailsAssigned {}
impl TransitionTo<CardTokenized> for CustomerAssigned {}
impl TransitionTo<CardTokenStored> for CardTokenized {}
impl TransitionTo<PaymentMethodCreated> for CardTokenStored {}
/// The default builder starts in the initial `TokenizeWithCard` state.
impl Default for NetworkTokenizationBuilder<'_, TokenizeWithCard> {
    fn default() -> Self {
        Self::new()
    }
}
impl<'a> NetworkTokenizationBuilder<'a, TokenizeWithCard> {
    /// Creates an empty builder in the initial state: no customer, card, token
    /// or response data yet, and `card_tokenized` unset.
    pub fn new() -> Self {
        Self {
            state: std::marker::PhantomData,
            customer: None,
            card: None,
            card_cvc: None,
            network_token: None,
            stored_card: None,
            stored_token: None,
            payment_method_response: None,
            card_tokenized: false,
            error_code: None,
            error_message: None,
        }
    }
    /// Advances to `CardRequestValidated` once the request has been validated;
    /// carries every accumulated field forward unchanged.
    pub fn set_validate_result(self) -> NetworkTokenizationBuilder<'a, CardRequestValidated> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardRequestValidated> {
    /// Builds `domain::CardDetail` from the tokenize request and, when
    /// available, card-info lookup data, then advances to
    /// `CardDetailsAssigned`.
    ///
    /// Precedence: when `optional_card_info` is present, its issuer/network/
    /// type/country values are used *even if they are `None`*; the
    /// request-supplied values are only a fallback when no card info exists.
    pub fn set_card_details(
        self,
        card_req: &'a domain::TokenizeCardRequest,
        optional_card_info: Option<diesel_models::CardInfo>,
    ) -> NetworkTokenizationBuilder<'a, CardDetailsAssigned> {
        let card = domain::CardDetail {
            card_number: card_req.raw_card_number.clone(),
            card_exp_month: card_req.card_expiry_month.clone(),
            card_exp_year: card_req.card_expiry_year.clone(),
            // Bank code only ever comes from card info, never from the request.
            bank_code: optional_card_info
                .as_ref()
                .and_then(|card_info| card_info.bank_code.clone()),
            nick_name: card_req.nick_name.clone(),
            card_holder_name: card_req.card_holder_name.clone(),
            card_issuer: optional_card_info
                .as_ref()
                .map_or(card_req.card_issuer.clone(), |card_info| {
                    card_info.card_issuer.clone()
                }),
            card_network: optional_card_info
                .as_ref()
                .map_or(card_req.card_network.clone(), |card_info| {
                    card_info.card_network.clone()
                }),
            card_type: optional_card_info.as_ref().map_or(
                card_req
                    .card_type
                    .as_ref()
                    .map(|card_type| card_type.to_string()),
                |card_info| card_info.card_type.clone(),
            ),
            card_issuing_country: optional_card_info
                .as_ref()
                .map_or(card_req.card_issuing_country.clone(), |card_info| {
                    card_info.card_issuing_country.clone()
                }),
            co_badged_card_data: None,
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            card: Some(card),
            card_cvc: card_req.card_cvc.clone(),
            customer: self.customer,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardDetailsAssigned> {
    /// Attaches the resolved customer and advances to `CustomerAssigned`;
    /// all other accumulated fields are carried forward unchanged.
    pub fn set_customer(
        self,
        customer: &'a api::CustomerDetails,
    ) -> NetworkTokenizationBuilder<'a, CustomerAssigned> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            customer: Some(customer),
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CustomerAssigned> {
    /// Returns clones of the card details and CVC gathered so far, for use by
    /// the tokenization call.
    pub fn get_optional_card_and_cvc(
        &self,
    ) -> (Option<domain::CardDetail>, Option<masking::Secret<String>>) {
        (self.card.clone(), self.card_cvc.clone())
    }
    /// Records the network-token payload (first element of the tokenization
    /// response) and advances to `CardTokenized`.
    pub fn set_token_details(
        self,
        network_token: &'a NetworkTokenizationResponse,
    ) -> NetworkTokenizationBuilder<'a, CardTokenized> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            network_token: Some(&network_token.0),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardTokenized> {
    /// Records the locker's store-card response and advances to `CardStored`.
    pub fn set_stored_card_response(
        self,
        store_card_response: &'a StoreLockerResponse,
    ) -> NetworkTokenizationBuilder<'a, CardStored> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            stored_card: Some(&store_card_response.store_card_resp),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardStored> {
    /// Records the locker's store-token response and advances to
    /// `CardTokenStored`. This is the point where `card_tokenized` flips to
    /// `true` — the token has been both obtained and persisted.
    pub fn set_stored_token_response(
        self,
        store_token_response: &'a StoreLockerResponse,
    ) -> NetworkTokenizationBuilder<'a, CardTokenStored> {
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            card_tokenized: true,
            stored_token: Some(&store_token_response.store_token_resp),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            payment_method_response: self.payment_method_response,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, CardTokenStored> {
    /// Assembles the `PaymentMethodResponse` for the freshly created payment
    /// method and advances the builder to its terminal `PaymentMethodCreated`
    /// state.
    pub fn set_payment_method_response(
        self,
        payment_method: &'a domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PaymentMethodCreated> {
        // Project the card held by the builder into the locker-style response
        // shape; the full PAN and CVC are never echoed back (only last-4/ISIN).
        let card_detail_from_locker = self.card.as_ref().map(|card| api::CardDetailFromLocker {
            scheme: None,
            issuer_country: card.card_issuing_country.clone(),
            last4_digits: Some(card.card_number.clone().get_last4()),
            card_number: None,
            // Fixed redundant `.clone().clone()` double clones on the expiry
            // fields — a single clone is sufficient.
            expiry_month: Some(card.card_exp_month.clone()),
            expiry_year: Some(card.card_exp_year.clone()),
            card_token: None,
            card_holder_name: card.card_holder_name.clone(),
            card_fingerprint: None,
            nick_name: card.nick_name.clone(),
            card_network: card.card_network.clone(),
            card_isin: Some(card.card_number.clone().get_card_isin()),
            card_issuer: card.card_issuer.clone(),
            card_type: card.card_type.clone(),
            saved_to_locker: true,
        });
        let payment_method_response = api::PaymentMethodResponse {
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            card: card_detail_from_locker,
            recurring_enabled: Some(true),
            installment_payment_enabled: Some(false),
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
            bank_transfer: None,
            payment_experience: None,
        };
        NetworkTokenizationBuilder {
            state: std::marker::PhantomData,
            payment_method_response: Some(payment_method_response),
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl NetworkTokenizationBuilder<'_, PaymentMethodCreated> {
    /// Terminal state of the card-tokenization flow: assembles the final
    /// `CardNetworkTokenizeResponse` from the accumulated builder fields.
    pub fn build(self) -> api::CardNetworkTokenizeResponse {
        let customer = self.customer.cloned();
        let error_code = self.error_code.cloned();
        let error_message = self.error_message.cloned();
        api::CardNetworkTokenizeResponse {
            customer,
            error_code,
            error_message,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            // Populated later by caller functions when batching API operations.
            tokenization_data: None,
        }
    }
}
// Specific executor for card tokenization
impl CardNetworkTokenizeExecutor<'_, domain::TokenizeCardRequest> {
    /// Validates the tokenize-card request and, when possible, fetches the
    /// referenced customer.
    ///
    /// Steps:
    /// 1. Validates the card expiry from the request data.
    /// 2. Requires `customer.customer_id` to be present.
    /// 3. Looks the customer up in storage. On success, returns
    ///    `Some(details)` if found, `None` if absent. If the lookup fails,
    ///    falls back to checking whether a new customer could be created
    ///    from the request (at least one of name/email/phone present);
    ///    otherwise errors with `MissingRequiredFields`.
    pub async fn validate_request_and_fetch_optional_customer(
        &self,
    ) -> RouterResult<Option<api::CustomerDetails>> {
        // Validate card's expiry
        migration::validate_card_expiry(&self.data.card_expiry_month, &self.data.card_expiry_year)?;
        // Validate customer ID
        let customer_id = self
            .customer
            .customer_id
            .as_ref()
            .get_required_value("customer_id")
            .change_context(errors::ApiErrorResponse::MissingRequiredField {
                field_name: "customer.customer_id",
            })?;
        // Fetch customer details if present
        let db = &*self.state.store;
        let key_manager_state: &KeyManagerState = &self.state.into();
        db.find_customer_optional_by_customer_id_merchant_id(
            key_manager_state,
            customer_id,
            self.merchant_account.get_id(),
            self.key_store,
            self.merchant_account.storage_scheme,
        )
        .await
        .inspect_err(|err| logger::info!("Error fetching customer: {:?}", err))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        // NOTE(review): `map_or`'s first argument is the Err fallback and is
        // evaluated eagerly even on Ok; it is cheap here (only `is_some`
        // checks), so this is acceptable.
        .map_or(
            // Validate if customer creation is feasible
            if self.customer.name.is_some()
                || self.customer.email.is_some()
                || self.customer.phone.is_some()
            {
                Ok(None)
            } else {
                Err(report!(errors::ApiErrorResponse::MissingRequiredFields {
                    field_names: vec!["customer.name", "customer.email", "customer.phone"],
                }))
            },
            // If found, send back CustomerDetails from DB
            |optional_customer| {
                Ok(optional_customer.map(|customer| api::CustomerDetails {
                    id: customer.customer_id.clone(),
                    name: customer.name.clone().map(|name| name.into_inner()),
                    email: customer.email.clone().map(Email::from),
                    phone: customer.phone.clone().map(|phone| phone.into_inner()),
                    phone_country_code: customer.phone_country_code.clone(),
                    tax_registration_id: customer
                        .tax_registration_id
                        .clone()
                        .map(|tax_registration_id| tax_registration_id.into_inner()),
                }))
            },
        )
    }
    /// Creates a new customer for the merchant from the request's customer
    /// details, encrypting PII (name/email/phone/tax id) before persisting.
    ///
    /// A fresh customer id is generated for the inserted record; the returned
    /// `CustomerDetails` echoes back the (unencrypted) request fields.
    pub async fn create_customer(&self) -> RouterResult<api::CustomerDetails> {
        let db = &*self.state.store;
        let customer_id = self
            .customer
            .customer_id
            .as_ref()
            .get_required_value("customer_id")
            .change_context(errors::ApiErrorResponse::MissingRequiredField {
                field_name: "customer_id",
            })?;
        let key_manager_state: &KeyManagerState = &self.state.into();
        // Batch-encrypt all PII fields in one key-manager round trip.
        let encrypted_data = crypto_operation(
            key_manager_state,
            type_name!(domain::Customer),
            CryptoOperation::BatchEncrypt(domain::FromRequestEncryptableCustomer::to_encryptable(
                domain::FromRequestEncryptableCustomer {
                    name: self.customer.name.clone(),
                    email: self
                        .customer
                        .email
                        .clone()
                        .map(|email| email.expose().switch_strategy()),
                    phone: self.customer.phone.clone(),
                    tax_registration_id: self.customer.tax_registration_id.clone(),
                },
            )),
            Identifier::Merchant(self.merchant_account.get_id().clone()),
            self.key_store.key.get_inner().peek(),
        )
        .await
        .inspect_err(|err| logger::info!("Error encrypting customer: {:?}", err))
        .and_then(|val| val.try_into_batchoperation())
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to encrypt customer")?;
        let encryptable_customer =
            domain::FromRequestEncryptableCustomer::from_encryptable(encrypted_data)
                .change_context(errors::ApiErrorResponse::InternalServerError)
                .attach_printable("Failed to form EncryptableCustomer")?;
        // The record is inserted under a freshly generated id, not the id
        // supplied in the request.
        let new_customer_id = generate_customer_id_of_default_length();
        let domain_customer = domain::Customer {
            customer_id: new_customer_id.clone(),
            merchant_id: self.merchant_account.get_id().clone(),
            name: encryptable_customer.name,
            email: encryptable_customer.email.map(|email| {
                utils::Encryptable::new(
                    email.clone().into_inner().switch_strategy(),
                    email.into_encrypted(),
                )
            }),
            phone: encryptable_customer.phone,
            description: None,
            phone_country_code: self.customer.phone_country_code.to_owned(),
            metadata: None,
            connector_customer: None,
            created_at: common_utils::date_time::now(),
            modified_at: common_utils::date_time::now(),
            address_id: None,
            default_payment_method_id: None,
            updated_by: None,
            version: common_types::consts::API_VERSION,
            tax_registration_id: encryptable_customer.tax_registration_id,
        };
        db.insert_customer(
            domain_customer,
            key_manager_state,
            self.key_store,
            self.merchant_account.storage_scheme,
        )
        .await
        .inspect_err(|err| logger::info!("Error creating a customer: {:?}", err))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable_lazy(|| {
            // NOTE(review): this message logs the request's customer_id while
            // the inserted record uses `new_customer_id` — confirm intent.
            format!(
                "Failed to insert customer [id - {:?}] for merchant [id - {:?}]",
                customer_id,
                self.merchant_account.get_id()
            )
        })?;
        Ok(api::CustomerDetails {
            id: new_customer_id,
            name: self.customer.name.clone(),
            email: self.customer.email.clone(),
            phone: self.customer.phone.clone(),
            phone_country_code: self.customer.phone_country_code.clone(),
            tax_registration_id: self.customer.tax_registration_id.clone(),
        })
    }
    /// Stores both the original card and its network token in the locker,
    /// returning the pair of locker responses.
    pub async fn store_card_and_token_in_locker(
        &self,
        network_token: &NetworkTokenizationResponse,
        card: &domain::CardDetail,
        customer_id: &id_type::CustomerId,
    ) -> RouterResult<StoreLockerResponse> {
        let stored_card_resp = self.store_card_in_locker(card, customer_id).await?;
        let stored_token_resp = self
            .store_network_token_in_locker(
                network_token,
                customer_id,
                card.card_holder_name.clone(),
                card.nick_name.clone(),
            )
            .await?;
        let store_locker_response = StoreLockerResponse {
            store_card_resp: stored_card_resp,
            store_token_resp: stored_token_resp,
        };
        Ok(store_locker_response)
    }
    /// Stores the raw card in the Hyperswitch card vault for the given
    /// customer and returns the locker's response payload.
    pub async fn store_card_in_locker(
        &self,
        card: &domain::CardDetail,
        customer_id: &id_type::CustomerId,
    ) -> RouterResult<pm_transformers::StoreCardRespPayload> {
        let merchant_id = self.merchant_account.get_id();
        let locker_req =
            pm_transformers::StoreLockerReq::LockerCard(pm_transformers::StoreCardReq {
                merchant_id: merchant_id.clone(),
                merchant_customer_id: customer_id.clone(),
                card: payment_methods_api::Card {
                    card_number: card.card_number.clone(),
                    card_exp_month: card.card_exp_month.clone(),
                    card_exp_year: card.card_exp_year.clone(),
                    card_isin: Some(card.card_number.get_card_isin().clone()),
                    name_on_card: card.card_holder_name.clone(),
                    nick_name: card
                        .nick_name
                        .as_ref()
                        .map(|nick_name| nick_name.clone().expose()),
                    card_brand: None,
                },
                requestor_card_reference: None,
                // Storage TTL comes from the application's locker configuration.
                ttl: self.state.conf.locker.ttl_for_storage_in_secs,
            });
        let stored_resp = add_card_to_hs_locker(
            self.state,
            &locker_req,
            customer_id,
            api_enums::LockerChoice::HyperswitchCardVault,
        )
        .await
        .inspect_err(|err| logger::info!("Error adding card in locker: {:?}", err))
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
        Ok(stored_resp)
    }
    /// Creates a payment method record tying together the stored card, the
    /// stored network token, and their encrypted payloads.
    ///
    /// Both the card data and token data are encrypted before being handed to
    /// `PmCards::create_payment_method`, along with the locker references
    /// from `stored_locker_resp`.
    pub async fn create_payment_method(
        &self,
        stored_locker_resp: &StoreLockerResponse,
        network_token_details: &NetworkTokenizationResponse,
        card_details: &domain::CardDetail,
        customer_id: &id_type::CustomerId,
    ) -> RouterResult<domain::PaymentMethod> {
        let payment_method_id = common_utils::generate_id(consts::ID_LENGTH, "pm");
        // Form encrypted PM data (original card)
        let enc_pm_data = self.encrypt_card(card_details, true).await?;
        // Form encrypted network token data
        let enc_token_data = self
            .encrypt_network_token(network_token_details, card_details, true)
            .await?;
        // Form PM create entry
        let payment_method_create = api::PaymentMethodCreate {
            payment_method: Some(api_enums::PaymentMethod::Card),
            payment_method_type: card_details
                .card_type
                .as_ref()
                .and_then(|card_type| api_enums::PaymentMethodType::from_str(card_type).ok()),
            payment_method_issuer: card_details.card_issuer.clone(),
            payment_method_issuer_code: None,
            card: Some(api::CardDetail {
                card_number: card_details.card_number.clone(),
                card_exp_month: card_details.card_exp_month.clone(),
                card_exp_year: card_details.card_exp_year.clone(),
                card_holder_name: card_details.card_holder_name.clone(),
                nick_name: card_details.nick_name.clone(),
                card_issuing_country: card_details.card_issuing_country.clone(),
                card_network: card_details.card_network.clone(),
                card_issuer: card_details.card_issuer.clone(),
                card_type: card_details.card_type.clone(),
            }),
            metadata: None,
            customer_id: Some(customer_id.clone()),
            card_network: card_details
                .card_network
                .as_ref()
                .map(|network| network.to_string()),
            bank_transfer: None,
            wallet: None,
            client_secret: None,
            payment_method_data: None,
            billing: None,
            connector_mandate_details: None,
            network_transaction_id: None,
        };
        PmCards {
            state: self.state,
            merchant_context: &domain::MerchantContext::NormalMerchant(Box::new(domain::Context(
                self.merchant_account.clone(),
                self.key_store.clone(),
            ))),
        }
        .create_payment_method(
            &payment_method_create,
            customer_id,
            &payment_method_id,
            Some(stored_locker_resp.store_card_resp.card_reference.clone()),
            self.merchant_account.get_id(),
            None,
            None,
            Some(enc_pm_data),
            None,
            None,
            None,
            None,
            None,
            network_token_details.1.clone(),
            Some(stored_locker_resp.store_token_resp.card_reference.clone()),
            Some(enc_token_data),
            Default::default(), // this method is used only for card bulk tokenization, and currently external vault is not supported for this hence passing Default i.e. InternalVault
        )
        .await
    }
}
// File: crates/router/src/core/payment_methods/tokenize/payment_method_executor.rs
use api_models::enums as api_enums;
use common_utils::{
ext_traits::OptionExt, fp_utils::when, pii::Email, types::keymanager::KeyManagerState,
};
use error_stack::{report, ResultExt};
use masking::Secret;
use router_env::logger;
use super::{
CardNetworkTokenizeExecutor, NetworkTokenizationBuilder, NetworkTokenizationProcess,
NetworkTokenizationResponse, State, TransitionTo,
};
use crate::{
core::payment_methods::transformers as pm_transformers,
errors::{self, RouterResult},
types::{api, domain},
};
// Available states for payment method tokenization
/// Initial state: tokenization was requested for an existing payment method id.
pub struct TokenizeWithPmId;
/// Request validated against the fetched payment method and its customer.
pub struct PmValidated;
/// Payment method record fetched from storage.
pub struct PmFetched;
/// Card details (fetched from the locker) assigned to the flow.
pub struct PmAssigned;
/// Network token issued for the card.
pub struct PmTokenized;
/// Network token persisted in the locker.
pub struct PmTokenStored;
/// Payment method record updated with the network token references.
pub struct PmTokenUpdated;
impl State for TokenizeWithPmId {}
impl State for PmValidated {}
impl State for PmFetched {}
impl State for PmAssigned {}
impl State for PmTokenized {}
impl State for PmTokenStored {}
impl State for PmTokenUpdated {}
// State transitions for payment method tokenization
// (linear: fetch -> validate -> assign card -> tokenize -> store -> update)
impl TransitionTo<PmFetched> for TokenizeWithPmId {}
impl TransitionTo<PmValidated> for PmFetched {}
impl TransitionTo<PmAssigned> for PmValidated {}
impl TransitionTo<PmTokenized> for PmAssigned {}
impl TransitionTo<PmTokenStored> for PmTokenized {}
impl TransitionTo<PmTokenUpdated> for PmTokenStored {}
impl Default for NetworkTokenizationBuilder<'_, TokenizeWithPmId> {
    /// Delegates to [`Self::new`]: an empty builder in the initial state.
    fn default() -> Self {
        Self::new()
    }
}
impl<'a> NetworkTokenizationBuilder<'a, TokenizeWithPmId> {
    /// Creates a builder in the initial `TokenizeWithPmId` state with no data
    /// collected yet.
    pub fn new() -> Self {
        Self {
            state: std::marker::PhantomData,
            card_tokenized: false,
            customer: None,
            card: None,
            card_cvc: None,
            network_token: None,
            stored_card: None,
            stored_token: None,
            payment_method_response: None,
            error_code: None,
            error_message: None,
        }
    }
    /// Transitions to `PmFetched` once the payment method record has been
    /// loaded, capturing a masked response for it (card details omitted).
    pub fn set_payment_method(
        self,
        payment_method: &domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PmFetched> {
        let response = api::PaymentMethodResponse {
            card: None,
            bank_transfer: None,
            payment_experience: None,
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            recurring_enabled: Some(true),
            installment_payment_enabled: Some(false),
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
        };
        NetworkTokenizationBuilder {
            payment_method_response: Some(response),
            state: std::marker::PhantomData,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmFetched> {
    /// Transitions to `PmValidated` once request validation has resolved the
    /// customer that owns the payment method.
    pub fn set_validate_result(
        self,
        customer: &'a api::CustomerDetails,
    ) -> NetworkTokenizationBuilder<'a, PmValidated> {
        NetworkTokenizationBuilder {
            customer: Some(customer),
            state: std::marker::PhantomData,
            payment_method_response: self.payment_method_response,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmValidated> {
    /// Transitions to `PmAssigned` by combining the card fetched from the
    /// locker with optional BIN metadata (`CardInfo`) and the CVC.
    pub fn set_card_details(
        self,
        card_from_locker: &'a api_models::payment_methods::Card,
        optional_card_info: Option<diesel_models::CardInfo>,
        card_cvc: Option<Secret<String>>,
    ) -> NetworkTokenizationBuilder<'a, PmAssigned> {
        // Borrow the BIN metadata once; each field below reads from it when present.
        let card_info = optional_card_info.as_ref();
        let card = domain::CardDetail {
            card_number: card_from_locker.card_number.clone(),
            card_exp_month: card_from_locker.card_exp_month.clone(),
            card_exp_year: card_from_locker.card_exp_year.clone(),
            card_holder_name: card_from_locker.name_on_card.clone(),
            nick_name: card_from_locker
                .nick_name
                .as_ref()
                .map(|nick_name| Secret::new(nick_name.clone())),
            bank_code: card_info.and_then(|info| info.bank_code.clone()),
            card_issuer: card_info.and_then(|info| info.card_issuer.clone()),
            card_network: card_info.and_then(|info| info.card_network.clone()),
            card_type: card_info.and_then(|info| info.card_type.clone()),
            card_issuing_country: card_info.and_then(|info| info.card_issuing_country.clone()),
            co_badged_card_data: None,
        };
        NetworkTokenizationBuilder {
            card: Some(card),
            card_cvc,
            state: std::marker::PhantomData,
            customer: self.customer,
            network_token: self.network_token,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmAssigned> {
    /// Returns clones of the assigned card and CVC (if any) for the
    /// downstream tokenization call.
    pub fn get_optional_card_and_cvc(
        &self,
    ) -> (Option<domain::CardDetail>, Option<Secret<String>>) {
        let card = self.card.clone();
        let cvc = self.card_cvc.clone();
        (card, cvc)
    }
    /// Transitions to `PmTokenized` once the network token has been issued,
    /// marking the card as tokenized.
    pub fn set_token_details(
        self,
        network_token: &'a NetworkTokenizationResponse,
    ) -> NetworkTokenizationBuilder<'a, PmTokenized> {
        NetworkTokenizationBuilder {
            card_tokenized: true,
            network_token: Some(&network_token.0),
            state: std::marker::PhantomData,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_card: self.stored_card,
            stored_token: self.stored_token,
            payment_method_response: self.payment_method_response,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmTokenized> {
    /// Transitions to `PmTokenStored` once the network token has been saved
    /// in the locker, recording the locker's response payload.
    pub fn set_stored_token_response(
        self,
        store_token_response: &'a pm_transformers::StoreCardRespPayload,
    ) -> NetworkTokenizationBuilder<'a, PmTokenStored> {
        NetworkTokenizationBuilder {
            stored_token: Some(store_token_response),
            state: std::marker::PhantomData,
            payment_method_response: self.payment_method_response,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            network_token: self.network_token,
            stored_card: self.stored_card,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl<'a> NetworkTokenizationBuilder<'a, PmTokenStored> {
    /// Transitions to `PmTokenUpdated` after the payment method record has
    /// been updated with the token references, capturing a masked response
    /// for it (card details omitted).
    pub fn set_payment_method(
        self,
        payment_method: &'a domain::PaymentMethod,
    ) -> NetworkTokenizationBuilder<'a, PmTokenUpdated> {
        let response = api::PaymentMethodResponse {
            card: None,
            bank_transfer: None,
            payment_experience: None,
            merchant_id: payment_method.merchant_id.clone(),
            customer_id: Some(payment_method.customer_id.clone()),
            payment_method_id: payment_method.payment_method_id.clone(),
            payment_method: payment_method.payment_method,
            payment_method_type: payment_method.payment_method_type,
            recurring_enabled: Some(true),
            installment_payment_enabled: Some(false),
            metadata: payment_method.metadata.clone(),
            created: Some(payment_method.created_at),
            last_used_at: Some(payment_method.last_used_at),
            client_secret: payment_method.client_secret.clone(),
        };
        NetworkTokenizationBuilder {
            payment_method_response: Some(response),
            state: std::marker::PhantomData,
            customer: self.customer,
            card: self.card,
            card_cvc: self.card_cvc,
            stored_token: self.stored_token,
            network_token: self.network_token,
            stored_card: self.stored_card,
            card_tokenized: self.card_tokenized,
            error_code: self.error_code,
            error_message: self.error_message,
        }
    }
}
impl NetworkTokenizationBuilder<'_, PmTokenUpdated> {
    /// Terminal state of the payment-method tokenization flow: assembles the
    /// final `CardNetworkTokenizeResponse` from the accumulated fields.
    pub fn build(self) -> api::CardNetworkTokenizeResponse {
        let customer = self.customer.cloned();
        let error_code = self.error_code.cloned();
        let error_message = self.error_message.cloned();
        api::CardNetworkTokenizeResponse {
            customer,
            error_code,
            error_message,
            payment_method_response: self.payment_method_response,
            card_tokenized: self.card_tokenized,
            // Populated later by caller functions when batching API operations.
            tokenization_data: None,
        }
    }
}
// Specific executor for payment method tokenization
impl CardNetworkTokenizeExecutor<'_, domain::TokenizePaymentMethodRequest> {
    /// Loads the payment method for `payment_method_id` from storage.
    ///
    /// Storage "not found" outcomes (database `NotFound` or `ValueNotFound`)
    /// are surfaced to the caller as `InvalidRequestData`; any other storage
    /// failure is logged and mapped to `InternalServerError`.
    pub async fn fetch_payment_method(
        &self,
        payment_method_id: &str,
    ) -> RouterResult<domain::PaymentMethod> {
        self.state
            .store
            .find_payment_method(
                &self.state.into(),
                self.key_store,
                payment_method_id,
                self.merchant_account.storage_scheme,
            )
            .await
            .map_err(|err| match err.current_context() {
                errors::StorageError::DatabaseError(err)
                    if matches!(
                        err.current_context(),
                        diesel_models::errors::DatabaseError::NotFound
                    ) =>
                {
                    report!(errors::ApiErrorResponse::InvalidRequestData {
                        message: "Invalid payment_method_id".into(),
                    })
                }
                errors::StorageError::ValueNotFound(_) => {
                    report!(errors::ApiErrorResponse::InvalidRequestData {
                        message: "Invalid payment_method_id".to_string(),
                    })
                }
                err => {
                    logger::info!("Error fetching payment_method: {:?}", err);
                    report!(errors::ApiErrorResponse::InternalServerError)
                }
            })
    }
    /// Validates the tokenize request against the fetched payment method and
    /// returns its locker reference together with the owning customer.
    ///
    /// Checks, in order: the request's customer id matches the payment
    /// method's; the payment method is a card; it is not already network
    /// tokenized; and it has a locker reference. Then fetches the customer
    /// record and maps it to `api::CustomerDetails`.
    pub async fn validate_request_and_locker_reference_and_customer(
        &self,
        payment_method: &domain::PaymentMethod,
    ) -> RouterResult<(String, api::CustomerDetails)> {
        // Ensure customer ID matches
        let customer_id_in_req = self
            .customer
            .customer_id
            .clone()
            .get_required_value("customer_id")
            .change_context(errors::ApiErrorResponse::MissingRequiredField {
                field_name: "customer",
            })?;
        when(payment_method.customer_id != customer_id_in_req, || {
            Err(report!(errors::ApiErrorResponse::InvalidRequestData {
                message: "Payment method does not belong to the customer".to_string()
            }))
        })?;
        // Ensure payment method is card
        match payment_method.payment_method {
            Some(api_enums::PaymentMethod::Card) => Ok(()),
            Some(_) => Err(report!(errors::ApiErrorResponse::InvalidRequestData {
                message: "Payment method is not card".to_string()
            })),
            None => Err(report!(errors::ApiErrorResponse::InvalidRequestData {
                message: "Payment method is empty".to_string()
            })),
        }?;
        // Ensure card is not tokenized already
        when(
            payment_method
                .network_token_requestor_reference_id
                .is_some(),
            || {
                Err(report!(errors::ApiErrorResponse::InvalidRequestData {
                    message: "Card is already tokenized".to_string()
                }))
            },
        )?;
        // Ensure locker reference is present
        let locker_id = payment_method.locker_id.clone().ok_or(report!(
            errors::ApiErrorResponse::InvalidRequestData {
                message: "locker_id not found for given payment_method_id".to_string()
            }
        ))?;
        // Fetch customer
        let db = &*self.state.store;
        let key_manager_state: &KeyManagerState = &self.state.into();
        let customer = db
            .find_customer_by_customer_id_merchant_id(
                key_manager_state,
                &payment_method.customer_id,
                self.merchant_account.get_id(),
                self.key_store,
                self.merchant_account.storage_scheme,
            )
            .await
            .inspect_err(|err| logger::info!("Error fetching customer: {:?}", err))
            .change_context(errors::ApiErrorResponse::InternalServerError)?;
        let customer_details = api::CustomerDetails {
            id: customer.customer_id.clone(),
            name: customer.name.clone().map(|name| name.into_inner()),
            email: customer.email.clone().map(Email::from),
            phone: customer.phone.clone().map(|phone| phone.into_inner()),
            phone_country_code: customer.phone_country_code.clone(),
            tax_registration_id: customer
                .tax_registration_id
                .clone()
                .map(|tax_registration_id| tax_registration_id.into_inner()),
        };
        Ok((locker_id, customer_details))
    }
    /// Persists the network token references on the payment method record.
    ///
    /// Encrypts the token payload, then updates the record with the token
    /// requestor reference id, the token's locker reference, and the
    /// encrypted token data.
    pub async fn update_payment_method(
        &self,
        store_token_response: &pm_transformers::StoreCardRespPayload,
        payment_method: domain::PaymentMethod,
        network_token_details: &NetworkTokenizationResponse,
        card_details: &domain::CardDetail,
    ) -> RouterResult<domain::PaymentMethod> {
        // Form encrypted network token data
        let enc_token_data = self
            .encrypt_network_token(network_token_details, card_details, true)
            .await?;
        // Update payment method
        let payment_method_update = diesel_models::PaymentMethodUpdate::NetworkTokenDataUpdate {
            network_token_requestor_reference_id: network_token_details.1.clone(),
            network_token_locker_id: Some(store_token_response.card_reference.clone()),
            network_token_payment_method_data: Some(enc_token_data.into()),
        };
        self.state
            .store
            .update_payment_method(
                &self.state.into(),
                self.key_store,
                payment_method,
                payment_method_update,
                self.merchant_account.storage_scheme,
            )
            .await
            .inspect_err(|err| logger::info!("Error updating payment method: {:?}", err))
            .change_context(errors::ApiErrorResponse::InternalServerError)
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/payment_methods/tokenize/card_executor.rs",
"crates/router/src/core/payment_methods/tokenize/payment_method_executor.rs"
],
"module": "crates/router/src/core/payment_methods/tokenize",
"num_files": 2,
"token_count": 7927
}
|
module_-6119942009330560367
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/core/authentication
Files: 3
</path>
<module>
// File: crates/router/src/core/authentication/types.rs
use error_stack::{Report, ResultExt};
pub use hyperswitch_domain_models::router_request_types::authentication::{
AcquirerDetails, ExternalThreeDSConnectorMetadata, PreAuthenticationData, ThreeDsMethodData,
};
use crate::{
core::errors,
types::{storage, transformers::ForeignTryFrom},
utils::OptionExt,
};
impl ForeignTryFrom<&storage::Authentication> for PreAuthenticationData {
    type Error = Report<errors::ApiErrorResponse>;
    /// Builds pre-authentication data from a stored authentication record.
    ///
    /// Fails with `UnprocessableEntity` when the record has no 3DS server
    /// transaction id, i.e. pre-authentication never completed successfully.
    fn foreign_try_from(authentication: &storage::Authentication) -> Result<Self, Self::Error> {
        let pre_auth_incomplete = errors::ApiErrorResponse::UnprocessableEntity { message: "Pre Authentication must be completed successfully before Authentication can be performed".to_string() };
        Ok(Self {
            threeds_server_transaction_id: authentication
                .threeds_server_transaction_id
                .clone()
                .get_required_value("threeds_server_transaction_id")
                .change_context(pre_auth_incomplete)?,
            message_version: authentication
                .message_version
                .clone()
                .get_required_value("message_version")?,
            acquirer_bin: authentication.acquirer_bin.clone(),
            acquirer_merchant_id: authentication.acquirer_merchant_id.clone(),
            acquirer_country_code: authentication.acquirer_country_code.clone(),
            connector_metadata: authentication.connector_metadata.clone(),
        })
    }
}
// File: crates/router/src/core/authentication/transformers.rs
use std::marker::PhantomData;
use api_models::payments;
use common_enums::PaymentMethod;
use common_utils::ext_traits::ValueExt;
use error_stack::ResultExt;
use crate::{
core::{
errors::{self, RouterResult},
payments::helpers as payments_helpers,
},
types::{
self, domain, storage,
transformers::{ForeignFrom, ForeignTryFrom},
},
utils::ext_traits::OptionExt,
SessionState,
};
/// Placeholder attempt id — authentication flows are not tied to a real
/// payment attempt, but `RouterData` requires one.
const IRRELEVANT_ATTEMPT_ID_IN_AUTHENTICATION_FLOW: &str =
    "irrelevant_attempt_id_in_AUTHENTICATION_flow";
/// Placeholder connector request reference id for the same reason.
const IRRELEVANT_CONNECTOR_REQUEST_REFERENCE_ID_IN_AUTHENTICATION_FLOW: &str =
    "irrelevant_connector_request_reference_id_in_AUTHENTICATION_flow";
#[allow(clippy::too_many_arguments)]
/// Builds the `RouterData` used to invoke the external 3DS connector's
/// authentication step.
///
/// Assembles a `ConnectorAuthenticationRequestData` from the payment, card,
/// browser, and 3DS inputs — pre-authentication data is derived from the
/// stored `Authentication` record and fails if pre-auth did not complete —
/// then delegates to [`construct_router_data`].
pub fn construct_authentication_router_data(
    state: &SessionState,
    merchant_id: common_utils::id_type::MerchantId,
    authentication_connector: String,
    payment_method_data: domain::PaymentMethodData,
    payment_method: PaymentMethod,
    billing_address: hyperswitch_domain_models::address::Address,
    shipping_address: Option<hyperswitch_domain_models::address::Address>,
    browser_details: Option<types::BrowserInformation>,
    amount: Option<common_utils::types::MinorUnit>,
    currency: Option<common_enums::Currency>,
    message_category: types::api::authentication::MessageCategory,
    device_channel: payments::DeviceChannel,
    merchant_connector_account: payments_helpers::MerchantConnectorAccountType,
    authentication_data: storage::Authentication,
    return_url: Option<String>,
    sdk_information: Option<payments::SdkInformation>,
    threeds_method_comp_ind: payments::ThreeDsCompletionIndicator,
    email: Option<common_utils::pii::Email>,
    webhook_url: String,
    three_ds_requestor_url: String,
    psd2_sca_exemption_type: Option<common_enums::ScaExemptionType>,
    payment_id: common_utils::id_type::PaymentId,
    force_3ds_challenge: bool,
) -> RouterResult<types::authentication::ConnectorAuthenticationRouterData> {
    let router_request = types::authentication::ConnectorAuthenticationRequestData {
        payment_method_data,
        billing_address,
        shipping_address,
        browser_details,
        // The request carries the amount as a plain i64 of minor units.
        amount: amount.map(|amt| amt.get_amount_as_i64()),
        currency,
        message_category,
        device_channel,
        pre_authentication_data: super::types::PreAuthenticationData::foreign_try_from(
            &authentication_data,
        )?,
        return_url,
        sdk_information,
        email,
        three_ds_requestor_url,
        threeds_method_comp_ind,
        webhook_url,
        force_3ds_challenge,
    };
    construct_router_data(
        state,
        authentication_connector,
        payment_method,
        merchant_id.clone(),
        types::PaymentAddress::default(),
        router_request,
        &merchant_connector_account,
        psd2_sca_exemption_type,
        payment_id,
    )
}
/// Builds the `RouterData` for the external 3DS connector's
/// post-authentication (result retrieval) step.
///
/// Requires the 3DS server transaction id recorded during pre-authentication;
/// its absence at this stage is treated as a server-side fault.
pub fn construct_post_authentication_router_data(
    state: &SessionState,
    authentication_connector: String,
    business_profile: domain::Profile,
    merchant_connector_account: payments_helpers::MerchantConnectorAccountType,
    authentication_data: &storage::Authentication,
    payment_id: &common_utils::id_type::PaymentId,
) -> RouterResult<types::authentication::ConnectorPostAuthenticationRouterData> {
    let threeds_server_transaction_id = authentication_data
        .threeds_server_transaction_id
        .clone()
        .get_required_value("threeds_server_transaction_id")
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
    construct_router_data(
        state,
        authentication_connector,
        PaymentMethod::default(),
        business_profile.merchant_id.clone(),
        types::PaymentAddress::default(),
        types::authentication::ConnectorPostAuthenticationRequestData {
            threeds_server_transaction_id,
        },
        &merchant_connector_account,
        None,
        payment_id.clone(),
    )
}
/// Builds the `RouterData` for the external 3DS connector's
/// pre-authentication step; only the raw card is needed, everything else is
/// defaulted.
pub fn construct_pre_authentication_router_data<F: Clone>(
    state: &SessionState,
    authentication_connector: String,
    card: hyperswitch_domain_models::payment_method_data::Card,
    merchant_connector_account: &payments_helpers::MerchantConnectorAccountType,
    merchant_id: common_utils::id_type::MerchantId,
    payment_id: common_utils::id_type::PaymentId,
) -> RouterResult<
    types::RouterData<
        F,
        types::authentication::PreAuthNRequestData,
        types::authentication::AuthenticationResponseData,
    >,
> {
    construct_router_data(
        state,
        authentication_connector,
        PaymentMethod::default(),
        merchant_id,
        types::PaymentAddress::default(),
        types::authentication::PreAuthNRequestData { card },
        merchant_connector_account,
        None,
        payment_id,
    )
}
#[allow(clippy::too_many_arguments)]
/// Common constructor for authentication-flow `RouterData`.
///
/// Parses the connector credentials from the merchant connector account and
/// fills the payment-oriented `RouterData` fields with defaults/placeholders,
/// since authentication calls are not tied to a real payment attempt.
pub fn construct_router_data<F: Clone, Req, Res>(
    state: &SessionState,
    authentication_connector_name: String,
    payment_method: PaymentMethod,
    merchant_id: common_utils::id_type::MerchantId,
    address: types::PaymentAddress,
    request_data: Req,
    merchant_connector_account: &payments_helpers::MerchantConnectorAccountType,
    psd2_sca_exemption_type: Option<common_enums::ScaExemptionType>,
    payment_id: common_utils::id_type::PaymentId,
) -> RouterResult<types::RouterData<F, Req, Res>> {
    let test_mode: Option<bool> = merchant_connector_account.is_test_mode_on();
    // Credentials are stored as opaque JSON; a parse failure is a
    // configuration fault, surfaced as an internal error.
    let auth_type: types::ConnectorAuthType = merchant_connector_account
        .get_connector_account_details()
        .parse_value("ConnectorAuthType")
        .change_context(errors::ApiErrorResponse::InternalServerError)?;
    Ok(types::RouterData {
        flow: PhantomData,
        merchant_id,
        customer_id: None,
        tenant_id: state.tenant.tenant_id.clone(),
        connector_customer: None,
        connector: authentication_connector_name,
        payment_id: payment_id.get_string_repr().to_owned(),
        // Placeholder ids — no payment attempt exists for authentication calls.
        attempt_id: IRRELEVANT_ATTEMPT_ID_IN_AUTHENTICATION_FLOW.to_owned(),
        status: common_enums::AttemptStatus::default(),
        payment_method,
        payment_method_type: None,
        connector_auth_type: auth_type,
        description: None,
        address,
        auth_type: common_enums::AuthenticationType::NoThreeDs,
        connector_meta_data: merchant_connector_account.get_metadata(),
        connector_wallets_details: merchant_connector_account.get_connector_wallets_details(),
        amount_captured: None,
        minor_amount_captured: None,
        access_token: None,
        session_token: None,
        reference_id: None,
        payment_method_token: None,
        recurring_mandate_payment_data: None,
        preprocessing_id: None,
        payment_method_balance: None,
        connector_api_version: None,
        request: request_data,
        // Response starts as a default error; it is filled by the connector call.
        response: Err(types::ErrorResponse::default()),
        connector_request_reference_id:
            IRRELEVANT_CONNECTOR_REQUEST_REFERENCE_ID_IN_AUTHENTICATION_FLOW.to_owned(),
        #[cfg(feature = "payouts")]
        payout_method_data: None,
        #[cfg(feature = "payouts")]
        quote_id: None,
        test_mode,
        connector_http_status_code: None,
        external_latency: None,
        apple_pay_flow: None,
        frm_metadata: None,
        dispute_id: None,
        refund_id: None,
        payment_method_status: None,
        connector_response: None,
        integrity_check: Ok(()),
        additional_merchant_data: None,
        header_payload: None,
        connector_mandate_request_reference_id: None,
        authentication_id: None,
        psd2_sca_exemption_type,
        raw_connector_response: None,
        is_payment_id_from_merchant: None,
        l2_l3_data: None,
        minor_amount_capturable: None,
        authorized_amount: None,
    })
}
impl ForeignFrom<common_enums::TransactionStatus> for common_enums::AuthenticationStatus {
fn foreign_from(trans_status: common_enums::TransactionStatus) -> Self {
match trans_status {
common_enums::TransactionStatus::Success => Self::Success,
common_enums::TransactionStatus::Failure
| common_enums::TransactionStatus::Rejected
| common_enums::TransactionStatus::VerificationNotPerformed
| common_enums::TransactionStatus::NotVerified => Self::Failed,
common_enums::TransactionStatus::ChallengeRequired
| common_enums::TransactionStatus::ChallengeRequiredDecoupledAuthentication
| common_enums::TransactionStatus::InformationOnly => Self::Pending,
}
}
}
// File: crates/router/src/core/authentication/utils.rs
use common_utils::ext_traits::AsyncExt;
use error_stack::ResultExt;
use hyperswitch_domain_models::router_data_v2::ExternalAuthenticationFlowData;
use masking::ExposeInterface;
use crate::{
consts,
core::{
errors::{self, ConnectorErrorExt, StorageErrorExt},
payments,
},
errors::RouterResult,
routes::SessionState,
services::{self, execute_connector_processing_step},
types::{
api, authentication::AuthenticationResponseData, domain, storage,
transformers::ForeignFrom, RouterData,
},
utils::OptionExt,
};
#[cfg(feature = "v1")]
/// Returns the connector to use for the payment when (and only when) it
/// supports a separate external authentication call.
///
/// `PreDetermined` and the first candidate of `Retryable` are inspected with
/// the same predicate (previously duplicated inline); `SessionMultiple` flows
/// never perform separate authentication.
pub fn get_connector_data_if_separate_authn_supported(
    connector_call_type: &api::ConnectorCallType,
) -> Option<api::ConnectorData> {
    // Pick the single candidate routing data to inspect, if any.
    let candidate = match connector_call_type {
        api::ConnectorCallType::PreDetermined(routing_data) => Some(routing_data),
        api::ConnectorCallType::Retryable(candidates) => candidates.first(),
        api::ConnectorCallType::SessionMultiple(_) => None,
    };
    // Keep a clone of the connector data only when the connector advertises
    // separate-authentication support.
    candidate.and_then(|routing_data| {
        routing_data
            .connector_data
            .connector_name
            .is_separate_authentication_supported()
            .then(|| routing_data.connector_data.clone())
    })
}
/// Persists the outcome of an authentication connector call onto the stored
/// `Authentication` record.
///
/// Each successful response variant is translated into the matching
/// `AuthenticationUpdate`; connector errors are recorded as a `Failed` status
/// with the error code/message. For `AuthNResponse` and `PostAuthNResponse`
/// the authentication value is tokenized into the vault (keyed by the
/// authentication id) rather than stored on the row. Returns the updated
/// authentication record.
pub async fn update_trackers<F: Clone, Req>(
    state: &SessionState,
    router_data: RouterData<F, Req, AuthenticationResponseData>,
    authentication: storage::Authentication,
    acquirer_details: Option<super::types::AcquirerDetails>,
    merchant_key_store: &hyperswitch_domain_models::merchant_key_store::MerchantKeyStore,
) -> RouterResult<storage::Authentication> {
    let authentication_update = match router_data.response {
        Ok(response) => match response {
            // Pre-authentication: record 3DS server / version data and move
            // the authentication to `Pending`.
            AuthenticationResponseData::PreAuthNResponse {
                threeds_server_transaction_id,
                maximum_supported_3ds_version,
                connector_authentication_id,
                three_ds_method_data,
                three_ds_method_url,
                message_version,
                connector_metadata,
                directory_server_id,
            } => storage::AuthenticationUpdate::PreAuthenticationUpdate {
                threeds_server_transaction_id,
                maximum_supported_3ds_version,
                connector_authentication_id,
                three_ds_method_data,
                three_ds_method_url,
                message_version,
                connector_metadata,
                authentication_status: common_enums::AuthenticationStatus::Pending,
                acquirer_bin: acquirer_details
                    .as_ref()
                    .map(|acquirer_details| acquirer_details.acquirer_bin.clone()),
                acquirer_merchant_id: acquirer_details
                    .as_ref()
                    .map(|acquirer_details| acquirer_details.acquirer_merchant_id.clone()),
                acquirer_country_code: acquirer_details
                    .and_then(|acquirer_details| acquirer_details.acquirer_country_code),
                directory_server_id,
                billing_address: None,
                shipping_address: None,
                browser_info: Box::new(None),
                email: None,
            },
            // Main authentication: vault the authentication value (if any),
            // then persist ACS details and the derived status.
            AuthenticationResponseData::AuthNResponse {
                authn_flow_type,
                authentication_value,
                trans_status,
                connector_metadata,
                ds_trans_id,
                eci,
                challenge_code,
                challenge_cancel,
                challenge_code_reason,
                message_extension,
            } => {
                authentication_value
                    .async_map(|auth_val| {
                        crate::core::payment_methods::vault::create_tokenize(
                            state,
                            auth_val.expose(),
                            None,
                            authentication
                                .authentication_id
                                .get_string_repr()
                                .to_string(),
                            merchant_key_store.key.get_inner(),
                        )
                    })
                    .await
                    .transpose()?;
                let authentication_status =
                    common_enums::AuthenticationStatus::foreign_from(trans_status.clone());
                storage::AuthenticationUpdate::AuthenticationUpdate {
                    trans_status,
                    acs_url: authn_flow_type.get_acs_url(),
                    challenge_request: authn_flow_type.get_challenge_request(),
                    challenge_request_key: authn_flow_type.get_challenge_request_key(),
                    acs_reference_number: authn_flow_type.get_acs_reference_number(),
                    acs_trans_id: authn_flow_type.get_acs_trans_id(),
                    acs_signed_content: authn_flow_type.get_acs_signed_content(),
                    authentication_type: authn_flow_type.get_decoupled_authentication_type(),
                    authentication_status,
                    connector_metadata,
                    ds_trans_id,
                    eci,
                    challenge_code,
                    challenge_cancel,
                    challenge_code_reason,
                    message_extension,
                }
            }
            // Post-authentication: vault the value (if any) and record the
            // final transaction status.
            AuthenticationResponseData::PostAuthNResponse {
                trans_status,
                authentication_value,
                eci,
                challenge_cancel,
                challenge_code_reason,
            } => {
                authentication_value
                    .async_map(|auth_val| {
                        crate::core::payment_methods::vault::create_tokenize(
                            state,
                            auth_val.expose(),
                            None,
                            authentication
                                .authentication_id
                                .get_string_repr()
                                .to_string(),
                            merchant_key_store.key.get_inner(),
                        )
                    })
                    .await
                    .transpose()?;
                storage::AuthenticationUpdate::PostAuthenticationUpdate {
                    authentication_status: common_enums::AuthenticationStatus::foreign_from(
                        trans_status.clone(),
                    ),
                    trans_status,
                    eci,
                    challenge_cancel,
                    challenge_code_reason,
                }
            }
            // Version call: the supported version doubles as message version.
            AuthenticationResponseData::PreAuthVersionCallResponse {
                maximum_supported_3ds_version,
            } => storage::AuthenticationUpdate::PreAuthenticationVersionCallUpdate {
                message_version: maximum_supported_3ds_version.clone(),
                maximum_supported_3ds_version,
            },
            AuthenticationResponseData::PreAuthThreeDsMethodCallResponse {
                threeds_server_transaction_id,
                three_ds_method_data,
                three_ds_method_url,
                connector_metadata,
            } => storage::AuthenticationUpdate::PreAuthenticationThreeDsMethodCall {
                threeds_server_transaction_id,
                three_ds_method_data,
                three_ds_method_url,
                connector_metadata,
                acquirer_bin: acquirer_details
                    .as_ref()
                    .map(|acquirer_details| acquirer_details.acquirer_bin.clone()),
                acquirer_merchant_id: acquirer_details
                    .map(|acquirer_details| acquirer_details.acquirer_merchant_id),
            },
        },
        // Connector error: mark the authentication failed, folding the
        // optional reason into the stored error message.
        Err(error) => storage::AuthenticationUpdate::ErrorUpdate {
            connector_authentication_id: error.connector_transaction_id,
            authentication_status: common_enums::AuthenticationStatus::Failed,
            error_message: error
                .reason
                .map(|reason| format!("message: {}, reason: {}", error.message, reason))
                .or(Some(error.message)),
            error_code: Some(error.code),
        },
    };
    state
        .store
        .update_authentication_by_merchant_id_authentication_id(
            authentication,
            authentication_update,
        )
        .await
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Error while updating authentication")
}
impl ForeignFrom<common_enums::AuthenticationStatus> for common_enums::AttemptStatus {
fn foreign_from(from: common_enums::AuthenticationStatus) -> Self {
match from {
common_enums::AuthenticationStatus::Started
| common_enums::AuthenticationStatus::Pending => Self::AuthenticationPending,
common_enums::AuthenticationStatus::Success => Self::AuthenticationSuccessful,
common_enums::AuthenticationStatus::Failed => Self::AuthenticationFailed,
}
}
}
#[allow(clippy::too_many_arguments)]
/// Inserts a fresh `Authentication` record in `Started` status for the given
/// payment, generating a new authentication id and client secret.
///
/// All connector-response fields (trans status, ACS data, 3DS metadata, …)
/// start out as `None` and are filled in later by `update_trackers`.
///
/// # Errors
/// Returns a duplicate-entry error when an authentication with the generated
/// id already exists.
pub async fn create_new_authentication(
    state: &SessionState,
    merchant_id: common_utils::id_type::MerchantId,
    authentication_connector: String,
    token: String,
    profile_id: common_utils::id_type::ProfileId,
    payment_id: common_utils::id_type::PaymentId,
    merchant_connector_id: common_utils::id_type::MerchantConnectorAccountId,
    organization_id: common_utils::id_type::OrganizationId,
    force_3ds_challenge: Option<bool>,
    psd2_sca_exemption_type: Option<common_enums::ScaExemptionType>,
) -> RouterResult<storage::Authentication> {
    let authentication_id = common_utils::id_type::AuthenticationId::generate_authentication_id(
        consts::AUTHENTICATION_ID_PREFIX,
    );
    // The client secret is derived from the authentication id.
    let authentication_client_secret = Some(common_utils::generate_id_with_default_len(&format!(
        "{}_secret",
        authentication_id.get_string_repr()
    )));
    let new_authorization = storage::AuthenticationNew {
        authentication_id: authentication_id.clone(),
        merchant_id,
        authentication_connector: Some(authentication_connector),
        connector_authentication_id: None,
        // The token is stored with an "eph_" prefix as the payment method id.
        payment_method_id: format!("eph_{token}"),
        authentication_type: None,
        authentication_status: common_enums::AuthenticationStatus::Started,
        authentication_lifecycle_status: common_enums::AuthenticationLifecycleStatus::Unused,
        error_message: None,
        error_code: None,
        connector_metadata: None,
        maximum_supported_version: None,
        threeds_server_transaction_id: None,
        cavv: None,
        authentication_flow_type: None,
        message_version: None,
        eci: None,
        trans_status: None,
        acquirer_bin: None,
        acquirer_merchant_id: None,
        three_ds_method_data: None,
        three_ds_method_url: None,
        acs_url: None,
        challenge_request: None,
        challenge_request_key: None,
        acs_reference_number: None,
        acs_trans_id: None,
        acs_signed_content: None,
        profile_id,
        payment_id: Some(payment_id),
        merchant_connector_id: Some(merchant_connector_id),
        ds_trans_id: None,
        directory_server_id: None,
        acquirer_country_code: None,
        service_details: None,
        organization_id,
        authentication_client_secret,
        force_3ds_challenge,
        psd2_sca_exemption_type,
        return_url: None,
        amount: None,
        currency: None,
        billing_address: None,
        shipping_address: None,
        browser_info: None,
        email: None,
        profile_acquirer_id: None,
        challenge_code: None,
        challenge_cancel: None,
        challenge_code_reason: None,
        message_extension: None,
    };
    state
        .store
        .insert_authentication(new_authorization)
        .await
        .to_duplicate_response(errors::ApiErrorResponse::GenericDuplicateError {
            message: format!(
                "Authentication with authentication_id {} already exists",
                authentication_id.get_string_repr()
            ),
        })
}
/// Invokes the named external-authentication connector for flow `F` and
/// returns the connector's updated router data.
///
/// # Errors
/// Fails when the connector name cannot be resolved or when the connector
/// call itself fails (surfaced as a payment-failed response).
pub async fn do_auth_connector_call<F, Req, Res>(
    state: &SessionState,
    authentication_connector_name: String,
    router_data: RouterData<F, Req, Res>,
) -> RouterResult<RouterData<F, Req, Res>>
where
    Req: std::fmt::Debug + Clone + 'static,
    Res: std::fmt::Debug + Clone + 'static,
    F: std::fmt::Debug + Clone + 'static,
    dyn api::Connector + Sync: services::api::ConnectorIntegration<F, Req, Res>,
    dyn api::ConnectorV2 + Sync:
        services::api::ConnectorIntegrationV2<F, ExternalAuthenticationFlowData, Req, Res>,
{
    // Resolve the connector implementation by name, then fetch its
    // integration object for this (flow, request, response) combination.
    let connector_data =
        api::AuthenticationConnectorData::get_connector_by_name(&authentication_connector_name)?;
    let connector_integration: services::BoxedExternalAuthenticationConnectorIntegrationInterface<
        F,
        Req,
        Res,
    > = connector_data.connector.get_connector_integration();
    let router_data = execute_connector_processing_step(
        state,
        connector_integration,
        &router_data,
        payments::CallConnectorAction::Trigger,
        None,
        None,
    )
    .await
    .to_payment_failed_response()?;
    Ok(router_data)
}
/// Resolves the authentication connector and its merchant connector account
/// for a profile.
///
/// The connector is taken from the request when provided, otherwise from the
/// first entry of the profile's configured
/// `authentication_connector_details`.
///
/// # Errors
/// Returns an unprocessable-entity error when the requested connector name is
/// invalid or when no connector is configured on the profile.
pub async fn get_authentication_connector_data(
    state: &SessionState,
    key_store: &domain::MerchantKeyStore,
    business_profile: &domain::Profile,
    authentication_connector: Option<String>,
) -> RouterResult<(
    common_enums::AuthenticationConnectors,
    payments::helpers::MerchantConnectorAccountType,
)> {
    let authentication_connector = if let Some(authentication_connector) = authentication_connector
    {
        // `ok_or_else` defers building the error (and its `format!`
        // allocation) to the failure path instead of paying it on every call.
        api_models::enums::convert_authentication_connector(&authentication_connector)
            .ok_or_else(|| errors::ApiErrorResponse::UnprocessableEntity {
                message: format!(
                    "Invalid authentication_connector found in request : {authentication_connector}",
                ),
            })?
    } else {
        let authentication_details = business_profile
            .authentication_connector_details
            .clone()
            .get_required_value("authentication_details")
            .change_context(errors::ApiErrorResponse::UnprocessableEntity {
                message: "authentication_connector_details is not available in business profile"
                    .into(),
            })
            .attach_printable("authentication_connector_details not configured by the merchant")?;
        authentication_details
            .authentication_connectors
            .first()
            .ok_or_else(|| errors::ApiErrorResponse::UnprocessableEntity {
                message: format!(
                    "No authentication_connector found for profile_id {:?}",
                    business_profile.get_id()
                ),
            })
            .attach_printable(
                "No authentication_connector found from merchant_account.authentication_details",
            )?
            .to_owned()
    };
    let profile_id = business_profile.get_id();
    let authentication_connector_mca = payments::helpers::get_merchant_connector_account(
        state,
        &business_profile.merchant_id,
        None,
        key_store,
        profile_id,
        authentication_connector.to_string().as_str(),
        None,
    )
    .await?;
    Ok((authentication_connector, authentication_connector_mca))
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/core/authentication/types.rs",
"crates/router/src/core/authentication/transformers.rs",
"crates/router/src/core/authentication/utils.rs"
],
"module": "crates/router/src/core/authentication",
"num_files": 3,
"token_count": 4964
}
|
module_-6504666828174950343
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/bin
Files: 2
</path>
<module>
// File: crates/router/src/bin/router.rs
use error_stack::ResultExt;
use router::{
configs::settings::{CmdLineConf, Settings},
core::errors::{ApplicationError, ApplicationResult},
logger,
routes::metrics,
};
/// Router binary entry point: parses CLI args, loads and validates settings,
/// initializes logging/telemetry, then runs the HTTP server until it stops.
/// Returning from `main` is always an error — a healthy server never exits.
#[tokio::main]
async fn main() -> ApplicationResult<()> {
    // get commandline config before initializing config
    let cmd_line = <CmdLineConf as clap::Parser>::parse();
    #[allow(clippy::expect_used)]
    let conf = Settings::with_config_path(cmd_line.config_path)
        .expect("Unable to construct application configuration");
    #[allow(clippy::expect_used)]
    conf.validate()
        .expect("Failed to validate router configuration");
    #[allow(clippy::print_stdout)] // The logger has not yet been initialized
    #[cfg(feature = "vergen")]
    {
        println!("Starting router (Version: {})", router_env::git_tag!());
    }
    // Logging/telemetry guard must live for the whole program lifetime.
    let _guard = router_env::setup(
        &conf.log,
        router_env::service_name!(),
        [router_env::service_name!(), "actix_server"],
    )
    .change_context(ApplicationError::ConfigurationError)?;
    logger::info!("Application started [{:?}] [{:?}]", conf.server, conf.log);
    // Spawn a thread for collecting metrics at fixed intervals
    metrics::bg_metrics_collector::spawn_metrics_collector(
        conf.log.telemetry.bg_metrics_collection_interval_in_secs,
    );
    #[allow(clippy::expect_used)]
    let server = Box::pin(router::start_server(conf))
        .await
        .expect("Failed to create the server");
    let _ = server.await;
    // Reaching this point means the server terminated; report it as an error.
    Err(error_stack::Report::from(ApplicationError::from(
        std::io::Error::other("Server shut down"),
    )))
}
// File: crates/router/src/bin/scheduler.rs
use std::{collections::HashMap, str::FromStr, sync::Arc};
use actix_web::{dev::Server, web, Scope};
use api_models::health_check::SchedulerHealthCheckResponse;
use common_utils::ext_traits::{OptionExt, StringExt};
use diesel_models::process_tracker::{self as storage, business_status};
use error_stack::ResultExt;
use router::{
configs::settings::{CmdLineConf, Settings},
core::{
errors::{self, CustomResult},
health_check::HealthCheckInterface,
},
logger, routes,
services::{self, api},
workflows,
};
use router_env::{
instrument,
tracing::{self, Instrument},
};
use scheduler::{
consumer::workflows::ProcessTrackerWorkflow, errors::ProcessTrackerError,
workflows::ProcessTrackerWorkflows, SchedulerSessionState,
};
use storage_impl::errors::ApplicationError;
use tokio::sync::{mpsc, oneshot};
// Environment variable naming which scheduler flow this process should run.
const SCHEDULER_FLOW: &str = "SCHEDULER_FLOW";
/// Scheduler binary entry point: builds app state, starts the health-check
/// web server alongside the process-tracker loop, and blocks until the
/// scheduler terminates.
#[tokio::main]
async fn main() -> CustomResult<(), ProcessTrackerError> {
    let cmd_line = <CmdLineConf as clap::Parser>::parse();
    #[allow(clippy::expect_used)]
    let conf = Settings::with_config_path(cmd_line.config_path)
        .expect("Unable to construct application configuration");
    let api_client = Box::new(
        services::ProxyClient::new(&conf.proxy)
            .change_context(ProcessTrackerError::ConfigurationError)?,
    );
    // channel for listening to redis disconnect events
    let (redis_shutdown_signal_tx, redis_shutdown_signal_rx) = oneshot::channel();
    let state = Box::pin(routes::AppState::new(
        conf,
        redis_shutdown_signal_tx,
        api_client,
    ))
    .await;
    // channel to shutdown scheduler gracefully
    let (tx, rx) = mpsc::channel(1);
    // Redis disconnects are forwarded into the shutdown channel.
    let _task_handle = tokio::spawn(
        router::receiver_for_error(redis_shutdown_signal_rx, tx.clone()).in_current_span(),
    );
    #[allow(clippy::expect_used)]
    let scheduler_flow_str =
        std::env::var(SCHEDULER_FLOW).expect("SCHEDULER_FLOW environment variable not set");
    #[allow(clippy::expect_used)]
    let scheduler_flow = scheduler::SchedulerFlow::from_str(&scheduler_flow_str)
        .expect("Unable to parse SchedulerFlow from environment variable");
    #[allow(clippy::print_stdout)] // The logger has not yet been initialized
    #[cfg(feature = "vergen")]
    {
        println!(
            "Starting {scheduler_flow} (Version: {})",
            router_env::git_tag!()
        );
    }
    // Logging guard must outlive the scheduler loop.
    let _guard = router_env::setup(
        &state.conf.log,
        &scheduler_flow_str,
        [router_env::service_name!()],
    );
    #[allow(clippy::expect_used)]
    let web_server = Box::pin(start_web_server(
        state.clone(),
        scheduler_flow_str.to_string(),
    ))
    .await
    .expect("Failed to create the server");
    // The health probe runs detached: if it stops, we only log — the
    // scheduler loop itself keeps running.
    let _task_handle = tokio::spawn(
        async move {
            let _ = web_server.await;
            logger::error!("The health check probe stopped working!");
        }
        .in_current_span(),
    );
    logger::debug!(startup_config=?state.conf);
    // Blocks until the scheduler loop terminates (shutdown or error).
    start_scheduler(&state, scheduler_flow, (tx, rx)).await?;
    logger::error!("Scheduler shut down");
    Ok(())
}
/// Builds and starts the scheduler's health-check HTTP server on the host and
/// port taken from the scheduler section of the configuration.
///
/// # Errors
/// Returns a configuration error when the scheduler section is missing from
/// the settings or when the listen address cannot be bound.
pub async fn start_web_server(
    state: routes::AppState,
    service: String,
) -> errors::ApplicationResult<Server> {
    let server = state
        .conf
        .scheduler
        .as_ref()
        // `ok_or_else` avoids allocating the error string on the happy path.
        .ok_or_else(|| {
            ApplicationError::InvalidConfigurationValueError(
                "Scheduler server is invalidly configured".into(),
            )
        })?
        .server
        .clone();
    let web_server = actix_web::HttpServer::new(move || {
        actix_web::App::new().service(Health::server(state.clone(), service.clone()))
    })
    .bind((server.host.as_str(), server.port))
    .change_context(ApplicationError::ConfigurationError)?
    .workers(server.workers)
    .run();
    // Removed the previous `let _ = web_server.handle();` — `handle()` only
    // clones a `ServerHandle`, so discarding it immediately was dead code.
    Ok(web_server)
}
/// Health-check HTTP surface for the scheduler binary.
pub struct Health;
impl Health {
    /// Builds the `health` scope: a shallow probe at the scope root and a
    /// deep readiness probe at `/ready`, with the app state and service name
    /// attached as request data.
    pub fn server(state: routes::AppState, service: String) -> Scope {
        let scope = web::scope("health")
            .app_data(web::Data::new(state))
            .app_data(web::Data::new(service));
        scope
            .service(web::resource("").route(web::get().to(health)))
            .service(web::resource("/ready").route(web::get().to(deep_health_check)))
    }
}
/// Shallow liveness probe: logs the hit and answers with a static OK body.
#[instrument(skip_all)]
pub async fn health() -> impl actix_web::Responder {
    logger::info!("Scheduler health was called");
    let body = "Scheduler health is good";
    actix_web::HttpResponse::Ok().body(body)
}
/// Deep readiness probe: runs the full health check for every tenant store
/// and returns a JSON map of tenant -> serialized check result. The first
/// tenant whose session state cannot be built or whose check fails short-
/// circuits with an error response.
#[instrument(skip_all)]
pub async fn deep_health_check(
    state: web::Data<routes::AppState>,
    service: web::Data<String>,
) -> impl actix_web::Responder {
    let mut checks = HashMap::new();
    let stores = state.stores.clone();
    let app_state = Arc::clone(&state.into_inner());
    let service_name = service.into_inner();
    for (tenant, _) in stores {
        let session_state_res = app_state.clone().get_session_state(&tenant, None, || {
            errors::ApiErrorResponse::MissingRequiredField {
                field_name: "tenant_id",
            }
            .into()
        });
        let session_state = match session_state_res {
            Ok(state) => state,
            Err(err) => {
                return api::log_and_return_error_response(err);
            }
        };
        let report = deep_health_check_func(session_state, &service_name).await;
        match report {
            Ok(response) => {
                // Serialization failures are logged and stored as an empty
                // string rather than failing the whole probe.
                checks.insert(
                    tenant,
                    serde_json::to_string(&response)
                        .map_err(|err| {
                            logger::error!(serialization_error=?err);
                        })
                        .unwrap_or_default(),
                );
            }
            Err(err) => {
                return api::log_and_return_error_response(err);
            }
        }
    }
    services::http_response_json(
        serde_json::to_string(&checks)
            .map_err(|err| {
                logger::error!(serialization_error=?err);
            })
            .unwrap_or_default(),
    )
}
/// Runs the scheduler's readiness checks (database, Redis, outgoing HTTP
/// request) against the given session state and reports each result.
///
/// # Errors
/// The first failing check aborts with a `HealthCheckError` naming the
/// failed component.
#[instrument(skip_all)]
pub async fn deep_health_check_func(
    state: routes::SessionState,
    service: &str,
) -> errors::RouterResult<SchedulerHealthCheckResponse> {
    logger::info!("{} deep health check was called", service);
    logger::debug!("Database health check begin");
    let db_status = state
        .health_check_db()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(errors::ApiErrorResponse::HealthCheckError {
                component: "Database",
                message,
            })
        })?;
    logger::debug!("Database health check end");
    logger::debug!("Redis health check begin");
    let redis_status = state
        .health_check_redis()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(errors::ApiErrorResponse::HealthCheckError {
                component: "Redis",
                message,
            })
        })?;
    // Fix: this end marker previously logged only after the outgoing-request
    // check, so a hang in that check was misattributed to Redis in the logs.
    logger::debug!("Redis health check end");
    logger::debug!("Outgoing request health check begin");
    let outgoing_req_check = state
        .health_check_outgoing()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(errors::ApiErrorResponse::HealthCheckError {
                component: "Outgoing Request",
                message,
            })
        })?;
    logger::debug!("Outgoing request health check end");
    let response = SchedulerHealthCheckResponse {
        database: db_status,
        redis: redis_status,
        outgoing_request: outgoing_req_check,
    };
    Ok(response)
}
/// Dispatcher mapping a `ProcessTracker` entry to its concrete workflow
/// implementation by runner name.
#[derive(Debug, Copy, Clone)]
pub struct WorkflowRunner;
#[async_trait::async_trait]
impl ProcessTrackerWorkflows<routes::SessionState> for WorkflowRunner {
    /// Resolves the process's `runner` field to a workflow, executes it, and
    /// invokes the matching success/error handler. When even the error
    /// handler fails, the process is finished with the global-failure
    /// business status.
    async fn trigger_workflow<'a>(
        &'a self,
        state: &'a routes::SessionState,
        process: storage::ProcessTracker,
    ) -> CustomResult<(), ProcessTrackerError> {
        let runner = process
            .runner
            .clone()
            .get_required_value("runner")
            .change_context(ProcessTrackerError::MissingRequiredField)
            .attach_printable("Missing runner field in process information")?;
        let runner: storage::ProcessTrackerRunner = runner
            .parse_enum("ProcessTrackerRunner")
            .change_context(ProcessTrackerError::UnexpectedFlow)
            .attach_printable("Failed to parse workflow runner name")?;
        // Runner -> workflow table; feature-gated workflows return an
        // `UnexpectedFlow` error when their feature is compiled out.
        let get_operation = |runner: storage::ProcessTrackerRunner| -> CustomResult<
            Box<dyn ProcessTrackerWorkflow<routes::SessionState>>,
            ProcessTrackerError,
        > {
            match runner {
                storage::ProcessTrackerRunner::PaymentsSyncWorkflow => {
                    Ok(Box::new(workflows::payment_sync::PaymentsSyncWorkflow))
                }
                storage::ProcessTrackerRunner::RefundWorkflowRouter => {
                    Ok(Box::new(workflows::refund_router::RefundWorkflowRouter))
                }
                storage::ProcessTrackerRunner::ProcessDisputeWorkflow => {
                    Ok(Box::new(workflows::process_dispute::ProcessDisputeWorkflow))
                }
                storage::ProcessTrackerRunner::DisputeListWorkflow => {
                    Ok(Box::new(workflows::dispute_list::DisputeListWorkflow))
                }
                storage::ProcessTrackerRunner::InvoiceSyncflow => {
                    Ok(Box::new(workflows::invoice_sync::InvoiceSyncWorkflow))
                }
                storage::ProcessTrackerRunner::DeleteTokenizeDataWorkflow => Ok(Box::new(
                    workflows::tokenized_data::DeleteTokenizeDataWorkflow,
                )),
                storage::ProcessTrackerRunner::ApiKeyExpiryWorkflow => {
                    #[cfg(feature = "email")]
                    {
                        Ok(Box::new(workflows::api_key_expiry::ApiKeyExpiryWorkflow))
                    }
                    #[cfg(not(feature = "email"))]
                    {
                        Err(error_stack::report!(ProcessTrackerError::UnexpectedFlow))
                            .attach_printable(
                                "Cannot run API key expiry workflow when email feature is disabled",
                            )
                    }
                }
                storage::ProcessTrackerRunner::OutgoingWebhookRetryWorkflow => Ok(Box::new(
                    workflows::outgoing_webhook_retry::OutgoingWebhookRetryWorkflow,
                )),
                storage::ProcessTrackerRunner::AttachPayoutAccountWorkflow => {
                    #[cfg(feature = "payouts")]
                    {
                        Ok(Box::new(
                            workflows::attach_payout_account_workflow::AttachPayoutAccountWorkflow,
                        ))
                    }
                    #[cfg(not(feature = "payouts"))]
                    {
                        Err(
                            error_stack::report!(ProcessTrackerError::UnexpectedFlow),
                        )
                        .attach_printable(
                            "Cannot run Stripe external account workflow when payouts feature is disabled",
                        )
                    }
                }
                storage::ProcessTrackerRunner::PaymentMethodStatusUpdateWorkflow => Ok(Box::new(
                    workflows::payment_method_status_update::PaymentMethodStatusUpdateWorkflow,
                )),
                storage::ProcessTrackerRunner::PassiveRecoveryWorkflow => {
                    Ok(Box::new(workflows::revenue_recovery::ExecutePcrWorkflow))
                }
            }
        };
        let operation = get_operation(runner)?;
        let app_state = &state.clone();
        let output = operation.execute_workflow(state, process.clone()).await;
        match output {
            Ok(_) => operation.success_handler(app_state, process).await,
            Err(error) => match operation
                .error_handler(app_state, process.clone(), error)
                .await
            {
                Ok(_) => (),
                Err(error) => {
                    logger::error!(?error, "Failed while handling error");
                    // Last resort: mark the process as globally failed so the
                    // scheduler stops picking it up.
                    let status = state
                        .get_db()
                        .as_scheduler()
                        .finish_process_with_business_status(
                            process,
                            business_status::GLOBAL_FAILURE,
                        )
                        .await;
                    if let Err(error) = status {
                        logger::error!(
                            ?error,
                            "Failed while performing database operation: {}",
                            business_status::GLOBAL_FAILURE
                        );
                    }
                }
            },
        };
        Ok(())
    }
}
/// Starts the process-tracker loop with the configured scheduler settings and
/// the [`WorkflowRunner`] dispatcher; blocks until the loop terminates.
///
/// # Errors
/// Fails immediately when the scheduler configuration section is missing.
async fn start_scheduler(
    state: &routes::AppState,
    scheduler_flow: scheduler::SchedulerFlow,
    channel: (mpsc::Sender<()>, mpsc::Receiver<()>),
) -> CustomResult<(), ProcessTrackerError> {
    let scheduler_settings = state
        .conf
        .scheduler
        .clone()
        .ok_or(ProcessTrackerError::ConfigurationError)?;
    scheduler::start_process_tracker(
        state,
        scheduler_flow,
        Arc::new(scheduler_settings),
        channel,
        WorkflowRunner {},
        // Per-tenant session state resolver used by the tracker loop.
        |state, tenant| {
            Arc::new(state.clone())
                .get_session_state(tenant, None, || ProcessTrackerError::TenantNotFound.into())
        },
    )
    .await
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/bin/router.rs",
"crates/router/src/bin/scheduler.rs"
],
"module": "crates/router/src/bin",
"num_files": 2,
"token_count": 3099
}
|
module_7817085730551998111
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/utils
Files: 9
</path>
<module>
// File: crates/router/src/utils/ext_traits.rs
pub use hyperswitch_domain_models::ext_traits::OptionExt;
use crate::core::errors::{self, CustomResult};
/// Runs a validation closure against an optional borrowed value.
pub trait ValidateCall<T, F> {
    fn validate_opt(self, func: F) -> CustomResult<(), errors::ValidationError>;
}
impl<T, F> ValidateCall<T, F> for Option<&T>
where
    F: Fn(&T) -> CustomResult<(), errors::ValidationError>,
{
    /// `None` is vacuously valid; `Some` delegates to `func`.
    fn validate_opt(self, func: F) -> CustomResult<(), errors::ValidationError> {
        self.map_or(Ok(()), func)
    }
}
// File: crates/router/src/utils/user.rs
use std::sync::Arc;
#[cfg(feature = "v1")]
use api_models::admin as admin_api;
use api_models::user as user_api;
#[cfg(feature = "v1")]
use common_enums::connector_enums;
use common_enums::UserAuthType;
#[cfg(feature = "v1")]
use common_utils::ext_traits::ValueExt;
use common_utils::{
encryption::Encryption,
errors::CustomResult,
id_type, type_name,
types::{keymanager::Identifier, user::LineageContext},
};
use diesel_models::organization::{self, OrganizationBridge};
use error_stack::ResultExt;
#[cfg(feature = "v1")]
use hyperswitch_domain_models::merchant_connector_account::MerchantConnectorAccount as DomainMerchantConnectorAccount;
#[cfg(feature = "v1")]
use masking::PeekInterface;
use masking::{ExposeInterface, Secret};
use redis_interface::RedisConnectionPool;
use router_env::{env, logger};
#[cfg(feature = "v1")]
use crate::types::AdditionalMerchantData;
use crate::{
consts::user::{REDIS_SSO_PREFIX, REDIS_SSO_TTL},
core::errors::{StorageError, UserErrors, UserResult},
routes::SessionState,
services::{
authentication::{AuthToken, UserFromToken},
authorization::roles::RoleInfo,
},
types::{
domain::{self, MerchantAccount, UserFromStorage},
transformers::ForeignFrom,
},
};
pub mod dashboard_metadata;
pub mod password;
#[cfg(feature = "dummy_connector")]
pub mod sample_data;
pub mod theme;
pub mod two_factor_auth;
impl UserFromToken {
    /// Loads the merchant account referenced by the token, translating a
    /// not-found DB error into `MerchantIdNotFound`.
    pub async fn get_merchant_account_from_db(
        &self,
        state: SessionState,
    ) -> UserResult<MerchantAccount> {
        let key_manager_state = &(&state).into();
        // The merchant key store is fetched first; the account lookup below
        // requires it.
        let key_store = state
            .store
            .get_merchant_key_store_by_merchant_id(
                key_manager_state,
                &self.merchant_id,
                &state.store.get_master_key().to_vec().into(),
            )
            .await
            .map_err(|e| {
                if e.current_context().is_db_not_found() {
                    e.change_context(UserErrors::MerchantIdNotFound)
                } else {
                    e.change_context(UserErrors::InternalServerError)
                }
            })?;
        let merchant_account = state
            .store
            .find_merchant_account_by_merchant_id(key_manager_state, &self.merchant_id, &key_store)
            .await
            .map_err(|e| {
                if e.current_context().is_db_not_found() {
                    e.change_context(UserErrors::MerchantIdNotFound)
                } else {
                    e.change_context(UserErrors::InternalServerError)
                }
            })?;
        Ok(merchant_account)
    }
    /// Fetches the token's user record from the global store.
    pub async fn get_user_from_db(&self, state: &SessionState) -> UserResult<UserFromStorage> {
        let user = state
            .global_store
            .find_user_by_id(&self.user_id)
            .await
            .change_context(UserErrors::InternalServerError)?;
        Ok(user.into())
    }
    /// Resolves the role referenced by the token, falling back to the session
    /// tenant when the token itself carries no tenant id.
    pub async fn get_role_info_from_db(&self, state: &SessionState) -> UserResult<RoleInfo> {
        RoleInfo::from_role_id_org_id_tenant_id(
            state,
            &self.role_id,
            &self.org_id,
            self.tenant_id.as_ref().unwrap_or(&state.tenant.tenant_id),
        )
        .await
        .change_context(UserErrors::InternalServerError)
    }
}
/// Creates a signed JWT auth token carrying the given user/merchant/role
/// attributes and wraps it in a `Secret` so it is not logged accidentally.
pub async fn generate_jwt_auth_token_with_attributes(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    role_id: String,
    profile_id: id_type::ProfileId,
    tenant_id: Option<id_type::TenantId>,
) -> UserResult<Secret<String>> {
    AuthToken::new_token(
        user_id,
        merchant_id,
        role_id,
        &state.conf,
        org_id,
        profile_id,
        tenant_id,
    )
    .await
    .map(Secret::new)
}
/// Days remaining before email verification is enforced for `user`; always
/// `Ok(None)` when the `email` feature is compiled out (in which case both
/// parameters are unused, hence the allow).
#[allow(unused_variables)]
pub fn get_verification_days_left(
    state: &SessionState,
    user: &UserFromStorage,
) -> UserResult<Option<i64>> {
    #[cfg(feature = "email")]
    return user.get_verification_days_left(state);
    #[cfg(not(feature = "email"))]
    return Ok(None);
}
/// Looks up a user by email in the global store, passing any storage error
/// through to the caller unchanged.
pub async fn get_user_from_db_by_email(
    state: &SessionState,
    email: domain::UserEmail,
) -> CustomResult<UserFromStorage, StorageError> {
    state
        .global_store
        .find_user_by_email(&email)
        .await
        .map(UserFromStorage::from)
}
/// Returns a Redis connection from the global (tenant-independent) store.
pub fn get_redis_connection_for_global_tenant(
    state: &SessionState,
) -> UserResult<Arc<RedisConnectionPool>> {
    state
        .global_store
        .get_redis_conn()
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")
}
impl ForeignFrom<&user_api::AuthConfig> for UserAuthType {
    /// Collapses a full auth configuration down to its discriminating type
    /// tag, ignoring any variant payload.
    fn foreign_from(from: &user_api::AuthConfig) -> Self {
        match from {
            user_api::AuthConfig::OpenIdConnect { .. } => Self::OpenIdConnect,
            user_api::AuthConfig::Password => Self::Password,
            user_api::AuthConfig::MagicLink => Self::MagicLink,
        }
    }
}
/// Splits an auth config into its DB representation: an encrypted private
/// half and a plaintext public half.
///
/// For `OpenIdConnect` the private config is serialized to JSON and encrypted
/// under `encryption_key` (keyed by the auth-method `id`), while the public
/// config is stored as plain JSON. `Password` and `MagicLink` carry no
/// config, so both halves are `None`.
pub async fn construct_public_and_private_db_configs(
    state: &SessionState,
    auth_config: &user_api::AuthConfig,
    encryption_key: &[u8],
    id: String,
) -> UserResult<(Option<Encryption>, Option<serde_json::Value>)> {
    match auth_config {
        user_api::AuthConfig::OpenIdConnect {
            private_config,
            public_config,
        } => {
            let private_config_value = serde_json::to_value(private_config.clone())
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to convert auth config to json")?;
            let encrypted_config =
                domain::types::crypto_operation::<serde_json::Value, masking::WithType>(
                    &state.into(),
                    type_name!(diesel_models::user::User),
                    domain::types::CryptoOperation::Encrypt(private_config_value.into()),
                    Identifier::UserAuth(id),
                    encryption_key,
                )
                .await
                .and_then(|val| val.try_into_operation())
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Failed to encrypt auth config")?;
            Ok((
                Some(encrypted_config.into()),
                Some(
                    serde_json::to_value(public_config.clone())
                        .change_context(UserErrors::InternalServerError)
                        .attach_printable("Failed to convert auth config to json")?,
                ),
            ))
        }
        user_api::AuthConfig::Password | user_api::AuthConfig::MagicLink => Ok((None, None)),
    }
}
/// Deserializes `value` into `T`, mapping any serde failure to an internal
/// server error tagged with `type_name` for diagnostics.
pub fn parse_value<T>(value: serde_json::Value, type_name: &str) -> UserResult<T>
where
    T: serde::de::DeserializeOwned,
{
    let parsed: Result<T, _> = serde_json::from_value(value);
    parsed
        .change_context(UserErrors::InternalServerError)
        .attach_printable(format!("Unable to parse {type_name}"))
}
/// Decrypts a stored OIDC private config using the user-auth-methods
/// encryption key from configuration.
///
/// # Errors
/// Fails when the hex-encoded key cannot be decoded, decryption fails, the
/// encrypted config is absent, or the decrypted JSON does not parse into
/// `OpenIdConnectPrivateConfig`.
pub async fn decrypt_oidc_private_config(
    state: &SessionState,
    encrypted_config: Option<Encryption>,
    id: String,
) -> UserResult<user_api::OpenIdConnectPrivateConfig> {
    // The data-encryption key is stored hex-encoded in configuration.
    let user_auth_key = hex::decode(
        state
            .conf
            .user_auth_methods
            .get_inner()
            .encryption_key
            .clone()
            .expose(),
    )
    .change_context(UserErrors::InternalServerError)
    .attach_printable("Failed to decode DEK")?;
    let private_config = domain::types::crypto_operation::<serde_json::Value, masking::WithType>(
        &state.into(),
        type_name!(diesel_models::user::User),
        domain::types::CryptoOperation::DecryptOptional(encrypted_config),
        Identifier::UserAuth(id),
        &user_auth_key,
    )
    .await
    .and_then(|val| val.try_into_optionaloperation())
    .change_context(UserErrors::InternalServerError)
    .attach_printable("Failed to decrypt private config")?
    .ok_or(UserErrors::InternalServerError)
    .attach_printable("Private config not found")?
    .into_inner()
    .expose();
    serde_json::from_value::<user_api::OpenIdConnectPrivateConfig>(private_config)
        .change_context(UserErrors::InternalServerError)
        .attach_printable("unable to parse OpenIdConnectPrivateConfig")
}
/// Stores `sso_id` in Redis keyed by the OIDC state, with the standard SSO
/// TTL so stale flows expire automatically.
pub async fn set_sso_id_in_redis(
    state: &SessionState,
    oidc_state: Secret<String>,
    sso_id: String,
) -> UserResult<()> {
    let connection = get_redis_connection_for_global_tenant(state)?;
    let key = get_oidc_key(&oidc_state.expose());
    connection
        .set_key_with_expiry(&key.into(), sso_id, REDIS_SSO_TTL)
        .await
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Failed to set sso id in redis")
}
/// Fetches the sso id stored for the given OIDC state; a missing key means
/// the state is invalid or expired and surfaces as `SSOFailed`.
pub async fn get_sso_id_from_redis(
    state: &SessionState,
    oidc_state: Secret<String>,
) -> UserResult<String> {
    let connection = get_redis_connection_for_global_tenant(state)?;
    let key = get_oidc_key(&oidc_state.expose());
    connection
        .get_key::<Option<String>>(&key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Failed to get sso id from redis")?
        .ok_or(UserErrors::SSOFailed)
        .attach_printable("Cannot find oidc state in redis. Oidc state invalid or expired")
}
/// Builds the Redis key used to store the SSO id for a given OIDC state.
fn get_oidc_key(oidc_state: &str) -> String {
    let mut key = String::from(REDIS_SSO_PREFIX);
    key.push_str(oidc_state);
    key
}
/// Returns the OIDC redirect URL for the given SSO provider.
pub fn get_oidc_sso_redirect_url(state: &SessionState, provider: &str) -> String {
    let base_url = &state.conf.user.base_url;
    format!("{base_url}/redirect/oidc/{provider}")
}
/// Returns `true` only for SSO-based authentication types (currently OpenID Connect).
///
/// The match is kept exhaustive so that adding a new `UserAuthType` variant forces a
/// deliberate decision here.
pub fn is_sso_auth_type(auth_type: UserAuthType) -> bool {
    match auth_type {
        UserAuthType::Password | UserAuthType::MagicLink => false,
        UserAuthType::OpenIdConnect => true,
    }
}
#[cfg(feature = "v1")]
/// Builds an admin `MerchantAccountCreate` request for a new merchant under `org`.
///
/// The merchant name is used both to derive the merchant id (environment-specific:
/// validated in production, timestamp-generated elsewhere) and, validated as a company
/// name, as the merchant's display name. All other fields are left `None` so downstream
/// defaults apply.
pub fn create_merchant_account_request_for_org(
    req: user_api::UserOrgMerchantCreateRequest,
    org: organization::Organization,
    product_type: common_enums::MerchantProductType,
) -> UserResult<api_models::admin::MerchantAccountCreate> {
    let merchant_id = generate_env_specific_merchant_id(req.merchant_name.clone().expose())?;
    let company_name = domain::UserCompanyName::new(req.merchant_name.expose())?;
    Ok(api_models::admin::MerchantAccountCreate {
        merchant_id,
        metadata: None,
        locker_id: None,
        return_url: None,
        merchant_name: Some(Secret::new(company_name.get_secret())),
        webhook_details: None,
        publishable_key: None,
        organization_id: Some(org.get_organization_id()),
        merchant_details: None,
        routing_algorithm: None,
        parent_merchant_id: None,
        sub_merchants_enabled: None,
        frm_routing_algorithm: None,
        #[cfg(feature = "payouts")]
        payout_routing_algorithm: None,
        primary_business_details: None,
        payment_response_hash_key: None,
        enable_payment_response_hash: None,
        redirect_to_merchant_with_http_post: None,
        pm_collect_link_config: None,
        product_type: Some(product_type),
        merchant_account_type: None,
    })
}
/// Verifies that `required_auth_type` is permitted for the email's domain.
///
/// A domain with no configured authentication methods accepts any auth type; otherwise
/// at least one configured method must match, or the operation is rejected.
pub async fn validate_email_domain_auth_type_using_db(
    state: &SessionState,
    email: &domain::UserEmail,
    required_auth_type: UserAuthType,
) -> UserResult<()> {
    let domain = email.extract_domain()?;
    let user_auth_methods = state
        .store
        .list_user_authentication_methods_for_email_domain(domain)
        .await
        .change_context(UserErrors::InternalServerError)?;
    let is_allowed = user_auth_methods.is_empty()
        || user_auth_methods
            .iter()
            .any(|auth_method| auth_method.auth_type == required_auth_type);
    if is_allowed {
        Ok(())
    } else {
        Err(UserErrors::InvalidUserAuthMethodOperation.into())
    }
}
/// Fires off a background task that persists the user's latest lineage context.
///
/// Best-effort: the task's outcome is only logged and never propagated, so request
/// handling is not blocked on this write.
pub fn spawn_async_lineage_context_update_to_db(
    state: &SessionState,
    user_id: &str,
    lineage_context: LineageContext,
) {
    let state = state.clone();
    // `lineage_context` is received by value and already owned; the previous
    // `lineage_context.clone()` here was a redundant copy before the move.
    let user_id = user_id.to_owned();
    tokio::spawn(async move {
        let update_result = state
            .global_store
            .update_user_by_user_id(
                &user_id,
                diesel_models::user::UserUpdate::LineageContextUpdate { lineage_context },
            )
            .await;
        match update_result {
            Ok(_) => {
                logger::debug!("Successfully updated lineage context for user {}", user_id);
            }
            Err(e) => {
                logger::error!(
                    "Failed to update lineage context for user {}: {:?}",
                    user_id,
                    e
                );
            }
        }
    });
}
/// Generates a `MerchantId` appropriate for the current environment.
///
/// In production the provided value must pass `domain::MerchantId` validation and is
/// converted into the typed id; in all other environments a unix-timestamp-based id is
/// generated and `value` is ignored.
pub fn generate_env_specific_merchant_id(value: String) -> UserResult<id_type::MerchantId> {
    if matches!(env::which(), env::Env::Production) {
        // Production: the human-provided value must be a valid merchant id.
        let raw_id = domain::MerchantId::new(value)?;
        Ok(id_type::MerchantId::try_from(raw_id)?)
    } else {
        // Non-production: auto-generate a unique id; `value` is not used here.
        Ok(id_type::MerchantId::new_from_unix_timestamp())
    }
}
/// Picks the applicable base URL: the tenant's control-center URL when multitenancy is
/// enabled, otherwise the globally configured user base URL.
pub fn get_base_url(state: &SessionState) -> &str {
    if state.conf.multitenancy.enabled {
        &state.tenant.user.control_center_url
    } else {
        &state.conf.user.base_url
    }
}
#[cfg(feature = "v1")]
/// Builds a `MerchantConnectorCreate` request by cloning an existing connector account
/// (`source_mca`), optionally re-targeting it at a different profile and label.
///
/// Every serialized/encrypted payload on the source account (payment methods, FRM
/// configs, webhook details, wallet details, additional merchant data) is deserialized
/// back into its API representation; any malformed stored value surfaces as
/// `UserErrors::InternalServerError`.
pub async fn build_cloned_connector_create_request(
    source_mca: DomainMerchantConnectorAccount,
    destination_profile_id: Option<id_type::ProfileId>,
    destination_connector_label: Option<String>,
) -> UserResult<admin_api::MerchantConnectorCreate> {
    // The stored connector name must map to a known `Connector` variant.
    let source_mca_name = source_mca
        .connector_name
        .parse::<connector_enums::Connector>()
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Invalid connector name received")?;
    // Re-parse the payment-method configuration from its stored secret JSON values.
    let payment_methods_enabled = source_mca
        .payment_methods_enabled
        .clone()
        .map(|data| {
            let val = data.into_iter().map(|secret| secret.expose()).collect();
            serde_json::Value::Array(val)
                .parse_value("PaymentMethods")
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Unable to deserialize PaymentMethods")
        })
        .transpose()?;
    // Fraud-and-risk-management configs, parsed one per stored secret entry.
    let frm_configs = source_mca
        .frm_configs
        .as_ref()
        .map(|configs_vec| {
            configs_vec
                .iter()
                .map(|config_secret| {
                    config_secret
                        .peek()
                        .clone()
                        .parse_value("FrmConfigs")
                        .change_context(UserErrors::InternalServerError)
                        .attach_printable("Unable to deserialize FrmConfigs")
                })
                .collect::<Result<Vec<_>, _>>()
        })
        .transpose()?;
    let connector_webhook_details = source_mca
        .connector_webhook_details
        .map(|webhook_details| {
            serde_json::Value::parse_value(
                webhook_details.expose(),
                "MerchantConnectorWebhookDetails",
            )
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Unable to deserialize connector_webhook_details")
        })
        .transpose()?;
    let connector_wallets_details = source_mca
        .connector_wallets_details
        .map(|secret_value| {
            secret_value
                .into_inner()
                .expose()
                .parse_value::<admin_api::ConnectorWalletDetails>("ConnectorWalletDetails")
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Unable to parse ConnectorWalletDetails from Value")
        })
        .transpose()?;
    let additional_merchant_data = source_mca
        .additional_merchant_data
        .map(|secret_value| {
            secret_value
                .into_inner()
                .expose()
                .parse_value::<AdditionalMerchantData>("AdditionalMerchantData")
                .change_context(UserErrors::InternalServerError)
                .attach_printable("Unable to parse AdditionalMerchantData from Value")
        })
        .transpose()?
        .map(admin_api::AdditionalMerchantData::foreign_from);
    Ok(admin_api::MerchantConnectorCreate {
        connector_type: source_mca.connector_type,
        connector_name: source_mca_name,
        // Prefer the caller-supplied label; fall back to the source account's label.
        connector_label: destination_connector_label.or(source_mca.connector_label.clone()),
        // A fresh id is assigned to the cloned connector downstream.
        merchant_connector_id: None,
        connector_account_details: Some(source_mca.connector_account_details.clone().into_inner()),
        test_mode: source_mca.test_mode,
        disabled: source_mca.disabled,
        payment_methods_enabled,
        metadata: source_mca.metadata,
        business_country: source_mca.business_country,
        business_label: source_mca.business_label.clone(),
        business_sub_label: source_mca.business_sub_label.clone(),
        frm_configs,
        connector_webhook_details,
        profile_id: destination_profile_id,
        pm_auth_config: source_mca.pm_auth_config.clone(),
        connector_wallets_details,
        status: Some(source_mca.status),
        additional_merchant_data,
    })
}
// File: crates/router/src/utils/user_role.rs
use std::{
cmp,
collections::{HashMap, HashSet},
};
use api_models::user_role::role as role_api;
use common_enums::{EntityType, ParentGroup, PermissionGroup};
use common_utils::id_type;
use diesel_models::{
enums::{UserRoleVersion, UserStatus},
role::ListRolesByEntityPayload,
user_role::{UserRole, UserRoleUpdate},
};
use error_stack::{report, Report, ResultExt};
use router_env::logger;
use storage_impl::errors::StorageError;
use strum::IntoEnumIterator;
use crate::{
consts,
core::errors::{UserErrors, UserResult},
db::{
errors::StorageErrorExt,
user_role::{ListUserRolesByOrgIdPayload, ListUserRolesByUserIdPayload},
},
routes::SessionState,
services::authorization::{
self as authz,
permission_groups::{ParentGroupExt, PermissionGroupExt},
permissions, roles,
},
types::domain,
};
/// Validates a custom role's permission groups: they must be non-empty, must not
/// include the internal-management group, and must contain no duplicates.
pub fn validate_role_groups(groups: &[PermissionGroup]) -> UserResult<()> {
    if groups.is_empty() {
        return Err(report!(UserErrors::InvalidRoleOperation))
            .attach_printable("Role groups cannot be empty");
    }
    if groups.contains(&PermissionGroup::InternalManage) {
        return Err(report!(UserErrors::InvalidRoleOperation))
            .attach_printable("Invalid groups present in the custom role");
    }
    let distinct: HashSet<_> = groups.iter().copied().collect();
    if distinct.len() != groups.len() {
        return Err(report!(UserErrors::InvalidRoleOperation))
            .attach_printable("Duplicate permission group found");
    }
    Ok(())
}
/// Checks that `role_name` is not already taken, either by a predefined role or by a
/// custom role visible at the requested entity level.
///
/// The custom-role search scope follows `entity_type`: tenant/organization names are
/// checked organization-wide, merchant names within the merchant, and profile names
/// within the merchant+profile pair. A storage "not found" means no conflict.
pub async fn validate_role_name(
    state: &SessionState,
    role_name: &domain::RoleName,
    merchant_id: &id_type::MerchantId,
    org_id: &id_type::OrganizationId,
    tenant_id: &id_type::TenantId,
    profile_id: &id_type::ProfileId,
    entity_type: &EntityType,
) -> UserResult<()> {
    let role_name_str = role_name.clone().get_role_name();
    let is_present_in_predefined_roles = roles::predefined_roles::PREDEFINED_ROLES
        .iter()
        .any(|(_, role_info)| role_info.get_role_name() == role_name_str);
    // Map the entity level onto the storage-layer listing filter.
    let entity_type_for_role = match entity_type {
        EntityType::Tenant | EntityType::Organization => ListRolesByEntityPayload::Organization,
        EntityType::Merchant => ListRolesByEntityPayload::Merchant(merchant_id.to_owned()),
        EntityType::Profile => {
            ListRolesByEntityPayload::Profile(merchant_id.to_owned(), profile_id.to_owned())
        }
    };
    let is_present_in_custom_role = match state
        .global_store
        .generic_list_roles_by_entity_type(
            entity_type_for_role,
            false,
            tenant_id.to_owned(),
            org_id.to_owned(),
        )
        .await
    {
        Ok(roles_list) => roles_list
            .iter()
            .any(|role| role.role_name == role_name_str),
        Err(e) => {
            // No custom roles stored at this scope is a valid "no conflict" outcome.
            if e.current_context().is_db_not_found() {
                false
            } else {
                return Err(UserErrors::InternalServerError.into());
            }
        }
    };
    if is_present_in_predefined_roles || is_present_in_custom_role {
        return Err(UserErrors::RoleNameAlreadyExists.into());
    }
    Ok(())
}
/// Caches the role info for a user role, returning whether caching succeeded.
///
/// Returns `false` when the role has no `org_id` or when the cache write fails
/// (failures are logged, never propagated).
pub async fn set_role_info_in_cache_by_user_role(
    state: &SessionState,
    user_role: &UserRole,
) -> bool {
    match user_role.org_id.as_ref() {
        Some(org_id) => set_role_info_in_cache_if_required(
            state,
            user_role.role_id.as_str(),
            org_id,
            &user_role.tenant_id,
        )
        .await
        .map_err(|e| logger::error!("Error setting permissions in cache {:?}", e))
        .is_ok(),
        None => false,
    }
}
pub async fn set_role_info_in_cache_by_role_id_org_id(
state: &SessionState,
role_id: &str,
org_id: &id_type::OrganizationId,
tenant_id: &id_type::TenantId,
) -> bool {
set_role_info_in_cache_if_required(state, role_id, org_id, tenant_id)
.await
.map_err(|e| logger::error!("Error setting permissions in cache {:?}", e))
.is_ok()
}
/// Caches the role's permission info unless `role_id` refers to a predefined role
/// (predefined roles are compiled in and never need caching).
///
/// The cache entry's TTL matches the JWT token lifetime, so cached permissions live at
/// most as long as one token.
pub async fn set_role_info_in_cache_if_required(
    state: &SessionState,
    role_id: &str,
    org_id: &id_type::OrganizationId,
    tenant_id: &id_type::TenantId,
) -> UserResult<()> {
    // Predefined roles are static; nothing to cache.
    if roles::predefined_roles::PREDEFINED_ROLES.contains_key(role_id) {
        return Ok(());
    }
    let role_info =
        roles::RoleInfo::from_role_id_org_id_tenant_id(state, role_id, org_id, tenant_id)
            .await
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Error getting role_info from role_id")?;
    authz::set_role_info_in_cache(
        state,
        role_id,
        &role_info,
        // Cache expiry mirrors the JWT validity window.
        i64::try_from(consts::JWT_TOKEN_TIME_IN_SECS)
            .change_context(UserErrors::InternalServerError)?,
    )
    .await
    .change_context(UserErrors::InternalServerError)
    .attach_printable("Error setting permissions in redis")
}
/// Applies the same `UserRoleUpdate` to both the V1 and V2 user-role records for the
/// given user and lineage, returning each result separately.
///
/// Both updates are always attempted; a failure of one (e.g. the record not existing in
/// that version) does not stop the other. The caller decides how to reconcile the pair.
/// Failures are logged here in addition to being returned.
pub async fn update_v1_and_v2_user_roles_in_db(
    state: &SessionState,
    user_id: &str,
    tenant_id: &id_type::TenantId,
    org_id: &id_type::OrganizationId,
    merchant_id: Option<&id_type::MerchantId>,
    profile_id: Option<&id_type::ProfileId>,
    update: UserRoleUpdate,
) -> (
    Result<UserRole, Report<StorageError>>,
    Result<UserRole, Report<StorageError>>,
) {
    let updated_v1_role = state
        .global_store
        .update_user_role_by_user_id_and_lineage(
            user_id,
            tenant_id,
            org_id,
            merchant_id,
            profile_id,
            update.clone(),
            UserRoleVersion::V1,
        )
        .await
        .map_err(|e| {
            logger::error!("Error updating user_role {e:?}");
            e
        });
    let updated_v2_role = state
        .global_store
        .update_user_role_by_user_id_and_lineage(
            user_id,
            tenant_id,
            org_id,
            merchant_id,
            profile_id,
            update,
            UserRoleVersion::V2,
        )
        .await
        .map_err(|e| {
            logger::error!("Error updating user_role {e:?}");
            e
        });
    (updated_v1_role, updated_v2_role)
}
/// Resolves one organization id for the given user role.
///
/// A tenant-level role spans organizations, so the first org returned by the
/// merchant/org listing (limit 1) is used; every other level carries its own `org_id`
/// on the role record.
pub async fn get_single_org_id(
    state: &SessionState,
    user_role: &UserRole,
) -> UserResult<id_type::OrganizationId> {
    let (_, entity_type) = user_role
        .get_entity_id_and_type()
        .ok_or(UserErrors::InternalServerError)?;
    match entity_type {
        EntityType::Tenant => Ok(state
            .store
            .list_merchant_and_org_ids(&state.into(), 1, None)
            .await
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Failed to get merchants list for org")?
            .pop()
            .ok_or(UserErrors::InternalServerError)
            .attach_printable("No merchants to get merchant or org id")?
            .1),
        EntityType::Organization | EntityType::Merchant | EntityType::Profile => user_role
            .org_id
            .clone()
            .ok_or(UserErrors::InternalServerError)
            .attach_printable("Org_id not found"),
    }
}
/// Resolves one merchant id for the given user role within `org_id`.
///
/// Tenant/org-level roles take the first merchant account listed for the organization;
/// merchant/profile-level roles carry their own `merchant_id` on the role record.
pub async fn get_single_merchant_id(
    state: &SessionState,
    user_role: &UserRole,
    org_id: &id_type::OrganizationId,
) -> UserResult<id_type::MerchantId> {
    let (_, entity_type) = user_role
        .get_entity_id_and_type()
        .ok_or(UserErrors::InternalServerError)?;
    match entity_type {
        EntityType::Tenant | EntityType::Organization => Ok(state
            .store
            .list_merchant_accounts_by_organization_id(&state.into(), org_id)
            .await
            .to_not_found_response(UserErrors::InvalidRoleOperationWithMessage(
                "Invalid Org Id".to_string(),
            ))?
            .first()
            .ok_or(UserErrors::InternalServerError)
            .attach_printable("No merchants found for org_id")?
            .get_id()
            .clone()),
        EntityType::Merchant | EntityType::Profile => user_role
            .merchant_id
            .clone()
            .ok_or(UserErrors::InternalServerError)
            .attach_printable("merchant_id not found"),
    }
}
/// Resolves one profile id for the given user role within `merchant_id`.
///
/// Roles above profile level take the last profile returned for the merchant
/// (`pop()` of the listing); profile-level roles carry their own `profile_id`.
pub async fn get_single_profile_id(
    state: &SessionState,
    user_role: &UserRole,
    merchant_id: &id_type::MerchantId,
) -> UserResult<id_type::ProfileId> {
    let (_, entity_type) = user_role
        .get_entity_id_and_type()
        .ok_or(UserErrors::InternalServerError)?;
    match entity_type {
        EntityType::Tenant | EntityType::Organization | EntityType::Merchant => {
            // Profiles are encrypted at rest; the merchant key store is needed to list them.
            let key_store = state
                .store
                .get_merchant_key_store_by_merchant_id(
                    &state.into(),
                    merchant_id,
                    &state.store.get_master_key().to_vec().into(),
                )
                .await
                .change_context(UserErrors::InternalServerError)?;
            Ok(state
                .store
                .list_profile_by_merchant_id(&state.into(), &key_store, merchant_id)
                .await
                .change_context(UserErrors::InternalServerError)?
                .pop()
                .ok_or(UserErrors::InternalServerError)?
                .get_id()
                .to_owned())
        }
        EntityType::Profile => user_role
            .profile_id
            .clone()
            .ok_or(UserErrors::InternalServerError)
            .attach_printable("profile_id not found"),
    }
}
pub async fn get_lineage_for_user_id_and_entity_for_accepting_invite(
state: &SessionState,
user_id: &str,
tenant_id: &id_type::TenantId,
entity_id: String,
entity_type: EntityType,
) -> UserResult<
Option<(
id_type::OrganizationId,
Option<id_type::MerchantId>,
Option<id_type::ProfileId>,
)>,
> {
match entity_type {
EntityType::Tenant => Err(UserErrors::InvalidRoleOperationWithMessage(
"Tenant roles are not allowed for this operation".to_string(),
)
.into()),
EntityType::Organization => {
let Ok(org_id) =
id_type::OrganizationId::try_from(std::borrow::Cow::from(entity_id.clone()))
else {
return Ok(None);
};
let user_roles = state
.global_store
.list_user_roles_by_user_id(ListUserRolesByUserIdPayload {
user_id,
tenant_id,
org_id: Some(&org_id),
merchant_id: None,
profile_id: None,
entity_id: None,
version: None,
status: Some(UserStatus::InvitationSent),
limit: None,
})
.await
.change_context(UserErrors::InternalServerError)?
.into_iter()
.collect::<HashSet<_>>();
if user_roles.len() > 1 {
return Ok(None);
}
if let Some(user_role) = user_roles.into_iter().next() {
let (_entity_id, entity_type) = user_role
.get_entity_id_and_type()
.ok_or(UserErrors::InternalServerError)?;
if entity_type != EntityType::Organization {
return Ok(None);
}
return Ok(Some((
user_role.org_id.ok_or(UserErrors::InternalServerError)?,
None,
None,
)));
}
Ok(None)
}
EntityType::Merchant => {
let Ok(merchant_id) = id_type::MerchantId::wrap(entity_id) else {
return Ok(None);
};
let user_roles = state
.global_store
.list_user_roles_by_user_id(ListUserRolesByUserIdPayload {
user_id,
tenant_id,
org_id: None,
merchant_id: Some(&merchant_id),
profile_id: None,
entity_id: None,
version: None,
status: Some(UserStatus::InvitationSent),
limit: None,
})
.await
.change_context(UserErrors::InternalServerError)?
.into_iter()
.collect::<HashSet<_>>();
if user_roles.len() > 1 {
return Ok(None);
}
if let Some(user_role) = user_roles.into_iter().next() {
let (_entity_id, entity_type) = user_role
.get_entity_id_and_type()
.ok_or(UserErrors::InternalServerError)?;
if entity_type != EntityType::Merchant {
return Ok(None);
}
return Ok(Some((
user_role.org_id.ok_or(UserErrors::InternalServerError)?,
Some(merchant_id),
None,
)));
}
Ok(None)
}
EntityType::Profile => {
let Ok(profile_id) = id_type::ProfileId::try_from(std::borrow::Cow::from(entity_id))
else {
return Ok(None);
};
let user_roles = state
.global_store
.list_user_roles_by_user_id(ListUserRolesByUserIdPayload {
user_id,
tenant_id: &state.tenant.tenant_id,
org_id: None,
merchant_id: None,
profile_id: Some(&profile_id),
entity_id: None,
version: None,
status: Some(UserStatus::InvitationSent),
limit: None,
})
.await
.change_context(UserErrors::InternalServerError)?
.into_iter()
.collect::<HashSet<_>>();
if user_roles.len() > 1 {
return Ok(None);
}
if let Some(user_role) = user_roles.into_iter().next() {
let (_entity_id, entity_type) = user_role
.get_entity_id_and_type()
.ok_or(UserErrors::InternalServerError)?;
if entity_type != EntityType::Profile {
return Ok(None);
}
return Ok(Some((
user_role.org_id.ok_or(UserErrors::InternalServerError)?,
Some(
user_role
.merchant_id
.ok_or(UserErrors::InternalServerError)?,
),
Some(profile_id),
)));
}
Ok(None)
}
}
}
/// Resolves one `(merchant_id, profile_id)` pair for the user role by walking the
/// lineage top-down: org, then merchant within that org, then profile within that
/// merchant.
pub async fn get_single_merchant_id_and_profile_id(
    state: &SessionState,
    user_role: &UserRole,
) -> UserResult<(id_type::MerchantId, id_type::ProfileId)> {
    let org = get_single_org_id(state, user_role).await?;
    let merchant = get_single_merchant_id(state, user_role, &org).await?;
    let profile = get_single_profile_id(state, user_role, &merchant).await?;
    Ok((merchant, profile))
}
/// Lists the org's user roles matching `payload`, keeping only roles whose entity type
/// matches `request_entity_type` (all roles when no filter is given). Roles whose
/// entity type cannot be determined are dropped.
pub async fn fetch_user_roles_by_payload(
    state: &SessionState,
    payload: ListUserRolesByOrgIdPayload<'_>,
    request_entity_type: Option<EntityType>,
) -> UserResult<HashSet<UserRole>> {
    let all_roles = state
        .global_store
        .list_user_roles_by_org_id(payload)
        .await
        .change_context(UserErrors::InternalServerError)?;
    let filtered = all_roles
        .into_iter()
        .filter_map(|user_role| {
            let (_entity_id, entity_type) = user_role.get_entity_id_and_type()?;
            match request_entity_type {
                Some(requested) if requested != entity_type => None,
                _ => Some(user_role),
            }
        })
        .collect::<HashSet<_>>();
    Ok(filtered)
}
/// Returns the lower of the user's entity level and the requested filter level.
///
/// A user may only request data at or below their own level; asking for a higher level
/// is an invalid role operation.
pub fn get_min_entity(
    user_entity: EntityType,
    filter_entity: Option<EntityType>,
) -> UserResult<EntityType> {
    match filter_entity {
        None => Ok(user_entity),
        Some(filter_entity) if user_entity < filter_entity => {
            Err(report!(UserErrors::InvalidRoleOperation)).attach_printable(format!(
                "{user_entity} level user requesting data for {filter_entity:?} level",
            ))
        }
        Some(filter_entity) => Ok(cmp::min(user_entity, filter_entity)),
    }
}
/// Expands parent-group requests into concrete permission groups.
///
/// Each request must carry at least one scope, and every scope must be available for
/// that parent group; otherwise the whole conversion fails with
/// `UserErrors::InvalidRoleOperation`.
pub fn parent_group_info_request_to_permission_groups(
    parent_groups: &[role_api::ParentGroupInfoRequest],
) -> Result<Vec<PermissionGroup>, UserErrors> {
    let mut permission_groups = Vec::new();
    for parent_group in parent_groups {
        let scopes = &parent_group.scopes;
        if scopes.is_empty() {
            return Err(UserErrors::InvalidRoleOperation);
        }
        let available_scopes = parent_group.name.get_available_scopes();
        // Reject any scope not offered by this parent group.
        if scopes.iter().any(|scope| !available_scopes.contains(scope)) {
            return Err(UserErrors::InvalidRoleOperation);
        }
        permission_groups.extend(PermissionGroup::iter().filter(|group| {
            group.parent() == parent_group.name && scopes.contains(&group.scope())
        }));
    }
    Ok(permission_groups)
}
/// Groups flat permission groups by their parent group, collecting the distinct scopes
/// per parent, and pairs each parent with its entity-type-filtered resources.
///
/// Parents whose resources are entirely filtered out for `entity_type` are omitted.
/// NOTE(review): scope order is not preserved — `HashSet` iteration order is arbitrary;
/// confirm callers do not rely on scope ordering.
pub fn permission_groups_to_parent_group_info(
    permission_groups: &[PermissionGroup],
    entity_type: EntityType,
) -> Vec<role_api::ParentGroupInfo> {
    // Bucket scopes under their parent group.
    let parent_groups_map: HashMap<ParentGroup, Vec<common_enums::PermissionScope>> =
        permission_groups
            .iter()
            .fold(HashMap::new(), |mut acc, group| {
                let parent = group.parent();
                let scope = group.scope();
                acc.entry(parent).or_default().push(scope);
                acc
            });
    parent_groups_map
        .into_iter()
        .filter_map(|(name, scopes)| {
            // Deduplicate scopes accumulated from multiple groups of the same parent.
            let unique_scopes = scopes
                .into_iter()
                .collect::<HashSet<_>>()
                .into_iter()
                .collect();
            let filtered_resources =
                permissions::filter_resources_by_entity_type(name.resources(), entity_type)?;
            Some(role_api::ParentGroupInfo {
                name,
                resources: filtered_resources,
                scopes: unique_scopes,
            })
        })
        .collect()
}
/// Renders a comma-separated, human-readable description of `resources` for the given
/// entity type.
///
/// Returns `None` when the input is empty, when filtering removes all resources, or
/// when any resource has no name at this entity type.
pub fn resources_to_description(
    resources: Vec<common_enums::Resource>,
    entity_type: EntityType,
) -> Option<String> {
    if resources.is_empty() {
        return None;
    }
    let filtered = permissions::filter_resources_by_entity_type(resources, entity_type)?;
    let mut names = Vec::with_capacity(filtered.len());
    for resource in &filtered {
        // Any unnamed resource aborts the whole description, as before.
        names.push(permissions::get_resource_name(*resource, entity_type)?);
    }
    Some(names.join(", "))
}
// File: crates/router/src/utils/verify_connector.rs
use api_models::enums::Connector;
use error_stack::ResultExt;
use crate::{core::errors, types::domain};
/// Assembles a `domain::Card` from raw string details.
///
/// Only the card number is validated (via `cards::CardNumber` parsing); expiry and CVC
/// are wrapped as secrets without validation, and all optional metadata is left unset.
pub fn generate_card_from_details(
    card_number: String,
    card_exp_year: String,
    card_exp_month: String,
    card_cvv: String,
) -> errors::RouterResult<domain::Card> {
    let parsed_number = card_number
        .parse::<cards::CardNumber>()
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Error while parsing card number")?;
    Ok(domain::Card {
        card_number: parsed_number,
        card_exp_month: masking::Secret::new(card_exp_month),
        card_exp_year: masking::Secret::new(card_exp_year),
        card_cvc: masking::Secret::new(card_cvv),
        card_issuer: None,
        card_network: None,
        nick_name: None,
        card_type: None,
        card_issuing_country: None,
        bank_code: None,
        card_holder_name: None,
        co_badged_card_data: None,
    })
}
/// Returns a connector-specific test card, or `Ok(None)` for connectors without one.
///
/// Card-number parsing errors from `generate_card_from_details` propagate as `Err`.
pub fn get_test_card_details(
    connector_name: Connector,
) -> errors::RouterResult<Option<domain::Card>> {
    match connector_name {
        Connector::Stripe => generate_card_from_details(
            "4242424242424242".to_string(),
            "2025".to_string(),
            "12".to_string(),
            "100".to_string(),
        )
        .map(Some),
        Connector::Paypal => generate_card_from_details(
            "4111111111111111".to_string(),
            "2025".to_string(),
            "02".to_string(),
            "123".to_string(),
        )
        .map(Some),
        _ => Ok(None),
    }
}
// File: crates/router/src/utils/storage_partitioning.rs
pub use storage_impl::redis::kv_store::{KvStorePartition, PartitionKey};
// File: crates/router/src/utils/currency.rs
use std::{
collections::HashMap,
ops::Deref,
str::FromStr,
sync::{Arc, LazyLock},
};
use api_models::enums;
use common_utils::{date_time, errors::CustomResult, events::ApiEventMetric, ext_traits::AsyncExt};
use currency_conversion::types::{CurrencyFactors, ExchangeRates};
use error_stack::ResultExt;
use masking::PeekInterface;
use redis_interface::DelReply;
use router_env::{instrument, tracing};
use rust_decimal::Decimal;
use strum::IntoEnumIterator;
use tokio::sync::RwLock;
use tracing_futures::Instrument;
use crate::{
logger,
routes::app::settings::{Conversion, DefaultExchangeRates},
services, SessionState,
};
// NOTE(review): "REDIX" below looks like a typo for "REDIS"; renaming would touch all
// usages elsewhere in this module, so it is left as-is here.
// Redis key used as a distributed lock guarding forex refreshes.
const REDIX_FOREX_CACHE_KEY: &str = "{forex_cache}_lock";
// Redis key under which the serialized forex rates are stored.
const REDIX_FOREX_CACHE_DATA: &str = "{forex_cache}_data";
// Timeout passed to `send_request` for forex API calls (seconds — TODO confirm unit).
const FOREX_API_TIMEOUT: u64 = 5;
// Primary provider endpoint; the app id and base-currency suffix are appended at call time.
const FOREX_BASE_URL: &str = "https://openexchangerates.org/api/latest.json?app_id=";
const FOREX_BASE_CURRENCY: &str = "&base=USD";
// Fallback provider endpoint and the quote-key prefix it uses ("USD<CUR>").
const FALLBACK_FOREX_BASE_URL: &str = "http://apilayer.net/api/live?access_key=";
const FALLBACK_FOREX_API_CURRENCY_PREFIX: &str = "USD";
/// Cached snapshot of exchange rates together with the time it was created.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct FxExchangeRatesCacheEntry {
    // Shared so callers can hold the rates cheaply without copying the whole map.
    pub data: Arc<ExchangeRates>,
    // Unix timestamp of when this entry was created; used for expiry checks.
    timestamp: i64,
}
/// Process-local in-memory cache of the latest forex rates (`None` until first fetch).
static FX_EXCHANGE_RATES_CACHE: LazyLock<RwLock<Option<FxExchangeRatesCacheEntry>>> =
    LazyLock::new(|| RwLock::new(None));
impl ApiEventMetric for FxExchangeRatesCacheEntry {}
/// Errors raised by the forex fetching / caching / conversion pipeline.
#[derive(Debug, Clone, thiserror::Error)]
pub enum ForexError {
    #[error("API error")]
    ApiError,
    #[error("API timeout")]
    ApiTimeout,
    #[error("API unresponsive")]
    ApiUnresponsive,
    #[error("Conversion error")]
    ConversionError,
    #[error("Could not acquire the lock for cache entry")]
    CouldNotAcquireLock,
    #[error("Provided currency not acceptable")]
    CurrencyNotAcceptable,
    #[error("Forex configuration error: {0}")]
    ConfigurationError(String),
    #[error("Incorrect entries in default Currency response")]
    DefaultCurrencyParsingError,
    #[error("Entry not found in cache")]
    EntryNotFound,
    #[error("Forex data unavailable")]
    ForexDataUnavailable,
    #[error("Expiration time invalid")]
    InvalidLogExpiry,
    #[error("Error reading local")]
    LocalReadError,
    #[error("Error writing to local cache")]
    LocalWriteError,
    #[error("Json Parsing error")]
    ParsingError,
    #[error("Aws Kms decryption error")]
    AwsKmsDecryptionFailed,
    #[error("Error connecting to redis")]
    RedisConnectionError,
    #[error("Not able to release write lock")]
    RedisLockReleaseFailed,
    #[error("Error writing to redis")]
    RedisWriteError,
    #[error("Not able to acquire write lock")]
    WriteLockNotAcquired,
}
/// Shape of the primary provider's response: rates keyed by currency code
/// (requested with USD as the base currency).
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct ForexResponse {
    pub rates: HashMap<String, FloatDecimal>,
}
/// Shape of the fallback provider's response: quotes keyed as "USD<CUR>".
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
struct FallbackForexResponse {
    pub quotes: HashMap<String, FloatDecimal>,
}
/// `Decimal` that (de)serializes as a JSON float, matching the providers' payloads.
#[derive(Debug, Copy, Clone, serde::Serialize, serde::Deserialize)]
#[serde(transparent)]
struct FloatDecimal(#[serde(with = "rust_decimal::serde::float")] Decimal);
impl Deref for FloatDecimal {
    type Target = Decimal;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl FxExchangeRatesCacheEntry {
    /// Wraps freshly fetched rates with the current unix timestamp.
    fn new(exchange_rate: ExchangeRates) -> Self {
        let timestamp = date_time::now_unix_timestamp();
        Self {
            data: Arc::new(exchange_rate),
            timestamp,
        }
    }
    /// True when this entry is older than `data_expiration_delay` (same unit as the
    /// stored unix timestamp).
    fn is_expired(&self, data_expiration_delay: u32) -> bool {
        let expires_at = self.timestamp + i64::from(data_expiration_delay);
        expires_at < date_time::now_unix_timestamp()
    }
}
/// Returns a clone of the in-process forex cache entry, if one has been stored yet.
async fn retrieve_forex_from_local_cache() -> Option<FxExchangeRatesCacheEntry> {
    let guard = FX_EXCHANGE_RATES_CACHE.read().await;
    guard.clone()
}
/// Replaces the in-process forex cache with the given entry. Infallible in practice;
/// the `Result` return matches the other cache-writing helpers.
async fn save_forex_data_to_local_cache(
    exchange_rates_cache_entry: FxExchangeRatesCacheEntry,
) -> CustomResult<(), ForexError> {
    *FX_EXCHANGE_RATES_CACHE.write().await = Some(exchange_rates_cache_entry);
    logger::debug!("forex_log: forex saved in cache");
    Ok(())
}
impl TryFrom<DefaultExchangeRates> for ExchangeRates {
    type Error = error_stack::Report<ForexError>;
    /// Converts configured default rates, parsing each currency code (and the base
    /// currency) into the `enums::Currency` type; any unknown code fails the whole
    /// conversion.
    fn try_from(value: DefaultExchangeRates) -> Result<Self, Self::Error> {
        let conversion = value
            .conversion
            .into_iter()
            .map(|(curr, conversion)| {
                let enum_curr = enums::Currency::from_str(curr.as_str())
                    .change_context(ForexError::ConversionError)
                    .attach_printable("Unable to Convert currency received")?;
                Ok((enum_curr, CurrencyFactors::from(conversion)))
            })
            .collect::<Result<HashMap<_, _>, Self::Error>>()?;
        let base_currency = enums::Currency::from_str(value.base_currency.as_str())
            .change_context(ForexError::ConversionError)
            .attach_printable("Unable to convert base currency")?;
        Ok(Self {
            base_currency,
            conversion,
        })
    }
}
impl From<Conversion> for CurrencyFactors {
    /// Maps configuration-level `Conversion` factors onto the runtime
    /// `CurrencyFactors` type, field for field.
    fn from(value: Conversion) -> Self {
        Self {
            to_factor: value.to_factor,
            from_factor: value.from_factor,
        }
    }
}
/// Returns forex rates, preferring the in-process cache, then Redis, then the APIs.
///
/// * Fresh local cache: returned directly.
/// * Expired local cache: a background refresh is kicked off and the stale entry is
///   returned in the meantime.
/// * No local cache: falls through to the Redis-backed path.
#[instrument(skip_all)]
pub async fn get_forex_rates(
    state: &SessionState,
    data_expiration_delay: u32,
) -> CustomResult<FxExchangeRatesCacheEntry, ForexError> {
    if let Some(local_rates) = retrieve_forex_from_local_cache().await {
        if local_rates.is_expired(data_expiration_delay) {
            // expired local data
            logger::debug!("forex_log: Forex stored in cache is expired");
            call_forex_api_and_save_data_to_cache_and_redis(state, Some(local_rates)).await
        } else {
            // Valid data present in local
            logger::debug!("forex_log: forex found in cache");
            Ok(local_rates)
        }
    } else {
        // No data in local
        call_api_if_redis_forex_data_expired(state, data_expiration_delay).await
    }
}
/// Redis-backed fallback used when the local cache is empty.
///
/// Valid Redis data is promoted into the local cache; missing or unreadable Redis data
/// triggers a background API refresh and surfaces `ForexDataUnavailable` to the caller
/// (there is nothing, not even stale data, to serve yet).
async fn call_api_if_redis_forex_data_expired(
    state: &SessionState,
    data_expiration_delay: u32,
) -> CustomResult<FxExchangeRatesCacheEntry, ForexError> {
    match retrieve_forex_data_from_redis(state).await {
        Ok(Some(data)) => {
            call_forex_api_if_redis_data_expired(state, data, data_expiration_delay).await
        }
        Ok(None) => {
            // No data in local as well as redis
            call_forex_api_and_save_data_to_cache_and_redis(state, None).await?;
            Err(ForexError::ForexDataUnavailable.into())
        }
        Err(error) => {
            // Error in deriving forex rates from redis
            logger::error!("forex_error: {:?}", error);
            call_forex_api_and_save_data_to_cache_and_redis(state, None).await?;
            Err(ForexError::ForexDataUnavailable.into())
        }
    }
}
/// Kicks off a background task that acquires the Redis lock, calls the forex APIs and
/// persists the result, then immediately returns `stale_redis_data` if any.
///
/// # Errors
/// `ConfigurationError` when no API key is configured; `EntryNotFound` when there is no
/// stale data to serve while the background refresh runs.
async fn call_forex_api_and_save_data_to_cache_and_redis(
    state: &SessionState,
    stale_redis_data: Option<FxExchangeRatesCacheEntry>,
) -> CustomResult<FxExchangeRatesCacheEntry, ForexError> {
    // spawn a new thread and do the api fetch and write operations on redis.
    let forex_api_key = state.conf.forex_api.get_inner().api_key.peek();
    if forex_api_key.is_empty() {
        Err(ForexError::ConfigurationError("api_keys not provided".into()).into())
    } else {
        let state = state.clone();
        tokio::spawn(
            async move {
                // Best-effort: failures are logged inside the task and never propagated.
                acquire_redis_lock_and_call_forex_api(&state)
                    .await
                    .map_err(|err| {
                        logger::error!(forex_error=?err);
                    })
                    .ok();
            }
            .in_current_span(),
        );
        stale_redis_data.ok_or(ForexError::EntryNotFound.into())
    }
}
/// With the Redis lock held, fetches rates from the primary API, falling back to the
/// secondary API on failure; successful fetches are saved via
/// `save_forex_data_to_cache_and_redis`, which also releases the lock.
///
/// The lock is released explicitly only on the both-APIs-failed path.
async fn acquire_redis_lock_and_call_forex_api(
    state: &SessionState,
) -> CustomResult<(), ForexError> {
    let lock_acquired = acquire_redis_lock(state).await?;
    if !lock_acquired {
        Err(ForexError::CouldNotAcquireLock.into())
    } else {
        logger::debug!("forex_log: redis lock acquired");
        let api_rates = fetch_forex_rates_from_primary_api(state).await;
        match api_rates {
            Ok(rates) => save_forex_data_to_cache_and_redis(state, rates).await,
            Err(error) => {
                logger::error!(forex_error=?error,"primary_forex_error");
                // API not able to fetch data call secondary service
                let secondary_api_rates = fetch_forex_rates_from_fallback_api(state).await;
                match secondary_api_rates {
                    Ok(rates) => save_forex_data_to_cache_and_redis(state, rates).await,
                    Err(error) => {
                        // Both providers failed: free the lock so another node may retry.
                        release_redis_lock(state).await?;
                        Err(error)
                    }
                }
            }
        }
    }
}
/// Persists freshly fetched rates: writes to Redis, releases the Redis lock, then
/// updates the in-process cache. Each step runs only if the previous one succeeded.
async fn save_forex_data_to_cache_and_redis(
    state: &SessionState,
    forex: FxExchangeRatesCacheEntry,
) -> CustomResult<(), ForexError> {
    save_forex_data_to_redis(state, &forex)
        .await
        .async_and_then(|_rates| release_redis_lock(state))
        .await
        .async_and_then(|_val| save_forex_data_to_local_cache(forex.clone()))
        .await
}
/// Decides between serving the Redis-sourced rates and refreshing from the API.
///
/// Non-expired Redis data is re-stamped, copied into the local cache, and returned;
/// expired data is returned stale while a background refresh is spawned.
async fn call_forex_api_if_redis_data_expired(
    state: &SessionState,
    redis_data: FxExchangeRatesCacheEntry,
    data_expiration_delay: u32,
) -> CustomResult<FxExchangeRatesCacheEntry, ForexError> {
    match is_redis_expired(Some(redis_data.clone()).as_ref(), data_expiration_delay).await {
        Some(redis_forex) => {
            // Valid data present in redis
            let exchange_rates = FxExchangeRatesCacheEntry::new(redis_forex.as_ref().clone());
            logger::debug!("forex_log: forex response found in redis");
            save_forex_data_to_local_cache(exchange_rates.clone()).await?;
            Ok(exchange_rates)
        }
        None => {
            // redis expired
            call_forex_api_and_save_data_to_cache_and_redis(state, Some(redis_data)).await
        }
    }
}
/// Fetches USD-based forex rates from the primary provider.
///
/// Currencies missing from the response, or whose rate cannot be inverted (e.g. a zero
/// rate), are logged and skipped rather than failing the whole call.
///
/// Fix: the reciprocal-failure branch previously logged "Rates for {} not received from
/// API", which is misleading — the rate WAS received but was invalid; the message now
/// reflects that.
async fn fetch_forex_rates_from_primary_api(
    state: &SessionState,
) -> Result<FxExchangeRatesCacheEntry, error_stack::Report<ForexError>> {
    let forex_api_key = state.conf.forex_api.get_inner().api_key.peek();
    logger::debug!("forex_log: Primary api call for forex fetch");
    let forex_url: String = format!("{FOREX_BASE_URL}{forex_api_key}{FOREX_BASE_CURRENCY}");
    let forex_request = services::RequestBuilder::new()
        .method(services::Method::Get)
        .url(&forex_url)
        .build();
    logger::info!(primary_forex_request=?forex_request,"forex_log: Primary api call for forex fetch");
    let response = state
        .api_client
        .send_request(
            &state.clone(),
            forex_request,
            Some(FOREX_API_TIMEOUT),
            false,
        )
        .await
        .change_context(ForexError::ApiUnresponsive)
        .attach_printable("Primary forex fetch api unresponsive")?;
    let forex_response = response
        .json::<ForexResponse>()
        .await
        .change_context(ForexError::ParsingError)
        .attach_printable(
            "Unable to parse response received from primary api into ForexResponse",
        )?;
    logger::info!(primary_forex_response=?forex_response,"forex_log");
    let mut conversions: HashMap<enums::Currency, CurrencyFactors> = HashMap::new();
    for enum_curr in enums::Currency::iter() {
        match forex_response.rates.get(&enum_curr.to_string()) {
            Some(rate) => {
                // `to_factor` is USD -> currency; `from_factor` is its reciprocal.
                let from_factor = match Decimal::new(1, 0).checked_div(**rate) {
                    Some(rate) => rate,
                    None => {
                        // The rate was received but cannot be inverted (e.g. zero);
                        // skip this currency instead of storing a bogus factor.
                        logger::error!(
                            "forex_error: Invalid rate {} received for {} from API",
                            **rate,
                            &enum_curr
                        );
                        continue;
                    }
                };
                let currency_factors = CurrencyFactors::new(**rate, from_factor);
                conversions.insert(enum_curr, currency_factors);
            }
            None => {
                logger::error!(
                    "forex_error: Rates for {} not received from API",
                    &enum_curr
                );
            }
        };
    }
    Ok(FxExchangeRatesCacheEntry::new(ExchangeRates::new(
        enums::Currency::USD,
        conversions,
    )))
}
/// Fetches forex rates from the fallback forex API and converts them into an
/// [`FxExchangeRatesCacheEntry`] with USD as the base currency. On success the
/// rates are also persisted to cache/redis, guarded by the redis lock.
pub async fn fetch_forex_rates_from_fallback_api(
    state: &SessionState,
) -> CustomResult<FxExchangeRatesCacheEntry, ForexError> {
    let fallback_forex_api_key = state.conf.forex_api.get_inner().fallback_api_key.peek();
    let fallback_forex_url: String = format!("{FALLBACK_FOREX_BASE_URL}{fallback_forex_api_key}");
    let fallback_forex_request = services::RequestBuilder::new()
        .method(services::Method::Get)
        .url(&fallback_forex_url)
        .build();
    logger::info!(fallback_forex_request=?fallback_forex_request,"forex_log: Fallback api call for forex fetch");
    let response = state
        .api_client
        .send_request(
            &state.clone(),
            fallback_forex_request,
            Some(FOREX_API_TIMEOUT),
            false,
        )
        .await
        .change_context(ForexError::ApiUnresponsive)
        .attach_printable("Fallback forex fetch api unresponsive")?;
    let fallback_forex_response = response
        .json::<FallbackForexResponse>()
        .await
        .change_context(ForexError::ParsingError)
        .attach_printable(
            "Unable to parse response received from fallback api into ForexResponse",
        )?;
    logger::info!(fallback_forex_response=?fallback_forex_response,"forex_log");
    let mut conversions: HashMap<enums::Currency, CurrencyFactors> = HashMap::new();
    for enum_curr in enums::Currency::iter() {
        // Quote keys are prefixed (e.g. "USDEUR"-style) rather than bare codes.
        match fallback_forex_response.quotes.get(
            format!(
                "{}{}",
                FALLBACK_FOREX_API_CURRENCY_PREFIX,
                &enum_curr.to_string()
            )
            .as_str(),
        ) {
            Some(rate) => {
                let from_factor = match Decimal::new(1, 0).checked_div(**rate) {
                    Some(rate) => rate,
                    None => {
                        // Bug fix: the rate WAS present in the response but could
                        // not be inverted (e.g. zero); log the real reason instead
                        // of claiming it was missing.
                        logger::error!(
                            "forex_error: Invalid rate {} received for {}; could not compute inverse",
                            **rate,
                            &enum_curr
                        );
                        continue;
                    }
                };
                let currency_factors = CurrencyFactors::new(**rate, from_factor);
                conversions.insert(enum_curr, currency_factors);
            }
            None => {
                // The base currency has no quote against itself; use a 1:1 factor.
                if enum_curr == enums::Currency::USD {
                    let currency_factors =
                        CurrencyFactors::new(Decimal::new(1, 0), Decimal::new(1, 0));
                    conversions.insert(enum_curr, currency_factors);
                } else {
                    logger::error!(
                        "forex_error: Rates for {} not received from API",
                        &enum_curr
                    );
                }
            }
        };
    }
    let rates =
        FxExchangeRatesCacheEntry::new(ExchangeRates::new(enums::Currency::USD, conversions));
    // Persist only while holding the redis lock to avoid concurrent writers.
    match acquire_redis_lock(state).await {
        Ok(_) => {
            save_forex_data_to_cache_and_redis(state, rates.clone()).await?;
            Ok(rates)
        }
        Err(e) => Err(e),
    }
}
/// Releases the forex redis lock by deleting the lock key.
async fn release_redis_lock(
    state: &SessionState,
) -> Result<DelReply, error_stack::Report<ForexError>> {
    logger::debug!("forex_log: Releasing redis lock");
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(ForexError::RedisConnectionError)?;
    redis_conn
        .delete_key(&REDIX_FOREX_CACHE_KEY.into())
        .await
        .change_context(ForexError::RedisLockReleaseFailed)
        .attach_printable("Unable to release redis lock")
}
/// Attempts to take the forex redis lock via SETNX with an expiry.
/// Returns `true` only when this caller actually set the key.
async fn acquire_redis_lock(state: &SessionState) -> CustomResult<bool, ForexError> {
    let forex_api = state.conf.forex_api.get_inner();
    logger::debug!("forex_log: Acquiring redis lock");
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(ForexError::RedisConnectionError)?;
    let reply = redis_conn
        .set_key_if_not_exists_with_expiry(
            &REDIX_FOREX_CACHE_KEY.into(),
            "",
            Some(i64::from(forex_api.redis_lock_timeout_in_seconds)),
        )
        .await
        .change_context(ForexError::CouldNotAcquireLock)
        .attach_printable("Unable to acquire redis lock")?;
    Ok(matches!(reply, redis_interface::SetnxReply::KeySet))
}
/// Serializes the forex cache entry and writes it to redis with the configured TTL.
async fn save_forex_data_to_redis(
    app_state: &SessionState,
    forex_exchange_cache_entry: &FxExchangeRatesCacheEntry,
) -> CustomResult<(), ForexError> {
    let forex_api = app_state.conf.forex_api.get_inner();
    logger::debug!("forex_log: Saving forex to redis");
    let redis_conn = app_state
        .store
        .get_redis_conn()
        .change_context(ForexError::RedisConnectionError)?;
    redis_conn
        .serialize_and_set_key_with_expiry(
            &REDIX_FOREX_CACHE_DATA.into(),
            forex_exchange_cache_entry,
            i64::from(forex_api.redis_ttl_in_seconds),
        )
        .await
        .change_context(ForexError::RedisWriteError)
        .attach_printable("Unable to save forex data to redis")
}
/// Reads and deserializes the forex cache entry from redis, if present.
async fn retrieve_forex_data_from_redis(
    app_state: &SessionState,
) -> CustomResult<Option<FxExchangeRatesCacheEntry>, ForexError> {
    logger::debug!("forex_log: Retrieving forex from redis");
    let redis_conn = app_state
        .store
        .get_redis_conn()
        .change_context(ForexError::RedisConnectionError)?;
    redis_conn
        .get_and_deserialize_key(&REDIX_FOREX_CACHE_DATA.into(), "FxExchangeRatesCache")
        .await
        .change_context(ForexError::EntryNotFound)
        .attach_printable("Forex entry not found in redis")
}
/// Returns the cached exchange rates when the redis entry is still within its
/// validity window (`timestamp + data_expiration_delay` in the future);
/// `None` when the entry is absent or expired. Note the inverted naming:
/// `Some` means the data is NOT expired.
async fn is_redis_expired(
    redis_cache: Option<&FxExchangeRatesCacheEntry>,
    data_expiration_delay: u32,
) -> Option<Arc<ExchangeRates>> {
    let cache = redis_cache?;
    let expiry_time = cache.timestamp + i64::from(data_expiration_delay);
    if date_time::now_unix_timestamp() < expiry_time {
        Some(cache.data.clone())
    } else {
        logger::debug!("forex_log: Forex stored in redis is expired");
        None
    }
}
#[instrument(skip_all)]
/// Converts `amount` from `from_currency` to `to_currency` using the cached
/// forex rates, returning the result as a string response payload.
pub async fn convert_currency(
    state: SessionState,
    amount: i64,
    to_currency: String,
    from_currency: String,
) -> CustomResult<api_models::currency::CurrencyConversionResponse, ForexError> {
    let forex_api = state.conf.forex_api.get_inner();
    // Rates are fetched first so a stale/unavailable rate source surfaces
    // before any currency-code validation error.
    let rates = get_forex_rates(&state, forex_api.data_expiration_delay_in_seconds)
        .await
        .change_context(ForexError::ApiError)?;
    let parse_currency = |code: &str| {
        enums::Currency::from_str(code)
            .change_context(ForexError::CurrencyNotAcceptable)
            .attach_printable("The provided currency is not acceptable")
    };
    let to_currency = parse_currency(to_currency.as_str())?;
    let from_currency = parse_currency(from_currency.as_str())?;
    let converted_amount =
        currency_conversion::conversion::convert(&rates.data, from_currency, to_currency, amount)
            .change_context(ForexError::ConversionError)
            .attach_printable("Unable to perform currency conversion")?;
    Ok(api_models::currency::CurrencyConversionResponse {
        converted_amount: converted_amount.to_string(),
        currency: to_currency.to_string(),
    })
}
// File: crates/router/src/utils/db_utils.rs
use crate::{
core::errors::{self, utils::RedisErrorExt},
routes::metrics,
};
/// Builds an HSCAN field pattern from a refund sort key by keeping its first
/// three `_`-separated segments and appending a `*` wildcard.
/// E.g. `pa_1234_ref_1211` becomes `pa_1234_ref_*`.
pub fn generate_hscan_pattern_for_refund(sk: &str) -> String {
    let mut segments: Vec<&str> = sk.split('_').take(3).collect();
    segments.push("*");
    segments.join("_")
}
/// Awaits the redis future first; on a `NotFound` miss it records the metric
/// and falls back to the database closure. Any other redis error is mapped to
/// a storage error. The first argument is a future, the second a closure that
/// returns a future for the database call.
pub async fn try_redis_get_else_try_database_get<F, RFut, DFut, T>(
    redis_fut: RFut,
    database_call_closure: F,
) -> error_stack::Result<T, errors::StorageError>
where
    F: FnOnce() -> DFut,
    RFut: futures::Future<Output = error_stack::Result<T, redis_interface::errors::RedisError>>,
    DFut: futures::Future<Output = error_stack::Result<T, errors::StorageError>>,
{
    match redis_fut.await {
        Ok(value) => Ok(value),
        Err(redis_error)
            if matches!(
                redis_error.current_context(),
                redis_interface::errors::RedisError::NotFound
            ) =>
        {
            // A miss is expected operationally; count it and hit the database.
            metrics::KV_MISS.add(1, &[]);
            database_call_closure().await
        }
        // Keeping the key empty here since the error would never go here.
        Err(redis_error) => Err(redis_error.to_redis_failed_response("")),
    }
}
// File: crates/router/src/utils/connector_onboarding.rs
use diesel_models::{ConfigNew, ConfigUpdate};
use error_stack::ResultExt;
use super::errors::StorageErrorExt;
use crate::{
consts,
core::errors::{ApiErrorResponse, NotImplementedMessage, RouterResult},
routes::{app::settings, SessionState},
types::{self, api::enums},
};
pub mod paypal;
/// Builds the connector auth type from onboarding configuration.
/// Only Paypal is supported; every other connector yields `NotImplemented`.
pub fn get_connector_auth(
    connector: enums::Connector,
    connector_data: &settings::ConnectorOnboarding,
) -> RouterResult<types::ConnectorAuthType> {
    match connector {
        enums::Connector::Paypal => {
            let paypal = &connector_data.paypal;
            Ok(types::ConnectorAuthType::BodyKey {
                api_key: paypal.client_secret.clone(),
                key1: paypal.client_id.clone(),
            })
        }
        _ => {
            let message = NotImplementedMessage::Reason(format!(
                "Onboarding is not implemented for {connector}",
            ));
            Err(ApiErrorResponse::NotImplemented { message }.into())
        }
    }
}
/// Returns whether onboarding is enabled for the connector, or `None` when the
/// connector has no onboarding configuration at all.
pub fn is_enabled(
    connector: types::Connector,
    conf: &settings::ConnectorOnboarding,
) -> Option<bool> {
    if let enums::Connector::Paypal = connector {
        Some(conf.paypal.enabled)
    } else {
        None
    }
}
/// Verifies that the merchant exists and (v1) that the given merchant connector
/// account exists for that merchant; errors map to the corresponding
/// `MerchantAccountNotFound` / `MerchantConnectorAccountNotFound` responses.
/// The v2 path is not implemented yet (`todo!`).
pub async fn check_if_connector_exists(
    state: &SessionState,
    connector_id: &common_utils::id_type::MerchantConnectorAccountId,
    merchant_id: &common_utils::id_type::MerchantId,
) -> RouterResult<()> {
    let key_manager_state = &state.into();
    // The key store lookup doubles as the merchant-existence check.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            key_manager_state,
            merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .to_not_found_response(ApiErrorResponse::MerchantAccountNotFound)?;
    #[cfg(feature = "v1")]
    let _connector = state
        .store
        .find_by_merchant_connector_account_merchant_id_merchant_connector_id(
            key_manager_state,
            merchant_id,
            connector_id,
            &key_store,
        )
        .await
        .to_not_found_response(ApiErrorResponse::MerchantConnectorAccountNotFound {
            id: connector_id.get_string_repr().to_string(),
        })?;
    #[cfg(feature = "v2")]
    {
        // v2 lookup not implemented; bindings silenced to avoid unused warnings.
        let _ = connector_id;
        let _ = key_store;
        todo!()
    };
    Ok(())
}
/// Stores (or refreshes) an onboarding tracking timestamp for the connector in
/// the `configs` table: an existing entry is updated with the current unix
/// timestamp, a "not found" lookup inserts a fresh entry, and any other lookup
/// error is propagated as an internal server error.
pub async fn set_tracking_id_in_configs(
    state: &SessionState,
    connector_id: &common_utils::id_type::MerchantConnectorAccountId,
    connector: enums::Connector,
) -> RouterResult<()> {
    let timestamp = common_utils::date_time::now_unix_timestamp().to_string();
    // Build the key once so the lookup and the subsequent write agree on it.
    let key = build_key(connector_id, connector);
    // A plain match replaces the original convoluted
    // `map_err(..).err().unwrap_or(false)` error inspection.
    match state.store.find_config_by_key(&key).await {
        Ok(_) => {
            state
                .store
                .update_config_by_key(
                    &key,
                    ConfigUpdate::Update {
                        config: Some(timestamp),
                    },
                )
                .await
                .change_context(ApiErrorResponse::InternalServerError)
                .attach_printable("Error updating data in configs table")?;
        }
        Err(err) if err.current_context().is_db_not_found() => {
            state
                .store
                .insert_config(ConfigNew {
                    key,
                    config: timestamp,
                })
                .await
                .change_context(ApiErrorResponse::InternalServerError)
                .attach_printable("Error inserting data in configs table")?;
        }
        Err(err) => return Err(err.change_context(ApiErrorResponse::InternalServerError)),
    }
    Ok(())
}
/// Returns `<connector_id>_<timestamp>` using the stored onboarding timestamp,
/// falling back to the current unix timestamp when no config entry exists.
pub async fn get_tracking_id_from_configs(
    state: &SessionState,
    connector_id: &common_utils::id_type::MerchantConnectorAccountId,
    connector: enums::Connector,
) -> RouterResult<String> {
    let default_timestamp = common_utils::date_time::now_unix_timestamp().to_string();
    let config = state
        .store
        .find_config_by_key_unwrap_or(&build_key(connector_id, connector), Some(default_timestamp))
        .await
        .change_context(ApiErrorResponse::InternalServerError)
        .attach_printable("Error getting data from configs table")?;
    Ok(format!(
        "{}_{}",
        connector_id.get_string_repr(),
        config.config
    ))
}
/// Builds the configs-table key: `<prefix>_<connector>_<connector_id>`.
fn build_key(
    connector_id: &common_utils::id_type::MerchantConnectorAccountId,
    connector: enums::Connector,
) -> String {
    let prefix = consts::CONNECTOR_ONBOARDING_CONFIG_PREFIX;
    format!("{prefix}_{connector}_{}", connector_id.get_string_repr())
}
// File: crates/router/src/utils/chat.rs
use api_models::chat as chat_api;
use common_utils::{
crypto::{EncodeMessage, GcmAes256},
encryption::Encryption,
};
use diesel_models::hyperswitch_ai_interaction::HyperswitchAiInteractionNew;
use error_stack::ResultExt;
use masking::ExposeInterface;
use crate::{
core::errors::{self, CustomResult},
routes::SessionState,
services::authentication as auth,
};
/// Builds a `HyperswitchAiInteractionNew` record from a chat request/response
/// pair, encrypting the user query and the serialized response with AES-GCM-256
/// using the configured chat encryption key.
pub async fn construct_hyperswitch_ai_interaction(
    state: &SessionState,
    user_from_token: &auth::UserFromToken,
    req: &chat_api::ChatRequest,
    response: &chat_api::ChatResponse,
    request_id: &str,
) -> CustomResult<HyperswitchAiInteractionNew, errors::ApiErrorResponse> {
    let encryption_key = state.conf.chat.get_inner().encryption_key.clone().expose();
    // The configured key is expected to be hex-encoded; on decode failure the
    // raw string bytes are used to stay compatible with earlier deployments.
    let key = match hex::decode(&encryption_key) {
        Ok(key) => key,
        Err(e) => {
            router_env::logger::error!("Failed to decode encryption key: {}", e);
            // Fallback to using the string as bytes, which was the previous behavior
            encryption_key.as_bytes().to_vec()
        }
    };
    let encrypted_user_query_bytes = GcmAes256
        .encode_message(&key, &req.message.clone().expose().into_bytes())
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to encrypt user query")?;
    // The response is JSON-serialized first, then encrypted with the same key.
    let encrypted_response_bytes = serde_json::to_vec(&response.response.clone())
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to serialize response for encryption")
        .and_then(|bytes| {
            GcmAes256
                .encode_message(&key, &bytes)
                .change_context(errors::ApiErrorResponse::InternalServerError)
        })
        .attach_printable("Failed to encrypt response")?;
    Ok(HyperswitchAiInteractionNew {
        id: request_id.to_owned(),
        // The request id currently doubles as the session id.
        session_id: Some(request_id.to_string()),
        user_id: Some(user_from_token.user_id.clone()),
        merchant_id: Some(user_from_token.merchant_id.get_string_repr().to_string()),
        profile_id: Some(user_from_token.profile_id.get_string_repr().to_string()),
        org_id: Some(user_from_token.org_id.get_string_repr().to_string()),
        role_id: Some(user_from_token.role_id.clone()),
        user_query: Some(Encryption::new(encrypted_user_query_bytes.into())),
        response: Some(Encryption::new(encrypted_response_bytes.into())),
        // The executed database query is stored in plain text, not encrypted.
        database_query: response.query_executed.clone().map(|q| q.expose()),
        interaction_status: Some(response.status.clone()),
        created_at: common_utils::date_time::now(),
    })
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/utils/ext_traits.rs",
"crates/router/src/utils/user.rs",
"crates/router/src/utils/user_role.rs",
"crates/router/src/utils/verify_connector.rs",
"crates/router/src/utils/storage_partitioning.rs",
"crates/router/src/utils/currency.rs",
"crates/router/src/utils/db_utils.rs",
"crates/router/src/utils/connector_onboarding.rs",
"crates/router/src/utils/chat.rs"
],
"module": "crates/router/src/utils",
"num_files": 9,
"token_count": 14528
}
|
module_-958706189380241457
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/utils/user
Files: 5
</path>
<module>
// File: crates/router/src/utils/user/theme.rs
use std::path::PathBuf;
use common_enums::EntityType;
use common_utils::{ext_traits::AsyncExt, id_type, types::user::ThemeLineage};
use diesel_models::user::theme::Theme;
use error_stack::ResultExt;
use hyperswitch_domain_models::merchant_key_store::MerchantKeyStore;
use crate::{
core::errors::{StorageErrorExt, UserErrors, UserResult},
routes::SessionState,
services::authentication::UserFromToken,
};
/// Returns the storage directory for a theme: `themes/<theme_id>`.
fn get_theme_dir_key(theme_id: &str) -> PathBuf {
    PathBuf::from("themes").join(theme_id)
}
/// Returns the storage path of a named file inside a theme's directory:
/// `themes/<theme_id>/<file_name>`.
pub fn get_specific_file_key(theme_id: &str, file_name: &str) -> PathBuf {
    get_theme_dir_key(theme_id).join(file_name)
}
/// Returns the storage path of a theme's manifest file (`theme.json`).
pub fn get_theme_file_key(theme_id: &str) -> PathBuf {
    const THEME_FILE_NAME: &str = "theme.json";
    get_specific_file_key(theme_id, THEME_FILE_NAME)
}
/// Converts a `PathBuf` to `&str`, failing with an internal error when the
/// path is not valid UTF-8.
fn path_buf_to_str(path: &PathBuf) -> UserResult<&str> {
    match path.to_str() {
        Some(path_str) => Ok(path_str),
        None => Err(UserErrors::InternalServerError)
            .attach_printable(format!("Failed to convert path {path:#?} to string")),
    }
}
/// Downloads a file from the theme storage bucket.
pub async fn retrieve_file_from_theme_bucket(
    state: &SessionState,
    path: &PathBuf,
) -> UserResult<Vec<u8>> {
    let file_key = path_buf_to_str(path)?;
    state
        .theme_storage_client
        .retrieve_file(file_key)
        .await
        .change_context(UserErrors::ErrorRetrievingFile)
}
/// Uploads a file to the theme storage bucket.
pub async fn upload_file_to_theme_bucket(
    state: &SessionState,
    path: &PathBuf,
    data: Vec<u8>,
) -> UserResult<()> {
    let file_key = path_buf_to_str(path)?;
    state
        .theme_storage_client
        .upload_file(file_key, data)
        .await
        .change_context(UserErrors::ErrorUploadingFile)
}
/// Validates every level of a theme lineage against the database, from tenant
/// down to profile. Checks run in hierarchy order so the error names the
/// outermost invalid level first.
pub async fn validate_lineage(state: &SessionState, lineage: &ThemeLineage) -> UserResult<()> {
    match lineage {
        ThemeLineage::Tenant { tenant_id } => {
            validate_tenant(state, tenant_id)?;
            Ok(())
        }
        ThemeLineage::Organization { tenant_id, org_id } => {
            validate_tenant(state, tenant_id)?;
            validate_org(state, org_id).await?;
            Ok(())
        }
        ThemeLineage::Merchant {
            tenant_id,
            org_id,
            merchant_id,
        } => {
            validate_tenant(state, tenant_id)?;
            validate_org(state, org_id).await?;
            validate_merchant(state, org_id, merchant_id).await?;
            Ok(())
        }
        ThemeLineage::Profile {
            tenant_id,
            org_id,
            merchant_id,
            profile_id,
        } => {
            validate_tenant(state, tenant_id)?;
            validate_org(state, org_id).await?;
            // The merchant's key store is needed to look up the profile below.
            let key_store = validate_merchant_and_get_key_store(state, org_id, merchant_id).await?;
            validate_profile(state, profile_id, merchant_id, &key_store).await?;
            Ok(())
        }
    }
}
/// Ensures the lineage's tenant matches the tenant of the current session.
fn validate_tenant(state: &SessionState, tenant_id: &id_type::TenantId) -> UserResult<()> {
    if &state.tenant.tenant_id == tenant_id {
        Ok(())
    } else {
        Err(UserErrors::InvalidThemeLineage("tenant_id".to_string()).into())
    }
}
/// Ensures the organization exists; a missing row is reported as an invalid
/// `org_id` lineage.
async fn validate_org(state: &SessionState, org_id: &id_type::OrganizationId) -> UserResult<()> {
    let _organization = state
        .accounts_store
        .find_organization_by_org_id(org_id)
        .await
        .to_not_found_response(UserErrors::InvalidThemeLineage("org_id".to_string()))?;
    Ok(())
}
/// Ensures the merchant exists and belongs to the given organization, and
/// returns its key store for downstream lookups. Any failure is reported as an
/// invalid `merchant_id` lineage.
async fn validate_merchant_and_get_key_store(
    state: &SessionState,
    org_id: &id_type::OrganizationId,
    merchant_id: &id_type::MerchantId,
) -> UserResult<MerchantKeyStore> {
    let lineage_error = || UserErrors::InvalidThemeLineage("merchant_id".to_string());
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            &state.into(),
            merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .to_not_found_response(lineage_error())?;
    let merchant_account = state
        .store
        .find_merchant_account_by_merchant_id(&state.into(), merchant_id, &key_store)
        .await
        .to_not_found_response(lineage_error())?;
    if &merchant_account.organization_id == org_id {
        Ok(key_store)
    } else {
        Err(lineage_error().into())
    }
}
/// Ensures the merchant exists under the organization, discarding the key store.
async fn validate_merchant(
    state: &SessionState,
    org_id: &id_type::OrganizationId,
    merchant_id: &id_type::MerchantId,
) -> UserResult<()> {
    let _key_store = validate_merchant_and_get_key_store(state, org_id, merchant_id).await?;
    Ok(())
}
/// Ensures the business profile exists for the merchant; a missing row is
/// reported as an invalid `profile_id` lineage.
async fn validate_profile(
    state: &SessionState,
    profile_id: &id_type::ProfileId,
    merchant_id: &id_type::MerchantId,
    key_store: &MerchantKeyStore,
) -> UserResult<()> {
    let _profile = state
        .store
        .find_business_profile_by_merchant_id_profile_id(
            &state.into(),
            key_store,
            merchant_id,
            profile_id,
        )
        .await
        .to_not_found_response(UserErrors::InvalidThemeLineage("profile_id".to_string()))?;
    Ok(())
}
/// Builds a lineage from the user's token (falling back to the session tenant
/// when the token carries none) and returns the most specific matching theme.
pub async fn get_most_specific_theme_using_token_and_min_entity(
    state: &SessionState,
    user_from_token: &UserFromToken,
    min_entity: EntityType,
) -> UserResult<Option<Theme>> {
    let tenant_id = user_from_token
        .tenant_id
        .clone()
        .unwrap_or_else(|| state.tenant.tenant_id.clone());
    let lineage = ThemeLineage::new(
        min_entity,
        tenant_id,
        user_from_token.org_id.clone(),
        user_from_token.merchant_id.clone(),
        user_from_token.profile_id.clone(),
    );
    get_most_specific_theme_using_lineage(state, lineage).await
}
/// Looks up the most specific theme for the lineage; a "not found" database
/// result maps to `Ok(None)` rather than an error.
pub async fn get_most_specific_theme_using_lineage(
    state: &SessionState,
    lineage: ThemeLineage,
) -> UserResult<Option<Theme>> {
    let lookup = state
        .store
        .find_most_specific_theme_in_lineage(lineage)
        .await;
    match lookup {
        Ok(theme) => Ok(Some(theme)),
        Err(e) if e.current_context().is_db_not_found() => Ok(None),
        Err(e) => Err(e.change_context(UserErrors::InternalServerError)),
    }
}
/// Fetches a theme when an id is supplied; `None` input, and a "not found"
/// database result, both yield `Ok(None)`.
pub async fn get_theme_using_optional_theme_id(
    state: &SessionState,
    theme_id: Option<String>,
) -> UserResult<Option<Theme>> {
    let lookup = theme_id
        .async_map(|theme_id| state.store.find_theme_by_theme_id(theme_id))
        .await
        .transpose();
    match lookup {
        Ok(theme) => Ok(theme),
        Err(e) if e.current_context().is_db_not_found() => Ok(None),
        Err(e) => Err(e.change_context(UserErrors::InternalServerError)),
    }
}
/// Builds a `ThemeLineage` of the requested entity type from the user's token,
/// defaulting to the session tenant when the token carries no tenant id.
pub async fn get_theme_lineage_from_user_token(
    user_from_token: &UserFromToken,
    state: &SessionState,
    request_entity_type: &EntityType,
) -> UserResult<ThemeLineage> {
    let tenant_id = user_from_token
        .tenant_id
        .clone()
        .unwrap_or_else(|| state.tenant.tenant_id.clone());
    Ok(ThemeLineage::new(
        *request_entity_type,
        tenant_id,
        user_from_token.org_id.clone(),
        user_from_token.merchant_id.clone(),
        user_from_token.profile_id.clone(),
    ))
}
/// Checks whether the user may access the theme: the user's entity type must
/// not be narrower than the theme's, the tenant must match, and the theme's
/// lineage ids must line up exactly with the user's at the theme's entity
/// level (with all deeper levels unset on the theme).
pub async fn can_user_access_theme(
    user: &UserFromToken,
    user_entity_type: &EntityType,
    theme: &Theme,
) -> UserResult<()> {
    if user_entity_type < &theme.entity_type {
        return Err(UserErrors::ThemeNotFound.into());
    }
    let tenant_matches = user.tenant_id.as_ref() == Some(&theme.tenant_id);
    // Exhaustive match: adding an EntityType variant forces an explicit rule.
    let lineage_matches = match theme.entity_type {
        EntityType::Tenant => {
            theme.org_id.is_none() && theme.merchant_id.is_none() && theme.profile_id.is_none()
        }
        EntityType::Organization => {
            theme.org_id.as_ref() == Some(&user.org_id)
                && theme.merchant_id.is_none()
                && theme.profile_id.is_none()
        }
        EntityType::Merchant => {
            theme.org_id.as_ref() == Some(&user.org_id)
                && theme.merchant_id.as_ref() == Some(&user.merchant_id)
                && theme.profile_id.is_none()
        }
        EntityType::Profile => {
            theme.org_id.as_ref() == Some(&user.org_id)
                && theme.merchant_id.as_ref() == Some(&user.merchant_id)
                && theme.profile_id.as_ref() == Some(&user.profile_id)
        }
    };
    if tenant_matches && lineage_matches {
        Ok(())
    } else {
        Err(UserErrors::ThemeNotFound.into())
    }
}
// File: crates/router/src/utils/user/dashboard_metadata.rs
use std::{net::IpAddr, ops::Not, str::FromStr};
use actix_web::http::header::HeaderMap;
use api_models::user::dashboard_metadata::{
GetMetaDataRequest, GetMultipleMetaDataPayload, ProdIntent, SetMetaDataRequest,
};
use common_utils::id_type;
use diesel_models::{
enums::DashboardMetadata as DBEnum,
user::dashboard_metadata::{DashboardMetadata, DashboardMetadataNew, DashboardMetadataUpdate},
};
use error_stack::{report, ResultExt};
use masking::{ExposeInterface, PeekInterface, Secret};
use router_env::logger;
use crate::{
core::errors::{UserErrors, UserResult},
headers, SessionState,
};
/// Inserts a merchant-scoped dashboard metadata row (`user_id` left unset).
/// A unique-constraint violation maps to `MetadataAlreadySet`.
pub async fn insert_merchant_scoped_metadata_to_db(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_key: DBEnum,
    metadata_value: impl serde::Serialize,
) -> UserResult<DashboardMetadata> {
    let timestamp = common_utils::date_time::now();
    let serialized_value = serde_json::to_value(metadata_value)
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Error Converting Struct To Serde Value")?;
    let new_metadata = DashboardMetadataNew {
        user_id: None,
        merchant_id,
        org_id,
        data_key: metadata_key,
        data_value: Secret::from(serialized_value),
        created_by: user_id.clone(),
        created_at: timestamp,
        last_modified_by: user_id,
        last_modified_at: timestamp,
    };
    state.store.insert_metadata(new_metadata).await.map_err(|err| {
        if err.current_context().is_db_unique_violation() {
            err.change_context(UserErrors::MetadataAlreadySet)
        } else {
            err.change_context(UserErrors::InternalServerError)
        }
    })
}
/// Inserts a user-scoped dashboard metadata row (`user_id` populated).
/// A unique-constraint violation maps to `MetadataAlreadySet`.
pub async fn insert_user_scoped_metadata_to_db(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_key: DBEnum,
    metadata_value: impl serde::Serialize,
) -> UserResult<DashboardMetadata> {
    let timestamp = common_utils::date_time::now();
    let serialized_value = serde_json::to_value(metadata_value)
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Error Converting Struct To Serde Value")?;
    let new_metadata = DashboardMetadataNew {
        user_id: Some(user_id.clone()),
        merchant_id,
        org_id,
        data_key: metadata_key,
        data_value: Secret::from(serialized_value),
        created_by: user_id.clone(),
        created_at: timestamp,
        last_modified_by: user_id,
        last_modified_at: timestamp,
    };
    state.store.insert_metadata(new_metadata).await.map_err(|err| {
        if err.current_context().is_db_unique_violation() {
            err.change_context(UserErrors::MetadataAlreadySet)
        } else {
            err.change_context(UserErrors::InternalServerError)
        }
    })
}
/// Fetches merchant-scoped dashboard metadata rows for the given keys.
pub async fn get_merchant_scoped_metadata_from_db(
    state: &SessionState,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_keys: Vec<DBEnum>,
) -> UserResult<Vec<DashboardMetadata>> {
    let lookup = state
        .store
        .find_merchant_scoped_dashboard_metadata(&merchant_id, &org_id, metadata_keys)
        .await;
    lookup
        .change_context(UserErrors::InternalServerError)
        .attach_printable("DB Error Fetching DashboardMetaData")
}
/// Fetches user-scoped dashboard metadata rows for the given keys. A "not
/// found" database result yields an empty vector instead of an error.
pub async fn get_user_scoped_metadata_from_db(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_keys: Vec<DBEnum>,
) -> UserResult<Vec<DashboardMetadata>> {
    let lookup = state
        .store
        .find_user_scoped_dashboard_metadata(&user_id, &merchant_id, &org_id, metadata_keys)
        .await;
    match lookup {
        Ok(data) => Ok(data),
        // A missing row simply means no metadata has been stored yet.
        Err(e) if e.current_context().is_db_not_found() => Ok(Vec::new()),
        Err(e) => Err(e
            .change_context(UserErrors::InternalServerError)
            .attach_printable("DB Error Fetching DashboardMetaData")),
    }
}
/// Updates a merchant-scoped dashboard metadata row (owner `user_id` unset).
pub async fn update_merchant_scoped_metadata(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_key: DBEnum,
    metadata_value: impl serde::Serialize,
) -> UserResult<DashboardMetadata> {
    let serialized_value = serde_json::to_value(metadata_value)
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Error Converting Struct To Serde Value")?;
    let update_payload = DashboardMetadataUpdate::UpdateData {
        data_key: metadata_key,
        data_value: Secret::from(serialized_value),
        last_modified_by: user_id,
    };
    state
        .store
        .update_metadata(None, merchant_id, org_id, metadata_key, update_payload)
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Updates a user-scoped dashboard metadata row (owner `user_id` populated).
pub async fn update_user_scoped_metadata(
    state: &SessionState,
    user_id: String,
    merchant_id: id_type::MerchantId,
    org_id: id_type::OrganizationId,
    metadata_key: DBEnum,
    metadata_value: impl serde::Serialize,
) -> UserResult<DashboardMetadata> {
    let serialized_value = serde_json::to_value(metadata_value)
        .change_context(UserErrors::InternalServerError)
        .attach_printable("Error Converting Struct To Serde Value")?;
    let owner = Some(user_id.clone());
    let update_payload = DashboardMetadataUpdate::UpdateData {
        data_key: metadata_key,
        data_value: Secret::from(serialized_value),
        last_modified_by: user_id,
    };
    state
        .store
        .update_metadata(owner, merchant_id, org_id, metadata_key, update_payload)
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Deserializes an optional metadata row's JSON value into `T`; `None` input
/// yields `Ok(None)`.
pub fn deserialize_to_response<T>(data: Option<&DashboardMetadata>) -> UserResult<Option<T>>
where
    T: serde::de::DeserializeOwned,
{
    match data {
        None => Ok(None),
        Some(metadata) => serde_json::from_value(metadata.data_value.clone().expose())
            .map(Some)
            .change_context(UserErrors::InternalServerError)
            .attach_printable("Error Serializing Metadata from DB"),
    }
}
/// Splits metadata keys into `(merchant_scoped, user_scoped)` buckets.
pub fn separate_metadata_type_based_on_scope(
    metadata_keys: Vec<DBEnum>,
) -> (Vec<DBEnum>, Vec<DBEnum>) {
    let mut merchant_scoped = Vec::with_capacity(metadata_keys.len());
    let mut user_scoped = Vec::with_capacity(metadata_keys.len());
    for key in metadata_keys {
        // Exhaustive match so a newly added variant forces an explicit choice.
        let is_user_scoped = match key {
            DBEnum::Feedback | DBEnum::IsChangePasswordRequired => true,
            DBEnum::ProductionAgreement
            | DBEnum::SetupProcessor
            | DBEnum::ConfigureEndpoint
            | DBEnum::SetupComplete
            | DBEnum::FirstProcessorConnected
            | DBEnum::SecondProcessorConnected
            | DBEnum::ConfiguredRouting
            | DBEnum::TestPayment
            | DBEnum::IntegrationMethod
            | DBEnum::ConfigurationType
            | DBEnum::IntegrationCompleted
            | DBEnum::StripeConnected
            | DBEnum::PaypalConnected
            | DBEnum::SpRoutingConfigured
            | DBEnum::SpTestPayment
            | DBEnum::DownloadWoocom
            | DBEnum::ConfigureWoocom
            | DBEnum::SetupWoocomWebhook
            | DBEnum::OnboardingSurvey
            | DBEnum::IsMultipleConfiguration
            | DBEnum::ReconStatus
            | DBEnum::ProdIntent => false,
        };
        if is_user_scoped {
            user_scoped.push(key);
        } else {
            merchant_scoped.push(key);
        }
    }
    (merchant_scoped, user_scoped)
}
pub fn is_update_required(metadata: &UserResult<DashboardMetadata>) -> bool {
match metadata {
Ok(_) => false,
Err(e) => matches!(e.current_context(), UserErrors::MetadataAlreadySet),
}
}
/// Returns `true` for metadata keys that require backfilling — currently the
/// Stripe/Paypal connected flags.
pub fn is_backfill_required(metadata_key: DBEnum) -> bool {
    matches!(
        metadata_key,
        DBEnum::StripeConnected | DBEnum::PaypalConnected
    )
}
/// For `ProductionAgreement` requests, extracts the client IP from the first
/// entry of the `X-Forwarded-For` header and stores it on the request.
/// Other request variants are left untouched.
pub fn set_ip_address_if_required(
    request: &mut SetMetaDataRequest,
    headers: &HeaderMap,
) -> UserResult<()> {
    if let SetMetaDataRequest::ProductionAgreement(req) = request {
        let header_value = headers
            .get(headers::X_FORWARDED_FOR)
            .ok_or(report!(UserErrors::IpAddressParsingFailed))
            .attach_printable("X-Forwarded-For header not found")?;
        let header_str = header_value
            .to_str()
            .change_context(UserErrors::IpAddressParsingFailed)
            .attach_printable("Error converting Header Value to Str")?;
        // Only the first (client) address of the comma-separated list is used.
        let client_ip = header_str
            .split(',')
            .next()
            .and_then(|ip| ip.parse::<IpAddr>().ok())
            .ok_or(report!(UserErrors::IpAddressParsingFailed))
            .attach_printable("Error Parsing header value to ip")?;
        let ip_address: Secret<String, common_utils::pii::IpAddress> =
            client_ip.to_string().into();
        req.ip_address = Some(ip_address)
    }
    Ok(())
}
/// Parses a comma-separated query string into metadata request enums, failing
/// as a whole when any single entry is invalid.
pub fn parse_string_to_enums(query: String) -> UserResult<GetMultipleMetaDataPayload> {
    let results = query
        .split(',')
        .map(GetMetaDataRequest::from_str)
        .collect::<Result<Vec<GetMetaDataRequest>, _>>()
        .change_context(UserErrors::InvalidMetadataRequest)
        .attach_printable("Error Parsing to DashboardMetadata enums")?;
    Ok(GetMultipleMetaDataPayload { results })
}
/// Returns `true` only when a value is present AND does not contain the given
/// substring; an absent value yields `false`.
fn not_contains_string(value: Option<&str>, value_to_be_checked: &str) -> bool {
    match value {
        Some(text) => !text.contains(value_to_be_checked),
        None => false,
    }
}
/// Returns `true` when a production-intent email should be sent: the PoC
/// email, business website, and user email must all be present and must not
/// contain internal markers ("juspay"; the website additionally must not
/// contain "hyperswitch"). Failing checks are logged for debugging.
pub fn is_prod_email_required(data: &ProdIntent, user_email: String) -> bool {
    let poc_email_check = not_contains_string(
        data.poc_email.as_ref().map(|email| email.peek().as_str()),
        "juspay",
    );
    let website = data.business_website.as_deref();
    let business_website_check =
        not_contains_string(website, "juspay") && not_contains_string(website, "hyperswitch");
    let user_email_check = not_contains_string(Some(&user_email), "juspay");
    let all_checks_passed = poc_email_check && business_website_check && user_email_check;
    if all_checks_passed.not() {
        logger::info!(prod_intent_email = poc_email_check);
        logger::info!(prod_intent_email = business_website_check);
        logger::info!(prod_intent_email = user_email_check);
    }
    all_checks_passed
}
// File: crates/router/src/utils/user/password.rs
use argon2::{
password_hash::{
rand_core::OsRng, Error as argon2Err, PasswordHash, PasswordHasher, PasswordVerifier,
SaltString,
},
Argon2,
};
use common_utils::errors::CustomResult;
use error_stack::ResultExt;
use masking::{ExposeInterface, PeekInterface, Secret};
use rand::{seq::SliceRandom, Rng};
use crate::core::errors::UserErrors;
/// Hashes a password with Argon2 (default parameters) and a freshly generated
/// random salt, returning the PHC-format hash string.
pub fn generate_password_hash(
    password: Secret<String>,
) -> CustomResult<Secret<String>, UserErrors> {
    let hasher = Argon2::default();
    let salt = SaltString::generate(&mut OsRng);
    let hashed = hasher
        .hash_password(password.expose().as_bytes(), &salt)
        .change_context(UserErrors::InternalServerError)?;
    Ok(Secret::new(hashed.to_string()))
}
/// Verifies `candidate` against a stored PHC-format Argon2 hash.
/// A password mismatch yields `Ok(false)`; any other verification failure is
/// an internal error.
pub fn is_correct_password(
    candidate: &Secret<String>,
    password: &Secret<String>,
) -> CustomResult<bool, UserErrors> {
    let parsed_hash =
        PasswordHash::new(password.peek()).change_context(UserErrors::InternalServerError)?;
    match Argon2::default().verify_password(candidate.peek().as_bytes(), &parsed_hash) {
        Ok(()) => Ok(true),
        Err(argon2Err::Password) => Ok(false),
        Err(other) => Err(other).change_context(UserErrors::InternalServerError),
    }
}
/// Returns the index of the first recovery code matching `candidate`, or
/// `None` when no code matches. Hash verification errors are propagated.
pub fn get_index_for_correct_recovery_code(
    candidate: &Secret<String>,
    recovery_codes: &[Secret<String>],
) -> CustomResult<Option<usize>, UserErrors> {
    for (index, recovery_code) in recovery_codes.iter().enumerate() {
        if is_correct_password(candidate, recovery_code)? {
            return Ok(Some(index));
        }
    }
    Ok(None)
}
/// Generates a temporary password: a UUID followed by one uppercase letter,
/// one special character, one lowercase letter, and one digit, so common
/// complexity policies are satisfied.
pub fn get_temp_password() -> Secret<String> {
    let base = uuid::Uuid::new_v4().to_string();
    let mut rng = rand::thread_rng();
    let special_chars: Vec<char> = "!@#$%^&*()-_=+[]{}|;:,.<>?".chars().collect();
    // `choose` only returns None for an empty slice; '@' is a defensive default.
    let special = *special_chars.choose(&mut rng).unwrap_or(&'@');
    let upper = rng.gen_range('A'..='Z');
    let lower = rng.gen_range('a'..='z');
    let digit = rng.gen_range('0'..='9');
    Secret::new(format!("{base}{upper}{special}{lower}{digit}"))
}
// File: crates/router/src/utils/user/two_factor_auth.rs
use common_utils::pii;
use error_stack::ResultExt;
use masking::{ExposeInterface, PeekInterface};
use totp_rs::{Algorithm, TOTP};
use crate::{
consts,
core::errors::{UserErrors, UserResult},
routes::SessionState,
};
/// Builds a SHA-1 TOTP generator for the user's email using the application's
/// default digit/tolerance/validity settings. When no secret is supplied, a
/// new one is generated; otherwise the given base32-encoded secret is used.
pub fn generate_default_totp(
    email: pii::Email,
    secret: Option<masking::Secret<String>>,
    issuer: String,
) -> UserResult<TOTP> {
    let totp_secret = match secret {
        Some(encoded) => totp_rs::Secret::Encoded(encoded.expose()),
        None => totp_rs::Secret::generate_secret(),
    };
    let secret_bytes = totp_secret
        .to_bytes()
        .change_context(UserErrors::InternalServerError)?;
    TOTP::new(
        Algorithm::SHA1,
        consts::user::TOTP_DIGITS,
        consts::user::TOTP_TOLERANCE,
        consts::user::TOTP_VALIDITY_DURATION_IN_SECONDS,
        secret_bytes,
        Some(issuer),
        email.expose().expose(),
    )
    .change_context(UserErrors::InternalServerError)
}
/// Returns whether a TOTP verification marker exists in redis for the user.
pub async fn check_totp_in_redis(state: &SessionState, user_id: &str) -> UserResult<bool> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let totp_key = format!("{}{user_id}", consts::user::REDIS_TOTP_PREFIX);
    redis_conn
        .exists::<()>(&totp_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Returns whether a recovery-code-verified marker exists in Redis for `user_id`.
pub async fn check_recovery_code_in_redis(state: &SessionState, user_id: &str) -> UserResult<bool> {
    let recovery_key = format!("{}{}", consts::user::REDIS_RECOVERY_CODE_PREFIX, user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .exists::<()>(&recovery_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Records a TOTP-verified marker for `user_id`, storing the current unix
/// timestamp under the TOTP key with the configured 2FA expiry.
pub async fn insert_totp_in_redis(state: &SessionState, user_id: &str) -> UserResult<()> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let totp_key = format!("{}{}", consts::user::REDIS_TOTP_PREFIX, user_id);
    let verified_at = common_utils::date_time::now_unix_timestamp();
    redis_conn
        .set_key_with_expiry(
            &totp_key.as_str().into(),
            verified_at,
            state.conf.user.two_factor_auth_expiry_in_secs,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Stashes a pending TOTP `secret` for `user_id` in Redis with a fixed TTL,
/// so it can be confirmed in a later request.
pub async fn insert_totp_secret_in_redis(
    state: &SessionState,
    user_id: &str,
    secret: &masking::Secret<String>,
) -> UserResult<()> {
    let secret_key = get_totp_secret_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .set_key_with_expiry(
            &secret_key.into(),
            secret.peek(),
            consts::user::REDIS_TOTP_SECRET_TTL_IN_SECS,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Fetches the pending TOTP secret for `user_id`, if one was stashed and has
/// not yet expired.
pub async fn get_totp_secret_from_redis(
    state: &SessionState,
    user_id: &str,
) -> UserResult<Option<masking::Secret<String>>> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let raw_secret = redis_conn
        .get_key::<Option<String>>(&get_totp_secret_key(user_id).into())
        .await
        .change_context(UserErrors::InternalServerError)?;
    Ok(raw_secret.map(Into::into))
}
/// Removes the pending TOTP secret for `user_id` from Redis.
pub async fn delete_totp_secret_from_redis(state: &SessionState, user_id: &str) -> UserResult<()> {
    let secret_key = get_totp_secret_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .delete_key(&secret_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        // The delete-count reply is not interesting to callers.
        .map(|_| ())
}
/// Redis key under which a user's pending TOTP secret is stored.
fn get_totp_secret_key(user_id: &str) -> String {
    format!(
        "{prefix}{user_id}",
        prefix = consts::user::REDIS_TOTP_SECRET_PREFIX
    )
}
/// Records a recovery-code-verified marker for `user_id`, storing the current
/// unix timestamp with the configured 2FA expiry.
pub async fn insert_recovery_code_in_redis(state: &SessionState, user_id: &str) -> UserResult<()> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let recovery_key = format!("{}{}", consts::user::REDIS_RECOVERY_CODE_PREFIX, user_id);
    let verified_at = common_utils::date_time::now_unix_timestamp();
    redis_conn
        .set_key_with_expiry(
            &recovery_key.as_str().into(),
            verified_at,
            state.conf.user.two_factor_auth_expiry_in_secs,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Clears the TOTP-verified marker for `user_id` from Redis.
pub async fn delete_totp_from_redis(state: &SessionState, user_id: &str) -> UserResult<()> {
    let totp_key = format!("{}{}", consts::user::REDIS_TOTP_PREFIX, user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .delete_key(&totp_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        // The delete-count reply is not interesting to callers.
        .map(|_| ())
}
/// Clears the recovery-code-verified marker for `user_id` from Redis.
pub async fn delete_recovery_code_from_redis(
    state: &SessionState,
    user_id: &str,
) -> UserResult<()> {
    let recovery_key = format!("{}{}", consts::user::REDIS_RECOVERY_CODE_PREFIX, user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .delete_key(&recovery_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        // The delete-count reply is not interesting to callers.
        .map(|_| ())
}
/// Redis key counting a user's TOTP verification attempts.
fn get_totp_attempts_key(user_id: &str) -> String {
    format!(
        "{prefix}{user_id}",
        prefix = consts::user::REDIS_TOTP_ATTEMPTS_PREFIX
    )
}
/// Redis key counting a user's recovery-code verification attempts.
fn get_recovery_code_attempts_key(user_id: &str) -> String {
    format!(
        "{prefix}{user_id}",
        prefix = consts::user::REDIS_RECOVERY_CODE_ATTEMPTS_PREFIX
    )
}
/// Persists the user's current TOTP attempt counter with a fixed TTL.
pub async fn insert_totp_attempts_in_redis(
    state: &SessionState,
    user_id: &str,
    user_totp_attempts: u8,
) -> UserResult<()> {
    let attempts_key = get_totp_attempts_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .set_key_with_expiry(
            &attempts_key.into(),
            user_totp_attempts,
            consts::user::REDIS_TOTP_ATTEMPTS_TTL_IN_SECS,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Reads the user's TOTP attempt counter; a missing key counts as zero.
pub async fn get_totp_attempts_from_redis(state: &SessionState, user_id: &str) -> UserResult<u8> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let attempts = redis_conn
        .get_key::<Option<u8>>(&get_totp_attempts_key(user_id).into())
        .await
        .change_context(UserErrors::InternalServerError)?;
    Ok(attempts.unwrap_or(0))
}
/// Persists the user's current recovery-code attempt counter with a fixed TTL.
pub async fn insert_recovery_code_attempts_in_redis(
    state: &SessionState,
    user_id: &str,
    user_recovery_code_attempts: u8,
) -> UserResult<()> {
    let attempts_key = get_recovery_code_attempts_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .set_key_with_expiry(
            &attempts_key.into(),
            user_recovery_code_attempts,
            consts::user::REDIS_RECOVERY_CODE_ATTEMPTS_TTL_IN_SECS,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Reads the user's recovery-code attempt counter; a missing key counts as zero.
pub async fn get_recovery_code_attempts_from_redis(
    state: &SessionState,
    user_id: &str,
) -> UserResult<u8> {
    let redis_conn = super::get_redis_connection_for_global_tenant(state)?;
    let attempts = redis_conn
        .get_key::<Option<u8>>(&get_recovery_code_attempts_key(user_id).into())
        .await
        .change_context(UserErrors::InternalServerError)?;
    Ok(attempts.unwrap_or(0))
}
/// Clears the user's TOTP attempt counter from Redis.
pub async fn delete_totp_attempts_from_redis(
    state: &SessionState,
    user_id: &str,
) -> UserResult<()> {
    let attempts_key = get_totp_attempts_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .delete_key(&attempts_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        // The delete-count reply is not interesting to callers.
        .map(|_| ())
}
/// Clears the user's recovery-code attempt counter from Redis.
pub async fn delete_recovery_code_attempts_from_redis(
    state: &SessionState,
    user_id: &str,
) -> UserResult<()> {
    let attempts_key = get_recovery_code_attempts_key(user_id);
    super::get_redis_connection_for_global_tenant(state)?
        .delete_key(&attempts_key.into())
        .await
        .change_context(UserErrors::InternalServerError)
        // The delete-count reply is not interesting to callers.
        .map(|_| ())
}
// File: crates/router/src/utils/user/sample_data.rs
use api_models::{
enums::Connector::{DummyConnector4, DummyConnector7},
user::sample_data::SampleDataRequest,
};
use common_utils::{
id_type,
types::{AmountConvertor, ConnectorTransactionId, MinorUnit, StringMinorUnitForConnector},
};
#[cfg(feature = "v1")]
use diesel_models::user::sample_data::PaymentAttemptBatchNew;
use diesel_models::{enums as storage_enums, DisputeNew, RefundNew};
use error_stack::ResultExt;
use hyperswitch_domain_models::payments::PaymentIntent;
use rand::{prelude::SliceRandom, thread_rng, Rng};
use time::OffsetDateTime;
use crate::{
consts,
core::errors::sample_data::{SampleDataError, SampleDataResult},
types::domain,
SessionState,
};
/// Generates sample payment data for a merchant: one `PaymentIntent` plus one
/// `PaymentAttemptBatchNew` per record, with an optional `RefundNew` and/or
/// `DisputeNew` attached to a subset of them.
///
/// The record count comes from `req.record` (default 100) and must lie in
/// `10..=100`. Roughly 10% of the generated payments are failed, roughly 20%
/// carry a refund, and one or two disputes are attached depending on sample
/// size. Timestamps are spread over `req.start_time..=req.end_time`
/// (defaulting to the last 7 days) and amounts over
/// `req.min_amount..=req.max_amount` (defaulting to 100..=200).
///
/// # Errors
/// - `SampleDataError::InvalidRange` when the record count is outside 10..=100.
/// - `SampleDataError::InvalidParameters` on inconsistent amount/time bounds or
///   empty currency/connector/auth-type lists.
/// - `SampleDataError::DataDoesNotExist` when the merchant cannot be found.
/// - `SampleDataError::InternalServerError` on storage/parsing failures.
#[cfg(feature = "v1")]
#[allow(clippy::type_complexity)]
pub async fn generate_sample_data(
    state: &SessionState,
    req: SampleDataRequest,
    merchant_id: &id_type::MerchantId,
    org_id: &id_type::OrganizationId,
) -> SampleDataResult<
    Vec<(
        PaymentIntent,
        PaymentAttemptBatchNew,
        Option<RefundNew>,
        Option<DisputeNew>,
    )>,
> {
    let sample_data_size: usize = req.record.unwrap_or(100);
    let key_manager_state = &state.into();
    if !(10..=100).contains(&sample_data_size) {
        return Err(SampleDataError::InvalidRange.into());
    }
    // The merchant key store is required to decrypt the merchant account below.
    let key_store = state
        .store
        .get_merchant_key_store_by_merchant_id(
            key_manager_state,
            merchant_id,
            &state.store.get_master_key().to_vec().into(),
        )
        .await
        .change_context(SampleDataError::InternalServerError)?;
    let merchant_from_db = state
        .store
        .find_merchant_account_by_merchant_id(key_manager_state, merchant_id, &key_store)
        .await
        .change_context::<SampleDataError>(SampleDataError::DataDoesNotExist)?;
    let merchant_context = domain::MerchantContext::NormalMerchant(Box::new(domain::Context(
        merchant_from_db.clone(),
        key_store,
    )));
    // v1: derive the profile from the merchant's primary business details;
    // v2: the caller must supply the profile id explicitly.
    #[cfg(feature = "v1")]
    let (profile_id_result, business_country_default, business_label_default) = {
        let merchant_parsed_details: Vec<api_models::admin::PrimaryBusinessDetails> =
            serde_json::from_value(merchant_from_db.primary_business_details.clone())
                .change_context(SampleDataError::InternalServerError)
                .attach_printable("Error while parsing primary business details")?;
        let business_country_default = merchant_parsed_details.first().map(|x| x.country);
        let business_label_default = merchant_parsed_details.first().map(|x| x.business.clone());
        let profile_id = crate::core::utils::get_profile_id_from_business_details(
            key_manager_state,
            business_country_default,
            business_label_default.as_ref(),
            &merchant_context,
            req.profile_id.as_ref(),
            &*state.store,
            false,
        )
        .await;
        (profile_id, business_country_default, business_label_default)
    };
    #[cfg(feature = "v2")]
    let (profile_id_result, business_country_default, business_label_default) = {
        let profile_id = req
            .profile_id.clone()
            .ok_or(hyperswitch_domain_models::errors::api_error_response::ApiErrorResponse::MissingRequiredField {
                field_name: "profile_id",
            });
        (profile_id, None, None)
    };
    // Fall back to the merchant's first stored profile if resolution failed.
    let profile_id = match profile_id_result {
        Ok(id) => id.clone(),
        Err(error) => {
            router_env::logger::error!(
                "Profile ID not found in business details. Attempting to fetch from the database {error:?}"
            );
            state
                .store
                .list_profile_by_merchant_id(
                    key_manager_state,
                    merchant_context.get_merchant_key_store(),
                    merchant_id,
                )
                .await
                .change_context(SampleDataError::InternalServerError)
                .attach_printable("Failed to get business profile")?
                .first()
                .ok_or(SampleDataError::InternalServerError)?
                .get_id()
                .to_owned()
        }
    };
    // 10 percent payments should be failed
    #[allow(clippy::as_conversions)]
    let failure_attempts = usize::try_from((sample_data_size as f32 / 10.0).round() as i64)
        .change_context(SampleDataError::InvalidParameters)?;
    let failure_after_attempts = sample_data_size / failure_attempts;
    // 20 percent refunds for payments
    #[allow(clippy::as_conversions)]
    let number_of_refunds = usize::try_from((sample_data_size as f32 / 5.0).round() as i64)
        .change_context(SampleDataError::InvalidParameters)?;
    let mut refunds_count = 0;
    // 2 disputes if generated data size is between 50 and 100, 1 dispute if it is less than 50.
    let number_of_disputes: usize = if sample_data_size >= 50 { 2 } else { 1 };
    let mut disputes_count = 0;
    let mut random_array: Vec<usize> = (1..=sample_data_size).collect();
    // Shuffle the array
    let mut rng = thread_rng();
    random_array.shuffle(&mut rng);
    let mut res: Vec<(
        PaymentIntent,
        PaymentAttemptBatchNew,
        Option<RefundNew>,
        Option<DisputeNew>,
    )> = Vec::new();
    let start_time = req
        .start_time
        .unwrap_or(common_utils::date_time::now() - time::Duration::days(7))
        .assume_utc()
        .unix_timestamp();
    let end_time = req
        .end_time
        .unwrap_or_else(common_utils::date_time::now)
        .assume_utc()
        .unix_timestamp();
    let current_time = common_utils::date_time::now().assume_utc().unix_timestamp();
    let min_amount = req.min_amount.unwrap_or(100);
    let max_amount = req.max_amount.unwrap_or(min_amount + 100);
    // Reject inverted ranges and any window that reaches into the future.
    if min_amount > max_amount
        || start_time > end_time
        || start_time > current_time
        || end_time > current_time
    {
        return Err(SampleDataError::InvalidParameters.into());
    };
    let currency_vec = req.currency.unwrap_or(vec![common_enums::Currency::USD]);
    let currency_vec_len = currency_vec.len();
    let connector_vec = req
        .connector
        .unwrap_or(vec![DummyConnector4, DummyConnector7]);
    let connector_vec_len = connector_vec.len();
    let auth_type = req.auth_type.unwrap_or(vec![
        common_enums::AuthenticationType::ThreeDs,
        common_enums::AuthenticationType::NoThreeDs,
    ]);
    let auth_type_len = auth_type.len();
    if currency_vec_len == 0 || connector_vec_len == 0 || auth_type_len == 0 {
        return Err(SampleDataError::InvalidParameters.into());
    }
    // This has to be an internal server error because, this function failing means that the intended functionality is not working as expected
    let dashboard_customer_id =
        id_type::CustomerId::try_from(std::borrow::Cow::from("hs-dashboard-user"))
            .change_context(SampleDataError::InternalServerError)?;
    for num in 1..=sample_data_size {
        let payment_id = id_type::PaymentId::generate_test_payment_id_for_sample_data();
        let attempt_id = payment_id.get_attempt_id(1);
        let client_secret = payment_id.generate_client_secret();
        let amount = thread_rng().gen_range(min_amount..=max_amount);
        // All three timestamps share one random value inside the requested window.
        let created_at @ modified_at @ last_synced =
            OffsetDateTime::from_unix_timestamp(thread_rng().gen_range(start_time..=end_time))
                .map(common_utils::date_time::convert_to_pdt)
                .unwrap_or(
                    req.start_time.unwrap_or_else(|| {
                        common_utils::date_time::now() - time::Duration::days(7)
                    }),
                );
        let session_expiry =
            created_at.saturating_add(time::Duration::seconds(consts::DEFAULT_SESSION_EXPIRY));
        // After some set of payments sample data will have a failed attempt
        let is_failed_payment =
            (random_array.get(num - 1).unwrap_or(&0) % failure_after_attempts) == 0;
        let payment_intent = PaymentIntent {
            payment_id: payment_id.clone(),
            merchant_id: merchant_id.clone(),
            status: match is_failed_payment {
                true => common_enums::IntentStatus::Failed,
                _ => common_enums::IntentStatus::Succeeded,
            },
            amount: MinorUnit::new(amount * 100),
            currency: Some(
                *currency_vec
                    .get((num - 1) % currency_vec_len)
                    .unwrap_or(&common_enums::Currency::USD),
            ),
            description: Some("This is a sample payment".to_string()),
            created_at,
            modified_at,
            last_synced: Some(last_synced),
            client_secret: Some(client_secret),
            business_country: business_country_default,
            business_label: business_label_default.clone(),
            active_attempt: hyperswitch_domain_models::RemoteStorageObject::ForeignID(
                attempt_id.clone(),
            ),
            attempt_count: 1,
            customer_id: Some(dashboard_customer_id.clone()),
            amount_captured: Some(MinorUnit::new(amount * 100)),
            profile_id: Some(profile_id.clone()),
            return_url: Default::default(),
            metadata: Default::default(),
            connector_id: Default::default(),
            shipping_address_id: Default::default(),
            billing_address_id: Default::default(),
            statement_descriptor_name: Default::default(),
            statement_descriptor_suffix: Default::default(),
            setup_future_usage: Default::default(),
            off_session: Default::default(),
            order_details: Default::default(),
            allowed_payment_method_types: Default::default(),
            connector_metadata: Default::default(),
            feature_metadata: Default::default(),
            merchant_decision: Default::default(),
            payment_link_id: Default::default(),
            payment_confirm_source: Default::default(),
            updated_by: merchant_from_db.storage_scheme.to_string(),
            surcharge_applicable: Default::default(),
            request_incremental_authorization: Default::default(),
            incremental_authorization_allowed: Default::default(),
            authorization_count: Default::default(),
            fingerprint_id: None,
            session_expiry: Some(session_expiry),
            request_external_three_ds_authentication: None,
            split_payments: None,
            frm_metadata: Default::default(),
            customer_details: None,
            billing_details: None,
            merchant_order_reference_id: Default::default(),
            shipping_details: None,
            is_payment_processor_token_flow: None,
            organization_id: org_id.clone(),
            shipping_cost: None,
            tax_details: None,
            skip_external_tax_calculation: None,
            request_extended_authorization: None,
            psd2_sca_exemption_type: None,
            processor_merchant_id: merchant_id.clone(),
            created_by: None,
            force_3ds_challenge: None,
            force_3ds_challenge_trigger: None,
            is_iframe_redirection_enabled: None,
            is_payment_id_from_merchant: None,
            payment_channel: None,
            order_date: None,
            discount_amount: None,
            duty_amount: None,
            tax_status: None,
            shipping_amount_tax: None,
            enable_partial_authorization: None,
            enable_overcapture: None,
            mit_category: None,
        };
        let (connector_transaction_id, processor_transaction_data) =
            ConnectorTransactionId::form_id_and_data(attempt_id.clone());
        let payment_attempt = PaymentAttemptBatchNew {
            attempt_id: attempt_id.clone(),
            payment_id: payment_id.clone(),
            connector_transaction_id: Some(connector_transaction_id),
            merchant_id: merchant_id.clone(),
            status: match is_failed_payment {
                true => common_enums::AttemptStatus::Failure,
                _ => common_enums::AttemptStatus::Charged,
            },
            amount: MinorUnit::new(amount * 100),
            currency: payment_intent.currency,
            connector: Some(
                (*connector_vec
                    .get((num - 1) % connector_vec_len)
                    .unwrap_or(&DummyConnector4))
                .to_string(),
            ),
            payment_method: Some(common_enums::PaymentMethod::Card),
            payment_method_type: Some(get_payment_method_type(thread_rng().gen_range(1..=2))),
            authentication_type: Some(
                *auth_type
                    .get((num - 1) % auth_type_len)
                    .unwrap_or(&common_enums::AuthenticationType::NoThreeDs),
            ),
            error_message: match is_failed_payment {
                true => Some("This is a test payment which has a failed status".to_string()),
                _ => None,
            },
            error_code: match is_failed_payment {
                true => Some("HS001".to_string()),
                _ => None,
            },
            confirm: true,
            created_at,
            modified_at,
            last_synced: Some(last_synced),
            amount_to_capture: Some(MinorUnit::new(amount * 100)),
            connector_response_reference_id: Some(attempt_id.clone()),
            updated_by: merchant_from_db.storage_scheme.to_string(),
            save_to_locker: None,
            offer_amount: None,
            surcharge_amount: None,
            tax_amount: None,
            payment_method_id: None,
            capture_method: None,
            capture_on: None,
            cancellation_reason: None,
            mandate_id: None,
            browser_info: None,
            payment_token: None,
            connector_metadata: None,
            payment_experience: None,
            payment_method_data: None,
            business_sub_label: None,
            straight_through_algorithm: None,
            preprocessing_step_id: None,
            mandate_details: None,
            error_reason: None,
            multiple_capture_count: None,
            amount_capturable: MinorUnit::new(i64::default()),
            merchant_connector_id: None,
            authentication_data: None,
            encoded_data: None,
            unified_code: None,
            unified_message: None,
            net_amount: None,
            external_three_ds_authentication_attempted: None,
            authentication_connector: None,
            authentication_id: None,
            mandate_data: None,
            payment_method_billing_address_id: None,
            fingerprint_id: None,
            charge_id: None,
            client_source: None,
            client_version: None,
            customer_acceptance: None,
            profile_id: profile_id.clone(),
            organization_id: org_id.clone(),
            shipping_cost: None,
            order_tax_amount: None,
            processor_transaction_data,
            connector_mandate_detail: None,
            request_extended_authorization: None,
            extended_authorization_applied: None,
            capture_before: None,
            card_discovery: None,
            processor_merchant_id: Some(merchant_id.clone()),
            created_by: None,
            setup_future_usage_applied: None,
            routing_approach: None,
            connector_request_reference_id: None,
            network_transaction_id: None,
            network_details: None,
            is_stored_credential: None,
            authorized_amount: None,
        };
        // Refunds are only attached to successful payments, up to the quota.
        let refund = if refunds_count < number_of_refunds && !is_failed_payment {
            refunds_count += 1;
            let (connector_transaction_id, processor_transaction_data) =
                ConnectorTransactionId::form_id_and_data(attempt_id.clone());
            Some(RefundNew {
                refund_id: common_utils::generate_id_with_default_len("test"),
                internal_reference_id: common_utils::generate_id_with_default_len("test"),
                external_reference_id: None,
                payment_id: payment_id.clone(),
                attempt_id: attempt_id.clone(),
                merchant_id: merchant_id.clone(),
                connector_transaction_id,
                connector_refund_id: None,
                description: Some("This is a sample refund".to_string()),
                created_at,
                modified_at,
                refund_reason: Some("Sample Refund".to_string()),
                connector: payment_attempt
                    .connector
                    .clone()
                    .unwrap_or(DummyConnector4.to_string()),
                currency: *currency_vec
                    .get((num - 1) % currency_vec_len)
                    .unwrap_or(&common_enums::Currency::USD),
                total_amount: MinorUnit::new(amount * 100),
                refund_amount: MinorUnit::new(amount * 100),
                refund_status: common_enums::RefundStatus::Success,
                sent_to_gateway: true,
                refund_type: diesel_models::enums::RefundType::InstantRefund,
                metadata: None,
                refund_arn: None,
                profile_id: payment_intent.profile_id.clone(),
                updated_by: merchant_from_db.storage_scheme.to_string(),
                merchant_connector_id: payment_attempt.merchant_connector_id.clone(),
                charges: None,
                split_refunds: None,
                organization_id: org_id.clone(),
                processor_refund_data: None,
                processor_transaction_data,
            })
        } else {
            None
        };
        // Disputes go on successful, non-refunded payments, up to the quota.
        let dispute =
            if disputes_count < number_of_disputes && !is_failed_payment && refund.is_none() {
                disputes_count += 1;
                let currency = payment_intent
                    .currency
                    .unwrap_or(common_enums::Currency::USD);
                Some(DisputeNew {
                    dispute_id: common_utils::generate_id_with_default_len("test"),
                    amount: StringMinorUnitForConnector::convert(
                        &StringMinorUnitForConnector,
                        MinorUnit::new(amount * 100),
                        currency,
                    )
                    .change_context(SampleDataError::InternalServerError)?,
                    currency: currency.to_string(),
                    dispute_stage: storage_enums::DisputeStage::Dispute,
                    dispute_status: storage_enums::DisputeStatus::DisputeOpened,
                    payment_id: payment_id.clone(),
                    attempt_id: attempt_id.clone(),
                    merchant_id: merchant_id.clone(),
                    connector_status: "Sample connector status".into(),
                    connector_dispute_id: common_utils::generate_id_with_default_len("test"),
                    connector_reason: Some("Sample Dispute".into()),
                    connector_reason_code: Some("123".into()),
                    challenge_required_by: None,
                    connector_created_at: None,
                    connector_updated_at: None,
                    connector: payment_attempt
                        .connector
                        .clone()
                        .unwrap_or(DummyConnector4.to_string()),
                    evidence: None,
                    profile_id: payment_intent.profile_id.clone(),
                    merchant_connector_id: payment_attempt.merchant_connector_id.clone(),
                    dispute_amount: MinorUnit::new(amount * 100),
                    organization_id: org_id.clone(),
                    dispute_currency: Some(payment_intent.currency.unwrap_or_default()),
                })
            } else {
                None
            };
        res.push((payment_intent, payment_attempt, refund, dispute));
    }
    Ok(res)
}
/// Picks a card payment method type by parity: even `num` yields `Debit`,
/// odd yields `Credit`.
fn get_payment_method_type(num: u8) -> common_enums::PaymentMethodType {
    if num % 2 == 0 {
        common_enums::PaymentMethodType::Debit
    } else {
        common_enums::PaymentMethodType::Credit
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/utils/user/theme.rs",
"crates/router/src/utils/user/dashboard_metadata.rs",
"crates/router/src/utils/user/password.rs",
"crates/router/src/utils/user/two_factor_auth.rs",
"crates/router/src/utils/user/sample_data.rs"
],
"module": "crates/router/src/utils/user",
"num_files": 5,
"token_count": 10864
}
|
module_707112558663688572
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/configs/defaults
Files: 1
</path>
<module>
// File: crates/router/src/configs/defaults/payout_required_fields.rs
use std::collections::HashMap;
use api_models::{
enums::{
CountryAlpha2, FieldType,
PaymentMethod::{BankTransfer, Card, Wallet},
PaymentMethodType, PayoutConnectors,
},
payment_methods::RequiredFieldInfo,
};
use crate::settings::{
ConnectorFields, PaymentMethodType as PaymentMethodTypeInfo, PayoutRequiredFields,
RequiredFieldFinal,
};
#[cfg(feature = "v1")]
impl Default for PayoutRequiredFields {
    /// Builds the default payout required-fields configuration, covering
    /// cards, bank transfers and wallets for the supported payout connectors.
    fn default() -> Self {
        // Card payouts (Adyen)
        let card_methods = PaymentMethodTypeInfo(HashMap::from([
            get_connector_payment_method_type_fields(
                PayoutConnectors::Adyenplatform,
                PaymentMethodType::Debit,
            ),
            get_connector_payment_method_type_fields(
                PayoutConnectors::Adyenplatform,
                PaymentMethodType::Credit,
            ),
        ]));
        // Bank transfer payouts (Adyen, Ebanx, Wise)
        let bank_transfer_methods = PaymentMethodTypeInfo(HashMap::from([
            get_connector_payment_method_type_fields(
                PayoutConnectors::Adyenplatform,
                PaymentMethodType::SepaBankTransfer,
            ),
            get_connector_payment_method_type_fields(
                PayoutConnectors::Ebanx,
                PaymentMethodType::Pix,
            ),
            get_connector_payment_method_type_fields(
                PayoutConnectors::Wise,
                PaymentMethodType::Bacs,
            ),
        ]));
        // Wallet payouts (Adyen)
        let wallet_methods = PaymentMethodTypeInfo(HashMap::from([
            get_connector_payment_method_type_fields(
                PayoutConnectors::Adyenplatform,
                PaymentMethodType::Paypal,
            ),
        ]));
        Self(HashMap::from([
            (Card, card_methods),
            (BankTransfer, bank_transfer_methods),
            (Wallet, wallet_methods),
        ]))
    }
}
/// Billing-address fields required by `connector` for the given payout method.
///
/// Adyenplatform gets a curated set (line1, line2, city, country, plus
/// first_name for SEPA bank payouts); every other connector falls back to
/// the generic `get_billing_details` mapping.
fn get_billing_details_for_payment_method(
    connector: PayoutConnectors,
    payment_method_type: PaymentMethodType,
) -> HashMap<String, RequiredFieldInfo> {
    match connector {
        PayoutConnectors::Adyenplatform => {
            // Helper for plain free-text billing fields.
            let text_field = |path: &str, display: &str| {
                (
                    path.to_string(),
                    RequiredFieldInfo {
                        required_field: path.to_string(),
                        display_name: display.to_string(),
                        field_type: FieldType::Text,
                        value: None,
                    },
                )
            };
            let mut fields = HashMap::from([
                text_field("billing.address.line1", "billing_address_line1"),
                text_field("billing.address.line2", "billing_address_line2"),
                text_field("billing.address.city", "billing_address_city"),
                (
                    "billing.address.country".to_string(),
                    RequiredFieldInfo {
                        required_field: "billing.address.country".to_string(),
                        display_name: "billing_address_country".to_string(),
                        field_type: FieldType::UserAddressCountry {
                            options: get_countries_for_connector(connector)
                                .iter()
                                .map(|country| country.to_string())
                                .collect::<Vec<String>>(),
                        },
                        value: None,
                    },
                ),
            ]);
            // Add first_name for bank payouts only
            if payment_method_type == PaymentMethodType::SepaBankTransfer {
                let (key, info) =
                    text_field("billing.address.first_name", "billing_address_first_name");
                fields.insert(key, info);
            }
            fields
        }
        _ => get_billing_details(connector),
    }
}
/// Returns the `(payment_method_type, ConnectorFields)` pair describing every
/// field `connector` requires for the given payout method type.
///
/// The result merges the connector's billing-address requirements with the
/// payment-method-specific fields (card, bank, wallet or bank-redirect data).
/// Unsupported payout method types yield an empty field map.
#[cfg(feature = "v1")]
fn get_connector_payment_method_type_fields(
    connector: PayoutConnectors,
    payment_method_type: PaymentMethodType,
) -> (PaymentMethodType, ConnectorFields) {
    let mut common_fields = get_billing_details_for_payment_method(connector, payment_method_type);
    // Payment-method-specific fields layered on top of the billing details.
    // Previously each arm duplicated the full ConnectorFields construction;
    // it is now built once below.
    let method_fields = match payment_method_type {
        // Card
        PaymentMethodType::Debit | PaymentMethodType::Credit => get_card_fields(),
        // Banks
        PaymentMethodType::Bacs => get_bacs_fields(),
        PaymentMethodType::Pix => get_pix_bank_transfer_fields(),
        PaymentMethodType::SepaBankTransfer => get_sepa_fields(),
        // Wallets
        PaymentMethodType::Paypal => get_paypal_fields(),
        // Bank Redirect
        PaymentMethodType::Interac => get_interac_fields(connector),
        // Unsupported payout method types carry no required fields.
        _ => {
            return (
                payment_method_type,
                ConnectorFields {
                    fields: HashMap::new(),
                },
            )
        }
    };
    common_fields.extend(method_fields);
    (
        payment_method_type,
        ConnectorFields {
            fields: HashMap::from([(
                connector.into(),
                RequiredFieldFinal {
                    mandate: HashMap::new(),
                    non_mandate: HashMap::new(),
                    common: common_fields,
                },
            )]),
        },
    )
}
fn get_card_fields() -> HashMap<String, RequiredFieldInfo> {
HashMap::from([
(
"payout_method_data.card.card_number".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.card.card_number".to_string(),
display_name: "card_number".to_string(),
field_type: FieldType::UserCardNumber,
value: None,
},
),
(
"payout_method_data.card.expiry_month".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.card.expiry_month".to_string(),
display_name: "exp_month".to_string(),
field_type: FieldType::UserCardExpiryMonth,
value: None,
},
),
(
"payout_method_data.card.expiry_year".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.card.expiry_year".to_string(),
display_name: "exp_year".to_string(),
field_type: FieldType::UserCardExpiryYear,
value: None,
},
),
(
"payout_method_data.card.card_holder_name".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.card.card_holder_name".to_string(),
display_name: "card_holder_name".to_string(),
field_type: FieldType::UserFullName,
value: None,
},
),
])
}
/// Bank fields required for BACS (UK) bank-transfer payouts.
fn get_bacs_fields() -> HashMap<String, RequiredFieldInfo> {
    let text_field = |path: &str, display: &str| {
        (
            path.to_string(),
            RequiredFieldInfo {
                required_field: path.to_string(),
                display_name: display.to_string(),
                field_type: FieldType::Text,
                value: None,
            },
        )
    };
    HashMap::from([
        text_field("payout_method_data.bank.bank_sort_code", "bank_sort_code"),
        text_field(
            "payout_method_data.bank.bank_account_number",
            "bank_account_number",
        ),
    ])
}
/// Bank fields required for Pix (Brazil) bank-transfer payouts.
fn get_pix_bank_transfer_fields() -> HashMap<String, RequiredFieldInfo> {
    let text_field = |path: &str, display: &str| {
        (
            path.to_string(),
            RequiredFieldInfo {
                required_field: path.to_string(),
                display_name: display.to_string(),
                field_type: FieldType::Text,
                value: None,
            },
        )
    };
    HashMap::from([
        text_field(
            "payout_method_data.bank.bank_account_number",
            "bank_account_number",
        ),
        text_field("payout_method_data.bank.pix_key", "pix_key"),
    ])
}
/// Bank fields required for SEPA bank-transfer payouts.
fn get_sepa_fields() -> HashMap<String, RequiredFieldInfo> {
    let text_field = |path: &str, display: &str| {
        (
            path.to_string(),
            RequiredFieldInfo {
                required_field: path.to_string(),
                display_name: display.to_string(),
                field_type: FieldType::Text,
                value: None,
            },
        )
    };
    HashMap::from([
        text_field("payout_method_data.bank.iban", "iban"),
        text_field("payout_method_data.bank.bic", "bic"),
    ])
}
/// Fields required for PayPal wallet payouts.
fn get_paypal_fields() -> HashMap<String, RequiredFieldInfo> {
    let path = "payout_method_data.wallet.telephone_number";
    HashMap::from([(
        path.to_string(),
        RequiredFieldInfo {
            required_field: path.to_string(),
            display_name: "telephone_number".to_string(),
            field_type: FieldType::Text,
            value: None,
        },
    )])
}
fn get_interac_fields(connector: PayoutConnectors) -> HashMap<String, RequiredFieldInfo> {
match connector {
PayoutConnectors::Loonio => HashMap::from([
(
"payout_method_data.bank_redirect.interac.email".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.bank_redirect.interac.email".to_string(),
display_name: "email".to_string(),
field_type: FieldType::Text,
value: None,
},
),
(
"billing.address.first_name".to_string(),
RequiredFieldInfo {
required_field: "billing.address.first_name".to_string(),
display_name: "billing_address_first_name".to_string(),
field_type: FieldType::Text,
value: None,
},
),
(
"billing.address.last_name".to_string(),
RequiredFieldInfo {
required_field: "billing.address.last_name".to_string(),
display_name: "billing_address_last_name".to_string(),
field_type: FieldType::Text,
value: None,
},
),
]),
PayoutConnectors::Gigadat => HashMap::from([
(
"payout_method_data.bank_redirect.interac.email".to_string(),
RequiredFieldInfo {
required_field: "payout_method_data.bank_redirect.interac.email".to_string(),
display_name: "email".to_string(),
field_type: FieldType::Text,
value: None,
},
),
(
"billing.address.first_name".to_string(),
RequiredFieldInfo {
required_field: "billing.address.first_name".to_string(),
display_name: "billing_address_first_name".to_string(),
field_type: FieldType::Text,
value: None,
},
),
(
"billing.address.last_name".to_string(),
RequiredFieldInfo {
required_field: "billing.address.last_name".to_string(),
display_name: "billing_address_last_name".to_string(),
field_type: FieldType::Text,
value: None,
},
),
(
"billing.phone.number".to_string(),
RequiredFieldInfo {
required_field: "payment_method_data.billing.phone.number".to_string(),
display_name: "phone".to_string(),
field_type: FieldType::UserPhoneNumber,
value: None,
},
),
(
"billing.phone.country_code".to_string(),
RequiredFieldInfo {
required_field: "payment_method_data.billing.phone.country_code".to_string(),
display_name: "dialing_code".to_string(),
field_type: FieldType::UserPhoneNumberCountryCode,
value: None,
},
),
]),
_ => HashMap::from([]),
}
}
/// Returns the list of countries supported for payouts by the given connector.
///
/// Connectors without an explicit list return an empty `Vec`, which callers
/// render as an empty country-options list.
fn get_countries_for_connector(connector: PayoutConnectors) -> Vec<CountryAlpha2> {
    match connector {
        // NOTE(review): the original list contained `DE` twice, which produced
        // a duplicate entry in the rendered country options; the duplicate has
        // been removed (confirm it was not meant to be another country, e.g. DK).
        PayoutConnectors::Adyenplatform => vec![
            CountryAlpha2::ES,
            CountryAlpha2::SK,
            CountryAlpha2::AT,
            CountryAlpha2::NL,
            CountryAlpha2::DE,
            CountryAlpha2::BE,
            CountryAlpha2::FR,
            CountryAlpha2::FI,
            CountryAlpha2::PT,
            CountryAlpha2::IE,
            CountryAlpha2::EE,
            CountryAlpha2::LT,
            CountryAlpha2::LV,
            CountryAlpha2::IT,
            CountryAlpha2::CZ,
            CountryAlpha2::HU,
            CountryAlpha2::NO,
            CountryAlpha2::PL,
            CountryAlpha2::SE,
            CountryAlpha2::GB,
            CountryAlpha2::CH,
        ],
        PayoutConnectors::Stripe => vec![CountryAlpha2::US],
        _ => vec![],
    }
}
/// Billing-address fields each payout connector requires, keyed by the
/// required-field path. Connectors not listed here require no billing details.
fn get_billing_details(connector: PayoutConnectors) -> HashMap<String, RequiredFieldInfo> {
    // Builds a plain free-text billing entry keyed by its field path.
    fn text_field(path: &str, display: &str) -> (String, RequiredFieldInfo) {
        (
            path.to_string(),
            RequiredFieldInfo {
                required_field: path.to_string(),
                display_name: display.to_string(),
                field_type: FieldType::Text,
                value: None,
            },
        )
    }
    // Builds the country entry, restricted to the connector's supported countries.
    fn country_field(connector: PayoutConnectors) -> (String, RequiredFieldInfo) {
        (
            "billing.address.country".to_string(),
            RequiredFieldInfo {
                required_field: "billing.address.country".to_string(),
                display_name: "billing_address_country".to_string(),
                field_type: FieldType::UserAddressCountry {
                    options: get_countries_for_connector(connector)
                        .iter()
                        .map(|country| country.to_string())
                        .collect::<Vec<String>>(),
                },
                value: None,
            },
        )
    }
    match connector {
        PayoutConnectors::Adyen => HashMap::from([
            text_field("billing.address.line1", "billing_address_line1"),
            text_field("billing.address.line2", "billing_address_line2"),
            text_field("billing.address.city", "billing_address_city"),
            text_field("billing.address.zip", "billing_address_zip"),
            country_field(connector),
            text_field("billing.address.first_name", "billing_address_first_name"),
            text_field("billing.address.last_name", "billing_address_last_name"),
        ]),
        PayoutConnectors::Wise => HashMap::from([
            text_field("billing.address.line1", "billing_address_line1"),
            text_field("billing.address.city", "billing_address_city"),
            text_field("billing.address.state", "billing_address_state"),
            text_field("billing.address.zip", "billing_address_zip"),
            country_field(connector),
            text_field("billing.address.first_name", "billing_address_first_name"),
        ]),
        _ => HashMap::new(),
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/configs/defaults/payout_required_fields.rs"
],
"module": "crates/router/src/configs/defaults",
"num_files": 1,
"token_count": 4009
}
|
module_-1287527440888468718
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/db/user
Files: 2
</path>
<module>
// File: crates/router/src/db/user/theme.rs
use common_utils::types::user::ThemeLineage;
use diesel_models::user::theme::{self as storage, ThemeUpdate};
use error_stack::report;
use super::MockDb;
use crate::{
connection,
core::errors::{self, CustomResult},
services::Store,
};
#[async_trait::async_trait]
/// Storage access for user themes, addressable either by id or by lineage.
pub trait ThemeInterface {
    /// Persists a new theme record.
    async fn insert_theme(
        &self,
        theme: storage::ThemeNew,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Looks a theme up by its unique `theme_id`.
    async fn find_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Finds the theme at the given lineage or, failing that, at the closest
    /// broader lineage level (see the MockDb implementation for the search order).
    async fn find_most_specific_theme_in_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Finds the theme that sits at exactly the given lineage.
    async fn find_theme_by_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Applies `theme_update` to the theme with the given id and returns the
    /// updated record.
    async fn update_theme_by_theme_id(
        &self,
        theme_id: String,
        theme_update: ThemeUpdate,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Deletes the theme with the given id and returns the deleted record.
    async fn delete_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError>;
    /// Lists every theme at the given lineage and at all narrower levels
    /// beneath it.
    async fn list_themes_at_and_under_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<Vec<storage::Theme>, errors::StorageError>;
}
#[async_trait::async_trait]
impl ThemeInterface for Store {
    // Postgres-backed implementation: write methods use the write pool,
    // read methods the read pool; all diesel errors are mapped into
    // `StorageError` reports.
    async fn insert_theme(
        &self,
        theme: storage::ThemeNew,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_write(self).await?;
        theme
            .insert(&conn)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn find_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_read(self).await?;
        storage::Theme::find_by_theme_id(&conn, theme_id)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn find_most_specific_theme_in_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_read(self).await?;
        storage::Theme::find_most_specific_theme_in_lineage(&conn, lineage)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn find_theme_by_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_read(self).await?;
        storage::Theme::find_by_lineage(&conn, lineage)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn update_theme_by_theme_id(
        &self,
        theme_id: String,
        theme_update: ThemeUpdate,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_write(self).await?;
        storage::Theme::update_by_theme_id(&conn, theme_id, theme_update)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn delete_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let conn = connection::pg_connection_write(self).await?;
        storage::Theme::delete_by_theme_id(&conn, theme_id)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
    async fn list_themes_at_and_under_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<Vec<storage::Theme>, errors::StorageError> {
        let conn = connection::pg_connection_read(self).await?;
        storage::Theme::find_all_by_lineage_hierarchy(&conn, lineage)
            .await
            .map_err(|error| report!(errors::StorageError::from(error)))
    }
}
/// Returns `true` when `theme` lives at exactly the level described by
/// `lineage`: every id the lineage carries must match the theme's, and every
/// level *below* the lineage must be unset on the theme.
fn check_theme_with_lineage(theme: &storage::Theme, lineage: &ThemeLineage) -> bool {
    // Normalize the lineage into the exact (org, merchant, profile) ids a
    // matching theme must carry; `None` means the theme's field must be unset.
    let (tenant_id, org_id, merchant_id, profile_id) = match lineage {
        ThemeLineage::Tenant { tenant_id } => (tenant_id, None, None, None),
        ThemeLineage::Organization { tenant_id, org_id } => (tenant_id, Some(org_id), None, None),
        ThemeLineage::Merchant {
            tenant_id,
            org_id,
            merchant_id,
        } => (tenant_id, Some(org_id), Some(merchant_id), None),
        ThemeLineage::Profile {
            tenant_id,
            org_id,
            merchant_id,
            profile_id,
        } => (tenant_id, Some(org_id), Some(merchant_id), Some(profile_id)),
    };
    // `Option` equality covers both cases at once: `Some == Some` compares the
    // inner ids, `None == None` enforces that the field is unset.
    &theme.tenant_id == tenant_id
        && theme.org_id.as_ref() == org_id
        && theme.merchant_id.as_ref() == merchant_id
        && theme.profile_id.as_ref() == profile_id
}
/// Returns `true` when `theme` sits at or below the level described by
/// `lineage`: every id the lineage carries must match the theme's, while
/// levels the lineage does not mention are left unconstrained.
fn check_theme_belongs_to_lineage_hierarchy(
    theme: &storage::Theme,
    lineage: &ThemeLineage,
) -> bool {
    // `None` constraint = unconstrained; `Some(expected)` requires the theme
    // to carry a matching value.
    fn satisfies<T: PartialEq>(constraint: Option<&T>, actual: Option<&T>) -> bool {
        constraint.map_or(true, |expected| {
            actual.is_some_and(|value| value == expected)
        })
    }
    let (tenant_id, org_id, merchant_id, profile_id) = match lineage {
        ThemeLineage::Tenant { tenant_id } => (tenant_id, None, None, None),
        ThemeLineage::Organization { tenant_id, org_id } => (tenant_id, Some(org_id), None, None),
        ThemeLineage::Merchant {
            tenant_id,
            org_id,
            merchant_id,
        } => (tenant_id, Some(org_id), Some(merchant_id), None),
        ThemeLineage::Profile {
            tenant_id,
            org_id,
            merchant_id,
            profile_id,
        } => (tenant_id, Some(org_id), Some(merchant_id), Some(profile_id)),
    };
    &theme.tenant_id == tenant_id
        && satisfies(org_id, theme.org_id.as_ref())
        && satisfies(merchant_id, theme.merchant_id.as_ref())
        && satisfies(profile_id, theme.profile_id.as_ref())
}
#[async_trait::async_trait]
impl ThemeInterface for MockDb {
    // In-memory implementation backed by `self.themes` behind an async lock;
    // mirrors the Store implementation's semantics for tests.
    async fn insert_theme(
        &self,
        new_theme: storage::ThemeNew,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let mut themes = self.themes.lock().await;
        for theme in themes.iter() {
            // `theme_id` acts as the primary key: reject duplicates.
            if new_theme.theme_id == theme.theme_id {
                return Err(errors::StorageError::DuplicateValue {
                    entity: "theme_id",
                    key: None,
                }
                .into());
            }
            // At most one theme may exist per exact lineage
            // (tenant / org / merchant / profile combination).
            if new_theme.tenant_id == theme.tenant_id
                && new_theme.org_id == theme.org_id
                && new_theme.merchant_id == theme.merchant_id
                && new_theme.profile_id == theme.profile_id
            {
                return Err(errors::StorageError::DuplicateValue {
                    entity: "lineage",
                    key: None,
                }
                .into());
            }
        }
        let theme = storage::Theme {
            theme_id: new_theme.theme_id,
            tenant_id: new_theme.tenant_id,
            org_id: new_theme.org_id,
            merchant_id: new_theme.merchant_id,
            profile_id: new_theme.profile_id,
            created_at: new_theme.created_at,
            last_modified_at: new_theme.last_modified_at,
            entity_type: new_theme.entity_type,
            theme_name: new_theme.theme_name,
            email_primary_color: new_theme.email_primary_color,
            email_foreground_color: new_theme.email_foreground_color,
            email_background_color: new_theme.email_background_color,
            email_entity_name: new_theme.email_entity_name,
            email_entity_logo_url: new_theme.email_entity_logo_url,
        };
        themes.push(theme.clone());
        Ok(theme)
    }
    async fn find_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let themes = self.themes.lock().await;
        themes
            .iter()
            .find(|theme| theme.theme_id == theme_id)
            .cloned()
            .ok_or(
                errors::StorageError::ValueNotFound(format!("Theme with id {theme_id} not found"))
                    .into(),
            )
    }
    async fn find_most_specific_theme_in_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let themes = self.themes.lock().await;
        // Candidates: the requested lineage plus its broader (higher) levels.
        let lineages = lineage.get_same_and_higher_lineages();
        themes
            .iter()
            .filter(|theme| {
                lineages
                    .iter()
                    .any(|lineage| check_theme_with_lineage(theme, lineage))
            })
            // NOTE(review): relies on `entity_type`'s `Ord` ranking more
            // specific entity types lower — confirm against the enum's
            // declaration order.
            .min_by_key(|theme| theme.entity_type)
            .ok_or(
                errors::StorageError::ValueNotFound("No theme found in lineage".to_string()).into(),
            )
            .cloned()
    }
    async fn find_theme_by_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let themes = self.themes.lock().await;
        themes
            .iter()
            .find(|theme| check_theme_with_lineage(theme, &lineage))
            .cloned()
            .ok_or(
                errors::StorageError::ValueNotFound(format!(
                    "Theme with lineage {lineage:?} not found",
                ))
                .into(),
            )
    }
    async fn update_theme_by_theme_id(
        &self,
        theme_id: String,
        theme_update: ThemeUpdate,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let mut themes = self.themes.lock().await;
        themes
            .iter_mut()
            .find(|theme| theme.theme_id == theme_id)
            .map(|theme| {
                // Apply the requested update in place and return the new state.
                match theme_update {
                    ThemeUpdate::EmailConfig { email_config } => {
                        theme.email_primary_color = email_config.primary_color;
                        theme.email_foreground_color = email_config.foreground_color;
                        theme.email_background_color = email_config.background_color;
                        theme.email_entity_name = email_config.entity_name;
                        theme.email_entity_logo_url = email_config.entity_logo_url;
                    }
                }
                theme.clone()
            })
            .ok_or_else(|| {
                report!(errors::StorageError::ValueNotFound(format!(
                    "Theme with id {theme_id} not found",
                )))
            })
    }
    async fn delete_theme_by_theme_id(
        &self,
        theme_id: String,
    ) -> CustomResult<storage::Theme, errors::StorageError> {
        let mut themes = self.themes.lock().await;
        let index = themes
            .iter()
            .position(|theme| theme.theme_id == theme_id)
            .ok_or(errors::StorageError::ValueNotFound(format!(
                "Theme with id {theme_id} not found"
            )))?;
        // Remove and return the deleted record.
        let theme = themes.remove(index);
        Ok(theme)
    }
    async fn list_themes_at_and_under_lineage(
        &self,
        lineage: ThemeLineage,
    ) -> CustomResult<Vec<storage::Theme>, errors::StorageError> {
        let themes = self.themes.lock().await;
        let matching_themes: Vec<storage::Theme> = themes
            .iter()
            .filter(|theme| check_theme_belongs_to_lineage_hierarchy(theme, &lineage))
            .cloned()
            .collect();
        Ok(matching_themes)
    }
}
// File: crates/router/src/db/user/sample_data.rs
use common_utils::types::keymanager::KeyManagerState;
#[cfg(feature = "v1")]
use diesel_models::user::sample_data::PaymentAttemptBatchNew;
use diesel_models::{
dispute::{Dispute, DisputeNew},
errors::DatabaseError,
query::user::sample_data as sample_data_queries,
refund::{Refund, RefundNew},
};
use error_stack::{Report, ResultExt};
use futures::{future::try_join_all, FutureExt};
use hyperswitch_domain_models::{
behaviour::Conversion,
merchant_key_store::MerchantKeyStore,
payments::{payment_attempt::PaymentAttempt, PaymentIntent},
};
use storage_impl::{errors::StorageError, DataModelExt};
use crate::{connection::pg_connection_write, core::errors::CustomResult, services::Store};
#[async_trait::async_trait]
/// Bulk insert/delete operations used to generate and tear down sample data
/// for a merchant.
pub trait BatchSampleDataInterface {
    /// Inserts a batch of payment intents, encrypting via `key_store`.
    #[cfg(feature = "v1")]
    async fn insert_payment_intents_batch_for_sample_data(
        &self,
        state: &KeyManagerState,
        batch: Vec<PaymentIntent>,
        key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError>;
    /// Inserts a batch of payment attempts.
    #[cfg(feature = "v1")]
    async fn insert_payment_attempts_batch_for_sample_data(
        &self,
        batch: Vec<PaymentAttemptBatchNew>,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError>;
    /// Inserts a batch of refunds.
    #[cfg(feature = "v1")]
    async fn insert_refunds_batch_for_sample_data(
        &self,
        batch: Vec<RefundNew>,
    ) -> CustomResult<Vec<Refund>, StorageError>;
    /// Inserts a batch of disputes.
    #[cfg(feature = "v1")]
    async fn insert_disputes_batch_for_sample_data(
        &self,
        batch: Vec<DisputeNew>,
    ) -> CustomResult<Vec<Dispute>, StorageError>;
    /// Deletes the merchant's sample payment intents and returns them.
    #[cfg(feature = "v1")]
    async fn delete_payment_intents_for_sample_data(
        &self,
        state: &KeyManagerState,
        merchant_id: &common_utils::id_type::MerchantId,
        key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError>;
    /// Deletes the merchant's sample payment attempts and returns them.
    #[cfg(feature = "v1")]
    async fn delete_payment_attempts_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError>;
    /// Deletes the merchant's sample refunds and returns them.
    #[cfg(feature = "v1")]
    async fn delete_refunds_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Refund>, StorageError>;
    /// Deletes the merchant's sample disputes and returns them.
    #[cfg(feature = "v1")]
    async fn delete_disputes_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Dispute>, StorageError>;
}
#[async_trait::async_trait]
impl BatchSampleDataInterface for Store {
    #[cfg(feature = "v1")]
    async fn insert_payment_intents_batch_for_sample_data(
        &self,
        state: &KeyManagerState,
        batch: Vec<PaymentIntent>,
        key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        // Convert domain intents into their storage (encrypted) form before insert.
        let new_intents = try_join_all(batch.into_iter().map(|payment_intent| async {
            payment_intent
                .construct_new()
                .await
                .change_context(StorageError::EncryptionError)
        }))
        .await?;
        sample_data_queries::insert_payment_intents(&conn, new_intents)
            .await
            .map_err(diesel_error_to_data_error)
            // Decrypt the freshly inserted rows back into domain models.
            .map(|v| {
                try_join_all(v.into_iter().map(|payment_intent| {
                    PaymentIntent::convert_back(
                        state,
                        payment_intent,
                        key_store.key.get_inner(),
                        key_store.merchant_id.clone().into(),
                    )
                }))
                .map(|join_result| join_result.change_context(StorageError::DecryptionError))
            })?
            .await
    }
    #[cfg(feature = "v1")]
    async fn insert_payment_attempts_batch_for_sample_data(
        &self,
        batch: Vec<PaymentAttemptBatchNew>,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::insert_payment_attempts(&conn, batch)
            .await
            .map_err(diesel_error_to_data_error)
            // Attempts are not encrypted; a plain storage-model conversion suffices.
            .map(|res| {
                res.into_iter()
                    .map(PaymentAttempt::from_storage_model)
                    .collect()
            })
    }
    #[cfg(feature = "v1")]
    async fn insert_refunds_batch_for_sample_data(
        &self,
        batch: Vec<RefundNew>,
    ) -> CustomResult<Vec<Refund>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::insert_refunds(&conn, batch)
            .await
            .map_err(diesel_error_to_data_error)
    }
    #[cfg(feature = "v1")]
    async fn insert_disputes_batch_for_sample_data(
        &self,
        batch: Vec<DisputeNew>,
    ) -> CustomResult<Vec<Dispute>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::insert_disputes(&conn, batch)
            .await
            .map_err(diesel_error_to_data_error)
    }
    #[cfg(feature = "v1")]
    async fn delete_payment_intents_for_sample_data(
        &self,
        state: &KeyManagerState,
        merchant_id: &common_utils::id_type::MerchantId,
        key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::delete_payment_intents(&conn, merchant_id)
            .await
            .map_err(diesel_error_to_data_error)
            // Decrypt the deleted rows so callers get domain models back.
            .map(|v| {
                try_join_all(v.into_iter().map(|payment_intent| {
                    PaymentIntent::convert_back(
                        state,
                        payment_intent,
                        key_store.key.get_inner(),
                        key_store.merchant_id.clone().into(),
                    )
                }))
                .map(|join_result| join_result.change_context(StorageError::DecryptionError))
            })?
            .await
    }
    #[cfg(feature = "v1")]
    async fn delete_payment_attempts_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::delete_payment_attempts(&conn, merchant_id)
            .await
            .map_err(diesel_error_to_data_error)
            .map(|res| {
                res.into_iter()
                    .map(PaymentAttempt::from_storage_model)
                    .collect()
            })
    }
    #[cfg(feature = "v1")]
    async fn delete_refunds_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Refund>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::delete_refunds(&conn, merchant_id)
            .await
            .map_err(diesel_error_to_data_error)
    }
    #[cfg(feature = "v1")]
    async fn delete_disputes_for_sample_data(
        &self,
        merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Dispute>, StorageError> {
        let conn = pg_connection_write(self)
            .await
            .change_context(StorageError::DatabaseConnectionError)?;
        sample_data_queries::delete_disputes(&conn, merchant_id)
            .await
            .map_err(diesel_error_to_data_error)
    }
}
#[async_trait::async_trait]
// Sample-data batch operations are not supported on the mock database;
// every method fails with `MockDbError`.
impl BatchSampleDataInterface for storage_impl::MockDb {
    #[cfg(feature = "v1")]
    async fn insert_payment_intents_batch_for_sample_data(
        &self,
        _state: &KeyManagerState,
        _batch: Vec<PaymentIntent>,
        _key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn insert_payment_attempts_batch_for_sample_data(
        &self,
        _batch: Vec<PaymentAttemptBatchNew>,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn insert_refunds_batch_for_sample_data(
        &self,
        _batch: Vec<RefundNew>,
    ) -> CustomResult<Vec<Refund>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn insert_disputes_batch_for_sample_data(
        &self,
        _batch: Vec<DisputeNew>,
    ) -> CustomResult<Vec<Dispute>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn delete_payment_intents_for_sample_data(
        &self,
        _state: &KeyManagerState,
        _merchant_id: &common_utils::id_type::MerchantId,
        _key_store: &MerchantKeyStore,
    ) -> CustomResult<Vec<PaymentIntent>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn delete_payment_attempts_for_sample_data(
        &self,
        _merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<PaymentAttempt>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn delete_refunds_for_sample_data(
        &self,
        _merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Refund>, StorageError> {
        Err(StorageError::MockDbError)?
    }
    #[cfg(feature = "v1")]
    async fn delete_disputes_for_sample_data(
        &self,
        _merchant_id: &common_utils::id_type::MerchantId,
    ) -> CustomResult<Vec<Dispute>, StorageError> {
        Err(StorageError::MockDbError)?
    }
}
// TODO: This error conversion is re-used from storage_impl and is not DRY when it should be
// Ideally the impl's here should be defined in that crate avoiding this re-definition
/// Maps a diesel-layer `DatabaseError` report onto the storage-layer
/// `StorageError`, keeping the original report attached as context.
fn diesel_error_to_data_error(diesel_error: Report<DatabaseError>) -> Report<StorageError> {
    let new_err = match diesel_error.current_context() {
        DatabaseError::DatabaseConnectionError => StorageError::DatabaseConnectionError,
        DatabaseError::NotFound => StorageError::ValueNotFound("Value not found".to_string()),
        // Sample-data inserts don't know which entity conflicted, so a generic
        // label is used. (Fixed: the label previously carried a stray trailing
        // space — "entity ".)
        DatabaseError::UniqueViolation => StorageError::DuplicateValue {
            entity: "entity",
            key: None,
        },
        err => StorageError::DatabaseError(error_stack::report!(*err)),
    };
    diesel_error.change_context(new_err)
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/db/user/theme.rs",
"crates/router/src/db/user/sample_data.rs"
],
"module": "crates/router/src/db/user",
"num_files": 2,
"token_count": 5334
}
|
module_3831055645439964946
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/events
Files: 6
</path>
<module>
// File: crates/router/src/events/outgoing_webhook_logs.rs
use api_models::{enums::EventType as OutgoingWebhookEventType, webhooks::OutgoingWebhookContent};
use common_enums::WebhookDeliveryAttempt;
use serde::Serialize;
use serde_json::Value;
use time::OffsetDateTime;
use super::EventType;
use crate::services::kafka::KafkaMessage;
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
/// Kafka message describing a single outgoing-webhook event/delivery attempt.
pub struct OutgoingWebhookEvent {
    tenant_id: common_utils::id_type::TenantId,
    merchant_id: common_utils::id_type::MerchantId,
    event_id: String,
    event_type: OutgoingWebhookEventType,
    // Flattened so the content's own tag and fields serialize at the top level.
    #[serde(flatten)]
    content: Option<OutgoingWebhookEventContent>,
    // Derived from `error.is_some()` in `new`.
    is_error: bool,
    error: Option<Value>,
    // Milliseconds since the Unix epoch (set in `new`).
    created_at_timestamp: i128,
    initial_attempt_id: Option<String>,
    status_code: Option<u16>,
    delivery_attempt: Option<WebhookDeliveryAttempt>,
}
#[derive(Clone, Debug, PartialEq, Serialize)]
#[serde(tag = "outgoing_webhook_event_type", rename_all = "snake_case")]
/// Resource-specific payload attached to an `OutgoingWebhookEvent`.
///
/// The `Payment`/`Refund`/`Dispute` variants are duplicated per API version
/// because v1 and v2 use different id types for the same resources.
pub enum OutgoingWebhookEventContent {
    #[cfg(feature = "v1")]
    Payment {
        payment_id: common_utils::id_type::PaymentId,
        content: Value,
    },
    #[cfg(feature = "v2")]
    Payment {
        payment_id: common_utils::id_type::GlobalPaymentId,
        content: Value,
    },
    Payout {
        payout_id: common_utils::id_type::PayoutId,
        content: Value,
    },
    #[cfg(feature = "v1")]
    Refund {
        payment_id: common_utils::id_type::PaymentId,
        refund_id: String,
        content: Value,
    },
    #[cfg(feature = "v2")]
    Refund {
        payment_id: common_utils::id_type::GlobalPaymentId,
        refund_id: common_utils::id_type::GlobalRefundId,
        content: Value,
    },
    #[cfg(feature = "v1")]
    Dispute {
        payment_id: common_utils::id_type::PaymentId,
        attempt_id: String,
        dispute_id: String,
        content: Value,
    },
    #[cfg(feature = "v2")]
    Dispute {
        payment_id: common_utils::id_type::GlobalPaymentId,
        attempt_id: String,
        dispute_id: String,
        content: Value,
    },
    Mandate {
        payment_method_id: String,
        mandate_id: String,
        content: Value,
    },
}
/// Conversion of an outgoing webhook body into its loggable event content.
pub trait OutgoingWebhookEventMetric {
    /// Returns the resource-specific content to attach to the log event, if
    /// one can be produced for this payload.
    fn get_outgoing_webhook_event_content(&self) -> Option<OutgoingWebhookEventContent>;
}
#[cfg(feature = "v1")]
impl OutgoingWebhookEventMetric for OutgoingWebhookContent {
    /// Builds the loggable content for this webhook body, masking sensitive
    /// fields during serialization.
    fn get_outgoing_webhook_event_content(&self) -> Option<OutgoingWebhookEventContent> {
        // Serialize with PII masking; degrade to an error marker so a
        // serialization failure never drops the event itself.
        fn masked<T: Serialize>(payload: &T) -> Value {
            masking::masked_serialize(payload)
                .unwrap_or(serde_json::json!({"error":"failed to serialize"}))
        }
        match self {
            Self::PaymentDetails(payment_payload) => Some(OutgoingWebhookEventContent::Payment {
                payment_id: payment_payload.payment_id.clone(),
                content: masked(payment_payload),
            }),
            Self::RefundDetails(refund_payload) => Some(OutgoingWebhookEventContent::Refund {
                payment_id: refund_payload.payment_id.clone(),
                refund_id: refund_payload.get_refund_id_as_string(),
                content: masked(refund_payload),
            }),
            Self::DisputeDetails(dispute_payload) => Some(OutgoingWebhookEventContent::Dispute {
                payment_id: dispute_payload.payment_id.clone(),
                attempt_id: dispute_payload.attempt_id.clone(),
                dispute_id: dispute_payload.dispute_id.clone(),
                content: masked(dispute_payload),
            }),
            Self::MandateDetails(mandate_payload) => Some(OutgoingWebhookEventContent::Mandate {
                payment_method_id: mandate_payload.payment_method_id.clone(),
                mandate_id: mandate_payload.mandate_id.clone(),
                content: masked(mandate_payload),
            }),
            #[cfg(feature = "payouts")]
            Self::PayoutDetails(payout_payload) => Some(OutgoingWebhookEventContent::Payout {
                payout_id: payout_payload.payout_id.clone(),
                content: masked(payout_payload),
            }),
        }
    }
}
#[cfg(feature = "v2")]
impl OutgoingWebhookEventMetric for OutgoingWebhookContent {
    /// Builds the loggable content for this webhook body, masking sensitive
    /// fields during serialization. Disputes are not yet supported in v2.
    fn get_outgoing_webhook_event_content(&self) -> Option<OutgoingWebhookEventContent> {
        match self {
            Self::PaymentDetails(payment_payload) => Some(OutgoingWebhookEventContent::Payment {
                payment_id: payment_payload.id.clone(),
                content: masking::masked_serialize(&payment_payload)
                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
            }),
            Self::RefundDetails(refund_payload) => Some(OutgoingWebhookEventContent::Refund {
                payment_id: refund_payload.payment_id.clone(),
                refund_id: refund_payload.id.clone(),
                content: masking::masked_serialize(&refund_payload)
                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
            }),
            // The binding is deliberately underscore-prefixed: the payload is
            // unused until dispute webhooks land in v2, and the plain binding
            // triggered an unused-variable warning on this `todo!()` arm.
            Self::DisputeDetails(_dispute_payload) => {
                //TODO: add support for dispute outgoing webhook
                todo!()
            }
            Self::MandateDetails(mandate_payload) => Some(OutgoingWebhookEventContent::Mandate {
                payment_method_id: mandate_payload.payment_method_id.clone(),
                mandate_id: mandate_payload.mandate_id.clone(),
                content: masking::masked_serialize(&mandate_payload)
                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
            }),
            #[cfg(feature = "payouts")]
            Self::PayoutDetails(payout_payload) => Some(OutgoingWebhookEventContent::Payout {
                payout_id: payout_payload.payout_id.clone(),
                content: masking::masked_serialize(&payout_payload)
                    .unwrap_or(serde_json::json!({"error":"failed to serialize"})),
            }),
        }
    }
}
impl OutgoingWebhookEvent {
    /// Builds a new outgoing-webhook log event.
    ///
    /// `is_error` is derived from `error`, and the creation timestamp is
    /// captured here in milliseconds since the Unix epoch.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        tenant_id: common_utils::id_type::TenantId,
        merchant_id: common_utils::id_type::MerchantId,
        event_id: String,
        event_type: OutgoingWebhookEventType,
        content: Option<OutgoingWebhookEventContent>,
        error: Option<Value>,
        initial_attempt_id: Option<String>,
        status_code: Option<u16>,
        delivery_attempt: Option<WebhookDeliveryAttempt>,
    ) -> Self {
        Self {
            tenant_id,
            merchant_id,
            event_id,
            event_type,
            content,
            is_error: error.is_some(),
            error,
            // nanoseconds since epoch -> milliseconds
            created_at_timestamp: OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000_000,
            initial_attempt_id,
            status_code,
            delivery_attempt,
        }
    }
}
impl KafkaMessage for OutgoingWebhookEvent {
    /// Routes this message to the outgoing-webhook log event type.
    fn event_type(&self) -> EventType {
        EventType::OutgoingWebhookLogs
    }
    /// Kafka message key: the event id.
    fn key(&self) -> String {
        self.event_id.clone()
    }
}
// File: crates/router/src/events/api_logs.rs
use actix_web::HttpRequest;
pub use common_utils::events::{ApiEventMetric, ApiEventsType};
use common_utils::impl_api_event_type;
use router_env::{tracing_actix_web::RequestId, types::FlowMetric};
use serde::Serialize;
use time::OffsetDateTime;
use super::EventType;
#[cfg(feature = "dummy_connector")]
use crate::routes::dummy_connector::types::{
DummyConnectorPaymentCompleteRequest, DummyConnectorPaymentConfirmRequest,
DummyConnectorPaymentRequest, DummyConnectorPaymentResponse,
DummyConnectorPaymentRetrieveRequest, DummyConnectorRefundRequest,
DummyConnectorRefundResponse, DummyConnectorRefundRetrieveRequest,
};
use crate::{
core::payments::PaymentsRedirectResponseData,
services::{authentication::AuthenticationType, kafka::KafkaMessage},
types::api::{
AttachEvidenceRequest, Config, ConfigUpdate, CreateFileRequest, DisputeFetchQueryData,
DisputeId, FileId, FileRetrieveRequest, PollId,
},
};
#[derive(Clone, Debug, Eq, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
/// Kafka message capturing a single API request/response cycle.
pub struct ApiEvent {
    tenant_id: common_utils::id_type::TenantId,
    merchant_id: Option<common_utils::id_type::MerchantId>,
    api_flow: String,
    // Milliseconds since the Unix epoch (set in `new`).
    created_at_timestamp: i128,
    request_id: String,
    // End-to-end latency as supplied by the caller of `new`; units are the
    // caller's — TODO confirm (likely milliseconds).
    latency: u128,
    status_code: i64,
    #[serde(flatten)]
    auth_type: AuthenticationType,
    // Request/response bodies are stored as JSON-encoded strings.
    request: String,
    user_agent: Option<String>,
    ip_addr: Option<String>,
    url_path: String,
    response: Option<String>,
    error: Option<serde_json::Value>,
    #[serde(flatten)]
    event_type: ApiEventsType,
    // NOTE(review): presumably Hyperswitch-internal latency (name-based) —
    // confirm with the producers of this value.
    hs_latency: Option<u128>,
    http_method: String,
    #[serde(flatten)]
    infra_components: Option<serde_json::Value>,
}
impl ApiEvent {
    /// Assembles an API log event from the request/response pair and the
    /// surrounding HTTP context (client IP, user agent, path).
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        tenant_id: common_utils::id_type::TenantId,
        merchant_id: Option<common_utils::id_type::MerchantId>,
        api_flow: &impl FlowMetric,
        request_id: &RequestId,
        latency: u128,
        status_code: i64,
        request: serde_json::Value,
        response: Option<serde_json::Value>,
        hs_latency: Option<u128>,
        auth_type: AuthenticationType,
        error: Option<serde_json::Value>,
        event_type: ApiEventsType,
        http_req: &HttpRequest,
        http_method: &http::Method,
        infra_components: Option<serde_json::Value>,
    ) -> Self {
        Self {
            tenant_id,
            merchant_id,
            api_flow: api_flow.to_string(),
            // nanoseconds since epoch -> milliseconds
            created_at_timestamp: OffsetDateTime::now_utc().unix_timestamp_nanos() / 1_000_000,
            request_id: request_id.as_hyphenated().to_string(),
            latency,
            status_code,
            // Bodies are stored as JSON strings rather than nested JSON.
            request: request.to_string(),
            response: response.map(|resp| resp.to_string()),
            auth_type,
            error,
            // Honors proxy headers via actix's `realip_remote_addr`.
            ip_addr: http_req
                .connection_info()
                .realip_remote_addr()
                .map(ToOwned::to_owned),
            // Non-UTF-8 user-agent header values are dropped.
            user_agent: http_req
                .headers()
                .get("user-agent")
                .and_then(|user_agent_value| user_agent_value.to_str().ok().map(ToOwned::to_owned)),
            url_path: http_req.path().to_string(),
            event_type,
            hs_latency,
            http_method: http_method.to_string(),
            infra_components,
        }
    }
}
impl KafkaMessage for ApiEvent {
    /// Routes this message to the API log event type.
    fn event_type(&self) -> EventType {
        EventType::ApiLogs
    }
    /// Kafka message key: the request id.
    fn key(&self) -> String {
        self.request_id.clone()
    }
}
// Flows whose request/response types carry no identifying resource are
// classified as `Miscellaneous` API events.
impl_api_event_type!(
    Miscellaneous,
    (
        Config,
        CreateFileRequest,
        FileId,
        FileRetrieveRequest,
        AttachEvidenceRequest,
        DisputeFetchQueryData,
        ConfigUpdate
    )
);
// Dummy-connector request/response types likewise have no dedicated event
// classification and are logged as `Miscellaneous`.
#[cfg(feature = "dummy_connector")]
impl_api_event_type!(
    Miscellaneous,
    (
        DummyConnectorPaymentCompleteRequest,
        DummyConnectorPaymentRequest,
        DummyConnectorPaymentResponse,
        DummyConnectorPaymentRetrieveRequest,
        DummyConnectorPaymentConfirmRequest,
        DummyConnectorRefundRetrieveRequest,
        DummyConnectorRefundResponse,
        DummyConnectorRefundRequest
    )
);
#[cfg(feature = "v1")]
impl ApiEventMetric for PaymentsRedirectResponseData {
    /// Classifies a redirect response as a `PaymentRedirectionResponse`
    /// event, attaching the payment id only when the resource id is a
    /// payment-intent id.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        let payment_id =
            if let api_models::payments::PaymentIdType::PaymentIntentId(id) = &self.resource_id {
                Some(id.clone())
            } else {
                None
            };
        Some(ApiEventsType::PaymentRedirectionResponse {
            connector: self.connector.clone(),
            payment_id,
        })
    }
}
#[cfg(feature = "v2")]
impl ApiEventMetric for PaymentsRedirectResponseData {
    /// v2 redirect responses always carry the payment id directly.
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::PaymentRedirectionResponse {
            payment_id: self.payment_id.clone(),
        })
    }
}
/// Classifies dispute-scoped requests by their dispute id.
impl ApiEventMetric for DisputeId {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Dispute {
            dispute_id: self.dispute_id.clone(),
        })
    }
}
/// Classifies poll-scoped requests by their poll id.
impl ApiEventMetric for PollId {
    fn get_api_event_type(&self) -> Option<ApiEventsType> {
        Some(ApiEventsType::Poll {
            poll_id: self.poll_id.clone(),
        })
    }
}
// File: crates/router/src/events/audit_events.rs
use api_models::payments::Amount;
use common_utils::types::MinorUnit;
use diesel_models::fraud_check::FraudCheck;
use events::{Event, EventInfo};
use serde::Serialize;
use time::PrimitiveDateTime;
/// Business-level audit event emitted at key points of a payment/refund
/// lifecycle. Serialized with an `event_type` tag so downstream consumers
/// can filter by kind.
#[derive(Debug, Clone, Serialize)]
#[serde(tag = "event_type")]
pub enum AuditEventType {
    Error {
        error_message: String,
    },
    PaymentCreated,
    ConnectorDecided,
    ConnectorCalled,
    RefundCreated,
    RefundSuccess,
    RefundFail,
    PaymentConfirm {
        // Client SDK source/version that initiated the confirm, when known.
        client_src: Option<String>,
        client_ver: Option<String>,
        // Boxed -- presumably to keep the enum's size down since `FraudCheck`
        // is a full diesel model; TODO confirm.
        frm_message: Box<Option<FraudCheck>>,
    },
    PaymentCancelled {
        cancellation_reason: Option<String>,
    },
    PaymentCapture {
        capture_amount: Option<MinorUnit>,
        multiple_capture_count: Option<i16>,
    },
    PaymentUpdate {
        amount: Amount,
    },
    PaymentApprove,
    PaymentCreate,
    PaymentStatus,
    PaymentCompleteAuthorize,
    PaymentReject {
        error_code: Option<String>,
        error_message: Option<String>,
    },
}
/// An [`AuditEventType`] stamped with its creation time; this is the payload
/// that actually gets logged/forwarded.
#[derive(Debug, Clone, Serialize)]
pub struct AuditEvent {
    #[serde(flatten)]
    event_type: AuditEventType,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    created_at: PrimitiveDateTime,
}
impl AuditEvent {
    /// Wraps an [`AuditEventType`] together with the current timestamp.
    pub fn new(event_type: AuditEventType) -> Self {
        let created_at = common_utils::date_time::now();
        Self {
            event_type,
            created_at,
        }
    }
}
impl Event for AuditEvent {
    type EventType = super::EventType;
    fn timestamp(&self) -> PrimitiveDateTime {
        self.created_at
    }
    /// Identifier of the form `<event_type>-<unix_nanos>`; unique per event
    /// up to nanosecond timestamp collisions.
    fn identifier(&self) -> String {
        let event_type = match &self.event_type {
            AuditEventType::Error { .. } => "error",
            AuditEventType::PaymentCreated => "payment_created",
            AuditEventType::PaymentConfirm { .. } => "payment_confirm",
            AuditEventType::ConnectorDecided => "connector_decided",
            AuditEventType::ConnectorCalled => "connector_called",
            AuditEventType::PaymentCapture { .. } => "payment_capture",
            AuditEventType::RefundCreated => "refund_created",
            AuditEventType::RefundSuccess => "refund_success",
            AuditEventType::RefundFail => "refund_fail",
            AuditEventType::PaymentCancelled { .. } => "payment_cancelled",
            AuditEventType::PaymentUpdate { .. } => "payment_update",
            AuditEventType::PaymentApprove => "payment_approve",
            AuditEventType::PaymentCreate => "payment_create",
            AuditEventType::PaymentStatus => "payment_status",
            AuditEventType::PaymentCompleteAuthorize => "payment_complete_authorize",
            // NOTE(review): "payment_rejected" breaks the verb-form naming of
            // the other labels; downstream consumers may depend on the exact
            // string, so it is intentionally left as-is.
            AuditEventType::PaymentReject { .. } => "payment_rejected",
        };
        format!(
            "{event_type}-{}",
            self.timestamp().assume_utc().unix_timestamp_nanos()
        )
    }
    fn class(&self) -> Self::EventType {
        super::EventType::AuditEvent
    }
}
impl EventInfo for AuditEvent {
    type Data = Self;

    /// Audit events are self-describing: the payload is the event itself.
    fn data(&self) -> error_stack::Result<Self::Data, events::EventsError> {
        Ok(self.to_owned())
    }

    /// All audit events share the fixed `"event"` payload key.
    fn key(&self) -> String {
        String::from("event")
    }
}
// File: crates/router/src/events/routing_api_logs.rs
pub use hyperswitch_interfaces::events::routing_api_logs::RoutingEvent;
use super::EventType;
use crate::services::kafka::KafkaMessage;
impl KafkaMessage for RoutingEvent {
    /// Routing decisions are routed to the `RoutingApiLogs` event class.
    fn event_type(&self) -> EventType {
        EventType::RoutingApiLogs
    }
    /// Partition key of the form `merchant-profile-payment`, grouping all
    /// routing records for one payment together.
    fn key(&self) -> String {
        format!(
            "{}-{}-{}",
            self.get_merchant_id(),
            self.get_profile_id(),
            self.get_payment_id()
        )
    }
}
// File: crates/router/src/events/connector_api_logs.rs
pub use hyperswitch_interfaces::events::connector_api_logs::ConnectorEvent;
use super::EventType;
use crate::services::kafka::KafkaMessage;
impl KafkaMessage for ConnectorEvent {
    /// Connector request/response logs go to the `ConnectorApiLogs` class.
    fn event_type(&self) -> EventType {
        EventType::ConnectorApiLogs
    }
    /// Partition key: the originating request id.
    fn key(&self) -> String {
        self.request_id.clone()
    }
}
// File: crates/router/src/events/event_logger.rs
use std::collections::HashMap;
use events::{EventsError, Message, MessagingInterface};
use masking::ErasedMaskSerialize;
use time::PrimitiveDateTime;
use super::EventType;
use crate::services::{kafka::KafkaMessage, logger};
/// Fallback event sink that writes events to the application log instead of
/// an external system (e.g. when no message broker is configured).
#[derive(Clone, Debug, Default)]
pub struct EventLogger {}
impl EventLogger {
    #[track_caller]
    // Masked serialization keeps PII out of the logs; a serialization failure
    // is logged as an error payload rather than being dropped.
    pub(super) fn log_event<T: KafkaMessage>(&self, event: &T) {
        logger::info!(event = ?event.masked_serialize().unwrap_or_else(|e| serde_json::json!({"error": e.to_string()})), event_type =? event.event_type(), event_id =? event.key(), log_type =? "event");
    }
}
impl MessagingInterface for EventLogger {
    type MessageClass = EventType;
    /// Logs the message locally and always reports success; delivery is
    /// best-effort by design for this sink.
    fn send_message<T>(
        &self,
        data: T,
        metadata: HashMap<String, String>,
        _timestamp: PrimitiveDateTime,
    ) -> error_stack::Result<(), EventsError>
    where
        T: Message<Class = Self::MessageClass> + ErasedMaskSerialize,
    {
        logger::info!(event =? data.masked_serialize().unwrap_or_else(|e| serde_json::json!({"error": e.to_string()})), event_type =? data.get_message_class(), event_id =? data.identifier(), log_type =? "event", metadata = ?metadata);
        Ok(())
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/events/outgoing_webhook_logs.rs",
"crates/router/src/events/api_logs.rs",
"crates/router/src/events/audit_events.rs",
"crates/router/src/events/routing_api_logs.rs",
"crates/router/src/events/connector_api_logs.rs",
"crates/router/src/events/event_logger.rs"
],
"module": "crates/router/src/events",
"num_files": 6,
"token_count": 3963
}
|
module_-5081443249974511339
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/routes/dummy_connector
Files: 5
</path>
<module>
// File: crates/router/src/routes/dummy_connector/core.rs
use app::SessionState;
use common_utils::generate_id_with_default_len;
use error_stack::ResultExt;
use super::{errors, types, utils};
use crate::{
routes::{app, dummy_connector::consts},
services::api,
utils::OptionExt,
};
/// Creates a dummy payment: sleeps to simulate connector latency, evaluates
/// the payment attempt, then persists two redis records -- attempt id ->
/// payment id (consumed by the authorize flow) and payment id -> full
/// payment data.
#[cfg(feature = "dummy_connector")]
pub async fn payment(
    state: SessionState,
    req: types::DummyConnectorPaymentRequest,
) -> types::DummyConnectorResponse<types::DummyConnectorPaymentResponse> {
    utils::tokio_mock_sleep(
        state.conf.dummy_connector.payment_duration,
        state.conf.dummy_connector.payment_tolerance,
    )
    .await;
    let payment_attempt: types::DummyConnectorPaymentAttempt = req.into();
    let payment_data =
        types::DummyConnectorPaymentData::process_payment_attempt(&state, payment_attempt)?;
    // Attempt-id lookup record, with its own (authorize) TTL.
    utils::store_data_in_redis(
        &state,
        payment_data.attempt_id.clone(),
        payment_data.payment_id.clone(),
        state.conf.dummy_connector.authorize_ttl,
    )
    .await?;
    // Full payment record keyed by payment id.
    utils::store_data_in_redis(
        &state,
        payment_data.payment_id.get_string_repr().to_owned(),
        payment_data.clone(),
        state.conf.dummy_connector.payment_ttl,
    )
    .await?;
    Ok(api::ApplicationResponse::Json(payment_data.into()))
}
/// Retrieves a previously stored dummy payment by its payment id, after a
/// mock latency matching the configured retrieve duration/tolerance.
pub async fn payment_data(
    state: SessionState,
    req: types::DummyConnectorPaymentRetrieveRequest,
) -> types::DummyConnectorResponse<types::DummyConnectorPaymentResponse> {
    utils::tokio_mock_sleep(
        state.conf.dummy_connector.payment_retrieve_duration,
        state.conf.dummy_connector.payment_retrieve_tolerance,
    )
    .await;
    Ok(api::ApplicationResponse::Json(
        utils::get_payment_data_from_payment_id(&state, req.payment_id)
            .await?
            .into(),
    ))
}
/// Serves the mock 3DS authorize page for an attempt id, or an "expired"
/// page when the attempt can no longer be found in redis. Both variants are
/// returned as raw HTML file data.
#[cfg(all(feature = "dummy_connector", feature = "v1"))]
pub async fn payment_authorize(
    state: SessionState,
    req: types::DummyConnectorPaymentConfirmRequest,
) -> types::DummyConnectorResponse<String> {
    let dummy_connector_conf = &state.conf.dummy_connector;
    let page = match utils::get_payment_data_by_attempt_id(&state, req.attempt_id.clone()).await {
        Ok(payment_data) => {
            // The page's buttons redirect back to the complete endpoint for
            // this attempt.
            let return_url = format!(
                "{}/dummy-connector/complete/{}",
                state.base_url, req.attempt_id
            );
            utils::get_authorize_page(payment_data, return_url, dummy_connector_conf)
        }
        Err(_) => utils::get_expired_page(dummy_connector_conf),
    };
    Ok(api::ApplicationResponse::FileData((
        page.into_bytes(),
        mime::TEXT_HTML,
    )))
}
/// Finalizes a dummy payment after the mock 3DS page: marks it succeeded or
/// failed based on `req.confirm`, drops the attempt-id mapping, persists the
/// updated payment and redirects the caller to the stored return url (or the
/// configured default when none was supplied / the attempt expired).
#[cfg(all(feature = "dummy_connector", feature = "v1"))]
pub async fn payment_complete(
    state: SessionState,
    req: types::DummyConnectorPaymentCompleteRequest,
) -> types::DummyConnectorResponse<()> {
    utils::tokio_mock_sleep(
        state.conf.dummy_connector.payment_duration,
        state.conf.dummy_connector.payment_tolerance,
    )
    .await;
    let payment_data = utils::get_payment_data_by_attempt_id(&state, req.attempt_id.clone()).await;
    let payment_status = if req.confirm {
        types::DummyConnectorStatus::Succeeded
    } else {
        types::DummyConnectorStatus::Failed
    };
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(errors::DummyConnectorErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")?;
    // The attempt-id -> payment-id mapping is single-use; deletion failure
    // is deliberately ignored (best effort).
    let _ = redis_conn.delete_key(&req.attempt_id.as_str().into()).await;
    if let Ok(payment_data) = payment_data {
        let updated_payment_data = types::DummyConnectorPaymentData {
            status: payment_status,
            next_action: None,
            ..payment_data
        };
        utils::store_data_in_redis(
            &state,
            updated_payment_data.payment_id.get_string_repr().to_owned(),
            updated_payment_data.clone(),
            state.conf.dummy_connector.payment_ttl,
        )
        .await?;
        return Ok(api::ApplicationResponse::JsonForRedirection(
            api_models::payments::RedirectionResponse {
                return_url: String::new(),
                params: vec![],
                return_url_with_query_params: updated_payment_data
                    .return_url
                    .unwrap_or(state.conf.dummy_connector.default_return_url.clone()),
                http_method: "GET".to_string(),
                headers: vec![],
            },
        ));
    }
    // Attempt not found (expired): still redirect, to the default return url.
    Ok(api::ApplicationResponse::JsonForRedirection(
        api_models::payments::RedirectionResponse {
            return_url: String::new(),
            params: vec![],
            return_url_with_query_params: state.conf.dummy_connector.default_return_url.clone(),
            http_method: "GET".to_string(),
            headers: vec![],
        },
    ))
}
/// Refunds part (or all) of a succeeded dummy payment after a mock delay:
/// validates eligibility, decrements the payment's refundable amount, and
/// stores both the updated payment and the new refund record in redis.
#[cfg(all(feature = "dummy_connector", feature = "v1"))]
pub async fn refund_payment(
    state: SessionState,
    req: types::DummyConnectorRefundRequest,
) -> types::DummyConnectorResponse<types::DummyConnectorRefundResponse> {
    utils::tokio_mock_sleep(
        state.conf.dummy_connector.refund_duration,
        state.conf.dummy_connector.refund_tolerance,
    )
    .await;
    let payment_id = req
        .payment_id
        .get_required_value("payment_id")
        .change_context(errors::DummyConnectorErrors::MissingRequiredField {
            field_name: "payment_id",
        })?;
    let mut payment_data =
        utils::get_payment_data_from_payment_id(&state, payment_id.get_string_repr().to_owned())
            .await?;
    // Rejects refunds that exceed the remaining eligible amount or target a
    // non-succeeded payment.
    payment_data.is_eligible_for_refund(req.amount)?;
    let refund_id = generate_id_with_default_len(consts::REFUND_ID_PREFIX);
    payment_data.eligible_amount -= req.amount;
    utils::store_data_in_redis(
        &state,
        payment_id.get_string_repr().to_owned(),
        payment_data.to_owned(),
        state.conf.dummy_connector.payment_ttl,
    )
    .await?;
    let refund_data = types::DummyConnectorRefundResponse::new(
        types::DummyConnectorStatus::Succeeded,
        refund_id.to_owned(),
        payment_data.currency,
        common_utils::date_time::now(),
        payment_data.amount,
        req.amount,
    );
    utils::store_data_in_redis(
        &state,
        refund_id,
        refund_data.to_owned(),
        state.conf.dummy_connector.refund_ttl,
    )
    .await?;
    Ok(api::ApplicationResponse::Json(refund_data))
}
/// Fetches a stored refund by id after a mock retrieve delay.
#[cfg(all(feature = "dummy_connector", feature = "v1"))]
pub async fn refund_data(
    state: SessionState,
    req: types::DummyConnectorRefundRetrieveRequest,
) -> types::DummyConnectorResponse<types::DummyConnectorRefundResponse> {
    let refund_id = req.refund_id;
    utils::tokio_mock_sleep(
        state.conf.dummy_connector.refund_retrieve_duration,
        state.conf.dummy_connector.refund_retrieve_tolerance,
    )
    .await;
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(errors::DummyConnectorErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")?;
    // A missing or expired key surfaces as RefundNotFound.
    let refund_data = redis_conn
        .get_and_deserialize_key::<types::DummyConnectorRefundResponse>(
            &refund_id.as_str().into(),
            "DummyConnectorRefundResponse",
        )
        .await
        .change_context(errors::DummyConnectorErrors::RefundNotFound)?;
    Ok(api::ApplicationResponse::Json(refund_data))
}
// File: crates/router/src/routes/dummy_connector/consts.rs
/// Prefix used when generating dummy payment attempt ids.
pub const ATTEMPT_ID_PREFIX: &str = "dummy_attempt";
/// Prefix used when generating dummy refund ids.
pub const REFUND_ID_PREFIX: &str = "dummy_ref";
/// Stylesheet embedded into the mock 3DS authorize/expired pages.
pub const THREE_DS_CSS: &str = include_str!("threeds_page.css");
/// Magic UPI VPA that forces a failed payment in the dummy connector.
pub const DUMMY_CONNECTOR_UPI_FAILURE_VPA_ID: &str = "failure@upi";
/// Magic UPI VPA that forces a successful payment in the dummy connector.
pub const DUMMY_CONNECTOR_UPI_SUCCESS_VPA_ID: &str = "success@upi";
// File: crates/router/src/routes/dummy_connector/types.rs
use api_models::enums::Currency;
use common_utils::{errors::CustomResult, generate_id_with_default_len, pii};
use error_stack::report;
use masking::Secret;
use router_env::types::FlowMetric;
use strum::Display;
use time::PrimitiveDateTime;
use super::{consts, errors::DummyConnectorErrors};
use crate::services;
/// API flows exposed by the dummy connector, used for metrics labeling.
#[derive(Debug, Display, Clone, PartialEq, Eq)]
#[allow(clippy::enum_variant_names)]
pub enum Flow {
    DummyPaymentCreate,
    DummyPaymentRetrieve,
    DummyPaymentAuthorize,
    DummyPaymentComplete,
    DummyRefundCreate,
    DummyRefundRetrieve,
}
impl FlowMetric for Flow {}
/// Fictional connectors plus "<real>-test" variants selectable in the dummy
/// flow. The serde/strum renames keep the fictional ones as lowercase single
/// words on the wire.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, strum::Display, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
#[strum(serialize_all = "snake_case")]
pub enum DummyConnectors {
    #[serde(rename = "phonypay")]
    #[strum(serialize = "phonypay")]
    PhonyPay,
    #[serde(rename = "fauxpay")]
    #[strum(serialize = "fauxpay")]
    FauxPay,
    #[serde(rename = "pretendpay")]
    #[strum(serialize = "pretendpay")]
    PretendPay,
    StripeTest,
    AdyenTest,
    CheckoutTest,
    PaypalTest,
}
impl DummyConnectors {
    /// Returns the full asset URL for this connector's logo, relative to
    /// `base_url`.
    ///
    /// Connectors without a dedicated asset (Adyen/Checkout test) fall back
    /// to the PhonyPay logo, matching the previous catch-all behavior.
    pub fn get_connector_image_link(self, base_url: &str) -> String {
        let image_name = match self {
            Self::PhonyPay => "PHONYPAY.svg",
            Self::FauxPay => "FAUXPAY.svg",
            Self::PretendPay => "PRETENDPAY.svg",
            Self::StripeTest => "STRIPE_TEST.svg",
            Self::PaypalTest => "PAYPAL_TEST.svg",
            // Exhaustive instead of `_` so adding a connector variant forces
            // an explicit asset choice here rather than a silent fallback.
            Self::AdyenTest | Self::CheckoutTest => "PHONYPAY.svg",
        };
        format!("{base_url}{image_name}")
    }
}
/// Lifecycle status of a dummy payment or refund.
#[derive(
    Default, serde::Serialize, serde::Deserialize, strum::Display, Clone, PartialEq, Debug, Eq,
)]
#[serde(rename_all = "lowercase")]
pub enum DummyConnectorStatus {
    Succeeded,
    #[default]
    Processing,
    Failed,
}
/// A single processing attempt for an incoming payment request, keyed by a
/// freshly generated attempt id.
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct DummyConnectorPaymentAttempt {
    pub timestamp: PrimitiveDateTime,
    pub attempt_id: String,
    pub payment_id: common_utils::id_type::PaymentId,
    pub payment_request: DummyConnectorPaymentRequest,
}
impl From<DummyConnectorPaymentRequest> for DummyConnectorPaymentAttempt {
    /// Stamps a fresh attempt around the incoming request: current time, a
    /// default payment id and a newly generated attempt id.
    fn from(payment_request: DummyConnectorPaymentRequest) -> Self {
        Self {
            timestamp: common_utils::date_time::now(),
            attempt_id: generate_id_with_default_len(consts::ATTEMPT_ID_PREFIX),
            payment_id: common_utils::id_type::PaymentId::default(),
            payment_request,
        }
    }
}
impl DummyConnectorPaymentAttempt {
    /// Promotes this attempt into a [`DummyConnectorPaymentData`] with the
    /// given status, optional next action and return url. The requested
    /// amount also seeds the initially refund-eligible amount.
    pub fn build_payment_data(
        self,
        status: DummyConnectorStatus,
        next_action: Option<DummyConnectorNextAction>,
        return_url: Option<String>,
    ) -> DummyConnectorPaymentData {
        let Self {
            timestamp,
            attempt_id,
            payment_id,
            payment_request,
        } = self;
        DummyConnectorPaymentData {
            attempt_id,
            payment_id,
            status,
            amount: payment_request.amount,
            eligible_amount: payment_request.amount,
            connector: payment_request.connector,
            created: timestamp,
            currency: payment_request.currency,
            payment_method_type: payment_request.payment_method_data.into(),
            next_action,
            return_url,
        }
    }
}
/// Inbound payment-creation payload for the dummy connector.
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct DummyConnectorPaymentRequest {
    pub amount: i64,
    pub currency: Currency,
    pub payment_method_data: DummyConnectorPaymentMethodData,
    pub return_url: Option<String>,
    pub connector: DummyConnectors,
}
/// Display metadata (name + logo URL) for anything renderable on the mock
/// authorize page.
pub trait GetPaymentMethodDetails {
    fn get_name(&self) -> &'static str;
    fn get_image_link(&self, base_url: &str) -> String;
}
/// Full payment-method details as submitted by the client.
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
#[serde(rename_all = "lowercase")]
pub enum DummyConnectorPaymentMethodData {
    Card(DummyConnectorCard),
    Upi(DummyConnectorUpi),
    Wallet(DummyConnectorWallet),
    PayLater(DummyConnectorPayLater),
}
/// Type-level view of a payment method, without sensitive details.
#[derive(
    Default, serde::Serialize, serde::Deserialize, strum::Display, PartialEq, Debug, Clone,
)]
#[serde(rename_all = "lowercase")]
pub enum DummyConnectorPaymentMethodType {
    #[default]
    Card,
    Upi(DummyConnectorUpiType),
    Wallet(DummyConnectorWallet),
    PayLater(DummyConnectorPayLater),
}
impl From<DummyConnectorPaymentMethodData> for DummyConnectorPaymentMethodType {
    /// Collapses full payment-method data into its type-level counterpart,
    /// dropping any sensitive payloads (card details, VPAs, ...).
    fn from(value: DummyConnectorPaymentMethodData) -> Self {
        match value {
            DummyConnectorPaymentMethodData::Card(_) => Self::Card,
            DummyConnectorPaymentMethodData::Upi(DummyConnectorUpi::UpiCollect(_)) => {
                Self::Upi(DummyConnectorUpiType::UpiCollect)
            }
            DummyConnectorPaymentMethodData::Wallet(wallet) => Self::Wallet(wallet),
            DummyConnectorPaymentMethodData::PayLater(pay_later) => Self::PayLater(pay_later),
        }
    }
}
impl GetPaymentMethodDetails for DummyConnectorPaymentMethodType {
    /// Human-readable label; cards display as "3D Secure", every other
    /// variant delegates to the inner payment method.
    fn get_name(&self) -> &'static str {
        match self {
            Self::Card => "3D Secure",
            Self::Upi(upi_type) => upi_type.get_name(),
            Self::Wallet(wallet) => wallet.get_name(),
            Self::PayLater(pay_later) => pay_later.get_name(),
        }
    }

    /// Asset URL for the method's logo under `base_url`.
    fn get_image_link(&self, base_url: &str) -> String {
        match self {
            Self::Card => format!("{base_url}CARD.svg"),
            Self::Upi(upi_type) => upi_type.get_image_link(base_url),
            Self::Wallet(wallet) => wallet.get_image_link(base_url),
            Self::PayLater(pay_later) => pay_later.get_image_link(base_url),
        }
    }
}
/// Raw card details; secrets are masked on serialization.
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorCard {
    pub name: Secret<String>,
    pub number: cards::CardNumber,
    pub expiry_month: Secret<String>,
    pub expiry_year: Secret<String>,
    pub cvc: Secret<String>,
}
/// UPI collect details; the VPA uses a UPI-specific masking strategy.
#[derive(Clone, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorUpiCollect {
    pub vpa_id: Secret<String, pii::UpiVpaMaskingStrategy>,
}
/// UPI payment variants (currently only collect).
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum DummyConnectorUpi {
    UpiCollect(DummyConnectorUpiCollect),
}
/// Resolved behavior for a card payment: outcome status plus optional
/// error, with or without a 3DS redirect step.
pub enum DummyConnectorCardFlow {
    NoThreeDS(DummyConnectorStatus, Option<DummyConnectorErrors>),
    ThreeDS(DummyConnectorStatus, Option<DummyConnectorErrors>),
}
/// Wallet providers supported by the dummy connector.
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub enum DummyConnectorWallet {
    GooglePay,
    Paypal,
    WeChatPay,
    MbWay,
    AliPay,
    AliPayHK,
}
/// Type-level UPI variant (no payload).
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub enum DummyConnectorUpiType {
    UpiCollect,
}
impl GetPaymentMethodDetails for DummyConnectorUpiType {
    /// Display name of the UPI flow.
    fn get_name(&self) -> &'static str {
        match self {
            Self::UpiCollect => "UPI Collect",
        }
    }

    /// Asset URL for the UPI logo under `base_url`.
    fn get_image_link(&self, base_url: &str) -> String {
        let asset = match self {
            Self::UpiCollect => "UPI_COLLECT.svg",
        };
        [base_url, asset].concat()
    }
}
impl GetPaymentMethodDetails for DummyConnectorWallet {
    /// Display name of the wallet provider.
    fn get_name(&self) -> &'static str {
        match self {
            Self::GooglePay => "Google Pay",
            Self::Paypal => "PayPal",
            Self::WeChatPay => "WeChat Pay",
            Self::MbWay => "Mb Way",
            Self::AliPay => "Alipay",
            Self::AliPayHK => "Alipay HK",
        }
    }

    /// Asset URL for the wallet logo under `base_url`; Alipay and Alipay HK
    /// share a single asset.
    fn get_image_link(&self, base_url: &str) -> String {
        let asset = match self {
            Self::GooglePay => "GOOGLE_PAY.svg",
            Self::Paypal => "PAYPAL.svg",
            Self::WeChatPay => "WECHAT_PAY.svg",
            Self::MbWay => "MBWAY.svg",
            Self::AliPay | Self::AliPayHK => "ALIPAY.svg",
        };
        [base_url, asset].concat()
    }
}
/// Buy-now-pay-later providers supported by the dummy connector.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, Eq, PartialEq)]
pub enum DummyConnectorPayLater {
    Klarna,
    Affirm,
    AfterPayClearPay,
}
impl GetPaymentMethodDetails for DummyConnectorPayLater {
    /// Display name of the pay-later provider.
    fn get_name(&self) -> &'static str {
        match self {
            Self::Klarna => "Klarna",
            Self::Affirm => "Affirm",
            Self::AfterPayClearPay => "Afterpay Clearpay",
        }
    }

    /// Asset URL for the provider logo under `base_url`.
    fn get_image_link(&self, base_url: &str) -> String {
        let asset = match self {
            Self::Klarna => "KLARNA.svg",
            Self::Affirm => "AFFIRM.svg",
            Self::AfterPayClearPay => "AFTERPAY.svg",
        };
        [base_url, asset].concat()
    }
}
/// Internal redis-persisted state of a dummy payment; a superset of the
/// public [`DummyConnectorPaymentResponse`].
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
pub struct DummyConnectorPaymentData {
    pub attempt_id: String,
    pub payment_id: common_utils::id_type::PaymentId,
    pub status: DummyConnectorStatus,
    // Amounts appear to be minor currency units (the authorize page divides
    // by 100 for display) -- TODO confirm.
    pub amount: i64,
    // Portion of `amount` still refundable; decremented on each refund.
    pub eligible_amount: i64,
    pub currency: Currency,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created: PrimitiveDateTime,
    pub payment_method_type: DummyConnectorPaymentMethodType,
    pub connector: DummyConnectors,
    pub next_action: Option<DummyConnectorNextAction>,
    pub return_url: Option<String>,
}
impl DummyConnectorPaymentData {
    /// Validates that `refund_amount` can be refunded against this payment:
    /// the remaining eligible amount must cover it and the payment itself
    /// must have succeeded. The amount check takes precedence.
    pub fn is_eligible_for_refund(&self, refund_amount: i64) -> DummyConnectorResult<()> {
        if refund_amount > self.eligible_amount {
            Err(
                report!(DummyConnectorErrors::RefundAmountExceedsPaymentAmount)
                    .attach_printable("Eligible amount is lesser than refund amount"),
            )
        } else if self.status != DummyConnectorStatus::Succeeded {
            Err(report!(DummyConnectorErrors::PaymentNotSuccessful)
                .attach_printable("Payment is not successful to process the refund"))
        } else {
            Ok(())
        }
    }
}
/// Follow-up action the client must take to complete the payment.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub enum DummyConnectorNextAction {
    RedirectToUrl(String),
}
/// Public payment representation returned by the dummy connector endpoints.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorPaymentResponse {
    pub status: DummyConnectorStatus,
    pub id: common_utils::id_type::PaymentId,
    pub amount: i64,
    pub currency: Currency,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created: PrimitiveDateTime,
    pub payment_method_type: DummyConnectorPaymentMethodType,
    pub next_action: Option<DummyConnectorNextAction>,
}
impl From<DummyConnectorPaymentData> for DummyConnectorPaymentResponse {
fn from(value: DummyConnectorPaymentData) -> Self {
Self {
status: value.status,
id: value.payment_id,
amount: value.amount,
currency: value.currency,
created: value.created,
payment_method_type: value.payment_method_type,
next_action: value.next_action,
}
}
}
/// Path payload for payment retrieval.
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorPaymentRetrieveRequest {
    pub payment_id: String,
}
/// Path payload for serving the authorize page.
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorPaymentConfirmRequest {
    pub attempt_id: String,
}
/// Payload for completing a payment; `confirm` selects success vs failure.
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorPaymentCompleteRequest {
    pub attempt_id: String,
    pub confirm: bool,
}
/// Query-string body carrying only the confirm flag.
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorPaymentCompleteBody {
    pub confirm: bool,
}
/// Inbound refund request; `payment_id` is validated as required at runtime.
#[derive(Default, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct DummyConnectorRefundRequest {
    pub amount: i64,
    pub payment_id: Option<common_utils::id_type::PaymentId>,
}
/// Refund record returned to the caller and persisted in redis.
#[derive(Clone, Debug, serde::Serialize, Eq, PartialEq, serde::Deserialize)]
pub struct DummyConnectorRefundResponse {
    pub status: DummyConnectorStatus,
    pub id: String,
    pub currency: Currency,
    #[serde(with = "common_utils::custom_serde::iso8601")]
    pub created: PrimitiveDateTime,
    pub payment_amount: i64,
    pub refund_amount: i64,
}
impl DummyConnectorRefundResponse {
    /// Plain constructor bundling all refund response fields.
    pub fn new(
        status: DummyConnectorStatus,
        id: String,
        currency: Currency,
        created: PrimitiveDateTime,
        payment_amount: i64,
        refund_amount: i64,
    ) -> Self {
        Self {
            refund_amount,
            payment_amount,
            created,
            currency,
            id,
            status,
        }
    }
}
/// Path payload for refund retrieval.
#[derive(Default, Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DummyConnectorRefundRetrieveRequest {
    pub refund_id: String,
}
/// Standard endpoint result: an application response or a dummy-connector error.
pub type DummyConnectorResponse<T> =
    CustomResult<services::ApplicationResponse<T>, DummyConnectorErrors>;
/// Shorthand for fallible dummy-connector operations.
pub type DummyConnectorResult<T> = CustomResult<T, DummyConnectorErrors>;
/// Resolved behavior for a UPI payment attempt: outcome status, optional
/// error, and whether a next action (redirect) is required.
pub struct DummyConnectorUpiFlow {
    pub status: DummyConnectorStatus,
    pub error: Option<DummyConnectorErrors>,
    pub is_next_action_required: bool,
}
// File: crates/router/src/routes/dummy_connector/errors.rs
/// Coarse error classification serialized into the error body.
#[derive(Clone, Debug, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum ErrorType {
    ServerNotAvailable,
    ObjectNotFound,
    InvalidRequestError,
}
/// Errors surfaced by the dummy connector endpoints. The
/// `router_derive::ApiError` macro generates accessors (e.g.
/// `error_message()`) from the per-variant attribute metadata; the `DC_xx`
/// codes are stable identifiers.
#[derive(Debug, Clone, router_derive::ApiError)]
#[error(error_type_enum = ErrorType)]
// TODO: Remove this line if InternalServerError is used anywhere
#[allow(dead_code)]
pub enum DummyConnectorErrors {
    #[error(error_type = ErrorType::ServerNotAvailable, code = "DC_00", message = "Something went wrong")]
    InternalServerError,
    #[error(error_type = ErrorType::ObjectNotFound, code = "DC_01", message = "Payment does not exist in our records")]
    PaymentNotFound,
    #[error(error_type = ErrorType::InvalidRequestError, code = "DC_02", message = "Missing required param: {field_name}")]
    MissingRequiredField { field_name: &'static str },
    #[error(error_type = ErrorType::InvalidRequestError, code = "DC_03", message = "The refund amount exceeds the amount captured")]
    RefundAmountExceedsPaymentAmount,
    #[error(error_type = ErrorType::InvalidRequestError, code = "DC_04", message = "Card not supported. Please use test cards")]
    CardNotSupported,
    #[error(error_type = ErrorType::ObjectNotFound, code = "DC_05", message = "Refund does not exist in our records")]
    RefundNotFound,
    #[error(error_type = ErrorType::InvalidRequestError, code = "DC_06", message = "Payment is not successful")]
    PaymentNotSuccessful,
    #[error(error_type = ErrorType::ServerNotAvailable, code = "DC_07", message = "Error occurred while storing the payment")]
    PaymentStoringError,
    #[error(error_type = ErrorType::InvalidRequestError, code = "DC_08", message = "Payment declined: {message}")]
    PaymentDeclined { message: &'static str },
}
impl core::fmt::Display for DummyConnectorErrors {
    /// Renders the error as a JSON envelope `{"error": <serialized self>}`,
    /// falling back to a plain message if serialization fails.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let body = serde_json::to_string(self)
            .unwrap_or_else(|_| "Dummy connector error response".to_string());
        write!(f, r#"{{"error":{body}}}"#)
    }
}
/// Maps each dummy-connector error onto the public API error envelope,
/// preserving the `DC_xx` numeric suffix as the `ApiError` sub-code.
impl common_utils::errors::ErrorSwitch<api_models::errors::types::ApiErrorResponse>
    for DummyConnectorErrors
{
    fn switch(&self) -> api_models::errors::types::ApiErrorResponse {
        use api_models::errors::types::{ApiError, ApiErrorResponse as AER};
        match self {
            Self::InternalServerError => {
                AER::InternalServerError(ApiError::new("DC", 0, self.error_message(), None))
            }
            Self::PaymentNotFound => {
                AER::NotFound(ApiError::new("DC", 1, self.error_message(), None))
            }
            Self::MissingRequiredField { field_name: _ } => {
                AER::BadRequest(ApiError::new("DC", 2, self.error_message(), None))
            }
            // NOTE(review): mapped to a 5xx although it describes a client
            // mistake; left as-is since clients may rely on the status.
            Self::RefundAmountExceedsPaymentAmount => {
                AER::InternalServerError(ApiError::new("DC", 3, self.error_message(), None))
            }
            Self::CardNotSupported => {
                AER::BadRequest(ApiError::new("DC", 4, self.error_message(), None))
            }
            Self::RefundNotFound => {
                AER::NotFound(ApiError::new("DC", 5, self.error_message(), None))
            }
            Self::PaymentNotSuccessful => {
                AER::BadRequest(ApiError::new("DC", 6, self.error_message(), None))
            }
            Self::PaymentStoringError => {
                AER::InternalServerError(ApiError::new("DC", 7, self.error_message(), None))
            }
            Self::PaymentDeclined { message: _ } => {
                AER::BadRequest(ApiError::new("DC", 8, self.error_message(), None))
            }
        }
    }
}
// File: crates/router/src/routes/dummy_connector/utils.rs
use std::fmt::Debug;
use common_utils::ext_traits::AsyncExt;
use error_stack::{report, ResultExt};
use masking::PeekInterface;
use maud::html;
use rand::{distributions::Uniform, prelude::Distribution};
use tokio::time as tokio;
use super::{
consts, errors,
types::{self, GetPaymentMethodDetails},
};
use crate::{
configs::settings,
routes::{dummy_connector::types::DummyConnectors, SessionState},
};
/// Sleeps for a pseudo-random duration of `delay ± tolerance` milliseconds,
/// simulating connector-side latency.
///
/// Guards against the two panic paths of the previous implementation:
/// `delay - tolerance` underflowing when `tolerance > delay`, and
/// `Uniform::from` panicking on the empty range produced by `tolerance == 0`.
pub async fn tokio_mock_sleep(delay: u64, tolerance: u64) {
    let millis = if tolerance == 0 {
        // Zero tolerance means no jitter; sampling an empty range would panic.
        delay
    } else {
        let jitter =
            Uniform::from(delay.saturating_sub(tolerance)..delay.saturating_add(tolerance));
        jitter.sample(&mut rand::thread_rng())
    };
    tokio::sleep(tokio::Duration::from_millis(millis)).await
}
/// Serializes `data` and writes it to redis under `key` with expiry `ttl`
/// (unit as expected by `serialize_and_set_key_with_expiry` -- presumably
/// seconds; TODO confirm against the redis interface).
pub async fn store_data_in_redis(
    state: &SessionState,
    key: String,
    data: impl serde::Serialize + Debug,
    ttl: i64,
) -> types::DummyConnectorResult<()> {
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(errors::DummyConnectorErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")?;
    redis_conn
        .serialize_and_set_key_with_expiry(&key.into(), data, ttl)
        .await
        .change_context(errors::DummyConnectorErrors::PaymentStoringError)
        .attach_printable("Failed to add data in redis")?;
    Ok(())
}
/// Looks up the full payment record stored under `payment_id`; a missing or
/// expired key maps to `PaymentNotFound`.
pub async fn get_payment_data_from_payment_id(
    state: &SessionState,
    payment_id: String,
) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(errors::DummyConnectorErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")?;
    redis_conn
        .get_and_deserialize_key::<types::DummyConnectorPaymentData>(
            &payment_id.as_str().into(),
            "types DummyConnectorPaymentData",
        )
        .await
        .change_context(errors::DummyConnectorErrors::PaymentNotFound)
}
/// Two-step lookup: resolves `attempt_id` to its payment id, then fetches
/// the payment record; a failure at either step maps to `PaymentNotFound`.
pub async fn get_payment_data_by_attempt_id(
    state: &SessionState,
    attempt_id: String,
) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
    let redis_conn = state
        .store
        .get_redis_conn()
        .change_context(errors::DummyConnectorErrors::InternalServerError)
        .attach_printable("Failed to get redis connection")?;
    redis_conn
        .get_and_deserialize_key::<String>(&attempt_id.as_str().into(), "String")
        .await
        .async_and_then(|payment_id| async move {
            redis_conn
                .get_and_deserialize_key::<types::DummyConnectorPaymentData>(
                    &payment_id.as_str().into(),
                    "DummyConnectorPaymentData",
                )
                .await
        })
        .await
        .change_context(errors::DummyConnectorErrors::PaymentNotFound)
}
/// Renders the mock 3DS authorize page for a payment: shows the amount,
/// payment-method and connector logos, and offers Complete/Reject buttons
/// that redirect to `return_url` with `confirm=true|false`.
pub fn get_authorize_page(
    payment_data: types::DummyConnectorPaymentData,
    return_url: String,
    dummy_connector_conf: &settings::DummyConnector,
) -> String {
    let mode = payment_data.payment_method_type.get_name();
    let image = payment_data
        .payment_method_type
        .get_image_link(dummy_connector_conf.assets_base_url.as_str());
    let connector_image = payment_data
        .connector
        .get_connector_image_link(dummy_connector_conf.assets_base_url.as_str());
    let currency = payment_data.currency.to_string();
    html! {
        head {
            title { "Authorize Payment" }
            style { (consts::THREE_DS_CSS) }
            link rel="icon" href=(connector_image) {}
        }
        body {
            div.heading {
                img.logo src="https://app.hyperswitch.io/assets/Dark/hyperswitchLogoIconWithText.svg" alt="Hyperswitch Logo" {}
                h1 { "Test Payment Page" }
            }
            div.container {
                div.payment_details {
                    img src=(image) {}
                    div.border_horizontal {}
                    img src=(connector_image) {}
                }
                // Raw-HTML escape hatch: the inline script renders the amount
                // in major units (minor amount / 100) client-side.
                (maud::PreEscaped(
                    format!(r#"
                <p class="disclaimer">
                    This is a test payment of <span id="amount"></span> {} using {}
                    <script>
                        document.getElementById("amount").innerHTML = ({} / 100).toFixed(2);
                    </script>
                </p>
                "#, currency, mode, payment_data.amount)
                )
                )
                p { b { "Real money will not be debited for the payment." } " \
                    You can choose to simulate successful or failed payment while testing this payment." }
                div.user_action {
                    button.authorize onclick=(format!("window.location.href='{}?confirm=true'", return_url))
                        { "Complete Payment" }
                    button.reject onclick=(format!("window.location.href='{}?confirm=false'", return_url))
                        { "Reject Payment" }
                }
            }
            div.container {
                p.disclaimer { "What is this page?" }
                p { "This page is just a simulation for integration and testing purpose. \
                    In live mode, this page will not be displayed and the user will be taken to \
                    the Bank page (or) Google Pay cards popup (or) original payment method's page. \
                    Contact us for any queries."
                }
                div.contact {
                    div.contact_item.hover_cursor onclick=(dummy_connector_conf.slack_invite_url) {
                        img src="https://hyperswitch.io/logos/logo_slack.svg" alt="Slack Logo" {}
                    }
                    div.contact_item.hover_cursor onclick=(dummy_connector_conf.discord_invite_url) {
                        img src="https://hyperswitch.io/logos/logo_discord.svg" alt="Discord Logo" {}
                    }
                    div.border_vertical {}
                    div.contact_item.email {
                        p { "Or email us at" }
                        a href="mailto:[email protected]" { "[email protected]" }
                    }
                }
            }
        }
    }
    .into_string()
}
/// Renders the "link expired / invalid" page as an HTML string.
///
/// Shown when an authorize link is opened after it is no longer valid.
/// Fix: the continued string literal previously read `"...purpose.\` with no
/// space before the backslash; Rust's backslash-newline escape strips the
/// newline and leading whitespace, so the page rendered "purpose.In live
/// mode". A space is added before the continuation (matching the authorize
/// page's wording).
pub fn get_expired_page(dummy_connector_conf: &settings::DummyConnector) -> String {
    html! {
        head {
            title { "Authorize Payment" }
            style { (consts::THREE_DS_CSS) }
            link rel="icon" href="https://app.hyperswitch.io/HyperswitchFavicon.png" {}
        }
        body {
            div.heading {
                img.logo src="https://app.hyperswitch.io/assets/Dark/hyperswitchLogoIconWithText.svg" alt="Hyperswitch Logo" {}
                h1 { "Test Payment Page" }
            }
            div.container {
                p.disclaimer { "This link is not valid or it is expired" }
            }
            div.container {
                p.disclaimer { "What is this page?" }
                p { "This page is just a simulation for integration and testing purpose. \
                In live mode, this is not visible. Contact us for any queries."
                }
                div.contact {
                    div.contact_item.hover_cursor onclick=(dummy_connector_conf.slack_invite_url) {
                        img src="https://hyperswitch.io/logos/logo_slack.svg" alt="Slack Logo" {}
                    }
                    div.contact_item.hover_cursor onclick=(dummy_connector_conf.discord_invite_url) {
                        img src="https://hyperswitch.io/logos/logo_discord.svg" alt="Discord Logo" {}
                    }
                    div.border_vertical {}
                    div.contact_item.email {
                        p { "Or email us at" }
                        a href="mailto:[email protected]" { "[email protected]" }
                    }
                }
            }
        }
    }
    .into_string()
}
/// Turns a payment attempt into connector payment data according to the
/// payment-method-specific simulated flow.
pub trait ProcessPaymentAttempt {
    /// Builds the payment data for this attempt. `redirect_url` points to the
    /// dummy connector's authorize endpoint and is used by flows that require
    /// a user-interaction step.
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData>;
}
impl ProcessPaymentAttempt for types::DummyConnectorCard {
    /// Builds payment data for a card attempt based on the flow derived from
    /// the test card number (see `get_flow_from_card_number`).
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
        match self.get_flow_from_card_number(payment_attempt.payment_request.connector.clone())? {
            types::DummyConnectorCardFlow::NoThreeDS(status, error) => {
                // Surface a simulated decline, if any, before building data.
                if let Some(error) = error {
                    Err(error)?;
                }
                Ok(payment_attempt.build_payment_data(status, None, None))
            }
            types::DummyConnectorCardFlow::ThreeDS(_, _) => {
                // 3DS cards stay in `Processing` and redirect the user to the
                // authorize page for confirmation.
                Ok(payment_attempt.clone().build_payment_data(
                    types::DummyConnectorStatus::Processing,
                    Some(types::DummyConnectorNextAction::RedirectToUrl(redirect_url)),
                    payment_attempt.payment_request.return_url,
                ))
            }
        }
    }
}
impl ProcessPaymentAttempt for types::DummyConnectorUpiCollect {
    /// Builds payment data for a UPI-collect attempt using the flow derived
    /// from the VPA ID (see `get_flow_from_upi_collect`).
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
        let flow = self.get_flow_from_upi_collect()?;
        // Propagate a simulated failure before building any payment data.
        if let Some(err) = flow.error {
            Err(err)?;
        }
        // Only flows that need user interaction carry a redirect action.
        let next_action = match flow.is_next_action_required {
            true => Some(types::DummyConnectorNextAction::RedirectToUrl(redirect_url)),
            false => None,
        };
        let return_url = payment_attempt.payment_request.return_url.clone();
        Ok(payment_attempt.build_payment_data(flow.status, next_action, return_url))
    }
}
impl types::DummyConnectorUpiCollect {
    /// Maps the UPI virtual payment address to a simulated flow:
    /// the failure VPA fails outright, the success VPA goes to `Processing`
    /// with a required redirect, and anything else is declined as invalid.
    pub fn get_flow_from_upi_collect(
        self,
    ) -> types::DummyConnectorResult<types::DummyConnectorUpiFlow> {
        let vpa_id = self.vpa_id.peek();
        let flow = if vpa_id.as_str() == consts::DUMMY_CONNECTOR_UPI_FAILURE_VPA_ID {
            types::DummyConnectorUpiFlow {
                status: types::DummyConnectorStatus::Failed,
                error: errors::DummyConnectorErrors::PaymentNotSuccessful.into(),
                is_next_action_required: false,
            }
        } else if vpa_id.as_str() == consts::DUMMY_CONNECTOR_UPI_SUCCESS_VPA_ID {
            types::DummyConnectorUpiFlow {
                status: types::DummyConnectorStatus::Processing,
                error: None,
                is_next_action_required: true,
            }
        } else {
            types::DummyConnectorUpiFlow {
                status: types::DummyConnectorStatus::Failed,
                error: Some(errors::DummyConnectorErrors::PaymentDeclined {
                    message: "Invalid Upi id",
                }),
                is_next_action_required: false,
            }
        };
        Ok(flow)
    }
}
impl types::DummyConnectorCard {
    /// Resolves the simulated flow for a test card number.
    ///
    /// Well-known test PANs map to success, decline (with a message), or a
    /// 3DS flow; `4000000000009995` succeeds only on the Stripe test
    /// connector. Unknown numbers yield `CardNotSupported`.
    pub fn get_flow_from_card_number(
        self,
        connector: DummyConnectors,
    ) -> types::DummyConnectorResult<types::DummyConnectorCardFlow> {
        // Helpers for the two common no-3DS outcomes.
        let success = || {
            types::DummyConnectorCardFlow::NoThreeDS(types::DummyConnectorStatus::Succeeded, None)
        };
        let declined = |message: &'static str| {
            types::DummyConnectorCardFlow::NoThreeDS(
                types::DummyConnectorStatus::Failed,
                Some(errors::DummyConnectorErrors::PaymentDeclined { message }),
            )
        };
        match self.number.peek().as_str() {
            "4111111111111111" | "4242424242424242" | "5555555555554444" | "38000000000006"
            | "378282246310005" | "6011111111111117" => Ok(success()),
            "5105105105105100" | "4000000000000002" => Ok(declined("Card declined")),
            // This PAN behaves differently depending on the connector.
            "4000000000009995" if connector == DummyConnectors::StripeTest => Ok(success()),
            "4000000000009995" => Ok(declined(
                "Internal Server Error from Connector, Please try again later",
            )),
            "4000000000009987" => Ok(declined("Lost card")),
            "4000000000009979" => Ok(declined("Stolen card")),
            "4000003800000446" => Ok(types::DummyConnectorCardFlow::ThreeDS(
                types::DummyConnectorStatus::Succeeded,
                None,
            )),
            _ => Err(report!(errors::DummyConnectorErrors::CardNotSupported)
                .attach_printable("The card is not supported")),
        }
    }
}
impl ProcessPaymentAttempt for types::DummyConnectorWallet {
    /// Wallet payments always go through a redirect step: the attempt is put
    /// in `Processing` with a `RedirectToUrl` next action.
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
        // Clone only the return URL instead of the whole attempt (the
        // previous `payment_attempt.clone()` copied every field needlessly;
        // this mirrors the UPI collect implementation).
        let return_url = payment_attempt.payment_request.return_url.clone();
        Ok(payment_attempt.build_payment_data(
            types::DummyConnectorStatus::Processing,
            Some(types::DummyConnectorNextAction::RedirectToUrl(redirect_url)),
            return_url,
        ))
    }
}
impl ProcessPaymentAttempt for types::DummyConnectorPayLater {
    /// Pay-later payments always go through a redirect step: the attempt is
    /// put in `Processing` with a `RedirectToUrl` next action.
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
        // Clone only the return URL instead of the whole attempt (the
        // previous `payment_attempt.clone()` copied every field needlessly;
        // this mirrors the UPI collect implementation).
        let return_url = payment_attempt.payment_request.return_url.clone();
        Ok(payment_attempt.build_payment_data(
            types::DummyConnectorStatus::Processing,
            Some(types::DummyConnectorNextAction::RedirectToUrl(redirect_url)),
            return_url,
        ))
    }
}
impl ProcessPaymentAttempt for types::DummyConnectorPaymentMethodData {
    /// Dispatches to the payment-method-specific implementation.
    fn build_payment_data_from_payment_attempt(
        self,
        payment_attempt: types::DummyConnectorPaymentAttempt,
        redirect_url: String,
    ) -> types::DummyConnectorResult<types::DummyConnectorPaymentData> {
        match self {
            Self::Card(card) => {
                card.build_payment_data_from_payment_attempt(payment_attempt, redirect_url)
            }
            // The nested UPI variant is flattened into a single pattern.
            Self::Upi(types::DummyConnectorUpi::UpiCollect(upi_collect)) => {
                upi_collect.build_payment_data_from_payment_attempt(payment_attempt, redirect_url)
            }
            Self::Wallet(wallet) => {
                wallet.build_payment_data_from_payment_attempt(payment_attempt, redirect_url)
            }
            Self::PayLater(pay_later) => {
                pay_later.build_payment_data_from_payment_attempt(payment_attempt, redirect_url)
            }
        }
    }
}
impl types::DummyConnectorPaymentData {
    /// Builds payment data for an incoming payment attempt.
    ///
    /// The redirect URL targets this dummy connector's authorize endpoint
    /// for flows that need user interaction (3DS, UPI, wallets, pay-later).
    pub fn process_payment_attempt(
        state: &SessionState,
        payment_attempt: types::DummyConnectorPaymentAttempt,
    ) -> types::DummyConnectorResult<Self> {
        let redirect_url = format!(
            "{}/dummy-connector/authorize/{}",
            state.base_url, payment_attempt.attempt_id
        );
        // Clone only the payment method data rather than the entire attempt
        // (the previous `payment_attempt.clone()` copied every field); the
        // attempt itself is then moved into the builder unchanged.
        let payment_method_data = payment_attempt
            .payment_request
            .payment_method_data
            .clone();
        payment_method_data.build_payment_data_from_payment_attempt(payment_attempt, redirect_url)
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/routes/dummy_connector/core.rs",
"crates/router/src/routes/dummy_connector/consts.rs",
"crates/router/src/routes/dummy_connector/types.rs",
"crates/router/src/routes/dummy_connector/errors.rs",
"crates/router/src/routes/dummy_connector/utils.rs"
],
"module": "crates/router/src/routes/dummy_connector",
"num_files": 5,
"token_count": 9141
}
|
module_3701738747671319510
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/services/api
Files: 3
</path>
<module>
// File: crates/router/src/services/api/request.rs
pub use common_utils::request::ContentType;
pub use masking::{Mask, Maskable};
// File: crates/router/src/services/api/client.rs
use std::time::Duration;
use common_utils::errors::ReportSwitchExt;
use error_stack::ResultExt;
pub use external_services::http_client::{self, client};
use http::{HeaderValue, Method};
pub use hyperswitch_interfaces::{
api_client::{ApiClient, ApiClientWrapper, RequestBuilder},
types::Proxy,
};
use masking::PeekInterface;
use reqwest::multipart::Form;
use router_env::tracing_actix_web::RequestId;
use super::{request::Maskable, Request};
use crate::core::errors::{ApiClientError, CustomResult};
/// HTTP client that honours the configured outbound proxy settings.
#[derive(Clone)]
pub struct ProxyClient {
    // Kept so per-request clients (e.g. with a client certificate) can be
    // rebuilt with the same proxy settings.
    proxy_config: Proxy,
    // Default client, built once from the proxy config.
    client: reqwest::Client,
    // Request ID attached for tracing; set via `add_request_id`.
    request_id: Option<RequestId>,
}
impl ProxyClient {
    /// Builds a proxy-aware client from the given proxy configuration.
    ///
    /// # Errors
    /// `InvalidProxyConfiguration` if the underlying client cannot be built.
    pub fn new(proxy_config: &Proxy) -> CustomResult<Self, ApiClientError> {
        let base_client = client::get_client_builder(proxy_config)
            .switch()?
            .build()
            .change_context(ApiClientError::InvalidProxyConfiguration)?;
        Ok(Self {
            proxy_config: proxy_config.clone(),
            client: base_client,
            request_id: None,
        })
    }
    /// Returns a client for the request: when both a certificate and its key
    /// are supplied, a fresh client with that identity is constructed;
    /// otherwise the shared default client is cloned (cheap handle clone).
    pub fn get_reqwest_client(
        &self,
        client_certificate: Option<masking::Secret<String>>,
        client_certificate_key: Option<masking::Secret<String>>,
    ) -> CustomResult<reqwest::Client, ApiClientError> {
        if let (Some(certificate), Some(certificate_key)) =
            (client_certificate, client_certificate_key)
        {
            let builder = client::get_client_builder(&self.proxy_config).switch()?;
            let identity =
                client::create_identity_from_certificate_and_key(certificate, certificate_key)
                    .switch()?;
            builder
                .identity(identity)
                .build()
                .change_context(ApiClientError::ClientConstructionFailed)
                .attach_printable(
                    "Failed to construct client with certificate and certificate key",
                )
        } else {
            Ok(self.client.clone())
        }
    }
}
pub struct RouterRequestBuilder {
    // `reqwest::RequestBuilder` is a consuming builder: each call takes the
    // builder by value and returns a new one. Our trait methods only receive
    // `&mut self`, so the builder is wrapped in an `Option` and moved out via
    // `Option::take`, transformed, then put back. `None` marks the (briefly)
    // consumed state.
    inner: Option<reqwest::RequestBuilder>,
}
impl RequestBuilder for RouterRequestBuilder {
    /// Sets a JSON body.
    fn json(&mut self, body: serde_json::Value) {
        self.inner = self.inner.take().map(|r| r.json(&body));
    }
    /// Sets a URL-encoded form body.
    fn url_encoded_form(&mut self, body: serde_json::Value) {
        self.inner = self.inner.take().map(|r| r.form(&body));
    }
    /// Sets the request timeout.
    fn timeout(&mut self, timeout: Duration) {
        self.inner = self.inner.take().map(|r| r.timeout(timeout));
    }
    /// Sets a multipart form body.
    fn multipart(&mut self, form: Form) {
        self.inner = self.inner.take().map(|r| r.multipart(form));
    }
    /// Adds a header; masked values are flagged sensitive
    /// (`HeaderValue::set_sensitive`) so downstream layers can avoid
    /// exposing them.
    fn header(&mut self, key: String, value: Maskable<String>) -> CustomResult<(), ApiClientError> {
        let header_value = match value {
            Maskable::Masked(hvalue) => HeaderValue::from_str(hvalue.peek()).map(|mut h| {
                h.set_sensitive(true);
                h
            }),
            Maskable::Normal(hvalue) => HeaderValue::from_str(&hvalue),
        }
        .change_context(ApiClientError::HeaderMapConstructionFailed)?;
        self.inner = self.inner.take().map(|r| r.header(key, header_value));
        Ok(())
    }
    /// Consumes the builder and returns the not-yet-awaited send future.
    ///
    /// # Errors
    /// `UnexpectedState` if the inner builder was already consumed.
    fn send(
        self,
    ) -> CustomResult<
        Box<dyn core::future::Future<Output = Result<reqwest::Response, reqwest::Error>> + 'static>,
        ApiClientError,
    > {
        Ok(Box::new(
            self.inner.ok_or(ApiClientError::UnexpectedState)?.send(),
        ))
    }
}
#[async_trait::async_trait]
impl ApiClient for ProxyClient {
    /// Starts building a request without a client certificate.
    fn request(
        &self,
        method: Method,
        url: String,
    ) -> CustomResult<Box<dyn RequestBuilder>, ApiClientError> {
        self.request_with_certificate(method, url, None, None)
    }
    /// Starts building a request, optionally with a client certificate and
    /// key for mutual TLS.
    fn request_with_certificate(
        &self,
        method: Method,
        url: String,
        certificate: Option<masking::Secret<String>>,
        certificate_key: Option<masking::Secret<String>>,
    ) -> CustomResult<Box<dyn RequestBuilder>, ApiClientError> {
        let client_builder = self
            .get_reqwest_client(certificate, certificate_key)
            .change_context(ApiClientError::ClientConstructionFailed)?;
        Ok(Box::new(RouterRequestBuilder {
            inner: Some(client_builder.request(method, url)),
        }))
    }
    /// Sends the request through the shared HTTP client layer, using the
    /// proxy configuration of the given wrapper.
    async fn send_request(
        &self,
        api_client: &dyn ApiClientWrapper,
        request: Request,
        option_timeout_secs: Option<u64>,
        _forward_to_kafka: bool,
    ) -> CustomResult<reqwest::Response, ApiClientError> {
        http_client::send_request(&api_client.get_proxy(), request, option_timeout_secs)
            .await
            .switch()
    }
    fn add_request_id(&mut self, request_id: RequestId) {
        self.request_id = Some(request_id);
    }
    fn get_request_id(&self) -> Option<RequestId> {
        self.request_id
    }
    /// Request ID in hyphenated UUID string form, when set.
    fn get_request_id_str(&self) -> Option<String> {
        self.request_id.map(|id| id.as_hyphenated().to_string())
    }
    // Flow name tracking is not used by this client.
    fn add_flow_name(&mut self, _flow_name: String) {}
}
/// Api client for testing sending request
///
/// All trait methods are currently stubs (see issue #2066): request building
/// and sending return `ApiClientError::UnexpectedState`, and request-ID
/// accessors return `None`.
#[derive(Clone)]
pub struct MockApiClient;
#[async_trait::async_trait]
/// Stub implementation: every operation is unimplemented pending [#2066].
impl ApiClient for MockApiClient {
    fn request(
        &self,
        _method: Method,
        _url: String,
    ) -> CustomResult<Box<dyn RequestBuilder>, ApiClientError> {
        // [#2066]: Add Mock implementation for ApiClient
        Err(ApiClientError::UnexpectedState.into())
    }
    fn request_with_certificate(
        &self,
        _method: Method,
        _url: String,
        _certificate: Option<masking::Secret<String>>,
        _certificate_key: Option<masking::Secret<String>>,
    ) -> CustomResult<Box<dyn RequestBuilder>, ApiClientError> {
        // [#2066]: Add Mock implementation for ApiClient
        Err(ApiClientError::UnexpectedState.into())
    }
    async fn send_request(
        &self,
        _state: &dyn ApiClientWrapper,
        _request: Request,
        _option_timeout_secs: Option<u64>,
        _forward_to_kafka: bool,
    ) -> CustomResult<reqwest::Response, ApiClientError> {
        // [#2066]: Add Mock implementation for ApiClient
        Err(ApiClientError::UnexpectedState.into())
    }
    fn add_request_id(&mut self, _request_id: RequestId) {
        // [#2066]: Add Mock implementation for ApiClient
    }
    fn get_request_id(&self) -> Option<RequestId> {
        // [#2066]: Add Mock implementation for ApiClient
        None
    }
    fn get_request_id_str(&self) -> Option<String> {
        // [#2066]: Add Mock implementation for ApiClient
        None
    }
    fn add_flow_name(&mut self, _flow_name: String) {}
}
// File: crates/router/src/services/api/generic_link_response.rs
use common_utils::errors::CustomResult;
use error_stack::ResultExt;
use hyperswitch_domain_models::api::{
GenericExpiredLinkData, GenericLinkFormData, GenericLinkStatusData, GenericLinksData,
};
use tera::{Context, Tera};
use super::build_secure_payment_link_html;
use crate::core::errors;
pub mod context;
/// Renders the HTML page for a generic link, dispatching on the link kind.
pub fn build_generic_link_html(
    boxed_generic_link_data: GenericLinksData,
    locale: String,
) -> CustomResult<String, errors::ApiErrorResponse> {
    match boxed_generic_link_data {
        GenericLinksData::ExpiredLink(data) => build_generic_expired_link_html(&data),
        GenericLinksData::PaymentMethodCollect(data) => build_pm_collect_link_html(&data),
        GenericLinksData::PaymentMethodCollectStatus(data) => {
            build_pm_collect_link_status_html(&data)
        }
        // Payout pages are the only localized ones.
        GenericLinksData::PayoutLink(data) => build_payout_link_html(&data, &locale),
        GenericLinksData::PayoutLinkStatus(data) => build_payout_link_status_html(&data, &locale),
        GenericLinksData::SecurePaymentLink(data) => build_secure_payment_link_html(data),
    }
}
/// Renders the "link expired" page from its bundled Tera template, filling
/// in the title, message and theme from the link data.
pub fn build_generic_expired_link_html(
    link_data: &GenericExpiredLinkData,
) -> CustomResult<String, errors::ApiErrorResponse> {
    const TEMPLATE_NAME: &str = "generic_expired_link";
    let mut tera = Tera::default();
    let _ = tera.add_raw_template(
        TEMPLATE_NAME,
        include_str!("../../core/generic_link/expired_link/index.html"),
    );
    let mut context = Context::new();
    context.insert("title", &link_data.title);
    context.insert("message", &link_data.message);
    context.insert("theme", &link_data.theme);
    tera.render(TEMPLATE_NAME, &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render expired link HTML template")
}
/// Prepares a Tera instance and context shared by link pages:
/// renders the CSS with the merchant color scheme injected, stores the
/// resulting `<style>` tag under `css_style_tag`, and registers the HTML
/// document as `html_template` (rendered later by the caller).
fn build_html_template(
    link_data: &GenericLinkFormData,
    document: &'static str,
    styles: &'static str,
) -> CustomResult<(Tera, Context), errors::ApiErrorResponse> {
    let mut tera: Tera = Tera::default();
    let mut context = Context::new();

    // Prepend the `color_scheme` placeholder so the theme renders into the CSS.
    let styles_with_placeholder = format!("{}\n{}", "{{ color_scheme }}", styles);
    let _ = tera.add_raw_template("document_styles", &styles_with_placeholder);
    context.insert("color_scheme", &link_data.css_data);
    let rendered_css = tera
        .render("document_styles", &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render CSS template")?;
    context.insert("css_style_tag", &format!("<style>{rendered_css}</style>"));

    let _ = tera.add_raw_template("html_template", document);
    Ok((tera, context))
}
/// Renders the payout link initiation page: builds the shared HTML/CSS
/// context, injects localized strings and the dynamic JS payload, adds the
/// SDK loader script tag, and renders the final document.
pub fn build_payout_link_html(
    link_data: &GenericLinkFormData,
    locale: &str,
) -> CustomResult<String, errors::ApiErrorResponse> {
    let document = include_str!("../../core/generic_link/payout_link/initiate/index.html");
    let styles = include_str!("../../core/generic_link/payout_link/initiate/styles.css");
    let (mut tera, mut context) = build_html_template(link_data, document, styles)
        .attach_printable("Failed to build context for payout link's HTML template")?;
    // Insert dynamic context in JS
    let script = include_str!("../../core/generic_link/payout_link/initiate/script.js");
    let js_template = script.to_string();
    let js_dynamic_context = "{{ script_data }}";
    let final_js = format!("{js_dynamic_context}\n{js_template}");
    let _ = tera.add_raw_template("document_scripts", &final_js);
    context.insert("script_data", &link_data.js_data);
    // Localized UI strings for the payout link page.
    context::insert_locales_in_context_for_payout_link(&mut context, locale);
    let js_script_tag = tera
        .render("document_scripts", &context)
        .map(|js| format!("<script>{js}</script>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render JS template")?;
    context.insert("js_script_tag", &js_script_tag);
    // The SDK script invokes its init hook once loaded.
    context.insert(
        "hyper_sdk_loader_script_tag",
        &format!(
            r#"<script src="{}" onload="initializePayoutSDK()"></script>"#,
            link_data.sdk_url
        ),
    );
    // Render HTML template
    tera.render("html_template", &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payout link's HTML template")
}
/// Renders the payment method collect link initiation page: builds the
/// shared HTML/CSS context, injects the dynamic JS payload, adds the SDK
/// loader script tag, and renders the final document.
pub fn build_pm_collect_link_html(
    link_data: &GenericLinkFormData,
) -> CustomResult<String, errors::ApiErrorResponse> {
    let document =
        include_str!("../../core/generic_link/payment_method_collect/initiate/index.html");
    let styles = include_str!("../../core/generic_link/payment_method_collect/initiate/styles.css");
    let (mut tera, mut context) = build_html_template(link_data, document, styles)
        .attach_printable(
            "Failed to build context for payment method collect link's HTML template",
        )?;
    // Insert dynamic context in JS
    let script = include_str!("../../core/generic_link/payment_method_collect/initiate/script.js");
    let js_template = script.to_string();
    let js_dynamic_context = "{{ script_data }}";
    let final_js = format!("{js_dynamic_context}\n{js_template}");
    let _ = tera.add_raw_template("document_scripts", &final_js);
    context.insert("script_data", &link_data.js_data);
    let js_script_tag = tera
        .render("document_scripts", &context)
        .map(|js| format!("<script>{js}</script>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render JS template")?;
    context.insert("js_script_tag", &js_script_tag);
    // The SDK script invokes its init hook once loaded.
    context.insert(
        "hyper_sdk_loader_script_tag",
        &format!(
            r#"<script src="{}" onload="initializeCollectSDK()"></script>"#,
            link_data.sdk_url
        ),
    );
    // Render HTML template
    tera.render("html_template", &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payment method collect link's HTML template")
}
/// Renders the payout link status page: themes the CSS via `color_scheme`,
/// injects the dynamic JS payload and localized strings, then renders the
/// status document with the style and script tags in context.
pub fn build_payout_link_status_html(
    link_data: &GenericLinkStatusData,
    locale: &str,
) -> CustomResult<String, errors::ApiErrorResponse> {
    let mut tera = Tera::default();
    let mut context = Context::new();
    // Insert dynamic context in CSS
    let css_dynamic_context = "{{ color_scheme }}";
    let css_template =
        include_str!("../../core/generic_link/payout_link/status/styles.css").to_string();
    let final_css = format!("{css_dynamic_context}\n{css_template}");
    let _ = tera.add_raw_template("payout_link_status_styles", &final_css);
    context.insert("color_scheme", &link_data.css_data);
    let css_style_tag = tera
        .render("payout_link_status_styles", &context)
        .map(|css| format!("<style>{css}</style>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payout link status CSS template")?;
    // Insert dynamic context in JS
    let js_dynamic_context = "{{ script_data }}";
    let js_template =
        include_str!("../../core/generic_link/payout_link/status/script.js").to_string();
    let final_js = format!("{js_dynamic_context}\n{js_template}");
    let _ = tera.add_raw_template("payout_link_status_script", &final_js);
    context.insert("script_data", &link_data.js_data);
    // Localized UI strings for the status page.
    context::insert_locales_in_context_for_payout_link_status(&mut context, locale);
    let js_script_tag = tera
        .render("payout_link_status_script", &context)
        .map(|js| format!("<script>{js}</script>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payout link status JS template")?;
    // Build HTML
    let html_template =
        include_str!("../../core/generic_link/payout_link/status/index.html").to_string();
    let _ = tera.add_raw_template("payout_status_link", &html_template);
    context.insert("css_style_tag", &css_style_tag);
    context.insert("js_script_tag", &js_script_tag);
    tera.render("payout_status_link", &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payout link status HTML template")
}
/// Renders the payment method collect link status page: themes the CSS via
/// `color_scheme`, injects the dynamic JS payload (under
/// `collect_link_status_context`), then renders the status document with
/// the style and script tags in context.
pub fn build_pm_collect_link_status_html(
    link_data: &GenericLinkStatusData,
) -> CustomResult<String, errors::ApiErrorResponse> {
    let mut tera = Tera::default();
    let mut context = Context::new();
    // Insert dynamic context in CSS
    let css_dynamic_context = "{{ color_scheme }}";
    let css_template =
        include_str!("../../core/generic_link/payment_method_collect/status/styles.css")
            .to_string();
    let final_css = format!("{css_dynamic_context}\n{css_template}");
    let _ = tera.add_raw_template("pm_collect_link_status_styles", &final_css);
    context.insert("color_scheme", &link_data.css_data);
    let css_style_tag = tera
        .render("pm_collect_link_status_styles", &context)
        .map(|css| format!("<style>{css}</style>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payment method collect link status CSS template")?;
    // Insert dynamic context in JS
    let js_dynamic_context = "{{ collect_link_status_context }}";
    let js_template =
        include_str!("../../core/generic_link/payment_method_collect/status/script.js").to_string();
    let final_js = format!("{js_dynamic_context}\n{js_template}");
    let _ = tera.add_raw_template("pm_collect_link_status_script", &final_js);
    context.insert("collect_link_status_context", &link_data.js_data);
    let js_script_tag = tera
        .render("pm_collect_link_status_script", &context)
        .map(|js| format!("<script>{js}</script>"))
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payment method collect link status JS template")?;
    // Build HTML
    let html_template =
        include_str!("../../core/generic_link/payment_method_collect/status/index.html")
            .to_string();
    let _ = tera.add_raw_template("payment_method_collect_status_link", &html_template);
    context.insert("css_style_tag", &css_style_tag);
    context.insert("js_script_tag", &js_script_tag);
    tera.render("payment_method_collect_status_link", &context)
        .change_context(errors::ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to render payment method collect link status HTML template")
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/services/api/request.rs",
"crates/router/src/services/api/client.rs",
"crates/router/src/services/api/generic_link_response.rs"
],
"module": "crates/router/src/services/api",
"num_files": 3,
"token_count": 4090
}
|
module_1146866723929605216
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/services/authorization
Files: 4
</path>
<module>
// File: crates/router/src/services/authorization/roles.rs
#[cfg(feature = "recon")]
use std::collections::HashMap;
use std::collections::HashSet;
#[cfg(feature = "recon")]
use api_models::enums::ReconPermissionScope;
use common_enums::{EntityType, PermissionGroup, Resource, RoleScope};
use common_utils::{errors::CustomResult, id_type};
#[cfg(feature = "recon")]
use super::permission_groups::{RECON_OPS, RECON_REPORTS};
use super::{permission_groups::PermissionGroupExt, permissions::Permission};
use crate::{core::errors, routes::SessionState};
pub mod predefined_roles;
/// In-memory representation of a role: a set of permission groups plus
/// flags controlling how the role itself may be managed.
#[derive(Clone, serde::Serialize, serde::Deserialize, Debug)]
pub struct RoleInfo {
    role_id: String,
    role_name: String,
    // Granted permission groups; expanded via `accessible_groups` when read.
    groups: Vec<PermissionGroup>,
    scope: RoleScope,
    entity_type: EntityType,
    // Management flags — consumed by role-management flows; DB-backed roles
    // set all of the first three to true and `is_internal` to false (see the
    // `From<diesel_models::role::Role>` impl below).
    is_invitable: bool,
    is_deletable: bool,
    is_updatable: bool,
    is_internal: bool,
}
impl RoleInfo {
    /// Returns the role's unique identifier.
    pub fn get_role_id(&self) -> &str {
        &self.role_id
    }
    /// Returns the human-readable role name.
    pub fn get_role_name(&self) -> &str {
        &self.role_name
    }
    /// Expands the stored groups into the full set of accessible groups
    /// (a group may imply others, e.g. manage implies view), deduplicated
    /// through a `HashSet`; result order is therefore unspecified.
    pub fn get_permission_groups(&self) -> Vec<PermissionGroup> {
        self.groups
            .iter()
            .flat_map(|group| group.accessible_groups())
            .collect::<HashSet<_>>()
            .into_iter()
            .collect()
    }
    pub fn get_scope(&self) -> RoleScope {
        self.scope
    }
    pub fn get_entity_type(&self) -> EntityType {
        self.entity_type
    }
    pub fn is_invitable(&self) -> bool {
        self.is_invitable
    }
    pub fn is_deletable(&self) -> bool {
        self.is_deletable
    }
    pub fn is_internal(&self) -> bool {
        self.is_internal
    }
    pub fn is_updatable(&self) -> bool {
        self.is_updatable
    }
    /// Union of all resources covered by the role's accessible groups.
    pub fn get_resources_set(&self) -> HashSet<Resource> {
        self.get_permission_groups()
            .iter()
            .flat_map(|group| group.resources())
            .collect()
    }
    /// Checks whether this role grants the required permission: the role's
    /// entity type must cover the permission's entity type, and at least one
    /// accessible group must have sufficient scope and cover the resource.
    pub fn check_permission_exists(&self, required_permission: Permission) -> bool {
        required_permission.entity_type() <= self.entity_type
            && self.get_permission_groups().iter().any(|group| {
                required_permission.scope() <= group.scope()
                    && group.resources().contains(&required_permission.resource())
            })
    }
    /// Builds the recon ACL: for every recon resource granted through this
    /// role's groups (excluding internal recon resources such as
    /// `ReconToken`), records the widest scope seen across all groups.
    /// `ReconAndSettlementAnalytics` is always capped at `Read`.
    #[cfg(feature = "recon")]
    pub fn get_recon_acl(&self) -> HashMap<Resource, ReconPermissionScope> {
        let mut acl: HashMap<Resource, ReconPermissionScope> = HashMap::new();
        let mut recon_resources = RECON_OPS.to_vec();
        recon_resources.extend(RECON_REPORTS);
        let recon_internal_resources = [Resource::ReconToken];
        self.get_permission_groups()
            .iter()
            .for_each(|permission_group| {
                permission_group.resources().iter().for_each(|resource| {
                    if recon_resources.contains(resource)
                        && !recon_internal_resources.contains(resource)
                    {
                        let scope = match resource {
                            Resource::ReconAndSettlementAnalytics => ReconPermissionScope::Read,
                            _ => ReconPermissionScope::from(permission_group.scope()),
                        };
                        // Keep the maximum scope seen for this resource.
                        acl.entry(*resource)
                            .and_modify(|curr_scope| {
                                *curr_scope = if (*curr_scope) < scope {
                                    scope
                                } else {
                                    *curr_scope
                                }
                            })
                            .or_insert(scope);
                    }
                })
            });
        acl
    }
    /// Looks the role up among the statically predefined roles.
    pub fn from_predefined_roles(role_id: &str) -> Option<Self> {
        predefined_roles::PREDEFINED_ROLES.get(role_id).cloned()
    }
    /// Resolves a role by ID: predefined roles take precedence, otherwise the
    /// role is loaded from the global store within the given lineage
    /// (merchant, org, profile, tenant).
    pub async fn from_role_id_in_lineage(
        state: &SessionState,
        role_id: &str,
        merchant_id: &id_type::MerchantId,
        org_id: &id_type::OrganizationId,
        profile_id: &id_type::ProfileId,
        tenant_id: &id_type::TenantId,
    ) -> CustomResult<Self, errors::StorageError> {
        if let Some(role) = predefined_roles::PREDEFINED_ROLES.get(role_id) {
            Ok(role.clone())
        } else {
            state
                .global_store
                .find_role_by_role_id_in_lineage(
                    role_id,
                    merchant_id,
                    org_id,
                    profile_id,
                    tenant_id,
                )
                .await
                .map(Self::from)
        }
    }
    /// Resolves a role by ID scoped to an org and tenant; predefined roles
    /// take precedence over the store lookup.
    // TODO: To evaluate whether we can omit org_id and tenant_id for this function
    pub async fn from_role_id_org_id_tenant_id(
        state: &SessionState,
        role_id: &str,
        org_id: &id_type::OrganizationId,
        tenant_id: &id_type::TenantId,
    ) -> CustomResult<Self, errors::StorageError> {
        if let Some(role) = predefined_roles::PREDEFINED_ROLES.get(role_id) {
            Ok(role.clone())
        } else {
            state
                .global_store
                .find_by_role_id_org_id_tenant_id(role_id, org_id, tenant_id)
                .await
                .map(Self::from)
        }
    }
}
impl From<diesel_models::role::Role> for RoleInfo {
    /// Converts a database role record into an in-memory `RoleInfo`.
    ///
    /// DB-backed (custom) roles are always invitable, deletable and
    /// updatable, and are never internal roles.
    fn from(role: diesel_models::role::Role) -> Self {
        Self {
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
            role_id: role.role_id,
            role_name: role.role_name,
            groups: role.groups,
            scope: role.scope,
            entity_type: role.entity_type,
        }
    }
}
// File: crates/router/src/services/authorization/permission_groups.rs
use std::{collections::HashMap, ops::Not};
use common_enums::{EntityType, ParentGroup, PermissionGroup, PermissionScope, Resource};
use strum::IntoEnumIterator;
use super::permissions;
/// Extension methods over [`PermissionGroup`] describing its scope, parent
/// group, covered resources, and the groups it implicitly grants.
pub trait PermissionGroupExt {
    /// Read or write scope of this group.
    fn scope(&self) -> PermissionScope;
    /// The parent group this variant belongs to.
    fn parent(&self) -> ParentGroup;
    /// Resources covered by this group (delegated to the parent group in the
    /// impl below).
    fn resources(&self) -> Vec<Resource>;
    /// All groups accessible when this group is granted (e.g. a manage group
    /// also grants its view counterpart).
    fn accessible_groups(&self) -> Vec<PermissionGroup>;
}
impl PermissionGroupExt for PermissionGroup {
    /// Every `*View` group maps to read scope, every `*Manage` group to write.
    fn scope(&self) -> PermissionScope {
        match self {
            Self::OperationsView
            | Self::ConnectorsView
            | Self::WorkflowsView
            | Self::AnalyticsView
            | Self::UsersView
            | Self::AccountView
            | Self::ReconOpsView
            | Self::ReconReportsView
            | Self::ThemeView => PermissionScope::Read,
            Self::OperationsManage
            | Self::ConnectorsManage
            | Self::WorkflowsManage
            | Self::UsersManage
            | Self::AccountManage
            | Self::ReconOpsManage
            | Self::ReconReportsManage
            | Self::InternalManage
            | Self::ThemeManage => PermissionScope::Write,
        }
    }
    /// `View`/`Manage` pairs share a parent; `Analytics` and `Internal` have
    /// a single member each.
    fn parent(&self) -> ParentGroup {
        match self {
            Self::OperationsView | Self::OperationsManage => ParentGroup::Operations,
            Self::ConnectorsView | Self::ConnectorsManage => ParentGroup::Connectors,
            Self::WorkflowsView | Self::WorkflowsManage => ParentGroup::Workflows,
            Self::AnalyticsView => ParentGroup::Analytics,
            Self::UsersView | Self::UsersManage => ParentGroup::Users,
            Self::AccountView | Self::AccountManage => ParentGroup::Account,
            Self::ThemeView | Self::ThemeManage => ParentGroup::Theme,
            Self::ReconOpsView | Self::ReconOpsManage => ParentGroup::ReconOps,
            Self::ReconReportsView | Self::ReconReportsManage => ParentGroup::ReconReports,
            Self::InternalManage => ParentGroup::Internal,
        }
    }
    /// Resources come from the parent group's static resource list.
    fn resources(&self) -> Vec<Resource> {
        self.parent().resources()
    }
    /// Groups implied by this one. Each arm contains the group itself; manage
    /// groups also imply their view counterpart, and some groups pull in view
    /// access to related areas (e.g. operations/workflows imply connectors
    /// view, analytics implies operations view, themes imply account view).
    fn accessible_groups(&self) -> Vec<Self> {
        match self {
            Self::OperationsView => vec![Self::OperationsView, Self::ConnectorsView],
            Self::OperationsManage => vec![
                Self::OperationsView,
                Self::OperationsManage,
                Self::ConnectorsView,
            ],
            Self::ConnectorsView => vec![Self::ConnectorsView],
            Self::ConnectorsManage => vec![Self::ConnectorsView, Self::ConnectorsManage],
            Self::WorkflowsView => vec![Self::WorkflowsView, Self::ConnectorsView],
            Self::WorkflowsManage => vec![
                Self::WorkflowsView,
                Self::WorkflowsManage,
                Self::ConnectorsView,
            ],
            Self::AnalyticsView => vec![Self::AnalyticsView, Self::OperationsView],
            Self::UsersView => vec![Self::UsersView],
            Self::UsersManage => {
                vec![Self::UsersView, Self::UsersManage]
            }
            Self::ReconOpsView => vec![Self::ReconOpsView],
            Self::ReconOpsManage => vec![Self::ReconOpsView, Self::ReconOpsManage],
            Self::ReconReportsView => vec![Self::ReconReportsView],
            Self::ReconReportsManage => vec![Self::ReconReportsView, Self::ReconReportsManage],
            Self::AccountView => vec![Self::AccountView],
            Self::AccountManage => vec![Self::AccountView, Self::AccountManage],
            Self::InternalManage => vec![Self::InternalManage],
            Self::ThemeView => vec![Self::ThemeView, Self::AccountView],
            Self::ThemeManage => vec![Self::ThemeManage, Self::AccountView],
        }
    }
}
/// Extension methods on [`ParentGroup`] for resource and description lookup.
pub trait ParentGroupExt {
    /// Resources owned by this parent group.
    fn resources(&self) -> Vec<Resource>;
    /// Human-readable, entity-filtered resource descriptions for the parent
    /// groups covered by `groups`. `None` when nothing is describable.
    fn get_descriptions_for_groups(
        entity_type: EntityType,
        groups: Vec<PermissionGroup>,
    ) -> Option<HashMap<ParentGroup, String>>;
    /// Scopes (read/write) offered by the permission groups under this parent.
    fn get_available_scopes(&self) -> Vec<PermissionScope>;
}
impl ParentGroupExt for ParentGroup {
    /// Each parent group is backed by a static resource table.
    fn resources(&self) -> Vec<Resource> {
        let backing: &[Resource] = match self {
            Self::Operations => &OPERATIONS,
            Self::Connectors => &CONNECTORS,
            Self::Workflows => &WORKFLOWS,
            Self::Analytics => &ANALYTICS,
            Self::Users => &USERS,
            Self::Account => &ACCOUNT,
            Self::ReconOps => &RECON_OPS,
            Self::ReconReports => &RECON_REPORTS,
            Self::Internal => &INTERNAL,
            Self::Theme => &THEME,
        };
        backing.to_vec()
    }
    /// For every parent group represented in `groups`, builds a comma-joined
    /// list of resource names visible at `entity_type`. Parents whose
    /// resources are all filtered out (or unnameable) are omitted; an empty
    /// result collapses to `None`.
    fn get_descriptions_for_groups(
        entity_type: EntityType,
        groups: Vec<PermissionGroup>,
    ) -> Option<HashMap<Self, String>> {
        let descriptions = Self::iter()
            .filter(|parent| groups.iter().any(|group| group.parent() == *parent))
            .filter_map(|parent| {
                let visible_resources =
                    permissions::filter_resources_by_entity_type(parent.resources(), entity_type)?;
                let resource_names = visible_resources
                    .iter()
                    .map(|resource| permissions::get_resource_name(*resource, entity_type))
                    .collect::<Option<Vec<_>>>()?;
                Some((parent, resource_names.join(", ")))
            })
            .collect::<HashMap<_, _>>();
        descriptions.is_empty().not().then_some(descriptions)
    }
    /// Collects the scope of every permission group belonging to this parent.
    fn get_available_scopes(&self) -> Vec<PermissionScope> {
        let mut scopes = Vec::new();
        for group in PermissionGroup::iter() {
            if group.parent() == *self {
                scopes.push(group.scope());
            }
        }
        scopes
    }
}
/// Resources granted by the Operations parent group.
pub static OPERATIONS: [Resource; 8] = [
    Resource::Payment,
    Resource::Refund,
    Resource::Mandate,
    Resource::Dispute,
    Resource::Customer,
    Resource::Payout,
    Resource::Report,
    Resource::Account,
];
/// Resources granted by the Connectors parent group.
pub static CONNECTORS: [Resource; 2] = [Resource::Connector, Resource::Account];
/// Resources granted by the Workflows parent group.
pub static WORKFLOWS: [Resource; 5] = [
    Resource::Routing,
    Resource::ThreeDsDecisionManager,
    Resource::SurchargeDecisionManager,
    Resource::Account,
    Resource::RevenueRecovery,
];
/// Resources granted by the Analytics parent group.
pub static ANALYTICS: [Resource; 3] = [Resource::Analytics, Resource::Report, Resource::Account];
/// Resources granted by the Users parent group.
pub static USERS: [Resource; 2] = [Resource::User, Resource::Account];
/// Resources granted by the Account parent group.
pub static ACCOUNT: [Resource; 3] = [Resource::Account, Resource::ApiKey, Resource::WebhookEvent];
/// Resources granted by the ReconOps parent group (superset of RECON_REPORTS).
pub static RECON_OPS: [Resource; 8] = [
    Resource::ReconToken,
    Resource::ReconFiles,
    Resource::ReconUpload,
    Resource::RunRecon,
    Resource::ReconConfig,
    Resource::ReconAndSettlementAnalytics,
    Resource::ReconReports,
    Resource::Account,
];
/// Resources granted by the Internal parent group.
pub static INTERNAL: [Resource; 1] = [Resource::InternalConnector];
/// Resources granted by the ReconReports parent group.
pub static RECON_REPORTS: [Resource; 4] = [
    Resource::ReconToken,
    Resource::ReconAndSettlementAnalytics,
    Resource::ReconReports,
    Resource::Account,
];
/// Resources granted by the Theme parent group.
pub static THEME: [Resource; 1] = [Resource::Theme];
// File: crates/router/src/services/authorization/info.rs
use std::ops::Not;
use api_models::user_role::GroupInfo;
use common_enums::{ParentGroup, PermissionGroup};
use strum::IntoEnumIterator;
// TODO: To be deprecated
pub fn get_group_authorization_info() -> Option<Vec<GroupInfo>> {
let groups = PermissionGroup::iter()
.filter_map(get_group_info_from_permission_group)
.collect::<Vec<_>>();
groups.is_empty().not().then_some(groups)
}
// TODO: To be deprecated
/// Pairs a permission group with its description; `None` for groups that
/// have no user-facing description.
fn get_group_info_from_permission_group(group: PermissionGroup) -> Option<GroupInfo> {
    get_group_description(group).map(|description| GroupInfo { group, description })
}
// TODO: To be deprecated
/// User-facing description of a permission group; `None` for internal-only
/// groups that should not be surfaced.
fn get_group_description(group: PermissionGroup) -> Option<&'static str> {
    match group {
        PermissionGroup::OperationsView => {
            Some("View Payments, Refunds, Payouts, Mandates, Disputes and Customers")
        }
        PermissionGroup::OperationsManage => {
            Some("Create, modify and delete Payments, Refunds, Payouts, Mandates, Disputes and Customers")
        }
        PermissionGroup::ConnectorsView => {
            Some("View connected Payment Processors, Payout Processors and Fraud & Risk Manager details")
        }
        PermissionGroup::ConnectorsManage => Some("Create, modify and delete connectors like Payment Processors, Payout Processors and Fraud & Risk Manager"),
        PermissionGroup::WorkflowsView => {
            Some("View Routing, 3DS Decision Manager, Surcharge Decision Manager")
        }
        PermissionGroup::WorkflowsManage => {
            Some("Create, modify and delete Routing, 3DS Decision Manager, Surcharge Decision Manager")
        }
        PermissionGroup::AnalyticsView => Some("View Analytics"),
        PermissionGroup::UsersView => Some("View Users"),
        PermissionGroup::UsersManage => Some("Manage and invite Users to the Team"),
        PermissionGroup::AccountView => Some("View Merchant Details"),
        PermissionGroup::AccountManage => Some("Create, modify and delete Merchant Details like api keys, webhooks, etc"),
        PermissionGroup::ReconReportsView => Some("View reconciliation reports and analytics"),
        PermissionGroup::ReconReportsManage => Some("Manage reconciliation reports"),
        PermissionGroup::ReconOpsView => Some("View and access all reconciliation operations including reports and analytics"),
        PermissionGroup::ReconOpsManage => Some("Manage all reconciliation operations including reports and analytics"),
        PermissionGroup::ThemeView => Some("View Themes"),
        PermissionGroup::ThemeManage => Some("Manage Themes"),
        PermissionGroup::InternalManage => None, // Internal group, no user-facing description
    }
}
/// User-facing description of a parent group; `None` for internal-only
/// parents that should not be surfaced.
pub fn get_parent_group_description(group: ParentGroup) -> Option<&'static str> {
    match group {
        ParentGroup::Operations => Some("Payments, Refunds, Payouts, Mandates, Disputes and Customers"),
        ParentGroup::Connectors => Some("Create, modify and delete connectors like Payment Processors, Payout Processors and Fraud & Risk Manager"),
        ParentGroup::Workflows => Some("Create, modify and delete Routing, 3DS Decision Manager, Surcharge Decision Manager"),
        ParentGroup::Analytics => Some("View Analytics"),
        ParentGroup::Users => Some("Manage and invite Users to the Team"),
        ParentGroup::Account => Some("Create, modify and delete Merchant Details like api keys, webhooks, etc"),
        ParentGroup::ReconOps => Some("View, manage reconciliation operations like upload and process files, run reconciliation etc"),
        ParentGroup::ReconReports => Some("View, manage reconciliation reports and analytics"),
        ParentGroup::Theme => Some("Manage and view themes for the organization"),
        ParentGroup::Internal => None, // Internal group, no user-facing description
    }
}
// File: crates/router/src/services/authorization/permissions.rs
use common_enums::{EntityType, PermissionScope, Resource};
use router_derive::generate_permissions;
// Permission matrix: for each resource, the scopes it supports and the entity
// types it is available at. The `generate_permissions!` proc macro (from
// `router_derive`) expands this table into the concrete permission set —
// see `router_derive` for the exact expansion shape.
generate_permissions! {
    permissions: [
        Payment: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        Refund: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        Dispute: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        Mandate: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        Customer: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        Payout: {
            scopes: [Read],
            entities: [Profile, Merchant]
        },
        ApiKey: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        Account: {
            scopes: [Read, Write],
            entities: [Profile, Merchant, Organization, Tenant]
        },
        Connector: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        Routing: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        Subscription: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        ThreeDsDecisionManager: {
            scopes: [Read, Write],
            entities: [Merchant, Profile]
        },
        SurchargeDecisionManager: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        Analytics: {
            scopes: [Read],
            entities: [Profile, Merchant, Organization]
        },
        Report: {
            scopes: [Read],
            entities: [Profile, Merchant, Organization]
        },
        User: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        WebhookEvent: {
            scopes: [Read, Write],
            entities: [Profile, Merchant]
        },
        ReconToken: {
            scopes: [Read],
            entities: [Merchant]
        },
        ReconFiles: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        ReconAndSettlementAnalytics: {
            scopes: [Read],
            entities: [Merchant]
        },
        ReconUpload: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        ReconReports: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        RunRecon: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        ReconConfig: {
            scopes: [Read, Write],
            entities: [Merchant]
        },
        RevenueRecovery: {
            scopes: [Read],
            entities: [Profile]
        },
        InternalConnector: {
            scopes: [Write],
            entities: [Merchant]
        },
        Theme: {
            scopes: [Read,Write],
            entities: [Organization]
        }
    ]
}
/// Display name for a resource, specialized by entity type where naming
/// differs (currently only `Account`). Returns `None` for resources with no
/// user-facing name (internal-only resources).
pub fn get_resource_name(resource: Resource, entity_type: EntityType) -> Option<&'static str> {
    match (resource, entity_type) {
        (Resource::Payment, _) => Some("Payments"),
        (Resource::Refund, _) => Some("Refunds"),
        (Resource::Dispute, _) => Some("Disputes"),
        (Resource::Mandate, _) => Some("Mandates"),
        (Resource::Customer, _) => Some("Customers"),
        (Resource::Payout, _) => Some("Payouts"),
        (Resource::ApiKey, _) => Some("Api Keys"),
        (Resource::Connector, _) => {
            Some("Payment Processors, Payout Processors, Fraud & Risk Managers")
        }
        (Resource::Routing, _) => Some("Routing"),
        (Resource::Subscription, _) => Some("Subscription"),
        (Resource::RevenueRecovery, _) => Some("Revenue Recovery"),
        (Resource::ThreeDsDecisionManager, _) => Some("3DS Decision Manager"),
        (Resource::SurchargeDecisionManager, _) => Some("Surcharge Decision Manager"),
        (Resource::Analytics, _) => Some("Analytics"),
        (Resource::Report, _) => Some("Operation Reports"),
        (Resource::User, _) => Some("Users"),
        (Resource::WebhookEvent, _) => Some("Webhook Events"),
        (Resource::ReconUpload, _) => Some("Reconciliation File Upload"),
        (Resource::RunRecon, _) => Some("Run Reconciliation Process"),
        (Resource::ReconConfig, _) => Some("Reconciliation Configurations"),
        (Resource::ReconToken, _) => Some("Generate & Verify Reconciliation Token"),
        (Resource::ReconFiles, _) => Some("Reconciliation Process Manager"),
        (Resource::ReconReports, _) => Some("Reconciliation Reports"),
        (Resource::ReconAndSettlementAnalytics, _) => Some("Reconciliation Analytics"),
        // Account naming depends on which entity level the caller is at.
        (Resource::Account, EntityType::Profile) => Some("Business Profile Account"),
        (Resource::Account, EntityType::Merchant) => Some("Merchant Account"),
        (Resource::Account, EntityType::Organization) => Some("Organization Account"),
        (Resource::Account, EntityType::Tenant) => Some("Tenant Account"),
        (Resource::Theme, _) => Some("Themes"),
        (Resource::InternalConnector, _) => None,
    }
}
/// Display label for a permission scope.
pub fn get_scope_name(scope: PermissionScope) -> &'static str {
    if matches!(scope, PermissionScope::Read) {
        "View"
    } else {
        "View and Manage"
    }
}
/// Keeps only the resources reachable at `entity_type` (a resource is kept
/// when any of its declared entity types is at or below `entity_type`).
/// Returns `None` when nothing survives the filter.
pub fn filter_resources_by_entity_type(
    resources: Vec<Resource>,
    entity_type: EntityType,
) -> Option<Vec<Resource>> {
    let mut accessible = resources;
    accessible.retain(|resource| {
        resource
            .entities()
            .iter()
            .any(|entity| entity <= &entity_type)
    });
    (!accessible.is_empty()).then_some(accessible)
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/services/authorization/roles.rs",
"crates/router/src/services/authorization/permission_groups.rs",
"crates/router/src/services/authorization/info.rs",
"crates/router/src/services/authorization/permissions.rs"
],
"module": "crates/router/src/services/authorization",
"num_files": 4,
"token_count": 4861
}
|
module_-3545356380806040343
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/services/authorization/roles
Files: 1
</path>
<module>
// File: crates/router/src/services/authorization/roles/predefined_roles.rs
use std::{collections::HashMap, sync::LazyLock};
use common_enums::{EntityType, PermissionGroup, RoleScope};
use super::RoleInfo;
use crate::consts;
/// Registry of the built-in roles, keyed by their stable role id.
///
/// These roles are compiled into the binary and are consulted before any
/// database lookup when resolving a role id. Internal roles are neither
/// invitable, deletable nor updatable; merchant/profile/organization roles
/// are all three.
pub static PREDEFINED_ROLES: LazyLock<HashMap<&'static str, RoleInfo>> = LazyLock::new(|| {
    let mut roles = HashMap::new();
    // Internal Roles
    roles.insert(
        common_utils::consts::ROLE_ID_INTERNAL_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::ConnectorsManage,
                PermissionGroup::WorkflowsView,
                PermissionGroup::WorkflowsManage,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconOpsManage,
                PermissionGroup::ReconReportsView,
                PermissionGroup::ReconReportsManage,
            ],
            role_id: common_utils::consts::ROLE_ID_INTERNAL_ADMIN.to_string(),
            role_name: "internal_admin".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: false,
            is_deletable: false,
            is_updatable: false,
            is_internal: true,
        },
    );
    roles.insert(
        common_utils::consts::ROLE_ID_INTERNAL_VIEW_ONLY_USER,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconReportsView,
            ],
            role_id: common_utils::consts::ROLE_ID_INTERNAL_VIEW_ONLY_USER.to_string(),
            role_name: "internal_view_only".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: false,
            is_deletable: false,
            is_updatable: false,
            is_internal: true,
        },
    );
    roles.insert(
        common_utils::consts::ROLE_ID_INTERNAL_DEMO,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconReportsView,
                // Only the demo role carries the internal-manage group.
                PermissionGroup::InternalManage,
            ],
            role_id: common_utils::consts::ROLE_ID_INTERNAL_DEMO.to_string(),
            role_name: "internal_demo".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: false,
            is_deletable: false,
            is_updatable: false,
            is_internal: true,
        },
    );
    // Tenant Roles
    roles.insert(
        common_utils::consts::ROLE_ID_TENANT_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::ConnectorsManage,
                PermissionGroup::WorkflowsView,
                PermissionGroup::WorkflowsManage,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconOpsManage,
                PermissionGroup::ReconReportsView,
                PermissionGroup::ReconReportsManage,
            ],
            role_id: common_utils::consts::ROLE_ID_TENANT_ADMIN.to_string(),
            role_name: "tenant_admin".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Tenant,
            is_invitable: false,
            is_deletable: false,
            is_updatable: false,
            is_internal: false,
        },
    );
    // Organization Roles
    roles.insert(
        common_utils::consts::ROLE_ID_ORGANIZATION_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::ConnectorsManage,
                PermissionGroup::WorkflowsView,
                PermissionGroup::WorkflowsManage,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconOpsManage,
                PermissionGroup::ReconReportsView,
                PermissionGroup::ReconReportsManage,
                // Theme groups are organization-level only.
                PermissionGroup::ThemeView,
                PermissionGroup::ThemeManage,
            ],
            role_id: common_utils::consts::ROLE_ID_ORGANIZATION_ADMIN.to_string(),
            role_name: "organization_admin".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Organization,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    // MERCHANT ROLES
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::ConnectorsManage,
                PermissionGroup::WorkflowsView,
                PermissionGroup::WorkflowsManage,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconOpsManage,
                PermissionGroup::ReconReportsView,
                PermissionGroup::ReconReportsManage,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_ADMIN.to_string(),
            role_name: "merchant_admin".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_VIEW_ONLY,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconReportsView,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_VIEW_ONLY.to_string(),
            role_name: "merchant_view_only".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_IAM_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_IAM_ADMIN.to_string(),
            role_name: "merchant_iam".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_DEVELOPER,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconReportsView,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_DEVELOPER.to_string(),
            role_name: "merchant_developer".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_OPERATOR,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconOpsManage,
                PermissionGroup::ReconReportsView,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_OPERATOR.to_string(),
            role_name: "merchant_operator".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_MERCHANT_CUSTOMER_SUPPORT,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::ReconOpsView,
                PermissionGroup::ReconReportsView,
            ],
            role_id: consts::user_role::ROLE_ID_MERCHANT_CUSTOMER_SUPPORT.to_string(),
            role_name: "customer_support".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Merchant,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    // Profile Roles
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::ConnectorsManage,
                PermissionGroup::WorkflowsView,
                PermissionGroup::WorkflowsManage,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_ADMIN.to_string(),
            role_name: "profile_admin".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_VIEW_ONLY,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_VIEW_ONLY.to_string(),
            role_name: "profile_view_only".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_IAM_ADMIN,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::UsersManage,
                PermissionGroup::AccountView,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_IAM_ADMIN.to_string(),
            role_name: "profile_iam".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_DEVELOPER,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::ConnectorsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
                PermissionGroup::AccountManage,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_DEVELOPER.to_string(),
            role_name: "profile_developer".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_OPERATOR,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::OperationsManage,
                PermissionGroup::ConnectorsView,
                PermissionGroup::WorkflowsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_OPERATOR.to_string(),
            role_name: "profile_operator".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles.insert(
        consts::user_role::ROLE_ID_PROFILE_CUSTOMER_SUPPORT,
        RoleInfo {
            groups: vec![
                PermissionGroup::OperationsView,
                PermissionGroup::AnalyticsView,
                PermissionGroup::UsersView,
                PermissionGroup::AccountView,
            ],
            role_id: consts::user_role::ROLE_ID_PROFILE_CUSTOMER_SUPPORT.to_string(),
            role_name: "profile_customer_support".to_string(),
            scope: RoleScope::Organization,
            entity_type: EntityType::Profile,
            is_invitable: true,
            is_deletable: true,
            is_updatable: true,
            is_internal: false,
        },
    );
    roles
});
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/services/authorization/roles/predefined_roles.rs"
],
"module": "crates/router/src/services/authorization/roles",
"num_files": 1,
"token_count": 3053
}
|
module_7245679582816518133
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/services/authentication
Files: 4
</path>
<module>
// File: crates/router/src/services/authentication/decision.rs
use common_utils::{errors::CustomResult, request::RequestContent};
use masking::{ErasedMaskSerialize, Secret};
use serde::Serialize;
use storage_impl::errors::ApiClientError;
use crate::{
core::metrics,
routes::{app::settings::DecisionConfig, SessionState},
};
// # Consts
//
/// Path of the rule-management endpoint on the decision service.
const DECISION_ENDPOINT: &str = "/rule";
/// HTTP method used to register a rule.
const RULE_ADD_METHOD: common_utils::request::Method = common_utils::request::Method::Post;
/// HTTP method used to remove a rule.
const RULE_DELETE_METHOD: common_utils::request::Method = common_utils::request::Method::Delete;
// Labels for rule operations — presumably fed to `spawn_tracked_job` as the
// `request_type` metric tag; confirm against callers.
pub const REVOKE: &str = "REVOKE";
pub const ADD: &str = "ADD";
// # Types
//
/// [`RuleRequest`] is a request body used to register a new authentication method in the proxy.
#[derive(Debug, Serialize)]
pub struct RuleRequest {
    /// Partition-key-like tag the decision service can use to group rules by
    /// partitioning identifiers (the callers here pass the tenant schema).
    pub tag: String,
    /// The type of authentication method being registered; serialized inline
    /// into the request body.
    #[serde(flatten)]
    pub variant: AuthRuleType,
    /// Time **in seconds** after which the rule should be removed.
    pub expiry: Option<u64>,
}
/// Request body used to remove a previously registered rule from the proxy.
#[derive(Debug, Serialize)]
pub struct RuleDeleteRequest {
    /// Same partition tag the rule was registered under.
    pub tag: String,
    /// Credential identifying the rule to delete; serialized inline.
    #[serde(flatten)]
    pub variant: AuthType,
}
/// Credential used to identify an existing rule (serialized with a `type`
/// tag in snake_case).
#[derive(Debug, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AuthType {
    /// Authentication method that uses an API key.
    ApiKey { api_key: Secret<String> },
}
/// Rule payload: the credential plus the identifiers it resolves to
/// (serialized with a `type` tag in snake_case).
#[derive(Debug, Serialize)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum AuthRuleType {
    /// API-key based rule. Also used to register publishable keys — see
    /// [`Identifiers::PublishableKey`] and `add_publishable_key`.
    ApiKey {
        api_key: Secret<String>,
        identifiers: Identifiers,
    },
}
/// Identity the decision service resolves a registered credential back to
/// (serialized with a `type` tag in snake_case).
#[allow(clippy::enum_variant_names)]
#[derive(Debug, Serialize, Clone)]
#[serde(tag = "type", rename_all = "snake_case")]
pub enum Identifiers {
    /// A merchant API key, identified by merchant id and key id.
    ApiKey {
        merchant_id: common_utils::id_type::MerchantId,
        key_id: common_utils::id_type::ApiKeyId,
    },
    /// A publishable key, identified by merchant id only.
    PublishableKey { merchant_id: String },
}
// # Decision Service
//
/// Registers an API key with the decision service.
///
/// No-op returning `Ok(())` when the decision service is not configured.
/// `expiry` is the rule lifetime in seconds, if any.
pub async fn add_api_key(
    state: &SessionState,
    api_key: Secret<String>,
    merchant_id: common_utils::id_type::MerchantId,
    key_id: common_utils::id_type::ApiKeyId,
    expiry: Option<u64>,
) -> CustomResult<(), ApiClientError> {
    let Some(decision_config) = &state.conf.decision else {
        return Ok(());
    };
    let identifiers = Identifiers::ApiKey {
        merchant_id,
        key_id,
    };
    let rule = RuleRequest {
        tag: state.tenant.schema.clone(),
        expiry,
        variant: AuthRuleType::ApiKey {
            api_key,
            identifiers,
        },
    };
    call_decision_service(state, decision_config, rule, RULE_ADD_METHOD).await
}
/// Registers a publishable key with the decision service (as an API-key rule
/// whose identifiers carry only the merchant id).
///
/// No-op returning `Ok(())` when the decision service is not configured.
pub async fn add_publishable_key(
    state: &SessionState,
    api_key: Secret<String>,
    merchant_id: common_utils::id_type::MerchantId,
    expiry: Option<u64>,
) -> CustomResult<(), ApiClientError> {
    let Some(decision_config) = &state.conf.decision else {
        return Ok(());
    };
    let identifiers = Identifiers::PublishableKey {
        merchant_id: merchant_id.get_string_repr().to_owned(),
    };
    let rule = RuleRequest {
        tag: state.tenant.schema.clone(),
        expiry,
        variant: AuthRuleType::ApiKey {
            api_key,
            identifiers,
        },
    };
    call_decision_service(state, decision_config, rule, RULE_ADD_METHOD).await
}
/// Sends `rule` to the decision service's rule endpoint with the given HTTP
/// `method`. Responses are logged; errors are logged and propagated.
async fn call_decision_service<T: ErasedMaskSerialize + Send + 'static>(
    state: &SessionState,
    decision_config: &DecisionConfig,
    rule: T,
    method: common_utils::request::Method,
) -> CustomResult<(), ApiClientError> {
    let url = format!("{}{}", decision_config.base_url, DECISION_ENDPOINT);
    let mut request = common_utils::request::Request::new(method, &url);
    request.set_body(RequestContent::Json(Box::new(rule)));
    request.add_default_headers();
    let outcome = state
        .api_client
        .send_request(state, request, None, false)
        .await;
    match outcome {
        Ok(response) => {
            router_env::info!("Decision service response: {:?}", response);
            Ok(())
        }
        Err(error) => {
            router_env::error!("Failed while calling the decision service: {:?}", error);
            Err(error)
        }
    }
}
/// Removes the rule registered for `api_key` from the decision service.
///
/// No-op returning `Ok(())` when the decision service is not configured.
pub async fn revoke_api_key(
    state: &SessionState,
    api_key: Secret<String>,
) -> CustomResult<(), ApiClientError> {
    let Some(decision_config) = &state.conf.decision else {
        return Ok(());
    };
    let rule = RuleDeleteRequest {
        tag: state.tenant.schema.clone(),
        variant: AuthType::ApiKey { api_key },
    };
    call_decision_service(state, decision_config, rule, RULE_DELETE_METHOD).await
}
/// Number of whole seconds from now until `expiry`, clamped to zero when the
/// expiry lies in the past.
pub fn convert_expiry(expiry: time::PrimitiveDateTime) -> u64 {
    let now = common_utils::date_time::now();
    let remaining_seconds = (expiry - now).whole_seconds();
    // `try_from` fails exactly when `remaining_seconds` is negative (the key
    // has already expired), in which case we clamp to 0 — same behavior as
    // the previous `i64::MIN..=0 => 0` match, without an `as` cast.
    u64::try_from(remaining_seconds).unwrap_or(0)
}
pub fn spawn_tracked_job<E, F>(future: F, request_type: &'static str)
where
E: std::fmt::Debug,
F: futures::Future<Output = Result<(), E>> + Send + 'static,
{
metrics::API_KEY_REQUEST_INITIATED
.add(1, router_env::metric_attributes!(("type", request_type)));
tokio::spawn(async move {
match future.await {
Ok(_) => {
metrics::API_KEY_REQUEST_COMPLETED
.add(1, router_env::metric_attributes!(("type", request_type)));
}
Err(e) => {
router_env::error!("Error in tracked job: {:?}", e);
}
}
});
}
// File: crates/router/src/services/authentication/detached.rs
use std::{borrow::Cow, string::ToString};
use actix_web::http::header::HeaderMap;
use common_utils::{
crypto::VerifySignature,
id_type::{ApiKeyId, MerchantId},
};
use error_stack::ResultExt;
use hyperswitch_domain_models::errors::api_error_response::ApiErrorResponse;
use crate::core::errors::RouterResult;
/// Header naming the payload type (snake_case `PayloadType` variant).
const HEADER_AUTH_TYPE: &str = "x-auth-type";
/// Header carrying the merchant id (required).
const HEADER_MERCHANT_ID: &str = "x-merchant-id";
/// Header carrying the API key id (optional).
const HEADER_KEY_ID: &str = "x-key-id";
/// Header carrying the hex-encoded signature of the payload.
const HEADER_CHECKSUM: &str = "x-checksum";
/// Fields extracted from the detached-authentication headers.
#[derive(Debug)]
pub struct ExtractedPayload {
    // Parsed from `x-auth-type`.
    pub payload_type: PayloadType,
    // Parsed from `x-merchant-id`; always `Some` when built via
    // `from_headers`, which rejects requests missing that header.
    pub merchant_id: Option<MerchantId>,
    // Parsed from the optional `x-key-id` header.
    pub key_id: Option<ApiKeyId>,
}
/// Kind of credential carried in a detached-auth payload; parses from and
/// displays as snake_case (`api_key` / `publishable_key`).
#[derive(strum::EnumString, strum::Display, PartialEq, Debug)]
#[strum(serialize_all = "snake_case")]
pub enum PayloadType {
    ApiKey,
    PublishableKey,
}
/// Implemented by auth schemes that can report which [`PayloadType`] they use.
pub trait GetAuthType {
    fn get_auth_type(&self) -> PayloadType;
}
impl ExtractedPayload {
    /// Builds an [`ExtractedPayload`] from the detached-auth headers.
    ///
    /// `x-merchant-id` and `x-auth-type` are mandatory; `x-key-id` is
    /// optional but must parse as an [`ApiKeyId`] when present. Any missing
    /// or malformed header yields `ApiErrorResponse::InvalidRequestData`.
    pub fn from_headers(headers: &HeaderMap) -> RouterResult<Self> {
        let merchant_id = headers
            .get(HEADER_MERCHANT_ID)
            .and_then(|value| value.to_str().ok())
            .ok_or_else(|| ApiErrorResponse::InvalidRequestData {
                message: format!("`{HEADER_MERCHANT_ID}` header is invalid or not present"),
            })
            .map_err(error_stack::Report::from)
            .and_then(|merchant_id| {
                MerchantId::try_from(Cow::from(merchant_id.to_string())).change_context(
                    ApiErrorResponse::InvalidRequestData {
                        message: format!(
                            "`{HEADER_MERCHANT_ID}` header is invalid or not present",
                        ),
                    },
                )
            })?;
        // Parsed via strum's `EnumString` (snake_case variant names).
        let auth_type: PayloadType = headers
            .get(HEADER_AUTH_TYPE)
            .and_then(|inner| inner.to_str().ok())
            .ok_or_else(|| ApiErrorResponse::InvalidRequestData {
                message: format!("`{HEADER_AUTH_TYPE}` header not present"),
            })?
            .parse::<PayloadType>()
            .change_context(ApiErrorResponse::InvalidRequestData {
                message: format!("`{HEADER_AUTH_TYPE}` header not present"),
            })?;
        // `transpose` keeps an absent header as `Ok(None)` while surfacing a
        // present-but-unparseable value as an error.
        let key_id = headers
            .get(HEADER_KEY_ID)
            .and_then(|value| value.to_str().ok())
            .map(|key_id| ApiKeyId::try_from(Cow::from(key_id.to_string())))
            .transpose()
            .change_context(ApiErrorResponse::InvalidRequestData {
                message: format!("`{HEADER_KEY_ID}` header is invalid or not present"),
            })?;
        Ok(Self {
            payload_type: auth_type,
            merchant_id: Some(merchant_id),
            key_id,
        })
    }
    /// Verifies the `x-checksum` header: the hex-decoded value must be a valid
    /// signature (under `algo` and `secret`) of [`Self::generate_payload`].
    /// A missing or undecodable header yields `false`.
    pub fn verify_checksum(
        &self,
        headers: &HeaderMap,
        algo: impl VerifySignature,
        secret: &[u8],
    ) -> bool {
        let output = || {
            let checksum = headers.get(HEADER_CHECKSUM)?.to_str().ok()?;
            let payload = self.generate_payload();
            algo.verify_signature(secret, &hex::decode(checksum).ok()?, payload.as_bytes())
                .ok()
        };
        output().unwrap_or(false)
    }
    // The payload should be `:` separated strings of all the fields
    fn generate_payload(&self) -> String {
        append_option(
            &self.payload_type.to_string(),
            &self
                .merchant_id
                .as_ref()
                .map(|inner| append_api_key(inner.get_string_repr(), &self.key_id)),
        )
    }
}
/// Joins `prefix` and `data` with a `:` when `data` is present; otherwise
/// returns `prefix` unchanged.
#[inline]
fn append_option(prefix: &str, data: &Option<String>) -> String {
    data.as_ref()
        .map(|suffix| format!("{prefix}:{suffix}"))
        .unwrap_or_else(|| prefix.to_string())
}
/// Joins `prefix` with the key id's string form using `:`; returns `prefix`
/// unchanged when no key id is available.
#[inline]
fn append_api_key(prefix: &str, data: &Option<ApiKeyId>) -> String {
    match data.as_ref() {
        None => prefix.to_string(),
        Some(key_id) => format!("{prefix}:{}", key_id.get_string_repr()),
    }
}
// File: crates/router/src/services/authentication/cookies.rs
use cookie::Cookie;
#[cfg(feature = "olap")]
use cookie::{
time::{Duration, OffsetDateTime},
SameSite,
};
use error_stack::{report, ResultExt};
#[cfg(feature = "olap")]
use masking::Mask;
#[cfg(feature = "olap")]
use masking::{ExposeInterface, Secret};
use crate::{
consts::JWT_TOKEN_COOKIE_NAME,
core::errors::{ApiErrorResponse, RouterResult},
};
#[cfg(feature = "olap")]
use crate::{
consts::JWT_TOKEN_TIME_IN_SECS,
core::errors::{UserErrors, UserResponse},
services::ApplicationResponse,
};
/// Wraps `response` in a `JsonWithHeaders` response whose `Set-Cookie` header
/// carries the JWT `token`, valid for `JWT_TOKEN_TIME_IN_SECS` seconds.
///
/// # Errors
/// `UserErrors::InternalServerError` when the configured lifetime cannot be
/// represented as an `i64` number of seconds.
#[cfg(feature = "olap")]
pub fn set_cookie_response<R>(response: R, token: Secret<String>) -> UserResponse<R> {
    let jwt_expiry_in_seconds = JWT_TOKEN_TIME_IN_SECS
        .try_into()
        .map_err(|_| UserErrors::InternalServerError)?;
    let (expiry, max_age) = get_expiry_and_max_age_from_seconds(jwt_expiry_in_seconds);
    // The cookie is serialized into a masked `Set-Cookie` header value.
    let header_value = create_cookie(token, expiry, max_age)
        .to_string()
        .into_masked();
    let header_key = get_set_cookie_header();
    let header = vec![(header_key, header_value)];
    Ok(ApplicationResponse::JsonWithHeaders((response, header)))
}
/// Returns a response whose `Set-Cookie` header clears the JWT cookie by
/// writing an empty value with a zero-second (immediate) expiry.
#[cfg(feature = "olap")]
pub fn remove_cookie_response() -> UserResponse<()> {
    let (expiry, max_age) = get_expiry_and_max_age_from_seconds(0);
    let header_key = get_set_cookie_header();
    let header_value = create_cookie("".to_string().into(), expiry, max_age)
        .to_string()
        .into_masked();
    let header = vec![(header_key, header_value)];
    Ok(ApplicationResponse::JsonWithHeaders(((), header)))
}
/// Extracts the JWT value from a raw `Cookie` header string.
///
/// Unparseable cookie fragments are skipped; the first cookie named
/// `JWT_TOKEN_COOKIE_NAME` wins.
///
/// # Errors
/// `ApiErrorResponse::InvalidJwtToken` when no matching cookie is found.
pub fn get_jwt_from_cookies(cookies: &str) -> RouterResult<String> {
    for parsed_cookie in Cookie::split_parse(cookies).flatten() {
        if parsed_cookie.name() == JWT_TOKEN_COOKIE_NAME {
            return Ok(parsed_cookie.value().to_owned());
        }
    }
    Err(report!(ApiErrorResponse::InvalidJwtToken))
        .attach_printable("Unable to find JWT token in cookies")
}
/// Builds the hardened JWT cookie: `HttpOnly`, `Secure`, `SameSite=Strict`,
/// scoped to `/`, with both `Expires` and `Max-Age` set.
#[cfg(feature = "olap")]
fn create_cookie<'c>(
    token: Secret<String>,
    expires: OffsetDateTime,
    max_age: Duration,
) -> Cookie<'c> {
    Cookie::build((JWT_TOKEN_COOKIE_NAME, token.expose()))
        .http_only(true)
        .secure(true)
        .same_site(SameSite::Strict)
        .path("/")
        .expires(expires)
        .max_age(max_age)
        .build()
}
/// Converts a lifetime in seconds into the (`Expires`, `Max-Age`) pair used by
/// `create_cookie`; the expiry is "now" plus the lifetime, saturating on overflow.
#[cfg(feature = "olap")]
fn get_expiry_and_max_age_from_seconds(seconds: i64) -> (OffsetDateTime, Duration) {
    let max_age = Duration::seconds(seconds);
    let expiry = OffsetDateTime::now_utc().saturating_add(max_age);
    (expiry, max_age)
}
/// Canonical `Set-Cookie` header name as an owned `String`.
#[cfg(feature = "olap")]
fn get_set_cookie_header() -> String {
    actix_http::header::SET_COOKIE.to_string()
}
/// Canonical `Cookie` header name as an owned `String`.
pub fn get_cookie_header() -> String {
    actix_http::header::COOKIE.to_string()
}
// File: crates/router/src/services/authentication/blacklist.rs
use std::sync::Arc;
#[cfg(feature = "olap")]
use common_utils::date_time;
use error_stack::ResultExt;
use redis_interface::RedisConnectionPool;
use super::AuthToken;
#[cfg(feature = "olap")]
use super::{SinglePurposeOrLoginToken, SinglePurposeToken};
#[cfg(feature = "email")]
use crate::consts::{EMAIL_TOKEN_BLACKLIST_PREFIX, EMAIL_TOKEN_TIME_IN_SECS};
use crate::{
consts::{JWT_TOKEN_TIME_IN_SECS, ROLE_BLACKLIST_PREFIX, USER_BLACKLIST_PREFIX},
core::errors::{ApiErrorResponse, RouterResult},
routes::app::SessionStateInfo,
};
#[cfg(feature = "olap")]
use crate::{
core::errors::{UserErrors, UserResult},
routes::SessionState,
services::authorization as authz,
};
/// Invalidates every JWT issued to `user_id` before "now" by storing the
/// current unix timestamp under `USER_BLACKLIST_PREFIX{user_id}` in Redis,
/// with a TTL of one full token lifetime.
#[cfg(feature = "olap")]
pub async fn insert_user_in_blacklist(state: &SessionState, user_id: &str) -> UserResult<()> {
    let user_blacklist_key = format!("{USER_BLACKLIST_PREFIX}{user_id}");
    let expiry =
        expiry_to_i64(JWT_TOKEN_TIME_IN_SECS).change_context(UserErrors::InternalServerError)?;
    let redis_conn = get_redis_connection_for_global_tenant(state)
        .change_context(UserErrors::InternalServerError)?;
    redis_conn
        .set_key_with_expiry(
            &user_blacklist_key.as_str().into(),
            date_time::now_unix_timestamp(),
            expiry,
        )
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Invalidates every JWT carrying `role_id` issued before "now" (same scheme
/// as `insert_user_in_blacklist`), then drops the cached authorization info
/// for that role so the change takes effect immediately.
#[cfg(feature = "olap")]
pub async fn insert_role_in_blacklist(state: &SessionState, role_id: &str) -> UserResult<()> {
    let role_blacklist_key = format!("{ROLE_BLACKLIST_PREFIX}{role_id}");
    let expiry =
        expiry_to_i64(JWT_TOKEN_TIME_IN_SECS).change_context(UserErrors::InternalServerError)?;
    let redis_conn = get_redis_connection_for_global_tenant(state)
        .change_context(UserErrors::InternalServerError)?;
    redis_conn
        .set_key_with_expiry(
            &role_blacklist_key.as_str().into(),
            date_time::now_unix_timestamp(),
            expiry,
        )
        .await
        .change_context(UserErrors::InternalServerError)?;
    invalidate_role_cache(state, role_id)
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Deletes the cached authorization entry for `role_id` so the next request
/// rebuilds it (and observes the fresh blacklist entry).
#[cfg(feature = "olap")]
async fn invalidate_role_cache(state: &SessionState, role_id: &str) -> RouterResult<()> {
    let redis_conn = get_redis_connection_for_global_tenant(state)?;
    redis_conn
        .delete_key(&authz::get_cache_key_from_role_id(role_id).as_str().into())
        .await
        .map(|_| ())
        .change_context(ApiErrorResponse::InternalServerError)
}
/// Returns `true` when `user_id` was blacklisted *after* this token was issued.
///
/// The issue time is reconstructed as `token_expiry - JWT_TOKEN_TIME_IN_SECS`.
/// `saturating_sub` guards against a malformed `exp` claim smaller than the
/// token lifetime, which previously underflowed the `u64` subtraction
/// (panicking in debug builds, wrapping to a huge value — and thus a spurious
/// internal error from `expiry_to_i64` — in release builds).
///
/// # Errors
/// `ApiErrorResponse::InternalServerError` on Redis failures.
pub async fn check_user_in_blacklist<A: SessionStateInfo>(
    state: &A,
    user_id: &str,
    token_expiry: u64,
) -> RouterResult<bool> {
    let token = format!("{USER_BLACKLIST_PREFIX}{user_id}");
    let token_issued_at = expiry_to_i64(token_expiry.saturating_sub(JWT_TOKEN_TIME_IN_SECS))?;
    let redis_conn = get_redis_connection_for_global_tenant(state)?;
    redis_conn
        .get_key::<Option<i64>>(&token.as_str().into())
        .await
        .change_context(ApiErrorResponse::InternalServerError)
        // `None` (no blacklist entry) orders below any `Some`, yielding `false`.
        .map(|timestamp| timestamp > Some(token_issued_at))
}
/// Returns `true` when `role_id` was blacklisted *after* this token was issued.
///
/// Mirrors `check_user_in_blacklist`: `saturating_sub` prevents a `u64`
/// underflow when a malformed token carries `exp < JWT_TOKEN_TIME_IN_SECS`.
///
/// # Errors
/// `ApiErrorResponse::InternalServerError` on Redis failures.
pub async fn check_role_in_blacklist<A: SessionStateInfo>(
    state: &A,
    role_id: &str,
    token_expiry: u64,
) -> RouterResult<bool> {
    let token = format!("{ROLE_BLACKLIST_PREFIX}{role_id}");
    let token_issued_at = expiry_to_i64(token_expiry.saturating_sub(JWT_TOKEN_TIME_IN_SECS))?;
    let redis_conn = get_redis_connection_for_global_tenant(state)?;
    redis_conn
        .get_key::<Option<i64>>(&token.as_str().into())
        .await
        .change_context(ApiErrorResponse::InternalServerError)
        // `None` (no blacklist entry) orders below any `Some`, yielding `false`.
        .map(|timestamp| timestamp > Some(token_issued_at))
}
/// Marks a single-use email `token` as consumed by writing a flag under
/// `EMAIL_TOKEN_BLACKLIST_PREFIX{token}` with the email-token TTL.
#[cfg(feature = "email")]
pub async fn insert_email_token_in_blacklist(state: &SessionState, token: &str) -> UserResult<()> {
    let redis_conn = get_redis_connection_for_global_tenant(state)
        .change_context(UserErrors::InternalServerError)?;
    let blacklist_key = format!("{EMAIL_TOKEN_BLACKLIST_PREFIX}{token}");
    let expiry =
        expiry_to_i64(EMAIL_TOKEN_TIME_IN_SECS).change_context(UserErrors::InternalServerError)?;
    redis_conn
        .set_key_with_expiry(&blacklist_key.as_str().into(), true, expiry)
        .await
        .change_context(UserErrors::InternalServerError)
}
/// Rejects an already-consumed email `token`.
///
/// # Errors
/// `UserErrors::LinkInvalid` if the token is blacklisted;
/// `UserErrors::InternalServerError` on Redis failures.
#[cfg(feature = "email")]
pub async fn check_email_token_in_blacklist(state: &SessionState, token: &str) -> UserResult<()> {
    let redis_conn = get_redis_connection_for_global_tenant(state)
        .change_context(UserErrors::InternalServerError)?;
    let blacklist_key = format!("{EMAIL_TOKEN_BLACKLIST_PREFIX}{token}");
    let key_exists = redis_conn
        .exists::<()>(&blacklist_key.as_str().into())
        .await
        .change_context(UserErrors::InternalServerError)?;
    if key_exists {
        return Err(UserErrors::LinkInvalid.into());
    }
    Ok(())
}
/// Fetches the Redis pool of the *global* store — blacklists are shared across
/// tenants, so the per-tenant store is deliberately bypassed.
fn get_redis_connection_for_global_tenant<A: SessionStateInfo>(
    state: &A,
) -> RouterResult<Arc<RedisConnectionPool>> {
    state
        .global_store()
        .get_redis_conn()
        .change_context(ApiErrorResponse::InternalServerError)
        .attach_printable("Failed to get redis connection")
}
/// Narrows a `u64` expiry to the `i64` Redis expects, surfacing overflow as an
/// internal error instead of truncating.
fn expiry_to_i64(expiry: u64) -> RouterResult<i64> {
    i64::try_from(expiry).change_context(ApiErrorResponse::InternalServerError)
}
/// Implemented by token types that can be revoked via the Redis blacklist;
/// returns `true` when the token must be rejected.
#[async_trait::async_trait]
pub trait BlackList {
    async fn check_in_blacklist<A>(&self, state: &A) -> RouterResult<bool>
    where
        A: SessionStateInfo + Sync;
}
#[async_trait::async_trait]
impl BlackList for AuthToken {
    /// A login token is revoked if either its user or its role was blacklisted
    /// after issuance; `||` short-circuits, so the role lookup is skipped when
    /// the user check already matched.
    async fn check_in_blacklist<A>(&self, state: &A) -> RouterResult<bool>
    where
        A: SessionStateInfo + Sync,
    {
        Ok(
            check_user_in_blacklist(state, &self.user_id, self.exp).await?
                || check_role_in_blacklist(state, &self.role_id, self.exp).await?,
        )
    }
}
#[cfg(feature = "olap")]
#[async_trait::async_trait]
impl BlackList for SinglePurposeToken {
    /// Single-purpose tokens carry no role, so only the user blacklist applies.
    async fn check_in_blacklist<A>(&self, state: &A) -> RouterResult<bool>
    where
        A: SessionStateInfo + Sync,
    {
        check_user_in_blacklist(state, &self.user_id, self.exp).await
    }
}
#[cfg(feature = "olap")]
#[async_trait::async_trait]
impl BlackList for SinglePurposeOrLoginToken {
    /// Same rule as `SinglePurposeToken`: only the user blacklist applies.
    async fn check_in_blacklist<A>(&self, state: &A) -> RouterResult<bool>
    where
        A: SessionStateInfo + Sync,
    {
        check_user_in_blacklist(state, &self.user_id, self.exp).await
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/services/authentication/decision.rs",
"crates/router/src/services/authentication/detached.rs",
"crates/router/src/services/authentication/cookies.rs",
"crates/router/src/services/authentication/blacklist.rs"
],
"module": "crates/router/src/services/authentication",
"num_files": 4,
"token_count": 4567
}
|
module_4669788984393363698
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: router
Module: crates/router/src/services/email
Files: 1
</path>
<module>
// File: crates/router/src/services/email/types.rs
use api_models::user::dashboard_metadata::ProdIntent;
use common_enums::{EntityType, MerchantProductType};
use common_utils::{errors::CustomResult, pii, types::user::EmailThemeConfig};
use error_stack::ResultExt;
use external_services::email::{EmailContents, EmailData, EmailError};
use masking::{ExposeInterface, PeekInterface, Secret};
use crate::{configs, consts, routes::SessionState};
#[cfg(feature = "olap")]
use crate::{
core::errors::{UserErrors, UserResult},
services::jwt,
types::domain,
};
/// Payload for every templated email the dashboard can send; each variant
/// carries exactly the values its HTML template interpolates.
pub enum EmailBody {
    /// Email-verification link, styled with the tenant theme.
    Verify {
        link: String,
        entity_name: String,
        entity_logo_url: String,
        primary_color: String,
        background_color: String,
        foreground_color: String,
    },
    /// Password-reset link.
    Reset {
        link: String,
        user_name: String,
        entity_name: String,
        entity_logo_url: String,
        primary_color: String,
        background_color: String,
        foreground_color: String,
    },
    /// Passwordless sign-in link.
    MagicLink {
        link: String,
        user_name: String,
        entity_name: String,
        entity_logo_url: String,
        primary_color: String,
        background_color: String,
        foreground_color: String,
    },
    /// Invitation for an existing dashboard user.
    InviteUser {
        link: String,
        user_name: String,
        entity_name: String,
        entity_logo_url: String,
        primary_color: String,
        background_color: String,
        foreground_color: String,
    },
    /// Invitation sent directly to an email address.
    AcceptInviteFromEmail {
        link: String,
        user_name: String,
        entity_name: String,
        entity_logo_url: String,
        primary_color: String,
        background_color: String,
        foreground_color: String,
    },
    /// Internal notification raised when a merchant files a prod intent.
    BizEmailProd {
        user_name: String,
        poc_email: String,
        legal_business_name: String,
        business_location: String,
        business_website: String,
        product_type: MerchantProductType,
    },
    /// Reconciliation-access activation notice.
    ReconActivation {
        user_name: String,
    },
    /// Plain-text request for a paid dashboard feature.
    ProFeatureRequest {
        feature_name: String,
        merchant_id: common_utils::id_type::MerchantId,
        user_name: String,
        user_email: String,
    },
    /// Reminder that an API key expires in `expires_in` days.
    ApiKeyExpiryReminder {
        expires_in: u8,
        api_key_name: String,
        prefix: String,
    },
    /// Static welcome email; no interpolated fields.
    WelcomeToCommunity,
}
/// Plain-HTML rendering of [`EmailBody`] variants.
pub mod html {
    use crate::services::email::types::EmailBody;
    /// Renders `email_body` by interpolating its fields into the matching
    /// template under `assets/`.
    ///
    /// NOTE: the named arguments passed to `format!` must match the
    /// placeholders inside each `include_str!` template exactly — a rename
    /// here requires updating the corresponding asset file.
    pub fn get_html_body(email_body: EmailBody) -> String {
        match email_body {
            EmailBody::Verify {
                link,
                entity_name,
                entity_logo_url,
                primary_color,
                background_color,
                foreground_color,
            } => {
                format!(
                    include_str!("assets/verify.html"),
                    link = link,
                    entity_name = entity_name,
                    entity_logo_url = entity_logo_url,
                    primary_color = primary_color,
                    background_color = background_color,
                    foreground_color = foreground_color
                )
            }
            EmailBody::Reset {
                link,
                user_name,
                entity_name,
                entity_logo_url,
                primary_color,
                background_color,
                foreground_color,
            } => {
                format!(
                    include_str!("assets/reset.html"),
                    link = link,
                    username = user_name,
                    entity_name = entity_name,
                    entity_logo_url = entity_logo_url,
                    primary_color = primary_color,
                    background_color = background_color,
                    foreground_color = foreground_color
                )
            }
            EmailBody::MagicLink {
                link,
                user_name,
                entity_name,
                entity_logo_url,
                primary_color,
                background_color,
                foreground_color,
            } => {
                format!(
                    include_str!("assets/magic_link.html"),
                    username = user_name,
                    link = link,
                    entity_name = entity_name,
                    entity_logo_url = entity_logo_url,
                    primary_color = primary_color,
                    background_color = background_color,
                    foreground_color = foreground_color
                )
            }
            EmailBody::InviteUser {
                link,
                user_name,
                entity_name,
                entity_logo_url,
                primary_color,
                background_color,
                foreground_color,
            } => {
                format!(
                    include_str!("assets/invite.html"),
                    username = user_name,
                    link = link,
                    entity_name = entity_name,
                    entity_logo_url = entity_logo_url,
                    primary_color = primary_color,
                    background_color = background_color,
                    foreground_color = foreground_color
                )
            }
            // TODO: Change the linked html for accept invite from email
            EmailBody::AcceptInviteFromEmail {
                link,
                user_name,
                entity_name,
                entity_logo_url,
                primary_color,
                background_color,
                foreground_color,
            } => {
                format!(
                    include_str!("assets/invite.html"),
                    username = user_name,
                    link = link,
                    entity_name = entity_name,
                    entity_logo_url = entity_logo_url,
                    primary_color = primary_color,
                    background_color = background_color,
                    foreground_color = foreground_color
                )
            }
            EmailBody::ReconActivation { user_name } => {
                format!(
                    include_str!("assets/recon_activation.html"),
                    username = user_name,
                )
            }
            EmailBody::BizEmailProd {
                user_name,
                poc_email,
                legal_business_name,
                business_location,
                business_website,
                product_type,
            } => {
                format!(
                    include_str!("assets/bizemailprod.html"),
                    poc_email = poc_email,
                    legal_business_name = legal_business_name,
                    business_location = business_location,
                    business_website = business_website,
                    username = user_name,
                    product_type = product_type
                )
            }
            // Plain-text body (no asset template).
            EmailBody::ProFeatureRequest {
                feature_name,
                merchant_id,
                user_name,
                user_email,
            } => format!(
                "Dear Hyperswitch Support Team,
Dashboard Pro Feature Request,
Feature name : {feature_name}
Merchant ID : {}
Merchant Name : {user_name}
Email : {user_email}
(note: This is an auto generated email. Use merchant email for any further communications)",
                merchant_id.get_string_repr()
            ),
            EmailBody::ApiKeyExpiryReminder {
                expires_in,
                api_key_name,
                prefix,
            } => format!(
                include_str!("assets/api_key_expiry_reminder.html"),
                api_key_name = api_key_name,
                prefix = prefix,
                expires_in = expires_in,
            ),
            EmailBody::WelcomeToCommunity => {
                include_str!("assets/welcome_to_community.html").to_string()
            }
        }
    }
}
/// Claims carried by the short-lived email-flow JWT.
#[derive(serde::Serialize, serde::Deserialize)]
pub struct EmailToken {
    // Recipient address (stored as plain text inside the signed token).
    email: String,
    // Originating flow (verify / reset / magic link / invite ...).
    flow: domain::Origin,
    // Expiry as unix seconds.
    exp: u64,
    // Optional entity the token is scoped to (used by invites).
    entity: Option<Entity>,
}
/// Entity (id + type) an email token is scoped to.
#[derive(serde::Serialize, serde::Deserialize, Clone)]
pub struct Entity {
    pub entity_id: String,
    pub entity_type: EntityType,
}
impl Entity {
    /// Entity type (copied out; `EntityType` is `Copy`-style enum usage).
    pub fn get_entity_type(&self) -> EntityType {
        self.entity_type
    }
    /// Borrowed entity identifier.
    pub fn get_entity_id(&self) -> &str {
        &self.entity_id
    }
}
impl EmailToken {
    /// Signs a new email JWT for `email`, expiring `EMAIL_TOKEN_TIME_IN_SECS`
    /// from now, optionally scoped to `entity` and tagged with the
    /// originating `flow`.
    pub async fn new_token(
        email: domain::UserEmail,
        entity: Option<Entity>,
        flow: domain::Origin,
        settings: &configs::Settings,
    ) -> UserResult<String> {
        let expiration_duration = std::time::Duration::from_secs(consts::EMAIL_TOKEN_TIME_IN_SECS);
        let exp = jwt::generate_exp(expiration_duration)?.as_secs();
        let token_payload = Self {
            email: email.get_secret().expose(),
            flow,
            exp,
            entity,
        };
        jwt::generate_jwt(&token_payload, settings).await
    }
    /// Re-validates the stored address and wraps it back into a `UserEmail`.
    pub fn get_email(&self) -> UserResult<domain::UserEmail> {
        pii::Email::try_from(self.email.clone())
            .change_context(UserErrors::InternalServerError)
            .and_then(domain::UserEmail::from_pii_email)
    }
    /// Entity scope of the token, if any.
    pub fn get_entity(&self) -> Option<&Entity> {
        self.entity.as_ref()
    }
    /// Flow the token was minted for.
    pub fn get_flow(&self) -> domain::Origin {
        self.flow.clone()
    }
}
/// Builds an email action link of the form
/// `{base_url}/user/{action}?token={token}`, appending `auth_id` and
/// `theme_id` query parameters only when they are present.
pub fn get_link_with_token(
    base_url: impl std::fmt::Display,
    token: impl std::fmt::Display,
    action: impl std::fmt::Display,
    auth_id: &Option<impl std::fmt::Display>,
    theme_id: &Option<impl std::fmt::Display>,
) -> String {
    let mut link = format!("{base_url}/user/{action}?token={token}");
    if let Some(auth_id) = auth_id.as_ref() {
        link.push_str(&format!("&auth_id={auth_id}"));
    }
    if let Some(theme_id) = theme_id.as_ref() {
        link.push_str(&format!("&theme_id={theme_id}"));
    }
    link
}
/// Inputs for the email-verification email.
pub struct VerifyEmail {
    pub recipient_email: domain::UserEmail,
    pub settings: std::sync::Arc<configs::Settings>,
    pub auth_id: Option<String>,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
/// Currently only HTML is supported
#[async_trait::async_trait]
impl EmailData for VerifyEmail {
    /// Mints a verify-email token, embeds it in a `verify_email` link and
    /// renders the themed verification email.
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let token = EmailToken::new_token(
            self.recipient_email.clone(),
            None,
            domain::Origin::VerifyEmail,
            &self.settings,
        )
        .await
        .change_context(EmailError::TokenGenerationFailure)?;
        let verify_email_link = get_link_with_token(
            base_url,
            token,
            "verify_email",
            &self.auth_id,
            &self.theme_id,
        );
        let body = html::get_html_body(EmailBody::Verify {
            link: verify_email_link,
            entity_name: self.theme_config.entity_name.clone(),
            entity_logo_url: self.theme_config.entity_logo_url.clone(),
            primary_color: self.theme_config.primary_color.clone(),
            background_color: self.theme_config.background_color.clone(),
            foreground_color: self.theme_config.foreground_color.clone(),
        });
        Ok(EmailContents {
            subject: format!(
                "Welcome to the {} community!",
                self.theme_config.entity_name
            ),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Inputs for the password-reset email.
pub struct ResetPassword {
    pub recipient_email: domain::UserEmail,
    pub user_name: domain::UserName,
    pub settings: std::sync::Arc<configs::Settings>,
    pub auth_id: Option<String>,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for ResetPassword {
    /// Mints a reset-password token, embeds it in a `set_password` link and
    /// renders the themed reset email.
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let token = EmailToken::new_token(
            self.recipient_email.clone(),
            None,
            domain::Origin::ResetPassword,
            &self.settings,
        )
        .await
        .change_context(EmailError::TokenGenerationFailure)?;
        let reset_password_link = get_link_with_token(
            base_url,
            token,
            "set_password",
            &self.auth_id,
            &self.theme_id,
        );
        let body = html::get_html_body(EmailBody::Reset {
            link: reset_password_link,
            user_name: self.user_name.clone().get_secret().expose(),
            entity_name: self.theme_config.entity_name.clone(),
            entity_logo_url: self.theme_config.entity_logo_url.clone(),
            primary_color: self.theme_config.primary_color.clone(),
            background_color: self.theme_config.background_color.clone(),
            foreground_color: self.theme_config.foreground_color.clone(),
        });
        Ok(EmailContents {
            subject: format!(
                "Get back to {} - Reset Your Password Now!",
                self.theme_config.entity_name
            ),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Inputs for the passwordless (magic-link) sign-in email.
pub struct MagicLink {
    pub recipient_email: domain::UserEmail,
    pub user_name: domain::UserName,
    pub settings: std::sync::Arc<configs::Settings>,
    pub auth_id: Option<String>,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for MagicLink {
    /// Mints a magic-link token and renders the themed sign-in email.
    /// The link reuses the `verify_email` dashboard route; the embedded
    /// token's `Origin::MagicLink` flow disambiguates it server-side.
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let token = EmailToken::new_token(
            self.recipient_email.clone(),
            None,
            domain::Origin::MagicLink,
            &self.settings,
        )
        .await
        .change_context(EmailError::TokenGenerationFailure)?;
        let magic_link_login = get_link_with_token(
            base_url,
            token,
            "verify_email",
            &self.auth_id,
            &self.theme_id,
        );
        let body = html::get_html_body(EmailBody::MagicLink {
            link: magic_link_login,
            user_name: self.user_name.clone().get_secret().expose(),
            entity_name: self.theme_config.entity_name.clone(),
            entity_logo_url: self.theme_config.entity_logo_url.clone(),
            primary_color: self.theme_config.primary_color.clone(),
            background_color: self.theme_config.background_color.clone(),
            foreground_color: self.theme_config.foreground_color.clone(),
        });
        Ok(EmailContents {
            subject: format!(
                "Unlock {}: Use Your Magic Link to Sign In",
                self.theme_config.entity_name
            ),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Inputs for the user-invitation email; `entity` scopes the invite.
pub struct InviteUser {
    pub recipient_email: domain::UserEmail,
    pub user_name: domain::UserName,
    pub settings: std::sync::Arc<configs::Settings>,
    pub entity: Entity,
    pub auth_id: Option<String>,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for InviteUser {
    /// Mints an entity-scoped invitation token, embeds it in an
    /// `accept_invite_from_email` link and renders the themed invite email.
    async fn get_email_data(&self, base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let token = EmailToken::new_token(
            self.recipient_email.clone(),
            Some(self.entity.clone()),
            domain::Origin::AcceptInvitationFromEmail,
            &self.settings,
        )
        .await
        .change_context(EmailError::TokenGenerationFailure)?;
        let invite_user_link = get_link_with_token(
            base_url,
            token,
            "accept_invite_from_email",
            &self.auth_id,
            &self.theme_id,
        );
        let body = html::get_html_body(EmailBody::AcceptInviteFromEmail {
            link: invite_user_link,
            user_name: self.user_name.clone().get_secret().expose(),
            entity_name: self.theme_config.entity_name.clone(),
            entity_logo_url: self.theme_config.entity_logo_url.clone(),
            primary_color: self.theme_config.primary_color.clone(),
            background_color: self.theme_config.background_color.clone(),
            foreground_color: self.theme_config.foreground_color.clone(),
        });
        Ok(EmailContents {
            subject: format!(
                "You have been invited to join {} Community!",
                self.theme_config.entity_name
            ),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Inputs for the reconciliation-activation notice (no token/link needed).
pub struct ReconActivation {
    pub recipient_email: domain::UserEmail,
    pub user_name: domain::UserName,
    pub subject: &'static str,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for ReconActivation {
    /// Renders the static recon-activation email; `base_url` is unused
    /// because the body contains no link.
    async fn get_email_data(&self, _base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let body = html::get_html_body(EmailBody::ReconActivation {
            user_name: self.user_name.clone().get_secret().expose(),
        });
        Ok(EmailContents {
            subject: self.subject.to_string(),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Internal "prod intent" notification sent to the configured ops mailbox.
pub struct BizEmailProd {
    pub recipient_email: domain::UserEmail,
    pub user_name: Secret<String>,
    pub poc_email: Secret<String>,
    pub legal_business_name: String,
    pub business_location: String,
    pub business_website: String,
    pub settings: std::sync::Arc<configs::Settings>,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
    pub product_type: MerchantProductType,
}
impl BizEmailProd {
    /// Builds the notification from a merchant's `ProdIntent`, targeting the
    /// configured `prod_intent_recipient_email`. Missing optional fields fall
    /// back to empty strings; a missing location defaults to
    /// `CountryAlpha2::AD`.
    pub fn new(
        state: &SessionState,
        data: ProdIntent,
        theme_id: Option<String>,
        theme_config: EmailThemeConfig,
    ) -> UserResult<Self> {
        Ok(Self {
            recipient_email: domain::UserEmail::from_pii_email(
                state.conf.email.prod_intent_recipient_email.clone(),
            )?,
            settings: state.conf.clone(),
            user_name: data
                .poc_name
                .map(|s| Secret::new(s.peek().clone().into_inner()))
                .unwrap_or_default(),
            poc_email: data
                .poc_email
                .map(|s| Secret::new(s.peek().clone()))
                .unwrap_or_default(),
            legal_business_name: data
                .legal_business_name
                .map(|s| s.into_inner())
                .unwrap_or_default(),
            business_location: data
                .business_location
                .unwrap_or(common_enums::CountryAlpha2::AD)
                .to_string(),
            business_website: data
                .business_website
                .map(|s| s.into_inner())
                .unwrap_or_default(),
            theme_id,
            theme_config,
            product_type: data.product_type,
        })
    }
}
#[async_trait::async_trait]
impl EmailData for BizEmailProd {
    /// Renders the prod-intent notification; no token or link is involved.
    async fn get_email_data(&self, _base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let body = html::get_html_body(EmailBody::BizEmailProd {
            user_name: self.user_name.clone().expose(),
            poc_email: self.poc_email.clone().expose(),
            legal_business_name: self.legal_business_name.clone(),
            business_location: self.business_location.clone(),
            business_website: self.business_website.clone(),
            product_type: self.product_type,
        });
        Ok(EmailContents {
            subject: "New Prod Intent".to_string(),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
/// Plain-text request for a paid dashboard feature, sent to support.
pub struct ProFeatureRequest {
    pub recipient_email: domain::UserEmail,
    pub feature_name: String,
    pub merchant_id: common_utils::id_type::MerchantId,
    pub user_name: domain::UserName,
    pub user_email: domain::UserEmail,
    pub subject: String,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for ProFeatureRequest {
    /// Renders the plain-text feature request; no token or link is involved.
    async fn get_email_data(&self, _base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let recipient = self.recipient_email.clone().into_inner();
        let body = html::get_html_body(EmailBody::ProFeatureRequest {
            user_name: self.user_name.clone().get_secret().expose(),
            feature_name: self.feature_name.clone(),
            merchant_id: self.merchant_id.clone(),
            user_email: self.user_email.clone().get_secret().expose(),
        });
        Ok(EmailContents {
            subject: self.subject.clone(),
            body: external_services::email::IntermediateString::new(body),
            recipient,
        })
    }
}
/// Reminder that the API key `prefix`/`api_key_name` expires in `expires_in` days.
pub struct ApiKeyExpiryReminder {
    pub recipient_email: domain::UserEmail,
    pub subject: &'static str,
    pub expires_in: u8,
    pub api_key_name: String,
    pub prefix: String,
    pub theme_id: Option<String>,
    pub theme_config: EmailThemeConfig,
}
#[async_trait::async_trait]
impl EmailData for ApiKeyExpiryReminder {
    /// Renders the expiry-reminder template; no token or link is involved.
    async fn get_email_data(&self, _base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let recipient = self.recipient_email.clone().into_inner();
        let body = html::get_html_body(EmailBody::ApiKeyExpiryReminder {
            expires_in: self.expires_in,
            api_key_name: self.api_key_name.clone(),
            prefix: self.prefix.clone(),
        });
        Ok(EmailContents {
            subject: self.subject.to_string(),
            body: external_services::email::IntermediateString::new(body),
            recipient,
        })
    }
}
/// Static welcome email; only the recipient is configurable.
pub struct WelcomeToCommunity {
    pub recipient_email: domain::UserEmail,
}
#[async_trait::async_trait]
impl EmailData for WelcomeToCommunity {
    /// Returns the static welcome body with a fixed subject.
    async fn get_email_data(&self, _base_url: &str) -> CustomResult<EmailContents, EmailError> {
        let body = html::get_html_body(EmailBody::WelcomeToCommunity);
        Ok(EmailContents {
            subject: "Thank you for signing up on Hyperswitch Dashboard!".to_string(),
            body: external_services::email::IntermediateString::new(body),
            recipient: self.recipient_email.clone().into_inner(),
        })
    }
}
</module>
|
{
"crate": "router",
"file": null,
"files": [
"crates/router/src/services/email/types.rs"
],
"module": "crates/router/src/services/email",
"num_files": 1,
"token_count": 4533
}
|
module_4882760121450281612
|
clm
|
module
|
<path>
Repository: hyperswitch
Crate: connector-template
Module: connector-template
Files: 3
</path>
<module>
// File: connector-template/test.rs
use hyperswitch_domain_models::payment_method_data::{Card, PaymentMethodData};
use masking::Secret;
use router::{
types::{self, api, storage::enums,
}};
use crate::utils::{self, ConnectorActions};
use test_utils::connector_auth;
// Zero-sized marker type driving the generated connector's integration tests.
#[derive(Clone, Copy)]
struct {{project-name | downcase | pascal_case}}Test;
// Inherits the default payment/refund test actions from the shared harness.
impl ConnectorActions for {{project-name | downcase | pascal_case}}Test {}
impl utils::Connector for {{project-name | downcase | pascal_case}}Test {
    // Connector registration consumed by the shared test utilities.
    fn get_data(&self) -> api::ConnectorData {
        use router::connector::{{project-name | downcase | pascal_case}};
        utils::construct_connector_data_old(
            Box::new({{project-name | downcase | pascal_case}}::new()),
            // NOTE(review): `Plaid` looks like a placeholder left by the
            // template — swap in the new connector's `types::Connector`
            // variant; confirm against the generator docs.
            types::Connector::Plaid,
            api::GetToken::Connector,
            None,
        )
    }
    // Reads this connector's credentials from the shared auth configuration;
    // panics when the corresponding section is missing.
    fn get_auth_token(&self) -> types::ConnectorAuthType {
        utils::to_connector_auth_type(
            connector_auth::ConnectorAuthentication::new()
                .{{project-name | downcase}}
                .expect("Missing connector authentication configuration").into(),
        )
    }
    // Connector name as referenced in configuration files.
    fn get_name(&self) -> String {
        "{{project-name | downcase}}".to_string()
    }
}
// Shared, zero-sized connector instance used by every test below.
static CONNECTOR: {{project-name | downcase | pascal_case}}Test = {{project-name | downcase | pascal_case}}Test {};
// Per-connector payment-info override; `None` uses the framework defaults.
fn get_default_payment_info() -> Option<utils::PaymentInfo> {
    None
}
// Per-connector authorize-request override; `None` uses the framework defaults.
fn payment_method_details() -> Option<types::PaymentsAuthorizeData> {
    None
}
// Cards Positive Tests
// Creates a payment using the manual capture flow (Non 3DS).
/// Manual-capture flow: authorize only, expect `Authorized`.
#[actix_web::test]
async fn should_only_authorize_payment() {
    let response = CONNECTOR
        .authorize_payment(payment_method_details(), get_default_payment_info())
        .await
        .expect("Authorize payment response");
    assert_eq!(response.status, enums::AttemptStatus::Authorized);
}
// Captures a payment using the manual capture flow (Non 3DS).
/// Manual-capture flow: authorize then capture in full, expect `Charged`.
#[actix_web::test]
async fn should_capture_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_capture_payment(payment_method_details(), None, get_default_payment_info())
        .await
        .expect("Capture payment response");
    assert_eq!(response.status, enums::AttemptStatus::Charged);
}
// Partially captures a payment using the manual capture flow (Non 3DS).
/// Manual-capture flow: capture only part (50) of the authorized amount.
#[actix_web::test]
async fn should_partially_capture_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_capture_payment(
            payment_method_details(),
            Some(types::PaymentsCaptureData {
                amount_to_capture: 50,
                ..utils::PaymentCaptureType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .expect("Capture payment response");
    assert_eq!(response.status, enums::AttemptStatus::Charged);
}
// Synchronizes a payment using the manual capture flow (Non 3DS).
/// Authorizes a payment, then polls PSync until the connector reports `Authorized`.
#[actix_web::test]
async fn should_sync_authorized_payment() {
    let authorize_response = CONNECTOR
        .authorize_payment(payment_method_details(), get_default_payment_info())
        .await
        .expect("Authorize payment response");
    let txn_id = utils::get_connector_transaction_id(authorize_response.response);
    let response = CONNECTOR
        .psync_retry_till_status_matches(
            enums::AttemptStatus::Authorized,
            Some(types::PaymentsSyncData {
                connector_transaction_id: types::ResponseId::ConnectorTransactionId(
                    txn_id.unwrap(),
                ),
                ..Default::default()
            }),
            get_default_payment_info(),
        )
        .await
        .expect("PSync response");
    assert_eq!(response.status, enums::AttemptStatus::Authorized,);
}
// Voids a payment using the manual capture flow (Non 3DS).
/// Authorizes then voids the payment, expecting `Voided`.
#[actix_web::test]
async fn should_void_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_void_payment(
            payment_method_details(),
            Some(types::PaymentsCancelData {
                connector_transaction_id: String::from(""),
                cancellation_reason: Some("requested_by_customer".to_string()),
                ..Default::default()
            }),
            get_default_payment_info(),
        )
        .await
        .expect("Void payment response");
    assert_eq!(response.status, enums::AttemptStatus::Voided);
}
// Refunds a payment using the manual capture flow (Non 3DS).
/// Captures a payment and refunds it in full, expecting `Success`.
#[actix_web::test]
async fn should_refund_manually_captured_payment() {
    let response = CONNECTOR
        .capture_payment_and_refund(payment_method_details(), None, None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Partially refunds a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_partially_refund_manually_captured_payment() {
    // Refund only part (50) of the captured amount.
    let partial_refund = Some(types::RefundsData {
        refund_amount: 50,
        ..utils::PaymentRefundType::default().0
    });
    let refund_response = CONNECTOR
        .capture_payment_and_refund(
            payment_method_details(),
            None,
            partial_refund,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        refund_response.response.unwrap().refund_status,
        enums::RefundStatus::Success
    );
}
// Synchronizes a refund using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_manually_captured_refund() {
    // Create a refund, then poll the connector until it reports success.
    let refund_response = CONNECTOR
        .capture_payment_and_refund(
            payment_method_details(),
            None,
            None,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    let connector_refund_id = refund_response.response.unwrap().connector_refund_id;
    let sync_response = CONNECTOR
        .rsync_retry_till_status_matches(
            enums::RefundStatus::Success,
            connector_refund_id,
            None,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        sync_response.response.unwrap().refund_status,
        enums::RefundStatus::Success
    );
}
// Creates a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_make_payment() {
    let payment_response = CONNECTOR
        .make_payment(payment_method_details(), get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(payment_response.status, enums::AttemptStatus::Charged);
}
// Synchronizes a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_auto_captured_payment() {
    let payment_response = CONNECTOR
        .make_payment(payment_method_details(), get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(payment_response.status, enums::AttemptStatus::Charged);
    let transaction_id = utils::get_connector_transaction_id(payment_response.response);
    assert_ne!(transaction_id, None, "Empty connector transaction id");
    // Poll until the connector confirms the Charged status.
    let sync_data = Some(types::PaymentsSyncData {
        connector_transaction_id: types::ResponseId::ConnectorTransactionId(
            transaction_id.unwrap(),
        ),
        capture_method: Some(enums::CaptureMethod::Automatic),
        ..Default::default()
    });
    let sync_response = CONNECTOR
        .psync_retry_till_status_matches(
            enums::AttemptStatus::Charged,
            sync_data,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(sync_response.status, enums::AttemptStatus::Charged);
}
// Refunds a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_refund_auto_captured_payment() {
    let refund_response = CONNECTOR
        .make_payment_and_refund(payment_method_details(), None, get_default_payment_info())
        .await
        .unwrap();
    let refund_status = refund_response.response.unwrap().refund_status;
    assert_eq!(refund_status, enums::RefundStatus::Success);
}
// Partially refunds a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_partially_refund_succeeded_payment() {
    // Refund only part (50) of an auto-captured payment.
    let partial_refund = Some(types::RefundsData {
        refund_amount: 50,
        ..utils::PaymentRefundType::default().0
    });
    let refund_response = CONNECTOR
        .make_payment_and_refund(
            payment_method_details(),
            partial_refund,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        refund_response.response.unwrap().refund_status,
        enums::RefundStatus::Success
    );
}
// Creates multiple refunds against a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_refund_succeeded_payment_multiple_times() {
    // NOTE(review): no assertion here — the helper presumably validates each
    // refund internally; confirm against the utils implementation.
    let partial_refund = Some(types::RefundsData {
        refund_amount: 50,
        ..utils::PaymentRefundType::default().0
    });
    CONNECTOR
        .make_payment_and_multiple_refund(
            payment_method_details(),
            partial_refund,
            get_default_payment_info(),
        )
        .await;
}
// Synchronizes a refund using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_refund() {
    // Create a refund, then poll the connector until it reports success.
    let refund_response = CONNECTOR
        .make_payment_and_refund(payment_method_details(), None, get_default_payment_info())
        .await
        .unwrap();
    let connector_refund_id = refund_response.response.unwrap().connector_refund_id;
    let sync_response = CONNECTOR
        .rsync_retry_till_status_matches(
            enums::RefundStatus::Success,
            connector_refund_id,
            None,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        sync_response.response.unwrap().refund_status,
        enums::RefundStatus::Success
    );
}
// Cards Negative scenarios
// Creates a payment with incorrect CVC.
#[actix_web::test]
async fn should_fail_payment_for_incorrect_cvc() {
    // A 5-digit CVC is invalid; the connector must reject the payment.
    let authorize_data = Some(types::PaymentsAuthorizeData {
        payment_method_data: PaymentMethodData::Card(Card {
            card_cvc: Secret::new("12345".to_string()),
            ..utils::CCardType::default().0
        }),
        ..utils::PaymentAuthorizeType::default().0
    });
    let payment_response = CONNECTOR
        .make_payment(authorize_data, get_default_payment_info())
        .await
        .unwrap();
    let error_message = payment_response.response.unwrap_err().message;
    assert_eq!(
        error_message,
        "Your card's security code is invalid.".to_string()
    );
}
// Creates a payment with incorrect expiry month.
#[actix_web::test]
async fn should_fail_payment_for_invalid_exp_month() {
    // Month "20" is out of range; the connector must reject the payment.
    let authorize_data = Some(types::PaymentsAuthorizeData {
        payment_method_data: PaymentMethodData::Card(Card {
            card_exp_month: Secret::new("20".to_string()),
            ..utils::CCardType::default().0
        }),
        ..utils::PaymentAuthorizeType::default().0
    });
    let payment_response = CONNECTOR
        .make_payment(authorize_data, get_default_payment_info())
        .await
        .unwrap();
    let error_message = payment_response.response.unwrap_err().message;
    assert_eq!(
        error_message,
        "Your card's expiration month is invalid.".to_string()
    );
}
// Creates a payment with incorrect expiry year.
#[actix_web::test]
async fn should_fail_payment_for_incorrect_expiry_year() {
    // An expiry year in the past must be rejected by the connector.
    let authorize_data = Some(types::PaymentsAuthorizeData {
        payment_method_data: PaymentMethodData::Card(Card {
            card_exp_year: Secret::new("2000".to_string()),
            ..utils::CCardType::default().0
        }),
        ..utils::PaymentAuthorizeType::default().0
    });
    let payment_response = CONNECTOR
        .make_payment(authorize_data, get_default_payment_info())
        .await
        .unwrap();
    let error_message = payment_response.response.unwrap_err().message;
    assert_eq!(
        error_message,
        "Your card's expiration year is invalid.".to_string()
    );
}
// Voids a payment using automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_fail_void_payment_for_auto_capture() {
    // An auto-captured (already Charged) payment cannot be cancelled.
    let payment_response = CONNECTOR
        .make_payment(payment_method_details(), get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(payment_response.status, enums::AttemptStatus::Charged);
    let transaction_id = utils::get_connector_transaction_id(payment_response.response);
    assert_ne!(transaction_id, None, "Empty connector transaction id");
    let void_response = CONNECTOR
        .void_payment(transaction_id.unwrap(), None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        void_response.response.unwrap_err().message,
        "You cannot cancel this PaymentIntent because it has a status of succeeded."
    );
}
// Captures a payment using invalid connector payment id.
#[actix_web::test]
async fn should_fail_capture_for_invalid_payment() {
    // Capturing a non-existent payment id must surface the connector's error.
    let capture_response = CONNECTOR
        .capture_payment("123456789".to_string(), None, get_default_payment_info())
        .await
        .unwrap();
    let error_message = capture_response.response.unwrap_err().message;
    assert_eq!(
        error_message,
        String::from("No such payment_intent: '123456789'")
    );
}
// Refunds a payment with refund amount higher than payment amount.
#[actix_web::test]
async fn should_fail_for_refund_amount_higher_than_payment_amount() {
    // Ask for a refund (150) that exceeds the charged amount.
    let oversized_refund = Some(types::RefundsData {
        refund_amount: 150,
        ..utils::PaymentRefundType::default().0
    });
    let refund_response = CONNECTOR
        .make_payment_and_refund(
            payment_method_details(),
            oversized_refund,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        refund_response.response.unwrap_err().message,
        "Refund amount (₹1.50) is greater than charge amount (₹1.00)",
    );
}
// Connector dependent test cases goes here
// [#478]: add unit tests for non 3DS, wallets & webhooks in connector tests
// File: connector-template/transformers.rs
use common_enums::enums;
use serde::{Deserialize, Serialize};
use masking::Secret;
use common_utils::types::{StringMinorUnit};
use hyperswitch_domain_models::{
payment_method_data::PaymentMethodData,
router_data::{ConnectorAuthType, RouterData},
router_flow_types::refunds::{Execute, RSync},
router_request_types::ResponseId,
router_response_types::{PaymentsResponseData, RefundsResponseData},
types::{PaymentsAuthorizeRouterData, RefundsRouterData},
};
use hyperswitch_interfaces::errors;
use crate::types::{RefundsResponseRouterData, ResponseRouterData};
//TODO: Fill the struct with respective fields
// NOTE: `{{project-name | ...}}` placeholders are expanded by the connector
// generator; this template file only compiles after rendering.
// Pairs the connector-formatted amount with the framework router data so the
// request builders receive both in a single value.
pub struct {{project-name | downcase | pascal_case}}RouterData<T> {
    pub amount: StringMinorUnit, // The type of amount that a connector accepts, for example, String, i64, f64, etc.
    pub router_data: T,
}

impl<T>
    From<(
        StringMinorUnit,
        T,
    )> for {{project-name | downcase | pascal_case}}RouterData<T>
{
    // Wraps an already-converted amount together with the router data.
    fn from(
        (amount, item): (
            StringMinorUnit,
            T,
        ),
    ) -> Self {
        //Todo : use utils to convert the amount to the type of amount that a connector accepts
        Self {
            amount,
            router_data: item,
        }
    }
}
//TODO: Fill the struct with respective fields
// Request body for the connector's payment-authorize call.
#[derive(Default, Debug, Serialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}PaymentsRequest {
    amount: StringMinorUnit,
    card: {{project-name | downcase | pascal_case}}Card
}

// Card details in the shape this connector expects; sensitive fields are kept
// behind `Secret` so they stay masked until serialization.
#[derive(Default, Debug, Serialize, Eq, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}Card {
    number: cards::CardNumber,
    expiry_month: Secret<String>,
    expiry_year: Secret<String>,
    cvc: Secret<String>,
    complete: bool,
}

impl TryFrom<&{{project-name | downcase | pascal_case}}RouterData<&PaymentsAuthorizeRouterData>> for {{project-name | downcase | pascal_case}}PaymentsRequest {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Template stub: every payment method is rejected with NotImplemented
    // until the generated connector fills in its own request mapping.
    fn try_from(item: &{{project-name | downcase | pascal_case}}RouterData<&PaymentsAuthorizeRouterData>) -> Result<Self,Self::Error> {
        match item.router_data.request.payment_method_data.clone() {
            PaymentMethodData::Card(_) => {
                Err(errors::ConnectorError::NotImplemented("Card payment method not implemented".to_string()).into())
            },
            _ => Err(errors::ConnectorError::NotImplemented("Payment method".to_string()).into()),
        }
    }
}
//TODO: Fill the struct with respective fields
// Auth Struct
// Credentials for this connector, extracted from the generic auth config.
pub struct {{project-name | downcase | pascal_case}}AuthType {
    pub(super) api_key: Secret<String>
}

impl TryFrom<&ConnectorAuthType> for {{project-name | downcase | pascal_case}}AuthType {
    type Error = error_stack::Report<errors::ConnectorError>;
    // The template only supports the single-header-key auth scheme; any other
    // configured scheme is reported as FailedToObtainAuthType.
    fn try_from(auth_type: &ConnectorAuthType) -> Result<Self, Self::Error> {
        match auth_type {
            ConnectorAuthType::HeaderKey { api_key } => Ok(Self {
                api_key: api_key.to_owned(),
            }),
            _ => Err(errors::ConnectorError::FailedToObtainAuthType.into()),
        }
    }
}
// PaymentsResponse
//TODO: Append the remaining status flags
// Connector-side payment states; serialized in lowercase on the wire.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum {{project-name | downcase | pascal_case}}PaymentStatus {
    Succeeded,
    Failed,
    #[default]
    Processing,
}

impl From<{{project-name | downcase | pascal_case}}PaymentStatus> for common_enums::AttemptStatus {
    fn from(item: {{project-name | downcase | pascal_case}}PaymentStatus) -> Self {
        match item {
            {{project-name | downcase | pascal_case}}PaymentStatus::Succeeded => Self::Charged,
            {{project-name | downcase | pascal_case}}PaymentStatus::Failed => Self::Failure,
            // Processing maps to Authorizing: the attempt is still in flight.
            {{project-name | downcase | pascal_case}}PaymentStatus::Processing => Self::Authorizing,
        }
    }
}

//TODO: Fill the struct with respective fields
// Minimal payments response: status plus the connector's transaction id.
#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}PaymentsResponse {
    status: {{project-name | downcase | pascal_case}}PaymentStatus,
    id: String,
}

impl<F,T> TryFrom<ResponseRouterData<F, {{project-name | downcase | pascal_case}}PaymentsResponse, T, PaymentsResponseData>> for RouterData<F, T, PaymentsResponseData> {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Folds the connector response back into the framework's RouterData,
    // keeping everything else from the original request (`..item.data`).
    fn try_from(item: ResponseRouterData<F, {{project-name | downcase | pascal_case}}PaymentsResponse, T, PaymentsResponseData>) -> Result<Self,Self::Error> {
        Ok(Self {
            status: common_enums::AttemptStatus::from(item.response.status),
            response: Ok(PaymentsResponseData::TransactionResponse {
                resource_id: ResponseId::ConnectorTransactionId(item.response.id),
                redirection_data: Box::new(None),
                mandate_reference: Box::new(None),
                connector_metadata: None,
                network_txn_id: None,
                connector_response_reference_id: None,
                incremental_authorization_allowed: None,
                charges: None,
            }),
            ..item.data
        })
    }
}
//TODO: Fill the struct with respective fields
// REFUND :
// Type definition for RefundRequest
#[derive(Default, Debug, Serialize)]
pub struct {{project-name | downcase | pascal_case}}RefundRequest {
    pub amount: StringMinorUnit
}

impl<F> TryFrom<&{{project-name | downcase | pascal_case}}RouterData<&RefundsRouterData<F>>> for {{project-name | downcase | pascal_case}}RefundRequest {
    type Error = error_stack::Report<errors::ConnectorError>;
    // The refund body only carries the (already converted) refund amount.
    fn try_from(item: &{{project-name | downcase | pascal_case}}RouterData<&RefundsRouterData<F>>) -> Result<Self,Self::Error> {
        Ok(Self {
            amount: item.amount.to_owned(),
        })
    }
}

// Type definition for Refund Response
#[allow(dead_code)]
#[derive(Debug, Copy, Serialize, Default, Deserialize, Clone)]
pub enum RefundStatus {
    Succeeded,
    Failed,
    #[default]
    Processing,
}

impl From<RefundStatus> for enums::RefundStatus {
    fn from(item: RefundStatus) -> Self {
        match item {
            RefundStatus::Succeeded => Self::Success,
            RefundStatus::Failed => Self::Failure,
            RefundStatus::Processing => Self::Pending,
            //TODO: Review mapping
        }
    }
}

//TODO: Fill the struct with respective fields
// Minimal refund response: connector refund id plus its status.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct RefundResponse {
    id: String,
    status: RefundStatus
}

impl TryFrom<RefundsResponseRouterData<Execute, RefundResponse>>
    for RefundsRouterData<Execute>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    // Maps the refund-create response back onto the router data.
    fn try_from(
        item: RefundsResponseRouterData<Execute, RefundResponse>,
    ) -> Result<Self, Self::Error> {
        Ok(Self {
            response: Ok(RefundsResponseData {
                connector_refund_id: item.response.id.to_string(),
                refund_status: enums::RefundStatus::from(item.response.status),
            }),
            ..item.data
        })
    }
}

impl TryFrom<RefundsResponseRouterData<RSync, RefundResponse>> for RefundsRouterData<RSync>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    // Same mapping for refund-sync; the response shape is identical.
    fn try_from(item: RefundsResponseRouterData<RSync, RefundResponse>) -> Result<Self,Self::Error> {
        Ok(Self {
            response: Ok(RefundsResponseData {
                connector_refund_id: item.response.id.to_string(),
                refund_status: enums::RefundStatus::from(item.response.status),
            }),
            ..item.data
        })
    }
}

//TODO: Fill the struct with respective fields
// Error payload returned by the connector, with optional network_* details.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}ErrorResponse {
    pub status_code: u16,
    pub code: String,
    pub message: String,
    pub reason: Option<String>,
    pub network_advice_code: Option<String>,
    pub network_decline_code: Option<String>,
    pub network_error_message: Option<String>,
}
// File: connector-template/mod.rs
pub mod transformers;
use error_stack::{report, ResultExt};
use masking::{ExposeInterface, Mask};
use common_utils::{
errors::CustomResult,
ext_traits::BytesExt,
types::{AmountConvertor, StringMinorUnit, StringMinorUnitForConnector},
request::{Method, Request, RequestBuilder, RequestContent},
};
use hyperswitch_domain_models::{
router_data::{AccessToken, ConnectorAuthType, ErrorResponse, RouterData},
router_flow_types::{
access_token_auth::AccessTokenAuth,
payments::{
Authorize, Capture, PSync, PaymentMethodToken, Session,
SetupMandate, Void,
},
refunds::{Execute, RSync},
},
router_request_types::{
AccessTokenRequestData, PaymentMethodTokenizationData,
PaymentsAuthorizeData, PaymentsCancelData, PaymentsCaptureData, PaymentsSessionData,
PaymentsSyncData, RefundsData, SetupMandateRequestData,
},
router_response_types::{PaymentsResponseData, RefundsResponseData},
types::{
PaymentsAuthorizeRouterData,
PaymentsCaptureRouterData, PaymentsSyncRouterData, RefundSyncRouterData, RefundsRouterData,
},
};
use hyperswitch_interfaces::{
api::{self, ConnectorCommon, ConnectorCommonExt, ConnectorIntegration, ConnectorValidation, ConnectorSpecifications},
configs::Connectors,
errors,
events::connector_api_logs::ConnectorEvent,
types::{self, Response},
webhooks,
};
use std::sync::LazyLock;
use common_enums::enums;
use hyperswitch_interfaces::api::ConnectorSpecifications;
use hyperswitch_domain_models::router_response_types::{ConnectorInfo, SupportedPaymentMethods};
use crate::{
constants::headers,
types::ResponseRouterData,
utils,
};
use hyperswitch_domain_models::payment_method_data::PaymentMethodData;
use transformers as {{project-name | downcase}};
// Connector struct generated from the template; carries the converter used to
// turn framework minor-unit amounts into this connector's amount format.
#[derive(Clone)]
pub struct {{project-name | downcase | pascal_case}} {
    amount_converter: &'static (dyn AmountConvertor<Output = StringMinorUnit> + Sync)
}

impl {{project-name | downcase | pascal_case}} {
    // Returns a shared instance; the literal borrow is promoted to 'static
    // (Rust const promotion), so no allocation is needed.
    pub fn new() -> &'static Self {
        &Self {
            amount_converter: &StringMinorUnitForConnector
        }
    }
}

// Marker trait impls wiring this connector into each payment/refund flow.
impl api::Payment for {{project-name | downcase | pascal_case}} {}
impl api::PaymentSession for {{project-name | downcase | pascal_case}} {}
impl api::ConnectorAccessToken for {{project-name | downcase | pascal_case}} {}
impl api::MandateSetup for {{project-name | downcase | pascal_case}} {}
impl api::PaymentAuthorize for {{project-name | downcase | pascal_case}} {}
impl api::PaymentSync for {{project-name | downcase | pascal_case}} {}
impl api::PaymentCapture for {{project-name | downcase | pascal_case}} {}
impl api::PaymentVoid for {{project-name | downcase | pascal_case}} {}
impl api::Refund for {{project-name | downcase | pascal_case}} {}
impl api::RefundExecute for {{project-name | downcase | pascal_case}} {}
impl api::RefundSync for {{project-name | downcase | pascal_case}} {}
impl api::PaymentToken for {{project-name | downcase | pascal_case}} {}
// Tokenization flow is intentionally left unimplemented in the template.
impl
    ConnectorIntegration<
        PaymentMethodToken,
        PaymentMethodTokenizationData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    // Not Implemented (R)
}

impl<Flow, Request, Response> ConnectorCommonExt<Flow, Request, Response> for {{project-name | downcase | pascal_case}}
where
    Self: ConnectorIntegration<Flow, Request, Response>,{
    // Default headers for every flow: content type plus the auth header(s).
    fn build_headers(
        &self,
        req: &RouterData<Flow, Request, Response>,
        _connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        let mut header = vec![(
            headers::CONTENT_TYPE.to_string(),
            self.get_content_type().to_string().into(),
        )];
        let mut api_key = self.get_auth_header(&req.connector_auth_type)?;
        header.append(&mut api_key);
        Ok(header)
    }
}
impl ConnectorCommon for {{project-name | downcase | pascal_case}} {
    // Connector identifier used in configs and routing.
    fn id(&self) -> &'static str {
        "{{project-name | downcase}}"
    }

    fn get_currency_unit(&self) -> api::CurrencyUnit {
        todo!()
        // TODO! Check connector documentation, on which unit they are processing the currency.
        // If the connector accepts amount in lower unit ( i.e cents for USD) then return api::CurrencyUnit::Minor,
        // if connector accepts amount in base unit (i.e dollars for USD) then return api::CurrencyUnit::Base
    }

    fn common_get_content_type(&self) -> &'static str {
        "application/json"
    }

    // Base URL comes from the deployment's connector configuration.
    fn base_url<'a>(&self, connectors: &'a Connectors) -> &'a str {
        connectors.{{project-name}}.base_url.as_ref()
    }

    // Builds the Authorization header from the configured API key.
    fn get_auth_header(&self, auth_type:&ConnectorAuthType)-> CustomResult<Vec<(String,masking::Maskable<String>)>,errors::ConnectorError> {
        let auth = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}AuthType::try_from(auth_type)
            .change_context(errors::ConnectorError::FailedToObtainAuthType)?;
        Ok(vec![(headers::AUTHORIZATION.to_string(), auth.api_key.expose().into_masked())])
    }

    // Deserializes the connector's error body into the framework ErrorResponse,
    // logging the raw response and recording it on the connector event.
    fn build_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>,
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        let response: {{project-name | downcase}}::{{project-name | downcase | pascal_case}}ErrorResponse = res
            .response
            .parse_struct("{{project-name | downcase | pascal_case}}ErrorResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        Ok(ErrorResponse {
            status_code: res.status_code,
            code: response.code,
            message: response.message,
            reason: response.reason,
            attempt_status: None,
            connector_transaction_id: None,
            network_advice_code: None,
            network_decline_code: None,
            network_error_message: None,
        })
    }
}
impl ConnectorValidation for {{project-name | downcase | pascal_case}}
{
    // Template default: card mandates are rejected; other methods pass.
    fn validate_mandate_payment(
        &self,
        _pm_type: Option<enums::PaymentMethodType>,
        pm_data: PaymentMethodData,
    ) -> CustomResult<(), errors::ConnectorError> {
        match pm_data {
            PaymentMethodData::Card(_) => Err(errors::ConnectorError::NotImplemented(
                "validate_mandate_payment does not support cards".to_string(),
            )
            .into()),
            _ => Ok(()),
        }
    }

    // No extra constraints on the reference id used for payment sync.
    fn validate_psync_reference_id(
        &self,
        _data: &PaymentsSyncData,
        _is_three_ds: bool,
        _status: enums::AttemptStatus,
        _connector_meta_data: Option<common_utils::pii::SecretSerdeValue>,
    ) -> CustomResult<(), errors::ConnectorError> {
        Ok(())
    }
}
// The flows below keep the trait's default (unimplemented) methods; the
// generated connector fills them in only if the gateway supports the flow.
impl
    ConnectorIntegration<
        Session,
        PaymentsSessionData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    //TODO: implement sessions flow
}

impl ConnectorIntegration<AccessTokenAuth, AccessTokenRequestData, AccessToken>
    for {{project-name | downcase | pascal_case}}
{
}

impl
    ConnectorIntegration<
        SetupMandate,
        SetupMandateRequestData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
}
impl
    ConnectorIntegration<
        Authorize,
        PaymentsAuthorizeData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}} {
    fn get_headers(&self, req: &PaymentsAuthorizeRouterData, connectors: &Connectors,) -> CustomResult<Vec<(String, masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // TODO for the generated connector: point this at the authorize endpoint.
    fn get_url(
        &self,
        _req: &PaymentsAuthorizeRouterData,
        _connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    // Converts the minor-unit amount, then builds the JSON request body.
    fn get_request_body(&self, req: &PaymentsAuthorizeRouterData, _connectors: &Connectors,) -> CustomResult<RequestContent, errors::ConnectorError> {
        let amount = utils::convert_amount(
            self.amount_converter,
            req.request.minor_amount,
            req.request.currency,
        )?;
        let connector_router_data =
            {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::from((
                amount,
                req,
            ));
        let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsRequest::try_from(&connector_router_data)?;
        Ok(RequestContent::Json(Box::new(connector_req)))
    }

    fn build_request(
        &self,
        req: &PaymentsAuthorizeRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&types::PaymentsAuthorizeType::get_url(
                    self, req, connectors,
                )?)
                .attach_default_headers()
                .headers(types::PaymentsAuthorizeType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(types::PaymentsAuthorizeType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }

    // Parses the connector response and folds it back into RouterData.
    fn handle_response(
        &self,
        data: &PaymentsAuthorizeRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsAuthorizeRouterData,errors::ConnectorError> {
        let response: {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsResponse = res.response.parse_struct("{{project-name | downcase | pascal_case}} PaymentsAuthorizeResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
impl
    ConnectorIntegration<PSync, PaymentsSyncData, PaymentsResponseData>
    for {{project-name | downcase | pascal_case}}
{
    fn get_headers(
        &self,
        req: &PaymentsSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // TODO for the generated connector: point this at the payment-status endpoint.
    fn get_url(
        &self,
        _req: &PaymentsSyncRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<String, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    // Sync is a GET: no request body is attached.
    fn build_request(
        &self,
        req: &PaymentsSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Get)
                .url(&types::PaymentsSyncType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::PaymentsSyncType::get_headers(self, req, connectors)?)
                .build(),
        ))
    }

    fn handle_response(
        &self,
        data: &PaymentsSyncRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsSyncRouterData, errors::ConnectorError> {
        let response: {{project-name | downcase}}:: {{project-name | downcase | pascal_case}}PaymentsResponse = res
            .response
            .parse_struct("{{project-name | downcase}} PaymentsSyncResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
impl
    ConnectorIntegration<
        Capture,
        PaymentsCaptureData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    fn get_headers(
        &self,
        req: &PaymentsCaptureRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // TODO for the generated connector: point this at the capture endpoint.
    fn get_url(
        &self,
        _req: &PaymentsCaptureRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<String, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    // TODO for the generated connector: build the capture request body.
    fn get_request_body(
        &self,
        _req: &PaymentsCaptureRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<RequestContent, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_request_body method".to_string()).into())
    }

    fn build_request(
        &self,
        req: &PaymentsCaptureRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&types::PaymentsCaptureType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::PaymentsCaptureType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(types::PaymentsCaptureType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }

    fn handle_response(
        &self,
        data: &PaymentsCaptureRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsCaptureRouterData, errors::ConnectorError> {
        let response: {{project-name | downcase }}::{{project-name | downcase | pascal_case}}PaymentsResponse = res
            .response
            .parse_struct("{{project-name | downcase | pascal_case}} PaymentsCaptureResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
// Void flow keeps the trait defaults until implemented by the connector.
impl
    ConnectorIntegration<
        Void,
        PaymentsCancelData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{}

impl
    ConnectorIntegration<
        Execute,
        RefundsData,
        RefundsResponseData,
    > for {{project-name | downcase | pascal_case}} {
    fn get_headers(&self, req: &RefundsRouterData<Execute>, connectors: &Connectors,) -> CustomResult<Vec<(String,masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // TODO for the generated connector: point this at the refund endpoint.
    fn get_url(
        &self,
        _req: &RefundsRouterData<Execute>,
        _connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    // Converts the refund amount, then builds the JSON refund body.
    fn get_request_body(&self, req: &RefundsRouterData<Execute>, _connectors: &Connectors,) -> CustomResult<RequestContent, errors::ConnectorError> {
        let refund_amount = utils::convert_amount(
            self.amount_converter,
            req.request.minor_refund_amount,
            req.request.currency,
        )?;
        let connector_router_data =
            {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::from((
                refund_amount,
                req,
            ));
        let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RefundRequest::try_from(&connector_router_data)?;
        Ok(RequestContent::Json(Box::new(connector_req)))
    }

    fn build_request(&self, req: &RefundsRouterData<Execute>, connectors: &Connectors,) -> CustomResult<Option<Request>,errors::ConnectorError> {
        let request = RequestBuilder::new()
            .method(Method::Post)
            .url(&types::RefundExecuteType::get_url(self, req, connectors)?)
            .attach_default_headers()
            .headers(types::RefundExecuteType::get_headers(self, req, connectors)?)
            .set_body(types::RefundExecuteType::get_request_body(self, req, connectors)?)
            .build();
        Ok(Some(request))
    }

    fn handle_response(
        &self,
        data: &RefundsRouterData<Execute>,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<RefundsRouterData<Execute>,errors::ConnectorError> {
        let response: {{project-name| downcase}}::RefundResponse = res.response.parse_struct("{{project-name | downcase}} RefundResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
impl
    ConnectorIntegration<RSync, RefundsData, RefundsResponseData> for {{project-name | downcase | pascal_case}} {
    fn get_headers(&self, req: &RefundSyncRouterData,connectors: &Connectors,) -> CustomResult<Vec<(String, masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // TODO for the generated connector: point this at the refund-status endpoint.
    fn get_url(
        &self,
        _req: &RefundSyncRouterData,_connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    fn build_request(
        &self,
        req: &RefundSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Get)
                .url(&types::RefundSyncType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::RefundSyncType::get_headers(self, req, connectors)?)
                .set_body(types::RefundSyncType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }

    fn handle_response(
        &self,
        data: &RefundSyncRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<RefundSyncRouterData,errors::ConnectorError,> {
        let response: {{project-name | downcase}}::RefundResponse = res.response.parse_struct("{{project-name | downcase}} RefundSyncResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
// Incoming-webhook support. The template ships with all hooks returning
// `WebhooksNotImplemented`; connectors that receive webhooks must override
// each of these methods.
#[async_trait::async_trait]
impl webhooks::IncomingWebhook for {{project-name | downcase | pascal_case}} {
    /// Maps a webhook payload to the payment/refund it refers to — stub.
    fn get_webhook_object_reference_id(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<api_models::webhooks::ObjectReferenceId, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
    /// Classifies the webhook into an internal event type — stub.
    fn get_webhook_event_type(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<api_models::webhooks::IncomingWebhookEvent, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
    /// Extracts the raw (maskable) resource object from the webhook — stub.
    fn get_webhook_resource_object(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<Box<dyn masking::ErasedMaskSerialize>, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
}
// Lazily-initialized payment-method support matrix; empty in the template —
// generated connectors are expected to populate it.
static {{project-name | upcase}}_SUPPORTED_PAYMENT_METHODS: LazyLock<SupportedPaymentMethods> =
    LazyLock::new(SupportedPaymentMethods::new);
// Static connector metadata surfaced through `ConnectorSpecifications`.
static {{project-name | upcase}}_CONNECTOR_INFO: ConnectorInfo = ConnectorInfo {
    display_name: "{{project-name | downcase | pascal_case}}",
    description: "{{project-name | downcase | pascal_case}} connector",
    connector_type: enums::HyperswitchConnectorCategory::PaymentGateway,
};
// No webhook flows are advertised by the template out of the box.
static {{project-name | upcase}}_SUPPORTED_WEBHOOK_FLOWS: [enums::EventClass; 0] = [];
// Exposes the static metadata declared above to the routing/feature layers.
impl ConnectorSpecifications for {{project-name | downcase | pascal_case}} {
    fn get_connector_about(&self) -> Option<&'static ConnectorInfo> {
        Some(&{{project-name | upcase}}_CONNECTOR_INFO)
    }
    fn get_supported_payment_methods(&self) -> Option<&'static SupportedPaymentMethods> {
        // Deref the LazyLock to hand out the 'static inner reference.
        Some(&*{{project-name | upcase}}_SUPPORTED_PAYMENT_METHODS)
    }
    fn get_supported_webhook_flows(&self) -> Option<&'static [enums::EventClass]> {
        Some(&{{project-name | upcase}}_SUPPORTED_WEBHOOK_FLOWS)
    }
}
</module>
|
{
"crate": "connector-template",
"file": null,
"files": [
"connector-template/test.rs",
"connector-template/transformers.rs",
"connector-template/mod.rs"
],
"module": "connector-template",
"num_files": 3,
"token_count": 9951
}
|
crate_-7795428478907733935
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: smithy
Files: 1
</path>
<crate>
// File: crates/smithy/src/lib.rs
// crates/smithy/lib.rs - Fixed with proper optional type handling in flattening
use proc_macro::TokenStream;
use proc_macro2::TokenStream as TokenStream2;
use quote::quote;
use smithy_core::{SmithyConstraint, SmithyEnumVariant, SmithyField};
use syn::{parse_macro_input, Attribute, DeriveInput, Fields, Lit, Meta, Variant};
/// Derive macro for generating Smithy models from Rust structs and enums
#[proc_macro_derive(SmithyModel, attributes(smithy))]
pub fn derive_smithy_model(input: TokenStream) -> TokenStream {
    // Parse the annotated item, then either emit the generated impl or turn
    // the error into a compile-time diagnostic at the derive site.
    let parsed = parse_macro_input!(input as DeriveInput);
    generate_smithy_impl(&parsed).map_or_else(|err| err.to_compile_error().into(), Into::into)
}
fn generate_smithy_impl(input: &DeriveInput) -> syn::Result<TokenStream2> {
let name = &input.ident;
let (namespace, is_mixin) = extract_namespace_and_mixin(&input.attrs)?;
match &input.data {
syn::Data::Struct(data_struct) => {
generate_struct_impl(name, &namespace, data_struct, &input.attrs, is_mixin)
}
syn::Data::Enum(data_enum) => generate_enum_impl(name, &namespace, data_enum, &input.attrs),
_ => Err(syn::Error::new_spanned(
input,
"SmithyModel can only be derived for structs and enums",
)),
}
}
/// Generates the `SmithyModelGenerator` impl for a struct.
///
/// Each field carrying `#[smithy(value_type = "...")]` becomes a structure
/// member; `#[serde(flatten)]` fields instead have the members of their inner
/// type merged into this structure. Non-optional fields implicitly gain the
/// `Required` trait. If `is_mixin` is set, the structure is tagged with the
/// Smithy `Mixin` trait.
fn generate_struct_impl(
    name: &syn::Ident,
    namespace: &str,
    data_struct: &syn::DataStruct,
    attrs: &[Attribute],
    is_mixin: bool,
) -> syn::Result<TokenStream2> {
    let fields = extract_fields(&data_struct.fields)?;
    let struct_doc = extract_documentation(attrs);
    let struct_doc_expr = struct_doc
        .as_ref()
        .map(|doc| quote! { Some(#doc.to_string()) })
        .unwrap_or(quote! { None });
    let field_implementations = fields.iter().map(|field| {
        let field_name = &field.name;
        let value_type = &field.value_type;
        let documentation = &field.documentation;
        let constraints = &field.constraints;
        let optional = field.optional;
        let flatten = field.flatten;
        if flatten {
            // Extract the inner type from Option<T> if it's an optional type
            let inner_type = if value_type.starts_with("Option<") && value_type.ends_with('>') {
                let start_idx = "Option<".len();
                let end_idx = value_type.len() - 1;
                &value_type[start_idx..end_idx]
            } else {
                value_type
            };
            // NOTE(review): panics at macro-expansion time if `value_type` is
            // not a parseable Rust type; a spanned error would be friendlier.
            let inner_type_ident = syn::parse_str::<syn::Type>(inner_type).unwrap();
            // For flattened fields, we merge the fields from the inner type
            // but we don't add the field itself to the structure
            quote! {
                {
                    // Generate the inner type's model, splice its members into
                    // ours, and keep any auxiliary shapes it produced.
                    let flattened_model = <#inner_type_ident as smithy_core::SmithyModelGenerator>::generate_smithy_model();
                    let flattened_struct_name = stringify!(#inner_type_ident).to_string();
                    for (shape_name, shape) in flattened_model.shapes {
                        if shape_name == flattened_struct_name {
                            match shape {
                                smithy_core::SmithyShape::Structure { members: flattened_members, .. } |
                                smithy_core::SmithyShape::Union { members: flattened_members, .. } => {
                                    members.extend(flattened_members);
                                }
                                _ => {
                                    // Potentially handle other shapes or log a warning
                                }
                            }
                        } else {
                            shapes.insert(shape_name, shape);
                        }
                    }
                }
            }
        } else {
            let field_doc = documentation
                .as_ref()
                .map(|doc| quote! { Some(#doc.to_string()) })
                .unwrap_or(quote! { None });
            // Non-optional fields are implicitly Required unless the attribute
            // already declared it.
            let mut all_constraints = constraints.clone();
            if !optional && !all_constraints.iter().any(|c| matches!(c, SmithyConstraint::Required)) {
                all_constraints.push(SmithyConstraint::Required);
            }
            // Turn each parsed constraint into a SmithyTrait constructor token.
            let traits = if all_constraints.is_empty() {
                quote! { vec![] }
            } else {
                let trait_tokens = all_constraints
                    .iter()
                    .map(|constraint| match constraint {
                        SmithyConstraint::Pattern(pattern) => quote! {
                            smithy_core::SmithyTrait::Pattern { pattern: #pattern.to_string() }
                        },
                        SmithyConstraint::Range(min, max) => {
                            let min_expr = min.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                            let max_expr = max.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                            quote! {
                                smithy_core::SmithyTrait::Range {
                                    min: #min_expr,
                                    max: #max_expr
                                }
                            }
                        },
                        SmithyConstraint::Length(min, max) => {
                            let min_expr = min.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                            let max_expr = max.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                            quote! {
                                smithy_core::SmithyTrait::Length {
                                    min: #min_expr,
                                    max: #max_expr
                                }
                            }
                        },
                        SmithyConstraint::Required => quote! {
                            smithy_core::SmithyTrait::Required
                        },
                        SmithyConstraint::HttpLabel => quote! {
                            smithy_core::SmithyTrait::HttpLabel
                        },
                        SmithyConstraint::HttpQuery(name) => quote! {
                            smithy_core::SmithyTrait::HttpQuery { name: #name.to_string() }
                        },
                    })
                    .collect::<Vec<_>>();
                quote! { vec![#(#trait_tokens),*] }
            };
            quote! {
                {
                    // Resolve the declared value_type to a Smithy target,
                    // collecting any shapes generated along the way.
                    let (target_type, new_shapes) = smithy_core::types::resolve_type_and_generate_shapes(#value_type, &mut shapes).unwrap();
                    shapes.extend(new_shapes);
                    members.insert(#field_name.to_string(), smithy_core::SmithyMember {
                        target: target_type,
                        documentation: #field_doc,
                        traits: #traits,
                    });
                }
            }
        }
    });
    let traits_expr = if is_mixin {
        quote! { vec![smithy_core::SmithyTrait::Mixin] }
    } else {
        quote! { vec![] }
    };
    // Assemble the generated impl: builds all member shapes, then registers
    // the structure shape itself under the struct's own name.
    let expanded = quote! {
        impl smithy_core::SmithyModelGenerator for #name {
            fn generate_smithy_model() -> smithy_core::SmithyModel {
                let mut shapes = std::collections::HashMap::new();
                let mut members = std::collections::HashMap::new();
                #(#field_implementations;)*
                let shape = smithy_core::SmithyShape::Structure {
                    members,
                    documentation: #struct_doc_expr,
                    traits: #traits_expr
                };
                shapes.insert(stringify!(#name).to_string(), shape);
                smithy_core::SmithyModel {
                    namespace: #namespace.to_string(),
                    shapes
                }
            }
        }
    };
    Ok(expanded)
}
/// Generates the `SmithyModelGenerator` impl for an enum.
///
/// An enum whose variants are all unit variants becomes a Smithy string enum;
/// otherwise it becomes a Smithy union. Variant names are transformed at
/// macro-expansion time according to `#[serde(rename_all = "...")]`.
fn generate_enum_impl(
    name: &syn::Ident,
    namespace: &str,
    data_enum: &syn::DataEnum,
    attrs: &[Attribute],
) -> syn::Result<TokenStream2> {
    let variants = extract_enum_variants(&data_enum.variants)?;
    let serde_enum_attrs = parse_serde_enum_attributes(attrs)?;
    let enum_doc = extract_documentation(attrs);
    let enum_doc_expr = enum_doc
        .as_ref()
        .map(|doc| quote! { Some(#doc.to_string()) })
        .unwrap_or(quote! { None });
    // Check if this is a string enum (all variants are unit variants) or a union
    let is_string_enum = variants.iter().all(|v| v.fields.is_empty());
    if is_string_enum {
        // Generate as Smithy enum
        let variant_implementations = variants
            .iter()
            .map(|variant| {
                let variant_name = &variant.name;
                let variant_doc = variant
                    .documentation
                    .as_ref()
                    .map(|doc| quote! { Some(#doc.to_string()) })
                    .unwrap_or(quote! { None });
                // Apply serde rename transformation if specified
                let rename_all = serde_enum_attrs.rename_all.as_deref();
                let transformed_name = if let Some(rename_pattern) = rename_all {
                    // Generate the transformation at compile time
                    let transformed = transform_variant_name(variant_name, Some(rename_pattern));
                    quote! { #transformed.to_string() }
                } else {
                    quote! { #variant_name.to_string() }
                };
                quote! {
                    enum_values.insert(#transformed_name, smithy_core::SmithyEnumValue {
                        name: #transformed_name,
                        documentation: #variant_doc,
                        is_default: false,
                    });
                }
            })
            .collect::<Vec<_>>();
        let expanded = quote! {
            impl smithy_core::SmithyModelGenerator for #name {
                fn generate_smithy_model() -> smithy_core::SmithyModel {
                    let mut shapes = std::collections::HashMap::new();
                    let mut enum_values = std::collections::HashMap::new();
                    #(#variant_implementations)*
                    let shape = smithy_core::SmithyShape::Enum {
                        values: enum_values,
                        documentation: #enum_doc_expr,
                        traits: vec![]
                    };
                    shapes.insert(stringify!(#name).to_string(), shape);
                    smithy_core::SmithyModel {
                        namespace: #namespace.to_string(),
                        shapes
                    }
                }
            }
        };
        Ok(expanded)
    } else {
        // Generate as Smithy union
        let variant_implementations = variants
            .iter()
            .filter_map(|variant| {
                let variant_name = &variant.name;
                let variant_doc = variant
                    .documentation
                    .as_ref()
                    .map(|doc| quote! { Some(#doc.to_string()) })
                    .unwrap_or(quote! { None });
                let target_type_expr = if variant.fields.is_empty() {
                    // If there are no fields with `value_type`, this variant should be skipped.
                    return None;
                } else if variant.fields.len() == 1 {
                    // Single field - reference the type directly instead of creating a wrapper
                    let field = &variant.fields[0];
                    let field_value_type = &field.value_type;
                    if field_value_type.is_empty() {
                        return None;
                    }
                    quote! {
                        {
                            let (target_type, new_shapes) = smithy_core::types::resolve_type_and_generate_shapes(#field_value_type, &mut shapes).unwrap();
                            shapes.extend(new_shapes);
                            target_type
                        }
                    }
                } else {
                    // Multiple fields - create an inline structure
                    let inline_struct_members = variant.fields.iter().map(|field| {
                        let field_name = &field.name;
                        let field_value_type = &field.value_type;
                        let field_doc = field
                            .documentation
                            .as_ref()
                            .map(|doc| quote! { Some(#doc.to_string()) })
                            .unwrap_or(quote! { None });
                        // Non-optional fields implicitly gain Required, mirroring
                        // the struct-field handling in generate_struct_impl.
                        let mut field_constraints = field.constraints.clone();
                        if !field.optional && !field_constraints.iter().any(|c| matches!(c, SmithyConstraint::Required)) {
                            field_constraints.push(SmithyConstraint::Required);
                        }
                        let field_traits = if field_constraints.is_empty() {
                            quote! { vec![] }
                        } else {
                            let trait_tokens = field_constraints
                                .iter()
                                .map(|constraint| match constraint {
                                    SmithyConstraint::Pattern(pattern) => quote! {
                                        smithy_core::SmithyTrait::Pattern { pattern: #pattern.to_string() }
                                    },
                                    SmithyConstraint::Range(min, max) => {
                                        let min_expr = min.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                                        let max_expr = max.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                                        quote! {
                                            smithy_core::SmithyTrait::Range {
                                                min: #min_expr,
                                                max: #max_expr
                                            }
                                        }
                                    },
                                    SmithyConstraint::Length(min, max) => {
                                        let min_expr = min.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                                        let max_expr = max.map(|v| quote! { Some(#v) }).unwrap_or(quote! { None });
                                        quote! {
                                            smithy_core::SmithyTrait::Length {
                                                min: #min_expr,
                                                max: #max_expr
                                            }
                                        }
                                    },
                                    SmithyConstraint::Required => quote! {
                                        smithy_core::SmithyTrait::Required
                                    },
                                    SmithyConstraint::HttpLabel => quote! {
                                        smithy_core::SmithyTrait::HttpLabel
                                    },
                                    SmithyConstraint::HttpQuery(name) => quote! {
                                        smithy_core::SmithyTrait::HttpQuery { name: #name.to_string() }
                                    },
                                })
                                .collect::<Vec<_>>();
                            quote! { vec![#(#trait_tokens),*] }
                        };
                        quote! {
                            {
                                let (field_target, field_shapes) = smithy_core::types::resolve_type_and_generate_shapes(#field_value_type, &mut shapes).unwrap();
                                shapes.extend(field_shapes);
                                inline_members.insert(#field_name.to_string(), smithy_core::SmithyMember {
                                    target: field_target,
                                    documentation: #field_doc,
                                    traits: #field_traits,
                                });
                            }
                        }
                    });
                    // Register a synthetic "<Enum><Variant>Data" structure and
                    // point the union member at it.
                    quote! {
                        {
                            let inline_struct_name = format!("{}{}Data", stringify!(#name), #variant_name);
                            let mut inline_members = std::collections::HashMap::new();
                            #(#inline_struct_members)*
                            let inline_shape = smithy_core::SmithyShape::Structure {
                                members: inline_members,
                                documentation: None,
                                traits: vec![],
                            };
                            shapes.insert(inline_struct_name.clone(), inline_shape);
                            inline_struct_name
                        }
                    }
                };
                // Apply serde rename transformation if specified
                let rename_all = serde_enum_attrs.rename_all.as_deref();
                let transformed_name = if let Some(rename_pattern) = rename_all {
                    // Generate the transformation at compile time
                    let transformed = transform_variant_name(variant_name, Some(rename_pattern));
                    quote! { #transformed.to_string() }
                } else {
                    quote! { #variant_name.to_string() }
                };
                Some(quote! {
                    let target_type = #target_type_expr;
                    members.insert(#transformed_name, smithy_core::SmithyMember {
                        target: target_type,
                        documentation: #variant_doc,
                        traits: vec![]
                    });
                })
            })
            .collect::<Vec<_>>();
        let expanded = quote! {
            impl smithy_core::SmithyModelGenerator for #name {
                fn generate_smithy_model() -> smithy_core::SmithyModel {
                    let mut shapes = std::collections::HashMap::new();
                    let mut members = std::collections::HashMap::new();
                    #(#variant_implementations;)*
                    let shape = smithy_core::SmithyShape::Union {
                        members,
                        documentation: #enum_doc_expr,
                        traits: vec![]
                    };
                    shapes.insert(stringify!(#name).to_string(), shape);
                    smithy_core::SmithyModel {
                        namespace: #namespace.to_string(),
                        shapes
                    }
                }
            }
        };
        Ok(expanded)
    }
}
/// Reads the container-level `#[smithy(namespace = "...", mixin = ...)]`
/// attribute. Only the first `smithy` attribute is considered; when none is
/// present (or it omits `namespace`), the default namespace is used and
/// `mixin` defaults to `false`.
fn extract_namespace_and_mixin(attrs: &[Attribute]) -> syn::Result<(String, bool)> {
    const DEFAULT_NAMESPACE: &str = "com.hyperswitch.default";
    for attribute in attrs {
        if !attribute.path().is_ident("smithy") {
            continue;
        }
        let mut namespace: Option<String> = None;
        let mut is_mixin = false;
        attribute.parse_nested_meta(|meta| {
            if meta.path.is_ident("namespace") {
                if let Ok(value) = meta.value() {
                    if let Ok(Lit::Str(lit)) = value.parse::<Lit>() {
                        namespace = Some(lit.value());
                    }
                }
            } else if meta.path.is_ident("mixin") {
                if let Ok(value) = meta.value() {
                    if let Ok(Lit::Bool(lit)) = value.parse::<Lit>() {
                        is_mixin = lit.value;
                    }
                }
            }
            Ok(())
        })?; // Propagate parsing errors
        return Ok((
            namespace.unwrap_or_else(|| DEFAULT_NAMESPACE.to_string()),
            is_mixin,
        ));
    }
    Ok((DEFAULT_NAMESPACE.to_string(), false))
}
/// Collects `SmithyField` descriptors from a struct's fields.
///
/// Only named-field structs are supported; tuple/unit structs produce a
/// spanned error. Fields without a `#[smithy(value_type = "...")]` attribute
/// are silently skipped — they simply don't appear in the generated model.
fn extract_fields(fields: &Fields) -> syn::Result<Vec<SmithyField>> {
    let mut smithy_fields = Vec::new();
    match fields {
        Fields::Named(fields_named) => {
            for field in &fields_named.named {
                // Named fields always carry an ident, so this unwrap is safe.
                let field_name = field.ident.as_ref().unwrap().to_string();
                let field_attrs = parse_smithy_field_attributes(&field.attrs)?;
                let serde_attrs = parse_serde_attributes(&field.attrs)?;
                if let Some(value_type) = field_attrs.value_type {
                    let documentation = extract_documentation(&field.attrs);
                    // Optionality is inferred purely from the declared
                    // value_type string, not from the actual Rust field type.
                    let optional = value_type.trim().starts_with("Option<");
                    smithy_fields.push(SmithyField {
                        name: field_name,
                        value_type,
                        constraints: field_attrs.constraints,
                        documentation,
                        optional,
                        flatten: serde_attrs.flatten,
                    });
                }
            }
        }
        _ => {
            return Err(syn::Error::new_spanned(
                fields,
                "Only named fields are supported",
            ))
        }
    }
    Ok(smithy_fields)
}
/// Collects `SmithyEnumVariant` descriptors for every variant of an enum.
///
/// Unit variants have no fields; named fields are handled like struct fields;
/// unnamed (tuple) fields are given synthetic `field_<index>` names and may
/// inherit a `value_type` declared on the variant itself.
fn extract_enum_variants(
    variants: &syn::punctuated::Punctuated<Variant, syn::token::Comma>,
) -> syn::Result<Vec<SmithyEnumVariant>> {
    let mut smithy_variants = Vec::new();
    for variant in variants {
        let variant_name = variant.ident.to_string();
        let documentation = extract_documentation(&variant.attrs);
        let variant_attrs = parse_smithy_field_attributes(&variant.attrs)?;
        // Extract fields from the variant
        let fields = match &variant.fields {
            Fields::Unit => Vec::new(),
            Fields::Named(fields_named) => {
                let mut variant_fields = Vec::new();
                for field in &fields_named.named {
                    // Named fields always have an ident.
                    let field_name = field.ident.as_ref().unwrap().to_string();
                    let field_attrs = parse_smithy_field_attributes(&field.attrs)?;
                    if let Some(value_type) = field_attrs.value_type {
                        let field_documentation = extract_documentation(&field.attrs);
                        let optional = value_type.trim().starts_with("Option<");
                        variant_fields.push(SmithyField {
                            name: field_name,
                            value_type,
                            constraints: field_attrs.constraints,
                            documentation: field_documentation,
                            optional,
                            flatten: false,
                        });
                    }
                }
                variant_fields
            }
            Fields::Unnamed(fields_unnamed) => {
                let mut variant_fields = Vec::new();
                for (index, field) in fields_unnamed.unnamed.iter().enumerate() {
                    let field_name = format!("field_{}", index);
                    let field_attrs = parse_smithy_field_attributes(&field.attrs)?;
                    // For single unnamed fields, use the variant attribute if field doesn't have one
                    let value_type = field_attrs
                        .value_type
                        .or_else(|| variant_attrs.value_type.clone());
                    if let Some(value_type) = value_type {
                        let field_documentation = extract_documentation(&field.attrs);
                        let optional = value_type.trim().starts_with("Option<");
                        variant_fields.push(SmithyField {
                            name: field_name,
                            value_type,
                            constraints: field_attrs.constraints,
                            documentation: field_documentation,
                            optional,
                            flatten: false,
                        });
                    }
                }
                variant_fields
            }
        };
        smithy_variants.push(SmithyEnumVariant {
            name: variant_name,
            fields,
            constraints: variant_attrs.constraints,
            documentation,
        });
    }
    Ok(smithy_variants)
}
// Accumulator for field-level `#[smithy(...)]` attributes.
#[derive(Default)]
struct SmithyFieldAttributes {
    value_type: Option<String>,      // the declared Smithy-facing type string
    constraints: Vec<SmithyConstraint>, // pattern/range/length/required/http_* traits
}
// Accumulator for field-level `#[serde(...)]` attributes we care about.
#[derive(Default)]
struct SerdeAttributes {
    flatten: bool, // set when the field is marked #[serde(flatten)]
}
// Accumulator for container-level `#[serde(...)]` attributes on enums.
#[derive(Default)]
struct SerdeEnumAttributes {
    rename_all: Option<String>, // e.g. "snake_case", "camelCase"
}
fn parse_serde_attributes(attrs: &[Attribute]) -> syn::Result<SerdeAttributes> {
let mut serde_attributes = SerdeAttributes::default();
for attr in attrs {
if attr.path().is_ident("serde") {
if let Ok(list) = attr.meta.require_list() {
if list.path.is_ident("serde") {
for item in list.tokens.clone() {
if let Some(ident) = item.to_string().split_whitespace().next() {
if ident == "flatten" {
serde_attributes.flatten = true;
}
}
}
}
}
}
}
Ok(serde_attributes)
}
/// Parses container-level `#[serde(...)]` attributes on an enum, extracting
/// `rename_all` and tolerantly consuming every other known serde attribute so
/// unrelated serde configuration never breaks the derive.
///
/// # Errors
/// Returns a spanned error (with a hint about consolidating attributes) when
/// the nested meta cannot be parsed.
fn parse_serde_enum_attributes(attrs: &[Attribute]) -> syn::Result<SerdeEnumAttributes> {
    let mut serde_enum_attributes = SerdeEnumAttributes::default();
    for attr in attrs {
        if attr.path().is_ident("serde") {
            // Use more robust parsing that handles all serde attributes
            let parse_result = attr.parse_nested_meta(|meta| {
                if meta.path.is_ident("rename_all") {
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            serde_enum_attributes.rename_all = Some(lit_str.value());
                        }
                    }
                } else if meta.path.is_ident("tag") {
                    // Parse and ignore the tag attribute
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<Lit>();
                    }
                } else if meta.path.is_ident("content") {
                    // Parse and ignore the content attribute
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<Lit>();
                    }
                } else if meta.path.is_ident("rename") {
                    // Parse and ignore the rename attribute (used for enum renaming)
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<Lit>();
                    }
                } else if meta.path.is_ident("deny_unknown_fields") {
                    // Flag attribute with no value - nothing to consume
                } else if meta.path.is_ident("skip_serializing") {
                    // Flag attribute - nothing to consume
                } else if meta.path.is_ident("skip_deserializing") {
                    // Flag attribute - nothing to consume
                } else if meta.path.is_ident("skip_serializing_if") {
                    // Consume and ignore the predicate path
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<syn::Expr>();
                    }
                } else if meta.path.is_ident("default") {
                    // `default` may be bare or carry a value. Call `meta.value()`
                    // exactly once: on success it consumes the `=` token, and a
                    // second call would fail while leaving the value tokens
                    // unconsumed, turning `#[serde(default = "...")]` into a
                    // spurious parse error.
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<syn::Expr>();
                    }
                } else if meta.path.is_ident("flatten") {
                    // Flag attribute - nothing to consume
                } else if meta.path.is_ident("untagged") {
                    // Flag attribute - nothing to consume
                } else if meta.path.is_ident("bound") {
                    // Consume and ignore the bound string
                    if let Ok(value) = meta.value() {
                        let _ = value.parse::<Lit>();
                    }
                }
                // Silently ignore any other serde attributes to prevent parsing errors
                Ok(())
            });
            // If parsing failed, provide a more helpful error message
            if let Err(e) = parse_result {
                return Err(syn::Error::new_spanned(
                    attr,
                    format!("Failed to parse serde attribute: {}. This may be due to multiple serde attributes on separate lines. Consider consolidating them into a single #[serde(...)] attribute.", e)
                ));
            }
        }
    }
    Ok(serde_enum_attributes)
}
/// Applies a serde `rename_all` pattern to a PascalCase variant name at
/// macro-expansion time, mirroring serde's renaming rules.
///
/// Unknown or absent patterns leave the name unchanged. In addition to the
/// original set, `SCREAMING-KEBAB-CASE` is now supported (derived from the
/// screaming-snake form with `_` replaced by `-`), matching serde's container
/// attribute options; previously it silently fell through to no transform.
fn transform_variant_name(name: &str, rename_all: Option<&str>) -> String {
    match rename_all {
        Some("snake_case") => to_snake_case(name),
        Some("camelCase") => to_camel_case(name),
        Some("kebab-case") => to_kebab_case(name),
        Some("PascalCase") => name.to_string(), // No change for PascalCase
        Some("SCREAMING_SNAKE_CASE") => to_screaming_snake_case(name),
        Some("SCREAMING-KEBAB-CASE") => to_screaming_snake_case(name).replace('_', "-"),
        Some("lowercase") => name.to_lowercase(),
        Some("UPPERCASE") => name.to_uppercase(),
        _ => name.to_string(), // No transformation if no rename_all or unknown pattern
    }
}
/// `PascalCase` -> `pascal_case`: inserts `_` before each non-leading
/// uppercase letter, then lowercases everything.
fn to_snake_case(input: &str) -> String {
    let mut result = String::new();
    for ch in input.chars() {
        if ch.is_uppercase() && !result.is_empty() {
            // Add underscore before uppercase letters (except the first character)
            result.push('_');
        }
        result.push(ch.to_lowercase().next().unwrap());
    }
    result
}
/// `PascalCase` -> `pascalCase`: lowercases only the first character,
/// matching serde's camelCase handling of PascalCase variant names.
fn to_camel_case(input: &str) -> String {
    let mut result = String::new();
    let mut chars = input.chars();
    if let Some(ch) = chars.next() {
        result.push(ch.to_lowercase().next().unwrap());
    }
    // Rest of the characters remain the same
    result.extend(chars);
    result
}
/// `PascalCase` -> `pascal-case`: like snake_case but with hyphens.
fn to_kebab_case(input: &str) -> String {
    let mut result = String::new();
    for ch in input.chars() {
        if ch.is_uppercase() && !result.is_empty() {
            // Add hyphen before uppercase letters (except the first character)
            result.push('-');
        }
        result.push(ch.to_lowercase().next().unwrap());
    }
    result
}
/// `PascalCase` -> `PASCAL_CASE`: like snake_case but uppercased.
fn to_screaming_snake_case(input: &str) -> String {
    let mut result = String::new();
    for ch in input.chars() {
        if ch.is_uppercase() && !result.is_empty() {
            // Add underscore before uppercase letters (except the first character)
            result.push('_');
        }
        result.push(ch.to_uppercase().next().unwrap());
    }
    result
}
/// Parses the `#[smithy(...)]` attribute on a field or variant into a
/// `SmithyFieldAttributes`, collecting the declared `value_type` and any
/// constraint traits (pattern/range/length/required/http_label/http_query).
///
/// An `http_label` constraint implicitly adds `Required` as well, since
/// Smithy HTTP labels must always be present.
fn parse_smithy_field_attributes(attrs: &[Attribute]) -> syn::Result<SmithyFieldAttributes> {
    let mut field_attributes = SmithyFieldAttributes::default();
    for attr in attrs {
        if attr.path().is_ident("smithy") {
            attr.parse_nested_meta(|meta| {
                if meta.path.is_ident("value_type") {
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            field_attributes.value_type = Some(lit_str.value());
                        }
                    }
                } else if meta.path.is_ident("pattern") {
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            field_attributes
                                .constraints
                                .push(SmithyConstraint::Pattern(lit_str.value()));
                        }
                    }
                } else if meta.path.is_ident("range") {
                    // range is given as a string like "1..=10" and parsed by parse_range
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            let range_str = lit_str.value();
                            match parse_range(&range_str) {
                                Ok((min, max)) => {
                                    field_attributes
                                        .constraints
                                        .push(SmithyConstraint::Range(min, max));
                                }
                                Err(e) => {
                                    return Err(syn::Error::new_spanned(
                                        &meta.path,
                                        format!("Invalid range: {}", e),
                                    ));
                                }
                            }
                        }
                    }
                } else if meta.path.is_ident("length") {
                    // length is given as a string like "..=64" and parsed by parse_length
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            let length_str = lit_str.value();
                            match parse_length(&length_str) {
                                Ok((min, max)) => {
                                    field_attributes
                                        .constraints
                                        .push(SmithyConstraint::Length(min, max));
                                }
                                Err(e) => {
                                    return Err(syn::Error::new_spanned(
                                        &meta.path,
                                        format!("Invalid length: {}", e),
                                    ));
                                }
                            }
                        }
                    }
                } else if meta.path.is_ident("required") {
                    field_attributes
                        .constraints
                        .push(SmithyConstraint::Required);
                } else if meta.path.is_ident("http_label") {
                    field_attributes
                        .constraints
                        .push(SmithyConstraint::HttpLabel);
                } else if meta.path.is_ident("http_query") {
                    if let Ok(value) = meta.value() {
                        if let Ok(Lit::Str(lit_str)) = value.parse::<Lit>() {
                            field_attributes
                                .constraints
                                .push(SmithyConstraint::HttpQuery(lit_str.value()));
                        }
                    }
                }
                Ok(())
            })?;
        }
    }
    // Automatically add Required for http_label fields
    if field_attributes
        .constraints
        .iter()
        .any(|c| matches!(c, SmithyConstraint::HttpLabel))
        && !field_attributes
            .constraints
            .iter()
            .any(|c| matches!(c, SmithyConstraint::Required))
    {
        field_attributes
            .constraints
            .push(SmithyConstraint::Required);
    }
    Ok(field_attributes)
}
/// Gathers all `///` doc comments on an item into a single space-joined
/// string (each line trimmed), or `None` when the item has no docs.
fn extract_documentation(attrs: &[Attribute]) -> Option<String> {
    let docs: Vec<String> = attrs
        .iter()
        .filter(|attr| attr.path().is_ident("doc"))
        .filter_map(|attr| {
            // Each doc comment lowers to `#[doc = "..."]`, i.e. a name-value
            // meta whose value is a string literal expression.
            if let Meta::NameValue(name_value) = &attr.meta {
                if let syn::Expr::Lit(expr_lit) = &name_value.value {
                    if let Lit::Str(lit_str) = &expr_lit.lit {
                        return Some(lit_str.value().trim().to_string());
                    }
                }
            }
            None
        })
        .collect();
    if docs.is_empty() {
        None
    } else {
        Some(docs.join(" "))
    }
}
/// Parses a Rust-like range string (e.g. `"1..=10"`, `"..5"`, `"3.."`) into
/// optional inclusive `(min, max)` bounds for a Smithy `@range` trait.
///
/// Inclusive ranges (`..=`) keep the upper bound as written; exclusive ranges
/// (`..`) store `max - 1` so the stored bound is always inclusive. An empty
/// side means "unbounded".
///
/// # Errors
/// Returns a descriptive message when the string contains no `..`/`..=`,
/// either part fails to parse as `i64`, or an exclusive upper bound cannot be
/// represented inclusively (`i64::MIN`, which previously overflowed —
/// panicking in debug builds and silently wrapping in release builds).
fn parse_range(range_str: &str) -> Result<(Option<i64>, Option<i64>), String> {
    if range_str.contains("..=") {
        let parts: Vec<&str> = range_str.split("..=").collect();
        if parts.len() != 2 {
            return Err(
                "Invalid range format: must be 'min..=max', '..=max', or 'min..='".to_string(),
            );
        }
        let min = parse_range_bound(parts[0], "min")?;
        let max = parse_range_bound(parts[1], "max")?;
        Ok((min, max))
    } else if range_str.contains("..") {
        let parts: Vec<&str> = range_str.split("..").collect();
        if parts.len() != 2 {
            return Err(
                "Invalid range format: must be 'min..max', '..max', or 'min..'".to_string(),
            );
        }
        let min = parse_range_bound(parts[0], "min")?;
        // Exclusive upper bound: convert to inclusive, rejecting underflow.
        let max = match parse_range_bound(parts[1], "max")? {
            Some(raw) => Some(
                raw.checked_sub(1)
                    .ok_or_else(|| format!("Invalid range max: '{}'", parts[1]))?,
            ),
            None => None,
        };
        Ok((min, max))
    } else {
        Err("Invalid range format: must contain '..' or '..='".to_string())
    }
}
/// Parses one side of a range string; an empty part means "unbounded".
fn parse_range_bound(part: &str, which: &str) -> Result<Option<i64>, String> {
    if part.is_empty() {
        Ok(None)
    } else {
        part.parse()
            .map(Some)
            .map_err(|_| format!("Invalid range {}: '{}'", which, part))
    }
}
/// Parses a length-constraint string (e.g. `"1..=64"`, `"..32"`, `"8.."`)
/// into optional inclusive `(min, max)` bounds for a Smithy `@length` trait.
///
/// Inclusive forms (`..=`) keep the upper bound as written; exclusive forms
/// (`..`) store `max - 1` so the stored bound is always inclusive. An empty
/// side means "unbounded".
///
/// # Errors
/// Returns a descriptive message when the string contains no `..`/`..='`,
/// either part fails to parse as `u64`, or the exclusive upper bound is `0`
/// (an empty range). The `0` case previously underflowed the unsigned
/// subtraction — panicking in debug builds and wrapping to `u64::MAX` in
/// release builds, which silently accepted any length.
fn parse_length(length_str: &str) -> Result<(Option<u64>, Option<u64>), String> {
    if length_str.contains("..=") {
        let parts: Vec<&str> = length_str.split("..=").collect();
        if parts.len() != 2 {
            return Err(
                "Invalid length format: must be 'min..=max', '..=max', or 'min..='".to_string(),
            );
        }
        let min = parse_length_bound(parts[0], "min")?;
        let max = parse_length_bound(parts[1], "max")?;
        Ok((min, max))
    } else if length_str.contains("..") {
        let parts: Vec<&str> = length_str.split("..").collect();
        if parts.len() != 2 {
            return Err(
                "Invalid length format: must be 'min..max', '..max', or 'min..'".to_string(),
            );
        }
        let min = parse_length_bound(parts[0], "min")?;
        // Exclusive upper bound: convert to inclusive, rejecting underflow
        // (an exclusive bound of 0 admits no lengths at all).
        let max = match parse_length_bound(parts[1], "max")? {
            Some(raw) => Some(
                raw.checked_sub(1)
                    .ok_or_else(|| format!("Invalid length max: '{}'", parts[1]))?,
            ),
            None => None,
        };
        Ok((min, max))
    } else {
        Err("Invalid length format: must contain '..' or '..='".to_string())
    }
}
/// Parses one side of a length string; an empty part means "unbounded".
fn parse_length_bound(part: &str, which: &str) -> Result<Option<u64>, String> {
    if part.is_empty() {
        Ok(None)
    } else {
        part.parse()
            .map(Some)
            .map_err(|_| format!("Invalid length {}: '{}'", which, part))
    }
}
</crate>
|
{
"crate": "smithy",
"file": null,
"files": [
"crates/smithy/src/lib.rs"
],
"module": null,
"num_files": 1,
"token_count": 7365
}
|
crate_-6613471232135223333
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: hyperswitch_constraint_graph
Files: 6
</path>
<crate>
// File: crates/hyperswitch_constraint_graph/src/types.rs
use std::{
any::Any,
fmt, hash,
ops::{Deref, DerefMut},
sync::Arc,
};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{dense_map::impl_entity, error::AnalysisTrace};
/// Marker trait for the key half of a graph value (e.g. the "kind" of a
/// directory value). Requires the usual hashing/equality/serde bounds so keys
/// can index maps and appear in serialized traces.
pub trait KeyNode: fmt::Debug + Clone + hash::Hash + serde::Serialize + PartialEq + Eq {}
/// A concrete value stored in the constraint graph; every value can report
/// the key it belongs to via `get_key`.
pub trait ValueNode: fmt::Debug + Clone + hash::Hash + serde::Serialize + PartialEq + Eq {
    type Key: KeyNode;
    /// Returns the key this value is an instance of.
    fn get_key(&self) -> Self::Key;
}
/// Human-readable rendering of a node, used only by the graph visualizer.
#[cfg(feature = "viz")]
pub trait NodeViz {
    fn viz(&self) -> String;
}
// Dense index into the graph's node arena (see `impl_entity!`).
#[derive(Debug, Clone, Copy, serde::Serialize, PartialEq, Eq, Hash)]
#[serde(transparent)]
pub struct NodeId(usize);
impl_entity!(NodeId);
/// A graph node plus its incident edges, stored as edge ids into the graph's
/// edge arena. `preds` are incoming edges, `succs` outgoing ones.
#[derive(Debug)]
pub struct Node<V: ValueNode> {
    pub node_type: NodeType<V>,
    pub preds: Vec<EdgeId>,
    pub succs: Vec<EdgeId>,
}
impl<V: ValueNode> Node<V> {
    /// Creates an isolated node of the given type; edges are attached later
    /// by the graph builder.
    pub(crate) fn new(node_type: NodeType<V>) -> Self {
        Self {
            node_type,
            preds: Vec::new(),
            succs: Vec::new(),
        }
    }
}
/// What a node represents: an aggregator over its predecessors (ALL / ANY /
/// membership in a fixed set) or a plain key/value.
#[derive(Debug, PartialEq, Eq)]
pub enum NodeType<V: ValueNode> {
    AllAggregator,
    AnyAggregator,
    InAggregator(FxHashSet<V>),
    Value(NodeValue<V>),
}
/// Payload of a value node: either a bare key (any value of that key) or a
/// specific value.
#[derive(Debug, Clone, PartialEq, Eq, Hash, serde::Serialize)]
#[serde(tag = "type", content = "value", rename_all = "snake_case")]
pub enum NodeValue<V: ValueNode> {
    Key(<V as ValueNode>::Key),
    Value(V),
}
// Convenience conversion so callers can pass a raw value wherever a
// NodeValue is expected.
impl<V: ValueNode> From<V> for NodeValue<V> {
    fn from(value: V) -> Self {
        Self::Value(value)
    }
}
// Dense index into the graph's edge arena.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct EdgeId(usize);
impl_entity!(EdgeId);
/// How binding an edge's requirement is. The derived `Ord` follows the
/// declaration order (Weak < Normal < Strong), which `get_resolved_strength`
/// relies on.
#[derive(
    Debug, Clone, Copy, serde::Serialize, PartialEq, Eq, Hash, strum::Display, PartialOrd, Ord,
)]
pub enum Strength {
    Weak,
    Normal,
    Strong,
}
impl Strength {
    /// Combines two strengths encountered along a path; the stronger one wins.
    pub fn get_resolved_strength(prev_strength: Self, curr_strength: Self) -> Self {
        std::cmp::max(prev_strength, curr_strength)
    }
}
/// Sign of an edge: `Positive` asserts the predecessor implies/permits the
/// successor, `Negative` that it excludes it.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum Relation {
    Positive,
    Negative,
}
// `true` ⇔ `Positive`, matching the boolean sense used by callers.
impl From<Relation> for bool {
    fn from(value: Relation) -> Self {
        matches!(value, Relation::Positive)
    }
}
/// Result of accumulating relations for a node; `Contradiction` records that
/// both signs were seen.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, strum::Display, serde::Serialize)]
pub enum RelationResolution {
    Positive,
    Negative,
    Contradiction,
}
impl From<Relation> for RelationResolution {
    fn from(value: Relation) -> Self {
        match value {
            Relation::Positive => Self::Positive,
            Relation::Negative => Self::Negative,
        }
    }
}
impl RelationResolution {
    /// Combines a previously resolved relation with the current one: any
    /// disagreement (including with an existing `Contradiction`) collapses to
    /// `Contradiction`; agreement keeps the current value.
    pub fn get_resolved_relation(prev_relation: Self, curr_relation: Self) -> Self {
        if prev_relation != curr_relation {
            Self::Contradiction
        } else {
            curr_relation
        }
    }
}
/// A directed edge from `pred` to `succ`, carrying its strength/sign and an
/// optional domain restriction.
#[derive(Debug, Clone)]
pub struct Edge {
    pub strength: Strength,
    pub relation: Relation,
    pub pred: NodeId,
    pub succ: NodeId,
    pub domain: Option<DomainId>,
}
// Dense index into the graph's domain arena.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct DomainId(usize);
impl_entity!(DomainId);
/// Newtype over a domain's string identifier.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct DomainIdentifier(String);
impl DomainIdentifier {
    pub fn new(identifier: String) -> Self {
        Self(identifier)
    }
    // NOTE(review): despite the `into_` name this takes `&self` and clones;
    // a consuming `fn into_inner(self) -> String` would match Rust naming
    // conventions, but changing the signature would break existing callers.
    pub fn into_inner(&self) -> String {
        self.0.clone()
    }
}
impl From<String> for DomainIdentifier {
    fn from(value: String) -> Self {
        Self(value)
    }
}
/// Identifier plus human-readable description for a domain.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct DomainInfo {
    pub domain_identifier: DomainIdentifier,
    pub domain_description: String,
}
/// The environment a constraint graph is checked against: a set of concrete
/// values that can be queried for presence (at a given strength) or listed
/// per key.
pub trait CheckingContext {
    type Value: ValueNode;
    /// Builds a context from anything convertible into this context's values.
    fn from_node_values<L>(vals: impl IntoIterator<Item = L>) -> Self
    where
        L: Into<Self::Value>;
    /// Returns whether `value` is present in the context under `strength`.
    fn check_presence(&self, value: &NodeValue<Self::Value>, strength: Strength) -> bool;
    /// Returns all context values for the given key, if any.
    fn get_values_by_key(
        &self,
        expected: &<Self::Value as ValueNode>::Key,
    ) -> Option<Vec<Self::Value>>;
}
/// Cache of per-(node, relation, strength) check results so repeated graph
/// traversals don't re-verify the same subproblem. Failures carry the shared
/// analysis trace that explains them.
#[derive(Debug, Clone, serde::Serialize)]
pub struct Memoization<V: ValueNode>(
    #[allow(clippy::type_complexity)]
    FxHashMap<(NodeId, Relation, Strength), Result<(), Arc<AnalysisTrace<V>>>>,
);
impl<V: ValueNode> Memoization<V> {
    pub fn new() -> Self {
        Self(FxHashMap::default())
    }
}
impl<V: ValueNode> Default for Memoization<V> {
    #[inline]
    fn default() -> Self {
        Self::new()
    }
}
// Deref/DerefMut expose the inner map so callers use Memoization like a
// plain FxHashMap.
impl<V: ValueNode> Deref for Memoization<V> {
    type Target = FxHashMap<(NodeId, Relation, Strength), Result<(), Arc<AnalysisTrace<V>>>>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<V: ValueNode> DerefMut for Memoization<V> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Tracks nodes on the current traversal path, mapping each to the strength
/// and relation resolution accumulated so far; used to break cycles.
#[derive(Debug, Clone)]
pub struct CycleCheck(FxHashMap<NodeId, (Strength, RelationResolution)>);
impl Default for CycleCheck {
    #[inline]
    fn default() -> Self {
        Self(FxHashMap::default())
    }
}
impl CycleCheck {
    /// Creates an empty cycle tracker.
    pub fn new() -> Self {
        Self::default()
    }
}
impl Deref for CycleCheck {
    type Target = FxHashMap<NodeId, (Strength, RelationResolution)>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl DerefMut for CycleCheck {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
/// Arbitrary serializable diagnostic payload attachable to nodes;
/// blanket-implemented for any `Serialize + Any + Send + Sync + Debug` type.
pub trait Metadata: erased_serde::Serialize + Any + Send + Sync + fmt::Debug {}
erased_serde::serialize_trait_object!(Metadata);
impl<M> Metadata for M where M: erased_serde::Serialize + Any + Send + Sync + fmt::Debug {}
// File: crates/hyperswitch_constraint_graph/src/graph.rs
use std::sync::{Arc, Weak};
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
builder,
dense_map::DenseMap,
error::{self, AnalysisTrace, GraphError},
types::{
CheckingContext, CycleCheck, DomainId, DomainIdentifier, DomainInfo, Edge, EdgeId,
Memoization, Metadata, Node, NodeId, NodeType, NodeValue, Relation, RelationResolution,
Strength, ValueNode,
},
};
/// Bundles the arguments threaded through the `validate_*` helpers for a
/// single node check, to keep their signatures manageable.
#[derive(Debug)]
struct CheckNodeContext<'a, V: ValueNode, C: CheckingContext<Value = V>> {
    // Context the node is validated against.
    ctx: &'a C,
    // The node being checked.
    node: &'a Node<V>,
    // Id of `node` within the graph.
    node_id: NodeId,
    // Relation requested by the caller for this node.
    relation: Relation,
    // Strength requested by the caller for this node.
    strength: Strength,
    // Memoized results of earlier checks.
    memo: &'a mut Memoization<V>,
    // Nodes on the current traversal path, for cycle detection.
    cycle_map: &'a mut CycleCheck,
    // Optional domain filter restricting which edges are followed.
    domains: Option<&'a [DomainId]>,
}
/// The constraint graph: nodes connected by directed edges, optionally scoped
/// to domains, plus side tables for per-node info strings and metadata.
#[derive(Debug)]
pub struct ConstraintGraph<V: ValueNode> {
    /// All registered domains.
    pub domain: DenseMap<DomainId, DomainInfo>,
    /// Lookup from a domain's identifier to its dense id.
    pub domain_identifier_map: FxHashMap<DomainIdentifier, DomainId>,
    /// All nodes of the graph.
    pub nodes: DenseMap<NodeId, Node<V>>,
    /// All edges of the graph.
    pub edges: DenseMap<EdgeId, Edge>,
    /// Lookup from a node's value to its id (value-node deduplication).
    pub value_map: FxHashMap<NodeValue<V>, NodeId>,
    /// Optional static info string per node (indexed in step with `nodes`).
    pub node_info: DenseMap<NodeId, Option<&'static str>>,
    /// Optional metadata per node (indexed in step with `nodes`).
    pub node_metadata: DenseMap<NodeId, Option<Arc<dyn Metadata>>>,
}
impl<V> ConstraintGraph<V>
where
V: ValueNode,
{
fn get_predecessor_edges_by_domain(
&self,
node_id: NodeId,
domains: Option<&[DomainId]>,
) -> Result<Vec<&Edge>, GraphError<V>> {
let node = self.nodes.get(node_id).ok_or(GraphError::NodeNotFound)?;
let mut final_list = Vec::new();
for &pred in &node.preds {
let edge = self.edges.get(pred).ok_or(GraphError::EdgeNotFound)?;
if let Some((domain_id, domains)) = edge.domain.zip(domains) {
if domains.contains(&domain_id) {
final_list.push(edge);
}
} else if edge.domain.is_none() {
final_list.push(edge);
}
}
Ok(final_list)
}
#[allow(clippy::too_many_arguments)]
pub fn check_node<C>(
&self,
ctx: &C,
node_id: NodeId,
relation: Relation,
strength: Strength,
memo: &mut Memoization<V>,
cycle_map: &mut CycleCheck,
domains: Option<&[String]>,
) -> Result<(), GraphError<V>>
where
C: CheckingContext<Value = V>,
{
let domains = domains
.map(|domain_idents| {
domain_idents
.iter()
.map(|domain_ident| {
self.domain_identifier_map
.get(&DomainIdentifier::new(domain_ident.to_string()))
.copied()
.ok_or(GraphError::DomainNotFound)
})
.collect::<Result<Vec<_>, _>>()
})
.transpose()?;
self.check_node_inner(
ctx,
node_id,
relation,
strength,
memo,
cycle_map,
domains.as_deref(),
)
}
    /// Core recursive node validation (domains already resolved to ids).
    ///
    /// Resolution order:
    /// 1. A memoized result for `(node_id, relation, strength)` is returned
    ///    directly, with error traces re-wrapped as weak references.
    /// 2. If the node is already on the current traversal path (present in
    ///    `cycle_map`), its recorded strength/relation are merged with the
    ///    current ones and `Ok(())` is returned to break the cycle; callers
    ///    inspect the merged entry for contradictions afterwards.
    /// 3. Otherwise the check is dispatched on the node's type.
    #[allow(clippy::too_many_arguments)]
    pub fn check_node_inner<C>(
        &self,
        ctx: &C,
        node_id: NodeId,
        relation: Relation,
        strength: Strength,
        memo: &mut Memoization<V>,
        cycle_map: &mut CycleCheck,
        domains: Option<&[DomainId]>,
    ) -> Result<(), GraphError<V>>
    where
        C: CheckingContext<Value = V>,
    {
        let node = self.nodes.get(node_id).ok_or(GraphError::NodeNotFound)?;
        if let Some(already_memo) = memo.get(&(node_id, relation, strength)) {
            // Cached outcome: clone it, downgrading the Arc trace to a Weak.
            already_memo
                .clone()
                .map_err(|err| GraphError::AnalysisError(Arc::downgrade(&err)))
        } else if let Some((initial_strength, initial_relation)) = cycle_map.get(&node_id).copied()
        {
            // Cycle: merge the in-flight entry with the current requirements
            // instead of recursing forever.
            let strength_relation = Strength::get_resolved_strength(initial_strength, strength);
            let relation_resolve =
                RelationResolution::get_resolved_relation(initial_relation, relation.into());
            cycle_map.entry(node_id).and_modify(|value| {
                value.0 = strength_relation;
                value.1 = relation_resolve
            });
            Ok(())
        } else {
            let check_node_context = CheckNodeContext {
                node,
                node_id,
                relation,
                strength,
                memo,
                cycle_map,
                ctx,
                domains,
            };
            match &node.node_type {
                NodeType::AllAggregator => self.validate_all_aggregator(check_node_context),
                NodeType::AnyAggregator => self.validate_any_aggregator(check_node_context),
                NodeType::InAggregator(expected) => {
                    self.validate_in_aggregator(check_node_context, expected)
                }
                NodeType::Value(val) => self.validate_value_node(check_node_context, val),
            }
        }
    }
    /// Validates an `AllAggregator` node: every applicable predecessor edge
    /// must be satisfied, otherwise the collected failure traces are reported
    /// via `AnalysisTrace::AllAggregation`.
    fn validate_all_aggregator<C>(
        &self,
        vald: CheckNodeContext<'_, V, C>,
    ) -> Result<(), GraphError<V>>
    where
        C: CheckingContext<Value = V>,
    {
        let mut unsatisfied = Vec::<Weak<AnalysisTrace<V>>>::new();
        for edge in self.get_predecessor_edges_by_domain(vald.node_id, vald.domains)? {
            // Mark this node as in-flight so a cycle back to it is merged
            // rather than recursed into.
            vald.cycle_map
                .insert(vald.node_id, (vald.strength, vald.relation.into()));
            if let Err(e) = self.check_node_inner(
                vald.ctx,
                edge.pred,
                edge.relation,
                edge.strength,
                vald.memo,
                vald.cycle_map,
                vald.domains,
            ) {
                unsatisfied.push(e.get_analysis_trace()?);
            }
            // A cycle may have updated our in-flight entry; a contradictory
            // relation resolution fails the aggregation immediately.
            if let Some((_resolved_strength, resolved_relation)) =
                vald.cycle_map.remove(&vald.node_id)
            {
                if resolved_relation == RelationResolution::Contradiction {
                    let err = Arc::new(AnalysisTrace::Contradiction {
                        relation: resolved_relation,
                    });
                    vald.memo.insert(
                        (vald.node_id, vald.relation, vald.strength),
                        Err(Arc::clone(&err)),
                    );
                    return Err(GraphError::AnalysisError(Arc::downgrade(&err)));
                }
            }
        }
        if !unsatisfied.is_empty() {
            let err = Arc::new(AnalysisTrace::AllAggregation {
                unsatisfied,
                info: self.node_info.get(vald.node_id).copied().flatten(),
                metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
            });
            vald.memo.insert(
                (vald.node_id, vald.relation, vald.strength),
                Err(Arc::clone(&err)),
            );
            Err(GraphError::AnalysisError(Arc::downgrade(&err)))
        } else {
            vald.memo
                .insert((vald.node_id, vald.relation, vald.strength), Ok(()));
            Ok(())
        }
    }
fn validate_any_aggregator<C>(
&self,
vald: CheckNodeContext<'_, V, C>,
) -> Result<(), GraphError<V>>
where
C: CheckingContext<Value = V>,
{
let mut unsatisfied = Vec::<Weak<AnalysisTrace<V>>>::new();
let mut matched_one = false;
for edge in self.get_predecessor_edges_by_domain(vald.node_id, vald.domains)? {
vald.cycle_map
.insert(vald.node_id, (vald.strength, vald.relation.into()));
if let Err(e) = self.check_node_inner(
vald.ctx,
edge.pred,
edge.relation,
edge.strength,
vald.memo,
vald.cycle_map,
vald.domains,
) {
unsatisfied.push(e.get_analysis_trace()?);
} else {
matched_one = true;
}
if let Some((_resolved_strength, resolved_relation)) =
vald.cycle_map.remove(&vald.node_id)
{
if resolved_relation == RelationResolution::Contradiction {
let err = Arc::new(AnalysisTrace::Contradiction {
relation: resolved_relation,
});
vald.memo.insert(
(vald.node_id, vald.relation, vald.strength),
Err(Arc::clone(&err)),
);
return Err(GraphError::AnalysisError(Arc::downgrade(&err)));
}
}
}
if matched_one || vald.node.preds.is_empty() {
vald.memo
.insert((vald.node_id, vald.relation, vald.strength), Ok(()));
Ok(())
} else {
let err = Arc::new(AnalysisTrace::AnyAggregation {
unsatisfied: unsatisfied.clone(),
info: self.node_info.get(vald.node_id).copied().flatten(),
metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
});
vald.memo.insert(
(vald.node_id, vald.relation, vald.strength),
Err(Arc::clone(&err)),
);
Err(GraphError::AnalysisError(Arc::downgrade(&err)))
}
}
    /// Validates an `InAggregator` node: every context value under the
    /// aggregator's key must be inside `expected` (positive relation) or
    /// outside it (negative relation). A missing key passes under `Weak`
    /// strength and fails otherwise.
    fn validate_in_aggregator<C>(
        &self,
        vald: CheckNodeContext<'_, V, C>,
        expected: &FxHashSet<V>,
    ) -> Result<(), GraphError<V>>
    where
        C: CheckingContext<Value = V>,
    {
        // All expected values share a key (enforced by make_in_aggregator);
        // take it from the first element.
        let the_key = expected
            .iter()
            .next()
            .ok_or_else(|| GraphError::MalformedGraph {
                reason: "An OnlyIn aggregator node must have at least one expected value"
                    .to_string(),
            })?
            .get_key();
        let ctx_vals = if let Some(vals) = vald.ctx.get_values_by_key(&the_key) {
            vals
        } else {
            // Key absent from the context: only a weak check tolerates this.
            return if let Strength::Weak = vald.strength {
                vald.memo
                    .insert((vald.node_id, vald.relation, vald.strength), Ok(()));
                Ok(())
            } else {
                let err = Arc::new(AnalysisTrace::InAggregation {
                    expected: expected.iter().cloned().collect(),
                    found: None,
                    relation: vald.relation,
                    info: self.node_info.get(vald.node_id).copied().flatten(),
                    metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
                });
                vald.memo.insert(
                    (vald.node_id, vald.relation, vald.strength),
                    Err(Arc::clone(&err)),
                );
                Err(GraphError::AnalysisError(Arc::downgrade(&err)))
            };
        };
        let relation_bool: bool = vald.relation.into();
        for ctx_value in ctx_vals {
            // Membership must agree with the relation for every value.
            if expected.contains(&ctx_value) != relation_bool {
                let err = Arc::new(AnalysisTrace::InAggregation {
                    expected: expected.iter().cloned().collect(),
                    found: Some(ctx_value.clone()),
                    relation: vald.relation,
                    info: self.node_info.get(vald.node_id).copied().flatten(),
                    metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
                });
                vald.memo.insert(
                    (vald.node_id, vald.relation, vald.strength),
                    Err(Arc::clone(&err)),
                );
                // `Err(...)?` here is an early return with the error.
                Err(GraphError::AnalysisError(Arc::downgrade(&err)))?;
            }
        }
        vald.memo
            .insert((vald.node_id, vald.relation, vald.strength), Ok(()));
        Ok(())
    }
fn validate_value_node<C>(
&self,
vald: CheckNodeContext<'_, V, C>,
val: &NodeValue<V>,
) -> Result<(), GraphError<V>>
where
C: CheckingContext<Value = V>,
{
let mut errors = Vec::<Weak<AnalysisTrace<V>>>::new();
let mut matched_one = false;
self.context_analysis(
vald.node_id,
vald.relation,
vald.strength,
vald.ctx,
val,
vald.memo,
)?;
for edge in self.get_predecessor_edges_by_domain(vald.node_id, vald.domains)? {
vald.cycle_map
.insert(vald.node_id, (vald.strength, vald.relation.into()));
let result = self.check_node_inner(
vald.ctx,
edge.pred,
edge.relation,
edge.strength,
vald.memo,
vald.cycle_map,
vald.domains,
);
if let Some((resolved_strength, resolved_relation)) =
vald.cycle_map.remove(&vald.node_id)
{
if resolved_relation == RelationResolution::Contradiction {
let err = Arc::new(AnalysisTrace::Contradiction {
relation: resolved_relation,
});
vald.memo.insert(
(vald.node_id, vald.relation, vald.strength),
Err(Arc::clone(&err)),
);
return Err(GraphError::AnalysisError(Arc::downgrade(&err)));
} else if resolved_strength != vald.strength {
self.context_analysis(
vald.node_id,
vald.relation,
resolved_strength,
vald.ctx,
val,
vald.memo,
)?
}
}
match (edge.strength, result) {
(Strength::Strong, Err(trace)) => {
let err = Arc::new(AnalysisTrace::Value {
value: val.clone(),
relation: vald.relation,
info: self.node_info.get(vald.node_id).copied().flatten(),
metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
predecessors: Some(error::ValueTracePredecessor::Mandatory(Box::new(
trace.get_analysis_trace()?,
))),
});
vald.memo.insert(
(vald.node_id, vald.relation, vald.strength),
Err(Arc::clone(&err)),
);
Err(GraphError::AnalysisError(Arc::downgrade(&err)))?;
}
(Strength::Strong, Ok(_)) => {
matched_one = true;
}
(Strength::Normal | Strength::Weak, Err(trace)) => {
errors.push(trace.get_analysis_trace()?);
}
(Strength::Normal | Strength::Weak, Ok(_)) => {
matched_one = true;
}
}
}
if matched_one || vald.node.preds.is_empty() {
vald.memo
.insert((vald.node_id, vald.relation, vald.strength), Ok(()));
Ok(())
} else {
let err = Arc::new(AnalysisTrace::Value {
value: val.clone(),
relation: vald.relation,
info: self.node_info.get(vald.node_id).copied().flatten(),
metadata: self.node_metadata.get(vald.node_id).cloned().flatten(),
predecessors: Some(error::ValueTracePredecessor::OneOf(errors.clone())),
});
vald.memo.insert(
(vald.node_id, vald.relation, vald.strength),
Err(Arc::clone(&err)),
);
Err(GraphError::AnalysisError(Arc::downgrade(&err)))
}
}
    /// Checks a value node directly against the context: fails (and memoizes
    /// the failure) when the value's presence disagrees with the requested
    /// relation.
    ///
    /// NOTE(review): on success only the negative-relation case is memoized
    /// here; positive successes appear to be memoized by the caller after
    /// predecessor checks — confirm before changing.
    fn context_analysis<C>(
        &self,
        node_id: NodeId,
        relation: Relation,
        strength: Strength,
        ctx: &C,
        val: &NodeValue<V>,
        memo: &mut Memoization<V>,
    ) -> Result<(), GraphError<V>>
    where
        C: CheckingContext<Value = V>,
    {
        let in_context = ctx.check_presence(val, strength);
        let relation_bool: bool = relation.into();
        if in_context != relation_bool {
            let err = Arc::new(AnalysisTrace::Value {
                value: val.clone(),
                relation,
                predecessors: None,
                info: self.node_info.get(node_id).copied().flatten(),
                metadata: self.node_metadata.get(node_id).cloned().flatten(),
            });
            memo.insert((node_id, relation, strength), Err(Arc::clone(&err)));
            // `Err(...)?` here is an early return with the error.
            Err(GraphError::AnalysisError(Arc::downgrade(&err)))?;
        }
        if !relation_bool {
            memo.insert((node_id, relation, strength), Ok(()));
            return Ok(());
        }
        Ok(())
    }
    /// Merges two graphs into a fresh one: domains, nodes and edges from both
    /// inputs are re-created in a new builder, with old ids remapped to new
    /// ones. Shared value nodes are deduplicated by `make_value_node`.
    ///
    /// NOTE(review): per-node `info`/`metadata` are not carried over — the
    /// `add_node` closure always passes `None` for both.
    pub fn combine(g1: &Self, g2: &Self) -> Result<Self, GraphError<V>> {
        let mut node_builder = builder::ConstraintGraphBuilder::new();
        // Old-id -> new-id maps for nodes and domains of each input graph.
        let mut g1_old2new_id = DenseMap::<NodeId, NodeId>::new();
        let mut g2_old2new_id = DenseMap::<NodeId, NodeId>::new();
        let mut g1_old2new_domain_id = DenseMap::<DomainId, DomainId>::new();
        let mut g2_old2new_domain_id = DenseMap::<DomainId, DomainId>::new();
        // Re-registers a domain in the new builder, returning the new id.
        let add_domain = |node_builder: &mut builder::ConstraintGraphBuilder<V>,
                          domain: DomainInfo|
         -> Result<DomainId, GraphError<V>> {
            node_builder.make_domain(
                domain.domain_identifier.into_inner(),
                &domain.domain_description,
            )
        };
        // Re-creates a node of the same type in the new builder.
        let add_node = |node_builder: &mut builder::ConstraintGraphBuilder<V>,
                        node: &Node<V>|
         -> Result<NodeId, GraphError<V>> {
            match &node.node_type {
                NodeType::Value(node_value) => {
                    Ok(node_builder.make_value_node(node_value.clone(), None, None::<()>))
                }
                NodeType::AllAggregator => {
                    Ok(node_builder.make_all_aggregator(&[], None, None::<()>, None)?)
                }
                NodeType::AnyAggregator => {
                    Ok(node_builder.make_any_aggregator(&[], None, None::<()>, None)?)
                }
                NodeType::InAggregator(expected) => Ok(node_builder.make_in_aggregator(
                    expected.iter().cloned().collect(),
                    None,
                    None::<()>,
                )?),
            }
        };
        for (_old_domain_id, domain) in g1.domain.iter() {
            let new_domain_id = add_domain(&mut node_builder, domain.clone())?;
            g1_old2new_domain_id.push(new_domain_id);
        }
        for (_old_domain_id, domain) in g2.domain.iter() {
            let new_domain_id = add_domain(&mut node_builder, domain.clone())?;
            g2_old2new_domain_id.push(new_domain_id);
        }
        for (_old_node_id, node) in g1.nodes.iter() {
            let new_node_id = add_node(&mut node_builder, node)?;
            g1_old2new_id.push(new_node_id);
        }
        for (_old_node_id, node) in g2.nodes.iter() {
            let new_node_id = add_node(&mut node_builder, node)?;
            g2_old2new_id.push(new_node_id);
        }
        // Re-create g1's edges, translating endpoints via the id maps and
        // domains via their identifiers.
        for edge in g1.edges.values() {
            let new_pred_id = g1_old2new_id
                .get(edge.pred)
                .ok_or(GraphError::NodeNotFound)?;
            let new_succ_id = g1_old2new_id
                .get(edge.succ)
                .ok_or(GraphError::NodeNotFound)?;
            let domain_ident = edge
                .domain
                .map(|domain_id| g1.domain.get(domain_id).ok_or(GraphError::DomainNotFound))
                .transpose()?
                .map(|domain| domain.domain_identifier.clone());
            node_builder.make_edge(
                *new_pred_id,
                *new_succ_id,
                edge.strength,
                edge.relation,
                domain_ident,
            )?;
        }
        // Same for g2's edges.
        for edge in g2.edges.values() {
            let new_pred_id = g2_old2new_id
                .get(edge.pred)
                .ok_or(GraphError::NodeNotFound)?;
            let new_succ_id = g2_old2new_id
                .get(edge.succ)
                .ok_or(GraphError::NodeNotFound)?;
            let domain_ident = edge
                .domain
                .map(|domain_id| g2.domain.get(domain_id).ok_or(GraphError::DomainNotFound))
                .transpose()?
                .map(|domain| domain.domain_identifier.clone());
            node_builder.make_edge(
                *new_pred_id,
                *new_succ_id,
                edge.strength,
                edge.relation,
                domain_ident,
            )?;
        }
        Ok(node_builder.build())
    }
}
#[cfg(feature = "viz")]
mod viz {
    //! Graphviz rendering of a constraint graph (behind the `viz` feature).
    use graphviz_rust::{
        dot_generator::*,
        dot_structures::*,
        printer::{DotPrinter, PrinterContext},
    };
    use crate::{dense_map::EntityId, types, ConstraintGraph, NodeViz, ValueNode};
    /// Formats a node id as a graphviz vertex name, e.g. `N42`.
    fn get_node_id(node_id: types::NodeId) -> String {
        format!("N{}", node_id.get_id())
    }
    impl<V> ConstraintGraph<V>
    where
        V: ValueNode + NodeViz,
        <V as ValueNode>::Key: NodeViz,
    {
        /// Builds the quoted label shown on a vertex, based on the node type.
        fn get_node_label(node: &types::Node<V>) -> String {
            let label = match &node.node_type {
                types::NodeType::Value(types::NodeValue::Key(key)) => format!("any {}", key.viz()),
                types::NodeType::Value(types::NodeValue::Value(val)) => {
                    format!("{} = {}", val.get_key().viz(), val.viz())
                }
                types::NodeType::AllAggregator => "&&".to_string(),
                types::NodeType::AnyAggregator => "| |".to_string(),
                types::NodeType::InAggregator(agg) => {
                    // The key is taken from any element of the set; an empty
                    // aggregator gets a placeholder label.
                    let key = if let Some(val) = agg.iter().next() {
                        val.get_key().viz()
                    } else {
                        return "empty in".to_string();
                    };
                    let nodes = agg.iter().map(NodeViz::viz).collect::<Vec<_>>();
                    format!("{key} in [{}]", nodes.join(", "))
                }
            };
            format!("\"{label}\"")
        }
        /// Converts one constraint-graph node into a graphviz node statement.
        fn build_node(cg_node_id: types::NodeId, cg_node: &types::Node<V>) -> Node {
            let viz_node_id = get_node_id(cg_node_id);
            let viz_node_label = Self::get_node_label(cg_node);
            node!(viz_node_id; attr!("label", viz_node_label))
        }
        /// Converts one edge into a graphviz edge statement; strength picks
        /// the arrowhead shape, relation picks the color.
        fn build_edge(cg_edge: &types::Edge) -> Edge {
            let pred_vertex = get_node_id(cg_edge.pred);
            let succ_vertex = get_node_id(cg_edge.succ);
            let arrowhead = match cg_edge.strength {
                types::Strength::Weak => "onormal",
                types::Strength::Normal => "normal",
                types::Strength::Strong => "normalnormal",
            };
            let color = match cg_edge.relation {
                types::Relation::Positive => "blue",
                types::Relation::Negative => "red",
            };
            edge!(
                node_id!(pred_vertex) => node_id!(succ_vertex);
                attr!("arrowhead", arrowhead),
                attr!("color", color)
            )
        }
        /// Builds the whole graph as a strict digraph structure.
        pub fn get_viz_digraph(&self) -> Graph {
            graph!(
                strict di id!("constraint_graph"),
                self.nodes
                    .iter()
                    .map(|(node_id, node)| Self::build_node(node_id, node))
                    .map(Stmt::Node)
                    .chain(self.edges.values().map(Self::build_edge).map(Stmt::Edge))
                    .collect::<Vec<_>>()
            )
        }
        /// Renders the digraph to DOT source text.
        pub fn get_viz_digraph_string(&self) -> String {
            let mut ctx = PrinterContext::default();
            let digraph = self.get_viz_digraph();
            digraph.print(&mut ctx)
        }
    }
}
// File: crates/hyperswitch_constraint_graph/src/error.rs
use std::sync::{Arc, Weak};
use crate::types::{Metadata, NodeValue, Relation, RelationResolution, ValueNode};
/// Pointer(s) to the predecessor trace(s) that caused a value node to fail.
#[derive(Debug, Clone, serde::Serialize)]
#[serde(tag = "type", content = "predecessor", rename_all = "snake_case")]
pub enum ValueTracePredecessor<V: ValueNode> {
    /// A strong (mandatory) predecessor edge failed.
    Mandatory(Box<Weak<AnalysisTrace<V>>>),
    /// None of the normal/weak predecessor edges succeeded; one trace each.
    OneOf(Vec<Weak<AnalysisTrace<V>>>),
}
/// Structured explanation of why a node check failed, one variant per node
/// type plus cycle contradictions. Traces reference each other via `Weak` to
/// avoid reference cycles between shared memoized results.
#[derive(Debug, Clone, serde::Serialize)]
#[serde(tag = "type", content = "trace", rename_all = "snake_case")]
pub enum AnalysisTrace<V: ValueNode> {
    /// A value node failed, directly or through its predecessors.
    Value {
        value: NodeValue<V>,
        relation: Relation,
        predecessors: Option<ValueTracePredecessor<V>>,
        info: Option<&'static str>,
        metadata: Option<Arc<dyn Metadata>>,
    },
    /// An all-aggregator failed; traces of the unsatisfied predecessors.
    AllAggregation {
        unsatisfied: Vec<Weak<AnalysisTrace<V>>>,
        info: Option<&'static str>,
        metadata: Option<Arc<dyn Metadata>>,
    },
    /// An any-aggregator failed; traces of all (unsatisfied) predecessors.
    AnyAggregation {
        unsatisfied: Vec<Weak<AnalysisTrace<V>>>,
        info: Option<&'static str>,
        metadata: Option<Arc<dyn Metadata>>,
    },
    /// An in-aggregator failed; `found` is the offending context value, or
    /// `None` when the key was absent from the context.
    InAggregation {
        expected: Vec<V>,
        found: Option<V>,
        relation: Relation,
        info: Option<&'static str>,
        metadata: Option<Arc<dyn Metadata>>,
    },
    /// A cycle resolved to contradictory relation requirements.
    Contradiction {
        relation: RelationResolution,
    },
}
/// Errors produced while building, combining, or checking a constraint graph.
#[derive(Debug, Clone, serde::Serialize, thiserror::Error)]
#[serde(tag = "type", content = "info", rename_all = "snake_case")]
pub enum GraphError<V: ValueNode> {
    /// An edge id did not resolve to an edge.
    #[error("An edge was not found in the graph")]
    EdgeNotFound,
    /// An edge with the same endpoints/domain exists with different
    /// strength or relation.
    #[error("Attempted to create a conflicting edge between two nodes")]
    ConflictingEdgeCreated,
    #[error("Cycle detected in graph")]
    CycleDetected,
    /// A domain id or identifier did not resolve to a registered domain.
    #[error("Domain wasn't found in the Graph")]
    DomainNotFound,
    /// A structural invariant was violated; `reason` explains which.
    #[error("Malformed Graph: {reason}")]
    MalformedGraph { reason: String },
    /// A node id did not resolve to a node.
    #[error("A node was not found in the graph")]
    NodeNotFound,
    #[error("A value node was not found: {0:#?}")]
    ValueNodeNotFound(V),
    /// `make_in_aggregator` was called with an empty value list.
    #[error("No values provided for an 'in' aggregator node")]
    NoInAggregatorValues,
    /// A node check failed; carries the analysis trace explaining why.
    #[error("Error during analysis: {0:#?}")]
    AnalysisError(Weak<AnalysisTrace<V>>),
}
impl<V: ValueNode> GraphError<V> {
    /// Extracts the analysis trace if this is an `AnalysisError`; any other
    /// variant is handed back unchanged as the `Err` value.
    pub fn get_analysis_trace(self) -> Result<Weak<AnalysisTrace<V>>, Self> {
        match self {
            Self::AnalysisError(inner_trace) => Ok(inner_trace),
            other => Err(other),
        }
    }
}
// File: crates/hyperswitch_constraint_graph/src/lib.rs
pub mod builder;
mod dense_map;
pub mod error;
pub mod graph;
pub mod types;
pub use builder::ConstraintGraphBuilder;
pub use error::{AnalysisTrace, GraphError};
pub use graph::ConstraintGraph;
#[cfg(feature = "viz")]
pub use types::NodeViz;
pub use types::{
CheckingContext, CycleCheck, DomainId, DomainIdentifier, Edge, EdgeId, KeyNode, Memoization,
Node, NodeId, NodeValue, Relation, Strength, ValueNode,
};
// File: crates/hyperswitch_constraint_graph/src/dense_map.rs
use std::{fmt, iter, marker::PhantomData, ops, slice, vec};
/// Abstraction over dense-map keys: a key is a thin wrapper around a `usize`
/// index into the backing vector.
pub trait EntityId {
    /// Returns the underlying index.
    fn get_id(&self) -> usize;
    /// Constructs a key from a raw index.
    fn with_id(id: usize) -> Self;
}
/// Implements [`EntityId`] for a single-field tuple struct wrapping a `usize`.
macro_rules! impl_entity {
    ($name:ident) => {
        impl $crate::dense_map::EntityId for $name {
            #[inline]
            fn get_id(&self) -> usize {
                self.0
            }
            #[inline]
            fn with_id(id: usize) -> Self {
                Self(id)
            }
        }
    };
}
pub(crate) use impl_entity;
/// A map from a typed index `K` to values `V`, backed by a plain `Vec`:
/// keys are handed out sequentially on insertion and index directly into the
/// vector, so lookups are O(1) and keys are never removed.
pub struct DenseMap<K, V> {
    data: Vec<V>,
    _marker: PhantomData<K>,
}
impl<K, V> Default for DenseMap<K, V> {
    fn default() -> Self {
        Self {
            data: Vec::new(),
            _marker: PhantomData,
        }
    }
}
impl<K, V> DenseMap<K, V> {
    /// Creates an empty map.
    pub fn new() -> Self {
        Self::default()
    }
}
impl<K, V> DenseMap<K, V>
where
    K: EntityId,
{
    /// Appends `elem` and returns the key (its index) it was stored under.
    pub fn push(&mut self, elem: V) -> K {
        let curr_len = self.data.len();
        self.data.push(elem);
        K::with_id(curr_len)
    }
    /// Returns a reference to the value stored under `idx`, if in bounds.
    #[inline]
    pub fn get(&self, idx: K) -> Option<&V> {
        self.data.get(idx.get_id())
    }
    /// Returns a mutable reference to the value under `idx`, if in bounds.
    #[inline]
    pub fn get_mut(&mut self, idx: K) -> Option<&mut V> {
        self.data.get_mut(idx.get_id())
    }
    /// Reports whether `key`'s index is within the map (keys are dense, so a
    /// bounds check suffices).
    #[inline]
    pub fn contains_key(&self, key: K) -> bool {
        key.get_id() < self.data.len()
    }
    /// Iterates over all keys.
    #[inline]
    pub fn keys(&self) -> Keys<K> {
        Keys::new(0..self.data.len())
    }
    /// Consumes the map, yielding all keys.
    /// NOTE(review): takes `self` by value although only the length is used.
    #[inline]
    pub fn into_keys(self) -> Keys<K> {
        Keys::new(0..self.data.len())
    }
    /// Iterates over all values by reference.
    #[inline]
    pub fn values(&self) -> slice::Iter<'_, V> {
        self.data.iter()
    }
    /// Iterates over all values by mutable reference.
    #[inline]
    pub fn values_mut(&mut self) -> slice::IterMut<'_, V> {
        self.data.iter_mut()
    }
    /// Consumes the map, yielding all values.
    #[inline]
    pub fn into_values(self) -> vec::IntoIter<V> {
        self.data.into_iter()
    }
    /// Iterates over `(key, &value)` pairs.
    #[inline]
    pub fn iter(&self) -> Iter<'_, K, V> {
        Iter::new(self.data.iter())
    }
    /// Iterates over `(key, &mut value)` pairs.
    #[inline]
    pub fn iter_mut(&mut self) -> IterMut<'_, K, V> {
        IterMut::new(self.data.iter_mut())
    }
}
/// Debug-prints the map as `{key: value, ...}` like a standard map type.
impl<K, V> fmt::Debug for DenseMap<K, V>
where
    K: EntityId + fmt::Debug,
    V: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_map().entries(self.iter()).finish()
    }
}
/// Iterator over the keys of a [`DenseMap`], generated from an index range.
pub struct Keys<K> {
    inner: ops::Range<usize>,
    _marker: PhantomData<K>,
}
impl<K> Keys<K> {
    fn new(range: ops::Range<usize>) -> Self {
        Self {
            inner: range,
            _marker: PhantomData,
        }
    }
}
impl<K> Iterator for Keys<K>
where
    K: EntityId,
{
    type Item = K;
    fn next(&mut self) -> Option<Self::Item> {
        // Each index in the range is lifted back into the key type.
        self.inner.next().map(K::with_id)
    }
}
/// Iterator over `(key, &value)` pairs of a [`DenseMap`].
pub struct Iter<'a, K, V> {
    inner: iter::Enumerate<slice::Iter<'a, V>>,
    _marker: PhantomData<K>,
}
impl<'a, K, V> Iter<'a, K, V> {
    fn new(iter: slice::Iter<'a, V>) -> Self {
        Self {
            inner: iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<'a, K, V> Iterator for Iter<'a, K, V>
where
    K: EntityId,
{
    type Item = (K, &'a V);
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(id, val)| (K::with_id(id), val))
    }
}
/// Iterator over `(key, &mut value)` pairs of a [`DenseMap`].
pub struct IterMut<'a, K, V> {
    inner: iter::Enumerate<slice::IterMut<'a, V>>,
    _marker: PhantomData<K>,
}
impl<'a, K, V> IterMut<'a, K, V> {
    fn new(iter: slice::IterMut<'a, V>) -> Self {
        Self {
            inner: iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<'a, K, V> Iterator for IterMut<'a, K, V>
where
    K: EntityId,
{
    type Item = (K, &'a mut V);
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(id, val)| (K::with_id(id), val))
    }
}
/// Owning iterator over `(key, value)` pairs of a [`DenseMap`].
pub struct IntoIter<K, V> {
    inner: iter::Enumerate<vec::IntoIter<V>>,
    _marker: PhantomData<K>,
}
impl<K, V> IntoIter<K, V> {
    fn new(iter: vec::IntoIter<V>) -> Self {
        Self {
            inner: iter.enumerate(),
            _marker: PhantomData,
        }
    }
}
impl<K, V> Iterator for IntoIter<K, V>
where
    K: EntityId,
{
    type Item = (K, V);
    fn next(&mut self) -> Option<Self::Item> {
        self.inner.next().map(|(id, val)| (K::with_id(id), val))
    }
}
impl<K, V> IntoIterator for DenseMap<K, V>
where
    K: EntityId,
{
    type Item = (K, V);
    type IntoIter = IntoIter<K, V>;
    fn into_iter(self) -> Self::IntoIter {
        IntoIter::new(self.data.into_iter())
    }
}
/// Builds a map directly from values; keys are assigned sequentially.
impl<K, V> FromIterator<V> for DenseMap<K, V>
where
    K: EntityId,
{
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = V>,
    {
        Self {
            data: Vec::from_iter(iter),
            _marker: PhantomData,
        }
    }
}
// File: crates/hyperswitch_constraint_graph/src/builder.rs
use std::sync::Arc;
use rustc_hash::{FxHashMap, FxHashSet};
use crate::{
dense_map::DenseMap,
error::GraphError,
graph::ConstraintGraph,
types::{
DomainId, DomainIdentifier, DomainInfo, Edge, EdgeId, Metadata, Node, NodeId, NodeType,
NodeValue, Relation, Strength, ValueNode,
},
};
/// Either an already-resolved [`DomainId`] or a [`DomainIdentifier`] still
/// to be looked up; lets `make_edge` accept both forms via `Into`.
pub enum DomainIdOrIdentifier {
    DomainId(DomainId),
    DomainIdentifier(DomainIdentifier),
}
/// A raw string is treated as a domain identifier.
impl From<String> for DomainIdOrIdentifier {
    fn from(value: String) -> Self {
        Self::DomainIdentifier(DomainIdentifier::new(value))
    }
}
impl From<DomainIdentifier> for DomainIdOrIdentifier {
    fn from(value: DomainIdentifier) -> Self {
        Self::DomainIdentifier(value)
    }
}
impl From<DomainId> for DomainIdOrIdentifier {
    fn from(value: DomainId) -> Self {
        Self::DomainId(value)
    }
}
/// Mutable builder producing a `ConstraintGraph`; mirrors the graph's fields
/// plus an `edges_map` used only during construction to deduplicate edges.
#[derive(Debug)]
pub struct ConstraintGraphBuilder<V: ValueNode> {
    domain: DenseMap<DomainId, DomainInfo>,
    nodes: DenseMap<NodeId, Node<V>>,
    edges: DenseMap<EdgeId, Edge>,
    // Lookup from a domain's identifier to its dense id.
    domain_identifier_map: FxHashMap<DomainIdentifier, DomainId>,
    // Deduplication map for value nodes.
    value_map: FxHashMap<NodeValue<V>, NodeId>,
    // Deduplication map for edges, keyed by endpoints and optional domain.
    edges_map: FxHashMap<(NodeId, NodeId, Option<DomainId>), EdgeId>,
    node_info: DenseMap<NodeId, Option<&'static str>>,
    node_metadata: DenseMap<NodeId, Option<Arc<dyn Metadata>>>,
}
#[allow(clippy::new_without_default)]
impl<V> ConstraintGraphBuilder<V>
where
V: ValueNode,
{
    /// Creates an empty builder.
    pub fn new() -> Self {
        Self {
            domain: DenseMap::new(),
            nodes: DenseMap::new(),
            edges: DenseMap::new(),
            domain_identifier_map: FxHashMap::default(),
            value_map: FxHashMap::default(),
            edges_map: FxHashMap::default(),
            node_info: DenseMap::new(),
            node_metadata: DenseMap::new(),
        }
    }
    /// Finalizes the builder into an immutable `ConstraintGraph`.
    /// The builder-only `edges_map` is dropped here.
    pub fn build(self) -> ConstraintGraph<V> {
        ConstraintGraph {
            domain: self.domain,
            domain_identifier_map: self.domain_identifier_map,
            nodes: self.nodes,
            edges: self.edges,
            value_map: self.value_map,
            node_info: self.node_info,
            node_metadata: self.node_metadata,
        }
    }
fn retrieve_domain_from_identifier(
&self,
domain_ident: DomainIdentifier,
) -> Result<DomainId, GraphError<V>> {
self.domain_identifier_map
.get(&domain_ident)
.copied()
.ok_or(GraphError::DomainNotFound)
}
pub fn make_domain(
&mut self,
domain_identifier: String,
domain_description: &str,
) -> Result<DomainId, GraphError<V>> {
let domain_identifier = DomainIdentifier::new(domain_identifier);
Ok(self
.domain_identifier_map
.clone()
.get(&domain_identifier)
.map_or_else(
|| {
let domain_id = self.domain.push(DomainInfo {
domain_identifier: domain_identifier.clone(),
domain_description: domain_description.to_string(),
});
self.domain_identifier_map
.insert(domain_identifier, domain_id);
domain_id
},
|domain_id| *domain_id,
))
}
    /// Creates a value node, deduplicating by value: if a node for `value`
    /// already exists its id is returned and `info`/`metadata` are ignored.
    pub fn make_value_node<M: Metadata>(
        &mut self,
        value: NodeValue<V>,
        info: Option<&'static str>,
        metadata: Option<M>,
    ) -> NodeId {
        self.value_map.get(&value).copied().unwrap_or_else(|| {
            let node_id = self.nodes.push(Node::new(NodeType::Value(value.clone())));
            // Side tables are pushed in step with `nodes` so ids line up.
            let _node_info_id = self.node_info.push(info);
            let _node_metadata_id = self
                .node_metadata
                .push(metadata.map(|meta| -> Arc<dyn Metadata> { Arc::new(meta) }));
            self.value_map.insert(value, node_id);
            node_id
        })
    }
    /// Creates (or reuses) an edge from `pred_id` to `succ_id`, optionally
    /// scoped to a domain.
    ///
    /// If an edge between the same endpoints in the same domain already
    /// exists, its id is returned when strength and relation match, and
    /// `ConflictingEdgeCreated` is raised otherwise.
    pub fn make_edge<T: Into<DomainIdOrIdentifier>>(
        &mut self,
        pred_id: NodeId,
        succ_id: NodeId,
        strength: Strength,
        relation: Relation,
        domain: Option<T>,
    ) -> Result<EdgeId, GraphError<V>> {
        self.ensure_node_exists(pred_id)?;
        self.ensure_node_exists(succ_id)?;
        // Resolve the domain argument (id or identifier) to a validated id.
        let domain_id = domain
            .map(|d| match d.into() {
                DomainIdOrIdentifier::DomainIdentifier(ident) => {
                    self.retrieve_domain_from_identifier(ident)
                }
                DomainIdOrIdentifier::DomainId(domain_id) => {
                    self.ensure_domain_exists(domain_id).map(|_| domain_id)
                }
            })
            .transpose()?;
        self.edges_map
            .get(&(pred_id, succ_id, domain_id))
            .copied()
            .and_then(|edge_id| self.edges.get(edge_id).cloned().map(|edge| (edge_id, edge)))
            .map_or_else(
                || {
                    // No existing edge: insert it and link both endpoints.
                    let edge_id = self.edges.push(Edge {
                        strength,
                        relation,
                        pred: pred_id,
                        succ: succ_id,
                        domain: domain_id,
                    });
                    self.edges_map
                        .insert((pred_id, succ_id, domain_id), edge_id);
                    let pred = self
                        .nodes
                        .get_mut(pred_id)
                        .ok_or(GraphError::NodeNotFound)?;
                    pred.succs.push(edge_id);
                    let succ = self
                        .nodes
                        .get_mut(succ_id)
                        .ok_or(GraphError::NodeNotFound)?;
                    succ.preds.push(edge_id);
                    Ok(edge_id)
                },
                |(edge_id, edge)| {
                    // Existing edge: only compatible duplicates are allowed.
                    if edge.strength == strength && edge.relation == relation {
                        Ok(edge_id)
                    } else {
                        Err(GraphError::ConflictingEdgeCreated)
                    }
                },
            )
    }
    /// Creates an `AllAggregator` node and connects every `(node, relation,
    /// strength)` in `nodes` to it as a predecessor edge (optionally scoped
    /// to `domain_id`).
    pub fn make_all_aggregator<M: Metadata>(
        &mut self,
        nodes: &[(NodeId, Relation, Strength)],
        info: Option<&'static str>,
        metadata: Option<M>,
        domain_id: Option<DomainId>,
    ) -> Result<NodeId, GraphError<V>> {
        // Validate all referenced nodes before mutating anything.
        nodes
            .iter()
            .try_for_each(|(node_id, _, _)| self.ensure_node_exists(*node_id))?;
        let aggregator_id = self.nodes.push(Node::new(NodeType::AllAggregator));
        // Side tables are pushed in step with `nodes` so ids line up.
        let _aggregator_info_id = self.node_info.push(info);
        let _node_metadata_id = self
            .node_metadata
            .push(metadata.map(|meta| -> Arc<dyn Metadata> { Arc::new(meta) }))
        ;
        for (node_id, relation, strength) in nodes {
            self.make_edge(*node_id, aggregator_id, *strength, *relation, domain_id)?;
        }
        Ok(aggregator_id)
    }
    /// Creates an `AnyAggregator` node and connects every `(node, relation,
    /// strength)` in `nodes` to it as a predecessor edge (optionally scoped
    /// to `domain_id`).
    pub fn make_any_aggregator<M: Metadata>(
        &mut self,
        nodes: &[(NodeId, Relation, Strength)],
        info: Option<&'static str>,
        metadata: Option<M>,
        domain_id: Option<DomainId>,
    ) -> Result<NodeId, GraphError<V>> {
        // Validate all referenced nodes before mutating anything.
        nodes
            .iter()
            .try_for_each(|(node_id, _, _)| self.ensure_node_exists(*node_id))?;
        let aggregator_id = self.nodes.push(Node::new(NodeType::AnyAggregator));
        // Side tables are pushed in step with `nodes` so ids line up.
        let _aggregator_info_id = self.node_info.push(info);
        let _node_metadata_id = self
            .node_metadata
            .push(metadata.map(|meta| -> Arc<dyn Metadata> { Arc::new(meta) }));
        for (node_id, relation, strength) in nodes {
            self.make_edge(*node_id, aggregator_id, *strength, *relation, domain_id)?;
        }
        Ok(aggregator_id)
    }
pub fn make_in_aggregator<M: Metadata>(
&mut self,
values: Vec<V>,
info: Option<&'static str>,
metadata: Option<M>,
) -> Result<NodeId, GraphError<V>> {
let key = values
.first()
.ok_or(GraphError::NoInAggregatorValues)?
.get_key();
for val in &values {
if val.get_key() != key {
Err(GraphError::MalformedGraph {
reason: "Values for 'In' aggregator not of same key".to_string(),
})?;
}
}
let node_id = self
.nodes
.push(Node::new(NodeType::InAggregator(FxHashSet::from_iter(
values,
))));
let _aggregator_info_id = self.node_info.push(info);
let _node_metadata_id = self
.node_metadata
.push(metadata.map(|meta| -> Arc<dyn Metadata> { Arc::new(meta) }));
Ok(node_id)
}
fn ensure_node_exists(&self, id: NodeId) -> Result<(), GraphError<V>> {
if self.nodes.contains_key(id) {
Ok(())
} else {
Err(GraphError::NodeNotFound)
}
}
fn ensure_domain_exists(&self, id: DomainId) -> Result<(), GraphError<V>> {
if self.domain.contains_key(id) {
Ok(())
} else {
Err(GraphError::DomainNotFound)
}
}
}
</crate>
|
{
"crate": "hyperswitch_constraint_graph",
"file": null,
"files": [
"crates/hyperswitch_constraint_graph/src/types.rs",
"crates/hyperswitch_constraint_graph/src/graph.rs",
"crates/hyperswitch_constraint_graph/src/error.rs",
"crates/hyperswitch_constraint_graph/src/lib.rs",
"crates/hyperswitch_constraint_graph/src/dense_map.rs",
"crates/hyperswitch_constraint_graph/src/builder.rs"
],
"module": null,
"num_files": 6,
"token_count": 10631
}
|
crate_1015594627859431533
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: connector_configs
Files: 5
</path>
<crate>
// File: crates/connector_configs/src/lib.rs
pub mod common_config;
pub mod connector;
pub mod response_modifier;
pub mod transformer;
// File: crates/connector_configs/src/response_modifier.rs
use crate::common_config::{
CardProvider, ConnectorApiIntegrationPayload, DashboardPaymentMethodPayload,
DashboardRequestPayload, Provider,
};
impl ConnectorApiIntegrationPayload {
pub fn get_transformed_response_payload(response: Self) -> DashboardRequestPayload {
let mut wallet_details: Vec<Provider> = Vec::new();
let mut bank_redirect_details: Vec<Provider> = Vec::new();
let mut pay_later_details: Vec<Provider> = Vec::new();
let mut debit_details: Vec<CardProvider> = Vec::new();
let mut credit_details: Vec<CardProvider> = Vec::new();
let mut bank_transfer_details: Vec<Provider> = Vec::new();
let mut crypto_details: Vec<Provider> = Vec::new();
let mut bank_debit_details: Vec<Provider> = Vec::new();
let mut reward_details: Vec<Provider> = Vec::new();
let mut real_time_payment_details: Vec<Provider> = Vec::new();
let mut upi_details: Vec<Provider> = Vec::new();
let mut voucher_details: Vec<Provider> = Vec::new();
let mut gift_card_details: Vec<Provider> = Vec::new();
let mut card_redirect_details: Vec<Provider> = Vec::new();
let mut open_banking_details: Vec<Provider> = Vec::new();
let mut mobile_payment_details: Vec<Provider> = Vec::new();
if let Some(payment_methods_enabled) = response.payment_methods_enabled.clone() {
for methods in payment_methods_enabled {
match methods.payment_method {
api_models::enums::PaymentMethod::Card => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
match method_type.payment_method_type {
api_models::enums::PaymentMethodType::Credit => {
if let Some(card_networks) = method_type.card_networks {
for card in card_networks {
credit_details.push(CardProvider {
payment_method_type: card,
accepted_currencies: method_type
.accepted_currencies
.clone(),
accepted_countries: method_type
.accepted_countries
.clone(),
})
}
}
}
api_models::enums::PaymentMethodType::Debit => {
if let Some(card_networks) = method_type.card_networks {
for card in card_networks {
// debit_details.push(card)
debit_details.push(CardProvider {
payment_method_type: card,
accepted_currencies: method_type
.accepted_currencies
.clone(),
accepted_countries: method_type
.accepted_countries
.clone(),
})
}
}
}
_ => (),
}
}
}
}
api_models::enums::PaymentMethod::Wallet => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
// wallet_details.push(method_type.payment_method_type)
wallet_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::BankRedirect => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
bank_redirect_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::PayLater => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
pay_later_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::BankTransfer => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
bank_transfer_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::Crypto => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
crypto_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::BankDebit => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
bank_debit_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::Reward => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
reward_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::RealTimePayment => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
real_time_payment_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::OpenBanking => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
open_banking_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::Upi => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
upi_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::Voucher => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
voucher_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::GiftCard => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
gift_card_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::CardRedirect => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
card_redirect_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
api_models::enums::PaymentMethod::MobilePayment => {
if let Some(payment_method_types) = methods.payment_method_types {
for method_type in payment_method_types {
mobile_payment_details.push(Provider {
payment_method_type: method_type.payment_method_type,
accepted_currencies: method_type.accepted_currencies.clone(),
accepted_countries: method_type.accepted_countries.clone(),
payment_experience: method_type.payment_experience,
})
}
}
}
}
}
}
let open_banking = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::OpenBanking,
payment_method_type: api_models::enums::PaymentMethod::OpenBanking.to_string(),
provider: Some(open_banking_details),
card_provider: None,
};
let upi = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Upi,
payment_method_type: api_models::enums::PaymentMethod::Upi.to_string(),
provider: Some(upi_details),
card_provider: None,
};
let voucher: DashboardPaymentMethodPayload = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Voucher,
payment_method_type: api_models::enums::PaymentMethod::Voucher.to_string(),
provider: Some(voucher_details),
card_provider: None,
};
let gift_card: DashboardPaymentMethodPayload = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::GiftCard,
payment_method_type: api_models::enums::PaymentMethod::GiftCard.to_string(),
provider: Some(gift_card_details),
card_provider: None,
};
let reward = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Reward,
payment_method_type: api_models::enums::PaymentMethod::Reward.to_string(),
provider: Some(reward_details),
card_provider: None,
};
let real_time_payment = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::RealTimePayment,
payment_method_type: api_models::enums::PaymentMethod::RealTimePayment.to_string(),
provider: Some(real_time_payment_details),
card_provider: None,
};
let wallet = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Wallet,
payment_method_type: api_models::enums::PaymentMethod::Wallet.to_string(),
provider: Some(wallet_details),
card_provider: None,
};
let bank_redirect = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::BankRedirect,
payment_method_type: api_models::enums::PaymentMethod::BankRedirect.to_string(),
provider: Some(bank_redirect_details),
card_provider: None,
};
let bank_debit = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::BankDebit,
payment_method_type: api_models::enums::PaymentMethod::BankDebit.to_string(),
provider: Some(bank_debit_details),
card_provider: None,
};
let bank_transfer = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::BankTransfer,
payment_method_type: api_models::enums::PaymentMethod::BankTransfer.to_string(),
provider: Some(bank_transfer_details),
card_provider: None,
};
let crypto = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Crypto,
payment_method_type: api_models::enums::PaymentMethod::Crypto.to_string(),
provider: Some(crypto_details),
card_provider: None,
};
let card_redirect = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::CardRedirect,
payment_method_type: api_models::enums::PaymentMethod::CardRedirect.to_string(),
provider: Some(card_redirect_details),
card_provider: None,
};
let pay_later = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::PayLater,
payment_method_type: api_models::enums::PaymentMethod::PayLater.to_string(),
provider: Some(pay_later_details),
card_provider: None,
};
let debit_details = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Card,
payment_method_type: api_models::enums::PaymentMethodType::Debit.to_string(),
provider: None,
card_provider: Some(debit_details),
};
let credit_details = DashboardPaymentMethodPayload {
payment_method: api_models::enums::PaymentMethod::Card,
payment_method_type: api_models::enums::PaymentMethodType::Credit.to_string(),
provider: None,
card_provider: Some(credit_details),
};
DashboardRequestPayload {
connector: response.connector_name,
payment_methods_enabled: Some(vec![
open_banking,
upi,
voucher,
reward,
real_time_payment,
wallet,
bank_redirect,
bank_debit,
bank_transfer,
crypto,
card_redirect,
pay_later,
debit_details,
credit_details,
gift_card,
]),
metadata: response.metadata,
}
}
}
// File: crates/connector_configs/src/common_config.rs
use api_models::{payment_methods, payments};
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;
/// Apple Pay credentials in the shape used by the Zen connector.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub struct ZenApplePay {
    pub terminal_uuid: Option<String>,
    pub pay_wall_secret: Option<String>,
}
/// Untagged union of the supported Apple Pay metadata shapes; deserialization
/// picks the first variant that matches the input.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(untagged)]
pub enum ApplePayData {
    ApplePay(payments::ApplePayMetadata),
    ApplePayCombined(payments::ApplePayCombinedMetadata),
    Zen(ZenApplePay),
}
/// Google Pay configuration as edited in the dashboard.
///
/// The rename/alias gymnastics on `stripe_publishable_key` accept both the
/// `stripe:publishable_key` and `stripe_publishable_key` spellings on input
/// while always serializing as `stripe_publishable_key`.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct GpayDashboardPayLoad {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub gateway_merchant_id: Option<String>,
    // Serialized under the Stripe-namespaced key "stripe:version".
    #[serde(skip_serializing_if = "Option::is_none", rename = "stripe:version")]
    pub stripe_version: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    #[serde(rename(
        serialize = "stripe_publishable_key",
        deserialize = "stripe:publishable_key"
    ))]
    #[serde(alias = "stripe:publishable_key")]
    #[serde(alias = "stripe_publishable_key")]
    pub stripe_publishable_key: Option<String>,
    pub merchant_name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub merchant_id: Option<String>,
}
/// Google Pay credentials in the shape used by the Zen connector.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub struct ZenGooglePay {
    pub terminal_uuid: Option<String>,
    pub pay_wall_secret: Option<String>,
}
/// Untagged union of the supported Google Pay dashboard shapes.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(untagged)]
pub enum GooglePayData {
    Standard(GpayDashboardPayLoad),
    Zen(ZenGooglePay),
}
/// PayPal SDK configuration (client id only).
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct PaypalSdkData {
    pub client_id: Option<String>,
}
/// Untagged union of the Google Pay shapes used on the API-model side.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(untagged)]
pub enum GoogleApiModelData {
    Standard(payments::GpayMetaData),
    Zen(ZenGooglePay),
}
/// A payment method together with the specific types enabled for it.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, ToSchema)]
#[serde(rename_all = "snake_case")]
pub struct PaymentMethodsEnabled {
    pub payment_method: api_models::enums::PaymentMethod,
    pub payment_method_types: Option<Vec<payment_methods::RequestPaymentMethodTypes>>,
}
/// Connector metadata values captured from the dashboard; every field is
/// optional since different connectors require different subsets.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
#[serde(rename_all = "snake_case")]
pub struct ApiModelMetaData {
    pub merchant_config_currency: Option<api_models::enums::Currency>,
    pub merchant_account_id: Option<String>,
    pub account_name: Option<String>,
    pub terminal_id: Option<String>,
    pub merchant_id: Option<String>,
    // Wallet SDK configurations.
    pub google_pay: Option<GoogleApiModelData>,
    pub paypal_sdk: Option<PaypalSdkData>,
    pub apple_pay: Option<ApplePayData>,
    pub apple_pay_combined: Option<ApplePayData>,
    pub endpoint_prefix: Option<String>,
    pub mcc: Option<String>,
    pub merchant_country_code: Option<String>,
    pub merchant_name: Option<String>,
    // Acquirer / 3DS requestor details.
    pub acquirer_bin: Option<String>,
    pub acquirer_merchant_id: Option<String>,
    pub acquirer_country_code: Option<String>,
    pub three_ds_requestor_name: Option<String>,
    pub three_ds_requestor_id: Option<String>,
    pub pull_mechanism_for_external_3ds_enabled: Option<bool>,
    pub klarna_region: Option<KlarnaEndpoint>,
    pub source_balance_account: Option<String>,
    pub brand_id: Option<String>,
    pub destination_account_number: Option<String>,
    pub dpa_id: Option<String>,
    pub dpa_name: Option<String>,
    pub locale: Option<String>,
    pub card_brands: Option<Vec<String>>,
    pub merchant_category_code: Option<String>,
    pub merchant_configuration_id: Option<String>,
    pub tenant_id: Option<String>,
    pub platform_url: Option<String>,
    pub account_id: Option<serde_json::Value>,
    pub site: Option<String>,
}
/// Klarna regional endpoint selector.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, Serialize, Deserialize, ToSchema)]
pub enum KlarnaEndpoint {
    Europe,
    NorthAmerica,
    Oceania,
}
/// A card network enabled on a connector, with optional currency/country
/// restrictions.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, ToSchema, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct CardProvider {
    /// The card network (e.g. Visa, Mastercard) this entry enables.
    pub payment_method_type: api_models::enums::CardNetwork,
    /// List of currencies accepted or has the processing capabilities of the processor
    #[schema(example = json!(
        {
            "type": "specific_accepted",
            "list": ["USD", "INR"]
        }
    ), value_type = Option<AcceptedCurrencies>)]
    pub accepted_currencies: Option<api_models::admin::AcceptedCurrencies>,
    /// List of countries accepted by the processor.
    #[schema(example = json!(
        {
            "type": "specific_accepted",
            "list": ["UK", "AU"]
        }
    ), value_type = Option<AcceptedCountries>)]
    pub accepted_countries: Option<api_models::admin::AcceptedCountries>,
}
/// A non-card payment method type enabled on a connector, with optional
/// currency/country restrictions and payment experience.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, ToSchema, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct Provider {
    /// The specific payment method type this entry enables.
    pub payment_method_type: api_models::enums::PaymentMethodType,
    /// List of currencies accepted or has the processing capabilities of the processor
    #[schema(example = json!(
        {
            "type": "specific_accepted",
            "list": ["USD", "INR"]
        }
    ), value_type = Option<AcceptedCurrencies>)]
    pub accepted_currencies: Option<api_models::admin::AcceptedCurrencies>,
    /// List of countries accepted by the processor.
    #[schema(example = json!(
        {
            "type": "specific_accepted",
            "list": ["UK", "AU"]
        }
    ), value_type = Option<AcceptedCountries>)]
    pub accepted_countries: Option<api_models::admin::AcceptedCountries>,
    /// How the customer experiences the payment (redirect, SDK, QR, ...).
    pub payment_experience: Option<api_models::enums::PaymentExperience>,
}
/// Connector account configuration in the shape exchanged with the
/// connector account API.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize, ToSchema)]
#[serde(rename_all = "snake_case")]
pub struct ConnectorApiIntegrationPayload {
    pub connector_type: String,
    pub profile_id: common_utils::id_type::ProfileId,
    pub connector_name: api_models::enums::Connector,
    // Computed server-side; never read from incoming payloads.
    #[serde(skip_deserializing)]
    #[schema(example = "stripe_US_travel")]
    pub connector_label: Option<String>,
    pub merchant_connector_id: Option<String>,
    pub disabled: bool,
    pub test_mode: bool,
    pub payment_methods_enabled: Option<Vec<PaymentMethodsEnabled>>,
    pub metadata: Option<ApiModelMetaData>,
    pub connector_webhook_details: Option<api_models::admin::MerchantConnectorWebhookDetails>,
}
/// A single payment-method entry in the dashboard payload. Exactly one of
/// `provider` (non-card methods) or `card_provider` (card methods) is
/// populated by the transformation code in `response_modifier`.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct DashboardPaymentMethodPayload {
    pub payment_method: api_models::enums::PaymentMethod,
    // Stringified payment method (or Credit/Debit for cards).
    pub payment_method_type: String,
    pub provider: Option<Vec<Provider>>,
    pub card_provider: Option<Vec<CardProvider>>,
}
/// Payload sent by the dashboard describing a connector's desired
/// configuration: which payment methods to enable plus connector metadata.
///
/// Fix: `#[serde_with::skip_serializing_none]` must be placed BEFORE the
/// `#[derive]` attribute to take effect (it rewrites the fields the serde
/// derive sees); it previously appeared after the derive and was a no-op.
/// Every other use in this file already places it first.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub struct DashboardRequestPayload {
    /// Target connector this configuration applies to.
    pub connector: api_models::enums::Connector,
    /// Payment methods (and their types) to enable on the connector.
    pub payment_methods_enabled: Option<Vec<DashboardPaymentMethodPayload>>,
    /// Connector-specific metadata captured from the dashboard form.
    pub metadata: Option<ApiModelMetaData>,
}
/// Kind of dashboard form control, adjacently tagged as
/// `{"type": ..., "options": ...}`; the list variants carry their choices.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(tag = "type", content = "options")]
pub enum InputType {
    Text,
    Number,
    Toggle,
    Radio(Vec<String>),
    Select(Vec<String>),
    MultiSelect(Vec<String>),
}
/// Descriptor for a single dashboard form field.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, serde::Serialize, Clone)]
#[serde(rename_all = "snake_case")]
pub struct InputData {
    pub name: String,
    pub label: String,
    pub placeholder: String,
    pub required: bool,
    // Flattened so `type`/`options` appear at the same JSON level.
    #[serde(flatten)]
    pub input_type: InputType,
}
// File: crates/connector_configs/src/transformer.rs
use std::str::FromStr;
use api_models::{
enums::{
Connector, PaymentMethod,
PaymentMethodType::{self, AliPay, ApplePay, GooglePay, Klarna, Paypal, WeChatPay},
},
payment_methods,
refunds::MinorUnit,
};
use crate::common_config::{
ConnectorApiIntegrationPayload, DashboardRequestPayload, PaymentMethodsEnabled, Provider,
};
impl DashboardRequestPayload {
    /// Default (minimum, maximum) amount limits, in minor units, applied to
    /// every enabled payment method type.
    ///
    /// Previously the literals were duplicated in three places; they are now
    /// defined once here.
    fn default_amount_limits() -> (Option<MinorUnit>, Option<MinorUnit>) {
        // 68607706 is the pre-existing dashboard-wide default maximum.
        (Some(MinorUnit::zero()), Some(MinorUnit::new(68607706)))
    }

    /// Builds a card payment-method-type entry carrying the given card
    /// networks and the default amount limits.
    pub fn transform_card(
        payment_method_type: PaymentMethodType,
        card_provider: Vec<api_models::enums::CardNetwork>,
    ) -> payment_methods::RequestPaymentMethodTypes {
        let (minimum_amount, maximum_amount) = Self::default_amount_limits();
        payment_methods::RequestPaymentMethodTypes {
            payment_method_type,
            card_networks: Some(card_provider),
            minimum_amount,
            maximum_amount,
            recurring_enabled: Some(true),
            installment_payment_enabled: Some(false),
            accepted_currencies: None,
            accepted_countries: None,
            payment_experience: None,
        }
    }

    /// Resolves the payment experience for a
    /// (connector, payment method type) combination.
    ///
    /// Bank redirects never carry an experience; a handful of
    /// connector-specific pairings override the generic defaults below.
    pub fn get_payment_experience(
        connector: Connector,
        payment_method_type: PaymentMethodType,
        payment_method: PaymentMethod,
        payment_experience: Option<api_models::enums::PaymentExperience>,
    ) -> Option<api_models::enums::PaymentExperience> {
        match payment_method {
            PaymentMethod::BankRedirect => None,
            _ => match (connector, payment_method_type) {
                #[cfg(feature = "dummy_connector")]
                (Connector::DummyConnector4, _) | (Connector::DummyConnector7, _) => {
                    Some(api_models::enums::PaymentExperience::RedirectToUrl)
                }
                // Paypal/Klarna keep whatever experience the dashboard sent.
                (Connector::Paypal, Paypal) => payment_experience,
                (Connector::Klarna, Klarna) => payment_experience,
                (Connector::Zen, GooglePay) | (Connector::Zen, ApplePay) => {
                    Some(api_models::enums::PaymentExperience::RedirectToUrl)
                }
                (Connector::Braintree, Paypal) => {
                    Some(api_models::enums::PaymentExperience::InvokeSdkClient)
                }
                (Connector::Globepay, AliPay)
                | (Connector::Globepay, WeChatPay)
                | (Connector::Stripe, WeChatPay) => {
                    Some(api_models::enums::PaymentExperience::DisplayQrCode)
                }
                // SDK-driven wallets default to the in-app SDK flow.
                (_, GooglePay)
                | (_, ApplePay)
                | (_, PaymentMethodType::SamsungPay)
                | (_, PaymentMethodType::Paze)
                | (_, PaymentMethodType::AmazonPay) => {
                    Some(api_models::enums::PaymentExperience::InvokeSdkClient)
                }
                (_, PaymentMethodType::DirectCarrierBilling) => {
                    Some(api_models::enums::PaymentExperience::CollectOtp)
                }
                (_, PaymentMethodType::Cashapp) | (_, PaymentMethodType::Swish) => {
                    Some(api_models::enums::PaymentExperience::DisplayQrCode)
                }
                _ => Some(api_models::enums::PaymentExperience::RedirectToUrl),
            },
        }
    }

    /// Converts dashboard `Provider` entries for a non-card payment method
    /// into API-model payment method types with default amount limits and a
    /// resolved payment experience.
    pub fn transform_payment_method(
        connector: Connector,
        provider: Vec<Provider>,
        payment_method: PaymentMethod,
    ) -> Vec<payment_methods::RequestPaymentMethodTypes> {
        provider
            .into_iter()
            .map(|method_type| {
                let (minimum_amount, maximum_amount) = Self::default_amount_limits();
                payment_methods::RequestPaymentMethodTypes {
                    payment_method_type: method_type.payment_method_type,
                    card_networks: None,
                    minimum_amount,
                    maximum_amount,
                    recurring_enabled: Some(true),
                    installment_payment_enabled: Some(false),
                    accepted_currencies: method_type.accepted_currencies,
                    accepted_countries: method_type.accepted_countries,
                    payment_experience: Self::get_payment_experience(
                        connector,
                        method_type.payment_method_type,
                        payment_method,
                        method_type.payment_experience,
                    ),
                }
            })
            .collect()
    }

    /// Merges the dashboard request with the existing connector
    /// configuration, producing the payload sent to the connector account
    /// API. Card entries are accumulated into a single `Card` group; other
    /// methods are grouped per payment method.
    pub fn create_connector_request(
        request: Self,
        api_response: ConnectorApiIntegrationPayload,
    ) -> ConnectorApiIntegrationPayload {
        let mut card_payment_method_types = Vec::new();
        let mut payment_method_enabled = Vec::new();
        // `request` is owned; move the enabled list out instead of cloning.
        if let Some(payment_methods_enabled) = request.payment_methods_enabled {
            for payload in payment_methods_enabled {
                match payload.payment_method {
                    PaymentMethod::Card => {
                        if let Some(card_provider) = payload.card_provider {
                            // Entries whose payment_method_type string does not
                            // parse into a known PaymentMethodType are skipped,
                            // matching the previous behaviour (the old code
                            // built an error string that was never used).
                            if let Ok(payment_type) =
                                PaymentMethodType::from_str(&payload.payment_method_type)
                            {
                                for method in card_provider {
                                    let (minimum_amount, maximum_amount) =
                                        Self::default_amount_limits();
                                    card_payment_method_types.push(
                                        payment_methods::RequestPaymentMethodTypes {
                                            payment_method_type: payment_type,
                                            card_networks: Some(vec![
                                                method.payment_method_type,
                                            ]),
                                            minimum_amount,
                                            maximum_amount,
                                            recurring_enabled: Some(true),
                                            installment_payment_enabled: Some(false),
                                            accepted_currencies: method.accepted_currencies,
                                            accepted_countries: method.accepted_countries,
                                            payment_experience: None,
                                        },
                                    )
                                }
                            }
                        }
                    }
                    PaymentMethod::BankRedirect
                    | PaymentMethod::Wallet
                    | PaymentMethod::PayLater
                    | PaymentMethod::BankTransfer
                    | PaymentMethod::Crypto
                    | PaymentMethod::BankDebit
                    | PaymentMethod::Reward
                    | PaymentMethod::RealTimePayment
                    | PaymentMethod::Upi
                    | PaymentMethod::Voucher
                    | PaymentMethod::GiftCard
                    | PaymentMethod::OpenBanking
                    | PaymentMethod::CardRedirect
                    | PaymentMethod::MobilePayment => {
                        if let Some(provider) = payload.provider {
                            let val = Self::transform_payment_method(
                                request.connector,
                                provider,
                                payload.payment_method,
                            );
                            // Methods with no enabled types are omitted.
                            if !val.is_empty() {
                                payment_method_enabled.push(PaymentMethodsEnabled {
                                    payment_method: payload.payment_method,
                                    payment_method_types: Some(val),
                                });
                            }
                        }
                    }
                };
            }
            if !card_payment_method_types.is_empty() {
                payment_method_enabled.push(PaymentMethodsEnabled {
                    payment_method: PaymentMethod::Card,
                    payment_method_types: Some(card_payment_method_types),
                });
            }
        }
        // Identity fields come from the existing connector account; only the
        // enabled methods and metadata come from the dashboard request.
        ConnectorApiIntegrationPayload {
            connector_type: api_response.connector_type,
            profile_id: api_response.profile_id,
            connector_name: api_response.connector_name,
            connector_label: api_response.connector_label,
            merchant_connector_id: api_response.merchant_connector_id,
            disabled: api_response.disabled,
            test_mode: api_response.test_mode,
            payment_methods_enabled: Some(payment_method_enabled),
            connector_webhook_details: api_response.connector_webhook_details,
            metadata: request.metadata,
        }
    }
}
// File: crates/connector_configs/src/connector.rs
use std::collections::HashMap;
#[cfg(feature = "payouts")]
use api_models::enums::PayoutConnectors;
use api_models::{
enums::{AuthenticationConnectors, Connector, PmAuthConnectors, TaxConnectors},
payments,
};
use serde::{Deserialize, Serialize};
use toml;
use crate::common_config::{CardProvider, InputData, Provider, ZenApplePay};
/// Per-currency credentials in the shape used by the Payload connector.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct PayloadCurrencyAuthKeyType {
    pub api_key: String,
    pub processing_account_id: String,
}
/// CashtoCode "classic" product credentials.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct Classic {
    pub password_classic: String,
    pub username_classic: String,
    pub merchant_id_classic: String,
}
/// CashtoCode "evoucher" product credentials.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct Evoucher {
    pub password_evoucher: String,
    pub username_evoucher: String,
    pub merchant_id_evoucher: String,
}
/// Per-currency credentials in the shape used by the CashtoCode connector:
/// one credential set per product line.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct CashtoCodeCurrencyAuthKeyType {
    pub classic: Classic,
    pub evoucher: Evoucher,
}
/// Untagged union of the per-currency credential shapes; deserialization
/// picks the first variant that matches.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum CurrencyAuthValue {
    CashtoCode(CashtoCodeCurrencyAuthKeyType),
    Payload(PayloadCurrencyAuthKeyType),
}
/// Authentication credential shapes a connector can be configured with.
/// Variants differ in how many secrets the connector needs; `NoKey` is the
/// default for connectors requiring no credentials.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub enum ConnectorAuthType {
    /// Single API key.
    HeaderKey {
        api_key: String,
    },
    /// API key plus one auxiliary key.
    BodyKey {
        api_key: String,
        key1: String,
    },
    /// API key, auxiliary key, and a shared secret.
    SignatureKey {
        api_key: String,
        key1: String,
        api_secret: String,
    },
    /// Four-part credential set.
    MultiAuthKey {
        api_key: String,
        key1: String,
        api_secret: String,
        key2: String,
    },
    /// Per-currency credentials (CashtoCode/Payload-style connectors);
    /// NOTE(review): map keys are presumably currency codes — confirm.
    CurrencyAuthKey {
        auth_key_map: HashMap<String, CurrencyAuthValue>,
    },
    /// Client-certificate based authentication.
    CertificateAuth {
        certificate: String,
        private_key: String,
    },
    #[default]
    NoKey,
}
/// Apple Pay configuration as read from a connector TOML file: either the
/// standard API-model metadata or the Zen-specific shape.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
#[serde(untagged)]
pub enum ApplePayTomlConfig {
    Standard(Box<payments::ApplePayMetadata>),
    Zen(ZenApplePay),
}
/// Klarna regional endpoint selector.
/// NOTE(review): duplicates `common_config::KlarnaEndpoint` — consider
/// sharing a single definition.
#[serde_with::skip_serializing_none]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum KlarnaEndpoint {
    Europe,
    NorthAmerica,
    Oceania,
}
/// Form-field descriptors for additional merchant data (open banking
/// recipient details, bank account schemes, etc.).
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ConfigMerchantAdditionalDetails {
    pub open_banking_recipient_data: Option<InputData>,
    pub account_data: Option<InputData>,
    pub iban: Option<Vec<InputData>>,
    pub bacs: Option<Vec<InputData>>,
    pub connector_recipient_id: Option<InputData>,
    pub wallet_id: Option<InputData>,
    pub faster_payments: Option<Vec<InputData>>,
    pub sepa: Option<Vec<InputData>>,
    pub sepa_instant: Option<Vec<InputData>>,
    pub elixir: Option<Vec<InputData>>,
    pub bankgiro: Option<Vec<InputData>>,
    pub plusgiro: Option<Vec<InputData>>,
}
/// Account-id form fields for card payments, split by 3DS usage.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct AccountIdConfigForCard {
    pub three_ds: Option<Vec<InputData>>,
    pub no_three_ds: Option<Vec<InputData>>,
}
/// Account-id form fields for redirect-based payment methods.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct AccountIdConfigForRedirect {
    pub three_ds: Option<Vec<InputData>>,
}
/// Account-id form fields for Apple Pay, split by token handling mode.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct AccountIdConfigForApplePay {
    pub encrypt: Option<Vec<InputData>>,
    pub decrypt: Option<Vec<InputData>>,
}
/// Per-payment-method account-id configuration maps (crate-private fields).
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct AccountIDSupportedMethods {
    apple_pay: HashMap<String, AccountIdConfigForApplePay>,
    card: HashMap<String, AccountIdConfigForCard>,
    interac: HashMap<String, AccountIdConfigForRedirect>,
    pay_safe_card: HashMap<String, AccountIdConfigForRedirect>,
    skrill: HashMap<String, AccountIdConfigForRedirect>,
}
/// Form-field descriptors for every metadata value a connector TOML file may
/// expose to the dashboard; each present field renders one input control.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ConfigMetadata {
    pub merchant_config_currency: Option<InputData>,
    pub merchant_account_id: Option<InputData>,
    pub account_name: Option<InputData>,
    pub account_type: Option<InputData>,
    pub terminal_id: Option<InputData>,
    // Wallet configuration inputs.
    pub google_pay: Option<Vec<InputData>>,
    pub apple_pay: Option<Vec<InputData>>,
    pub merchant_id: Option<InputData>,
    pub endpoint_prefix: Option<InputData>,
    pub mcc: Option<InputData>,
    pub merchant_country_code: Option<InputData>,
    pub merchant_name: Option<InputData>,
    // Acquirer / 3DS requestor inputs.
    pub acquirer_bin: Option<InputData>,
    pub acquirer_merchant_id: Option<InputData>,
    pub acquirer_country_code: Option<InputData>,
    pub three_ds_requestor_name: Option<InputData>,
    pub three_ds_requestor_id: Option<InputData>,
    pub pull_mechanism_for_external_3ds_enabled: Option<InputData>,
    pub klarna_region: Option<InputData>,
    pub pricing_type: Option<InputData>,
    pub source_balance_account: Option<InputData>,
    pub brand_id: Option<InputData>,
    pub destination_account_number: Option<InputData>,
    pub dpa_id: Option<InputData>,
    pub dpa_name: Option<InputData>,
    pub locale: Option<InputData>,
    pub card_brands: Option<InputData>,
    pub merchant_category_code: Option<InputData>,
    pub merchant_configuration_id: Option<InputData>,
    pub currency_id: Option<InputData>,
    pub platform_id: Option<InputData>,
    pub ledger_account_id: Option<InputData>,
    pub tenant_id: Option<InputData>,
    pub platform_url: Option<InputData>,
    pub report_group: Option<InputData>,
    pub proxy_url: Option<InputData>,
    pub shop_name: Option<InputData>,
    pub merchant_funding_source: Option<InputData>,
    // Nested per-method account-id configuration (not a plain input).
    pub account_id: Option<AccountIDSupportedMethods>,
    pub name: Option<InputData>,
    pub client_merchant_reference_id: Option<InputData>,
    pub route: Option<InputData>,
    pub mid: Option<InputData>,
    pub tid: Option<InputData>,
    pub site: Option<InputData>,
}
/// Form-field descriptors for connector wallet credentials.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ConnectorWalletDetailsConfig {
    pub samsung_pay: Option<Vec<InputData>>,
    pub paze: Option<Vec<InputData>>,
    pub google_pay: Option<Vec<InputData>>,
    pub amazon_pay: Option<Vec<InputData>>,
}
/// Full configuration for one connector as loaded from its TOML file:
/// auth shape, webhook details, metadata inputs, and the payment method
/// types the connector supports, bucketed per payment method.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ConnectorTomlConfig {
    pub connector_auth: Option<ConnectorAuthType>,
    pub connector_webhook_details: Option<api_models::admin::MerchantConnectorWebhookDetails>,
    pub metadata: Option<Box<ConfigMetadata>>,
    pub connector_wallets_details: Option<Box<ConnectorWalletDetailsConfig>>,
    pub additional_merchant_data: Option<Box<ConfigMerchantAdditionalDetails>>,
    // Card networks supported for each card payment method type.
    pub credit: Option<Vec<CardProvider>>,
    pub debit: Option<Vec<CardProvider>>,
    // Non-card payment method buckets.
    pub bank_transfer: Option<Vec<Provider>>,
    pub bank_redirect: Option<Vec<Provider>>,
    pub bank_debit: Option<Vec<Provider>>,
    pub open_banking: Option<Vec<Provider>>,
    pub pay_later: Option<Vec<Provider>>,
    pub wallet: Option<Vec<Provider>>,
    pub crypto: Option<Vec<Provider>>,
    pub reward: Option<Vec<Provider>>,
    pub upi: Option<Vec<Provider>>,
    pub voucher: Option<Vec<Provider>>,
    pub gift_card: Option<Vec<Provider>>,
    pub card_redirect: Option<Vec<Provider>>,
    pub is_verifiable: Option<bool>,
    pub real_time_payment: Option<Vec<Provider>>,
}
/// Root of the embedded connector-config TOML: one optional
/// [`ConnectorTomlConfig`] section per known connector.
///
/// Field names must match the TOML table names exactly. Payout-only sections
/// carry a `_payout` suffix and are gated behind the `payouts` feature.
#[serde_with::skip_serializing_none]
#[derive(Debug, Deserialize, Serialize, Clone)]
pub struct ConnectorConfig {
    pub authipay: Option<ConnectorTomlConfig>,
    pub juspaythreedsserver: Option<ConnectorTomlConfig>,
    pub katapult: Option<ConnectorTomlConfig>,
    pub aci: Option<ConnectorTomlConfig>,
    pub adyen: Option<ConnectorTomlConfig>,
    pub affirm: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub adyen_payout: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub adyenplatform_payout: Option<ConnectorTomlConfig>,
    pub airwallex: Option<ConnectorTomlConfig>,
    pub amazonpay: Option<ConnectorTomlConfig>,
    pub archipel: Option<ConnectorTomlConfig>,
    pub authorizedotnet: Option<ConnectorTomlConfig>,
    pub bamboraapac: Option<ConnectorTomlConfig>,
    pub bankofamerica: Option<ConnectorTomlConfig>,
    pub barclaycard: Option<ConnectorTomlConfig>,
    pub billwerk: Option<ConnectorTomlConfig>,
    pub bitpay: Option<ConnectorTomlConfig>,
    pub blackhawknetwork: Option<ConnectorTomlConfig>,
    pub calida: Option<ConnectorTomlConfig>,
    pub bluesnap: Option<ConnectorTomlConfig>,
    pub boku: Option<ConnectorTomlConfig>,
    pub braintree: Option<ConnectorTomlConfig>,
    pub breadpay: Option<ConnectorTomlConfig>,
    pub cardinal: Option<ConnectorTomlConfig>,
    pub cashtocode: Option<ConnectorTomlConfig>,
    pub celero: Option<ConnectorTomlConfig>,
    pub chargebee: Option<ConnectorTomlConfig>,
    pub custombilling: Option<ConnectorTomlConfig>,
    pub checkbook: Option<ConnectorTomlConfig>,
    pub checkout: Option<ConnectorTomlConfig>,
    pub coinbase: Option<ConnectorTomlConfig>,
    pub coingate: Option<ConnectorTomlConfig>,
    pub cryptopay: Option<ConnectorTomlConfig>,
    pub ctp_visa: Option<ConnectorTomlConfig>,
    pub cybersource: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub cybersource_payout: Option<ConnectorTomlConfig>,
    pub iatapay: Option<ConnectorTomlConfig>,
    pub itaubank: Option<ConnectorTomlConfig>,
    pub opennode: Option<ConnectorTomlConfig>,
    pub bambora: Option<ConnectorTomlConfig>,
    pub datatrans: Option<ConnectorTomlConfig>,
    pub deutschebank: Option<ConnectorTomlConfig>,
    pub digitalvirgo: Option<ConnectorTomlConfig>,
    pub dlocal: Option<ConnectorTomlConfig>,
    pub dwolla: Option<ConnectorTomlConfig>,
    // NOTE(review): not feature-gated like the other `_payout` fields — confirm intentional.
    pub ebanx_payout: Option<ConnectorTomlConfig>,
    pub elavon: Option<ConnectorTomlConfig>,
    pub facilitapay: Option<ConnectorTomlConfig>,
    pub finix: Option<ConnectorTomlConfig>,
    pub fiserv: Option<ConnectorTomlConfig>,
    pub fiservemea: Option<ConnectorTomlConfig>,
    pub fiuu: Option<ConnectorTomlConfig>,
    pub flexiti: Option<ConnectorTomlConfig>,
    pub forte: Option<ConnectorTomlConfig>,
    pub getnet: Option<ConnectorTomlConfig>,
    pub gigadat: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub gigadat_payout: Option<ConnectorTomlConfig>,
    pub globalpay: Option<ConnectorTomlConfig>,
    pub globepay: Option<ConnectorTomlConfig>,
    pub gocardless: Option<ConnectorTomlConfig>,
    pub gpayments: Option<ConnectorTomlConfig>,
    pub hipay: Option<ConnectorTomlConfig>,
    pub helcim: Option<ConnectorTomlConfig>,
    pub hyperswitch_vault: Option<ConnectorTomlConfig>,
    pub hyperwallet: Option<ConnectorTomlConfig>,
    pub inespay: Option<ConnectorTomlConfig>,
    pub jpmorgan: Option<ConnectorTomlConfig>,
    pub klarna: Option<ConnectorTomlConfig>,
    pub loonio: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub loonio_payout: Option<ConnectorTomlConfig>,
    pub mifinity: Option<ConnectorTomlConfig>,
    pub mollie: Option<ConnectorTomlConfig>,
    pub moneris: Option<ConnectorTomlConfig>,
    pub mpgs: Option<ConnectorTomlConfig>,
    pub multisafepay: Option<ConnectorTomlConfig>,
    pub nexinets: Option<ConnectorTomlConfig>,
    pub nexixpay: Option<ConnectorTomlConfig>,
    pub nmi: Option<ConnectorTomlConfig>,
    pub nomupay_payout: Option<ConnectorTomlConfig>,
    pub noon: Option<ConnectorTomlConfig>,
    pub nordea: Option<ConnectorTomlConfig>,
    pub novalnet: Option<ConnectorTomlConfig>,
    pub nuvei_payout: Option<ConnectorTomlConfig>,
    pub nuvei: Option<ConnectorTomlConfig>,
    pub paybox: Option<ConnectorTomlConfig>,
    pub payload: Option<ConnectorTomlConfig>,
    pub payme: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub payone_payout: Option<ConnectorTomlConfig>,
    pub paypal: Option<ConnectorTomlConfig>,
    pub paysafe: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub paypal_payout: Option<ConnectorTomlConfig>,
    pub paystack: Option<ConnectorTomlConfig>,
    pub paytm: Option<ConnectorTomlConfig>,
    pub payu: Option<ConnectorTomlConfig>,
    pub peachpayments: Option<ConnectorTomlConfig>,
    pub phonepe: Option<ConnectorTomlConfig>,
    pub placetopay: Option<ConnectorTomlConfig>,
    pub plaid: Option<ConnectorTomlConfig>,
    pub powertranz: Option<ConnectorTomlConfig>,
    pub prophetpay: Option<ConnectorTomlConfig>,
    pub razorpay: Option<ConnectorTomlConfig>,
    pub recurly: Option<ConnectorTomlConfig>,
    pub riskified: Option<ConnectorTomlConfig>,
    pub rapyd: Option<ConnectorTomlConfig>,
    pub redsys: Option<ConnectorTomlConfig>,
    pub santander: Option<ConnectorTomlConfig>,
    pub shift4: Option<ConnectorTomlConfig>,
    pub sift: Option<ConnectorTomlConfig>,
    pub silverflow: Option<ConnectorTomlConfig>,
    pub stripe: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub stripe_payout: Option<ConnectorTomlConfig>,
    pub stripebilling: Option<ConnectorTomlConfig>,
    pub signifyd: Option<ConnectorTomlConfig>,
    // NOTE(review): both `tersouro` and `tesouro` exist below; `tersouro` looks
    // like a misspelled duplicate and is never read in this file — confirm
    // which TOML table name is canonical.
    pub tersouro: Option<ConnectorTomlConfig>,
    pub tokenex: Option<ConnectorTomlConfig>,
    pub tokenio: Option<ConnectorTomlConfig>,
    pub trustpay: Option<ConnectorTomlConfig>,
    pub trustpayments: Option<ConnectorTomlConfig>,
    pub threedsecureio: Option<ConnectorTomlConfig>,
    pub netcetera: Option<ConnectorTomlConfig>,
    pub tsys: Option<ConnectorTomlConfig>,
    pub vgs: Option<ConnectorTomlConfig>,
    pub volt: Option<ConnectorTomlConfig>,
    pub wellsfargo: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub wise_payout: Option<ConnectorTomlConfig>,
    pub worldline: Option<ConnectorTomlConfig>,
    pub worldpay: Option<ConnectorTomlConfig>,
    #[cfg(feature = "payouts")]
    pub worldpay_payout: Option<ConnectorTomlConfig>,
    pub worldpayvantiv: Option<ConnectorTomlConfig>,
    pub worldpayxml: Option<ConnectorTomlConfig>,
    pub xendit: Option<ConnectorTomlConfig>,
    pub square: Option<ConnectorTomlConfig>,
    pub stax: Option<ConnectorTomlConfig>,
    pub dummy_connector: Option<ConnectorTomlConfig>,
    pub stripe_test: Option<ConnectorTomlConfig>,
    pub paypal_test: Option<ConnectorTomlConfig>,
    pub zen: Option<ConnectorTomlConfig>,
    pub zsl: Option<ConnectorTomlConfig>,
    pub taxjar: Option<ConnectorTomlConfig>,
    pub tesouro: Option<ConnectorTomlConfig>,
    pub ctp_mastercard: Option<ConnectorTomlConfig>,
    pub unified_authentication_service: Option<ConnectorTomlConfig>,
}
impl ConnectorConfig {
    /// Parses the environment-appropriate embedded TOML (production, sandbox,
    /// or development, selected by compile-time features) into a
    /// `ConnectorConfig`.
    ///
    /// Returns `Err` with the TOML parse error rendered as a `String`.
    /// Note: the whole file is re-parsed on every call; every getter below
    /// calls this.
    fn new() -> Result<Self, String> {
        let config_str = if cfg!(feature = "production") {
            include_str!("../toml/production.toml")
        } else if cfg!(feature = "sandbox") {
            include_str!("../toml/sandbox.toml")
        } else {
            include_str!("../toml/development.toml")
        };
        let config = toml::from_str::<Self>(config_str);
        match config {
            Ok(data) => Ok(data),
            Err(err) => Err(err.to_string()),
        }
    }
    /// Returns the TOML section for a payout connector, or `None` if that
    /// connector has no section in the active environment's TOML.
    #[cfg(feature = "payouts")]
    pub fn get_payout_connector_config(
        connector: PayoutConnectors,
    ) -> Result<Option<ConnectorTomlConfig>, String> {
        let connector_data = Self::new()?;
        match connector {
            PayoutConnectors::Adyen => Ok(connector_data.adyen_payout),
            PayoutConnectors::Adyenplatform => Ok(connector_data.adyenplatform_payout),
            PayoutConnectors::Cybersource => Ok(connector_data.cybersource_payout),
            PayoutConnectors::Ebanx => Ok(connector_data.ebanx_payout),
            PayoutConnectors::Gigadat => Ok(connector_data.gigadat_payout),
            PayoutConnectors::Loonio => Ok(connector_data.loonio_payout),
            PayoutConnectors::Nomupay => Ok(connector_data.nomupay_payout),
            PayoutConnectors::Nuvei => Ok(connector_data.nuvei_payout),
            PayoutConnectors::Payone => Ok(connector_data.payone_payout),
            PayoutConnectors::Paypal => Ok(connector_data.paypal_payout),
            PayoutConnectors::Stripe => Ok(connector_data.stripe_payout),
            PayoutConnectors::Wise => Ok(connector_data.wise_payout),
            PayoutConnectors::Worldpay => Ok(connector_data.worldpay_payout),
        }
    }
    /// Returns the TOML section for an authentication (3DS/CTP) connector.
    pub fn get_authentication_connector_config(
        connector: AuthenticationConnectors,
    ) -> Result<Option<ConnectorTomlConfig>, String> {
        let connector_data = Self::new()?;
        match connector {
            AuthenticationConnectors::Threedsecureio => Ok(connector_data.threedsecureio),
            AuthenticationConnectors::Netcetera => Ok(connector_data.netcetera),
            AuthenticationConnectors::Gpayments => Ok(connector_data.gpayments),
            AuthenticationConnectors::CtpMastercard => Ok(connector_data.ctp_mastercard),
            AuthenticationConnectors::CtpVisa => Ok(connector_data.ctp_visa),
            AuthenticationConnectors::UnifiedAuthenticationService => {
                Ok(connector_data.unified_authentication_service)
            }
            AuthenticationConnectors::Juspaythreedsserver => Ok(connector_data.juspaythreedsserver),
            AuthenticationConnectors::Cardinal => Ok(connector_data.cardinal),
        }
    }
    /// Returns the TOML section for a tax-calculation processor.
    pub fn get_tax_processor_config(
        connector: TaxConnectors,
    ) -> Result<Option<ConnectorTomlConfig>, String> {
        let connector_data = Self::new()?;
        match connector {
            TaxConnectors::Taxjar => Ok(connector_data.taxjar),
        }
    }
    /// Returns the TOML section for a payment-method authentication processor.
    pub fn get_pm_authentication_processor_config(
        connector: PmAuthConnectors,
    ) -> Result<Option<ConnectorTomlConfig>, String> {
        let connector_data = Self::new()?;
        match connector {
            PmAuthConnectors::Plaid => Ok(connector_data.plaid),
        }
    }
    /// Returns the TOML section for a payment connector.
    ///
    /// Payout-only connectors (Adyenplatform, Nomupay, Payone, Wise) return an
    /// `Err` directing the caller to `get_payout_connector_config` instead.
    pub fn get_connector_config(
        connector: Connector,
    ) -> Result<Option<ConnectorTomlConfig>, String> {
        let connector_data = Self::new()?;
        match connector {
            Connector::Aci => Ok(connector_data.aci),
            Connector::Authipay => Ok(connector_data.authipay),
            Connector::Adyen => Ok(connector_data.adyen),
            Connector::Affirm => Ok(connector_data.affirm),
            Connector::Adyenplatform => Err("Use get_payout_connector_config".to_string()),
            Connector::Airwallex => Ok(connector_data.airwallex),
            Connector::Amazonpay => Ok(connector_data.amazonpay),
            Connector::Archipel => Ok(connector_data.archipel),
            Connector::Authorizedotnet => Ok(connector_data.authorizedotnet),
            Connector::Bamboraapac => Ok(connector_data.bamboraapac),
            Connector::Bankofamerica => Ok(connector_data.bankofamerica),
            Connector::Barclaycard => Ok(connector_data.barclaycard),
            Connector::Billwerk => Ok(connector_data.billwerk),
            Connector::Bitpay => Ok(connector_data.bitpay),
            Connector::Bluesnap => Ok(connector_data.bluesnap),
            Connector::Calida => Ok(connector_data.calida),
            Connector::Blackhawknetwork => Ok(connector_data.blackhawknetwork),
            Connector::Boku => Ok(connector_data.boku),
            Connector::Braintree => Ok(connector_data.braintree),
            Connector::Breadpay => Ok(connector_data.breadpay),
            Connector::Cashtocode => Ok(connector_data.cashtocode),
            Connector::Cardinal => Ok(connector_data.cardinal),
            Connector::Celero => Ok(connector_data.celero),
            Connector::Chargebee => Ok(connector_data.chargebee),
            Connector::Checkbook => Ok(connector_data.checkbook),
            Connector::Checkout => Ok(connector_data.checkout),
            Connector::Coinbase => Ok(connector_data.coinbase),
            Connector::Coingate => Ok(connector_data.coingate),
            Connector::Cryptopay => Ok(connector_data.cryptopay),
            Connector::CtpVisa => Ok(connector_data.ctp_visa),
            Connector::Custombilling => Ok(connector_data.custombilling),
            Connector::Cybersource => Ok(connector_data.cybersource),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyBillingConnector => Ok(connector_data.dummy_connector),
            Connector::Iatapay => Ok(connector_data.iatapay),
            Connector::Itaubank => Ok(connector_data.itaubank),
            Connector::Opennode => Ok(connector_data.opennode),
            Connector::Bambora => Ok(connector_data.bambora),
            Connector::Datatrans => Ok(connector_data.datatrans),
            Connector::Deutschebank => Ok(connector_data.deutschebank),
            Connector::Digitalvirgo => Ok(connector_data.digitalvirgo),
            Connector::Dlocal => Ok(connector_data.dlocal),
            Connector::Dwolla => Ok(connector_data.dwolla),
            // NOTE(review): unlike the other payout-only connectors above/below
            // (which return Err), Ebanx returns its payout section directly —
            // confirm this is intentional.
            Connector::Ebanx => Ok(connector_data.ebanx_payout),
            Connector::Elavon => Ok(connector_data.elavon),
            Connector::Facilitapay => Ok(connector_data.facilitapay),
            Connector::Finix => Ok(connector_data.finix),
            Connector::Fiserv => Ok(connector_data.fiserv),
            Connector::Fiservemea => Ok(connector_data.fiservemea),
            Connector::Fiuu => Ok(connector_data.fiuu),
            Connector::Flexiti => Ok(connector_data.flexiti),
            Connector::Forte => Ok(connector_data.forte),
            Connector::Getnet => Ok(connector_data.getnet),
            Connector::Gigadat => Ok(connector_data.gigadat),
            Connector::Globalpay => Ok(connector_data.globalpay),
            Connector::Globepay => Ok(connector_data.globepay),
            Connector::Gocardless => Ok(connector_data.gocardless),
            Connector::Gpayments => Ok(connector_data.gpayments),
            Connector::Hipay => Ok(connector_data.hipay),
            Connector::HyperswitchVault => Ok(connector_data.hyperswitch_vault),
            Connector::Helcim => Ok(connector_data.helcim),
            Connector::Inespay => Ok(connector_data.inespay),
            Connector::Jpmorgan => Ok(connector_data.jpmorgan),
            Connector::Juspaythreedsserver => Ok(connector_data.juspaythreedsserver),
            Connector::Klarna => Ok(connector_data.klarna),
            Connector::Loonio => Ok(connector_data.loonio),
            Connector::Mifinity => Ok(connector_data.mifinity),
            Connector::Mollie => Ok(connector_data.mollie),
            Connector::Moneris => Ok(connector_data.moneris),
            Connector::Multisafepay => Ok(connector_data.multisafepay),
            Connector::Nexinets => Ok(connector_data.nexinets),
            Connector::Nexixpay => Ok(connector_data.nexixpay),
            Connector::Prophetpay => Ok(connector_data.prophetpay),
            Connector::Nmi => Ok(connector_data.nmi),
            Connector::Nordea => Ok(connector_data.nordea),
            Connector::Nomupay => Err("Use get_payout_connector_config".to_string()),
            Connector::Novalnet => Ok(connector_data.novalnet),
            Connector::Noon => Ok(connector_data.noon),
            Connector::Nuvei => Ok(connector_data.nuvei),
            Connector::Paybox => Ok(connector_data.paybox),
            Connector::Payload => Ok(connector_data.payload),
            Connector::Payme => Ok(connector_data.payme),
            Connector::Payone => Err("Use get_payout_connector_config".to_string()),
            Connector::Paypal => Ok(connector_data.paypal),
            Connector::Paysafe => Ok(connector_data.paysafe),
            Connector::Paystack => Ok(connector_data.paystack),
            Connector::Payu => Ok(connector_data.payu),
            Connector::Peachpayments => Ok(connector_data.peachpayments),
            Connector::Placetopay => Ok(connector_data.placetopay),
            Connector::Plaid => Ok(connector_data.plaid),
            Connector::Powertranz => Ok(connector_data.powertranz),
            Connector::Razorpay => Ok(connector_data.razorpay),
            Connector::Rapyd => Ok(connector_data.rapyd),
            Connector::Recurly => Ok(connector_data.recurly),
            Connector::Redsys => Ok(connector_data.redsys),
            Connector::Riskified => Ok(connector_data.riskified),
            Connector::Santander => Ok(connector_data.santander),
            Connector::Shift4 => Ok(connector_data.shift4),
            Connector::Signifyd => Ok(connector_data.signifyd),
            Connector::Silverflow => Ok(connector_data.silverflow),
            Connector::Square => Ok(connector_data.square),
            Connector::Stax => Ok(connector_data.stax),
            Connector::Stripe => Ok(connector_data.stripe),
            Connector::Stripebilling => Ok(connector_data.stripebilling),
            Connector::Tesouro => Ok(connector_data.tesouro),
            Connector::Tokenex => Ok(connector_data.tokenex),
            Connector::Tokenio => Ok(connector_data.tokenio),
            Connector::Trustpay => Ok(connector_data.trustpay),
            Connector::Trustpayments => Ok(connector_data.trustpayments),
            Connector::Threedsecureio => Ok(connector_data.threedsecureio),
            Connector::Taxjar => Ok(connector_data.taxjar),
            Connector::Tsys => Ok(connector_data.tsys),
            Connector::Vgs => Ok(connector_data.vgs),
            Connector::Volt => Ok(connector_data.volt),
            Connector::Wellsfargo => Ok(connector_data.wellsfargo),
            Connector::Wise => Err("Use get_payout_connector_config".to_string()),
            Connector::Worldline => Ok(connector_data.worldline),
            Connector::Worldpay => Ok(connector_data.worldpay),
            Connector::Worldpayvantiv => Ok(connector_data.worldpayvantiv),
            Connector::Worldpayxml => Ok(connector_data.worldpayxml),
            Connector::Zen => Ok(connector_data.zen),
            Connector::Zsl => Ok(connector_data.zsl),
            // Dummy connectors map to the shared test sections; DummyConnector4
            // and 7 deliberately reuse the stripe/paypal test configs.
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector1 => Ok(connector_data.dummy_connector),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector2 => Ok(connector_data.dummy_connector),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector3 => Ok(connector_data.dummy_connector),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector4 => Ok(connector_data.stripe_test),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector5 => Ok(connector_data.dummy_connector),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector6 => Ok(connector_data.dummy_connector),
            #[cfg(feature = "dummy_connector")]
            Connector::DummyConnector7 => Ok(connector_data.paypal_test),
            Connector::Netcetera => Ok(connector_data.netcetera),
            Connector::CtpMastercard => Ok(connector_data.ctp_mastercard),
            Connector::Xendit => Ok(connector_data.xendit),
            Connector::Paytm => Ok(connector_data.paytm),
            Connector::Phonepe => Ok(connector_data.phonepe),
        }
    }
}
</crate>
|
{
"crate": "connector_configs",
"file": null,
"files": [
"crates/connector_configs/src/lib.rs",
"crates/connector_configs/src/response_modifier.rs",
"crates/connector_configs/src/common_config.rs",
"crates/connector_configs/src/transformer.rs",
"crates/connector_configs/src/connector.rs"
],
"module": null,
"num_files": 5,
"token_count": 12669
}
|
crate_2014729343969739924
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: euclid_wasm
Files: 3
</path>
<crate>
// File: crates/euclid_wasm/src/types.rs
use euclid::frontend::dir::DirKeyKind;
#[cfg(feature = "payouts")]
use euclid::frontend::dir::PayoutDirKeyKind;
use serde::Serialize;
/// Serializable description of a routing dimension: the `DirKeyKind` plus an
/// optional human-readable description, used by `getDescriptionCategory`.
#[derive(Serialize, Clone)]
pub struct Details<'a> {
    pub description: Option<&'a str>,
    pub kind: DirKeyKind,
}
/// Payout counterpart of [`Details`], gated behind the `payouts` feature.
#[cfg(feature = "payouts")]
#[derive(Serialize, Clone)]
pub struct PayoutDetails<'a> {
    pub description: Option<&'a str>,
    pub kind: PayoutDirKeyKind,
}
// File: crates/euclid_wasm/src/lib.rs
#![allow(non_upper_case_globals)]
mod types;
mod utils;
use std::{
collections::{HashMap, HashSet},
str::FromStr,
sync::OnceLock,
};
use api_models::{
enums as api_model_enums, routing::ConnectorSelection,
surcharge_decision_configs::SurchargeDecisionConfigs,
};
use common_enums::RoutableConnectors;
use common_types::three_ds_decision_rule_engine::ThreeDSDecisionRule;
use connector_configs::{
common_config::{ConnectorApiIntegrationPayload, DashboardRequestPayload},
connector,
};
use currency_conversion::{
conversion::convert as convert_currency, types as currency_conversion_types,
};
use euclid::{
backend::{inputs, interpreter::InterpreterBackend, EuclidBackend},
dssa::{self, analyzer, graph::CgraphExt, state_machine},
frontend::{
ast,
dir::{self, enums as dir_enums, EuclidDirFilter},
},
};
use strum::{EnumMessage, EnumProperty, VariantNames};
use wasm_bindgen::prelude::*;
use crate::utils::JsResultExt;
type JsResult = Result<JsValue, JsValue>;
use api_models::payment_methods::CountryCodeWithName;
#[cfg(feature = "payouts")]
use common_enums::PayoutStatus;
use common_enums::{
CountryAlpha2, DisputeStatus, EventClass, EventType, IntentStatus, MandateStatus,
MerchantCategoryCode, MerchantCategoryCodeWithName, RefundStatus,
};
use strum::IntoEnumIterator;
/// Merchant data seeded once from JS: the combined constraint graph plus the
/// merchant's configured connector choices.
struct SeedData {
    cgraph: hyperswitch_constraint_graph::ConstraintGraph<dir::DirValue>,
    connectors: Vec<ast::ConnectorChoice>,
}
// Write-once globals: `OnceLock::set` fails on a second seed attempt, which the
// seeding functions surface as a JS error.
static SEED_DATA: OnceLock<SeedData> = OnceLock::new();
static SEED_FOREX: OnceLock<currency_conversion_types::ExchangeRates> = OnceLock::new();
/// Seeds the WASM module with forex rates supplied by the frontend.
///
/// The input is a struct with a `base_currency` and a `conversion` map holding
/// that base currency's rates against all other currencies. Seeding is
/// write-once: a second call returns a JS error.
#[wasm_bindgen(js_name = setForexData)]
pub fn seed_forex(forex: JsValue) -> JsResult {
    let rates: currency_conversion_types::ExchangeRates = serde_wasm_bindgen::from_value(forex)?;
    let seed_result = SEED_FOREX.set(rates);
    seed_result
        .map_err(|_| "Forex has already been seeded".to_string())
        .err_to_js()?;
    Ok(JsValue::NULL)
}
/// Converts `amount` from `from_currency` to `to_currency` using the seeded
/// forex rates.
///
/// Errors if the rates have not been seeded, if either currency fails to
/// deserialize into the `Currency` enum, or if no conversion path exists.
#[wasm_bindgen(js_name = convertCurrency)]
pub fn convert_forex_value(amount: i64, from_currency: JsValue, to_currency: JsValue) -> JsResult {
    // Check seeding first so the "not seeded" error takes precedence.
    let rates = SEED_FOREX
        .get()
        .ok_or("Forex Data not seeded")
        .err_to_js()?;
    let source: common_enums::Currency = serde_wasm_bindgen::from_value(from_currency)?;
    let target: common_enums::Currency = serde_wasm_bindgen::from_value(to_currency)?;
    let converted = convert_currency(rates, source, target, amount)
        .map_err(|_| "conversion not possible for provided values")
        .err_to_js()?;
    Ok(serde_wasm_bindgen::to_value(&converted)?)
}
/// Returns every two-letter (alpha-2) country code paired with its full
/// country name, for frontend dropdowns.
#[wasm_bindgen(js_name=getTwoLetterCountryCode)]
pub fn get_two_letter_country_code() -> JsResult {
    let mut countries = Vec::new();
    for code in CountryAlpha2::iter() {
        countries.push(CountryCodeWithName {
            code,
            name: common_enums::Country::from_alpha2(code),
        });
    }
    Ok(serde_wasm_bindgen::to_value(&countries)?)
}
/// Returns every merchant category code (MCC) paired with its human-readable
/// category name, for frontend display.
#[wasm_bindgen(js_name=getMerchantCategoryCodeWithName)]
pub fn get_merchant_category_code_with_name() -> JsResult {
    let mut categories = Vec::new();
    for code in MerchantCategoryCode::iter() {
        categories.push(MerchantCategoryCodeWithName {
            code,
            name: code.to_merchant_category_name(),
        });
    }
    Ok(serde_wasm_bindgen::to_value(&categories)?)
}
/// This function can be used by the frontend to provide the WASM with information about
/// all the merchant's connector accounts. The input argument is a vector of all the merchant's
/// connector accounts from the API.
///
/// Builds an MCA constraint graph from the accounts, combines it with the
/// static analysis graph, and stores the result (plus the parsed connector
/// choices) in the write-once `SEED_DATA` global. Errors if any connector name
/// is unknown or if seeding has already happened.
#[cfg(feature = "v1")]
#[wasm_bindgen(js_name = seedKnowledgeGraph)]
pub fn seed_knowledge_graph(mcas: JsValue) -> JsResult {
    let mcas: Vec<api_models::admin::MerchantConnectorResponse> =
        serde_wasm_bindgen::from_value(mcas)?;
    // Parse each account's connector name into a routable connector choice;
    // a single unparseable name fails the whole call.
    let connectors: Vec<ast::ConnectorChoice> = mcas
        .iter()
        .map(|mca| {
            Ok::<_, strum::ParseError>(ast::ConnectorChoice {
                connector: RoutableConnectors::from_str(&mca.connector_name)?,
            })
        })
        .collect::<Result<_, _>>()
        .map_err(|_| "invalid connector name received")
        .err_to_js()?;
    // No per-connector or default payment-method filters are applied here.
    let pm_filter = kgraph_utils::types::PaymentMethodFilters(HashMap::new());
    let config = kgraph_utils::types::CountryCurrencyFilter {
        connector_configs: HashMap::new(),
        default_configs: Some(pm_filter),
    };
    let mca_graph = kgraph_utils::mca::make_mca_graph(mcas, &config).err_to_js()?;
    // Merge the merchant-specific graph with the global analysis graph.
    let analysis_graph = hyperswitch_constraint_graph::ConstraintGraph::combine(
        &mca_graph,
        &dssa::truth::ANALYSIS_GRAPH,
    )
    .err_to_js()?;
    SEED_DATA
        .set(SeedData {
            cgraph: analysis_graph,
            connectors,
        })
        .map_err(|_| "Knowledge Graph has been already seeded".to_string())
        .err_to_js()?;
    Ok(JsValue::NULL)
}
/// This function allows the frontend to get all the merchant's configured
/// connectors that are valid for a rule based on the conditions specified in
/// the rule
///
/// Requires `seed_knowledge_graph` to have been called. A connector is ruled
/// out as soon as any conjunctive context of the lowered rule fails analysis
/// with that connector asserted; the remaining connectors are returned.
#[wasm_bindgen(js_name = getValidConnectorsForRule)]
pub fn get_valid_connectors_for_rule(rule: JsValue) -> JsResult {
    let seed_data = SEED_DATA.get().ok_or("Data not seeded").err_to_js()?;
    let rule: ast::Rule<ConnectorSelection> = serde_wasm_bindgen::from_value(rule)?;
    let dir_rule = ast::lowering::lower_rule(rule).err_to_js()?;
    // Pair each configured connector with its DirValue form for assertions.
    let mut valid_connectors: Vec<(ast::ConnectorChoice, dir::DirValue)> = seed_data
        .connectors
        .iter()
        .cloned()
        .map(|choice| (choice.clone(), dir::DirValue::Connector(Box::new(choice))))
        .collect();
    let mut invalid_connectors: HashSet<ast::ConnectorChoice> = HashSet::new();
    let mut ctx_manager = state_machine::RuleContextManager::new(&dir_rule, &[]);
    let dummy_meta = HashMap::new();
    // For every conjunctive context in the Rule, verify validity of all still-valid connectors
    // using the knowledge graph
    while let Some(ctx) = ctx_manager.advance_mut().err_to_js()? {
        // Standalone conjunctive context analysis to ensure the context itself is valid before
        // checking it against merchant's connectors
        seed_data
            .cgraph
            .perform_context_analysis(
                ctx,
                &mut hyperswitch_constraint_graph::Memoization::new(),
                None,
            )
            .err_to_js()?;
        // Update conjunctive context and run analysis on all of merchant's connectors.
        for (conn, choice) in &valid_connectors {
            if invalid_connectors.contains(conn) {
                continue;
            }
            // Temporarily assert this connector into the context (push),
            // analyze, then restore the context (pop) for the next connector.
            let ctx_val = dssa::types::ContextValue::assertion(choice, &dummy_meta);
            ctx.push(ctx_val);
            let analysis_result = seed_data.cgraph.perform_context_analysis(
                ctx,
                &mut hyperswitch_constraint_graph::Memoization::new(),
                None,
            );
            if analysis_result.is_err() {
                invalid_connectors.insert(conn.clone());
            }
            ctx.pop();
        }
    }
    valid_connectors.retain(|(k, _)| !invalid_connectors.contains(k));
    let valid_connectors: Vec<ast::ConnectorChoice> =
        valid_connectors.into_iter().map(|c| c.0).collect();
    Ok(serde_wasm_bindgen::to_value(&valid_connectors)?)
}
/// Runs static analysis on a routing program, using the seeded knowledge
/// graph when available (analysis still runs without it).
#[wasm_bindgen(js_name = analyzeProgram)]
pub fn analyze_program(js_program: JsValue) -> JsResult {
    let program: ast::Program<ConnectorSelection> = serde_wasm_bindgen::from_value(js_program)?;
    let seeded_graph = SEED_DATA.get().map(|sd| &sd.cgraph);
    analyzer::analyze(program, seeded_graph).err_to_js()?;
    Ok(JsValue::NULL)
}
/// Executes a routing program against a backend input via the interpreter
/// backend and returns the serialized output.
#[wasm_bindgen(js_name = runProgram)]
pub fn run_program(program: JsValue, input: JsValue) -> JsResult {
    let parsed_program: ast::Program<ConnectorSelection> =
        serde_wasm_bindgen::from_value(program)?;
    let backend_input: inputs::BackendInput = serde_wasm_bindgen::from_value(input)?;
    let interpreter = InterpreterBackend::with_program(parsed_program).err_to_js()?;
    let output: euclid::backend::BackendOutput<ConnectorSelection> =
        interpreter.execute(backend_input).err_to_js()?;
    Ok(serde_wasm_bindgen::to_value(&output)?)
}
/// Returns the names of all routable connectors known to the build.
#[wasm_bindgen(js_name = getAllConnectors)]
pub fn get_all_connectors() -> JsResult {
    let connector_names = RoutableConnectors::VARIANTS;
    let js_value = serde_wasm_bindgen::to_value(connector_names)?;
    Ok(js_value)
}
/// Returns the routing keys available for payment routing rules.
///
/// `Connector` and the 3DS-decision-rule-only keys are excluded since they are
/// not valid dimensions for payment routing.
#[wasm_bindgen(js_name = getAllKeys)]
pub fn get_all_keys() -> JsResult {
    // 3DS Decision Rule keys must not appear among the payment routing keys.
    const EXCLUDED_KEYS: [&str; 8] = [
        "Connector",
        "issuer_name",
        "issuer_country",
        "customer_device_platform",
        "customer_device_type",
        "customer_device_display_size",
        "acquirer_country",
        "acquirer_fraud_rate",
    ];
    let mut keys: Vec<&'static str> = Vec::new();
    for key in dir::DirKeyKind::VARIANTS.iter().copied() {
        if !EXCLUDED_KEYS.contains(&key) {
            keys.push(key);
        }
    }
    Ok(serde_wasm_bindgen::to_value(&keys)?)
}
/// Returns the data type (as a string) of the given routing key, or an error
/// string if the key name is unknown.
#[wasm_bindgen(js_name = getKeyType)]
pub fn get_key_type(key: &str) -> Result<String, String> {
    let parsed_key =
        dir::DirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?;
    Ok(parsed_key.get_type().to_string())
}
/// Returns the routing keys permitted in 3DS conditional configurations.
#[wasm_bindgen(js_name = getThreeDsKeys)]
pub fn get_three_ds_keys() -> JsResult {
    let allowed = <common_types::payments::ConditionalConfigs as EuclidDirFilter>::ALLOWED;
    let js_value = serde_wasm_bindgen::to_value(allowed)?;
    Ok(js_value)
}
/// Returns the routing keys permitted in surcharge decision configurations.
#[wasm_bindgen(js_name= getSurchargeKeys)]
pub fn get_surcharge_keys() -> JsResult {
    let allowed = <SurchargeDecisionConfigs as EuclidDirFilter>::ALLOWED;
    let js_value = serde_wasm_bindgen::to_value(allowed)?;
    Ok(js_value)
}
/// Returns the routing keys permitted in 3DS decision rules.
#[wasm_bindgen(js_name= getThreeDsDecisionRuleKeys)]
pub fn get_three_ds_decision_rule_keys() -> JsResult {
    let allowed = <ThreeDSDecisionRule as EuclidDirFilter>::ALLOWED;
    let js_value = serde_wasm_bindgen::to_value(allowed)?;
    Ok(js_value)
}
/// Thin wrapper exposing `ron_parser::my_parse` to JS: parses the given
/// string and returns the parsed representation as a string.
#[wasm_bindgen(js_name=parseToString)]
pub fn parser(val: String) -> String {
    ron_parser::my_parse(val)
}
/// Returns the allowed enum variant names for the given routing key.
///
/// Errors if the key name is unknown, or if the key is a free-form value
/// (amount, card BIN, label, metadata, ...) that has no fixed variant set.
#[wasm_bindgen(js_name = getVariantValues)]
pub fn get_variant_values(key: &str) -> Result<JsValue, JsValue> {
    let key = dir::DirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?;
    let variants: &[&str] = match key {
        dir::DirKeyKind::PaymentMethod => dir_enums::PaymentMethod::VARIANTS,
        dir::DirKeyKind::CardType => dir_enums::CardType::VARIANTS,
        dir::DirKeyKind::CardNetwork => dir_enums::CardNetwork::VARIANTS,
        dir::DirKeyKind::PayLaterType => dir_enums::PayLaterType::VARIANTS,
        dir::DirKeyKind::WalletType => dir_enums::WalletType::VARIANTS,
        dir::DirKeyKind::BankRedirectType => dir_enums::BankRedirectType::VARIANTS,
        dir::DirKeyKind::CryptoType => dir_enums::CryptoType::VARIANTS,
        dir::DirKeyKind::RewardType => dir_enums::RewardType::VARIANTS,
        dir::DirKeyKind::AuthenticationType => dir_enums::AuthenticationType::VARIANTS,
        dir::DirKeyKind::CaptureMethod => dir_enums::CaptureMethod::VARIANTS,
        dir::DirKeyKind::PaymentCurrency => dir_enums::PaymentCurrency::VARIANTS,
        // All country-valued keys share the same Country variant list.
        dir::DirKeyKind::BusinessCountry => dir_enums::Country::VARIANTS,
        dir::DirKeyKind::BillingCountry => dir_enums::Country::VARIANTS,
        dir::DirKeyKind::BankTransferType => dir_enums::BankTransferType::VARIANTS,
        dir::DirKeyKind::UpiType => dir_enums::UpiType::VARIANTS,
        dir::DirKeyKind::SetupFutureUsage => dir_enums::SetupFutureUsage::VARIANTS,
        dir::DirKeyKind::PaymentType => dir_enums::PaymentType::VARIANTS,
        dir::DirKeyKind::MandateType => dir_enums::MandateType::VARIANTS,
        dir::DirKeyKind::MandateAcceptanceType => dir_enums::MandateAcceptanceType::VARIANTS,
        dir::DirKeyKind::CardRedirectType => dir_enums::CardRedirectType::VARIANTS,
        dir::DirKeyKind::GiftCardType => dir_enums::GiftCardType::VARIANTS,
        dir::DirKeyKind::VoucherType => dir_enums::VoucherType::VARIANTS,
        dir::DirKeyKind::BankDebitType => dir_enums::BankDebitType::VARIANTS,
        dir::DirKeyKind::RealTimePaymentType => dir_enums::RealTimePaymentType::VARIANTS,
        dir::DirKeyKind::OpenBankingType => dir_enums::OpenBankingType::VARIANTS,
        dir::DirKeyKind::MobilePaymentType => dir_enums::MobilePaymentType::VARIANTS,
        dir::DirKeyKind::IssuerCountry => dir_enums::Country::VARIANTS,
        dir::DirKeyKind::AcquirerCountry => dir_enums::Country::VARIANTS,
        dir::DirKeyKind::CustomerDeviceType => dir_enums::CustomerDeviceType::VARIANTS,
        dir::DirKeyKind::CustomerDevicePlatform => dir_enums::CustomerDevicePlatform::VARIANTS,
        dir::DirKeyKind::CustomerDeviceDisplaySize => {
            dir_enums::CustomerDeviceDisplaySize::VARIANTS
        }
        // Free-form keys: no enumerable variants.
        dir::DirKeyKind::PaymentAmount
        | dir::DirKeyKind::Connector
        | dir::DirKeyKind::CardBin
        | dir::DirKeyKind::BusinessLabel
        | dir::DirKeyKind::MetaData
        | dir::DirKeyKind::IssuerName
        | dir::DirKeyKind::AcquirerFraudRate => Err("Key does not have variants".to_string())?,
    };
    Ok(serde_wasm_bindgen::to_value(variants)?)
}
/// Adds two 64-bit integers; exposed to JS as a smoke-test entry point.
#[wasm_bindgen(js_name = addTwo)]
pub fn add_two(n1: i64, n2: i64) -> i64 {
    let sum = n2 + n1;
    sum
}
/// Groups all routing keys (except `Connector`) by their "Category" strum
/// property, each entry carrying the key kind and its detailed description.
///
/// Returns a map of `Option<category>` -> list of key details; keys without a
/// "Category" property land under the `None` bucket.
#[wasm_bindgen(js_name = getDescriptionCategory)]
pub fn get_description_category() -> JsResult {
    let keys = dir::DirKeyKind::VARIANTS
        .iter()
        .copied()
        .filter(|s| s != &"Connector")
        .collect::<Vec<&'static str>>();
    let mut category: HashMap<Option<&str>, Vec<types::Details<'_>>> = HashMap::new();
    for key in keys {
        let dir_key =
            dir::DirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?;
        let details = types::Details {
            description: dir_key.get_detailed_message(),
            kind: dir_key.clone(),
        };
        // Single entry-API lookup; avoids the previous and_modify/or_insert
        // form, which cloned `details` and eagerly built a Vec even when the
        // category bucket already existed.
        category
            .entry(dir_key.get_str("Category"))
            .or_default()
            .push(details);
    }
    Ok(serde_wasm_bindgen::to_value(&category)?)
}
/// Looks up the dashboard TOML config for a payment connector by name.
#[wasm_bindgen(js_name = getConnectorConfig)]
pub fn get_connector_config(key: &str) -> JsResult {
    let connector = api_model_enums::Connector::from_str(key)
        .map_err(|_| "Invalid key received".to_string())?;
    let config = connector::ConnectorConfig::get_connector_config(connector)?;
    Ok(serde_wasm_bindgen::to_value(&config)?)
}
/// Looks up the dashboard TOML config for a payout connector by name.
#[cfg(feature = "payouts")]
#[wasm_bindgen(js_name = getPayoutConnectorConfig)]
pub fn get_payout_connector_config(key: &str) -> JsResult {
    let connector = api_model_enums::PayoutConnectors::from_str(key)
        .map_err(|_| "Invalid key received".to_string())?;
    let config = connector::ConnectorConfig::get_payout_connector_config(connector)?;
    Ok(serde_wasm_bindgen::to_value(&config)?)
}
/// Looks up the dashboard TOML config for an authentication connector by name.
#[wasm_bindgen(js_name = getAuthenticationConnectorConfig)]
pub fn get_authentication_connector_config(key: &str) -> JsResult {
    let connector = api_model_enums::AuthenticationConnectors::from_str(key)
        .map_err(|_| "Invalid key received".to_string())?;
    let config = connector::ConnectorConfig::get_authentication_connector_config(connector)?;
    Ok(serde_wasm_bindgen::to_value(&config)?)
}
/// Looks up the dashboard TOML config for a tax processor by name.
#[wasm_bindgen(js_name = getTaxProcessorConfig)]
pub fn get_tax_processor_config(key: &str) -> JsResult {
    let connector = api_model_enums::TaxConnectors::from_str(key)
        .map_err(|_| "Invalid key received".to_string())?;
    let config = connector::ConnectorConfig::get_tax_processor_config(connector)?;
    Ok(serde_wasm_bindgen::to_value(&config)?)
}
/// Looks up the dashboard TOML config for a payment-method authentication
/// processor by name.
#[wasm_bindgen(js_name = getPMAuthenticationProcessorConfig)]
pub fn get_pm_authentication_processor_config(key: &str) -> JsResult {
    let connector = api_model_enums::PmAuthConnectors::from_str(key)
        .map_err(|_| "Invalid key received".to_string())?;
    let config = connector::ConnectorConfig::get_pm_authentication_processor_config(connector)?;
    Ok(serde_wasm_bindgen::to_value(&config)?)
}
#[wasm_bindgen(js_name = getRequestPayload)]
pub fn get_request_payload(input: JsValue, response: JsValue) -> JsResult {
    // Decode both JS values, then merge them into a connector request payload.
    let dashboard_payload: DashboardRequestPayload = serde_wasm_bindgen::from_value(input)?;
    let integration_payload: ConnectorApiIntegrationPayload =
        serde_wasm_bindgen::from_value(response)?;
    let request =
        DashboardRequestPayload::create_connector_request(dashboard_payload, integration_payload);
    Ok(serde_wasm_bindgen::to_value(&request)?)
}
#[wasm_bindgen(js_name = getResponsePayload)]
pub fn get_response_payload(input: JsValue) -> JsResult {
    // Decode the integration payload and return its transformed response form.
    let integration_payload: ConnectorApiIntegrationPayload =
        serde_wasm_bindgen::from_value(input)?;
    let transformed =
        ConnectorApiIntegrationPayload::get_transformed_response_payload(integration_payload);
    Ok(serde_wasm_bindgen::to_value(&transformed)?)
}
#[cfg(feature = "payouts")]
#[wasm_bindgen(js_name = getAllPayoutKeys)]
pub fn get_all_payout_keys() -> JsResult {
    // Every payout dimension key name, in declaration order.
    let payout_keys: Vec<&'static str> = dir::PayoutDirKeyKind::VARIANTS.to_vec();
    Ok(serde_wasm_bindgen::to_value(&payout_keys)?)
}
#[cfg(feature = "payouts")]
#[wasm_bindgen(js_name = getPayoutVariantValues)]
/// Returns the list of enum variant names for a payout dimension key.
///
/// Consistency fix: every sibling endpoint in this file returns the
/// `JsResult` alias; this one spelled out `Result<JsValue, JsValue>`.
pub fn get_payout_variant_values(key: &str) -> JsResult {
    let key =
        dir::PayoutDirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?;
    let variants: &[&str] = match key {
        dir::PayoutDirKeyKind::BusinessCountry => dir_enums::BusinessCountry::VARIANTS,
        dir::PayoutDirKeyKind::BillingCountry => dir_enums::BillingCountry::VARIANTS,
        dir::PayoutDirKeyKind::PayoutCurrency => dir_enums::PaymentCurrency::VARIANTS,
        dir::PayoutDirKeyKind::PayoutType => dir_enums::PayoutType::VARIANTS,
        dir::PayoutDirKeyKind::WalletType => dir_enums::PayoutWalletType::VARIANTS,
        dir::PayoutDirKeyKind::BankTransferType => dir_enums::PayoutBankTransferType::VARIANTS,
        // Free-form / numeric keys have no fixed variant set.
        dir::PayoutDirKeyKind::PayoutAmount | dir::PayoutDirKeyKind::BusinessLabel => {
            Err("Key does not have variants".to_string())?
        }
    };
    Ok(serde_wasm_bindgen::to_value(variants)?)
}
#[cfg(feature = "payouts")]
#[wasm_bindgen(js_name = getPayoutDescriptionCategory)]
/// Groups every payout dimension key (with its detailed description) by its
/// `Category` string property and returns the grouping as a JS object.
pub fn get_payout_description_category() -> JsResult {
    let keys = dir::PayoutDirKeyKind::VARIANTS.to_vec();
    let mut category: HashMap<Option<&str>, Vec<types::PayoutDetails<'_>>> = HashMap::new();
    for key in keys {
        let dir_key =
            dir::PayoutDirKeyKind::from_str(key).map_err(|_| "Invalid key received".to_string())?;
        let details = types::PayoutDetails {
            description: dir_key.get_detailed_message(),
            kind: dir_key.clone(),
        };
        // Single `entry` lookup; `or_default().push(..)` avoids both the eager
        // `vec![details]` allocation and the `details.clone()` the previous
        // `and_modify(..).or_insert(..)` pattern required.
        category
            .entry(dir_key.get_str("Category"))
            .or_default()
            .push(details);
    }
    Ok(serde_wasm_bindgen::to_value(&category)?)
}
#[wasm_bindgen(js_name = getValidWebhookStatus)]
/// For a given webhook event class, returns the statuses that can actually
/// produce an outgoing webhook: each class's statuses are filtered to those
/// that convert into `Some(EventType)` (disputes are returned unfiltered).
pub fn get_valid_webhook_status(key: &str) -> JsResult {
    let event_class = EventClass::from_str(key)
        .map_err(|_| "Invalid webhook event type received".to_string())
        .err_to_js()?;
    match event_class {
        EventClass::Payments => {
            // Keep only intent statuses that map to an outgoing event type.
            let statuses: Vec<IntentStatus> = IntentStatus::iter()
                .filter(|intent_status| Into::<Option<EventType>>::into(*intent_status).is_some())
                .collect();
            Ok(serde_wasm_bindgen::to_value(&statuses)?)
        }
        EventClass::Refunds => {
            let statuses: Vec<RefundStatus> = RefundStatus::iter()
                .filter(|status| Into::<Option<EventType>>::into(*status).is_some())
                .collect();
            Ok(serde_wasm_bindgen::to_value(&statuses)?)
        }
        EventClass::Disputes => {
            // Every dispute status is webhook-relevant — no filtering here.
            let statuses: Vec<DisputeStatus> = DisputeStatus::iter().collect();
            Ok(serde_wasm_bindgen::to_value(&statuses)?)
        }
        EventClass::Mandates => {
            let statuses: Vec<MandateStatus> = MandateStatus::iter()
                .filter(|status| Into::<Option<EventType>>::into(*status).is_some())
                .collect();
            Ok(serde_wasm_bindgen::to_value(&statuses)?)
        }
        // Payout statuses only exist when the `payouts` feature is enabled.
        #[cfg(feature = "payouts")]
        EventClass::Payouts => {
            let statuses: Vec<PayoutStatus> = PayoutStatus::iter()
                .filter(|status| Into::<Option<EventType>>::into(*status).is_some())
                .collect();
            Ok(serde_wasm_bindgen::to_value(&statuses)?)
        }
    }
}
// File: crates/euclid_wasm/src/utils.rs
use wasm_bindgen::prelude::*;
/// Extension trait that converts a `Result`'s error arm into a `JsValue`
/// so the result can be returned across the wasm boundary.
pub trait JsResultExt<T> {
    fn err_to_js(self) -> Result<T, JsValue>;
}
impl<T, E> JsResultExt<T> for Result<T, E>
where
E: serde::Serialize,
{
fn err_to_js(self) -> Result<T, JsValue> {
match self {
Ok(t) => Ok(t),
Err(e) => Err(serde_wasm_bindgen::to_value(&e)?),
}
}
}
</crate>
|
{
"crate": "euclid_wasm",
"file": null,
"files": [
"crates/euclid_wasm/src/types.rs",
"crates/euclid_wasm/src/lib.rs",
"crates/euclid_wasm/src/utils.rs"
],
"module": null,
"num_files": 3,
"token_count": 5569
}
|
crate_-555093119289050417
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: smithy-core
Files: 3
</path>
<crate>
// File: crates/smithy-core/src/types.rs
// crates/smithy-core/types.rs
use std::collections::HashMap;
use serde::{Deserialize, Serialize};
/// A set of Smithy shapes belonging to a single namespace.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SmithyModel {
    // Target Smithy namespace, e.g. `com.example.payments`.
    pub namespace: String,
    // Shape definitions keyed by shape name.
    pub shapes: HashMap<String, SmithyShape>,
}
/// A Smithy shape definition; the `type` serde tag mirrors the Smithy IDL
/// shape kind (`structure`, `string`, `list`, ...).
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "type")]
pub enum SmithyShape {
    /// A `structure` shape with named members.
    #[serde(rename = "structure")]
    Structure {
        members: HashMap<String, SmithyMember>,
        #[serde(skip_serializing_if = "Option::is_none")]
        documentation: Option<String>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A simple `string` shape (optionally constrained via traits).
    #[serde(rename = "string")]
    String {
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A 32-bit `integer` shape.
    #[serde(rename = "integer")]
    Integer {
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A 64-bit `long` shape.
    #[serde(rename = "long")]
    Long {
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A `boolean` shape.
    #[serde(rename = "boolean")]
    Boolean {
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A `list` shape; `member` describes the element target.
    #[serde(rename = "list")]
    List {
        member: Box<SmithyMember>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// A tagged `union` shape with named members.
    #[serde(rename = "union")]
    Union {
        members: HashMap<String, SmithyMember>,
        #[serde(skip_serializing_if = "Option::is_none")]
        documentation: Option<String>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
    /// An `enum` shape; `values` maps member names to their metadata.
    #[serde(rename = "enum")]
    Enum {
        values: HashMap<String, SmithyEnumValue>,
        #[serde(skip_serializing_if = "Option::is_none")]
        documentation: Option<String>,
        #[serde(skip_serializing_if = "Vec::is_empty")]
        traits: Vec<SmithyTrait>,
    },
}
/// A member of a structure/union/list shape, pointing at a target shape name.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SmithyMember {
    // Name of the shape this member refers to (may be `smithy.api#`-qualified).
    pub target: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub documentation: Option<String>,
    #[serde(skip_serializing_if = "Vec::is_empty")]
    pub traits: Vec<SmithyTrait>,
}
/// Metadata for a single enum member.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SmithyEnumValue {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub documentation: Option<String>,
    // Whether this member is the enum's default value.
    pub is_default: bool,
}
/// A Smithy trait applied to a shape or member; the serde tag carries the
/// fully-qualified `smithy.api#` trait id.
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(tag = "trait")]
pub enum SmithyTrait {
    #[serde(rename = "smithy.api#pattern")]
    Pattern { pattern: String },
    // `min`/`max` are each optional; at least the serialized form allows both absent.
    #[serde(rename = "smithy.api#range")]
    Range { min: Option<i64>, max: Option<i64> },
    #[serde(rename = "smithy.api#required")]
    Required,
    #[serde(rename = "smithy.api#documentation")]
    Documentation { documentation: String },
    #[serde(rename = "smithy.api#length")]
    Length { min: Option<u64>, max: Option<u64> },
    #[serde(rename = "smithy.api#httpLabel")]
    HttpLabel,
    #[serde(rename = "smithy.api#httpQuery")]
    HttpQuery { name: String },
    #[serde(rename = "smithy.api#mixin")]
    Mixin,
}
/// Intermediate (non-serialized) field description.
/// NOTE(review): appears to be the pre-lowering form produced by the derive
/// macro before conversion into `SmithyMember` — confirm against the
/// proc-macro crate.
#[derive(Debug, Clone)]
pub struct SmithyField {
    pub name: String,
    // Rust type as written in source, e.g. `Option<Vec<String>>`.
    pub value_type: String,
    pub constraints: Vec<SmithyConstraint>,
    pub documentation: Option<String>,
    pub optional: bool,
    pub flatten: bool,
}
/// Intermediate (non-serialized) enum-variant description; `fields` is
/// non-empty for data-carrying variants.
#[derive(Debug, Clone)]
pub struct SmithyEnumVariant {
    pub name: String,
    pub fields: Vec<SmithyField>,
    pub constraints: Vec<SmithyConstraint>,
    pub documentation: Option<String>,
}
/// Intermediate constraint form, mirroring a subset of [`SmithyTrait`].
#[derive(Debug, Clone)]
pub enum SmithyConstraint {
    Pattern(String),
    // (min, max), either bound optional.
    Range(Option<i64>, Option<i64>),
    Length(Option<u64>, Option<u64>),
    Required,
    HttpLabel,
    HttpQuery(String),
}
/// Implemented by types that can describe themselves as a [`SmithyModel`].
pub trait SmithyModelGenerator {
    fn generate_smithy_model() -> SmithyModel;
}
// Helper functions moved from the proc-macro crate to be accessible by it.
/// Maps a Rust type string (as written in source) to a Smithy target name,
/// recursively unwrapping `Option`/`Box`/`Secret`, turning `Vec<T>` into a
/// synthesized `…List` list shape, and expanding map types.
///
/// Returns the resolved target plus any newly generated auxiliary shapes.
/// `shapes` is only *read* here (for `contains_key` dedup of list shapes);
/// callers are expected to merge the returned map themselves.
pub fn resolve_type_and_generate_shapes(
    value_type: &str,
    shapes: &mut HashMap<String, SmithyShape>,
) -> Result<(String, HashMap<String, SmithyShape>), syn::Error> {
    let value_type = value_type.trim();
    // All errors are attached to the call site; no better span is available
    // for a plain string input.
    let value_type_span = proc_macro2::Span::call_site();
    let mut generated_shapes = HashMap::new();
    let target_type = match value_type {
        // Primitive mappings onto smithy.api prelude shapes.
        "String" | "str" => "smithy.api#String".to_string(),
        "i8" | "i16" | "i32" | "u8" | "u16" | "u32" => "smithy.api#Integer".to_string(),
        "i64" | "u64" | "isize" | "usize" => "smithy.api#Long".to_string(),
        "f32" => "smithy.api#Float".to_string(),
        "f64" => "smithy.api#Double".to_string(),
        "bool" => "smithy.api#Boolean".to_string(),
        "PrimitiveDateTime" | "time::PrimitiveDateTime" => "smithy.api#Timestamp".to_string(),
        "Amount" | "MinorUnit" => "smithy.api#Long".to_string(),
        "serde_json::Value" | "Value" | "Object" => "smithy.api#Document".to_string(),
        "Url" | "url::Url" => "smithy.api#String".to_string(),
        // `Option<T>` is transparent here: optionality is expressed via the
        // absence of the `required` trait, not via the target type.
        vt if vt.starts_with("Option<") && vt.ends_with('>') => {
            let inner_type = extract_generic_inner_type(vt, "Option")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (resolved_type, new_shapes) = resolve_type_and_generate_shapes(inner_type, shapes)?;
            generated_shapes.extend(new_shapes);
            resolved_type
        }
        // `Vec<T>` synthesizes a named list shape `<T>List` (deduplicated).
        vt if vt.starts_with("Vec<") && vt.ends_with('>') => {
            let inner_type = extract_generic_inner_type(vt, "Vec")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (inner_smithy_type, new_shapes) =
                resolve_type_and_generate_shapes(inner_type, shapes)?;
            generated_shapes.extend(new_shapes);
            // Strip any `path::` or `namespace#` prefix for the list name.
            let list_shape_name = format!(
                "{}List",
                inner_smithy_type
                    .split("::")
                    .last()
                    .unwrap_or(&inner_smithy_type)
                    .split('#')
                    .next_back()
                    .unwrap_or(&inner_smithy_type)
            );
            if !shapes.contains_key(&list_shape_name)
                && !generated_shapes.contains_key(&list_shape_name)
            {
                let list_shape = SmithyShape::List {
                    member: Box::new(SmithyMember {
                        target: inner_smithy_type,
                        documentation: None,
                        traits: vec![],
                    }),
                    traits: vec![],
                };
                generated_shapes.insert(list_shape_name.clone(), list_shape);
            }
            list_shape_name
        }
        // `Box<T>` and `Secret<T>` are transparent wrappers.
        vt if vt.starts_with("Box<") && vt.ends_with('>') => {
            let inner_type = extract_generic_inner_type(vt, "Box")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (resolved_type, new_shapes) = resolve_type_and_generate_shapes(inner_type, shapes)?;
            generated_shapes.extend(new_shapes);
            resolved_type
        }
        vt if vt.starts_with("Secret<") && vt.ends_with('>') => {
            let inner_type = extract_generic_inner_type(vt, "Secret")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (resolved_type, new_shapes) = resolve_type_and_generate_shapes(inner_type, shapes)?;
            generated_shapes.extend(new_shapes);
            resolved_type
        }
        // NOTE(review): the map arms emit a pseudo-target string rather than
        // synthesizing a named `map` shape, so this target is not a valid
        // Smithy shape reference — confirm downstream handling.
        vt if vt.starts_with("HashMap<") && vt.ends_with('>') => {
            let inner_types = extract_generic_inner_type(vt, "HashMap")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (key_type, value_type) =
                parse_map_types(inner_types).map_err(|e| syn::Error::new(value_type_span, e))?;
            let (key_smithy_type, key_shapes) = resolve_type_and_generate_shapes(key_type, shapes)?;
            generated_shapes.extend(key_shapes);
            let (value_smithy_type, value_shapes) =
                resolve_type_and_generate_shapes(value_type, shapes)?;
            generated_shapes.extend(value_shapes);
            format!(
                "smithy.api#Map<key: {}, value: {}>",
                key_smithy_type, value_smithy_type
            )
        }
        vt if vt.starts_with("BTreeMap<") && vt.ends_with('>') => {
            let inner_types = extract_generic_inner_type(vt, "BTreeMap")
                .map_err(|e| syn::Error::new(value_type_span, e))?;
            let (key_type, value_type) =
                parse_map_types(inner_types).map_err(|e| syn::Error::new(value_type_span, e))?;
            let (key_smithy_type, key_shapes) = resolve_type_and_generate_shapes(key_type, shapes)?;
            generated_shapes.extend(key_shapes);
            let (value_smithy_type, value_shapes) =
                resolve_type_and_generate_shapes(value_type, shapes)?;
            generated_shapes.extend(value_shapes);
            format!(
                "smithy.api#Map<key: {}, value: {}>",
                key_smithy_type, value_smithy_type
            )
        }
        // Anything else is a user-defined type: translate the Rust path
        // separator into a Smithy namespace separator.
        _ => {
            if value_type.contains("::") {
                value_type.replace("::", ".")
            } else {
                value_type.to_string()
            }
        }
    };
    Ok((target_type, generated_shapes))
}
/// Extracts the inner type from a generic wrapper, e.g. `"Vec<u8>"` with
/// wrapper `"Vec"` yields `"u8"` (trimmed of surrounding whitespace).
///
/// # Errors
/// Returns a descriptive message when `full_type` is not of the form
/// `Wrapper<Inner>` or when the inner type is empty.
fn extract_generic_inner_type<'a>(full_type: &'a str, wrapper: &str) -> Result<&'a str, String> {
    let expected_start = format!("{}<", wrapper);
    if !full_type.starts_with(&expected_start) || !full_type.ends_with('>') {
        return Err(format!("Invalid {} type format: {}", wrapper, full_type));
    }
    let start_idx = expected_start.len();
    let end_idx = full_type.len() - 1;
    // `start_idx < end_idx` also proves both indices are in bounds
    // (`end_idx` is always `len - 1`), so the former second bounds
    // re-check was dead code and has been removed.
    if start_idx >= end_idx {
        return Err(format!("Empty {} type: {}", wrapper, full_type));
    }
    // `get` is kept as a defensive guard against non-char-boundary slicing.
    Ok(full_type
        .get(start_idx..end_idx)
        .ok_or_else(|| {
            format!(
                "Failed to extract inner type from {}: {}",
                wrapper, full_type
            )
        })?
        .trim())
}
/// Splits the interior of a map generic (`"K, V"`) at the first *top-level*
/// comma — commas nested inside angle brackets are ignored — and returns the
/// trimmed key and value type strings.
fn parse_map_types(inner_types: &str) -> Result<(&str, &str), String> {
    // Track angle-bracket depth so nested generics don't split the pair.
    let mut depth: i32 = 0;
    let split_at = inner_types.char_indices().find_map(|(idx, ch)| match ch {
        '<' => {
            depth += 1;
            None
        }
        '>' => {
            depth -= 1;
            None
        }
        ',' if depth == 0 => Some(idx),
        _ => None,
    });
    let pos = split_at.ok_or_else(|| {
        format!(
            "Invalid map type format, missing comma: {}",
            inner_types
        )
    })?;
    let key_type = inner_types
        .get(..pos)
        .ok_or_else(|| format!("Invalid key type bounds in map: {}", inner_types))?
        .trim();
    let value_type = inner_types
        .get(pos + 1..)
        .ok_or_else(|| format!("Invalid value type bounds in map: {}", inner_types))?
        .trim();
    if key_type.is_empty() || value_type.is_empty() {
        return Err(format!("Invalid map type format: {}", inner_types));
    }
    Ok((key_type, value_type))
}
// File: crates/smithy-core/src/lib.rs
// // crates/smithy-core/lib.rs
pub mod generator;
pub mod types;
pub use generator::SmithyGenerator;
pub use types::*;
// File: crates/smithy-core/src/generator.rs
// crates/smithy-core/generator.rs
use std::{collections::HashMap, fs, path::Path};
use crate::types::{self as types, SmithyModel};
/// Generator for creating Smithy IDL files from models
pub struct SmithyGenerator {
    // Models collected via `add_model`; may span multiple namespaces.
    models: Vec<SmithyModel>,
}
impl SmithyGenerator {
    /// Creates an empty generator with no models.
    pub fn new() -> Self {
        Self { models: Vec::new() }
    }
    /// Registers a model to be included in the next `generate_idl` run.
    pub fn add_model(&mut self, model: SmithyModel) {
        self.models.push(model);
    }
    /// Writes one `<namespace>.smithy` file per distinct namespace into
    /// `output_dir` (created if missing).
    ///
    /// NOTE(review): shapes are collected and emitted via `HashMap`
    /// iteration, so shape ordering within a file — and which duplicate
    /// wins when two models define the same shape name — is not
    /// deterministic across runs; confirm whether stable output is needed.
    pub fn generate_idl(&self, output_dir: &Path) -> Result<(), Box<dyn std::error::Error>> {
        fs::create_dir_all(output_dir)?;
        let mut namespace_models: HashMap<String, Vec<&SmithyModel>> = HashMap::new();
        let mut shape_to_namespace: HashMap<String, String> = HashMap::new();
        // First, build a map of all shape names to their namespaces
        // (a name present in several models keeps the last one inserted).
        for model in &self.models {
            for shape_name in model.shapes.keys() {
                shape_to_namespace.insert(shape_name.clone(), model.namespace.clone());
            }
        }
        // Group models by namespace for file generation
        for model in &self.models {
            namespace_models
                .entry(model.namespace.clone())
                .or_default()
                .push(model);
        }
        for (namespace, models) in namespace_models {
            // File name is the namespace with dots replaced by underscores.
            let filename = format!("{}.smithy", namespace.replace('.', "_"));
            let filepath = output_dir.join(filename);
            let mut content = String::new();
            content.push_str("$version: \"2\"\n\n");
            content.push_str(&format!("namespace {}\n\n", namespace));
            // Collect all unique shape definitions for the current namespace
            let mut shapes_in_namespace = HashMap::new();
            for model in models {
                for (shape_name, shape) in &model.shapes {
                    shapes_in_namespace.insert(shape_name.clone(), shape.clone());
                }
            }
            // Generate definitions for each shape in the namespace
            for (shape_name, shape) in &shapes_in_namespace {
                content.push_str(&self.generate_shape_definition(
                    shape_name,
                    shape,
                    &namespace,
                    &shape_to_namespace,
                ));
                content.push_str("\n\n");
            }
            fs::write(filepath, content)?;
        }
        Ok(())
    }
    /// Renders a single shape into Smithy IDL text: optional doc comment,
    /// then `@trait` lines, then the shape body. Member targets are
    /// namespace-qualified via `resolve_type` when they live elsewhere.
    fn generate_shape_definition(
        &self,
        name: &str,
        shape: &types::SmithyShape,
        current_namespace: &str,
        shape_to_namespace: &HashMap<String, String>,
    ) -> String {
        let resolve_target =
            |target: &str| self.resolve_type(target, current_namespace, shape_to_namespace);
        match shape {
            types::SmithyShape::Structure {
                members,
                documentation,
                traits,
            } => {
                let mut def = String::new();
                if let Some(doc) = documentation {
                    def.push_str(&format!("/// {}\n", doc));
                }
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("structure {} {{\n", name));
                for (member_name, member) in members {
                    if let Some(doc) = &member.documentation {
                        def.push_str(&format!("    /// {}\n", doc));
                    }
                    for smithy_trait in &member.traits {
                        def.push_str(&format!("    @{}\n", self.trait_to_string(smithy_trait)));
                    }
                    let resolved_target = resolve_target(&member.target);
                    def.push_str(&format!("    {}: {}\n", member_name, resolved_target));
                }
                def.push('}');
                def
            }
            types::SmithyShape::Union {
                members,
                documentation,
                traits,
            } => {
                let mut def = String::new();
                if let Some(doc) = documentation {
                    def.push_str(&format!("/// {}\n", doc));
                }
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("union {} {{\n", name));
                for (member_name, member) in members {
                    if let Some(doc) = &member.documentation {
                        def.push_str(&format!("    /// {}\n", doc));
                    }
                    for smithy_trait in &member.traits {
                        def.push_str(&format!("    @{}\n", self.trait_to_string(smithy_trait)));
                    }
                    let resolved_target = resolve_target(&member.target);
                    def.push_str(&format!("    {}: {}\n", member_name, resolved_target));
                }
                def.push('}');
                def
            }
            types::SmithyShape::Enum {
                values,
                documentation,
                traits,
            } => {
                let mut def = String::new();
                if let Some(doc) = documentation {
                    def.push_str(&format!("/// {}\n", doc));
                }
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("enum {} {{\n", name));
                // Only the member key and its doc are emitted;
                // `SmithyEnumValue::name` / `is_default` are not rendered here.
                for (value_name, enum_value) in values {
                    if let Some(doc) = &enum_value.documentation {
                        def.push_str(&format!("    /// {}\n", doc));
                    }
                    def.push_str(&format!("    {}\n", value_name));
                }
                def.push('}');
                def
            }
            types::SmithyShape::String { traits } => {
                let mut def = String::new();
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("string {}", name));
                def
            }
            types::SmithyShape::Integer { traits } => {
                let mut def = String::new();
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("integer {}", name));
                def
            }
            types::SmithyShape::Long { traits } => {
                let mut def = String::new();
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("long {}", name));
                def
            }
            types::SmithyShape::Boolean { traits } => {
                let mut def = String::new();
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("boolean {}", name));
                def
            }
            types::SmithyShape::List { member, traits } => {
                let mut def = String::new();
                for smithy_trait in traits {
                    def.push_str(&format!("@{}\n", self.trait_to_string(smithy_trait)));
                }
                def.push_str(&format!("list {} {{\n", name));
                let resolved_target = resolve_target(&member.target);
                def.push_str(&format!("    member: {}\n", resolved_target));
                def.push('}');
                def
            }
        }
    }
    /// Qualifies a member target: prelude (`smithy.api#`) targets and
    /// same-namespace shapes are returned as-is; shapes known to live in
    /// another namespace get a `namespace#Shape` prefix.
    fn resolve_type(
        &self,
        target: &str,
        current_namespace: &str,
        shape_to_namespace: &HashMap<String, String>,
    ) -> String {
        // If the target is a primitive or a fully qualified Smithy type, return it as is
        if target.starts_with("smithy.api#") {
            return target.to_string();
        }
        // If the target is a custom type, resolve its namespace
        if let Some(target_namespace) = shape_to_namespace.get(target) {
            if target_namespace == current_namespace {
                // The type is in the same namespace, so no qualification is needed
                target.to_string()
            } else {
                // The type is in a different namespace, so it needs to be fully qualified
                format!("{}#{}", target_namespace, target)
            }
        } else {
            // If the type is not found in the shape map, it might be a primitive
            // or an unresolved type. For now, return it as is.
            target.to_string()
        }
    }
    /// Renders a trait as the text that follows `@` in the IDL; optional
    /// `min`/`max` bounds are emitted only when present.
    fn trait_to_string(&self, smithy_trait: &types::SmithyTrait) -> String {
        match smithy_trait {
            types::SmithyTrait::Pattern { pattern } => {
                format!("pattern(\"{}\")", pattern)
            }
            types::SmithyTrait::Range { min, max } => match (min, max) {
                (Some(min), Some(max)) => format!("range(min: {}, max: {})", min, max),
                (Some(min), None) => format!("range(min: {})", min),
                (None, Some(max)) => format!("range(max: {})", max),
                (None, None) => "range".to_string(),
            },
            types::SmithyTrait::Required => "required".to_string(),
            types::SmithyTrait::Documentation { documentation } => {
                format!("documentation(\"{}\")", documentation)
            }
            types::SmithyTrait::Length { min, max } => match (min, max) {
                (Some(min), Some(max)) => format!("length(min: {}, max: {})", min, max),
                (Some(min), None) => format!("length(min: {})", min),
                (None, Some(max)) => format!("length(max: {})", max),
                (None, None) => "length".to_string(),
            },
            types::SmithyTrait::HttpLabel => "httpLabel".to_string(),
            types::SmithyTrait::HttpQuery { name } => {
                format!("httpQuery(\"{}\")", name)
            }
            types::SmithyTrait::Mixin => "mixin".to_string(),
        }
    }
}
impl Default for SmithyGenerator {
fn default() -> Self {
Self::new()
}
}
</crate>
|
{
"crate": "smithy-core",
"file": null,
"files": [
"crates/smithy-core/src/types.rs",
"crates/smithy-core/src/lib.rs",
"crates/smithy-core/src/generator.rs"
],
"module": null,
"num_files": 3,
"token_count": 4880
}
|
crate_-1820972171009270063
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: cards
Files: 3
</path>
<crate>
// File: crates/cards/tests/basic.rs
#![allow(clippy::unwrap_used, clippy::expect_used)]
use cards::{CardExpiration, CardExpirationMonth, CardExpirationYear, CardSecurityCode};
use common_utils::date_time;
use masking::PeekInterface;
#[test]
fn test_card_security_code() {
    // A four-digit CSC is accepted and round-trips through serde unchanged.
    let valid_card_security_code = CardSecurityCode::try_from(1234).unwrap();
    assert_eq!(*valid_card_security_code.peek(), 1234);

    let serialized = serde_json::to_string(&valid_card_security_code).unwrap();
    assert_eq!(serialized, "1234");

    let deserialized = serde_json::from_str::<CardSecurityCode>(&serialized).unwrap();
    assert_eq!(*deserialized.peek(), 1234);

    // Values above four digits are rejected at deserialization time.
    assert!(serde_json::from_str::<CardSecurityCode>("00").is_err());
}
#[test]
fn test_card_expiration_month() {
    // December is valid; month 13 is rejected.
    let card_exp_month = CardExpirationMonth::try_from(12).unwrap();
    assert_eq!(*card_exp_month.peek(), 12);
    assert!(CardExpirationMonth::try_from(13).is_err());

    // Serde round trip preserves the value.
    let serialized = serde_json::to_string(&card_exp_month).unwrap();
    assert_eq!(serialized, "12");
    let deserialized = serde_json::from_str::<CardExpirationMonth>(&serialized).unwrap();
    assert_eq!(*deserialized.peek(), 12);

    // Out-of-range months are also rejected when deserializing.
    assert!(serde_json::from_str::<CardExpirationMonth>("13").is_err());
}
#[test]
fn test_card_expiration_year() {
    let curr_year = u16::try_from(date_time::now().year()).expect("valid year");

    // The current year is accepted; last year is rejected.
    let card_exp_year = CardExpirationYear::try_from(curr_year).unwrap();
    assert_eq!(*card_exp_year.peek(), curr_year);
    assert!(CardExpirationYear::try_from(curr_year - 1).is_err());

    // Serde round trip preserves the value.
    let serialized = serde_json::to_string(&card_exp_year).unwrap();
    assert_eq!(serialized, curr_year.to_string());
    let deserialized = serde_json::from_str::<CardExpirationYear>(&serialized).unwrap();
    assert_eq!(*deserialized.peek(), curr_year);

    // A year in the distant past fails deserialization as well.
    assert!(serde_json::from_str::<CardExpirationYear>("123").is_err());
}
#[test]
fn test_card_expiration() {
    let curr_date = date_time::now();
    let curr_year = u16::try_from(curr_date.year()).expect("valid year");
    let curr_month = u8::from(curr_date.month());
    // no panic
    let card_exp = CardExpiration::try_from((curr_month, curr_year)).unwrap();
    // will panic on unwrap
    let invalid_card_exp = CardExpiration::try_from((13, curr_year));
    assert_eq!(*card_exp.get_month().peek(), curr_month);
    assert_eq!(*card_exp.get_year().peek(), curr_year);
    assert!(!card_exp.is_expired().unwrap());
    assert!(invalid_card_exp.is_err());
    let serialized = serde_json::to_string(&card_exp).unwrap();
    // BUG FIX: the expected month must track the current month; the previous
    // hard-coded `3` made this test pass only when run in March.
    let expected_string = format!(r#"{{"month":{},"year":{}}}"#, curr_month, curr_year);
    assert_eq!(serialized, expected_string);
    let deserialized = serde_json::from_str::<CardExpiration>(&serialized).unwrap();
    assert_eq!(*deserialized.get_month().peek(), curr_month);
    assert_eq!(*deserialized.get_year().peek(), curr_year);
    let invalid_serialized_string = r#"{"month":13,"year":123}"#;
    let invalid_deserialization = serde_json::from_str::<CardExpiration>(invalid_serialized_string);
    assert!(invalid_deserialization.is_err());
}
// File: crates/cards/src/lib.rs
pub mod validate;
use std::ops::Deref;
use common_utils::{date_time, errors};
use error_stack::report;
use masking::{PeekInterface, StrongSecret};
use serde::{de, Deserialize, Serialize};
use time::{Date, Duration, PrimitiveDateTime, Time};
pub use crate::validate::{CardNumber, CardNumberStrategy, CardNumberValidationErr, NetworkToken};
/// A card security code (CVC/CVV) stored as a masked integer.
///
/// NOTE(review): the numeric representation cannot preserve leading zeros
/// (e.g. "012" and "12" both become `12`) — confirm this is acceptable for
/// downstream consumers.
#[derive(Serialize)]
pub struct CardSecurityCode(StrongSecret<u16>);
impl TryFrom<u16> for CardSecurityCode {
    type Error = error_stack::Report<errors::ValidationError>;
    /// Accepts any value of at most four digits (`0..=9999`).
    fn try_from(csc: u16) -> Result<Self, Self::Error> {
        if (0..=9999).contains(&csc) {
            Ok(Self(StrongSecret::new(csc)))
        } else {
            Err(report!(errors::ValidationError::InvalidValue {
                message: "invalid card security code".to_string()
            }))
        }
    }
}
impl<'de> Deserialize<'de> for CardSecurityCode {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::Deserializer<'de>,
{
let csc = u16::deserialize(deserializer)?;
csc.try_into().map_err(de::Error::custom)
}
}
/// A card expiration month (1-12), stored masked.
#[derive(Clone, Debug, Serialize)]
pub struct CardExpirationMonth(StrongSecret<u8>);
impl CardExpirationMonth {
    /// Returns the zero-padded two-digit month, e.g. `"03"`.
    pub fn two_digits(&self) -> String {
        format!("{:02}", self.peek())
    }
}
impl TryFrom<u8> for CardExpirationMonth {
    type Error = error_stack::Report<errors::ValidationError>;
    /// Accepts only calendar months `1..=12`.
    fn try_from(month: u8) -> Result<Self, Self::Error> {
        if month == 0 || month > 12 {
            return Err(report!(errors::ValidationError::InvalidValue {
                message: "invalid card expiration month".to_string()
            }));
        }
        Ok(Self(StrongSecret::new(month)))
    }
}
impl<'de> Deserialize<'de> for CardExpirationMonth {
    /// Deserializes a number and applies the `TryFrom` range validation.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let month = u8::deserialize(deserializer)?;
        month.try_into().map_err(de::Error::custom)
    }
}
/// A four-digit card expiration year, stored masked.
#[derive(Clone, Debug, Serialize)]
pub struct CardExpirationYear(StrongSecret<u16>);
impl CardExpirationYear {
    /// Returns the full four-digit year, e.g. `"2025"`.
    pub fn four_digits(&self) -> String {
        self.peek().to_string()
    }
    /// Returns the zero-padded two-digit year, e.g. `"05"` for 2105.
    ///
    /// BUG FIX: pad to width 2 (consistent with
    /// `CardExpirationMonth::two_digits`); `(year % 100).to_string()`
    /// yielded a single digit for years whose last two digits are below 10.
    pub fn two_digits(&self) -> String {
        format!("{:02}", self.peek() % 100)
    }
}
impl TryFrom<u16> for CardExpirationYear {
    type Error = error_stack::Report<errors::ValidationError>;
    /// Accepts the current calendar year or later; past years are rejected.
    fn try_from(year: u16) -> Result<Self, Self::Error> {
        // `date_time::now().year()` is an i32; conversion only fails for
        // years outside the u16 range.
        let curr_year = u16::try_from(date_time::now().year()).map_err(|_| {
            report!(errors::ValidationError::InvalidValue {
                message: "invalid year".to_string()
            })
        })?;
        if year >= curr_year {
            Ok(Self(StrongSecret::<u16>::new(year)))
        } else {
            Err(report!(errors::ValidationError::InvalidValue {
                message: "invalid card expiration year".to_string()
            }))
        }
    }
}
impl<'de> Deserialize<'de> for CardExpirationYear {
    /// Deserializes a number and applies the `TryFrom` past-year validation.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        let year = u16::deserialize(deserializer)?;
        year.try_into().map_err(de::Error::custom)
    }
}
/// A card expiration date as a validated month/year pair.
#[derive(Serialize, Deserialize)]
pub struct CardExpiration {
    pub month: CardExpirationMonth,
    pub year: CardExpirationYear,
}
impl CardExpiration {
    /// Returns `true` when the card is past its expiry.
    ///
    /// The expiry instant is midnight after the *last* day of the expiry
    /// month, shifted one extra day forward to compensate for the card's
    /// (unknown) local timezone relative to UTC.
    pub fn is_expired(&self) -> Result<bool, error_stack::Report<errors::ValidationError>> {
        let current_datetime_utc = date_time::now();
        // Month was validated on construction, so this conversion is expected
        // to succeed; failure is still surfaced as a validation error.
        let expiration_month = time::Month::try_from(*self.month.peek()).map_err(|_| {
            report!(errors::ValidationError::InvalidValue {
                message: "invalid month".to_string()
            })
        })?;
        let expiration_year = *self.year.peek();
        // Last day of the month, leap-year aware via `Month::length`.
        let expiration_day = expiration_month.length(i32::from(expiration_year));
        let expiration_date =
            Date::from_calendar_date(i32::from(expiration_year), expiration_month, expiration_day)
                .map_err(|_| {
                    report!(errors::ValidationError::InvalidValue {
                        message: "error while constructing calendar date".to_string()
                    })
                })?;
        let expiration_time = Time::MIDNIGHT;
        // actual expiry date specified on card w.r.t. local timezone
        // max diff b/w utc and other timezones is 14 hours
        let mut expiration_datetime_utc = PrimitiveDateTime::new(expiration_date, expiration_time);
        // compensating time difference b/w local and utc timezone by adding a day
        expiration_datetime_utc = expiration_datetime_utc.saturating_add(Duration::days(1));
        Ok(current_datetime_utc > expiration_datetime_utc)
    }
    // Accessor for the month component.
    pub fn get_month(&self) -> &CardExpirationMonth {
        &self.month
    }
    // Accessor for the year component.
    pub fn get_year(&self) -> &CardExpirationYear {
        &self.year
    }
}
impl TryFrom<(u8, u16)> for CardExpiration {
    type Error = error_stack::Report<errors::ValidationError>;
    /// Builds a `CardExpiration` from a `(month, year)` pair, validating the
    /// month first and then the year.
    fn try_from(items: (u8, u16)) -> errors::CustomResult<Self, errors::ValidationError> {
        let (raw_month, raw_year) = items;
        Ok(Self {
            month: CardExpirationMonth::try_from(raw_month)?,
            year: CardExpirationYear::try_from(raw_year)?,
        })
    }
}
// `Deref` to the inner `StrongSecret` so callers can use `peek()` directly.
impl Deref for CardSecurityCode {
    type Target = StrongSecret<u16>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Deref for CardExpirationMonth {
    type Target = StrongSecret<u8>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl Deref for CardExpirationYear {
    type Target = StrongSecret<u16>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// File: crates/cards/src/validate.rs
use std::{collections::HashMap, fmt, ops::Deref, str::FromStr, sync::LazyLock};
use common_utils::errors::ValidationError;
use error_stack::report;
use masking::{PeekInterface, Strategy, StrongSecret, WithType};
use regex::Regex;
#[cfg(not(target_arch = "wasm32"))]
use router_env::{logger, which as router_env_which, Env};
use serde::{Deserialize, Deserializer, Serialize};
use thiserror::Error;
/// Minimum length of a card number: will not be less than 8 by ISO standards.
pub const MIN_CARD_NUMBER_LENGTH: usize = 8;
/// Maximum length of a card number: will not exceed 19 by ISO standards.
pub const MAX_CARD_NUMBER_LENGTH: usize = 19;
/// Validation error for card-number parsing; wraps a static message that is
/// used verbatim as the `Display` output.
#[derive(Debug, Deserialize, Serialize, Error)]
#[error("{0}")]
pub struct CardNumberValidationErr(&'static str);
/// Card number, masked in `Debug`/logs via [`CardNumberStrategy`].
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)]
pub struct CardNumber(StrongSecret<String, CardNumberStrategy>);
/// Network token, masked the same way as a card number.
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize)]
pub struct NetworkToken(StrongSecret<String, CardNumberStrategy>);
impl CardNumber {
    /// Returns the first 6 digits (the ISIN / BIN prefix).
    pub fn get_card_isin(&self) -> String {
        self.0.peek().chars().take(6).collect::<String>()
    }
    /// Returns the first 8 digits (the extended BIN prefix).
    pub fn get_extended_card_bin(&self) -> String {
        self.0.peek().chars().take(8).collect::<String>()
    }
    /// Returns the full card number as a plain `String` (exposes the secret).
    pub fn get_card_no(&self) -> String {
        self.0.peek().chars().collect::<String>()
    }
    /// Returns the last 4 digits in their original order.
    pub fn get_last4(&self) -> String {
        self.0
            .peek()
            .chars()
            .rev()
            .take(4)
            .collect::<String>()
            .chars()
            .rev()
            .collect::<String>()
    }
    /// Returns `Ok(true)` when the number matches the BIN pattern of more than
    /// one card network below, i.e. the card appears to be co-badged.
    ///
    /// # Errors
    ///
    /// Returns an error if any of the statically-defined network regexes
    /// failed to compile.
    pub fn is_cobadged_card(&self) -> Result<bool, error_stack::Report<ValidationError>> {
        /// Regex to identify card networks
        static CARD_NETWORK_REGEX: LazyLock<HashMap<&str, Result<Regex, regex::Error>>> =
            LazyLock::new(|| {
                let mut map = HashMap::new();
                map.insert(
                    "Mastercard",
                    Regex::new(r"^(222[1-9]|22[3-9][0-9]|2[3-6][0-9]{2}|27[0-1][0-9]|2720|5[1-5])"),
                );
                map.insert("American Express", Regex::new(r"^3[47]"));
                map.insert("Visa", Regex::new(r"^4"));
                map.insert(
                    "Discover",
                    Regex::new(
                        r"^(6011|64[4-9]|65|622126|622[1-9][0-9][0-9]|6229[0-1][0-9]|622925)",
                    ),
                );
                map.insert(
                    "Maestro",
                    Regex::new(r"^(5018|5081|5044|504681|504993|5020|502260|5038|5893|603845|603123|6304|6759|676[1-3]|6220|504834|504817|504645|504775|600206|627741)"),
                );
                map.insert(
                    "RuPay",
                    Regex::new(r"^(508227|508[5-9]|603741|60698[5-9]|60699|607[0-8]|6079[0-7]|60798[0-4]|60800[1-9]|6080[1-9]|608[1-4]|608500|6521[5-9]|652[2-9]|6530|6531[0-4]|817290|817368|817378|353800|82)"),
                );
                map.insert("Diners Club", Regex::new(r"^(36|38|39|30[0-5])"));
                map.insert("JCB", Regex::new(r"^35(2[89]|[3-8][0-9])"));
                map.insert("CarteBlanche", Regex::new(r"^389[0-9]{11}$"));
                map.insert("Sodex", Regex::new(r"^(637513)"));
                map.insert("BAJAJ", Regex::new(r"^(203040)"));
                map.insert("CartesBancaires", Regex::new(r"^(401(005|006|581)|4021(01|02)|403550|405936|406572|41(3849|4819|50(56|59|62|71|74)|6286|65(37|79)|71[7])|420110|423460|43(47(21|22)|50(48|49|50|51|52)|7875|95(09|11|15|39|98)|96(03|18|19|20|22|72))|4424(48|49|50|51|52|57)|448412|4505(19|60)|45(33|56[6-8]|61|62[^3]|6955|7452|7717|93[02379])|46(099|54(76|77)|6258|6575|98[023])|47(4107|71(73|74|86)|72(65|93)|9619)|48(1091|3622|6519)|49(7|83[5-9]|90(0[1-6]|1[0-6]|2[0-3]|3[0-3]|4[0-3]|5[0-2]|68|9[256789]))|5075(89|90|93|94|97)|51(0726|3([0-7]|8[56]|9(00|38))|5214|62(07|36)|72(22|43)|73(65|66)|7502|7647|8101|9920)|52(0993|1662|3718|7429|9227|93(13|14|31)|94(14|21|30|40|47|55|56|[6-9])|9542)|53(0901|10(28|30)|1195|23(4[4-7])|2459|25(09|34|54|56)|3801|41(02|05|11)|50(29|66)|5324|61(07|15)|71(06|12)|8011)|54(2848|5157|9538|98(5[89]))|55(39(79|93)|42(05|60)|4965|7008|88(67|82)|89(29|4[23])|9618|98(09|10))|56(0408|12(0[2-6]|4[134]|5[04678]))|58(17(0[0-7]|15|2[14]|3[16789]|4[0-9]|5[016]|6[269]|7[3789]|8[0-7]|9[017])|55(0[2-5]|7[7-9]|8[0-2])))"));
                map
            });
        // Count matching network patterns; stop as soon as a second network
        // matches, since two matches is enough to call the card co-badged.
        let mut no_of_supported_card_networks = 0;
        let card_number_str = self.get_card_no();
        for (_, regex) in CARD_NETWORK_REGEX.iter() {
            let card_regex = match regex.as_ref() {
                Ok(regex) => Ok(regex),
                Err(_) => Err(report!(ValidationError::InvalidValue {
                    message: "Invalid regex expression".into(),
                })),
            }?;
            if card_regex.is_match(&card_number_str) {
                no_of_supported_card_networks += 1;
                if no_of_supported_card_networks > 1 {
                    break;
                }
            }
        }
        Ok(no_of_supported_card_networks > 1)
    }
}
impl NetworkToken {
    /// Returns the first 6 digits (the ISIN / BIN prefix).
    pub fn get_card_isin(&self) -> String {
        self.0.peek().chars().take(6).collect::<String>()
    }
    /// Returns the first 8 digits (the extended BIN prefix).
    pub fn get_extended_card_bin(&self) -> String {
        self.0.peek().chars().take(8).collect::<String>()
    }
    /// Returns the full token as a plain `String` (exposes the secret).
    pub fn get_card_no(&self) -> String {
        self.0.peek().chars().collect::<String>()
    }
    /// Returns the last 4 digits in their original order.
    pub fn get_last4(&self) -> String {
        self.0
            .peek()
            .chars()
            .rev()
            .take(4)
            .collect::<String>()
            .chars()
            .rev()
            .collect::<String>()
    }
}
impl FromStr for CardNumber {
    type Err = CardNumberValidationErr;
    /// Parses and validates a card number.
    ///
    /// Whitespace is stripped before validation. The number must pass the
    /// character / length / Luhn checks, unless it is one of the allow-listed
    /// test cards. Note: on wasm32 targets the allow-list is always active;
    /// elsewhere it only applies in Development/Sandbox environments.
    fn from_str(card_number: &str) -> Result<Self, Self::Err> {
        // Valid test cards for threedsecureio
        let valid_test_cards = vec![
            "4000100511112003",
            "6000100611111203",
            "3000100811111072",
            "9000100111111111",
        ];
        // On non-wasm targets, gate the allow-list by environment: production
        // accepts no test cards.
        #[cfg(not(target_arch = "wasm32"))]
        let valid_test_cards = match router_env_which() {
            Env::Development | Env::Sandbox => valid_test_cards,
            Env::Production => vec![],
        };
        let card_number = card_number.split_whitespace().collect::<String>();
        let is_card_valid = sanitize_card_number(&card_number)?;
        if valid_test_cards.contains(&card_number.as_str()) || is_card_valid {
            Ok(Self(StrongSecret::new(card_number)))
        } else {
            Err(CardNumberValidationErr("card number invalid"))
        }
    }
}
impl FromStr for NetworkToken {
    type Err = CardNumberValidationErr;
    /// Parses and validates a network token using the same rules as
    /// `CardNumber::from_str` (whitespace stripping, character/length/Luhn
    /// checks, environment-gated test-card allow-list).
    fn from_str(network_token: &str) -> Result<Self, Self::Err> {
        // Valid test cards for threedsecureio
        let valid_test_network_tokens = vec![
            "4000100511112003",
            "6000100611111203",
            "3000100811111072",
            "9000100111111111",
        ];
        // On non-wasm targets, gate the allow-list by environment: production
        // accepts no test tokens.
        #[cfg(not(target_arch = "wasm32"))]
        let valid_test_network_tokens = match router_env_which() {
            Env::Development | Env::Sandbox => valid_test_network_tokens,
            Env::Production => vec![],
        };
        let network_token = network_token.split_whitespace().collect::<String>();
        let is_network_token_valid = sanitize_card_number(&network_token)?;
        if valid_test_network_tokens.contains(&network_token.as_str()) || is_network_token_valid {
            Ok(Self(StrongSecret::new(network_token)))
        } else {
            Err(CardNumberValidationErr("network token invalid"))
        }
    }
}
pub fn sanitize_card_number(card_number: &str) -> Result<bool, CardNumberValidationErr> {
let is_card_number_valid = Ok(card_number)
.and_then(validate_card_number_chars)
.and_then(validate_card_number_length)
.map(|number| luhn(&number))?;
Ok(is_card_number_valid)
}
/// Converts a card number string into its individual decimal digits.
///
/// # Errors
///
/// Fails if any character is not an ASCII decimal digit.
///
/// # Panics
///
/// Never, as a single character will never be greater than 10, or `u8`
pub fn validate_card_number_chars(number: &str) -> Result<Vec<u8>, CardNumberValidationErr> {
    number
        .chars()
        .map(|character| {
            let digit = character.to_digit(10).ok_or(CardNumberValidationErr(
                "invalid character found in card number",
            ))?;
            // safety, a single character will never be greater `u8`
            #[allow(clippy::expect_used)]
            let digit: u8 = digit
                .try_into()
                .expect("error while converting a single character to u8");
            Ok(digit)
        })
        .collect()
}
/// Ensures the digit count lies within the ISO bounds
/// (`MIN_CARD_NUMBER_LENGTH..=MAX_CARD_NUMBER_LENGTH`), passing the digits
/// through unchanged on success.
pub fn validate_card_number_length(number: Vec<u8>) -> Result<Vec<u8>, CardNumberValidationErr> {
    match number.len() {
        MIN_CARD_NUMBER_LENGTH..=MAX_CARD_NUMBER_LENGTH => Ok(number),
        _ => Err(CardNumberValidationErr("invalid card number length")),
    }
}
/// Returns `true` when `number` (most-significant digit first, one decimal
/// digit per element) passes the Luhn checksum.
///
/// Walking from the rightmost digit, every second digit is doubled and its
/// decimal digits summed; the number is valid when the total is a multiple
/// of 10. An empty slice sums to 0 and therefore returns `true`, matching the
/// previous behavior.
///
/// The checksum is accumulated in a `u32`: the previous `u8` accumulator
/// overflowed (panicking in debug builds) for slices longer than ~28 digits,
/// and this public function does not itself bound its input length.
pub fn luhn(number: &[u8]) -> bool {
    let checksum: u32 = number
        .iter()
        .rev()
        .enumerate()
        .map(|(idx, &digit)| {
            let digit = u32::from(digit);
            if idx % 2 == 1 {
                // Doubled position: sum the decimal digits of `digit * 2`.
                let doubled = digit * 2;
                doubled / 10 + doubled % 10
            } else {
                digit
            }
        })
        .sum();
    checksum % 10 == 0
}
impl TryFrom<String> for CardNumber {
    type Error = CardNumberValidationErr;
    /// Delegates to the `FromStr` implementation via `str::parse`.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        value.parse()
    }
}
impl TryFrom<String> for NetworkToken {
    type Error = CardNumberValidationErr;
    /// Delegates to the `FromStr` implementation via `str::parse`.
    fn try_from(value: String) -> Result<Self, Self::Error> {
        value.parse()
    }
}
/// Dereferences to the inner masked secret, so masking-aware formatting is
/// available directly on the wrapper.
impl Deref for CardNumber {
    type Target = StrongSecret<String, CardNumberStrategy>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
/// Dereferences to the inner masked secret, mirroring `CardNumber`.
impl Deref for NetworkToken {
    type Target = StrongSecret<String, CardNumberStrategy>;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl<'de> Deserialize<'de> for CardNumber {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
let s = String::deserialize(d)?;
Self::from_str(&s).map_err(serde::de::Error::custom)
}
}
impl<'de> Deserialize<'de> for NetworkToken {
fn deserialize<D: Deserializer<'de>>(d: D) -> Result<Self, D::Error> {
let s = String::deserialize(d)?;
Self::from_str(&s).map_err(serde::de::Error::custom)
}
}
/// Masking strategy for card-number-like secrets: reveals the first 6 digits
/// (the BIN) and masks the rest with `*`.
pub enum CardNumberStrategy {}
impl<T> Strategy<T> for CardNumberStrategy
where
    T: AsRef<str>,
{
    fn fmt(val: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let val_str: &str = val.as_ref();
        // Values outside the 15..=19 byte window are not treated as card
        // numbers and fall back to the type-name-only mask.
        if val_str.len() < 15 || val_str.len() > 19 {
            return WithType::fmt(val, f);
        }
        if let Some(value) = val_str.get(..6) {
            write!(f, "{}{}", value, "*".repeat(val_str.len() - 6))
        } else {
            // `get(..6)` only fails when byte 6 is not a char boundary,
            // i.e. the value is not plain ASCII digits.
            #[cfg(not(target_arch = "wasm32"))]
            logger::error!("Invalid card number {val_str}");
            WithType::fmt(val, f)
        }
    }
}
#[cfg(test)]
mod tests {
    #![allow(clippy::unwrap_used)]
    use masking::Secret;
    use super::*;
    #[test]
    fn valid_card_number() {
        let s = "371449635398431";
        assert_eq!(
            CardNumber::from_str(s).unwrap(),
            CardNumber(StrongSecret::from_str(s).unwrap())
        );
    }
    #[test]
    fn invalid_card_number_length() {
        // 6 digits is below MIN_CARD_NUMBER_LENGTH (8).
        let s = "371446";
        assert_eq!(
            CardNumber::from_str(s).unwrap_err().to_string(),
            "invalid card number length".to_string()
        );
    }
    #[test]
    fn card_number_with_non_digit_character() {
        let s = "371446431 A";
        assert_eq!(
            CardNumber::from_str(s).unwrap_err().to_string(),
            "invalid character found in card number".to_string()
        );
    }
    #[test]
    fn invalid_card_number() {
        // Valid characters and length but fails the Luhn check.
        let s = "371446431";
        assert_eq!(
            CardNumber::from_str(s).unwrap_err().to_string(),
            "card number invalid".to_string()
        );
    }
    #[test]
    fn card_number_no_whitespace() {
        // Verifies whitespace in the input is stripped, so the stored (and
        // masked) number contains no whitespace.
        let s = "3714 4963 5398 431";
        assert_eq!(
            CardNumber::from_str(s).unwrap().to_string(),
            "371449*********"
        );
    }
    #[test]
    fn test_valid_card_number_masking() {
        let secret: Secret<String, CardNumberStrategy> =
            Secret::new("1234567890987654".to_string());
        assert_eq!("123456**********", format!("{secret:?}"));
    }
    #[test]
    fn test_invalid_card_number_masking() {
        // 10 characters is outside the 15..=19 masking window, so only the
        // type name is shown.
        let secret: Secret<String, CardNumberStrategy> = Secret::new("9123456789".to_string());
        assert_eq!("*** alloc::string::String ***", format!("{secret:?}"));
    }
    #[test]
    fn test_valid_card_number_strong_secret_masking() {
        let card_number = CardNumber::from_str("3714 4963 5398 431").unwrap();
        let secret = &(*card_number);
        assert_eq!("371449*********", format!("{secret:?}"));
    }
    #[test]
    fn test_valid_card_number_deserialization() {
        let card_number = serde_json::from_str::<CardNumber>(r#""3714 4963 5398 431""#).unwrap();
        let secret = card_number.to_string();
        assert_eq!(r#""371449*********""#, format!("{secret:?}"));
    }
    #[test]
    fn test_invalid_card_number_deserialization() {
        let card_number = serde_json::from_str::<CardNumber>(r#""1234 5678""#);
        let error_msg = card_number.unwrap_err().to_string();
        assert_eq!(error_msg, "card number invalid".to_string());
    }
}
</crate>
|
{
"crate": "cards",
"file": null,
"files": [
"crates/cards/tests/basic.rs",
"crates/cards/src/lib.rs",
"crates/cards/src/validate.rs"
],
"module": null,
"num_files": 3,
"token_count": 6734
}
|
crate_-4044121403850866037
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: currency_conversion
Files: 4
</path>
<crate>
// File: crates/currency_conversion/src/types.rs
use std::collections::HashMap;
use common_enums::Currency;
use rust_decimal::Decimal;
use rusty_money::iso;
use crate::error::CurrencyConversionError;
/// Cached currency store of base currency
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct ExchangeRates {
    /// The currency every other rate is expressed against.
    pub base_currency: Currency,
    /// Per-currency conversion factors to/from the base currency.
    pub conversion: HashMap<Currency, CurrencyFactors>,
}
/// Stores the multiplicative factor for conversion between currency to base and vice versa
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct CurrencyFactors {
    /// The factor that will be multiplied to provide Currency output
    pub to_factor: Decimal,
    /// The factor that will be multiplied to provide for the base output
    pub from_factor: Decimal,
}
impl CurrencyFactors {
    /// Builds a factor pair; `to_factor` converts base -> currency and
    /// `from_factor` converts currency -> base.
    pub fn new(to_factor: Decimal, from_factor: Decimal) -> Self {
        Self {
            to_factor,
            from_factor,
        }
    }
}
impl ExchangeRates {
    /// Creates an exchange-rate table against the given base currency.
    pub fn new(base_currency: Currency, conversion: HashMap<Currency, CurrencyFactors>) -> Self {
        Self {
            base_currency,
            conversion,
        }
    }
    /// Converts `amt` from `from_currency` into the base currency, using the
    /// currency's `from_factor`.
    ///
    /// # Errors
    ///
    /// Fails when `from_currency` has no entry in the table, or when the
    /// multiplication overflows `Decimal`.
    pub fn forward_conversion(
        &self,
        amt: Decimal,
        from_currency: Currency,
    ) -> Result<Decimal, CurrencyConversionError> {
        let from_factor = self
            .conversion
            .get(&from_currency)
            .ok_or_else(|| {
                CurrencyConversionError::ConversionNotSupported(from_currency.to_string())
            })?
            .from_factor;
        amt.checked_mul(from_factor)
            .ok_or(CurrencyConversionError::DecimalMultiplicationFailed)
    }
    /// Converts `amt` from the base currency into `to_currency`, using the
    /// currency's `to_factor`.
    ///
    /// # Errors
    ///
    /// Fails when `to_currency` has no entry in the table, or when the
    /// multiplication overflows `Decimal`.
    pub fn backward_conversion(
        &self,
        amt: Decimal,
        to_currency: Currency,
    ) -> Result<Decimal, CurrencyConversionError> {
        let to_factor = self
            .conversion
            .get(&to_currency)
            .ok_or_else(|| {
                CurrencyConversionError::ConversionNotSupported(to_currency.to_string())
            })?
            .to_factor;
        amt.checked_mul(to_factor)
            .ok_or(CurrencyConversionError::DecimalMultiplicationFailed)
    }
}
/// Maps a `common_enums::Currency` variant to its `rusty_money` ISO-4217
/// currency definition (used for minor-unit handling in conversions).
/// The match is exhaustive, so adding a `Currency` variant forces an update.
pub fn currency_match(currency: Currency) -> &'static iso::Currency {
    match currency {
        Currency::AED => iso::AED,
        Currency::AFN => iso::AFN,
        Currency::ALL => iso::ALL,
        Currency::AMD => iso::AMD,
        Currency::ANG => iso::ANG,
        Currency::AOA => iso::AOA,
        Currency::ARS => iso::ARS,
        Currency::AUD => iso::AUD,
        Currency::AWG => iso::AWG,
        Currency::AZN => iso::AZN,
        Currency::BAM => iso::BAM,
        Currency::BBD => iso::BBD,
        Currency::BDT => iso::BDT,
        Currency::BGN => iso::BGN,
        Currency::BHD => iso::BHD,
        Currency::BIF => iso::BIF,
        Currency::BMD => iso::BMD,
        Currency::BND => iso::BND,
        Currency::BOB => iso::BOB,
        Currency::BRL => iso::BRL,
        Currency::BSD => iso::BSD,
        Currency::BTN => iso::BTN,
        Currency::BWP => iso::BWP,
        Currency::BYN => iso::BYN,
        Currency::BZD => iso::BZD,
        Currency::CAD => iso::CAD,
        Currency::CDF => iso::CDF,
        Currency::CHF => iso::CHF,
        Currency::CLF => iso::CLF,
        Currency::CLP => iso::CLP,
        Currency::CNY => iso::CNY,
        Currency::COP => iso::COP,
        Currency::CRC => iso::CRC,
        Currency::CUC => iso::CUC,
        Currency::CUP => iso::CUP,
        Currency::CVE => iso::CVE,
        Currency::CZK => iso::CZK,
        Currency::DJF => iso::DJF,
        Currency::DKK => iso::DKK,
        Currency::DOP => iso::DOP,
        Currency::DZD => iso::DZD,
        Currency::EGP => iso::EGP,
        Currency::ERN => iso::ERN,
        Currency::ETB => iso::ETB,
        Currency::EUR => iso::EUR,
        Currency::FJD => iso::FJD,
        Currency::FKP => iso::FKP,
        Currency::GBP => iso::GBP,
        Currency::GEL => iso::GEL,
        Currency::GHS => iso::GHS,
        Currency::GIP => iso::GIP,
        Currency::GMD => iso::GMD,
        Currency::GNF => iso::GNF,
        Currency::GTQ => iso::GTQ,
        Currency::GYD => iso::GYD,
        Currency::HKD => iso::HKD,
        Currency::HNL => iso::HNL,
        Currency::HRK => iso::HRK,
        Currency::HTG => iso::HTG,
        Currency::HUF => iso::HUF,
        Currency::IDR => iso::IDR,
        Currency::ILS => iso::ILS,
        Currency::INR => iso::INR,
        Currency::IQD => iso::IQD,
        Currency::IRR => iso::IRR,
        Currency::ISK => iso::ISK,
        Currency::JMD => iso::JMD,
        Currency::JOD => iso::JOD,
        Currency::JPY => iso::JPY,
        Currency::KES => iso::KES,
        Currency::KGS => iso::KGS,
        Currency::KHR => iso::KHR,
        Currency::KMF => iso::KMF,
        Currency::KPW => iso::KPW,
        Currency::KRW => iso::KRW,
        Currency::KWD => iso::KWD,
        Currency::KYD => iso::KYD,
        Currency::KZT => iso::KZT,
        Currency::LAK => iso::LAK,
        Currency::LBP => iso::LBP,
        Currency::LKR => iso::LKR,
        Currency::LRD => iso::LRD,
        Currency::LSL => iso::LSL,
        Currency::LYD => iso::LYD,
        Currency::MAD => iso::MAD,
        Currency::MDL => iso::MDL,
        Currency::MGA => iso::MGA,
        Currency::MKD => iso::MKD,
        Currency::MMK => iso::MMK,
        Currency::MNT => iso::MNT,
        Currency::MOP => iso::MOP,
        Currency::MRU => iso::MRU,
        Currency::MUR => iso::MUR,
        Currency::MVR => iso::MVR,
        Currency::MWK => iso::MWK,
        Currency::MXN => iso::MXN,
        Currency::MYR => iso::MYR,
        Currency::MZN => iso::MZN,
        Currency::NAD => iso::NAD,
        Currency::NGN => iso::NGN,
        Currency::NIO => iso::NIO,
        Currency::NOK => iso::NOK,
        Currency::NPR => iso::NPR,
        Currency::NZD => iso::NZD,
        Currency::OMR => iso::OMR,
        Currency::PAB => iso::PAB,
        Currency::PEN => iso::PEN,
        Currency::PGK => iso::PGK,
        Currency::PHP => iso::PHP,
        Currency::PKR => iso::PKR,
        Currency::PLN => iso::PLN,
        Currency::PYG => iso::PYG,
        Currency::QAR => iso::QAR,
        Currency::RON => iso::RON,
        Currency::RSD => iso::RSD,
        Currency::RUB => iso::RUB,
        Currency::RWF => iso::RWF,
        Currency::SAR => iso::SAR,
        Currency::SBD => iso::SBD,
        Currency::SCR => iso::SCR,
        Currency::SDG => iso::SDG,
        Currency::SEK => iso::SEK,
        Currency::SGD => iso::SGD,
        Currency::SHP => iso::SHP,
        Currency::SLE => iso::SLE,
        Currency::SLL => iso::SLL,
        Currency::SOS => iso::SOS,
        Currency::SRD => iso::SRD,
        Currency::SSP => iso::SSP,
        Currency::STD => iso::STD,
        Currency::STN => iso::STN,
        Currency::SVC => iso::SVC,
        Currency::SYP => iso::SYP,
        Currency::SZL => iso::SZL,
        Currency::THB => iso::THB,
        Currency::TJS => iso::TJS,
        Currency::TND => iso::TND,
        Currency::TMT => iso::TMT,
        Currency::TOP => iso::TOP,
        Currency::TTD => iso::TTD,
        Currency::TRY => iso::TRY,
        Currency::TWD => iso::TWD,
        Currency::TZS => iso::TZS,
        Currency::UAH => iso::UAH,
        Currency::UGX => iso::UGX,
        Currency::USD => iso::USD,
        Currency::UYU => iso::UYU,
        Currency::UZS => iso::UZS,
        Currency::VES => iso::VES,
        Currency::VND => iso::VND,
        Currency::VUV => iso::VUV,
        Currency::WST => iso::WST,
        Currency::XAF => iso::XAF,
        Currency::XCD => iso::XCD,
        Currency::XOF => iso::XOF,
        Currency::XPF => iso::XPF,
        Currency::YER => iso::YER,
        Currency::ZAR => iso::ZAR,
        Currency::ZMW => iso::ZMW,
        Currency::ZWL => iso::ZWL,
    }
}
// File: crates/currency_conversion/src/conversion.rs
use common_enums::Currency;
use rust_decimal::Decimal;
use rusty_money::Money;
use crate::{
error::CurrencyConversionError,
types::{currency_match, ExchangeRates},
};
/// Converts `amount` (minor units of `from_currency`) into `to_currency`.
///
/// The general route is `from_currency -> base_currency -> to_currency`;
/// when either endpoint is already the base currency, only a single
/// forward/backward step is performed.
///
/// # Errors
///
/// Propagates `CurrencyConversionError` for unsupported currencies or
/// decimal overflow.
pub fn convert(
    ex_rates: &ExchangeRates,
    from_currency: Currency,
    to_currency: Currency,
    amount: i64,
) -> Result<Decimal, CurrencyConversionError> {
    // Interpret the i64 as minor units and obtain the major-unit decimal.
    let major_amount = *Money::from_minor(amount, currency_match(from_currency)).amount();
    if to_currency == ex_rates.base_currency {
        ex_rates.forward_conversion(major_amount, from_currency)
    } else if from_currency == ex_rates.base_currency {
        ex_rates.backward_conversion(major_amount, to_currency)
    } else {
        let amount_in_base = ex_rates.forward_conversion(major_amount, from_currency)?;
        ex_rates.backward_conversion(amount_in_base, to_currency)
    }
}
#[cfg(test)]
mod tests {
    #![allow(clippy::expect_used, clippy::print_stdout)]
    use std::collections::HashMap;
    use crate::types::CurrencyFactors;
    #[test]
    fn currency_to_currency_conversion() {
        use super::*;
        let mut conversion: HashMap<Currency, CurrencyFactors> = HashMap::new();
        let inr_conversion_rates =
            CurrencyFactors::new(Decimal::new(823173, 4), Decimal::new(1214, 5));
        let szl_conversion_rates =
            CurrencyFactors::new(Decimal::new(194423, 4), Decimal::new(514, 4));
        let convert_from = Currency::SZL;
        let convert_to = Currency::INR;
        let amount = 2000;
        let base_currency = Currency::USD;
        // Register each currency against its own factors. Previously the INR
        // factors were inserted for SZL (and vice versa), so the conversion
        // ran with swapped exchange rates.
        conversion.insert(convert_from, szl_conversion_rates);
        conversion.insert(convert_to, inr_conversion_rates);
        let sample_rate = ExchangeRates::new(base_currency, conversion);
        let res =
            convert(&sample_rate, convert_from, convert_to, amount).expect("converted_currency");
        println!("The conversion from {amount} {convert_from} to {convert_to} is {res:?}");
    }
    #[test]
    fn currency_to_base_conversion() {
        use super::*;
        let mut conversion: HashMap<Currency, CurrencyFactors> = HashMap::new();
        let inr_conversion_rates =
            CurrencyFactors::new(Decimal::new(823173, 4), Decimal::new(1214, 5));
        let usd_conversion_rates = CurrencyFactors::new(Decimal::new(1, 0), Decimal::new(1, 0));
        let convert_from = Currency::INR;
        let convert_to = Currency::USD;
        let amount = 2000;
        let base_currency = Currency::USD;
        conversion.insert(convert_from, inr_conversion_rates);
        conversion.insert(convert_to, usd_conversion_rates);
        let sample_rate = ExchangeRates::new(base_currency, conversion);
        let res =
            convert(&sample_rate, convert_from, convert_to, amount).expect("converted_currency");
        println!("The conversion from {amount} {convert_from} to {convert_to} is {res:?}");
    }
    #[test]
    fn base_to_currency_conversion() {
        use super::*;
        let mut conversion: HashMap<Currency, CurrencyFactors> = HashMap::new();
        let inr_conversion_rates =
            CurrencyFactors::new(Decimal::new(823173, 4), Decimal::new(1214, 5));
        let usd_conversion_rates = CurrencyFactors::new(Decimal::new(1, 0), Decimal::new(1, 0));
        let convert_from = Currency::USD;
        let convert_to = Currency::INR;
        let amount = 2000;
        let base_currency = Currency::USD;
        conversion.insert(convert_from, usd_conversion_rates);
        conversion.insert(convert_to, inr_conversion_rates);
        let sample_rate = ExchangeRates::new(base_currency, conversion);
        let res =
            convert(&sample_rate, convert_from, convert_to, amount).expect("converted_currency");
        println!("The conversion from {amount} {convert_from} to {convert_to} is {res:?}");
    }
}
// File: crates/currency_conversion/src/error.rs
/// Errors that can occur while converting between currencies.
#[derive(Debug, thiserror::Error, serde::Serialize)]
#[serde(tag = "type", content = "info", rename_all = "snake_case")]
pub enum CurrencyConversionError {
    /// The `Decimal` multiplication overflowed.
    #[error("Currency Conversion isn't possible")]
    DecimalMultiplicationFailed,
    /// No conversion factors are registered for the named currency.
    #[error("Currency not supported: '{0}'")]
    ConversionNotSupported(String),
}
// File: crates/currency_conversion/src/lib.rs
pub mod conversion;
pub mod error;
pub mod types;
</crate>
|
{
"crate": "currency_conversion",
"file": null,
"files": [
"crates/currency_conversion/src/types.rs",
"crates/currency_conversion/src/conversion.rs",
"crates/currency_conversion/src/error.rs",
"crates/currency_conversion/src/lib.rs"
],
"module": null,
"num_files": 4,
"token_count": 3212
}
|
crate_8172294842628083536
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: smithy-generator
Files: 2
</path>
<crate>
// File: crates/smithy-generator/build.rs
// crates/smithy-generator/build.rs
use std::{fs, path::Path};
use regex::Regex;
/// Build-script entry point: re-runs when any workspace source changes and
/// regenerates the SmithyModel registry.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    println!("cargo:rerun-if-changed=../");
    run_build()
}
/// Scans every crate under `<workspace>/crates` for `SmithyModel` derives and
/// writes the generated registry file into `OUT_DIR`.
/// Per-crate scan failures are reported as cargo warnings, not hard errors.
fn run_build() -> Result<(), Box<dyn std::error::Error>> {
    let workspace_root = get_workspace_root()?;
    let mut smithy_models = Vec::new();
    // Scan all crates in the workspace for SmithyModel derives
    let crates_dir = workspace_root.join("crates");
    if let Ok(entries) = fs::read_dir(&crates_dir) {
        for entry in entries.flatten() {
            if entry.file_type().map(|ft| ft.is_dir()).unwrap_or(false) {
                let crate_path = entry.path();
                let crate_name = match crate_path.file_name() {
                    Some(name) => name.to_string_lossy(),
                    None => {
                        println!(
                            "cargo:warning=Skipping crate with invalid path: {}",
                            crate_path.display()
                        );
                        continue;
                    }
                };
                // Skip the smithy crate itself to avoid self-dependency
                if crate_name == "smithy"
                    || crate_name == "smithy-core"
                    || crate_name == "smithy-generator"
                {
                    continue;
                }
                if let Err(e) =
                    scan_crate_for_smithy_models(&crate_path, &crate_name, &mut smithy_models)
                {
                    println!("cargo:warning=Failed to scan crate {}: {}", crate_name, e);
                }
            }
        }
    }
    // Generate the registry file
    generate_model_registry(&smithy_models)?;
    Ok(())
}
/// Resolves the workspace root from `CARGO_MANIFEST_DIR`.
///
/// This crate lives at `<workspace>/crates/smithy-generator`, so the root is
/// two directory levels above the crate's manifest directory.
fn get_workspace_root() -> Result<std::path::PathBuf, Box<dyn std::error::Error>> {
    let manifest_dir = std::env::var("CARGO_MANIFEST_DIR")
        .map_err(|_| "CARGO_MANIFEST_DIR environment variable not set")?;
    let crate_dir = Path::new(&manifest_dir);
    let crates_dir = crate_dir
        .parent()
        .ok_or("Cannot get parent directory of CARGO_MANIFEST_DIR")?;
    Ok(crates_dir
        .parent()
        .ok_or("Cannot get workspace root directory")?
        .to_path_buf())
}
/// Scans one crate's `src` directory for `SmithyModel` derives, appending any
/// findings to `models`. A crate without a `src` directory is silently
/// skipped.
fn scan_crate_for_smithy_models(
    crate_path: &Path,
    crate_name: &str,
    models: &mut Vec<SmithyModelInfo>,
) -> Result<(), Box<dyn std::error::Error>> {
    let src_path = crate_path.join("src");
    if src_path.exists() {
        scan_directory(&src_path, crate_name, "", models)?;
    }
    Ok(())
}
/// Recursively walks `dir`, tracking the `::`-joined module path, and scans
/// each `.rs` file found. File-level scan failures become cargo warnings.
fn scan_directory(
    dir: &Path,
    crate_name: &str,
    module_path: &str,
    models: &mut Vec<SmithyModelInfo>,
) -> Result<(), Box<dyn std::error::Error>> {
    if let Ok(entries) = fs::read_dir(dir) {
        for entry in entries.flatten() {
            let path = entry.path();
            if path.is_dir() {
                let dir_name = match path.file_name() {
                    Some(name) => name.to_string_lossy(),
                    None => {
                        println!(
                            "cargo:warning=Skipping directory with invalid name: {}",
                            path.display()
                        );
                        continue;
                    }
                };
                // Subdirectories extend the module path (e.g. "a" -> "a::b").
                let new_module_path = if module_path.is_empty() {
                    dir_name.to_string()
                } else {
                    format!("{}::{}", module_path, dir_name)
                };
                scan_directory(&path, crate_name, &new_module_path, models)?;
            } else if path.extension().map(|ext| ext == "rs").unwrap_or(false) {
                if let Err(e) = scan_rust_file(&path, crate_name, module_path, models) {
                    println!(
                        "cargo:warning=Failed to scan Rust file {}: {}",
                        path.display(),
                        e
                    );
                }
            }
        }
    }
    Ok(())
}
/// Scans a single Rust source file for `#[derive(...)]` attributes containing
/// `SmithyModel` and records each matching struct/enum with its module path.
/// This is text-based (regex) detection, not a real parse, so unusual
/// formatting may be missed.
fn scan_rust_file(
    file_path: &Path,
    crate_name: &str,
    module_path: &str,
    models: &mut Vec<SmithyModelInfo>,
) -> Result<(), Box<dyn std::error::Error>> {
    if let Ok(content) = fs::read_to_string(file_path) {
        // Enhanced regex that handles comments, doc comments, and multiple attributes
        // between derive and struct/enum declarations
        let re = Regex::new(r"(?ms)^#\[derive\(([^)]*(?:\([^)]*\))*[^)]*)\)\]\s*(?:(?:#\[[^\]]*\]\s*)|(?://[^\r\n]*\s*)|(?:///[^\r\n]*\s*)|(?:/\*.*?\*/\s*))*(?:pub\s+)?(?:struct|enum)\s+([A-Z][A-Za-z0-9_]*)\s*[<\{\(]")
            .map_err(|e| format!("Failed to compile regex: {}", e))?;
        for captures in re.captures_iter(&content) {
            // Capture 1: the derive list; capture 2: the struct/enum name.
            let derive_content = match captures.get(1) {
                Some(capture) => capture.as_str(),
                None => {
                    println!(
                        "cargo:warning=Missing derive content in regex capture for {}",
                        file_path.display()
                    );
                    continue;
                }
            };
            let item_name = match captures.get(2) {
                Some(capture) => capture.as_str(),
                None => {
                    println!(
                        "cargo:warning=Missing item name in regex capture for {}",
                        file_path.display()
                    );
                    continue;
                }
            };
            // Check if "SmithyModel" is present in the derive macro's content.
            if derive_content.contains("SmithyModel") {
                // Validate that the item name is a valid Rust identifier
                if is_valid_rust_identifier(item_name) {
                    let full_module_path = create_module_path(file_path, crate_name, module_path)?;
                    models.push(SmithyModelInfo {
                        struct_name: item_name.to_string(),
                        module_path: full_module_path,
                    });
                } else {
                    println!(
                        "cargo:warning=Skipping invalid identifier: {} in {}",
                        item_name,
                        file_path.display()
                    );
                }
            }
        }
    }
    Ok(())
}
/// Returns `true` when `name` is a plausible (ASCII) Rust identifier:
/// non-empty, starts with a letter or underscore, contains only
/// alphanumerics/underscores, and is not a Rust keyword.
fn is_valid_rust_identifier(name: &str) -> bool {
    // Rust identifiers must start with a letter or underscore; `None` here
    // also covers the empty string.
    let first_char = match name.chars().next() {
        Some(ch) => ch,
        None => return false,
    };
    if !first_char.is_ascii_alphabetic() && first_char != '_' {
        return false;
    }
    // Strict and reserved Rust keywords. Note: "is" was previously listed
    // here, but it is not a Rust keyword and must not be rejected.
    let keywords = [
        "as", "break", "const", "continue", "crate", "else", "enum", "extern", "false", "fn",
        "for", "if", "impl", "in", "let", "loop", "match", "mod", "move", "mut", "pub", "ref",
        "return", "self", "Self", "static", "struct", "super", "trait", "true", "type", "unsafe",
        "use", "where", "while", "async", "await", "dyn", "abstract", "become", "box", "do",
        "final", "macro", "override", "priv", "typeof", "unsized", "virtual", "yield", "try",
    ];
    if keywords.contains(&name) {
        return false;
    }
    // All other characters must be alphanumeric or underscore
    name.chars().all(|c| c.is_ascii_alphanumeric() || c == '_')
}
/// Builds the fully-qualified module path for an item found in `file_path`.
///
/// Hyphens in the crate name become underscores. `lib.rs`/`mod.rs` contribute
/// no segment of their own; any other file contributes its stem as the final
/// segment.
///
/// # Errors
///
/// Fails when the file name cannot be extracted as UTF-8.
fn create_module_path(
    file_path: &Path,
    crate_name: &str,
    module_path: &str,
) -> Result<String, Box<dyn std::error::Error>> {
    let stem = file_path
        .file_stem()
        .and_then(|s| s.to_str())
        .ok_or_else(|| {
            format!(
                "Cannot extract file name from path: {}",
                file_path.display()
            )
        })?;
    let crate_ident = crate_name.replace('-', "_");
    let mut segments = vec![crate_ident.as_str()];
    if !module_path.is_empty() {
        segments.push(module_path);
    }
    if stem != "lib" && stem != "mod" {
        segments.push(stem);
    }
    Ok(segments.join("::"))
}
/// A `SmithyModel`-deriving item discovered during the workspace scan.
#[derive(Debug)]
struct SmithyModelInfo {
    // Name of the struct/enum that derives SmithyModel.
    struct_name: String,
    // Fully-qualified module path used to import the item in the registry.
    module_path: String,
}
/// Writes `OUT_DIR/model_registry.rs`, defining `discover_smithy_models()`
/// which imports every discovered item and collects its generated model.
/// When no models were found, an empty (logging) function is emitted instead.
fn generate_model_registry(models: &[SmithyModelInfo]) -> Result<(), Box<dyn std::error::Error>> {
    let out_dir = std::env::var("OUT_DIR").map_err(|_| "OUT_DIR environment variable not set")?;
    let registry_path = Path::new(&out_dir).join("model_registry.rs");
    let mut content = String::new();
    content.push_str("// Auto-generated model registry\n");
    content.push_str("// DO NOT EDIT - This file is generated by build.rs\n\n");
    if !models.is_empty() {
        content.push_str("use smithy_core::{SmithyModel, SmithyModelGenerator};\n\n");
        // Generate imports
        for model in models {
            content.push_str(&format!(
                "use {}::{};\n",
                model.module_path, model.struct_name
            ));
        }
        content.push_str("\npub fn discover_smithy_models() -> Vec<SmithyModel> {\n");
        content.push_str("    let mut models = Vec::new();\n\n");
        // Generate model collection calls
        for model in models {
            content.push_str(&format!(
                "    models.push({}::generate_smithy_model());\n",
                model.struct_name
            ));
        }
        content.push_str("\n    models\n");
        content.push_str("}\n");
    } else {
        // Generate empty function if no models found
        content.push_str("use smithy_core::SmithyModel;\n\n");
        content.push_str("pub fn discover_smithy_models() -> Vec<SmithyModel> {\n");
        content.push_str(
            "    router_env::logger::info!(\"No SmithyModel structs found in workspace\");\n",
        );
        content.push_str("    Vec::new()\n");
        content.push_str("}\n");
    }
    fs::write(&registry_path, content).map_err(|e| {
        format!(
            "Failed to write model registry to {}: {}",
            registry_path.display(),
            e
        )
    })?;
    Ok(())
}
// File: crates/smithy-generator/src/main.rs
// crates/smithy-generator/main.rs
use std::path::Path;
use router_env::logger;
use smithy_core::SmithyGenerator;
// Include the auto-generated model registry
include!(concat!(env!("OUT_DIR"), "/model_registry.rs"));
/// CLI entry point: loads the build-time-generated model registry, feeds each
/// discovered model into the generator, and writes Smithy IDL files under
/// `smithy/models` relative to the current working directory.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut generator = SmithyGenerator::new();
    logger::info!("Discovering Smithy models from workspace...");
    // Automatically discover and add all models
    let models = discover_smithy_models();
    logger::info!("Found {} Smithy models", models.len());
    if models.is_empty() {
        // Nothing to generate; explain the likely causes and exit cleanly.
        logger::info!("No SmithyModel structs found. Make sure your structs:");
        logger::info!("  1. Derive SmithyModel: #[derive(SmithyModel)]");
        logger::info!("  2. Are in a crate that smithy can access");
        logger::info!("  3. Have the correct smithy attributes");
        return Ok(());
    }
    for model in models {
        logger::info!("  Processing namespace: {}", model.namespace);
        let shape_names: Vec<_> = model.shapes.keys().collect();
        logger::info!("  Shapes: {:?}", shape_names);
        generator.add_model(model);
    }
    logger::info!("Generating Smithy IDL files...");
    // Generate IDL files
    let output_dir = Path::new("smithy/models");
    let absolute_output_dir = std::env::current_dir()?.join(output_dir);
    logger::info!("Output directory: {}", absolute_output_dir.display());
    generator.generate_idl(output_dir)?;
    logger::info!("✅ Smithy models generated successfully!");
    logger::info!("Files written to: {}", absolute_output_dir.display());
    // List generated files
    if let Ok(entries) = std::fs::read_dir(output_dir) {
        logger::info!("Generated files:");
        for entry in entries.flatten() {
            if entry.file_type().map(|ft| ft.is_file()).unwrap_or(false) {
                logger::info!("  - {}", entry.file_name().to_string_lossy());
            }
        }
    }
    Ok(())
}
</crate>
|
{
"crate": "smithy-generator",
"file": null,
"files": [
"crates/smithy-generator/build.rs",
"crates/smithy-generator/src/main.rs"
],
"module": null,
"num_files": 2,
"token_count": 2806
}
|
crate_4898127784085147749
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: drainer
Files: 16
</path>
<crate>
// File: crates/drainer/build.rs
/// Build script: emits cargo build metadata via `vergen` when the feature is
/// enabled; otherwise a no-op.
fn main() {
    #[cfg(feature = "vergen")]
    router_env::vergen::generate_cargo_instructions();
}
// File: crates/drainer/src/stream.rs
use std::collections::HashMap;
use redis_interface as redis;
use router_env::{logger, tracing};
use crate::{errors, metrics, Store};
/// Entries read from one Redis stream: `(entry_id, field -> value)` pairs.
pub type StreamEntries = Vec<(String, HashMap<String, String>)>;
/// Result of a stream read, keyed by stream name.
pub type StreamReadResult = HashMap<String, StreamEntries>;
impl Store {
    /// Builds the Redis stream name for a shard key, using the `{...}` hash
    /// tag so all keys of one shard land on the same Redis cluster slot.
    #[inline(always)]
    pub fn drainer_stream(&self, shard_key: &str) -> String {
        // Example: {shard_5}_drainer_stream
        format!("{{{}}}_{}", shard_key, self.config.drainer_stream_name,)
    }
    /// Key used as a lock flag marking a stream as currently being drained.
    #[inline(always)]
    pub(crate) fn get_stream_key_flag(&self, stream_index: u8) -> String {
        format!("{}_in_use", self.get_drainer_stream_name(stream_index))
    }
    /// Stream name for the given shard index.
    #[inline(always)]
    pub(crate) fn get_drainer_stream_name(&self, stream_index: u8) -> String {
        self.drainer_stream(format!("shard_{stream_index}").as_str())
    }
    /// Attempts to acquire the stream's lock flag (SETNX). Returns `true`
    /// only when this caller set the flag; Redis errors are logged and
    /// treated as "not available".
    #[router_env::instrument(skip_all)]
    pub async fn is_stream_available(&self, stream_index: u8) -> bool {
        let stream_key_flag = self.get_stream_key_flag(stream_index);
        match self
            .redis_conn
            .set_key_if_not_exists_with_expiry(&stream_key_flag.as_str().into(), true, None)
            .await
        {
            Ok(resp) => resp == redis::types::SetnxReply::KeySet,
            Err(error) => {
                logger::error!(operation="lock_stream",err=?error);
                false
            }
        }
    }
    /// Releases the stream's lock flag. An already-absent flag is logged but
    /// not treated as an error.
    pub async fn make_stream_available(&self, stream_name_flag: &str) -> errors::DrainerResult<()> {
        match self.redis_conn.delete_key(&stream_name_flag.into()).await {
            Ok(redis::DelReply::KeyDeleted) => Ok(()),
            Ok(redis::DelReply::KeyNotDeleted) => {
                logger::error!("Tried to unlock a stream which is already unlocked");
                Ok(())
            }
            Err(error) => Err(errors::DrainerError::from(error).into()),
        }
    }
    /// Reads up to `max_read_count` entries from the start of the stream,
    /// recording the read latency metric per stream.
    pub async fn read_from_stream(
        &self,
        stream_name: &str,
        max_read_count: u64,
    ) -> errors::DrainerResult<StreamReadResult> {
        // "0-0" id gives first entry
        let stream_id = "0-0";
        let (output, execution_time) = common_utils::date_time::time_it(|| async {
            self.redis_conn
                .stream_read_entries(stream_name, stream_id, Some(max_read_count))
                .await
                .map_err(errors::DrainerError::from)
        })
        .await;
        metrics::REDIS_STREAM_READ_TIME.record(
            execution_time,
            router_env::metric_attributes!(("stream", stream_name.to_owned())),
        );
        Ok(output?)
    }
    /// Trims all entries strictly below `minimum_entry_id`, then deletes that
    /// entry itself. Returns the total number of entries removed.
    pub async fn trim_from_stream(
        &self,
        stream_name: &str,
        minimum_entry_id: &str,
    ) -> errors::DrainerResult<usize> {
        let trim_kind = redis::StreamCapKind::MinID;
        let trim_type = redis::StreamCapTrim::Exact;
        let trim_id = minimum_entry_id;
        let (trim_result, execution_time) =
            common_utils::date_time::time_it::<errors::DrainerResult<_>, _, _>(|| async {
                let trim_result = self
                    .redis_conn
                    .stream_trim_entries(&stream_name.into(), (trim_kind, trim_type, trim_id))
                    .await
                    .map_err(errors::DrainerError::from)?;
                // Since xtrim deletes entries below given id excluding the given id.
                // Hence, deleting the minimum entry id
                self.redis_conn
                    .stream_delete_entries(&stream_name.into(), minimum_entry_id)
                    .await
                    .map_err(errors::DrainerError::from)?;
                Ok(trim_result)
            })
            .await;
        metrics::REDIS_STREAM_TRIM_TIME.record(
            execution_time,
            router_env::metric_attributes!(("stream", stream_name.to_owned())),
        );
        // adding 1 because we are deleting the given id too
        Ok(trim_result? + 1)
    }
    /// Deletes a single entry from the stream, recording the delete latency
    /// metric per stream.
    pub async fn delete_from_stream(
        &self,
        stream_name: &str,
        entry_id: &str,
    ) -> errors::DrainerResult<()> {
        let (_trim_result, execution_time) =
            common_utils::date_time::time_it::<errors::DrainerResult<_>, _, _>(|| async {
                self.redis_conn
                    .stream_delete_entries(&stream_name.into(), entry_id)
                    .await
                    .map_err(errors::DrainerError::from)?;
                Ok(())
            })
            .await;
        metrics::REDIS_STREAM_DEL_TIME.record(
            execution_time,
            router_env::metric_attributes!(("stream", stream_name.to_owned())),
        );
        Ok(())
    }
}
// File: crates/drainer/src/logger.rs
/// Re-exports of the `router_env` logging macros used throughout this crate.
#[doc(inline)]
pub use router_env::{debug, error, info, warn};
// File: crates/drainer/src/types.rs
use std::collections::HashMap;
use common_utils::errors;
use error_stack::ResultExt;
use serde::{de::value::MapDeserializer, Deserialize, Serialize};
use crate::{
kv,
utils::{deserialize_db_op, deserialize_i64},
};
/// A single drainer job as stored in a Redis stream entry.
#[derive(Deserialize, Serialize)]
pub struct StreamData {
    // Id of the originating request; recorded on the tracing span during drain.
    pub request_id: String,
    // Id of the affected entity; recorded on the tracing span during drain.
    pub global_id: String,
    // Database operation to replay; parsed by a custom deserializer.
    #[serde(deserialize_with = "deserialize_db_op")]
    pub typed_sql: kv::DBOperation,
    // Unix timestamp (seconds) the job was queued; used to compute drainer delay.
    #[serde(deserialize_with = "deserialize_i64")]
    pub pushed_at: i64,
}
impl StreamData {
    /// Deserializes a `StreamData` from the field/value map of a Redis stream
    /// entry, feeding the map directly into serde.
    ///
    /// # Errors
    /// Returns `ParsingError::StructParseFailure` when a field is missing or
    /// one of the custom deserializers fails.
    pub fn from_hashmap(
        hashmap: HashMap<String, String>,
    ) -> errors::CustomResult<Self, errors::ParsingError> {
        // MapDeserializer lets serde consume the hashmap as key/value pairs
        // without building an intermediate JSON document.
        let iter = MapDeserializer::<
            '_,
            std::collections::hash_map::IntoIter<String, String>,
            serde_json::error::Error,
        >::new(hashmap.into_iter());
        Self::deserialize(iter)
            .change_context(errors::ParsingError::StructParseFailure("StreamData"))
    }
}
// File: crates/drainer/src/handler.rs
use std::{
collections::HashMap,
sync::{atomic, Arc},
};
use common_utils::id_type;
use router_env::tracing::Instrument;
use tokio::{
sync::{mpsc, oneshot},
time::{self, Duration},
};
use crate::{
errors, instrument, logger, metrics, query::ExecuteQuery, tracing, utils, DrainerSettings,
Store, StreamData,
};
/// Handler handles the spawning and closing of drainer
/// Arc is used to enable creating a listener for graceful shutdown
#[derive(Clone)]
pub struct Handler {
    // Shared state; cloning a Handler only bumps the Arc refcount.
    inner: Arc<HandlerInner>,
}
/// Lets `HandlerInner` fields and methods be accessed directly on a `Handler`.
impl std::ops::Deref for Handler {
    type Target = HandlerInner;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
pub struct HandlerInner {
    // Poll interval while waiting for active tasks to drain during shutdown.
    shutdown_interval: Duration,
    // Sleep between scheduling-loop iterations.
    loop_interval: Duration,
    // Count of in-flight drainer tasks; gates graceful shutdown.
    active_tasks: Arc<atomic::AtomicU64>,
    conf: DrainerSettings,
    // One store per tenant, keyed by tenant id.
    stores: HashMap<id_type::TenantId, Arc<Store>>,
    // Cleared to stop the scheduling loop.
    running: Arc<atomic::AtomicBool>,
}
impl Handler {
    /// Builds the handler from the drainer settings and the per-tenant stores;
    /// all shared state lives behind a single `Arc`.
    pub fn from_conf(
        conf: DrainerSettings,
        stores: HashMap<id_type::TenantId, Arc<Store>>,
    ) -> Self {
        let shutdown_interval = Duration::from_millis(conf.shutdown_interval.into());
        let loop_interval = Duration::from_millis(conf.loop_interval.into());
        let active_tasks = Arc::new(atomic::AtomicU64::new(0));
        let running = Arc::new(atomic::AtomicBool::new(true));
        let handler = HandlerInner {
            shutdown_interval,
            loop_interval,
            active_tasks,
            conf,
            stores,
            running,
        };
        Self {
            inner: Arc::new(handler),
        }
    }
    /// Signals the scheduling loop in [`Self::spawn`] to stop after its
    /// current iteration.
    pub fn close(&self) {
        self.running.store(false, atomic::Ordering::SeqCst);
    }
    /// Main scheduling loop: while running, tries to lock the stream at the
    /// current index for every tenant store, spawns a detached drainer task
    /// for each stream it locked, advances the index, and sleeps.
    pub async fn spawn(&self) -> errors::DrainerResult<()> {
        let mut stream_index: u8 = 0;
        let jobs_picked = Arc::new(atomic::AtomicU8::new(0));
        while self.running.load(atomic::Ordering::SeqCst) {
            metrics::DRAINER_HEALTH.add(1, &[]);
            for store in self.stores.values() {
                if store.is_stream_available(stream_index).await {
                    // Detached on purpose: completion is tracked through
                    // `active_tasks`, not the join handle.
                    let _task_handle = tokio::spawn(
                        drainer_handler(
                            store.clone(),
                            stream_index,
                            self.conf.max_read_count,
                            self.active_tasks.clone(),
                            jobs_picked.clone(),
                        )
                        .in_current_span(),
                    );
                }
            }
            stream_index = utils::increment_stream_index(
                (stream_index, jobs_picked.clone()),
                self.conf.num_partitions,
            )
            .await;
            time::sleep(self.loop_interval).await;
        }
        Ok(())
    }
    /// Waits for a shutdown signal, then blocks until every active drainer
    /// task has finished before stopping the scheduling loop via `close`.
    pub(crate) async fn shutdown_listener(&self, mut rx: mpsc::Receiver<()>) {
        while let Some(_c) = rx.recv().await {
            logger::info!("Awaiting shutdown!");
            metrics::SHUTDOWN_SIGNAL_RECEIVED.add(1, &[]);
            let shutdown_started = time::Instant::now();
            rx.close();
            //Check until the active tasks are zero. This does not include the tasks in the stream.
            while self.active_tasks.load(atomic::Ordering::SeqCst) != 0 {
                time::sleep(self.shutdown_interval).await;
            }
            logger::info!("Terminating drainer");
            metrics::SUCCESSFUL_SHUTDOWN.add(1, &[]);
            // Cleanup time is reported in milliseconds.
            let shutdown_ended = shutdown_started.elapsed().as_secs_f64() * 1000f64;
            metrics::CLEANUP_TIME.record(shutdown_ended, &[]);
            self.close();
        }
        logger::info!(
            tasks_remaining = self.active_tasks.load(atomic::Ordering::SeqCst),
            "Drainer shutdown successfully"
        )
    }
    /// Spawns tasks that watch the Redis connection and push a shutdown signal
    /// on `tx` if Redis reports an error. Fails when no store (and hence no
    /// Redis connection) is configured.
    pub fn spawn_error_handlers(&self, tx: mpsc::Sender<()>) -> errors::DrainerResult<()> {
        let (redis_error_tx, redis_error_rx) = oneshot::channel();
        // Any store's connection works here; they share the same Redis server.
        let redis_conn_clone = self
            .stores
            .values()
            .next()
            .map(|store| store.redis_conn.clone());
        match redis_conn_clone {
            None => {
                logger::error!("No redis connection found");
                Err(
                    errors::DrainerError::UnexpectedError("No redis connection found".to_string())
                        .into(),
                )
            }
            Some(redis_conn_clone) => {
                // Spawn a task to monitor if redis is down or not
                let _task_handle = tokio::spawn(
                    async move { redis_conn_clone.on_error(redis_error_tx).await }
                        .in_current_span(),
                );
                //Spawns a task to send shutdown signal if redis goes down
                let _task_handle =
                    tokio::spawn(redis_error_receiver(redis_error_rx, tx).in_current_span());
                Ok(())
            }
        }
    }
}
pub async fn redis_error_receiver(rx: oneshot::Receiver<()>, shutdown_channel: mpsc::Sender<()>) {
match rx.await {
Ok(_) => {
logger::error!("The redis server failed");
let _ = shutdown_channel.send(()).await.map_err(|err| {
logger::error!("Failed to send signal to the shutdown channel {err}")
});
}
Err(err) => {
logger::error!("Channel receiver error {err}");
}
}
}
/// Wrapper around one drain run: maintains the `active_tasks` counter, runs
/// the drain, and always releases the stream's lock flag afterwards.
#[router_env::instrument(skip_all)]
async fn drainer_handler(
    store: Arc<Store>,
    stream_index: u8,
    max_read_count: u64,
    active_tasks: Arc<atomic::AtomicU64>,
    jobs_picked: Arc<atomic::AtomicU8>,
) -> errors::DrainerResult<()> {
    active_tasks.fetch_add(1, atomic::Ordering::Release);
    let stream_name = store.get_drainer_stream_name(stream_index);
    let drainer_result = Box::pin(drainer(
        store.clone(),
        max_read_count,
        stream_name.as_str(),
        jobs_picked,
    ))
    .await;
    // Drain errors are logged but must not prevent the unlock below.
    if let Err(error) = drainer_result {
        logger::error!(?error)
    }
    let flag_stream_name = store.get_stream_key_flag(stream_index);
    let output = store.make_stream_available(flag_stream_name.as_str()).await;
    active_tasks.fetch_sub(1, atomic::Ordering::Release);
    output.inspect_err(|err| logger::error!(operation = "unlock_stream", err=?err))
}
/// Drains one stream: reads up to `max_read_count` entries, replays each
/// queued SQL operation in order, then trims (or, on legacy Redis, deletes
/// per-entry) the processed range from the stream.
#[instrument(skip_all, fields(global_id, request_id, session_id))]
async fn drainer(
    store: Arc<Store>,
    max_read_count: u64,
    stream_name: &str,
    jobs_picked: Arc<atomic::AtomicU8>,
) -> errors::DrainerResult<()> {
    let stream_read = match store.read_from_stream(stream_name, max_read_count).await {
        Ok(result) => {
            jobs_picked.fetch_add(1, atomic::Ordering::SeqCst);
            result
        }
        Err(error) => {
            // An empty or missing stream is expected and not an error.
            if let errors::DrainerError::RedisError(redis_err) = error.current_context() {
                if let redis_interface::errors::RedisError::StreamEmptyOrNotAvailable =
                    redis_err.current_context()
                {
                    metrics::STREAM_EMPTY.add(1, &[]);
                    return Ok(());
                } else {
                    return Err(error);
                }
            } else {
                return Err(error);
            }
        }
    };
    // parse_stream_entries returns error if no entries is found, handle it
    let entries = utils::parse_stream_entries(
        &stream_read,
        store.redis_conn.add_prefix(stream_name).as_str(),
    )?;
    let read_count = entries.len();
    metrics::JOBS_PICKED_PER_STREAM.add(
        u64::try_from(read_count).unwrap_or(u64::MIN),
        router_env::metric_attributes!(("stream", stream_name.to_owned())),
    );
    let session_id = common_utils::generate_id_with_default_len("drainer_session");
    // Newest entry that was handled (executed or safely skipped); the stream
    // is trimmed up to this id after the loop.
    let mut last_processed_id = String::new();
    for (entry_id, entry) in entries.clone() {
        let data = match StreamData::from_hashmap(entry) {
            Ok(data) => data,
            Err(err) => {
                logger::error!(operation = "deserialization", err=?err);
                metrics::STREAM_PARSE_FAIL.add(
                    1,
                    router_env::metric_attributes!(("operation", "deserialization")),
                );
                // break from the loop in case of a deser error
                break;
            }
        };
        tracing::Span::current().record("request_id", data.request_id);
        tracing::Span::current().record("global_id", data.global_id);
        tracing::Span::current().record("session_id", &session_id);
        match data.typed_sql.execute_query(&store, data.pushed_at).await {
            Ok(_) => {
                last_processed_id = entry_id;
            }
            Err(err) => match err.current_context() {
                // In case of Uniqueviolation we can't really do anything to fix it so just clear
                // it from the stream
                diesel_models::errors::DatabaseError::UniqueViolation => {
                    last_processed_id = entry_id;
                }
                // break from the loop in case of an error in query
                _ => break,
            },
        }
        // Legacy path: delete each processed entry individually instead of a
        // single trim at the end (presumably for older Redis — TODO confirm).
        if store.use_legacy_version() {
            store
                .delete_from_stream(stream_name, &last_processed_id)
                .await?;
        }
    }
    if !(last_processed_id.is_empty() || store.use_legacy_version()) {
        let entries_trimmed = store
            .trim_from_stream(stream_name, &last_processed_id)
            .await?;
        if read_count != entries_trimmed {
            logger::error!(
                read_entries = %read_count,
                trimmed_entries = %entries_trimmed,
                ?entries,
                "Assertion Failed no. of entries read from the stream doesn't match no. of entries trimmed"
            );
        }
    } else {
        logger::error!(read_entries = %read_count,?entries,"No streams were processed in this session");
    }
    Ok(())
}
// File: crates/drainer/src/lib.rs
mod connection;
pub mod errors;
mod handler;
mod health_check;
pub mod logger;
pub(crate) mod metrics;
mod query;
pub mod services;
pub mod settings;
mod stream;
mod types;
mod utils;
use std::{collections::HashMap, sync::Arc};
mod secrets_transformers;
use actix_web::dev::Server;
use common_utils::{id_type, signals::get_allowed_signals};
use diesel_models::kv;
use error_stack::ResultExt;
use hyperswitch_interfaces::secrets_interface::secret_state::RawSecret;
use router_env::{
instrument,
tracing::{self, Instrument},
};
use tokio::sync::mpsc;
/// Crate-wide alias for settings whose secrets have been resolved to raw values.
pub(crate) type Settings = settings::Settings<RawSecret>;
use crate::{
connection::pg_connection, services::Store, settings::DrainerSettings, types::StreamData,
};
/// Entry point for the drainer service: wires up signal handling, the
/// shutdown listener, and Redis error monitoring, then runs the scheduling
/// loop until shutdown completes.
pub async fn start_drainer(
    stores: HashMap<id_type::TenantId, Arc<Store>>,
    conf: DrainerSettings,
) -> errors::DrainerResult<()> {
    let drainer_handler = handler::Handler::from_conf(conf, stores);
    let (tx, rx) = mpsc::channel::<()>(1);
    let signal = get_allowed_signals().change_context(errors::DrainerError::SignalError(
        "Failed while getting allowed signals".to_string(),
    ))?;
    let handle = signal.handle();
    let task_handle =
        tokio::spawn(common_utils::signals::signal_handler(signal, tx.clone()).in_current_span());
    let handler_clone = drainer_handler.clone();
    tokio::task::spawn(async move { handler_clone.shutdown_listener(rx).await });
    drainer_handler.spawn_error_handlers(tx)?;
    // Blocks until the scheduling loop exits (after shutdown is signalled).
    drainer_handler.spawn().await?;
    handle.close();
    let _ = task_handle
        .await
        .map_err(|err| logger::error!("Failed while joining signal handler: {:?}", err));
    Ok(())
}
/// Builds and starts the health-check web server, returning the running
/// [`Server`] future for the caller to spawn or await.
///
/// # Errors
/// Returns a `DrainerError` (via `From<std::io::Error>`) if binding to the
/// configured host/port fails.
pub async fn start_web_server(
    conf: Settings,
    stores: HashMap<id_type::TenantId, Arc<Store>>,
) -> Result<Server, errors::DrainerError> {
    let server = conf.server.clone();
    let web_server = actix_web::HttpServer::new(move || {
        actix_web::App::new().service(health_check::Health::server(conf.clone(), stores.clone()))
    })
    .bind((server.host.as_str(), server.port))?
    .run();
    // A `ServerHandle` was previously created here and immediately dropped;
    // that had no effect, so the dead call has been removed.
    Ok(web_server)
}
// File: crates/drainer/src/query.rs
use std::sync::Arc;
use common_utils::errors::CustomResult;
use diesel_models::errors::DatabaseError;
use crate::{kv, logger, metrics, pg_connection, services::Store};
/// Abstraction for replaying a queued database operation against a store.
#[async_trait::async_trait]
pub trait ExecuteQuery {
    /// Executes the operation using `store`'s connection pool. `pushed_at` is
    /// the Unix timestamp (seconds) the job was queued, used to report the
    /// drainer delay metric.
    async fn execute_query(
        self,
        store: &Arc<Store>,
        pushed_at: i64,
    ) -> CustomResult<(), DatabaseError>;
}
#[async_trait::async_trait]
impl ExecuteQuery for kv::DBOperation {
    /// Runs the typed SQL operation against the master pool, recording delay,
    /// execution-time, and success/failure metrics tagged with the operation
    /// and table names.
    async fn execute_query(
        self,
        store: &Arc<Store>,
        pushed_at: i64,
    ) -> CustomResult<(), DatabaseError> {
        let conn = pg_connection(&store.master_pool).await;
        let operation = self.operation();
        let table = self.table();
        let tags = router_env::metric_attributes!(("operation", operation), ("table", table));
        let (result, execution_time) =
            Box::pin(common_utils::date_time::time_it(|| self.execute(&conn))).await;
        // Delay and timing are reported whether or not the query succeeded.
        push_drainer_delay(pushed_at, operation, table, tags);
        metrics::QUERY_EXECUTION_TIME.record(execution_time, tags);
        match result {
            Ok(result) => {
                logger::info!(operation = operation, table = table, ?result);
                metrics::SUCCESSFUL_QUERY_EXECUTION.add(1, tags);
                Ok(())
            }
            Err(err) => {
                logger::error!(operation = operation, table = table, ?err);
                metrics::ERRORS_WHILE_QUERY_EXECUTION.add(1, tags);
                Err(err)
            }
        }
    }
}
/// Records how long (in whole seconds) a job sat in the queue between being
/// pushed (`pushed_at`) and being drained now, as the drainer-delay metric.
/// A negative delay (clock skew or a bad timestamp) cannot go into the u64
/// histogram, so it is logged as an error instead.
#[inline(always)]
fn push_drainer_delay(
    pushed_at: i64,
    operation: &str,
    table: &str,
    tags: &[router_env::opentelemetry::KeyValue],
) {
    let drained_at = common_utils::date_time::now_unix_timestamp();
    let delay = drained_at - pushed_at;
    logger::debug!(operation, table, delay = format!("{delay} secs"));
    match u64::try_from(delay) {
        Ok(seconds) => metrics::DRAINER_DELAY_SECONDS.record(seconds, tags),
        Err(error) => logger::error!(
            pushed_at,
            drained_at,
            delay,
            ?error,
            "Invalid drainer delay"
        ),
    }
}
// File: crates/drainer/src/services.rs
use std::sync::Arc;
use actix_web::{body, HttpResponse, ResponseError};
use error_stack::Report;
use redis_interface::RedisConnectionPool;
use crate::{
connection::{diesel_make_pg_pool, PgPool},
logger,
settings::Tenant,
};
/// Per-tenant handle bundling the master Postgres pool and a tenant-prefixed
/// Redis connection.
#[derive(Clone)]
pub struct Store {
    pub master_pool: PgPool,
    pub redis_conn: Arc<RedisConnectionPool>,
    pub config: StoreConfig,
    pub request_id: Option<String>,
}
/// Stream-related settings snapshot carried by each [`Store`].
#[derive(Clone)]
pub struct StoreConfig {
    pub drainer_stream_name: String,
    pub drainer_num_partitions: u8,
    // When set, the drainer deletes stream entries one by one instead of
    // trimming in bulk (see the drain loop in `handler`).
    pub use_legacy_version: bool,
}
impl Store {
    /// Builds a tenant-scoped store: a Postgres pool on the tenant's schema
    /// and a Redis connection cloned with the tenant's key prefix.
    ///
    /// # Panics
    ///
    /// NOTE(review): the previous doc blamed a HashiCorp client, which this
    /// function does not use; any panic would originate inside the connection
    /// helpers (`diesel_make_pg_pool` / `redis_connection`) — confirm their
    /// behavior before relying on this.
    pub async fn new(config: &crate::Settings, test_transaction: bool, tenant: &Tenant) -> Self {
        let redis_conn = crate::connection::redis_connection(config).await;
        Self {
            master_pool: diesel_make_pg_pool(
                config.master_database.get_inner(),
                test_transaction,
                &tenant.schema,
            )
            .await,
            redis_conn: Arc::new(RedisConnectionPool::clone(
                &redis_conn,
                &tenant.redis_key_prefix,
            )),
            config: StoreConfig {
                drainer_stream_name: config.drainer.stream_name.clone(),
                drainer_num_partitions: config.drainer.num_partitions,
                use_legacy_version: config.redis.use_legacy_version,
            },
            request_id: None,
        }
    }
    /// Whether the legacy (per-entry delete) Redis path should be used.
    pub fn use_legacy_version(&self) -> bool {
        self.config.use_legacy_version
    }
}
/// Logs the error report and converts it into a 500 response whose JSON body
/// has the shape `{"message": "<error display>"}`.
pub fn log_and_return_error_response<T>(error: Report<T>) -> HttpResponse
where
    T: error_stack::Context + ResponseError + Clone,
{
    logger::error!(?error);
    let payload = serde_json::json!({
        "message": error.to_string()
    });
    HttpResponse::InternalServerError()
        .content_type(mime::APPLICATION_JSON)
        .body(payload.to_string())
}
/// Wraps an already-serialized JSON body in a `200 OK` response with the JSON
/// content type.
pub fn http_response_json<T: body::MessageBody + 'static>(response: T) -> HttpResponse {
    let mut builder = HttpResponse::Ok();
    builder.content_type(mime::APPLICATION_JSON);
    builder.body(response)
}
// File: crates/drainer/src/metrics.rs
use router_env::{counter_metric, global_meter, histogram_metric_f64, histogram_metric_u64};
// Single global meter on which every drainer metric is registered.
global_meter!(DRAINER_METER, "DRAINER");
// Event counters.
counter_metric!(JOBS_PICKED_PER_STREAM, DRAINER_METER);
counter_metric!(CYCLES_COMPLETED_SUCCESSFULLY, DRAINER_METER);
counter_metric!(CYCLES_COMPLETED_UNSUCCESSFULLY, DRAINER_METER);
counter_metric!(ERRORS_WHILE_QUERY_EXECUTION, DRAINER_METER);
counter_metric!(SUCCESSFUL_QUERY_EXECUTION, DRAINER_METER);
counter_metric!(SHUTDOWN_SIGNAL_RECEIVED, DRAINER_METER);
counter_metric!(SUCCESSFUL_SHUTDOWN, DRAINER_METER);
counter_metric!(STREAM_EMPTY, DRAINER_METER);
counter_metric!(STREAM_PARSE_FAIL, DRAINER_METER);
counter_metric!(DRAINER_HEALTH, DRAINER_METER);
// Latency histograms.
histogram_metric_f64!(QUERY_EXECUTION_TIME, DRAINER_METER); // Time in (ms) milliseconds
histogram_metric_f64!(REDIS_STREAM_READ_TIME, DRAINER_METER); // Time in (ms) milliseconds
histogram_metric_f64!(REDIS_STREAM_TRIM_TIME, DRAINER_METER); // Time in (ms) milliseconds
histogram_metric_f64!(CLEANUP_TIME, DRAINER_METER); // Time in (ms) milliseconds
histogram_metric_u64!(DRAINER_DELAY_SECONDS, DRAINER_METER); // Time in (s) seconds
histogram_metric_f64!(REDIS_STREAM_DEL_TIME, DRAINER_METER); // Time in (ms) milliseconds
// File: crates/drainer/src/errors.rs
use redis_interface as redis;
use thiserror::Error;
/// Top-level error type for the drainer; `#[error]` attributes supply the
/// display messages.
#[derive(Debug, Error)]
pub enum DrainerError {
    #[error("Error in parsing config : {0}")]
    ConfigParsingError(String),
    #[error("Error during redis operation : {0:?}")]
    RedisError(error_stack::Report<redis::errors::RedisError>),
    #[error("Application configuration error: {0}")]
    ConfigurationError(config::ConfigError),
    #[error("Error while configuring signals: {0}")]
    SignalError(String),
    #[error("Error while parsing data from the stream: {0:?}")]
    ParsingError(error_stack::Report<common_utils::errors::ParsingError>),
    #[error("Unexpected error occurred: {0}")]
    UnexpectedError(String),
    #[error("I/O: {0}")]
    IoError(std::io::Error),
}
/// Error returned by the deep health-check endpoint; serialized into the HTTP
/// response body.
#[derive(Debug, Error, Clone, serde::Serialize)]
pub enum HealthCheckError {
    #[error("Database health check is failing with error: {message}")]
    DbError { message: String },
    #[error("Redis health check is failing with error: {message}")]
    RedisError { message: String },
}
/// Lets `?` convert I/O failures (e.g. server bind errors) into `DrainerError`.
impl From<std::io::Error> for DrainerError {
    fn from(err: std::io::Error) -> Self {
        Self::IoError(err)
    }
}
/// Convenience alias for results carrying a [`DrainerError`] report.
pub type DrainerResult<T> = error_stack::Result<T, DrainerError>;
/// Lets `?` convert configuration-loading failures into `DrainerError`.
impl From<config::ConfigError> for DrainerError {
    fn from(err: config::ConfigError) -> Self {
        Self::ConfigurationError(err)
    }
}
/// Lets `?` convert Redis error reports into `DrainerError`.
impl From<error_stack::Report<redis::errors::RedisError>> for DrainerError {
    fn from(err: error_stack::Report<redis::errors::RedisError>) -> Self {
        Self::RedisError(err)
    }
}
impl actix_web::ResponseError for HealthCheckError {
    // Every health-check failure maps to a 500, signalling an unhealthy probe.
    fn status_code(&self) -> reqwest::StatusCode {
        use reqwest::StatusCode;
        match self {
            Self::DbError { .. } | Self::RedisError { .. } => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
}
// File: crates/drainer/src/main.rs
use std::collections::HashMap;
use drainer::{errors::DrainerResult, logger, services, settings, start_drainer, start_web_server};
use router_env::tracing::Instrument;
/// Drainer binary entry point: loads and validates configuration, builds a
/// per-tenant store map, starts the health-check web server, and runs the
/// drainer loop until shutdown.
#[tokio::main]
async fn main() -> DrainerResult<()> {
    // Get configuration
    let cmd_line = <settings::CmdLineConf as clap::Parser>::parse();
    #[allow(clippy::expect_used)]
    let conf = settings::Settings::with_config_path(cmd_line.config_path)
        .expect("Unable to construct application configuration");
    #[allow(clippy::expect_used)]
    conf.validate()
        .expect("Failed to validate drainer configuration");
    let state = settings::AppState::new(conf.clone()).await;
    // One store (DB pool + prefixed Redis connection) per configured tenant.
    let mut stores = HashMap::new();
    for (tenant_name, tenant) in conf.multitenancy.get_tenants() {
        let store = std::sync::Arc::new(services::Store::new(&state.conf, false, tenant).await);
        stores.insert(tenant_name.clone(), store);
    }
    #[allow(clippy::print_stdout)] // The logger has not yet been initialized
    #[cfg(feature = "vergen")]
    {
        println!("Starting drainer (Version: {})", router_env::git_tag!());
    }
    let _guard = router_env::setup(
        &conf.log,
        router_env::service_name!(),
        [router_env::service_name!()],
    );
    #[allow(clippy::expect_used)]
    let web_server = Box::pin(start_web_server(
        state.conf.as_ref().clone(),
        stores.clone(),
    ))
    .await
    .expect("Failed to create the server");
    // The health server runs detached; its unexpected exit is only logged.
    tokio::spawn(
        async move {
            let _ = web_server.await;
            logger::error!("The health check probe stopped working!");
        }
        .in_current_span(),
    );
    logger::debug!(startup_config=?conf);
    logger::info!("Drainer started [{:?}] [{:?}]", conf.drainer, conf.log);
    start_drainer(stores.clone(), conf.drainer).await?;
    Ok(())
}
// File: crates/drainer/src/health_check.rs
use std::{collections::HashMap, sync::Arc};
use actix_web::{web, Scope};
use async_bb8_diesel::{AsyncConnection, AsyncRunQueryDsl};
use common_utils::{errors::CustomResult, id_type};
use diesel_models::{Config, ConfigNew};
use error_stack::ResultExt;
use router_env::{instrument, logger, tracing};
use crate::{
connection::pg_connection,
errors::HealthCheckError,
services::{self, log_and_return_error_response, Store},
Settings,
};
/// Stream name used only by the Redis health check.
pub const TEST_STREAM_NAME: &str = "TEST_STREAM_0";
/// Dummy payload appended to the test stream during the health check.
pub const TEST_STREAM_DATA: &[(&str, &str)] = &[("data", "sample_data")];
/// Route registrar for the drainer health endpoints.
pub struct Health;
impl Health {
    /// Mounts `/health` (shallow) and `/health/ready` (deep) routes, sharing
    /// the settings and tenant stores as actix app data.
    pub fn server(conf: Settings, stores: HashMap<id_type::TenantId, Arc<Store>>) -> Scope {
        web::scope("health")
            .app_data(web::Data::new(conf))
            .app_data(web::Data::new(stores))
            .service(web::resource("").route(web::get().to(health)))
            .service(web::resource("/ready").route(web::get().to(deep_health_check)))
    }
}
/// Shallow liveness probe; responds `200 OK` without touching any dependency.
#[instrument(skip_all)]
pub async fn health() -> impl actix_web::Responder {
    logger::info!("Drainer health was called");
    actix_web::HttpResponse::Ok().body("Drainer health is good")
}
#[instrument(skip_all)]
pub async fn deep_health_check(
conf: web::Data<Settings>,
stores: web::Data<HashMap<String, Arc<Store>>>,
) -> impl actix_web::Responder {
let mut deep_health_res = HashMap::new();
for (tenant, store) in stores.iter() {
logger::info!("Tenant: {:?}", tenant);
let response = match deep_health_check_func(conf.clone(), store).await {
Ok(response) => serde_json::to_string(&response)
.map_err(|err| {
logger::error!(serialization_error=?err);
})
.unwrap_or_default(),
Err(err) => return log_and_return_error_response(err),
};
deep_health_res.insert(tenant.clone(), response);
}
services::http_response_json(
serde_json::to_string(&deep_health_res)
.map_err(|err| {
logger::error!(serialization_error=?err);
})
.unwrap_or_default(),
)
}
/// Checks database and Redis connectivity for a single tenant store and
/// returns both results; fails on the first unhealthy dependency.
#[instrument(skip_all)]
pub async fn deep_health_check_func(
    conf: web::Data<Settings>,
    store: &Arc<Store>,
) -> Result<DrainerHealthCheckResponse, error_stack::Report<HealthCheckError>> {
    logger::info!("Deep health check was called");
    logger::debug!("Database health check begin");
    let db_status = store
        .health_check_db()
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(HealthCheckError::DbError { message })
        })?;
    logger::debug!("Database health check end");
    logger::debug!("Redis health check begin");
    let redis_status = store
        .health_check_redis(&conf.into_inner())
        .await
        .map(|_| true)
        .map_err(|error| {
            let message = error.to_string();
            error.change_context(HealthCheckError::RedisError { message })
        })?;
    logger::debug!("Redis health check end");
    Ok(DrainerHealthCheckResponse {
        database: db_status,
        redis: redis_status,
    })
}
/// Per-dependency outcome of the deep health check, serialized per tenant.
#[derive(Debug, Clone, serde::Serialize, serde::Deserialize)]
pub struct DrainerHealthCheckResponse {
    pub database: bool,
    pub redis: bool,
}
/// Probes a store's external dependencies for the deep health check.
#[async_trait::async_trait]
pub trait HealthCheckInterface {
    async fn health_check_db(&self) -> CustomResult<(), HealthCheckDBError>;
    async fn health_check_redis(&self, conf: &Settings) -> CustomResult<(), HealthCheckRedisError>;
}
#[async_trait::async_trait]
impl HealthCheckInterface for Store {
    /// Verifies read, write, and delete against the database inside a single
    /// transaction, using a throwaway `test_key` config row.
    async fn health_check_db(&self) -> CustomResult<(), HealthCheckDBError> {
        let conn = pg_connection(&self.master_pool).await;
        conn
            .transaction_async(|conn| {
                Box::pin(async move {
                    // Read probe: trivial SELECT of 1 + 1.
                    let query =
                        diesel::select(diesel::dsl::sql::<diesel::sql_types::Integer>("1 + 1"));
                    let _x: i32 = query.get_result_async(&conn).await.map_err(|err| {
                        logger::error!(read_err=?err,"Error while reading element in the database");
                        HealthCheckDBError::DbReadError
                    })?;
                    logger::debug!("Database read was successful");
                    // Write probe.
                    let config = ConfigNew {
                        key: "test_key".to_string(),
                        config: "test_value".to_string(),
                    };
                    config.insert(&conn).await.map_err(|err| {
                        logger::error!(write_err=?err,"Error while writing to database");
                        HealthCheckDBError::DbWriteError
                    })?;
                    logger::debug!("Database write was successful");
                    // Delete probe; also cleans up the row written above.
                    Config::delete_by_key(&conn, "test_key").await.map_err(|err| {
                        logger::error!(delete_err=?err,"Error while deleting element in the database");
                        HealthCheckDBError::DbDeleteError
                    })?;
                    logger::debug!("Database delete was successful");
                    Ok::<_, HealthCheckDBError>(())
                })
            })
            .await?;
        Ok(())
    }
    /// Exercises the Redis operations the drainer depends on: key set/get/
    /// delete plus stream append, read, and trim.
    async fn health_check_redis(
        &self,
        _conf: &Settings,
    ) -> CustomResult<(), HealthCheckRedisError> {
        let redis_conn = self.redis_conn.clone();
        redis_conn
            .serialize_and_set_key_with_expiry(&"test_key".into(), "test_value", 30)
            .await
            .change_context(HealthCheckRedisError::SetFailed)?;
        logger::debug!("Redis set_key was successful");
        redis_conn
            .get_key::<()>(&"test_key".into())
            .await
            .change_context(HealthCheckRedisError::GetFailed)?;
        logger::debug!("Redis get_key was successful");
        redis_conn
            .delete_key(&"test_key".into())
            .await
            .change_context(HealthCheckRedisError::DeleteFailed)?;
        logger::debug!("Redis delete_key was successful");
        redis_conn
            .stream_append_entry(
                &TEST_STREAM_NAME.into(),
                &redis_interface::RedisEntryId::AutoGeneratedID,
                TEST_STREAM_DATA.to_vec(),
            )
            .await
            .change_context(HealthCheckRedisError::StreamAppendFailed)?;
        logger::debug!("Stream append succeeded");
        let output = redis_conn
            .stream_read_entries(TEST_STREAM_NAME, "0-0", Some(10))
            .await
            .change_context(HealthCheckRedisError::StreamReadFailed)?;
        logger::debug!("Stream read succeeded");
        // Use the newest entry id so the trim below removes everything older
        // than it (XTRIM MINID excludes the given id itself).
        let (_, id_to_trim) = output
            .get(&redis_conn.add_prefix(TEST_STREAM_NAME))
            .and_then(|entries| {
                entries
                    .last()
                    .map(|last_entry| (entries, last_entry.0.clone()))
            })
            .ok_or(error_stack::report!(
                HealthCheckRedisError::StreamReadFailed
            ))?;
        logger::debug!("Stream parse succeeded");
        redis_conn
            .stream_trim_entries(
                &TEST_STREAM_NAME.into(),
                (
                    redis_interface::StreamCapKind::MinID,
                    redis_interface::StreamCapTrim::Exact,
                    id_to_trim,
                ),
            )
            .await
            .change_context(HealthCheckRedisError::StreamTrimFailed)?;
        logger::debug!("Stream trim succeeded");
        Ok(())
    }
}
/// Failure modes of the database health check.
#[allow(clippy::enum_variant_names)]
#[derive(Debug, thiserror::Error)]
pub enum HealthCheckDBError {
    #[error("Error while connecting to database")]
    DbError,
    #[error("Error while writing to database")]
    DbWriteError,
    #[error("Error while reading element in the database")]
    DbReadError,
    #[error("Error while deleting element in the database")]
    DbDeleteError,
    #[error("Unpredictable error occurred")]
    UnknownError,
    #[error("Error in database transaction")]
    TransactionError,
}
impl From<diesel::result::Error> for HealthCheckDBError {
    fn from(error: diesel::result::Error) -> Self {
        match error {
            diesel::result::Error::DatabaseError(_, _) => Self::DbError,
            // All transaction-state failures collapse into one variant.
            diesel::result::Error::RollbackErrorOnCommit { .. }
            | diesel::result::Error::RollbackTransaction
            | diesel::result::Error::AlreadyInTransaction
            | diesel::result::Error::NotInTransaction
            | diesel::result::Error::BrokenTransactionManager => Self::TransactionError,
            _ => Self::UnknownError,
        }
    }
}
/// Failure modes of the Redis health check, one per probed operation.
#[allow(clippy::enum_variant_names)]
#[derive(Debug, thiserror::Error)]
pub enum HealthCheckRedisError {
    #[error("Failed to set key value in Redis")]
    SetFailed,
    #[error("Failed to get key value in Redis")]
    GetFailed,
    #[error("Failed to delete key value in Redis")]
    DeleteFailed,
    #[error("Failed to append data to the stream in Redis")]
    StreamAppendFailed,
    #[error("Failed to read data from the stream in Redis")]
    StreamReadFailed,
    #[error("Failed to trim data from the stream in Redis")]
    StreamTrimFailed,
}
// File: crates/drainer/src/secrets_transformers.rs
use common_utils::errors::CustomResult;
use hyperswitch_interfaces::secrets_interface::{
secret_handler::SecretsHandler,
secret_state::{RawSecret, SecretStateContainer, SecuredSecret},
SecretManagementInterface, SecretsManagementError,
};
use crate::settings::{Database, Settings};
#[async_trait::async_trait]
impl SecretsHandler for Database {
    /// Replaces the secured database password with its raw value fetched from
    /// the secret-management backend; all other fields are kept as-is.
    async fn convert_to_raw_secret(
        value: SecretStateContainer<Self, SecuredSecret>,
        secret_management_client: &dyn SecretManagementInterface,
    ) -> CustomResult<SecretStateContainer<Self, RawSecret>, SecretsManagementError> {
        let secured_db_config = value.get_inner();
        let raw_db_password = secret_management_client
            .get_secret(secured_db_config.password.clone())
            .await?;
        Ok(value.transition_state(|db| Self {
            password: raw_db_password,
            ..db
        }))
    }
}
/// Resolves every secured secret in the configuration (currently only the
/// master database password) to its raw value.
///
/// # Panics
///
/// Panics if fetching the raw secret fails for any config value.
pub async fn fetch_raw_secrets(
    conf: Settings<SecuredSecret>,
    secret_management_client: &dyn SecretManagementInterface,
) -> Settings<RawSecret> {
    #[allow(clippy::expect_used)]
    let database = Database::convert_to_raw_secret(conf.master_database, secret_management_client)
        .await
        .expect("Failed to decrypt database password");
    // All other sections carry no secured secrets and are moved through.
    Settings {
        server: conf.server,
        master_database: database,
        redis: conf.redis,
        log: conf.log,
        drainer: conf.drainer,
        encryption_management: conf.encryption_management,
        secrets_management: conf.secrets_management,
        multitenancy: conf.multitenancy,
    }
}
// File: crates/drainer/src/settings.rs
use std::{collections::HashMap, path::PathBuf, sync::Arc};
use common_utils::{ext_traits::ConfigExt, id_type, DbConnectionParams};
use config::{Environment, File};
use external_services::managers::{
encryption_management::EncryptionManagementConfig, secrets_management::SecretsManagementConfig,
};
use hyperswitch_interfaces::{
encryption_interface::EncryptionManagementInterface,
secrets_interface::secret_state::{
RawSecret, SecretState, SecretStateContainer, SecuredSecret,
},
};
use masking::Secret;
use redis_interface as redis;
pub use router_env::config::{Log, LogConsole, LogFile, LogTelemetry};
use router_env::{env, logger};
use serde::Deserialize;
use crate::{errors, secrets_transformers};
/// Command-line arguments of the drainer binary.
#[derive(clap::Parser, Default)]
#[cfg_attr(feature = "vergen", command(version = router_env::version!()))]
pub struct CmdLineConf {
    /// Config file.
    /// Application will look for "config/config.toml" if this option isn't specified.
    #[arg(short = 'f', long, value_name = "FILE")]
    pub config_path: Option<PathBuf>,
}
/// Shared application state: fully-resolved configuration plus the encryption
/// management client.
#[derive(Clone)]
pub struct AppState {
    pub conf: Arc<Settings<RawSecret>>,
    pub encryption_client: Arc<dyn EncryptionManagementInterface>,
}
impl AppState {
    /// Resolves all secured secrets in `conf` and constructs the encryption
    /// management client.
    ///
    /// # Panics
    ///
    /// Panics if secret or encryption management client cannot be initiated
    pub async fn new(conf: Settings<SecuredSecret>) -> Self {
        #[allow(clippy::expect_used)]
        let secret_management_client = conf
            .secrets_management
            .get_secret_management_client()
            .await
            .expect("Failed to create secret management client");
        let raw_conf =
            secrets_transformers::fetch_raw_secrets(conf, &*secret_management_client).await;
        #[allow(clippy::expect_used)]
        let encryption_client = raw_conf
            .encryption_management
            .get_encryption_management_client()
            .await
            .expect("Failed to create encryption management client");
        Self {
            conf: Arc::new(raw_conf),
            encryption_client,
        }
    }
}
/// Top-level drainer configuration, generic over whether secrets have been
/// resolved (`RawSecret`) or are still wrapped (`SecuredSecret`).
#[derive(Debug, Deserialize, Clone, Default)]
#[serde(default)]
pub struct Settings<S: SecretState> {
    pub server: Server,
    pub master_database: SecretStateContainer<Database, S>,
    pub redis: redis::RedisSettings,
    pub log: Log,
    pub drainer: DrainerSettings,
    pub encryption_management: EncryptionManagementConfig,
    pub secrets_management: SecretsManagementConfig,
    pub multitenancy: Multitenancy,
}
/// Master database connection settings.
#[derive(Debug, Deserialize, Clone)]
#[serde(default)]
pub struct Database {
    pub username: String,
    pub password: Secret<String>,
    pub host: String,
    pub port: u16,
    pub dbname: String,
    pub pool_size: u32,
    pub connection_timeout: u64,
}
/// Exposes the configured credentials/coordinates to the shared
/// connection-URL builder.
impl DbConnectionParams for Database {
    fn get_username(&self) -> &str {
        self.username.as_str()
    }
    fn get_password(&self) -> Secret<String> {
        // Owned copy; the secret stays masked.
        self.password.to_owned()
    }
    fn get_host(&self) -> &str {
        self.host.as_str()
    }
    fn get_port(&self) -> u16 {
        self.port
    }
    fn get_dbname(&self) -> &str {
        self.dbname.as_str()
    }
}
/// Controls how the drainer consumes the Redis streams.
#[derive(Debug, Clone, Deserialize)]
#[serde(default)]
pub struct DrainerSettings {
    /// Base name of the Redis stream(s) to drain.
    pub stream_name: String,
    /// Number of stream partitions to cycle over.
    pub num_partitions: u8,
    /// Maximum entries read from a stream per iteration.
    pub max_read_count: u64,
    pub shutdown_interval: u32, // in milliseconds
    pub loop_interval: u32,     // in milliseconds
}
/// Multi-tenant switch plus the per-tenant configuration map.
#[derive(Debug, Deserialize, Clone, Default)]
pub struct Multitenancy {
    pub enabled: bool,
    pub tenants: TenantConfig,
}
impl Multitenancy {
    /// All configured tenants, keyed by tenant id.
    pub fn get_tenants(&self) -> &HashMap<id_type::TenantId, Tenant> {
        &self.tenants.0
    }
    /// Owned list of every configured tenant id.
    pub fn get_tenant_ids(&self) -> Vec<id_type::TenantId> {
        let mut ids = Vec::with_capacity(self.tenants.0.len());
        for tenant in self.tenants.0.values() {
            ids.push(tenant.tenant_id.clone());
        }
        ids
    }
    /// Looks up a single tenant by id.
    pub fn get_tenant(&self, tenant_id: &id_type::TenantId) -> Option<&Tenant> {
        self.tenants.0.get(tenant_id)
    }
}
/// Map of tenant id -> tenant; the custom `Deserialize` below copies each map
/// key into the corresponding `Tenant::tenant_id`.
#[derive(Debug, Clone, Default)]
pub struct TenantConfig(pub HashMap<id_type::TenantId, Tenant>);
impl<'de> Deserialize<'de> for TenantConfig {
fn deserialize<D: serde::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
#[derive(Deserialize)]
struct Inner {
base_url: String,
schema: String,
accounts_schema: String,
redis_key_prefix: String,
clickhouse_database: String,
}
let hashmap = <HashMap<id_type::TenantId, Inner>>::deserialize(deserializer)?;
Ok(Self(
hashmap
.into_iter()
.map(|(key, value)| {
(
key.clone(),
Tenant {
tenant_id: key,
base_url: value.base_url,
schema: value.schema,
accounts_schema: value.accounts_schema,
redis_key_prefix: value.redis_key_prefix,
clickhouse_database: value.clickhouse_database,
},
)
})
.collect(),
))
}
}
/// Per-tenant routing/storage configuration.
#[derive(Debug, Deserialize, Clone)]
pub struct Tenant {
    pub tenant_id: id_type::TenantId,
    pub base_url: String,
    // Postgres schema holding this tenant's data.
    pub schema: String,
    // Postgres schema holding this tenant's account data.
    pub accounts_schema: String,
    // Prefix applied to this tenant's Redis keys.
    pub redis_key_prefix: String,
    pub clickhouse_database: String,
}
/// HTTP server binding configuration.
#[derive(Debug, Deserialize, Clone)]
#[serde(default)]
pub struct Server {
    pub port: u16,
    // Number of actix worker threads.
    pub workers: usize,
    pub host: String,
}
impl Server {
    /// Rejects a configuration whose host is unset or empty.
    pub fn validate(&self) -> Result<(), errors::DrainerError> {
        if self.host.is_default_or_empty() {
            return Err(errors::DrainerError::ConfigParsingError(
                "server host must not be empty".into(),
            ));
        }
        Ok(())
    }
}
impl Default for Database {
fn default() -> Self {
Self {
username: String::new(),
password: String::new().into(),
host: "localhost".into(),
port: 5432,
dbname: String::new(),
pool_size: 5,
connection_timeout: 10,
}
}
}
impl Default for DrainerSettings {
    /// Defaults for stream consumption; intervals are in milliseconds.
    fn default() -> Self {
        Self {
            stream_name: String::from("DRAINER_STREAM"),
            num_partitions: 64,
            max_read_count: 100,
            // Milliseconds granted to in-flight work during shutdown.
            shutdown_interval: 1000,
            // Milliseconds between drain iterations.
            loop_interval: 100,
        }
    }
}
impl Default for Server {
    /// Loopback-only binding with a single worker, suitable for local runs.
    fn default() -> Self {
        Self {
            port: 8080,
            workers: 1,
            host: String::from("127.0.0.1"),
        }
    }
}
impl Database {
    /// Ensures all mandatory connection fields are populated, failing on the
    /// first missing one (host, dbname, username, password — in that order).
    fn validate(&self) -> Result<(), errors::DrainerError> {
        let checks = [
            (
                self.host.is_default_or_empty(),
                "database host must not be empty",
            ),
            (
                self.dbname.is_default_or_empty(),
                "database name must not be empty",
            ),
            (
                self.username.is_default_or_empty(),
                "database user username must not be empty",
            ),
            (
                self.password.is_default_or_empty(),
                "database user password must not be empty",
            ),
        ];
        for (is_missing, message) in checks {
            if is_missing {
                return Err(errors::DrainerError::ConfigParsingError(message.into()));
            }
        }
        Ok(())
    }
}
impl DrainerSettings {
    /// Rejects a configuration with an unset or empty stream name.
    fn validate(&self) -> Result<(), errors::DrainerError> {
        if self.stream_name.is_default_or_empty() {
            Err(errors::DrainerError::ConfigParsingError(
                "drainer stream name must not be empty".into(),
            ))
        } else {
            Ok(())
        }
    }
}
impl Settings<SecuredSecret> {
    /// Loads settings from the default config path for the current `RUN_ENV`.
    pub fn new() -> Result<Self, errors::DrainerError> {
        Self::with_config_path(None)
    }
    /// Loads settings, optionally from an explicitly supplied config file.
    pub fn with_config_path(config_path: Option<PathBuf>) -> Result<Self, errors::DrainerError> {
        // Configuration values are picked up in the following priority order (1 being least
        // priority):
        // 1. Defaults from the implementation of the `Default` trait.
        // 2. Values from config file. The config file accessed depends on the environment
        //    specified by the `RUN_ENV` environment variable. `RUN_ENV` can be one of
        //    `development`, `sandbox` or `production`. If nothing is specified for `RUN_ENV`,
        //    `/config/development.toml` file is read.
        // 3. Environment variables prefixed with `DRAINER` and each level separated by double
        //    underscores.
        //
        // Values in config file override the defaults in `Default` trait, and the values set using
        // environment variables override both the defaults and the config file values.
        let environment = env::which();
        let config_path = router_env::Config::config_path(&environment.to_string(), config_path);
        let config = router_env::Config::builder(&environment.to_string())?
            .add_source(File::from(config_path).required(false))
            .add_source(
                Environment::with_prefix("DRAINER")
                    .try_parsing(true)
                    .separator("__")
                    .list_separator(",")
                    .with_list_parse_key("redis.cluster_urls"),
            )
            .build()?;
        // The logger may not yet be initialized when constructing the application configuration
        #[allow(clippy::print_stderr)]
        // `serde_path_to_error` reports the exact config key that failed to parse.
        serde_path_to_error::deserialize(config).map_err(|error| {
            logger::error!(%error, "Unable to deserialize application configuration");
            eprintln!("Unable to deserialize application configuration: {error}");
            errors::DrainerError::from(error.into_inner())
        })
    }
    /// Validates every sub-section of the configuration, failing on the first
    /// invalid one.
    pub fn validate(&self) -> Result<(), errors::DrainerError> {
        self.server.validate()?;
        self.master_database.get_inner().validate()?;
        // The logger may not yet be initialized when validating the application configuration
        #[allow(clippy::print_stderr)]
        self.redis.validate().map_err(|error| {
            eprintln!("{error}");
            errors::DrainerError::ConfigParsingError("invalid Redis configuration".into())
        })?;
        self.drainer.validate()?;
        // The logger may not yet be initialized when validating the application configuration
        #[allow(clippy::print_stderr)]
        self.secrets_management.validate().map_err(|error| {
            eprintln!("{error}");
            errors::DrainerError::ConfigParsingError(
                "invalid secrets management configuration".into(),
            )
        })?;
        // The logger may not yet be initialized when validating the application configuration
        #[allow(clippy::print_stderr)]
        self.encryption_management.validate().map_err(|error| {
            eprintln!("{error}");
            errors::DrainerError::ConfigParsingError(
                "invalid encryption management configuration".into(),
            )
        })?;
        Ok(())
    }
}
// File: crates/drainer/src/connection.rs
use bb8::PooledConnection;
use common_utils::DbConnectionParams;
use diesel::PgConnection;
use crate::{settings::Database, Settings};
/// Async bb8 Postgres connection pool used throughout the drainer.
pub type PgPool = bb8::Pool<async_bb8_diesel::ConnectionManager<PgConnection>>;
/// Builds the Redis connection pool from configuration.
///
/// # Panics
///
/// Panics if the Redis connection pool cannot be created.
#[allow(clippy::expect_used)]
pub async fn redis_connection(conf: &Settings) -> redis_interface::RedisConnectionPool {
    redis_interface::RedisConnectionPool::new(&conf.redis)
        .await
        .expect("Failed to create Redis connection Pool")
}
// TODO: use stores defined in storage_impl instead
/// Creates a Postgres connection pool for `database`, targeting `schema`.
///
/// # Panics
///
/// Will panic if could not create a db pool
#[allow(clippy::expect_used)]
pub async fn diesel_make_pg_pool(
    database: &Database,
    _test_transaction: bool,
    schema: &str,
) -> PgPool {
    let manager = async_bb8_diesel::ConnectionManager::<PgConnection>::new(
        database.get_database_url(schema),
    );
    // Pool size and checkout timeout come straight from configuration.
    let builder = bb8::Pool::builder()
        .max_size(database.pool_size)
        .connection_timeout(std::time::Duration::from_secs(database.connection_timeout));
    builder
        .build(manager)
        .await
        .expect("Failed to create PostgreSQL connection pool")
}
/// Checks a connection out of the pool.
///
/// # Panics
///
/// Panics if a connection cannot be retrieved from the pool.
#[allow(clippy::expect_used)]
pub async fn pg_connection(
    pool: &PgPool,
) -> PooledConnection<'_, async_bb8_diesel::ConnectionManager<PgConnection>> {
    pool.get()
        .await
        .expect("Couldn't retrieve PostgreSQL connection")
}
// File: crates/drainer/src/utils.rs
use std::sync::{atomic, Arc};
use error_stack::report;
use redis_interface as redis;
use serde::de::Deserialize;
use crate::{
errors, kv, metrics,
stream::{StreamEntries, StreamReadResult},
};
/// Extracts the entries read for `stream_name` from a stream read result,
/// failing with a Redis `NotFound` error when the stream is absent.
pub fn parse_stream_entries<'a>(
    read_result: &'a StreamReadResult,
    stream_name: &str,
) -> errors::DrainerResult<&'a StreamEntries> {
    match read_result.get(stream_name) {
        Some(entries) => Ok(entries),
        None => Err(report!(errors::DrainerError::RedisError(report!(
            redis::errors::RedisError::NotFound
        )))),
    }
}
pub(crate) fn deserialize_i64<'de, D>(deserializer: D) -> Result<i64, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = serde_json::Value::deserialize(deserializer)?;
match s {
serde_json::Value::String(str_val) => str_val.parse().map_err(serde::de::Error::custom),
serde_json::Value::Number(num_val) => match num_val.as_i64() {
Some(val) => Ok(val),
None => Err(serde::de::Error::custom(format!(
"could not convert {num_val:?} to i64"
))),
},
other => Err(serde::de::Error::custom(format!(
"unexpected data format - expected string or number, got: {other:?}"
))),
}
}
pub(crate) fn deserialize_db_op<'de, D>(deserializer: D) -> Result<kv::DBOperation, D::Error>
where
D: serde::Deserializer<'de>,
{
let s = serde_json::Value::deserialize(deserializer)?;
match s {
serde_json::Value::String(str_val) => {
serde_json::from_str(&str_val).map_err(serde::de::Error::custom)
}
other => Err(serde::de::Error::custom(format!(
"unexpected data format - expected string got: {other:?}"
))),
}
}
// Here the output is in the format (stream_index, jobs_picked),
// similar to the first argument of the function
/// Advances the round-robin stream index, wrapping back to 0 after the final
/// stream. On wrap-around a completed cycle is recorded: the appropriate
/// success/failure metric is bumped depending on whether any jobs were picked,
/// and the `jobs_picked` counter is reset for the next cycle.
#[inline(always)]
pub async fn increment_stream_index(
    (index, jobs_picked): (u8, Arc<atomic::AtomicU8>),
    total_streams: u8,
) -> u8 {
    // `checked_add` + `>=` avoids the underflow the previous
    // `index == total_streams - 1` form hit when `total_streams == 0`
    // (debug-mode panic), and the overflow of `index + 1` for index == 255.
    let cycle_complete = index
        .checked_add(1)
        .map_or(true, |next| next >= total_streams);
    if cycle_complete {
        match jobs_picked.load(atomic::Ordering::SeqCst) {
            0 => metrics::CYCLES_COMPLETED_UNSUCCESSFULLY.add(1, &[]),
            _ => metrics::CYCLES_COMPLETED_SUCCESSFULLY.add(1, &[]),
        }
        jobs_picked.store(0, atomic::Ordering::SeqCst);
        0
    } else {
        index + 1
    }
}
</crate>
|
{
"crate": "drainer",
"file": null,
"files": [
"crates/drainer/build.rs",
"crates/drainer/src/stream.rs",
"crates/drainer/src/logger.rs",
"crates/drainer/src/types.rs",
"crates/drainer/src/handler.rs",
"crates/drainer/src/lib.rs",
"crates/drainer/src/query.rs",
"crates/drainer/src/services.rs",
"crates/drainer/src/metrics.rs",
"crates/drainer/src/errors.rs",
"crates/drainer/src/main.rs",
"crates/drainer/src/health_check.rs",
"crates/drainer/src/secrets_transformers.rs",
"crates/drainer/src/settings.rs",
"crates/drainer/src/connection.rs",
"crates/drainer/src/utils.rs"
],
"module": null,
"num_files": 16,
"token_count": 11943
}
|
crate_-8146234212431081914
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: events
Files: 2
</path>
<crate>
// File: crates/events/src/lib.rs
#![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg_hide))]
#![cfg_attr(docsrs, doc(cfg_hide(doc)))]
#![warn(missing_docs)]
//! A generic event handler system.
//! This library consists of 4 parts:
//! Event Sink: A trait that defines how events are published. This could be a simple logger, a message queue, or a database.
//! EventContext: A struct that holds the event sink and metadata about the event. This is used to create events. This can be used to add metadata to all events, such as the user who triggered the event.
//! EventInfo: A trait that defines the metadata that is sent with the event. It works with the EventContext to add metadata to all events.
//! Event: A trait that defines the event itself. This trait is used to define the data that is sent with the event and defines the event's type & identifier.
mod actix;
use std::{collections::HashMap, sync::Arc};
use error_stack::{Result, ResultExt};
use masking::{ErasedMaskSerialize, Serialize};
use router_env::logger;
use serde::Serializer;
use serde_json::Value;
use time::PrimitiveDateTime;
/// Errors that can occur when working with events.
#[derive(Debug, Clone, thiserror::Error)]
pub enum EventsError {
    /// A generic, unclassified error.
    #[error("Generic Error")]
    GenericError,
    /// An error occurred when serializing the event.
    #[error("Event serialization error")]
    SerializationError,
    /// An error occurred when publishing/producing the event.
    #[error("Event publishing error")]
    PublishError,
}
/// An event that can be published.
pub trait Event: EventInfo {
    /// The type of the event.
    type EventType;
    /// The timestamp of the event.
    fn timestamp(&self) -> PrimitiveDateTime;
    /// The (unique) identifier of the event.
    fn identifier(&self) -> String;
    /// The class/type of the event. This is used to group/categorize events together.
    fn class(&self) -> Self::EventType;
    /// Metadata associated with the event (sent alongside the payload);
    /// empty by default.
    fn metadata(&self) -> HashMap<String, String> {
        HashMap::new()
    }
}
/// Hold the context information for any events
#[derive(Clone)]
pub struct EventContext<T, A>
where
    A: MessagingInterface<MessageClass = T>,
{
    // Shared sink every emitted event is sent through.
    message_sink: Arc<A>,
    // Context-level metadata merged into every event built from this context.
    metadata: HashMap<String, Value>,
}
/// intermediary structure to build events with in-place info.
#[must_use = "make sure to call `emit` or `try_emit` to actually emit the event"]
pub struct EventBuilder<T, A, E, D>
where
    A: MessagingInterface<MessageClass = T>,
    E: Event<EventType = T, Data = D>,
{
    message_sink: Arc<A>,
    // Context metadata plus any per-event additions made via `with`.
    metadata: HashMap<String, Value>,
    event: E,
}
/// A flattened event that flattens the context provided to it along with the actual event.
struct FlatMapEvent<T, A: Event<EventType = T>>(HashMap<String, Value>, A);
impl<T, A, E, D> EventBuilder<T, A, E, D>
where
    A: MessagingInterface<MessageClass = T>,
    E: Event<EventType = T, Data = D>,
{
    /// Add metadata to the event.
    ///
    /// Serialization failures are logged and the entry is skipped instead of
    /// failing the build.
    pub fn with<F: ErasedMaskSerialize, G: EventInfo<Data = F> + 'static>(
        mut self,
        info: G,
    ) -> Self {
        let serialized = info.data().and_then(|payload| {
            payload
                .masked_serialize()
                .change_context(EventsError::SerializationError)
        });
        match serialized {
            Ok(data) => {
                self.metadata.insert(info.key(), data);
            }
            Err(e) => {
                logger::error!("Error adding event info: {:?}", e);
            }
        }
        self
    }
    /// Emit the event and log any errors.
    pub fn emit(self) {
        if let Err(e) = self.try_emit() {
            logger::error!("Error emitting event: {:?}", e);
        }
    }
    /// Emit the event.
    pub fn try_emit(self) -> Result<(), EventsError> {
        let timestamp = self.event.timestamp();
        let event_metadata = self.event.metadata();
        self.message_sink.send_message(
            FlatMapEvent(self.metadata, self.event),
            event_metadata,
            timestamp,
        )
    }
}
impl<T, A> Serialize for FlatMapEvent<T, A>
where
    A: Event<EventType = T>,
{
    /// Serializes the context metadata and the event's own data into a single
    /// flat JSON map.
    ///
    /// Event data that serializes to an object is merged key-by-key (and may
    /// shadow context metadata under the same key); any other shape is nested
    /// under the event's `key()`. Failures are logged and skipped so the event
    /// is still emitted.
    fn serialize<S>(&self, serializer: S) -> core::result::Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Context metadata first; entries whose masked serialization fails are
        // silently dropped.
        let mut serialize_map: HashMap<_, _> = self
            .0
            .iter()
            .filter_map(|(k, v)| Some((k.clone(), v.masked_serialize().ok()?)))
            .collect();
        match self.1.data().map(|i| i.masked_serialize()) {
            Ok(Ok(Value::Object(map))) => {
                for (k, v) in map.into_iter() {
                    serialize_map.insert(k, v);
                }
            }
            Ok(Ok(i)) => {
                serialize_map.insert(self.1.key(), i);
            }
            // Either fetching the data or serializing it failed: log and emit
            // the event without its payload.
            i => {
                logger::error!("Error serializing event: {:?}", i);
            }
        };
        serialize_map.serialize(serializer)
    }
}
impl<T, A> EventContext<T, A>
where
    A: MessagingInterface<MessageClass = T>,
{
    /// Create a new event context.
    pub fn new(message_sink: A) -> Self {
        Self {
            message_sink: Arc::new(message_sink),
            metadata: HashMap::new(),
        }
    }
    /// Add metadata to the event context.
    ///
    /// Serialization failures are logged and the entry is skipped.
    #[track_caller]
    pub fn record_info<G: ErasedMaskSerialize, E: EventInfo<Data = G> + 'static>(
        &mut self,
        info: E,
    ) {
        let serialized = info.data().and_then(|payload| {
            payload
                .masked_serialize()
                .change_context(EventsError::SerializationError)
        });
        match serialized {
            Ok(data) => {
                self.metadata.insert(info.key(), data);
            }
            Err(e) => {
                logger::error!("Error recording event info: {:?}", e);
            }
        }
    }
    /// Emit an event.
    pub fn try_emit<E: Event<EventType = T>>(&self, event: E) -> Result<(), EventsError> {
        self.event(event).try_emit()
    }
    /// Emit an event.
    pub fn emit<D, E: Event<EventType = T, Data = D>>(&self, event: E) {
        self.event(event).emit()
    }
    /// Create an event builder.
    pub fn event<D, E: Event<EventType = T, Data = D>>(
        &self,
        event: E,
    ) -> EventBuilder<T, A, E, D> {
        EventBuilder {
            message_sink: Arc::clone(&self.message_sink),
            metadata: self.metadata.clone(),
            event,
        }
    }
}
/// Add information/metadata to the current context of an event.
pub trait EventInfo {
    /// The payload type carried by this piece of info.
    type Data: ErasedMaskSerialize;
    /// The data that is sent with the event.
    fn data(&self) -> Result<Self::Data, EventsError>;
    /// The key identifying the data for an event.
    fn key(&self) -> String;
}
/// A `(key, value)` string pair is the simplest kind of event info.
impl EventInfo for (String, String) {
    type Data = String;
    fn data(&self) -> Result<String, EventsError> {
        let (_, value) = self;
        Ok(value.clone())
    }
    fn key(&self) -> String {
        let (key, _) = self;
        key.clone()
    }
}
/// A messaging interface for sending messages/events.
/// This can be implemented for any messaging system, such as a message queue, a logger, or a database.
pub trait MessagingInterface {
    /// The type of the event used for categorization by the event publisher.
    type MessageClass;
    /// Send a message that follows the defined message class.
    fn send_message<T>(
        &self,
        data: T,
        metadata: HashMap<String, String>,
        timestamp: PrimitiveDateTime,
    ) -> Result<(), EventsError>
    where
        T: Message<Class = Self::MessageClass> + ErasedMaskSerialize;
}
/// A message that can be sent.
pub trait Message {
    /// The type of the event used for categorization by the event publisher.
    type Class;
    /// The class/category of this message.
    fn get_message_class(&self) -> Self::Class;
    /// The (unique) identifier of the event.
    fn identifier(&self) -> String;
}
/// The flattened wrapper classifies/identifies itself via the inner event.
impl<T, A> Message for FlatMapEvent<T, A>
where
    A: Event<EventType = T>,
{
    type Class = T;
    fn get_message_class(&self) -> Self::Class {
        let Self(_, inner) = self;
        inner.class()
    }
    fn identifier(&self) -> String {
        let Self(_, inner) = self;
        inner.identifier()
    }
}
// File: crates/events/src/actix.rs
use router_env::tracing_actix_web::RequestId;
use crate::EventInfo;
// Every event emitted while handling a request carries the request id under
// the "request_id" key, in hyphenated UUID form.
impl EventInfo for RequestId {
    type Data = String;
    fn data(&self) -> error_stack::Result<String, crate::EventsError> {
        Ok(self.as_hyphenated().to_string())
    }
    fn key(&self) -> String {
        "request_id".to_string()
    }
}
</crate>
|
{
"crate": "events",
"file": null,
"files": [
"crates/events/src/lib.rs",
"crates/events/src/actix.rs"
],
"module": null,
"num_files": 2,
"token_count": 2043
}
|
crate_2030016647139159862
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: pm_auth
Files: 10
</path>
<crate>
// File: crates/pm_auth/src/core.rs
pub mod errors;
// File: crates/pm_auth/src/consts.rs
pub const REQUEST_TIME_OUT: u64 = 30; // will timeout after the mentioned limit
pub const REQUEST_TIMEOUT_ERROR_CODE: &str = "TIMEOUT"; // timeout error code
pub const REQUEST_TIMEOUT_ERROR_MESSAGE: &str = "Connector did not respond in specified time"; // error message for timed out request
pub const NO_ERROR_CODE: &str = "No error code";
pub const NO_ERROR_MESSAGE: &str = "No error message";
// File: crates/pm_auth/src/types.rs
pub mod api;
use std::marker::PhantomData;
use api::auth_service::{BankAccountCredentials, ExchangeToken, LinkToken, RecipientCreate};
use api_models::enums as api_enums;
use common_enums::{CountryAlpha2, PaymentMethod, PaymentMethodType};
use common_utils::{id_type, types};
use masking::Secret;
/// Router data threaded through every payment-auth connector flow.
///
/// `F` is a zero-sized flow marker; `Request`/`Response` are the flow's
/// payload types.
#[derive(Debug, Clone)]
pub struct PaymentAuthRouterData<F, Request, Response> {
    pub flow: PhantomData<F>,
    pub merchant_id: Option<id_type::MerchantId>,
    pub connector: Option<String>,
    pub request: Request,
    /// Connector outcome: parsed response or structured error.
    pub response: Result<Response, ErrorResponse>,
    pub connector_auth_type: ConnectorAuthType,
    pub connector_http_status_code: Option<u16>,
}
/// Request payload for creating a link token.
#[derive(Debug, Clone)]
pub struct LinkTokenRequest {
    pub client_name: String,
    pub country_codes: Option<Vec<String>>,
    pub language: Option<String>,
    pub user_info: Option<id_type::CustomerId>,
    pub client_platform: Option<api_enums::ClientPlatform>,
    pub android_package_name: Option<String>,
    pub redirect_uri: Option<String>,
}
/// Response payload of a link-token creation call.
#[derive(Debug, Clone)]
pub struct LinkTokenResponse {
    pub link_token: String,
}
/// Router data alias for the link-token flow.
pub type LinkTokenRouterData =
    PaymentAuthRouterData<LinkToken, LinkTokenRequest, LinkTokenResponse>;
/// Request payload for exchanging a public token for an access token.
#[derive(Debug, Clone)]
pub struct ExchangeTokenRequest {
    pub public_token: String,
}
/// Response payload of a token-exchange call.
#[derive(Debug, Clone)]
pub struct ExchangeTokenResponse {
    pub access_token: String,
}
impl From<ExchangeTokenResponse> for api_models::pm_auth::ExchangeTokenCreateResponse {
fn from(value: ExchangeTokenResponse) -> Self {
Self {
access_token: value.access_token,
}
}
}
/// Router data alias for the token-exchange flow.
pub type ExchangeTokenRouterData =
    PaymentAuthRouterData<ExchangeToken, ExchangeTokenRequest, ExchangeTokenResponse>;
/// Request for fetching bank-account credentials via an access token.
#[derive(Debug, Clone)]
pub struct BankAccountCredentialsRequest {
    pub access_token: Secret<String>,
    /// Optional filter: restrict the fetch to these account ids.
    pub optional_ids: Option<BankAccountOptionalIDs>,
}
/// Set of account ids to restrict a credentials fetch to.
#[derive(Debug, Clone)]
pub struct BankAccountOptionalIDs {
    pub ids: Vec<Secret<String>>,
}
/// Response carrying one entry per linked bank account.
#[derive(Debug, Clone)]
pub struct BankAccountCredentialsResponse {
    pub credentials: Vec<BankAccountDetails>,
}
/// Details of one linked bank account.
#[derive(Debug, Clone)]
pub struct BankAccountDetails {
    pub account_name: Option<String>,
    pub account_details: PaymentMethodTypeDetails,
    pub payment_method_type: PaymentMethodType,
    pub payment_method: PaymentMethod,
    pub account_id: Secret<String>,
    pub account_type: Option<String>,
    pub balance: Option<types::FloatMajorUnit>,
}
/// Scheme-specific account identifiers.
#[derive(Debug, Clone)]
pub enum PaymentMethodTypeDetails {
    Ach(BankAccountDetailsAch),
    Bacs(BankAccountDetailsBacs),
    Sepa(BankAccountDetailsSepa),
}
/// US ACH account identifiers.
#[derive(Debug, Clone)]
pub struct BankAccountDetailsAch {
    pub account_number: Secret<String>,
    pub routing_number: Secret<String>,
}
/// UK BACS account identifiers.
#[derive(Debug, Clone)]
pub struct BankAccountDetailsBacs {
    pub account_number: Secret<String>,
    pub sort_code: Secret<String>,
}
/// SEPA account identifiers.
#[derive(Debug, Clone)]
pub struct BankAccountDetailsSepa {
    pub iban: Secret<String>,
    pub bic: Secret<String>,
}
/// Router data alias for the bank-credentials flow.
pub type BankDetailsRouterData = PaymentAuthRouterData<
    BankAccountCredentials,
    BankAccountCredentialsRequest,
    BankAccountCredentialsResponse,
>;
/// Request for creating a payout recipient at the connector.
#[derive(Debug, Clone)]
pub struct RecipientCreateRequest {
    pub name: String,
    pub account_data: RecipientAccountData,
    pub address: Option<RecipientCreateAddress>,
}
/// Response of a recipient-creation call.
#[derive(Debug, Clone)]
pub struct RecipientCreateResponse {
    pub recipient_id: String,
}
/// Scheme-specific account details of a payout recipient.
#[derive(Debug, Clone)]
pub enum RecipientAccountData {
    Iban(Secret<String>),
    Bacs {
        sort_code: Secret<String>,
        account_number: Secret<String>,
    },
    FasterPayments {
        sort_code: Secret<String>,
        account_number: Secret<String>,
    },
    Sepa(Secret<String>),
    SepaInstant(Secret<String>),
    Elixir {
        account_number: Secret<String>,
        iban: Secret<String>,
    },
    Bankgiro(Secret<String>),
    Plusgiro(Secret<String>),
}
/// Postal address attached to a recipient.
#[derive(Debug, Clone)]
pub struct RecipientCreateAddress {
    pub street: String,
    pub city: String,
    pub postal_code: String,
    pub country: CountryAlpha2,
}
/// Router data alias for the recipient-creation flow.
pub type RecipientCreateRouterData =
    PaymentAuthRouterData<RecipientCreate, RecipientCreateRequest, RecipientCreateResponse>;
/// Trait-object aliases for each flow's connector integration.
pub type PaymentAuthLinkTokenType =
    dyn api::ConnectorIntegration<LinkToken, LinkTokenRequest, LinkTokenResponse>;
pub type PaymentAuthExchangeTokenType =
    dyn api::ConnectorIntegration<ExchangeToken, ExchangeTokenRequest, ExchangeTokenResponse>;
pub type PaymentAuthBankAccountDetailsType = dyn api::ConnectorIntegration<
    BankAccountCredentials,
    BankAccountCredentialsRequest,
    BankAccountCredentialsResponse,
>;
pub type PaymentInitiationRecipientCreateType =
    dyn api::ConnectorIntegration<RecipientCreate, RecipientCreateRequest, RecipientCreateResponse>;
/// Supported payment-method-auth connectors (serialized in snake_case).
#[derive(Clone, Debug, strum::EnumString, strum::Display)]
#[strum(serialize_all = "snake_case")]
pub enum PaymentMethodAuthConnectors {
    Plaid,
}
/// Parsed connector response paired with the router data it answers.
#[derive(Debug, Clone)]
pub struct ResponseRouterData<Flow, R, Request, Response> {
    pub response: R,
    pub data: PaymentAuthRouterData<Flow, Request, Response>,
    pub http_code: u16,
}
/// Structured error surfaced from a connector call.
#[derive(Clone, Debug, serde::Serialize)]
pub struct ErrorResponse {
    pub code: String,
    pub message: String,
    pub reason: Option<String>,
    pub status_code: u16,
}
impl ErrorResponse {
    /// Canned 500 response used by default trait methods a connector has not
    /// implemented yet.
    fn get_not_implemented() -> Self {
        Self {
            code: String::from("IR_00"),
            message: String::from(
                "This API is under development and will be made available soon.",
            ),
            reason: None,
            status_code: http::StatusCode::INTERNAL_SERVER_ERROR.as_u16(),
        }
    }
}
/// Scheme-specific merchant account details used as a payout destination.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub enum MerchantAccountData {
    Iban {
        iban: Secret<String>,
        name: String,
    },
    Bacs {
        account_number: Secret<String>,
        sort_code: Secret<String>,
        name: String,
    },
    FasterPayments {
        account_number: Secret<String>,
        sort_code: Secret<String>,
        name: String,
    },
    Sepa {
        iban: Secret<String>,
        name: String,
    },
    SepaInstant {
        iban: Secret<String>,
        name: String,
    },
    Elixir {
        account_number: Secret<String>,
        iban: Secret<String>,
        name: String,
    },
    Bankgiro {
        number: Secret<String>,
        name: String,
    },
    Plusgiro {
        number: Secret<String>,
        name: String,
    },
}
/// How the merchant-side recipient is identified at the connector.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(rename_all = "snake_case")]
pub enum MerchantRecipientData {
    ConnectorRecipientId(Secret<String>),
    WalletId(Secret<String>),
    AccountData(MerchantAccountData),
}
/// Credentials used to authenticate with a connector.
#[derive(Default, Debug, Clone, serde::Deserialize)]
pub enum ConnectorAuthType {
    BodyKey {
        client_id: Secret<String>,
        secret: Secret<String>,
    },
    #[default]
    NoKey,
}
/// Raw HTTP response from a connector.
#[derive(Clone, Debug)]
pub struct Response {
    pub headers: Option<http::HeaderMap>,
    pub response: bytes::Bytes,
    pub status_code: u16,
}
/// Query parameters accepted by the auth-service endpoints.
#[derive(serde::Deserialize, Clone)]
pub struct AuthServiceQueryParam {
    pub client_secret: Option<String>,
}
// File: crates/pm_auth/src/lib.rs
pub mod connector;
pub mod consts;
pub mod core;
pub mod types;
// File: crates/pm_auth/src/connector.rs
pub mod plaid;
pub use self::plaid::Plaid;
// File: crates/pm_auth/src/types/api.rs
pub mod auth_service;
use std::fmt::Debug;
use common_utils::{
errors::CustomResult,
request::{Request, RequestContent},
};
use masking::Maskable;
use crate::{
core::errors::ConnectorError,
types::{
self as auth_types,
api::auth_service::{AuthService, PaymentInitiation},
},
};
/// One connector flow (`T`) with request/response payloads `Req`/`Resp`.
/// All methods have defaults so a connector only overrides what a given flow
/// actually needs.
#[async_trait::async_trait]
pub trait ConnectorIntegration<T, Req, Resp>: ConnectorIntegrationAny<T, Req, Resp> + Sync {
    /// Headers to attach to the request; none by default.
    fn get_headers(
        &self,
        _req: &super::PaymentAuthRouterData<T, Req, Resp>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> CustomResult<Vec<(String, Maskable<String>)>, ConnectorError> {
        Ok(vec![])
    }
    /// Content type of the request body; JSON by default.
    fn get_content_type(&self) -> &'static str {
        mime::APPLICATION_JSON.essence_str()
    }
    /// Fully-qualified URL for this flow; empty by default.
    fn get_url(
        &self,
        _req: &super::PaymentAuthRouterData<T, Req, Resp>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> CustomResult<String, ConnectorError> {
        Ok(String::new())
    }
    /// Request body for this flow; an empty JSON object by default.
    fn get_request_body(
        &self,
        _req: &super::PaymentAuthRouterData<T, Req, Resp>,
    ) -> CustomResult<RequestContent, ConnectorError> {
        // `json!({})` is the empty JSON object. The previous
        // `json!(r#"{}"#)` built a JSON *string* value ("{}"), which would
        // serialize as a quoted string rather than an empty object body.
        Ok(RequestContent::Json(Box::new(serde_json::json!({}))))
    }
    /// Assembled HTTP request, or `None` when the flow makes no call.
    fn build_request(
        &self,
        _req: &super::PaymentAuthRouterData<T, Req, Resp>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> CustomResult<Option<Request>, ConnectorError> {
        Ok(None)
    }
    /// Folds the raw connector response back into router data; identity by
    /// default.
    fn handle_response(
        &self,
        data: &super::PaymentAuthRouterData<T, Req, Resp>,
        _res: auth_types::Response,
    ) -> CustomResult<super::PaymentAuthRouterData<T, Req, Resp>, ConnectorError>
    where
        T: Clone,
        Req: Clone,
        Resp: Clone,
    {
        Ok(data.clone())
    }
    /// Maps a non-success response to a structured error; a generic
    /// "not implemented" error by default.
    fn get_error_response(
        &self,
        _res: auth_types::Response,
    ) -> CustomResult<auth_types::ErrorResponse, ConnectorError> {
        Ok(auth_types::ErrorResponse::get_not_implemented())
    }
    /// Maps a 5xx response to a structured error with a human-readable
    /// message derived from the status code.
    fn get_5xx_error_response(
        &self,
        res: auth_types::Response,
    ) -> CustomResult<auth_types::ErrorResponse, ConnectorError> {
        let error_message = match res.status_code {
            500 => "internal_server_error",
            501 => "not_implemented",
            502 => "bad_gateway",
            503 => "service_unavailable",
            504 => "gateway_timeout",
            505 => "http_version_not_supported",
            506 => "variant_also_negotiates",
            507 => "insufficient_storage",
            508 => "loop_detected",
            510 => "not_extended",
            511 => "network_authentication_required",
            _ => "unknown_error",
        };
        Ok(auth_types::ErrorResponse {
            code: res.status_code.to_string(),
            message: error_message.to_string(),
            // Body is surfaced as the reason when it is valid UTF-8.
            reason: String::from_utf8(res.response.to_vec()).ok(),
            status_code: res.status_code,
        })
    }
}
/// Convenience extension combining the common connector behavior with a
/// specific flow integration; lets connectors share header construction.
pub trait ConnectorCommonExt<Flow, Req, Resp>:
    ConnectorCommon + ConnectorIntegration<Flow, Req, Resp>
{
    /// Full header set for a request; empty by default.
    fn build_headers(
        &self,
        _req: &auth_types::PaymentAuthRouterData<Flow, Req, Resp>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> CustomResult<Vec<(String, Maskable<String>)>, ConnectorError> {
        Ok(Vec::new())
    }
}
/// Borrowed trait object for a single flow integration.
pub type BoxedConnectorIntegration<'a, T, Req, Resp> =
    Box<&'a (dyn ConnectorIntegration<T, Req, Resp> + Send + Sync)>;
/// Object-safe accessor used to fetch a flow integration from a connector.
pub trait ConnectorIntegrationAny<T, Req, Resp>: Send + Sync + 'static {
    fn get_connector_integration(&self) -> BoxedConnectorIntegration<'_, T, Req, Resp>;
}
// Blanket impl: every `ConnectorIntegration` is trivially its own
// `ConnectorIntegrationAny`.
impl<S, T, Req, Resp> ConnectorIntegrationAny<T, Req, Resp> for S
where
    S: ConnectorIntegration<T, Req, Resp>,
{
    fn get_connector_integration(&self) -> BoxedConnectorIntegration<'_, T, Req, Resp> {
        Box::new(self)
    }
}
/// Marker trait for a complete payment-auth connector (auth service plus
/// payment initiation), with a blanket impl for any type satisfying both.
pub trait AuthServiceConnector: AuthService + Send + Debug + PaymentInitiation {}
impl<T: Send + Debug + AuthService + PaymentInitiation> AuthServiceConnector for T {}
/// Static trait object for a registered payment-auth connector.
pub type BoxedPaymentAuthConnector = Box<&'static (dyn AuthServiceConnector + Sync)>;
/// A connector implementation paired with its identifying enum variant.
#[derive(Clone, Debug)]
pub struct PaymentAuthConnectorData {
    pub connector: BoxedPaymentAuthConnector,
    pub connector_name: super::PaymentMethodAuthConnectors,
}
/// Flow-independent connector behavior: identity, auth headers, base URL and
/// generic error mapping.
pub trait ConnectorCommon {
    /// Stable identifier of the connector (e.g. "plaid").
    fn id(&self) -> &'static str;
    /// Authentication headers derived from the configured credentials;
    /// none by default.
    fn get_auth_header(
        &self,
        _auth_type: &auth_types::ConnectorAuthType,
    ) -> CustomResult<Vec<(String, Maskable<String>)>, ConnectorError> {
        Ok(Vec::new())
    }
    /// Default request content type.
    fn common_get_content_type(&self) -> &'static str {
        "application/json"
    }
    /// Base URL for this connector in the current environment.
    fn base_url<'a>(&self, connectors: &'a auth_types::PaymentMethodAuthConnectors) -> &'a str;
    /// Fallback error mapping with placeholder code/message.
    fn build_error_response(
        &self,
        res: auth_types::Response,
    ) -> CustomResult<auth_types::ErrorResponse, ConnectorError> {
        Ok(auth_types::ErrorResponse {
            status_code: res.status_code,
            code: crate::consts::NO_ERROR_CODE.to_string(),
            message: crate::consts::NO_ERROR_MESSAGE.to_string(),
            reason: None,
        })
    }
}
// File: crates/pm_auth/src/types/api/auth_service.rs
use crate::types::{
BankAccountCredentialsRequest, BankAccountCredentialsResponse, ExchangeTokenRequest,
ExchangeTokenResponse, LinkTokenRequest, LinkTokenResponse, RecipientCreateRequest,
RecipientCreateResponse,
};
/// Aggregate trait: a connector that supports the full auth-service surface
/// (link token, token exchange, bank-account credentials).
pub trait AuthService:
    super::ConnectorCommon
    + AuthServiceLinkToken
    + AuthServiceExchangeToken
    + AuthServiceBankAccountCredentials
{
}
/// Aggregate trait for connectors supporting payment initiation.
pub trait PaymentInitiation: super::ConnectorCommon + PaymentInitiationRecipientCreate {}
/// Flow marker: create a link token.
#[derive(Debug, Clone)]
pub struct LinkToken;
pub trait AuthServiceLinkToken:
    super::ConnectorIntegration<LinkToken, LinkTokenRequest, LinkTokenResponse>
{
}
/// Flow marker: exchange a public token for an access token.
#[derive(Debug, Clone)]
pub struct ExchangeToken;
pub trait AuthServiceExchangeToken:
    super::ConnectorIntegration<ExchangeToken, ExchangeTokenRequest, ExchangeTokenResponse>
{
}
/// Flow marker: fetch bank-account credentials.
#[derive(Debug, Clone)]
pub struct BankAccountCredentials;
pub trait AuthServiceBankAccountCredentials:
    super::ConnectorIntegration<
        BankAccountCredentials,
        BankAccountCredentialsRequest,
        BankAccountCredentialsResponse,
    >
{
}
/// Flow marker: create a payout recipient.
#[derive(Debug, Clone)]
pub struct RecipientCreate;
pub trait PaymentInitiationRecipientCreate:
    super::ConnectorIntegration<RecipientCreate, RecipientCreateRequest, RecipientCreateResponse>
{
}
// File: crates/pm_auth/src/core/errors.rs
/// Errors produced while talking to a payment-auth connector.
#[derive(Debug, thiserror::Error, PartialEq)]
pub enum ConnectorError {
    #[error("Failed to obtain authentication type")]
    FailedToObtainAuthType,
    #[error("Missing required field: {field_name}")]
    MissingRequiredField { field_name: &'static str },
    // Carries the raw response body, when available, for diagnostics.
    #[error("Failed to execute a processing step: {0:?}")]
    ProcessingStepFailed(Option<bytes::Bytes>),
    #[error("Failed to deserialize connector response")]
    ResponseDeserializationFailed,
    #[error("Failed to encode connector request")]
    RequestEncodingFailed,
    #[error("Invalid connector configuration: {config}")]
    InvalidConnectorConfig { config: &'static str },
}
/// Result alias carrying an `error_stack` report.
pub type CustomResult<T, E> = error_stack::Result<T, E>;
/// Errors produced while parsing/encoding payloads.
#[derive(Debug, thiserror::Error)]
pub enum ParsingError {
    #[error("Failed to parse enum: {0}")]
    EnumParseFailure(&'static str),
    #[error("Failed to parse struct: {0}")]
    StructParseFailure(&'static str),
    #[error("Failed to serialize to {0} format")]
    EncodeError(&'static str),
    #[error("Unknown error while parsing")]
    UnknownError,
}
// File: crates/pm_auth/src/connector/plaid.rs
pub mod transformers;
use std::fmt::Debug;
use common_utils::{
ext_traits::BytesExt,
request::{Method, Request, RequestBuilder, RequestContent},
};
use error_stack::ResultExt;
use masking::{Mask, Maskable};
use transformers as plaid;
use crate::{
core::errors,
types::{
self as auth_types,
api::{
auth_service::{
self, BankAccountCredentials, ExchangeToken, LinkToken, RecipientCreate,
},
ConnectorCommon, ConnectorCommonExt, ConnectorIntegration,
},
},
};
/// Plaid payment-method-auth connector.
#[derive(Debug, Clone)]
pub struct Plaid;
impl<Flow, Request, Response> ConnectorCommonExt<Flow, Request, Response> for Plaid
where
    Self: ConnectorIntegration<Flow, Request, Response>,
{
    /// Common headers for every Plaid call: JSON content type plus the
    /// client-id/secret auth headers derived from the stored credentials.
    fn build_headers(
        &self,
        req: &auth_types::PaymentAuthRouterData<Flow, Request, Response>,
        _connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        let mut header = vec![(
            "Content-Type".to_string(),
            self.get_content_type().to_string().into(),
        )];
        let mut auth = self.get_auth_header(&req.connector_auth_type)?;
        header.append(&mut auth);
        Ok(header)
    }
}
impl ConnectorCommon for Plaid {
    /// Connector identifier used across the crate.
    fn id(&self) -> &'static str {
        "plaid"
    }

    fn common_get_content_type(&self) -> &'static str {
        "application/json"
    }

    /// Plaid sandbox base URL; the connector config argument is not consulted.
    fn base_url<'a>(&self, _connectors: &'a auth_types::PaymentMethodAuthConnectors) -> &'a str {
        "https://sandbox.plaid.com"
    }

    /// Maps the stored credentials onto Plaid's client-id/secret headers.
    fn get_auth_header(
        &self,
        auth_type: &auth_types::ConnectorAuthType,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        let auth = plaid::PlaidAuthType::try_from(auth_type)
            .change_context(errors::ConnectorError::FailedToObtainAuthType)?;
        Ok(vec![
            ("PLAID-CLIENT-ID".to_string(), auth.client_id.into_masked()),
            ("PLAID-SECRET".to_string(), auth.secret.into_masked()),
        ])
    }

    /// Converts a raw Plaid error payload into the shared error shape.
    fn build_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        let response: plaid::PlaidErrorResponse = res
            .response
            .parse_struct("PlaidErrorResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        Ok(auth_types::ErrorResponse {
            status_code: res.status_code,
            // Plaid's error_code is not forwarded here; a generic code is used.
            code: crate::consts::NO_ERROR_CODE.to_string(),
            message: response.error_message,
            reason: response.display_message,
        })
    }
}
// Marker impls: Plaid supports the full auth-service and payment-initiation
// flows; per-flow behavior lives in the `ConnectorIntegration` impls below.
impl auth_service::AuthService for Plaid {}
impl auth_service::PaymentInitiationRecipientCreate for Plaid {}
impl auth_service::PaymentInitiation for Plaid {}
impl auth_service::AuthServiceLinkToken for Plaid {}
/// `/link/token/create`: obtains a Plaid Link token to start the auth flow.
impl ConnectorIntegration<LinkToken, auth_types::LinkTokenRequest, auth_types::LinkTokenResponse>
    for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        // Shared content-type + auth header construction.
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    fn get_url(
        &self,
        _req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        let mut url = self.base_url(connectors).to_string();
        url.push_str("/link/token/create");
        Ok(url)
    }

    fn get_request_body(
        &self,
        req: &auth_types::LinkTokenRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let body = plaid::PlaidLinkTokenRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(body)))
    }

    /// Assembles the POST request from the url/header/body helpers above.
    fn build_request(
        &self,
        req: &auth_types::LinkTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        let url = auth_types::PaymentAuthLinkTokenType::get_url(self, req, connectors)?;
        let headers = auth_types::PaymentAuthLinkTokenType::get_headers(self, req, connectors)?;
        let body = auth_types::PaymentAuthLinkTokenType::get_request_body(self, req)?;
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&url)
                .attach_default_headers()
                .headers(headers)
                .set_body(body)
                .build(),
        ))
    }

    /// Deserializes the Plaid response and folds it back into router data.
    fn handle_response(
        &self,
        data: &auth_types::LinkTokenRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::LinkTokenRouterData, errors::ConnectorError> {
        let response: plaid::PlaidLinkTokenResponse = res
            .response
            .parse_struct("PlaidLinkTokenResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        auth_types::LinkTokenRouterData::try_from(auth_types::ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
impl auth_service::AuthServiceExchangeToken for Plaid {}
/// `/item/public_token/exchange`: trades the public token produced by Plaid
/// Link for a long-lived access token.
impl
    ConnectorIntegration<
        ExchangeToken,
        auth_types::ExchangeTokenRequest,
        auth_types::ExchangeTokenResponse,
    > for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        // Shared content-type + auth header construction.
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    fn get_url(
        &self,
        _req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        Ok(format!(
            "{}{}",
            self.base_url(connectors),
            "/item/public_token/exchange"
        ))
    }
    fn get_request_body(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let req_obj = plaid::PlaidExchangeTokenRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(req_obj)))
    }
    /// Assembles the POST request from the url/header/body helpers above.
    fn build_request(
        &self,
        req: &auth_types::ExchangeTokenRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&auth_types::PaymentAuthExchangeTokenType::get_url(
                    self, req, connectors,
                )?)
                .attach_default_headers()
                .headers(auth_types::PaymentAuthExchangeTokenType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(auth_types::PaymentAuthExchangeTokenType::get_request_body(
                    self, req,
                )?)
                .build(),
        ))
    }
    /// Deserializes the Plaid response and folds it back into router data.
    fn handle_response(
        &self,
        data: &auth_types::ExchangeTokenRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ExchangeTokenRouterData, errors::ConnectorError> {
        let response: plaid::PlaidExchangeTokenResponse = res
            .response
            .parse_struct("PlaidExchangeTokenResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        <auth_types::ExchangeTokenRouterData>::try_from(auth_types::ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
impl auth_service::AuthServiceBankAccountCredentials for Plaid {}
/// `/auth/get`: fetches account numbers/credentials for a linked item.
impl
    ConnectorIntegration<
        BankAccountCredentials,
        auth_types::BankAccountCredentialsRequest,
        auth_types::BankAccountCredentialsResponse,
    > for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::BankDetailsRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        // Shared content-type + auth header construction.
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    fn get_url(
        &self,
        _req: &auth_types::BankDetailsRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        Ok(format!("{}{}", self.base_url(connectors), "/auth/get"))
    }
    fn get_request_body(
        &self,
        req: &auth_types::BankDetailsRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let req_obj = plaid::PlaidBankAccountCredentialsRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(req_obj)))
    }
    /// Assembles the POST request from the url/header/body helpers above.
    fn build_request(
        &self,
        req: &auth_types::BankDetailsRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&auth_types::PaymentAuthBankAccountDetailsType::get_url(
                    self, req, connectors,
                )?)
                .attach_default_headers()
                .headers(auth_types::PaymentAuthBankAccountDetailsType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(
                    auth_types::PaymentAuthBankAccountDetailsType::get_request_body(self, req)?,
                )
                .build(),
        ))
    }
    /// Deserializes the Plaid response and folds it back into router data.
    fn handle_response(
        &self,
        data: &auth_types::BankDetailsRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::BankDetailsRouterData, errors::ConnectorError> {
        let response: plaid::PlaidBankAccountCredentialsResponse = res
            .response
            .parse_struct("PlaidBankAccountCredentialsResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        <auth_types::BankDetailsRouterData>::try_from(auth_types::ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
/// `/payment_initiation/recipient/create`: registers a payment recipient.
impl
    ConnectorIntegration<
        RecipientCreate,
        auth_types::RecipientCreateRequest,
        auth_types::RecipientCreateResponse,
    > for Plaid
{
    fn get_headers(
        &self,
        req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Vec<(String, Maskable<String>)>, errors::ConnectorError> {
        // Shared content-type + auth header construction.
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    fn get_url(
        &self,
        _req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<String, errors::ConnectorError> {
        Ok(format!(
            "{}{}",
            self.base_url(connectors),
            "/payment_initiation/recipient/create"
        ))
    }
    fn get_request_body(
        &self,
        req: &auth_types::RecipientCreateRouterData,
    ) -> errors::CustomResult<RequestContent, errors::ConnectorError> {
        let req_obj = plaid::PlaidRecipientCreateRequest::try_from(req)?;
        Ok(RequestContent::Json(Box::new(req_obj)))
    }
    /// Assembles the POST request from the url/header/body helpers above.
    fn build_request(
        &self,
        req: &auth_types::RecipientCreateRouterData,
        connectors: &auth_types::PaymentMethodAuthConnectors,
    ) -> errors::CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&auth_types::PaymentInitiationRecipientCreateType::get_url(
                    self, req, connectors,
                )?)
                .attach_default_headers()
                .headers(
                    auth_types::PaymentInitiationRecipientCreateType::get_headers(
                        self, req, connectors,
                    )?,
                )
                .set_body(
                    auth_types::PaymentInitiationRecipientCreateType::get_request_body(self, req)?,
                )
                .build(),
        ))
    }
    /// Deserializes the Plaid response; this conversion is infallible (`From`).
    fn handle_response(
        &self,
        data: &auth_types::RecipientCreateRouterData,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::RecipientCreateRouterData, errors::ConnectorError> {
        let response: plaid::PlaidRecipientCreateResponse = res
            .response
            .parse_struct("PlaidRecipientCreateResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        Ok(<auth_types::RecipientCreateRouterData>::from(
            auth_types::ResponseRouterData {
                response,
                data: data.clone(),
                http_code: res.status_code,
            },
        ))
    }
    fn get_error_response(
        &self,
        res: auth_types::Response,
    ) -> errors::CustomResult<auth_types::ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res)
    }
}
// File: crates/pm_auth/src/connector/plaid/transformers.rs
use std::collections::HashMap;
use common_enums::{PaymentMethod, PaymentMethodType};
use common_utils::{id_type, types as util_types};
use masking::{PeekInterface, Secret};
use serde::{Deserialize, Serialize};
use crate::{core::errors, types};
/// Body of Plaid's `/link/token/create` request.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidLinkTokenRequest {
    client_name: String,
    country_codes: Vec<String>,
    language: String,
    products: Vec<String>,
    user: User,
    // Only sent for Android clients.
    android_package_name: Option<String>,
    // Only sent for iOS clients.
    redirect_uri: Option<String>,
}
/// End-user identification forwarded to Plaid.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct User {
    pub client_user_id: id_type::CustomerId,
}
impl TryFrom<&types::LinkTokenRouterData> for PlaidLinkTokenRequest {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Builds the `/link/token/create` payload from router data.
    ///
    /// Fails with `MissingRequiredField` when `country_codes` or `user_info`
    /// are absent. `android_package_name` is only forwarded for Android
    /// clients and `redirect_uri` only for iOS clients.
    fn try_from(item: &types::LinkTokenRouterData) -> Result<Self, Self::Error> {
        Ok(Self {
            client_name: item.request.client_name.clone(),
            country_codes: item.request.country_codes.clone().ok_or(
                errors::ConnectorError::MissingRequiredField {
                    field_name: "country_codes",
                },
            )?,
            // Default to English when the caller does not specify a language.
            language: item.request.language.clone().unwrap_or("en".to_string()),
            products: vec!["auth".to_string()],
            user: User {
                client_user_id: item.request.user_info.clone().ok_or(
                    // Fix: previously reported "country_codes" (copy-paste)
                    // when the missing field was actually `user_info`.
                    errors::ConnectorError::MissingRequiredField {
                        field_name: "user_info",
                    },
                )?,
            },
            android_package_name: match item.request.client_platform {
                Some(api_models::enums::ClientPlatform::Android) => {
                    item.request.android_package_name.clone()
                }
                Some(api_models::enums::ClientPlatform::Ios)
                | Some(api_models::enums::ClientPlatform::Web)
                | Some(api_models::enums::ClientPlatform::Unknown)
                | None => None,
            },
            redirect_uri: match item.request.client_platform {
                Some(api_models::enums::ClientPlatform::Ios) => item.request.redirect_uri.clone(),
                Some(api_models::enums::ClientPlatform::Android)
                | Some(api_models::enums::ClientPlatform::Web)
                | Some(api_models::enums::ClientPlatform::Unknown)
                | None => None,
            },
        })
    }
}
/// Body of Plaid's `/link/token/create` response.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidLinkTokenResponse {
    link_token: String,
}
impl<F, T>
    TryFrom<types::ResponseRouterData<F, PlaidLinkTokenResponse, T, types::LinkTokenResponse>>
    for types::PaymentAuthRouterData<F, T, types::LinkTokenResponse>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    /// Copies the link token into the router-data response slot; this
    /// conversion never actually errors.
    fn try_from(
        item: types::ResponseRouterData<F, PlaidLinkTokenResponse, T, types::LinkTokenResponse>,
    ) -> Result<Self, Self::Error> {
        Ok(Self {
            response: Ok(types::LinkTokenResponse {
                link_token: item.response.link_token,
            }),
            ..item.data
        })
    }
}
/// Body of Plaid's `/item/public_token/exchange` request.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidExchangeTokenRequest {
    public_token: String,
}
/// Body of Plaid's `/item/public_token/exchange` response.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidExchangeTokenResponse {
    pub access_token: String,
}
impl<F, T>
    TryFrom<
        types::ResponseRouterData<F, PlaidExchangeTokenResponse, T, types::ExchangeTokenResponse>,
    > for types::PaymentAuthRouterData<F, T, types::ExchangeTokenResponse>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    /// Copies the access token into the router-data response slot; this
    /// conversion never actually errors.
    fn try_from(
        item: types::ResponseRouterData<
            F,
            PlaidExchangeTokenResponse,
            T,
            types::ExchangeTokenResponse,
        >,
    ) -> Result<Self, Self::Error> {
        Ok(Self {
            response: Ok(types::ExchangeTokenResponse {
                access_token: item.response.access_token,
            }),
            ..item.data
        })
    }
}
impl TryFrom<&types::ExchangeTokenRouterData> for PlaidExchangeTokenRequest {
    type Error = error_stack::Report<errors::ConnectorError>;
    fn try_from(item: &types::ExchangeTokenRouterData) -> Result<Self, Self::Error> {
        Ok(Self {
            public_token: item.request.public_token.clone(),
        })
    }
}
/// Body of Plaid's `/payment_initiation/recipient/create` request.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateRequest {
    pub name: String,
    // Flattened: serializes as either an `iban` field or a `bacs` object.
    #[serde(flatten)]
    pub account_data: PlaidRecipientAccountData,
    pub address: Option<PlaidRecipientCreateAddress>,
}
/// Body of Plaid's `/payment_initiation/recipient/create` response.
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateResponse {
    pub recipient_id: String,
}
/// Account identification schemes Plaid accepts for a recipient.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum PlaidRecipientAccountData {
    Iban(Secret<String>),
    Bacs {
        sort_code: Secret<String>,
        account: Secret<String>,
    },
}
impl TryFrom<&types::RecipientAccountData> for PlaidRecipientAccountData {
    type Error = errors::ConnectorError;

    /// Maps supported recipient account schemes onto Plaid's wire format.
    /// Only IBAN and BACS are accepted; every other scheme is rejected with
    /// an `InvalidConnectorConfig` error.
    fn try_from(item: &types::RecipientAccountData) -> Result<Self, Self::Error> {
        match item {
            types::RecipientAccountData::Bacs {
                sort_code,
                account_number,
            } => Ok(Self::Bacs {
                sort_code: sort_code.clone(),
                account: account_number.clone(),
            }),
            types::RecipientAccountData::Iban(iban) => Ok(Self::Iban(iban.clone())),
            // Explicitly enumerated (no `_`) so a new variant forces a review.
            types::RecipientAccountData::FasterPayments { .. }
            | types::RecipientAccountData::Sepa(_)
            | types::RecipientAccountData::SepaInstant(_)
            | types::RecipientAccountData::Elixir { .. }
            | types::RecipientAccountData::Bankgiro(_)
            | types::RecipientAccountData::Plusgiro(_) => {
                Err(errors::ConnectorError::InvalidConnectorConfig {
                    config: "Invalid payment method selected. Only Iban, Bacs Supported",
                })
            }
        }
    }
}
/// Recipient postal address as Plaid expects it.
#[derive(Debug, Serialize, Eq, PartialEq)]
pub struct PlaidRecipientCreateAddress {
    pub street: String,
    pub city: String,
    pub postal_code: String,
    pub country: String,
}
impl From<&types::RecipientCreateAddress> for PlaidRecipientCreateAddress {
    fn from(item: &types::RecipientCreateAddress) -> Self {
        Self {
            street: item.street.clone(),
            city: item.city.clone(),
            postal_code: item.postal_code.clone(),
            // Country is serialized via its Display impl (alpha-2 code).
            country: common_enums::CountryAlpha2::to_string(&item.country),
        }
    }
}
impl TryFrom<&types::RecipientCreateRouterData> for PlaidRecipientCreateRequest {
    type Error = errors::ConnectorError;
    /// Builds the recipient-create payload; fails only when the account data
    /// uses a scheme Plaid does not support (see `PlaidRecipientAccountData`).
    fn try_from(item: &types::RecipientCreateRouterData) -> Result<Self, Self::Error> {
        Ok(Self {
            name: item.request.name.clone(),
            account_data: PlaidRecipientAccountData::try_from(&item.request.account_data)?,
            address: item
                .request
                .address
                .as_ref()
                .map(PlaidRecipientCreateAddress::from),
        })
    }
}
impl<F, T>
    From<
        types::ResponseRouterData<
            F,
            PlaidRecipientCreateResponse,
            T,
            types::RecipientCreateResponse,
        >,
    > for types::PaymentAuthRouterData<F, T, types::RecipientCreateResponse>
{
    /// Copies the recipient id into the router-data response slot.
    fn from(
        item: types::ResponseRouterData<
            F,
            PlaidRecipientCreateResponse,
            T,
            types::RecipientCreateResponse,
        >,
    ) -> Self {
        Self {
            response: Ok(types::RecipientCreateResponse {
                recipient_id: item.response.recipient_id,
            }),
            ..item.data
        }
    }
}
/// Body of Plaid's `/auth/get` request.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidBankAccountCredentialsRequest {
    access_token: String,
    // When set, restricts the response to the listed account ids.
    options: Option<BankAccountCredentialsOptions>,
}
/// Body of Plaid's `/auth/get` response.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsResponse {
    pub accounts: Vec<PlaidBankAccountCredentialsAccounts>,
    pub numbers: PlaidBankAccountCredentialsNumbers,
    // pub item: PlaidBankAccountCredentialsItem,
    pub request_id: String,
}
/// Optional filter for `/auth/get`: restricts results to these account ids.
#[derive(Debug, Serialize, Eq, PartialEq)]
#[serde(rename_all = "snake_case")]
pub struct BankAccountCredentialsOptions {
    account_ids: Vec<String>,
}
/// Per-account metadata returned alongside the account numbers.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsAccounts {
    pub account_id: String,
    pub name: String,
    pub subtype: Option<String>,
    pub balances: Option<PlaidBankAccountCredentialsBalances>,
}
/// Balance information for an account; all fields are optional in Plaid's API.
#[derive(Debug, Deserialize, PartialEq)]
pub struct PlaidBankAccountCredentialsBalances {
    pub available: Option<util_types::FloatMajorUnit>,
    pub current: Option<util_types::FloatMajorUnit>,
    pub limit: Option<util_types::FloatMajorUnit>,
    pub iso_currency_code: Option<String>,
    pub unofficial_currency_code: Option<String>,
    pub last_updated_datetime: Option<String>,
}
/// Account numbers grouped by scheme (ACH, EFT, international/IBAN, BACS).
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsNumbers {
    pub ach: Vec<PlaidBankAccountCredentialsACH>,
    pub eft: Vec<PlaidBankAccountCredentialsEFT>,
    pub international: Vec<PlaidBankAccountCredentialsInternational>,
    pub bacs: Vec<PlaidBankAccountCredentialsBacs>,
}
/// Item-level metadata (currently unused; see commented field above).
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsItem {
    pub item_id: String,
    pub institution_id: Option<String>,
    pub webhook: Option<String>,
    pub error: Option<PlaidErrorResponse>,
}
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsACH {
    pub account_id: String,
    pub account: String,
    pub routing: String,
    pub wire_routing: Option<String>,
}
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsEFT {
    pub account_id: String,
    pub account: String,
    pub institution: String,
    pub branch: String,
}
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsInternational {
    pub account_id: String,
    pub iban: String,
    pub bic: String,
}
#[derive(Debug, Deserialize, Eq, PartialEq)]
pub struct PlaidBankAccountCredentialsBacs {
    pub account_id: String,
    pub account: String,
    pub sort_code: String,
}
impl TryFrom<&types::BankDetailsRouterData> for PlaidBankAccountCredentialsRequest {
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Builds the `/auth/get` payload; when specific account ids were
    /// requested, they are forwarded via the `options` object.
    fn try_from(item: &types::BankDetailsRouterData) -> Result<Self, Self::Error> {
        let access_token = item.request.access_token.peek().to_string();
        let options = item
            .request
            .optional_ids
            .as_ref()
            .map(|bank_account_ids| BankAccountCredentialsOptions {
                account_ids: bank_account_ids
                    .ids
                    .iter()
                    .map(|id| id.peek().to_string())
                    .collect(),
            });
        Ok(Self {
            access_token,
            options,
        })
    }
}
impl<F, T>
    TryFrom<
        types::ResponseRouterData<
            F,
            PlaidBankAccountCredentialsResponse,
            T,
            types::BankAccountCredentialsResponse,
        >,
    > for types::PaymentAuthRouterData<F, T, types::BankAccountCredentialsResponse>
{
    type Error = error_stack::Report<errors::ConnectorError>;

    /// Flattens Plaid's `/auth/get` response into a list of
    /// `BankAccountDetails`: one entry per ACH, BACS and international (IBAN)
    /// account number. EFT numbers are not mapped (presumably unsupported
    /// downstream — confirm before relying on this).
    fn try_from(
        item: types::ResponseRouterData<
            F,
            PlaidBankAccountCredentialsResponse,
            T,
            types::BankAccountCredentialsResponse,
        >,
    ) -> Result<Self, Self::Error> {
        let (account_numbers, accounts_info) = (item.response.numbers, item.response.accounts);
        // Index account metadata (subtype, display name, available balance)
        // by Plaid account id so each number entry can be enriched below.
        let mut id_to_info = HashMap::new();
        for acc in accounts_info {
            id_to_info.insert(
                acc.account_id,
                (
                    acc.subtype,
                    acc.name,
                    acc.balances.and_then(|balance| balance.available),
                ),
            );
        }
        // Shared lookup, replacing three identical copies of this logic:
        // yields (account_type, account_name, available_balance), all `None`
        // when the id carries no metadata entry.
        let lookup = |account_id: &String| {
            id_to_info
                .get(account_id)
                .map_or((None, None, None), |(subtype, name, balance)| {
                    (subtype.to_owned(), Some(name.clone()), *balance)
                })
        };
        let mut bank_account_vec = Vec::new();
        for ach in account_numbers.ach {
            let (acc_type, acc_name, available_balance) = lookup(&ach.account_id);
            let account_details =
                types::PaymentMethodTypeDetails::Ach(types::BankAccountDetailsAch {
                    account_number: Secret::new(ach.account),
                    routing_number: Secret::new(ach.routing),
                });
            bank_account_vec.push(types::BankAccountDetails {
                account_name: acc_name,
                account_details,
                payment_method_type: PaymentMethodType::Ach,
                payment_method: PaymentMethod::BankDebit,
                account_id: ach.account_id.into(),
                account_type: acc_type,
                balance: available_balance,
            });
        }
        for bacs in account_numbers.bacs {
            let (acc_type, acc_name, available_balance) = lookup(&bacs.account_id);
            let account_details =
                types::PaymentMethodTypeDetails::Bacs(types::BankAccountDetailsBacs {
                    account_number: Secret::new(bacs.account),
                    sort_code: Secret::new(bacs.sort_code),
                });
            bank_account_vec.push(types::BankAccountDetails {
                account_name: acc_name,
                account_details,
                payment_method_type: PaymentMethodType::Bacs,
                payment_method: PaymentMethod::BankDebit,
                account_id: bacs.account_id.into(),
                account_type: acc_type,
                balance: available_balance,
            });
        }
        for sepa in account_numbers.international {
            let (acc_type, acc_name, available_balance) = lookup(&sepa.account_id);
            let account_details =
                types::PaymentMethodTypeDetails::Sepa(types::BankAccountDetailsSepa {
                    iban: Secret::new(sepa.iban),
                    bic: Secret::new(sepa.bic),
                });
            bank_account_vec.push(types::BankAccountDetails {
                account_name: acc_name,
                account_details,
                payment_method_type: PaymentMethodType::Sepa,
                payment_method: PaymentMethod::BankDebit,
                account_id: sepa.account_id.into(),
                account_type: acc_type,
                balance: available_balance,
            });
        }
        Ok(Self {
            response: Ok(types::BankAccountCredentialsResponse {
                credentials: bank_account_vec,
            }),
            ..item.data
        })
    }
}
/// Plaid API credentials: client id + secret, sent as headers on every call.
pub struct PlaidAuthType {
    pub client_id: Secret<String>,
    pub secret: Secret<String>,
}
impl TryFrom<&types::ConnectorAuthType> for PlaidAuthType {
    type Error = error_stack::Report<errors::ConnectorError>;
    /// Only the `BodyKey` auth shape maps onto Plaid credentials; any other
    /// shape fails with `FailedToObtainAuthType`.
    fn try_from(auth_type: &types::ConnectorAuthType) -> Result<Self, Self::Error> {
        match auth_type {
            types::ConnectorAuthType::BodyKey { client_id, secret } => Ok(Self {
                client_id: client_id.to_owned(),
                secret: secret.to_owned(),
            }),
            _ => Err(errors::ConnectorError::FailedToObtainAuthType.into()),
        }
    }
}
/// Error payload returned by Plaid; only `error_message` is guaranteed.
#[derive(Debug, Deserialize, PartialEq, Eq)]
#[serde(rename_all = "snake_case")]
pub struct PlaidErrorResponse {
    pub display_message: Option<String>,
    pub error_code: Option<String>,
    pub error_message: String,
    pub error_type: Option<String>,
}
</crate>
|
{
"crate": "pm_auth",
"file": null,
"files": [
"crates/pm_auth/src/core.rs",
"crates/pm_auth/src/consts.rs",
"crates/pm_auth/src/types.rs",
"crates/pm_auth/src/lib.rs",
"crates/pm_auth/src/connector.rs",
"crates/pm_auth/src/types/api.rs",
"crates/pm_auth/src/types/api/auth_service.rs",
"crates/pm_auth/src/core/errors.rs",
"crates/pm_auth/src/connector/plaid.rs",
"crates/pm_auth/src/connector/plaid/transformers.rs"
],
"module": null,
"num_files": 10,
"token_count": 10154
}
|
crate_9088167450150810144
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: euclid_macros
Files: 4
</path>
<crate>
// File: crates/euclid_macros/src/lib.rs
mod inner;
use proc_macro::TokenStream;
/// Derives `to_num(&self) -> usize`, mapping each unit variant of an enum to
/// its zero-based declaration index.
#[proc_macro_derive(EnumNums)]
pub fn enum_nums(ts: TokenStream) -> TokenStream {
    inner::enum_nums_inner(ts)
}
/// Function-like macro compiling the euclid knowledge-graph DSL into code;
/// parse/validation failures become compile errors at the call site.
#[proc_macro]
pub fn knowledge(ts: TokenStream) -> TokenStream {
    match inner::knowledge_inner(ts.into()) {
        Ok(ts) => ts.into(),
        Err(e) => e.into_compile_error().into(),
    }
}
// File: crates/euclid_macros/src/inner.rs
mod enum_nums;
mod knowledge;
pub(crate) use enum_nums::enum_nums_inner;
pub(crate) use knowledge::knowledge_inner;
// File: crates/euclid_macros/src/inner/enum_nums.rs
use proc_macro::TokenStream;
use proc_macro2::{Span, TokenStream as TokenStream2};
use quote::quote;
/// Builds the compile error emitted when `EnumNums` is derived on anything
/// other than an enum whose variants are all unit variants.
fn error() -> TokenStream2 {
    // `syn::Error::new` accepts any `Display`, so the static message needs
    // no intermediate `String` allocation.
    syn::Error::new(
        Span::call_site(),
        "'EnumNums' can only be derived on enums with unit variants",
    )
    .to_compile_error()
}
pub(crate) fn enum_nums_inner(ts: TokenStream) -> TokenStream {
let derive_input = syn::parse_macro_input!(ts as syn::DeriveInput);
let enum_obj = match derive_input.data {
syn::Data::Enum(e) => e,
_ => return error().into(),
};
let enum_name = derive_input.ident;
let mut match_arms = Vec::<TokenStream2>::with_capacity(enum_obj.variants.len());
for (i, variant) in enum_obj.variants.iter().enumerate() {
match variant.fields {
syn::Fields::Unit => {}
_ => return error().into(),
}
let var_ident = &variant.ident;
match_arms.push(quote! { Self::#var_ident => #i });
}
let impl_block = quote! {
impl #enum_name {
pub fn to_num(&self) -> usize {
match self {
#(#match_arms),*
}
}
}
};
impl_block.into()
}
// File: crates/euclid_macros/src/inner/knowledge.rs
use std::{
fmt::{Display, Formatter},
hash::Hash,
rc::Rc,
};
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote};
use rustc_hash::{FxHashMap, FxHashSet};
use syn::{parse::Parse, Token};
// Custom punctuation for rule strength: `->` (normal) and `->>` (strong).
mod strength {
    syn::custom_punctuation!(Normal, ->);
    syn::custom_punctuation!(Strong, ->>);
}
// Custom keywords used inside atoms: `any` (wildcard) and `not` (negation).
mod kw {
    syn::custom_keyword!(any);
    syn::custom_keyword!(not);
}
/// Comparison operator attached to numeric atom values in the DSL.
#[derive(Clone, PartialEq, Eq, Hash)]
enum Comparison {
    LessThan,
    Equal,
    GreaterThan,
    GreaterThanEqual,
    LessThanEqual,
}
impl Display for Comparison {
    /// Renders the operator with a trailing space; `Equal` renders as
    /// nothing, since a bare number means equality in the DSL.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let symbol = match self {
            Self::LessThan => "< ",
            Self::Equal => return Ok(()),
            Self::GreaterThanEqual => ">= ",
            Self::LessThanEqual => "<= ",
            Self::GreaterThan => "> ",
        };
        write!(f, "{symbol}")
    }
}
impl Parse for Comparison {
    /// Parses an optional comparison operator preceding a number literal;
    /// when no operator is present, equality is assumed.
    ///
    /// The two-character operators (`<=`, `>=`) must be checked before their
    /// single-character prefixes: `peek(Token![<])` also matches the leading
    /// `<` of `<=` (syn's punct peek ignores trailing spacing for the final
    /// character), so the previous ordering made the `<=`/`>=` branches
    /// unreachable and `<= n` / `>= n` failed to parse.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        if input.peek(Token![<=]) {
            input.parse::<Token![<=]>()?;
            Ok(Self::LessThanEqual)
        } else if input.peek(Token![>=]) {
            input.parse::<Token![>=]>()?;
            Ok(Self::GreaterThanEqual)
        } else if input.peek(Token![>]) {
            input.parse::<Token![>]>()?;
            Ok(Self::GreaterThan)
        } else if input.peek(Token![<]) {
            input.parse::<Token![<]>()?;
            Ok(Self::LessThan)
        } else {
            Ok(Self::Equal)
        }
    }
}
/// Value carried by a DSL atom: a wildcard, an enum variant name, or a
/// number with an optional comparison.
#[derive(Clone, PartialEq, Eq, Hash)]
enum ValueType {
    Any,
    EnumVariant(String),
    Number { number: i64, comparison: Comparison },
}
impl ValueType {
    /// Human-readable `key(value)` rendering, used in diagnostics.
    fn to_string(&self, key: &str) -> String {
        match self {
            Self::Any => format!("{key}(any)"),
            Self::EnumVariant(s) => format!("{key}({s})"),
            Self::Number { number, comparison } => {
                format!("{key}({comparison}{number})")
            }
        }
    }
}
impl Parse for ValueType {
    /// An identifier parses as an enum variant; `<`/`>` (which also cover
    /// `<=`/`>=`, as they start with the same character) or an integer
    /// literal parse as a number with its comparison.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(syn::Ident) {
            let ident: syn::Ident = input.parse()?;
            Ok(Self::EnumVariant(ident.to_string()))
        } else if lookahead.peek(Token![>])
            || lookahead.peek(Token![<])
            || lookahead.peek(syn::LitInt)
        {
            let comparison: Comparison = input.parse()?;
            let number: syn::LitInt = input.parse()?;
            let num_val = number.base10_parse::<i64>()?;
            Ok(Self::Number {
                number: num_val,
                comparison,
            })
        } else {
            Err(lookahead.error())
        }
    }
}
/// A single key/value fact in the DSL, e.g. `CardType(Visa)` or `any Amount`.
#[derive(Clone, PartialEq, Eq, Hash)]
struct Atom {
    key: String,
    value: ValueType,
}
impl Display for Atom {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.value.to_string(&self.key))
    }
}
impl Parse for Atom {
    /// Two surface forms: `any Key` (wildcard value) and `Key(value)`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let maybe_any: syn::Ident = input.parse()?;
        if maybe_any == "any" {
            let actual_key: syn::Ident = input.parse()?;
            Ok(Self {
                key: actual_key.to_string(),
                value: ValueType::Any,
            })
        } else {
            // First ident was the key; the value follows in parentheses.
            let content;
            syn::parenthesized!(content in input);
            let value: ValueType = content.parse()?;
            Ok(Self {
                key: maybe_any.to_string(),
                value,
            })
        }
    }
}
/// Rule strength: `->` (Normal) or `->>` (Strong).
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Strength {
    Normal,
    Strong,
}
impl Parse for Strength {
    /// `->>` must be peeked before `->`, since `->` is a prefix of `->>`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let lookahead = input.lookahead1();
        if lookahead.peek(strength::Strong) {
            input.parse::<strength::Strong>()?;
            Ok(Self::Strong)
        } else if lookahead.peek(strength::Normal) {
            input.parse::<strength::Normal>()?;
            Ok(Self::Normal)
        } else {
            Err(lookahead.error())
        }
    }
}
/// Whether an atom asserts a fact (`Positive`) or its negation (`Negative`,
/// written with the `not` keyword).
#[derive(Clone, PartialEq, Eq, Hash, strum::Display)]
enum Relation {
    Positive,
    Negative,
}
/// Left-hand-side atom of a rule: either a single value assertion or a
/// membership test over a bracketed list (`Key(in [A, B, ...])`).
enum AtomType {
    Value {
        relation: Relation,
        atom: Rc<Atom>,
    },
    InAggregator {
        key: String,
        values: Vec<String>,
        relation: Relation,
    },
}
/// Parses the value part of an LHS atom (after key and optional `not`):
/// `in [A, B, ...]` (membership, at least one element, no trailing comma),
/// `any` (wildcard), or a plain value.
fn parse_atom_type_inner(
    input: syn::parse::ParseStream<'_>,
    key: syn::Ident,
    relation: Relation,
) -> syn::Result<AtomType> {
    let result = if input.peek(Token![in]) {
        input.parse::<Token![in]>()?;
        let bracketed;
        syn::bracketed!(bracketed in input);
        let mut values = Vec::<String>::new();
        // Comma-separated idents; parsing an ident after every comma means a
        // trailing comma is rejected.
        let first: syn::Ident = bracketed.parse()?;
        values.push(first.to_string());
        while !bracketed.is_empty() {
            bracketed.parse::<Token![,]>()?;
            let next: syn::Ident = bracketed.parse()?;
            values.push(next.to_string());
        }
        AtomType::InAggregator {
            key: key.to_string(),
            values,
            relation,
        }
    } else if input.peek(kw::any) {
        input.parse::<kw::any>()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value: ValueType::Any,
            }),
        }
    } else {
        let value: ValueType = input.parse()?;
        AtomType::Value {
            relation,
            atom: Rc::new(Atom {
                key: key.to_string(),
                value,
            }),
        }
    };
    Ok(result)
}
impl Parse for AtomType {
    /// LHS atom: `Key(value)`, `Key(not value)`, `Key(in [...])`, `Key(any)`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let key: syn::Ident = input.parse()?;
        let content;
        syn::parenthesized!(content in input);
        // Optional leading `not` flips the relation.
        let relation = if content.peek(kw::not) {
            content.parse::<kw::not>()?;
            Relation::Negative
        } else {
            Relation::Positive
        };
        let result = parse_atom_type_inner(&content, key, relation)?;
        if !content.is_empty() {
            Err(content.error("Unexpected input received after atom value"))
        } else {
            Ok(result)
        }
    }
}
/// RHS atom of a rule: only `Key(any)` or `Key(Variant)` are allowed —
/// no numbers, negation, or membership on the right-hand side.
fn parse_rhs_atom(input: syn::parse::ParseStream<'_>) -> syn::Result<Atom> {
    let key: syn::Ident = input.parse()?;
    let content;
    syn::parenthesized!(content in input);
    let lookahead = content.lookahead1();
    let value_type = if lookahead.peek(kw::any) {
        content.parse::<kw::any>()?;
        ValueType::Any
    } else if lookahead.peek(syn::Ident) {
        let variant = content.parse::<syn::Ident>()?;
        ValueType::EnumVariant(variant.to_string())
    } else {
        return Err(lookahead.error());
    };
    if !content.is_empty() {
        Err(content.error("Unexpected input received after atom value"))
    } else {
        Ok(Atom {
            key: key.to_string(),
            value: value_type,
        })
    }
}
/// One parsed rule: `&`-joined LHS atoms, a strength marker, and an RHS atom.
struct Rule {
    lhs: Vec<AtomType>,
    strength: Strength,
    rhs: Rc<Atom>,
}
impl Parse for Rule {
    /// Parses one rule: one or more `&`-separated LHS atoms, a strength
    /// marker, an RHS atom, and a terminating `;`.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let mut lhs = vec![input.parse::<AtomType>()?];
        while input.peek(Token![&]) {
            input.parse::<Token![&]>()?;
            lhs.push(input.parse::<AtomType>()?);
        }
        let strength = input.parse::<Strength>()?;
        let rhs = Rc::new(parse_rhs_atom(input)?);
        input.parse::<Token![;]>()?;
        Ok(Self { lhs, strength, rhs })
    }
}
/// The full macro input: a sequence of rules.
#[derive(Clone)]
struct Program {
    rules: Vec<Rc<Rule>>,
}
impl Parse for Program {
    /// Parses rules until the token stream is exhausted.
    fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
        let mut rules = Vec::new();
        while !input.is_empty() {
            let rule: Rule = input.parse()?;
            rules.push(Rc::new(rule));
        }
        Ok(Self { rules })
    }
}
/// State threaded through graph construction and code generation.
struct GenContext {
    /// Next graph-node index to hand out (starts at 1).
    next_idx: usize,
    /// Next suffix for generated `_node_N` identifiers (starts at 1).
    next_node_idx: usize,
    /// Node index -> interned atom.
    idx2atom: FxHashMap<usize, Rc<Atom>>,
    /// Interned atom -> node index (inverse of `idx2atom`).
    atom2idx: FxHashMap<Rc<Atom>, usize>,
    /// Directed adjacency map between node indices.
    edges: FxHashMap<usize, FxHashSet<usize>>,
    /// Atoms already lowered to code, mapped to their generated identifier.
    compiled_atoms: FxHashMap<Rc<Atom>, proc_macro2::Ident>,
}
impl GenContext {
    /// Creates an empty generation context with fresh counters and maps.
    fn new() -> Self {
        Self {
            next_idx: 1,
            next_node_idx: 1,
            idx2atom: FxHashMap::default(),
            atom2idx: FxHashMap::default(),
            edges: FxHashMap::default(),
            compiled_atoms: FxHashMap::default(),
        }
    }
    /// Interns an atom as a graph node, returning the existing index if the
    /// same atom was registered before.
    fn register_node(&mut self, atom: Rc<Atom>) -> usize {
        if let Some(idx) = self.atom2idx.get(&atom) {
            *idx
        } else {
            let this_idx = self.next_idx;
            self.next_idx += 1;
            self.idx2atom.insert(this_idx, Rc::clone(&atom));
            self.atom2idx.insert(atom, this_idx);
            this_idx
        }
    }
    /// Records a directed edge `from -> to`, rejecting duplicates. The
    /// target node is also given an (empty) adjacency entry so that every
    /// registered node appears in `edges`.
    fn register_edge(&mut self, from: usize, to: usize) -> Result<(), String> {
        let node_children = self.edges.entry(from).or_default();
        if node_children.contains(&to) {
            Err("Duplicate edge detected".to_string())
        } else {
            node_children.insert(to);
            self.edges.entry(to).or_default();
            Ok(())
        }
    }
    /// Registers a rule's nodes and edges: each plain-value LHS atom gets an
    /// edge to the RHS atom. In-aggregator atoms carry no interned node and
    /// therefore do not participate in cycle detection.
    fn register_rule(&mut self, rule: &Rule) -> Result<(), String> {
        let to_idx = self.register_node(Rc::clone(&rule.rhs));
        for atom_type in &rule.lhs {
            if let AtomType::Value { atom, .. } = atom_type {
                let from_idx = self.register_node(Rc::clone(atom));
                self.register_edge(from_idx, to_idx)?;
            }
        }
        Ok(())
    }
    /// Depth-first search used for cycle detection.
    ///
    /// `explored` holds the nodes on the *current* DFS path (the "gray"
    /// set, mirroring `order`), while `visited` holds every node ever
    /// entered across calls (the "black" set). Returns the node sequence
    /// forming a cycle, if one is found.
    fn cycle_dfs(
        &self,
        node_id: usize,
        explored: &mut FxHashSet<usize>,
        visited: &mut FxHashSet<usize>,
        order: &mut Vec<usize>,
    ) -> Result<Option<Vec<usize>>, String> {
        if explored.contains(&node_id) {
            // Node is already on the current path: the cycle is the suffix
            // of `order` starting at its first occurrence.
            let position = order
                .iter()
                .position(|v| *v == node_id)
                .ok_or_else(|| "Error deciding cycle order".to_string())?;
            let cycle_order = order
                .get(position..)
                .ok_or_else(|| "Error getting cycle order".to_string())?
                .to_vec();
            Ok(Some(cycle_order))
        } else if visited.contains(&node_id) {
            // Already fully processed in an earlier traversal; it cannot be
            // part of a new cycle.
            Ok(None)
        } else {
            visited.insert(node_id);
            explored.insert(node_id);
            order.push(node_id);
            let dests = self
                .edges
                .get(&node_id)
                .ok_or_else(|| "Error getting edges of node".to_string())?;
            for dest in dests.iter().copied() {
                if let Some(cycle) = self.cycle_dfs(dest, explored, visited, order)? {
                    return Ok(Some(cycle));
                }
            }
            // Backtrack: remove the node from the current path. Previously
            // only `order` was popped while the node stayed in `explored`,
            // so a diamond-shaped (cycle-free) graph such as `A->B, A->C->B`
            // re-entered the cycle branch for `B` with `order = [A, C]` and
            // failed with "Error deciding cycle order".
            explored.remove(&node_id);
            order.pop();
            Ok(None)
        }
    }
    /// Runs `cycle_dfs` from every node and, if a cycle exists, renders it as
    /// an error message of the form `Found cycle: a -> b -> a`.
    fn detect_graph_cycles(&self) -> Result<(), String> {
        let start_nodes = self.edges.keys().copied().collect::<Vec<usize>>();
        let mut total_visited = FxHashSet::<usize>::default();
        for node_id in start_nodes.iter().copied() {
            let mut explored = FxHashSet::<usize>::default();
            let mut order = Vec::<usize>::new();
            match self.cycle_dfs(node_id, &mut explored, &mut total_visited, &mut order)? {
                None => {}
                Some(order) => {
                    // Repeat the first node at the end to close the loop in
                    // the displayed chain.
                    let mut display_strings = Vec::<String>::with_capacity(order.len() + 1);
                    for cycle_node_id in order {
                        let node = self.idx2atom.get(&cycle_node_id).ok_or_else(|| {
                            "Failed to find node during cycle display creation".to_string()
                        })?;
                        display_strings.push(node.to_string());
                    }
                    let first = display_strings
                        .first()
                        .cloned()
                        .ok_or("Unable to fill cycle display array")?;
                    display_strings.push(first);
                    return Err(format!("Found cycle: {}", display_strings.join(" -> ")));
                }
            }
        }
        Ok(())
    }
    /// Returns a fresh `_node_N` identifier and its numeric suffix.
    fn next_node_ident(&mut self) -> (proc_macro2::Ident, usize) {
        let this_idx = self.next_node_idx;
        self.next_node_idx += 1;
        (format_ident!("_node_{this_idx}"), this_idx)
    }
    /// Emits the code creating a value node for `atom` (once per distinct
    /// atom; subsequent calls reuse the cached identifier).
    fn compile_atom(
        &mut self,
        atom: &Rc<Atom>,
        tokens: &mut TokenStream,
    ) -> Result<proc_macro2::Ident, String> {
        let maybe_ident = self.compiled_atoms.get(atom);
        if let Some(ident) = maybe_ident {
            Ok(ident.clone())
        } else {
            let (identifier, _) = self.next_node_ident();
            let key = format_ident!("{}", &atom.key);
            let the_value = match &atom.value {
                ValueType::Any => quote! {
                    cgraph::NodeValue::Key(DirKey::new(DirKeyKind::#key,None))
                },
                ValueType::EnumVariant(variant) => {
                    let variant = format_ident!("{}", variant);
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(#key::#variant))
                    }
                }
                ValueType::Number { number, comparison } => {
                    // An equality comparison needs no refinement.
                    let comp_type = match comparison {
                        Comparison::Equal => quote! {
                            None
                        },
                        Comparison::LessThan => quote! {
                            Some(NumValueRefinement::LessThan)
                        },
                        Comparison::GreaterThan => quote! {
                            Some(NumValueRefinement::GreaterThan)
                        },
                        Comparison::GreaterThanEqual => quote! {
                            Some(NumValueRefinement::GreaterThanEqual)
                        },
                        Comparison::LessThanEqual => quote! {
                            Some(NumValueRefinement::LessThanEqual)
                        },
                    };
                    quote! {
                        cgraph::NodeValue::Value(DirValue::#key(NumValue {
                            number: #number,
                            refinement: #comp_type,
                        }))
                    }
                }
            };
            let compiled = quote! {
                let #identifier = graph.make_value_node(#the_value, None, None::<()>);
            };
            tokens.extend(compiled);
            self.compiled_atoms
                .insert(Rc::clone(atom), identifier.clone());
            Ok(identifier)
        }
    }
    /// Emits the code for one LHS atom (a value node or an in-aggregator
    /// node) and returns its identifier together with its relation.
    fn compile_atom_type(
        &mut self,
        atom_type: &AtomType,
        tokens: &mut TokenStream,
    ) -> Result<(proc_macro2::Ident, Relation), String> {
        match atom_type {
            AtomType::Value { relation, atom } => {
                let node_ident = self.compile_atom(atom, tokens)?;
                Ok((node_ident, relation.clone()))
            }
            AtomType::InAggregator {
                key,
                values,
                relation,
            } => {
                let key_ident = format_ident!("{key}");
                let mut values_tokens: Vec<TokenStream> = Vec::new();
                for value in values {
                    let value_ident = format_ident!("{value}");
                    values_tokens.push(quote! { DirValue::#key_ident(#key_ident::#value_ident) });
                }
                let (node_ident, _) = self.next_node_ident();
                let node_code = quote! {
                    let #node_ident = graph.make_in_aggregator(
                        Vec::from_iter([#(#values_tokens),*]),
                        None,
                        None::<()>,
                    ).expect("Failed to make In aggregator");
                };
                tokens.extend(node_code);
                Ok((node_ident, relation.clone()))
            }
        }
    }
    /// Emits the edges for one rule. A single-atom LHS connects directly to
    /// the RHS; multiple atoms are first combined through an all-aggregator
    /// node with a positive edge to the RHS.
    fn compile_rule(&mut self, rule: &Rule, tokens: &mut TokenStream) -> Result<(), String> {
        let rhs_ident = self.compile_atom(&rule.rhs, tokens)?;
        let mut node_details: Vec<(proc_macro2::Ident, Relation)> =
            Vec::with_capacity(rule.lhs.len());
        for lhs_atom_type in &rule.lhs {
            let details = self.compile_atom_type(lhs_atom_type, tokens)?;
            node_details.push(details);
        }
        if node_details.len() <= 1 {
            let strength = format_ident!("{}", rule.strength.to_string());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                tokens.extend(quote! {
                    graph.make_edge(#from_node, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::#relation, None::<cgraph::DomainId>)
                        .expect("Failed to make edge");
                });
            }
        } else {
            let mut all_agg_nodes: Vec<TokenStream> = Vec::with_capacity(node_details.len());
            for (from_node, relation) in &node_details {
                let relation = format_ident!("{}", relation.to_string());
                all_agg_nodes.push(
                    quote! { (#from_node, cgraph::Relation::#relation, cgraph::Strength::Strong) },
                );
            }
            let strength = format_ident!("{}", rule.strength.to_string());
            let (agg_node_ident, _) = self.next_node_ident();
            tokens.extend(quote! {
                let #agg_node_ident = graph.make_all_aggregator(&[#(#all_agg_nodes),*], None, None::<()>, None)
                    .expect("Failed to make all aggregator node");
                graph.make_edge(#agg_node_ident, #rhs_ident, cgraph::Strength::#strength, cgraph::Relation::Positive, None::<cgraph::DomainId>)
                    .expect("Failed to create all aggregator edge");
            });
        }
        Ok(())
    }
    /// Compiles the whole program into a block expression that builds and
    /// returns the constraint graph.
    fn compile(&mut self, program: Program) -> Result<TokenStream, String> {
        let mut tokens = TokenStream::new();
        for rule in &program.rules {
            self.compile_rule(rule, &mut tokens)?;
        }
        let compiled = quote! {{
            use euclid_graph_prelude::*;
            let mut graph = cgraph::ConstraintGraphBuilder::new();
            #tokens
            graph.build()
        }};
        Ok(compiled)
    }
}
pub(crate) fn knowledge_inner(ts: TokenStream) -> syn::Result<TokenStream> {
let program = syn::parse::<Program>(ts.into())?;
let mut gen_context = GenContext::new();
for rule in &program.rules {
gen_context
.register_rule(rule)
.map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
}
gen_context
.detect_graph_cycles()
.map_err(|msg| syn::Error::new(Span::call_site(), msg))?;
gen_context
.compile(program)
.map_err(|msg| syn::Error::new(Span::call_site(), msg))
}
</crate>
|
{
"crate": "euclid_macros",
"file": null,
"files": [
"crates/euclid_macros/src/lib.rs",
"crates/euclid_macros/src/inner.rs",
"crates/euclid_macros/src/inner/enum_nums.rs",
"crates/euclid_macros/src/inner/knowledge.rs"
],
"module": null,
"num_files": 4,
"token_count": 4645
}
|
crate_1422686591602470332
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: masking
Files: 14
</path>
<crate>
// File: crates/masking/tests/basic.rs
#![allow(dead_code, clippy::unwrap_used, clippy::panic_in_result_fn)]
use masking::Secret;
#[cfg(feature = "serde")]
use masking::SerializableSecret;
#[cfg(feature = "alloc")]
use masking::ZeroizableSecret;
#[cfg(feature = "serde")]
use serde::Serialize;
/// End-to-end check of `Secret` wrapping a custom type: construction, clone,
/// masked `Debug` output, and (with `serde`) exposed serialization.
/// NOTE: the expected Debug string embeds `std::any::type_name`, so it
/// depends on the test's module path (`basic::basic::AccountNumber`).
#[test]
fn basic() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    #[cfg_attr(feature = "serde", derive(Serialize))]
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct AccountNumber(String);
    #[cfg(feature = "alloc")]
    impl ZeroizableSecret for AccountNumber {
        fn zeroize(&mut self) {
            self.0.zeroize();
        }
    }
    // Opting the custom type into serialization of its Secret wrapper.
    #[cfg(feature = "serde")]
    impl SerializableSecret for AccountNumber {}
    #[cfg_attr(feature = "serde", derive(Serialize))]
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct Composite {
        secret_number: Secret<AccountNumber>,
        not_secret: String,
    }
    // construct
    let secret_number = Secret::<AccountNumber>::new(AccountNumber("abc".to_string()));
    let not_secret = "not secret".to_string();
    let composite = Composite {
        secret_number,
        not_secret,
    };
    // clone
    #[allow(clippy::redundant_clone)] // We are asserting that the cloned value is equal
    let composite2 = composite.clone();
    assert_eq!(composite, composite2);
    // format: the secret field must be masked, the plain field printed as-is
    let got = format!("{composite:?}");
    let exp = r#"Composite { secret_number: *** basic::basic::AccountNumber ***, not_secret: "not secret" }"#;
    assert_eq!(got, exp);
    // serialize: plain serde serialization exposes the secret by design
    #[cfg(feature = "serde")]
    {
        let got = serde_json::to_string(&composite).unwrap();
        let exp = r#"{"secret_number":"abc","not_secret":"not secret"}"#;
        assert_eq!(got, exp);
    }
    // end
    Ok(())
}
/// Like `basic`, but the secret field carries `#[serde(skip)]`, so the
/// serialized output must omit it entirely (no `SerializableSecret` impl
/// is provided for the inner type here).
#[test]
fn without_serialize() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    #[cfg_attr(feature = "serde", derive(Serialize))]
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct AccountNumber(String);
    #[cfg(feature = "alloc")]
    impl ZeroizableSecret for AccountNumber {
        fn zeroize(&mut self) {
            self.0.zeroize();
        }
    }
    #[cfg_attr(feature = "serde", derive(Serialize))]
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct Composite {
        #[cfg_attr(feature = "serde", serde(skip))]
        secret_number: Secret<AccountNumber>,
        not_secret: String,
    }
    // construct
    let secret_number = Secret::<AccountNumber>::new(AccountNumber("abc".to_string()));
    let not_secret = "not secret".to_string();
    let composite = Composite {
        secret_number,
        not_secret,
    };
    // format: masking still applies to Debug output
    let got = format!("{composite:?}");
    let exp = r#"Composite { secret_number: *** basic::without_serialize::AccountNumber ***, not_secret: "not secret" }"#;
    assert_eq!(got, exp);
    // serialize: the skipped field must not appear in the JSON
    #[cfg(feature = "serde")]
    {
        let got = serde_json::to_string(&composite).unwrap();
        let exp = r#"{"not_secret":"not secret"}"#;
        assert_eq!(got, exp);
    }
    // end
    Ok(())
}
/// `Secret<String>` round-trip: `String` already implements the required
/// marker traits, so no custom impls are needed.
#[test]
fn for_string() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    #[cfg_attr(all(feature = "alloc", feature = "serde"), derive(Serialize))]
    #[derive(Clone, Debug, PartialEq, Eq)]
    pub struct Composite {
        secret_number: Secret<String>,
        not_secret: String,
    }
    // construct
    let secret_number = Secret::<String>::new("abc".to_string());
    let not_secret = "not secret".to_string();
    let composite = Composite {
        secret_number,
        not_secret,
    };
    // clone
    #[allow(clippy::redundant_clone)] // We are asserting that the cloned value is equal
    let composite2 = composite.clone();
    assert_eq!(composite, composite2);
    // format: masked Debug shows the inner type name, not the value
    let got = format!("{composite:?}");
    let exp =
        r#"Composite { secret_number: *** alloc::string::String ***, not_secret: "not secret" }"#;
    assert_eq!(got, exp);
    // serialize
    #[cfg(all(feature = "alloc", feature = "serde"))]
    {
        let got = serde_json::to_string(&composite).unwrap();
        let exp = r#"{"secret_number":"abc","not_secret":"not secret"}"#;
        assert_eq!(got, exp);
    }
    // end
    Ok(())
}
// File: crates/masking/src/strategy.rs
use core::fmt;
/// Debugging trait which is specialized for handling secret values
pub trait Strategy<T> {
    /// Format information about the secret's type.
    ///
    /// Implementations decide what a masked value looks like (e.g. with or
    /// without the inner type's name); they must never print the value.
    fn fmt(value: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result;
}
/// Debug with type
#[cfg_attr(feature = "serde", derive(serde::Deserialize))]
#[derive(Debug, Copy, Clone, PartialEq)]
pub enum WithType {}
impl<T> Strategy<T> for WithType {
    /// Prints `*** <type name> ***` in place of the secret value.
    fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "*** {} ***", std::any::type_name::<T>())
    }
}
/// Debug without type
pub enum WithoutType {}
impl<T> Strategy<T> for WithoutType {
    /// Prints the fixed placeholder `*** ***`, revealing nothing about `T`.
    fn fmt(_: &T, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(fmt, "*** ***")
    }
}
// File: crates/masking/src/abs.rs
//! Abstract data types.
use crate::Secret;
/// Interface to expose a reference to an inner secret
pub trait PeekInterface<S> {
    /// Only method providing access to the secret value.
    fn peek(&self) -> &S;
    /// Provide a mutable reference to the inner value.
    fn peek_mut(&mut self) -> &mut S;
}
/// Interface that consumes a option secret and returns the value.
pub trait ExposeOptionInterface<S> {
    /// Expose option.
    fn expose_option(self) -> S;
}
/// Interface that consumes a secret and returns the inner value.
pub trait ExposeInterface<S> {
    /// Consume the secret and return the inner value
    fn expose(self) -> S;
}
impl<S, I> ExposeOptionInterface<Option<S>> for Option<Secret<S, I>>
where
    S: Clone,
    I: crate::Strategy<S>,
{
    /// `None` stays `None`; `Some(secret)` is consumed and its inner value
    /// returned.
    fn expose_option(self) -> Option<S> {
        self.map(|secret| secret.expose())
    }
}
impl<S, I> ExposeInterface<S> for Secret<S, I>
where
    I: crate::Strategy<S>,
{
    // Moves the wrapped value out of the Secret; after this call the
    // masking wrapper is gone.
    fn expose(self) -> S {
        self.inner_secret
    }
}
/// Interface that consumes a secret and converts it to a secret with a different masking strategy.
pub trait SwitchStrategy<FromStrategy, ToStrategy> {
    /// The type returned by `switch_strategy()`.
    type Output;
    /// Consumes the secret and converts it to a secret with a different masking strategy.
    fn switch_strategy(self) -> Self::Output;
}
impl<S, FromStrategy, ToStrategy> SwitchStrategy<FromStrategy, ToStrategy>
    for Secret<S, FromStrategy>
where
    FromStrategy: crate::Strategy<S>,
    ToStrategy: crate::Strategy<S>,
{
    type Output = Secret<S, ToStrategy>;
    // Only the phantom strategy type changes; the inner value is moved
    // across unchanged.
    fn switch_strategy(self) -> Self::Output {
        Secret::new(self.inner_secret)
    }
}
// File: crates/masking/src/strong_secret.rs
//! Structure describing secret.
use std::{fmt, marker::PhantomData};
use subtle::ConstantTimeEq;
use zeroize::{self, Zeroize as ZeroizableSecret};
use crate::{strategy::Strategy, PeekInterface};
/// Secret thing.
///
/// To get access to value use method `expose()` of trait [`crate::ExposeInterface`].
///
/// Unlike [`crate::Secret`], the inner type must be zeroizable: the value is
/// wiped from memory on drop (see the `Drop` impl below).
pub struct StrongSecret<Secret: ZeroizableSecret, MaskingStrategy = crate::WithType> {
    /// Inner secret value
    pub(crate) inner_secret: Secret,
    // Zero-sized marker selecting the Debug/Display masking strategy.
    pub(crate) masking_strategy: PhantomData<MaskingStrategy>,
}
impl<Secret: ZeroizableSecret, MaskingStrategy> StrongSecret<Secret, MaskingStrategy> {
    /// Take ownership of a secret value
    pub fn new(secret: Secret) -> Self {
        Self {
            inner_secret: secret,
            masking_strategy: PhantomData,
        }
    }
}
// Borrow-based access to the inner value without consuming the wrapper.
impl<Secret: ZeroizableSecret, MaskingStrategy> PeekInterface<Secret>
    for StrongSecret<Secret, MaskingStrategy>
{
    fn peek(&self) -> &Secret {
        &self.inner_secret
    }
    fn peek_mut(&mut self) -> &mut Secret {
        &mut self.inner_secret
    }
}
// Allows `value.into()` wherever a StrongSecret is expected.
impl<Secret: ZeroizableSecret, MaskingStrategy> From<Secret>
    for StrongSecret<Secret, MaskingStrategy>
{
    fn from(secret: Secret) -> Self {
        Self::new(secret)
    }
}
// Hand-written because `#[derive(Clone)]` would also require
// `MaskingStrategy: Clone`, which the phantom marker does not need.
impl<Secret: Clone + ZeroizableSecret, MaskingStrategy> Clone
    for StrongSecret<Secret, MaskingStrategy>
{
    fn clone(&self) -> Self {
        Self::new(self.inner_secret.clone())
    }
}
// Equality goes through StrongEq, whose impls below use constant-time
// comparison (subtle::ConstantTimeEq) to avoid timing side channels.
impl<Secret, MaskingStrategy> PartialEq for StrongSecret<Secret, MaskingStrategy>
where
    Self: PeekInterface<Secret>,
    Secret: ZeroizableSecret + StrongEq,
{
    fn eq(&self, other: &Self) -> bool {
        StrongEq::strong_eq(self.peek(), other.peek())
    }
}
impl<Secret, MaskingStrategy> Eq for StrongSecret<Secret, MaskingStrategy>
where
    Self: PeekInterface<Secret>,
    Secret: ZeroizableSecret + StrongEq,
{
}
// Both Debug and Display delegate to the masking strategy, so the raw
// secret never reaches formatted output.
impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Debug
    for StrongSecret<Secret, MaskingStrategy>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}
impl<Secret: ZeroizableSecret, MaskingStrategy: Strategy<Secret>> fmt::Display
    for StrongSecret<Secret, MaskingStrategy>
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}
// A defaultable inner type yields a defaultable StrongSecret.
// (The original declared `Secret: ZeroizableSecret` both in the generic
// parameter list and in the `where` clause; the duplicate bound is removed.)
impl<Secret, MaskingStrategy> Default for StrongSecret<Secret, MaskingStrategy>
where
    Secret: ZeroizableSecret + Default,
{
    fn default() -> Self {
        Secret::default().into()
    }
}
// The "strong" guarantee: the secret is zeroed in memory when dropped.
impl<Secret: ZeroizableSecret, MaskingStrategy> Drop for StrongSecret<Secret, MaskingStrategy> {
    fn drop(&mut self) {
        self.inner_secret.zeroize();
    }
}
/// Constant-time equality used by `StrongSecret`'s `PartialEq`.
trait StrongEq {
    fn strong_eq(&self, other: &Self) -> bool;
}
impl StrongEq for String {
    // Compare the UTF-8 bytes in constant time via subtle::ConstantTimeEq.
    fn strong_eq(&self, other: &Self) -> bool {
        bool::from(self.as_bytes().ct_eq(other.as_bytes()))
    }
}
impl StrongEq for Vec<u8> {
    fn strong_eq(&self, other: &Self) -> bool {
        bool::from(self.as_slice().ct_eq(other.as_slice()))
    }
}
// Transparent protobuf encoding: the wrapper delegates to the inner
// message, so a StrongSecret<T> is wire-compatible with a plain T.
#[cfg(feature = "proto_tonic")]
impl<T> prost::Message for StrongSecret<T, crate::WithType>
where
    T: prost::Message + Default + Clone + ZeroizableSecret,
{
    fn encode_raw(&self, buf: &mut impl bytes::BufMut) {
        self.peek().encode_raw(buf);
    }
    fn merge_field(
        &mut self,
        tag: u32,
        wire_type: prost::encoding::WireType,
        buf: &mut impl bytes::Buf,
        ctx: prost::encoding::DecodeContext,
    ) -> Result<(), prost::DecodeError> {
        // NOTE(review): only field tag 1 is merged into the inner message;
        // all other tags are skipped. This assumes the wrapped message's
        // payload lives at tag 1 — confirm against the proto definitions.
        if tag == 1 {
            self.peek_mut().merge_field(tag, wire_type, buf, ctx)
        } else {
            prost::encoding::skip_field(wire_type, tag, buf, ctx)
        }
    }
    fn encoded_len(&self) -> usize {
        self.peek().encoded_len()
    }
    fn clear(&mut self) {
        self.peek_mut().clear();
    }
}
// File: crates/masking/src/serde.rs
//! Serde-related.
pub use erased_serde::Serialize as ErasedSerialize;
pub use serde::{de, Deserialize, Serialize, Serializer};
use serde_json::{value::Serializer as JsonValueSerializer, Value};
use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
/// Marker trait for secret types which can be [`Serialize`]-d by [`serde`].
///
/// When the `serde` feature of this crate is enabled and types are marked with
/// this trait, they receive a [`Serialize` impl] for `Secret<T>`.
/// (NOTE: all types which impl `DeserializeOwned` receive a [`Deserialize`]
/// impl)
///
/// This is done deliberately to prevent accidental exfiltration of secrets
/// via `serde` serialization.
#[cfg_attr(docsrs, cfg(feature = "serde"))]
pub trait SerializableSecret: Serialize {}
// #[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
// pub trait NonSerializableSecret: Serialize {}
// Opt-in list of primitives and common types considered safe to serialize
// when wrapped in a Secret.
impl SerializableSecret for Value {}
impl SerializableSecret for u8 {}
impl SerializableSecret for u16 {}
impl SerializableSecret for i8 {}
impl SerializableSecret for i32 {}
impl SerializableSecret for i64 {}
impl SerializableSecret for url::Url {}
#[cfg(feature = "time")]
impl SerializableSecret for time::Date {}
// References to serializable secrets are themselves serializable.
impl<T: SerializableSecret> SerializableSecret for &T {}
// Deserialization is unrestricted: reading a secret in is safe, so any
// DeserializeOwned inner type qualifies (no SerializableSecret required).
impl<'de, T, I> Deserialize<'de> for Secret<T, I>
where
    T: Clone + de::DeserializeOwned + Sized,
    I: Strategy<T>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: de::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(Self::new)
    }
}
// Serialization requires the explicit SerializableSecret opt-in and is
// routed through pii_serialize, which masks when the target serializer is
// the PIISerializer and exposes the value otherwise.
impl<T, I> Serialize for Secret<T, I>
where
    T: SerializableSecret + Serialize + Sized,
    I: Strategy<T>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        pii_serializer::pii_serialize(self, serializer)
    }
}
// Same rule as Secret: any DeserializeOwned (and zeroizable) inner type can
// be read into a StrongSecret.
impl<'de, T, I> Deserialize<'de> for StrongSecret<T, I>
where
    T: Clone + de::DeserializeOwned + Sized + ZeroizableSecret,
    I: Strategy<T>,
{
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: serde::Deserializer<'de>,
    {
        T::deserialize(deserializer).map(Self::new)
    }
}
// Mirrors the Secret impl: opt-in serialization routed through
// pii_serialize for PII-aware masking.
impl<T, I> Serialize for StrongSecret<T, I>
where
    T: SerializableSecret + Serialize + ZeroizableSecret + Sized,
    I: Strategy<T>,
{
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        pii_serializer::pii_serialize(self, serializer)
    }
}
/// Masked serialization.
///
/// the default behaviour for secrets is to serialize in exposed format since the common use cases
/// for storing the secret to database or sending it over the network requires the secret to be exposed
/// This method allows to serialize the secret in masked format if needed for logs or other insecure exposures
pub fn masked_serialize<T: Serialize>(value: &T) -> Result<Value, serde_json::Error> {
    let serializer = PIISerializer {
        inner: JsonValueSerializer,
    };
    value.serialize(serializer)
}
/// Masked serialization.
///
/// Trait object for supporting serialization to Value while accounting for masking
/// The usual Serde Serialize trait cannot be used as trait objects
/// like &dyn Serialize or boxed trait objects like Box<dyn Serialize> because of Rust's "object safety" rules.
/// In particular, the trait contains generic methods which cannot be made into a trait object.
/// In this case we remove the generic for assuming the serialization to be of 2 types only raw json or masked json
pub trait ErasedMaskSerialize: ErasedSerialize {
    /// Masked serialization.
    fn masked_serialize(&self) -> Result<Value, serde_json::Error>;
}
// Blanket impl: every ordinary Serialize type automatically gains the
// masked variant via the free function above.
impl<T: Serialize + ErasedSerialize> ErasedMaskSerialize for T {
    fn masked_serialize(&self) -> Result<Value, serde_json::Error> {
        masked_serialize(self)
    }
}
// Let trait objects of ErasedMaskSerialize be used wherever Serialize is
// expected, by bridging back through erased_serde.
impl Serialize for dyn ErasedMaskSerialize + '_ {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        erased_serde::serialize(self, serializer)
    }
}
// Same bridge for Send-able trait objects.
impl Serialize for dyn ErasedMaskSerialize + '_ + Send {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        erased_serde::serialize(self, serializer)
    }
}
use pii_serializer::PIISerializer;
mod pii_serializer {
use std::fmt::Display;
    /// Serializes a peekable secret wrapper, masking only when the target is
    /// the PII serializer.
    ///
    /// NOTE(review): the dispatch compares `std::any::type_name` strings,
    /// which is not guaranteed to be stable or unique across compiler
    /// versions, and relies on serde's private `FlatMapSerializer` path —
    /// fragile, but left untouched here.
    pub(super) fn pii_serialize<
        V: Serialize,
        T: std::fmt::Debug + PeekInterface<V>,
        S: Serializer,
    >(
        value: &T,
        serializer: S,
    ) -> Result<S::Ok, S::Error> {
        // Mask the value if the serializer is of type PIISerializer
        // or send empty map if the serializer is of type FlatMapSerializer over PiiSerializer
        if std::any::type_name::<S>() == std::any::type_name::<PIISerializer>() {
            // Masked: emit the Debug representation (itself masked by the
            // wrapper's Debug impl) instead of the raw value.
            format!("{value:?}").serialize(serializer)
        } else if std::any::type_name::<S>()
            == std::any::type_name::<
                serde::__private::ser::FlatMapSerializer<'_, SerializeMap<PIISerializer>>,
            >()
        {
            std::collections::HashMap::<String, String>::from([]).serialize(serializer)
        } else {
            // Any other serializer gets the exposed inner value.
            value.peek().serialize(serializer)
        }
    }
use serde::{Serialize, Serializer};
use serde_json::{value::Serializer as JsonValueSerializer, Map, Value};
use crate::PeekInterface;
    /// JSON `Value` serializer that masks any `Secret`/`StrongSecret` it
    /// encounters (detected by type in `pii_serialize`).
    pub(super) struct PIISerializer {
        pub inner: JsonValueSerializer,
    }
    // Hand-written Clone: the inner serializer is a unit-like value, so a
    // fresh instance is equivalent to a copy.
    impl Clone for PIISerializer {
        fn clone(&self) -> Self {
            Self {
                inner: JsonValueSerializer,
            }
        }
    }
    // Mostly delegates to serde_json's Value serializer; the compound-type
    // methods (seq/tuple/map/struct/variant) thread `self` through so that
    // nested secrets are still routed to this serializer and masked.
    impl Serializer for PIISerializer {
        type Ok = Value;
        type Error = serde_json::Error;
        type SerializeSeq = SerializeVec<Self>;
        type SerializeTuple = SerializeVec<Self>;
        type SerializeTupleStruct = SerializeVec<Self>;
        type SerializeTupleVariant = SerializeTupleVariant<Self>;
        type SerializeMap = SerializeMap<Self>;
        type SerializeStruct = SerializeMap<Self>;
        type SerializeStructVariant = SerializeStructVariant<Self>;
        #[inline]
        fn serialize_bool(self, value: bool) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_bool(value)
        }
        #[inline]
        fn serialize_i8(self, value: i8) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        #[inline]
        fn serialize_i16(self, value: i16) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        #[inline]
        fn serialize_i32(self, value: i32) -> Result<Self::Ok, Self::Error> {
            self.serialize_i64(value.into())
        }
        fn serialize_i64(self, value: i64) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_i64(value)
        }
        fn serialize_i128(self, value: i128) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_i128(value)
        }
        #[inline]
        fn serialize_u8(self, value: u8) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u16(self, value: u16) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u32(self, value: u32) -> Result<Self::Ok, Self::Error> {
            self.serialize_u64(value.into())
        }
        #[inline]
        fn serialize_u64(self, value: u64) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Number(value.into()))
        }
        fn serialize_u128(self, value: u128) -> Result<Self::Ok, Self::Error> {
            self.inner.serialize_u128(value)
        }
        #[inline]
        fn serialize_f32(self, float: f32) -> Result<Self::Ok, Self::Error> {
            Ok(Value::from(float))
        }
        #[inline]
        fn serialize_f64(self, float: f64) -> Result<Self::Ok, Self::Error> {
            Ok(Value::from(float))
        }
        #[inline]
        fn serialize_char(self, value: char) -> Result<Self::Ok, Self::Error> {
            let mut s = String::new();
            s.push(value);
            Ok(Value::String(s))
        }
        #[inline]
        fn serialize_str(self, value: &str) -> Result<Self::Ok, Self::Error> {
            Ok(Value::String(value.to_owned()))
        }
        fn serialize_bytes(self, value: &[u8]) -> Result<Self::Ok, Self::Error> {
            // Bytes become an array of numbers, matching serde_json.
            let vec = value.iter().map(|&b| Value::Number(b.into())).collect();
            Ok(Value::Array(vec))
        }
        #[inline]
        fn serialize_unit(self) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Null)
        }
        #[inline]
        fn serialize_unit_struct(self, _name: &'static str) -> Result<Self::Ok, Self::Error> {
            self.serialize_unit()
        }
        #[inline]
        fn serialize_unit_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
        ) -> Result<Self::Ok, Self::Error> {
            self.serialize_str(variant)
        }
        #[inline]
        fn serialize_newtype_struct<T>(
            self,
            _name: &'static str,
            value: &T,
        ) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            value.serialize(self)
        }
        fn serialize_newtype_variant<T>(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            value: &T,
        ) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            // Externally-tagged representation: {"Variant": value}.
            let mut values = Map::new();
            values.insert(String::from(variant), value.serialize(self)?);
            Ok(Value::Object(values))
        }
        #[inline]
        fn serialize_none(self) -> Result<Self::Ok, Self::Error> {
            self.serialize_unit()
        }
        #[inline]
        fn serialize_some<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Serialize,
        {
            value.serialize(self)
        }
        fn serialize_seq(self, len: Option<usize>) -> Result<Self::SerializeSeq, Self::Error> {
            Ok(SerializeVec {
                vec: Vec::with_capacity(len.unwrap_or(0)),
                ser: self,
            })
        }
        fn serialize_tuple(self, len: usize) -> Result<Self::SerializeTuple, Self::Error> {
            self.serialize_seq(Some(len))
        }
        fn serialize_tuple_struct(
            self,
            _name: &'static str,
            len: usize,
        ) -> Result<Self::SerializeTupleStruct, Self::Error> {
            self.serialize_seq(Some(len))
        }
        fn serialize_tuple_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            len: usize,
        ) -> Result<Self::SerializeTupleVariant, Self::Error> {
            Ok(SerializeTupleVariant {
                name: String::from(variant),
                vec: Vec::with_capacity(len),
                ser: self,
            })
        }
        fn serialize_map(self, len: Option<usize>) -> Result<Self::SerializeMap, Self::Error> {
            // Keys go through the plain JSON serializer; values are routed
            // back through `self` (see SerializeMap below).
            Ok(SerializeMap {
                inner: self.clone().inner.serialize_map(len)?,
                ser: self,
            })
        }
        fn serialize_struct(
            self,
            _name: &'static str,
            len: usize,
        ) -> Result<Self::SerializeStruct, Self::Error> {
            self.serialize_map(Some(len))
        }
        fn serialize_struct_variant(
            self,
            _name: &'static str,
            _variant_index: u32,
            variant: &'static str,
            _len: usize,
        ) -> Result<Self::SerializeStructVariant, Self::Error> {
            Ok(SerializeStructVariant {
                name: String::from(variant),
                map: Map::new(),
                ser: self,
            })
        }
        fn collect_str<T>(self, value: &T) -> Result<Self::Ok, Self::Error>
        where
            T: ?Sized + Display,
        {
            self.inner.collect_str(value)
        }
    }
    /// Accumulator for sequences/tuples: each element is serialized through
    /// a clone of the parent serializer so nested secrets stay masked.
    pub(super) struct SerializeVec<T: Serializer> {
        vec: Vec<Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeSeq for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.vec.push(value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            Ok(Value::Array(self.vec))
        }
    }
    // Tuples and tuple structs reuse the sequence logic.
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTuple for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_element<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeSeq::serialize_element(self, value)
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            serde::ser::SerializeSeq::end(self)
        }
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleStruct for SerializeVec<T> {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeSeq::serialize_element(self, value)
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            serde::ser::SerializeSeq::end(self)
        }
    }
    /// Accumulator for struct variants; ends as `{"Variant": {fields...}}`.
    pub(super) struct SerializeStructVariant<T: Serializer> {
        name: String,
        map: Map<String, Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeStructVariant
        for SerializeStructVariant<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, key: &'static str, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            // Field values go through the parent serializer to keep masking.
            self.map
                .insert(String::from(key), value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Self::Ok, Self::Error> {
            let mut object = Map::new();
            object.insert(self.name, Value::Object(self.map));
            Ok(Value::Object(object))
        }
    }
    /// Accumulator for tuple variants; ends as `{"Variant": [items...]}`.
    pub(super) struct SerializeTupleVariant<T: Serializer> {
        name: String,
        vec: Vec<Value>,
        ser: T,
    }
    impl<T: Serializer<Ok = Value> + Clone> serde::ser::SerializeTupleVariant
        for SerializeTupleVariant<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            self.vec.push(value.serialize(self.ser.clone())?);
            Ok(())
        }
        fn end(self) -> Result<Value, Self::Error> {
            let mut object = Map::new();
            object.insert(self.name, Value::Array(self.vec));
            Ok(Value::Object(object))
        }
    }
    /// Accumulator for maps and structs: keys are serialized by the plain
    /// JSON map serializer, values by the (masking) parent serializer.
    pub(super) struct SerializeMap<T: Serializer> {
        inner: <serde_json::value::Serializer as Serializer>::SerializeMap,
        ser: T,
    }
    impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeMap
        for SerializeMap<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_key<V>(&mut self, key: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            // Keys are never masked; they go straight to serde_json.
            self.inner.serialize_key(key)?;
            Ok(())
        }
        fn serialize_value<V>(&mut self, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            // Values are first rendered through the masking serializer, then
            // handed to the JSON map as a ready-made Value.
            let value = value.serialize(self.ser.clone())?;
            self.inner.serialize_value(&value)?;
            Ok(())
        }
        fn end(self) -> Result<Value, Self::Error> {
            self.inner.end()
        }
    }
    // Structs reuse the map machinery: field name = key, field value = value.
    impl<T: Serializer<Ok = Value, Error = serde_json::Error> + Clone> serde::ser::SerializeStruct
        for SerializeMap<T>
    {
        type Ok = Value;
        type Error = T::Error;
        fn serialize_field<V>(&mut self, key: &'static str, value: &V) -> Result<(), Self::Error>
        where
            V: ?Sized + Serialize,
        {
            serde::ser::SerializeMap::serialize_entry(self, key, value)
        }
        fn end(self) -> Result<Value, Self::Error> {
            serde::ser::SerializeMap::end(self)
        }
    }
}
// File: crates/masking/src/maskable.rs
//! This module contains Masking objects and traits
use crate::{ExposeInterface, Secret};
/// An Enum that allows us to optionally mask data, based on which enum variant that data is stored
/// in.
/// An Enum that allows us to optionally mask data, based on which enum variant that data is stored
/// in.
#[derive(Clone, Eq, PartialEq)]
pub enum Maskable<T: Eq + PartialEq + Clone> {
    /// Variant which masks the data by wrapping in a Secret
    Masked(Secret<T>),
    /// Variant which doesn't mask the data
    Normal(T),
}
impl<T: std::fmt::Debug + Clone + Eq + PartialEq> std::fmt::Debug for Maskable<T> {
    /// Delegates to the inner value's `Debug`; a `Masked` value is rendered
    /// through `Secret`'s `Debug` implementation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let inner: &dyn std::fmt::Debug = match self {
            Self::Masked(secret) => secret,
            Self::Normal(plain) => plain,
        };
        std::fmt::Debug::fmt(inner, f)
    }
}
impl<T: Eq + PartialEq + Clone + std::hash::Hash> std::hash::Hash for Maskable<T> {
    /// Feeds the underlying value into the hasher. Masked data is peeked
    /// (not exposed) just long enough to compute the hash.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        let inner: &T = match self {
            Self::Masked(secret) => crate::PeekInterface::peek(secret),
            Self::Normal(plain) => plain,
        };
        inner.hash(state);
    }
}
impl<T: Eq + PartialEq + Clone> Maskable<T> {
    /// Get the inner data while consuming self
    pub fn into_inner(self) -> T {
        match self {
            Self::Masked(secret) => secret.expose(),
            Self::Normal(plain) => plain,
        }
    }

    /// Create a new Masked data
    pub fn new_masked(item: Secret<T>) -> Self {
        Self::Masked(item)
    }

    /// Create a new non-masked data
    pub fn new_normal(item: T) -> Self {
        Self::Normal(item)
    }

    /// Checks whether the data is masked.
    /// Returns `true` if the data is wrapped in the `Masked` variant,
    /// returns `false` otherwise.
    pub fn is_masked(&self) -> bool {
        match self {
            Self::Masked(_) => true,
            Self::Normal(_) => false,
        }
    }

    /// Checks whether the data is normal (not masked).
    /// Returns `true` if the data is wrapped in the `Normal` variant,
    /// returns `false` otherwise.
    pub fn is_normal(&self) -> bool {
        !self.is_masked()
    }
}
/// Trait for providing a method on custom types for constructing `Maskable`
///
/// The implementations in this module always produce the `Masked` variant.
pub trait Mask {
    /// The type returned by the `into_masked()` method. Must implement `PartialEq`, `Eq` and `Clone`
    type Output: Eq + Clone + PartialEq;
    /// Construct a `Maskable` instance that wraps `Self::Output` by consuming `self`
    fn into_masked(self) -> Maskable<Self::Output>;
}
impl Mask for String {
type Output = Self;
fn into_masked(self) -> Maskable<Self::Output> {
Maskable::new_masked(self.into())
}
}
impl Mask for Secret<String> {
    type Output = String;

    /// Already a secret, so it is simply tagged as masked.
    fn into_masked(self) -> Maskable<Self::Output> {
        Maskable::Masked(self)
    }
}
impl<T: Eq + PartialEq + Clone> From<T> for Maskable<T> {
    /// A plain value converts to the unmasked `Normal` variant.
    fn from(value: T) -> Self {
        Self::Normal(value)
    }
}
impl From<&str> for Maskable<String> {
    /// Copies the slice into an owned `String` held as unmasked data.
    fn from(value: &str) -> Self {
        Self::Normal(value.to_owned())
    }
}
// File: crates/masking/src/lib.rs
#![cfg_attr(docsrs, feature(doc_auto_cfg, doc_cfg_hide))]
#![cfg_attr(docsrs, doc(cfg_hide(doc)))]
#![warn(missing_docs)]
//! Personal Identifiable Information protection. Wrapper types and traits for secret management which help ensure they aren't accidentally copied, logged, or otherwise exposed (as much as possible), and also ensure secrets are securely wiped from memory when dropped.
//! Secret-keeping library inspired by secrecy.
#![doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR" ), "/", "README.md"))]
pub use zeroize::{self, DefaultIsZeroes, Zeroize as ZeroizableSecret};
mod strategy;
pub use strategy::{Strategy, WithType, WithoutType};
mod abs;
pub use abs::{ExposeInterface, ExposeOptionInterface, PeekInterface, SwitchStrategy};
mod secret;
mod strong_secret;
#[cfg(feature = "serde")]
pub use secret::JsonMaskStrategy;
pub use secret::Secret;
pub use strong_secret::StrongSecret;
#[cfg(feature = "alloc")]
extern crate alloc;
#[cfg(feature = "alloc")]
mod boxed;
#[cfg(feature = "bytes")]
mod bytes;
#[cfg(feature = "bytes")]
pub use self::bytes::SecretBytesMut;
#[cfg(feature = "alloc")]
mod string;
#[cfg(feature = "alloc")]
mod vec;
#[cfg(feature = "serde")]
mod serde;
#[cfg(feature = "serde")]
pub use crate::serde::{
masked_serialize, Deserialize, ErasedMaskSerialize, SerializableSecret, Serialize,
};
/// This module should be included with asterisk.
///
/// `use masking::prelude::*;`
pub mod prelude {
    //! Convenience re-exports of the interface traits used to read secrets.
    pub use super::{ExposeInterface, ExposeOptionInterface, PeekInterface};
}
#[cfg(feature = "diesel")]
mod diesel;
#[cfg(feature = "cassandra")]
mod cassandra;
pub mod maskable;
pub use maskable::*;
// File: crates/masking/src/boxed.rs
//! `Box` types containing secrets
//!
//! There is not alias type by design.
#[cfg(feature = "serde")]
use super::{SerializableSecret, Serialize};
#[cfg(feature = "serde")]
impl<S: Serialize> SerializableSecret for Box<S> {}
// File: crates/masking/src/cassandra.rs
use scylla::{
deserialize::DeserializeValue,
frame::response::result::ColumnType,
serialize::{
value::SerializeValue,
writers::{CellWriter, WrittenCellProof},
SerializationError,
},
};
use crate::{abs::PeekInterface, StrongSecret};
// CQL serialization for `StrongSecret`: delegate to the peeked inner value;
// the wrapper adds no encoding of its own.
impl<T> SerializeValue for StrongSecret<T>
where
    T: SerializeValue + zeroize::Zeroize + Clone,
{
    /// Writes the inner value into the CQL cell writer.
    fn serialize<'b>(
        &self,
        column_type: &ColumnType<'_>,
        writer: CellWriter<'b>,
    ) -> Result<WrittenCellProof<'b>, SerializationError> {
        self.peek().serialize(column_type, writer)
    }
}
// CQL deserialization for `StrongSecret`: type-check and decode as the inner
// type, then wrap the result.
impl<'frame, 'metadata, T> DeserializeValue<'frame, 'metadata> for StrongSecret<T>
where
    T: DeserializeValue<'frame, 'metadata> + zeroize::Zeroize + Clone,
{
    /// Column type compatibility is decided entirely by the inner type.
    fn type_check(column_type: &ColumnType<'_>) -> Result<(), scylla::deserialize::TypeCheckError> {
        T::type_check(column_type)
    }
    /// Decodes the raw frame slice as `T` and wraps it in a `StrongSecret`.
    fn deserialize(
        column_type: &'metadata ColumnType<'metadata>,
        v: Option<scylla::deserialize::FrameSlice<'frame>>,
    ) -> Result<Self, scylla::deserialize::DeserializationError> {
        Ok(Self::new(T::deserialize(column_type, v)?))
    }
}
// File: crates/masking/src/vec.rs
//! Secret `Vec` types
//!
//! There is not alias type by design.
#[cfg(feature = "serde")]
use super::{SerializableSecret, Serialize};
#[cfg(feature = "serde")]
impl<S: Serialize> SerializableSecret for Vec<S> {}
// File: crates/masking/src/diesel.rs
//! Diesel-related.
use diesel::{
backend::Backend,
deserialize::{self, FromSql, Queryable},
expression::AsExpression,
internal::derives::as_expression::Bound,
serialize::{self, Output, ToSql},
sql_types,
};
use crate::{Secret, Strategy, StrongSecret, ZeroizableSecret};
// `AsExpression` lets a `Secret` (behind one or two references, or by value)
// be bound directly as a SQL parameter of a `SingleValue` SQL type.
impl<S, I, T> AsExpression<T> for &Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// Double-reference form of the impl above.
impl<S, I, T> AsExpression<T> for &&Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// Writing to SQL delegates to the inner value's `ToSql`; the wrapper adds no
// encoding of its own.
impl<S, I, T, DB> ToSql<T, DB> for Secret<S, I>
where
    DB: Backend,
    S: ToSql<T, DB>,
    I: Strategy<S>,
{
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result {
        ToSql::<T, DB>::to_sql(&self.inner_secret, out)
    }
}
// Reading from SQL parses the raw value as `S`, then wraps it in a `Secret`.
impl<DB, S, T, I> FromSql<T, DB> for Secret<S, I>
where
    DB: Backend,
    S: FromSql<T, DB>,
    I: Strategy<S>,
{
    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
        S::from_sql(bytes).map(|raw| raw.into())
    }
}
// Owned-value `AsExpression`.
impl<S, I, T> AsExpression<T> for Secret<S, I>
where
    T: sql_types::SingleValue,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// `Queryable` builds the secret straight from its `FromSql` representation.
impl<ST, DB, S, I> Queryable<ST, DB> for Secret<S, I>
where
    DB: Backend,
    I: Strategy<S>,
    ST: sql_types::SingleValue,
    Self: FromSql<ST, DB>,
{
    type Row = Self;
    fn build(row: Self::Row) -> deserialize::Result<Self> {
        Ok(row)
    }
}
// Same set of diesel impls for `StrongSecret`, which additionally requires
// the inner value to be zeroizable (`ZeroizableSecret`).
impl<S, I, T> AsExpression<T> for &StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// Double-reference form of the impl above.
impl<S, I, T> AsExpression<T> for &&StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// Writing to SQL delegates to the inner value's `ToSql`.
impl<S, I, DB, T> ToSql<T, DB> for StrongSecret<S, I>
where
    DB: Backend,
    S: ToSql<T, DB> + ZeroizableSecret,
    I: Strategy<S>,
{
    fn to_sql<'b>(&'b self, out: &mut Output<'b, '_, DB>) -> serialize::Result {
        ToSql::<T, DB>::to_sql(&self.inner_secret, out)
    }
}
// Reading from SQL parses the raw value as `S`, then wraps it.
impl<DB, S, I, T> FromSql<T, DB> for StrongSecret<S, I>
where
    DB: Backend,
    S: FromSql<T, DB> + ZeroizableSecret,
    I: Strategy<S>,
{
    fn from_sql(bytes: DB::RawValue<'_>) -> deserialize::Result<Self> {
        S::from_sql(bytes).map(|raw| raw.into())
    }
}
// Owned-value `AsExpression`.
impl<S, I, T> AsExpression<T> for StrongSecret<S, I>
where
    T: sql_types::SingleValue,
    S: ZeroizableSecret,
    I: Strategy<S>,
{
    type Expression = Bound<T, Self>;
    fn as_expression(self) -> Self::Expression {
        Bound::new(self)
    }
}
// `Queryable` builds the secret straight from its `FromSql` representation.
impl<ST, DB, S, I> Queryable<ST, DB> for StrongSecret<S, I>
where
    I: Strategy<S>,
    DB: Backend,
    S: ZeroizableSecret,
    ST: sql_types::SingleValue,
    Self: FromSql<ST, DB>,
{
    type Row = Self;
    fn build(row: Self::Row) -> deserialize::Result<Self> {
        Ok(row)
    }
}
// File: crates/masking/src/string.rs
//! Secret strings
//!
//! There is not alias type by design.
use alloc::{
str::FromStr,
string::{String, ToString},
};
#[cfg(feature = "serde")]
use super::SerializableSecret;
use super::{Secret, Strategy};
use crate::StrongSecret;
#[cfg(feature = "serde")]
impl SerializableSecret for String {}
impl<I> FromStr for Secret<String, I>
where
    I: Strategy<String>,
{
    type Err = core::convert::Infallible;

    /// Infallible: the input slice is copied into an owned `String` and
    /// wrapped in a `Secret`.
    fn from_str(src: &str) -> Result<Self, Self::Err> {
        Ok(Self::new(String::from(src)))
    }
}
impl<I> FromStr for StrongSecret<String, I>
where
    I: Strategy<String>,
{
    type Err = core::convert::Infallible;

    /// Infallible: the input slice is copied into an owned `String` and
    /// wrapped in a `StrongSecret`.
    fn from_str(src: &str) -> Result<Self, Self::Err> {
        Ok(Self::new(String::from(src)))
    }
}
// File: crates/masking/src/bytes.rs
//! Optional `Secret` wrapper type for the `bytes::BytesMut` crate.
use core::fmt;
use bytes::BytesMut;
#[cfg(all(feature = "bytes", feature = "serde"))]
use serde::de::{self, Deserialize};
use super::{PeekInterface, ZeroizableSecret};
/// Instance of [`BytesMut`] protected by a type that impls the [`ExposeInterface`]
/// trait like `Secret<T>`.
///
/// Because of the nature of how the `BytesMut` type works, it needs some special
/// care in order to have a proper zeroizing drop handler.
#[derive(Clone)]
#[cfg_attr(docsrs, cfg(feature = "bytes"))]
// The tuple field holds the protected buffer; it is zeroized by the `Drop`
// impl below.
pub struct SecretBytesMut(BytesMut);
impl SecretBytesMut {
    /// Wrap bytes in `SecretBytesMut`
    pub fn new(bytes: impl Into<BytesMut>) -> Self {
        let inner: BytesMut = bytes.into();
        Self(inner)
    }
}
impl PeekInterface<BytesMut> for SecretBytesMut {
    /// Borrow the underlying buffer without copying or exposing it.
    fn peek(&self) -> &BytesMut {
        &self.0
    }
    /// Mutably borrow the underlying buffer.
    fn peek_mut(&mut self) -> &mut BytesMut {
        &mut self.0
    }
}
impl fmt::Debug for SecretBytesMut {
    /// Always prints a redaction marker; the buffer contents never reach the
    /// formatter.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("SecretBytesMut([REDACTED])")
    }
}
impl From<BytesMut> for SecretBytesMut {
fn from(bytes: BytesMut) -> Self {
Self::new(bytes)
}
}
impl Drop for SecretBytesMut {
    /// Zeroizes the buffer when the secret is dropped.
    fn drop(&mut self) {
        // Grow the visible length to the full capacity so every byte the
        // allocation may have held is overwritten, not just the current len.
        self.0.resize(self.0.capacity(), 0);
        self.0.as_mut().zeroize();
        debug_assert!(self.0.as_ref().iter().all(|b| *b == 0));
    }
}
#[cfg(all(feature = "bytes", feature = "serde"))]
impl<'de> Deserialize<'de> for SecretBytesMut {
fn deserialize<D: de::Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
struct SecretBytesVisitor;
impl<'de> de::Visitor<'de> for SecretBytesVisitor {
type Value = SecretBytesMut;
fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
formatter.write_str("byte array")
}
#[inline]
fn visit_bytes<E>(self, v: &[u8]) -> Result<Self::Value, E>
where
E: de::Error,
{
let mut bytes = BytesMut::with_capacity(v.len());
bytes.extend_from_slice(v);
Ok(SecretBytesMut(bytes))
}
#[inline]
fn visit_seq<V>(self, mut seq: V) -> Result<Self::Value, V::Error>
where
V: de::SeqAccess<'de>,
{
// 4096 is cargo culted from upstream
let len = core::cmp::min(seq.size_hint().unwrap_or(0), 4096);
let mut bytes = BytesMut::with_capacity(len);
use bytes::BufMut;
while let Some(value) = seq.next_element()? {
bytes.put_u8(value);
}
Ok(SecretBytesMut(bytes))
}
}
deserializer.deserialize_bytes(SecretBytesVisitor)
}
}
// File: crates/masking/src/secret.rs
//! Structure describing secret.
use std::{fmt, marker::PhantomData};
use crate::{strategy::Strategy, PeekInterface, StrongSecret};
/// Secret thing.
///
/// To get access to value use method `expose()` of trait [`crate::ExposeInterface`].
///
/// ## Masking
/// Use the [`crate::strategy::Strategy`] trait to implement a masking strategy on a zero-variant
/// enum and pass this enum as a second generic parameter to [`Secret`] while defining it.
/// [`Secret`] will take care of applying the masking strategy on the inner secret when being
/// displayed.
///
/// ## Masking Example
///
/// ```
/// use masking::Strategy;
/// use masking::Secret;
/// use std::fmt;
///
/// enum MyStrategy {}
///
/// impl<T> Strategy<T> for MyStrategy
/// where
/// T: fmt::Display
/// {
/// fn fmt(val: &T, f: &mut fmt::Formatter<'_>) -> fmt::Result {
/// write!(f, "{}", val.to_string().to_ascii_lowercase())
/// }
/// }
///
/// let my_secret: Secret<String, MyStrategy> = Secret::new("HELLO".to_string());
///
/// assert_eq!("hello", &format!("{:?}", my_secret));
/// ```
pub struct Secret<Secret, MaskingStrategy = crate::WithType>
where
    MaskingStrategy: Strategy<Secret>,
{
    /// The wrapped secret value; reachable only through `peek`/`expose`.
    pub(crate) inner_secret: Secret,
    /// Zero-sized marker binding the masking strategy into the type.
    pub(crate) masking_strategy: PhantomData<MaskingStrategy>,
}
impl<SecretValue, MaskingStrategy> Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Take ownership of a secret value
    pub fn new(secret: SecretValue) -> Self {
        Self {
            inner_secret: secret,
            masking_strategy: PhantomData,
        }
    }

    /// Zip 2 secrets with the same masking strategy into one
    pub fn zip<OtherSecretValue>(
        self,
        other: Secret<OtherSecretValue, MaskingStrategy>,
    ) -> Secret<(SecretValue, OtherSecretValue), MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue> + Strategy<(SecretValue, OtherSecretValue)>,
    {
        Secret::new((self.inner_secret, other.inner_secret))
    }

    /// consume self and modify the inner value
    pub fn map<OtherSecretValue>(
        self,
        f: impl FnOnce(SecretValue) -> OtherSecretValue,
    ) -> Secret<OtherSecretValue, MaskingStrategy>
    where
        MaskingStrategy: Strategy<OtherSecretValue>,
    {
        Secret::new(f(self.inner_secret))
    }

    /// Convert to [`StrongSecret`]
    pub fn into_strong(self) -> StrongSecret<SecretValue, MaskingStrategy>
    where
        SecretValue: zeroize::DefaultIsZeroes,
    {
        StrongSecret::new(self.inner_secret)
    }

    /// Convert to [`Secret`] with a reference to the inner secret
    pub fn as_ref(&self) -> Secret<&SecretValue, MaskingStrategy>
    where
        MaskingStrategy: for<'a> Strategy<&'a SecretValue>,
    {
        Secret::new(&self.inner_secret)
    }
}
impl<SecretValue, MaskingStrategy> PeekInterface<SecretValue>
    for Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Borrow the inner secret without consuming or exposing it.
    fn peek(&self) -> &SecretValue {
        &self.inner_secret
    }
    /// Mutably borrow the inner secret.
    fn peek_mut(&mut self) -> &mut SecretValue {
        &mut self.inner_secret
    }
}
impl<SecretValue, MaskingStrategy> From<SecretValue> for Secret<SecretValue, MaskingStrategy>
where
MaskingStrategy: Strategy<SecretValue>,
{
fn from(secret: SecretValue) -> Self {
Self::new(secret)
}
}
impl<SecretValue, MaskingStrategy> Clone for Secret<SecretValue, MaskingStrategy>
where
    SecretValue: Clone,
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Clones the wrapped value into a new `Secret` with the same strategy.
    fn clone(&self) -> Self {
        Self::new(self.inner_secret.clone())
    }
}
impl<SecretValue, MaskingStrategy> PartialEq for Secret<SecretValue, MaskingStrategy>
where
    Self: PeekInterface<SecretValue>,
    SecretValue: PartialEq,
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Equality compares the peeked inner values.
    fn eq(&self, other: &Self) -> bool {
        self.peek() == other.peek()
    }
}
// Marker impl: the `PartialEq` above is a full equivalence relation whenever
// the inner value is `Eq`.
impl<SecretValue, MaskingStrategy> Eq for Secret<SecretValue, MaskingStrategy>
where
    Self: PeekInterface<SecretValue>,
    SecretValue: Eq,
    MaskingStrategy: Strategy<SecretValue>,
{
}
impl<SecretValue, MaskingStrategy> fmt::Debug for Secret<SecretValue, MaskingStrategy>
where
    MaskingStrategy: Strategy<SecretValue>,
{
    /// `{:?}` output is produced by the masking strategy, never by the inner
    /// value's own `Debug` impl.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        MaskingStrategy::fmt(&self.inner_secret, f)
    }
}
impl<SecretValue, MaskingStrategy> Default for Secret<SecretValue, MaskingStrategy>
where
    SecretValue: Default,
    MaskingStrategy: Strategy<SecretValue>,
{
    /// Wraps the inner type's default value.
    fn default() -> Self {
        Self::new(SecretValue::default())
    }
}
// Required by base64-serde to serialize Secret of Vec<u8> which contains the base64 decoded value
impl AsRef<[u8]> for Secret<Vec<u8>> {
    /// Borrows the raw bytes of the wrapped vector.
    fn as_ref(&self) -> &[u8] {
        &self.inner_secret
    }
}
/// Strategy for masking JSON values
///
/// Zero-variant enum: used purely as a type parameter to `Secret`, never
/// instantiated.
#[cfg(feature = "serde")]
pub enum JsonMaskStrategy {}
#[cfg(feature = "serde")]
impl Strategy<serde_json::Value> for JsonMaskStrategy {
fn fmt(value: &serde_json::Value, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match value {
serde_json::Value::Object(map) => {
write!(f, "{{")?;
let mut first = true;
for (key, val) in map {
if !first {
write!(f, ", ")?;
}
first = false;
write!(f, "\"{key}\":")?;
Self::fmt(val, f)?;
}
write!(f, "}}")
}
serde_json::Value::Array(arr) => {
write!(f, "[")?;
let mut first = true;
for val in arr {
if !first {
write!(f, ", ")?;
}
first = false;
Self::fmt(val, f)?;
}
write!(f, "]")
}
serde_json::Value::String(s) => {
// For strings, we show a masked version that gives a hint about the content
let masked = if s.len() <= 2 {
"**".to_string()
} else if s.len() <= 6 {
format!("{}**", &s[0..1])
} else {
// For longer strings, show first and last character with length in between
format!(
"{}**{}**{}",
&s[0..1],
s.len() - 2,
&s[s.len() - 1..s.len()]
)
};
write!(f, "\"{masked}\"")
}
serde_json::Value::Number(n) => {
// For numbers, we can show the order of magnitude
if n.is_i64() || n.is_u64() {
let num_str = n.to_string();
let masked_num = "*".repeat(num_str.len());
write!(f, "{masked_num}")
} else if n.is_f64() {
// For floats, just use a generic mask
write!(f, "**.**")
} else {
write!(f, "0")
}
}
serde_json::Value::Bool(b) => {
// For booleans, we can show a hint about which one it is
write!(f, "{}", if *b { "**true" } else { "**false" })
}
serde_json::Value::Null => write!(f, "null"),
}
}
}
#[cfg(feature = "proto_tonic")]
impl<T> prost::Message for Secret<T, crate::WithType>
where
T: prost::Message + Default + Clone,
{
fn encode_raw(&self, buf: &mut impl bytes::BufMut) {
self.peek().encode_raw(buf);
}
fn merge_field(
&mut self,
tag: u32,
wire_type: prost::encoding::WireType,
buf: &mut impl bytes::Buf,
ctx: prost::encoding::DecodeContext,
) -> Result<(), prost::DecodeError> {
if tag == 1 {
self.peek_mut().merge_field(tag, wire_type, buf, ctx)
} else {
prost::encoding::skip_field(wire_type, tag, buf, ctx)
}
}
fn encoded_len(&self) -> usize {
self.peek().encoded_len()
}
fn clear(&mut self) {
self.peek_mut().clear();
}
}
#[cfg(test)]
#[cfg(feature = "serde")]
mod tests {
    use serde_json::json;
    use super::*;

    /// End-to-end check of `JsonMaskStrategy`: every leaf must be masked
    /// according to its type, object/array structure must be preserved, and
    /// no original leaf value may appear in the masked output.
    #[test]
    #[allow(clippy::expect_used)]
    fn test_json_mask_strategy() {
        // Create a sample JSON with different types for testing
        let original = json!({ "user": { "name": "John Doe", "email": "[email protected]", "age": 35, "verified": true }, "card": { "number": "4242424242424242", "cvv": 123, "amount": 99.99 }, "tags": ["personal", "premium"], "null_value": null, "short": "hi" });
        // Apply the JsonMaskStrategy
        let secret = Secret::<_, JsonMaskStrategy>::new(original.clone());
        let masked_str = format!("{secret:?}");
        // Get specific values from original
        let original_obj = original.as_object().expect("Original should be an object");
        let user_obj = original_obj["user"]
            .as_object()
            .expect("User should be an object");
        let name = user_obj["name"].as_str().expect("Name should be a string");
        let email = user_obj["email"]
            .as_str()
            .expect("Email should be a string");
        let age = user_obj["age"].as_i64().expect("Age should be a number");
        let verified = user_obj["verified"]
            .as_bool()
            .expect("Verified should be a boolean");
        let card_obj = original_obj["card"]
            .as_object()
            .expect("Card should be an object");
        let card_number = card_obj["number"]
            .as_str()
            .expect("Card number should be a string");
        let cvv = card_obj["cvv"].as_i64().expect("CVV should be a number");
        let tags = original_obj["tags"]
            .as_array()
            .expect("Tags should be an array");
        let tag1 = tags
            .first()
            .and_then(|v| v.as_str())
            .expect("First tag should be a string");
        // Now explicitly verify the masking patterns for each value type
        // 1. String masking - pattern: first char + ** + length - 2 + ** + last char
        let expected_name_mask = format!(
            "\"{}**{}**{}\"",
            &name[0..1],
            name.len() - 2,
            &name[name.len() - 1..]
        );
        let expected_email_mask = format!(
            "\"{}**{}**{}\"",
            &email[0..1],
            email.len() - 2,
            &email[email.len() - 1..]
        );
        let expected_card_mask = format!(
            "\"{}**{}**{}\"",
            &card_number[0..1],
            card_number.len() - 2,
            &card_number[card_number.len() - 1..]
        );
        // Tags are short, so the mask pattern depends on the tag's length.
        let expected_tag1_mask = if tag1.len() <= 2 {
            "\"**\"".to_string()
        } else if tag1.len() <= 6 {
            format!("\"{}**\"", &tag1[0..1])
        } else {
            format!(
                "\"{}**{}**{}\"",
                &tag1[0..1],
                tag1.len() - 2,
                &tag1[tag1.len() - 1..]
            )
        };
        let expected_short_mask = "\"**\"".to_string(); // For "hi"
        // 2. Number masking
        let expected_age_mask = "*".repeat(age.to_string().len()); // Repeat * for the number of digits
        let expected_cvv_mask = "*".repeat(cvv.to_string().len());
        // 3. Boolean masking
        let expected_verified_mask = if verified { "**true" } else { "**false" };
        // Check that the masked output includes the expected masked patterns
        assert!(
            masked_str.contains(&expected_name_mask),
            "Name not masked correctly. Expected: {expected_name_mask}"
        );
        assert!(
            masked_str.contains(&expected_email_mask),
            "Email not masked correctly. Expected: {expected_email_mask}",
        );
        assert!(
            masked_str.contains(&expected_card_mask),
            "Card number not masked correctly. Expected: {expected_card_mask}",
        );
        assert!(
            masked_str.contains(&expected_tag1_mask),
            "Tag not masked correctly. Expected: {expected_tag1_mask}",
        );
        assert!(
            masked_str.contains(&expected_short_mask),
            "Short string not masked correctly. Expected: {expected_short_mask}",
        );
        assert!(
            masked_str.contains(&expected_age_mask),
            "Age not masked correctly. Expected: {expected_age_mask}",
        );
        assert!(
            masked_str.contains(&expected_cvv_mask),
            "CVV not masked correctly. Expected: {expected_cvv_mask}",
        );
        assert!(
            masked_str.contains(expected_verified_mask),
            "Boolean not masked correctly. Expected: {expected_verified_mask}",
        );
        // Check structure preservation
        assert!(
            masked_str.contains("\"user\""),
            "Structure not preserved - missing user object"
        );
        assert!(
            masked_str.contains("\"card\""),
            "Structure not preserved - missing card object"
        );
        assert!(
            masked_str.contains("\"tags\""),
            "Structure not preserved - missing tags array"
        );
        assert!(
            masked_str.contains("\"null_value\":null"),
            "Null value not preserved correctly"
        );
        // Additional security checks to ensure no original values are exposed
        assert!(
            !masked_str.contains(name),
            "Original name value exposed in masked output"
        );
        assert!(
            !masked_str.contains(email),
            "Original email value exposed in masked output"
        );
        assert!(
            !masked_str.contains(card_number),
            "Original card number exposed in masked output"
        );
        assert!(
            !masked_str.contains(&age.to_string()),
            "Original age value exposed in masked output"
        );
        assert!(
            !masked_str.contains(&cvv.to_string()),
            "Original CVV value exposed in masked output"
        );
        assert!(
            !masked_str.contains(tag1),
            "Original tag value exposed in masked output"
        );
        assert!(
            !masked_str.contains("hi"),
            "Original short string value exposed in masked output"
        );
    }
}
</crate>
|
{
"crate": "masking",
"file": null,
"files": [
"crates/masking/tests/basic.rs",
"crates/masking/src/strategy.rs",
"crates/masking/src/abs.rs",
"crates/masking/src/strong_secret.rs",
"crates/masking/src/serde.rs",
"crates/masking/src/maskable.rs",
"crates/masking/src/lib.rs",
"crates/masking/src/boxed.rs",
"crates/masking/src/cassandra.rs",
"crates/masking/src/vec.rs",
"crates/masking/src/diesel.rs",
"crates/masking/src/string.rs",
"crates/masking/src/bytes.rs",
"crates/masking/src/secret.rs"
],
"module": null,
"num_files": 14,
"token_count": 13449
}
|
crate_7143377136351361907
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: injector
Files: 5
</path>
<crate>
// File: crates/injector/src/consts.rs
/// Header name for external vault metadata (`x-external-vault-metadata`).
pub const EXTERNAL_VAULT_METADATA_HEADER: &str = "x-external-vault-metadata";
// File: crates/injector/src/injector.rs
pub mod core {
use std::collections::HashMap;
use async_trait::async_trait;
use common_utils::request::{Method, RequestBuilder, RequestContent};
use error_stack::{self, ResultExt};
use masking::{self, ExposeInterface};
use nom::{
bytes::complete::{tag, take_while1},
character::complete::{char, multispace0},
sequence::{delimited, preceded, terminated},
IResult,
};
use router_env::{instrument, logger, tracing};
use serde_json::Value;
use thiserror::Error;
use crate as injector_types;
use crate::{
types::{ContentType, InjectorRequest, InjectorResponse, IntoInjectorResponse},
vault_metadata::VaultMetadataExtractorExt,
};
// Maps the injector's HTTP method enum onto `common_utils`' `Method`,
// variant for variant.
impl From<injector_types::HttpMethod> for Method {
    fn from(method: injector_types::HttpMethod) -> Self {
        match method {
            injector_types::HttpMethod::GET => Self::Get,
            injector_types::HttpMethod::POST => Self::Post,
            injector_types::HttpMethod::PUT => Self::Put,
            injector_types::HttpMethod::PATCH => Self::Patch,
            injector_types::HttpMethod::DELETE => Self::Delete,
        }
    }
}
/// Proxy configuration structure (copied from hyperswitch_interfaces to make injector standalone)
/// Proxy configuration structure (copied from hyperswitch_interfaces to make injector standalone)
///
/// `#[serde(default)]` means any field missing from the configuration falls
/// back to the `Default` impl below.
#[derive(Debug, serde::Deserialize, Clone)]
#[serde(default)]
pub struct Proxy {
    /// The URL of the HTTP proxy server.
    pub http_url: Option<String>,
    /// The URL of the HTTPS proxy server.
    pub https_url: Option<String>,
    /// The timeout duration (in seconds) for idle connections in the proxy pool.
    pub idle_pool_connection_timeout: Option<u64>,
    /// A comma-separated list of hosts that should bypass the proxy.
    pub bypass_proxy_hosts: Option<String>,
}
impl Default for Proxy {
    /// No proxies and no bypass list configured; idle pooled connections
    /// time out after 90 seconds.
    fn default() -> Self {
        Self {
            http_url: None,
            https_url: None,
            idle_pool_connection_timeout: Some(90),
            bypass_proxy_hosts: None,
        }
    }
}
/// Create HTTP client using the proven external_services create_client logic
///
/// Precedence:
/// 1. client certificate + key present -> mutual TLS (a CA certificate, if
///    also given, is ignored with a warning);
/// 2. only a CA certificate present -> one-way TLS trusting that root;
/// 3. neither -> plain client with just the proxy settings.
fn create_client(
    proxy_config: &Proxy,
    client_certificate: Option<masking::Secret<String>>,
    client_certificate_key: Option<masking::Secret<String>>,
    ca_certificate: Option<masking::Secret<String>>,
) -> error_stack::Result<reqwest::Client, InjectorError> {
    logger::debug!(
        has_client_cert = client_certificate.is_some(),
        has_client_key = client_certificate_key.is_some(),
        has_ca_cert = ca_certificate.is_some(),
        "Creating HTTP client"
    );
    // Case 1: Mutual TLS with client certificate and key
    if let (Some(encoded_certificate), Some(encoded_certificate_key)) =
        (client_certificate.clone(), client_certificate_key.clone())
    {
        if ca_certificate.is_some() {
            logger::warn!("All of client certificate, client key, and CA certificate are provided. CA certificate will be ignored in mutual TLS setup.");
        }
        let client_builder = get_client_builder(proxy_config)?;
        let identity = create_identity_from_certificate_and_key(
            encoded_certificate.clone(),
            encoded_certificate_key,
        )?;
        // NOTE(review): the client certificate is also installed as a root
        // certificate here — presumably to support self-signed setups;
        // confirm that is intentional.
        let certificate_list = create_certificate(encoded_certificate)?;
        let client_builder = certificate_list
            .into_iter()
            .fold(client_builder, |client_builder, certificate| {
                client_builder.add_root_certificate(certificate)
            });
        return client_builder
            .identity(identity)
            .use_rustls_tls()
            .build()
            .change_context(InjectorError::HttpRequestFailed)
            .inspect_err(|e| {
                logger::error!(
                    "Failed to construct client with certificate and certificate key: {:?}",
                    e
                );
            });
    }
    // Case 2: Use provided CA certificate for server authentication only (one-way TLS)
    if let Some(ca_pem) = ca_certificate {
        let pem = ca_pem.expose().replace("\\r\\n", "\n"); // Fix escaped newlines
        let cert = reqwest::Certificate::from_pem(pem.as_bytes())
            .change_context(InjectorError::HttpRequestFailed)
            .inspect_err(|e| {
                logger::error!("Failed to parse CA certificate PEM block: {:?}", e)
            })?;
        let client_builder = get_client_builder(proxy_config)?.add_root_certificate(cert);
        return client_builder
            .use_rustls_tls()
            .build()
            .change_context(InjectorError::HttpRequestFailed)
            .inspect_err(|e| {
                logger::error!("Failed to construct client with CA certificate: {:?}", e);
            });
    }
    // Case 3: Default client (no certs)
    get_base_client(proxy_config)
}
/// Helper functions from external_services
/// Builds a `reqwest::ClientBuilder` with the HTTPS/HTTP proxies from
/// `proxy_config` applied (each only when configured).
fn get_client_builder(
    proxy_config: &Proxy,
) -> error_stack::Result<reqwest::ClientBuilder, InjectorError> {
    let mut client_builder = reqwest::Client::builder();
    // Configure proxy if provided
    if let Some(proxy_url) = &proxy_config.https_url {
        let proxy = reqwest::Proxy::https(proxy_url)
            .change_context(InjectorError::HttpRequestFailed)
            .inspect_err(|e| {
                logger::error!("Failed to configure HTTPS proxy: {:?}", e);
            })?;
        client_builder = client_builder.proxy(proxy);
    }
    if let Some(proxy_url) = &proxy_config.http_url {
        let proxy = reqwest::Proxy::http(proxy_url)
            .change_context(InjectorError::HttpRequestFailed)
            .inspect_err(|e| {
                logger::error!("Failed to configure HTTP proxy: {:?}", e);
            })?;
        client_builder = client_builder.proxy(proxy);
    }
    // NOTE(review): `bypass_proxy_hosts` and `idle_pool_connection_timeout`
    // from `Proxy` are not applied here — confirm whether that is intended.
    Ok(client_builder)
}
/// Builds a plain client (no TLS material) from the proxy settings alone.
fn get_base_client(
    proxy_config: &Proxy,
) -> error_stack::Result<reqwest::Client, InjectorError> {
    get_client_builder(proxy_config)?
        .build()
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!("Failed to build default HTTP client: {:?}", e);
        })
}
/// Builds a client TLS identity from PEM-encoded certificate and private
/// key by concatenating the two PEM blocks into one bundle.
fn create_identity_from_certificate_and_key(
    encoded_certificate: masking::Secret<String>,
    encoded_certificate_key: masking::Secret<String>,
) -> error_stack::Result<reqwest::Identity, InjectorError> {
    let cert_str = encoded_certificate.expose();
    let key_str = encoded_certificate_key.expose();
    // `Identity::from_pem` accepts certificate and key in a single PEM bundle.
    let combined_pem = format!("{cert_str}\n{key_str}");
    reqwest::Identity::from_pem(combined_pem.as_bytes())
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!(
                "Failed to create identity from certificate and key: {:?}",
                e
            );
        })
}
/// Parses a PEM-encoded certificate into a single-element certificate list.
fn create_certificate(
    encoded_certificate: masking::Secret<String>,
) -> error_stack::Result<Vec<reqwest::Certificate>, InjectorError> {
    let pem = encoded_certificate.expose();
    reqwest::Certificate::from_pem(pem.as_bytes())
        .change_context(InjectorError::HttpRequestFailed)
        .inspect_err(|e| {
            logger::error!("Failed to create certificate from PEM: {:?}", e);
        })
        .map(|certificate| vec![certificate])
}
/// Generic function to log HTTP request errors with detailed error type information
fn log_and_convert_http_error(e: reqwest::Error, context: &str) -> InjectorError {
    let error_msg = e.to_string();
    logger::error!("HTTP request failed in {}: {}", context, error_msg);
    // Log specific error types for debugging; the checks are not mutually
    // exclusive, so one failure may emit several of these lines.
    if e.is_timeout() {
        logger::error!("Request timed out in {}", context);
    }
    if e.is_connect() {
        logger::error!("Connection error occurred in {}", context);
    }
    if e.is_request() {
        logger::error!("Request construction error in {}", context);
    }
    if e.is_decode() {
        logger::error!("Response decoding error in {}", context);
    }
    // Every reqwest error collapses into this single opaque variant.
    InjectorError::HttpRequestFailed
}
/// Apply certificate configuration to request builder and return built request
/// Apply certificate configuration to request builder and return built request
fn build_request_with_certificates(
    request_builder: RequestBuilder,
    config: &injector_types::ConnectionConfig,
) -> common_utils::request::Request {
    // Thread the builder through each optional certificate, attaching only
    // the pieces that are present in the connection config.
    let request_builder = match &config.client_cert {
        Some(cert_content) => request_builder.add_certificate(Some(cert_content.clone())),
        None => request_builder,
    };
    let request_builder = match &config.client_key {
        Some(key_content) => request_builder.add_certificate_key(Some(key_content.clone())),
        None => request_builder,
    };
    let request_builder = match &config.ca_cert {
        Some(ca_content) => request_builder.add_ca_certificate_pem(Some(ca_content.clone())),
        None => request_builder,
    };
    request_builder.build()
}
/// Simplified HTTP client for injector using the proven external_services create_client logic
///
/// Builds a reqwest client honoring proxy and certificate configuration,
/// translates the framework request (method, headers, body) into a reqwest
/// request, and sends it. When `option_timeout_secs` is provided it is applied
/// as a per-request timeout; previously this parameter was accepted but
/// silently ignored.
#[instrument(skip_all)]
pub async fn send_request(
    client_proxy: &Proxy,
    request: common_utils::request::Request,
    option_timeout_secs: Option<u64>,
) -> error_stack::Result<reqwest::Response, InjectorError> {
    logger::info!(
        has_client_cert = request.certificate.is_some(),
        has_client_key = request.certificate_key.is_some(),
        has_ca_cert = request.ca_certificate.is_some(),
        "Making HTTP request using standalone injector HTTP client with configuration"
    );
    // Create reqwest client using the proven create_client function
    let client = create_client(
        client_proxy,
        request.certificate.clone(),
        request.certificate_key.clone(),
        request.ca_certificate.clone(),
    )?;
    // Build the request
    let method = match request.method {
        Method::Get => reqwest::Method::GET,
        Method::Post => reqwest::Method::POST,
        Method::Put => reqwest::Method::PUT,
        Method::Patch => reqwest::Method::PATCH,
        Method::Delete => reqwest::Method::DELETE,
    };
    let mut req_builder = client.request(method, &request.url);
    // Apply the caller-supplied timeout, if any. (Fix: the parameter used to
    // be named `_option_timeout_secs` and was never used.)
    if let Some(timeout_secs) = option_timeout_secs {
        req_builder = req_builder.timeout(std::time::Duration::from_secs(timeout_secs));
    }
    // Add headers; masked values are exposed only at the point of transmission.
    for (key, value) in &request.headers {
        let header_value = match value {
            masking::Maskable::Masked(secret) => secret.clone().expose(),
            masking::Maskable::Normal(normal) => normal.clone(),
        };
        req_builder = req_builder.header(key, header_value);
    }
    // Add body if present
    if let Some(body) = request.body {
        match body {
            RequestContent::Json(payload) => {
                req_builder = req_builder.json(&payload);
            }
            RequestContent::FormUrlEncoded(payload) => {
                req_builder = req_builder.form(&payload);
            }
            RequestContent::RawBytes(payload) => {
                req_builder = req_builder.body(payload);
            }
            _ => {
                // Fix: the old message claimed "using raw bytes", but no body
                // is attached for unsupported content kinds.
                logger::warn!("Unsupported request content type, sending request without a body");
            }
        }
    }
    // Send the request
    let response = req_builder
        .send()
        .await
        .map_err(|e| log_and_convert_http_error(e, "send_request"))?;
    logger::info!(
        status_code = response.status().as_u16(),
        "HTTP request completed successfully"
    );
    Ok(response)
}
/// Error type for the injector pipeline; `Display` text comes from the
/// `#[error]` attributes via `thiserror`.
#[derive(Error, Debug)]
pub enum InjectorError {
    /// A `{{$field}}` reference could not be resolved against the vault data.
    #[error("Token replacement failed: {0}")]
    TokenReplacementFailed(String),
    /// Client construction or request dispatch failed (deliberately opaque —
    /// details are logged, not carried in the error).
    #[error("HTTP request failed")]
    HttpRequestFailed,
    /// Payload (de)serialization failed.
    #[error("Serialization error: {0}")]
    SerializationError(String),
    /// The template or its connection configuration is invalid (e.g. empty or
    /// unparsable endpoint URL).
    #[error("Invalid template: {0}")]
    InvalidTemplate(String),
}
/// Public entry point: runs the full token-injection flow for `request`
/// by delegating to [`TokenInjector::injector_core`] on a fresh [`Injector`].
#[instrument(skip_all)]
pub async fn injector_core(
    request: InjectorRequest,
) -> error_stack::Result<InjectorResponse, InjectorError> {
    logger::info!("Starting injector_core processing");
    Injector::new().injector_core(request).await
}
/// Represents a token reference found in a template string
///
/// Produced by `parse_token` / `find_all_tokens`; only the inner field name is
/// retained — the token's position within the template is not recorded.
#[derive(Debug)]
struct TokenReference {
    /// The field name to be replaced (without the {{$}} wrapper)
    pub field: String,
}
/// Parses a single token reference from a string using nom parser combinators
///
/// Expects tokens in the format `{{$field_name}}` where field_name contains
/// only alphanumeric characters and underscores. Whitespace is tolerated after
/// `{{` and before `}}`.
fn parse_token(input: &str) -> IResult<&str, TokenReference> {
    // `$name`, allowing trailing whitespace before the closing braces.
    let field_name = preceded(
        char('$'),
        terminated(
            take_while1(|c: char| c.is_alphanumeric() || c == '_'),
            multispace0,
        ),
    );
    // Full token: `{{`, optional whitespace, the `$name` part, `}}`.
    let mut token = delimited(tag("{{"), preceded(multispace0, field_name), tag("}}"));
    let (rest, name) = token(input)?;
    Ok((
        rest,
        TokenReference {
            field: name.to_string(),
        },
    ))
}
/// Finds all token references in a string using nom parser
///
/// Scans through the entire input string and extracts all valid token
/// references. Returns a vector of TokenReference structs containing the
/// field names. Positions where no token starts are skipped one character
/// at a time.
fn find_all_tokens(input: &str) -> Vec<TokenReference> {
    let mut tokens = Vec::new();
    let mut current_input = input;
    while !current_input.is_empty() {
        if let Ok((remaining, token_ref)) = parse_token(current_input) {
            tokens.push(token_ref);
            current_input = remaining;
        } else {
            // No token at this position: step past the next character.
            // Fix: advance by the character's UTF-8 width. The previous
            // `split_at_checked(1)` returned `None` whenever the leading
            // character was multi-byte (byte index 1 is not a char boundary),
            // which aborted the scan and silently dropped every token after
            // the first non-ASCII character in the template.
            match current_input.chars().next() {
                Some(c) => current_input = &current_input[c.len_utf8()..],
                None => break,
            }
        }
    }
    tokens
}
/// Recursively searches for a field in vault data JSON structure
///
/// Depth-first: the current object's own keys are consulted first, then each
/// nested object in turn; the first matching value found is cloned and
/// returned.
fn find_field_recursively_in_vault_data(
    obj: &serde_json::Map<String, Value>,
    field_name: &str,
) -> Option<Value> {
    match obj.get(field_name) {
        Some(direct_hit) => Some(direct_hit.clone()),
        None => obj.values().find_map(|val| match val {
            Value::Object(inner_obj) => {
                find_field_recursively_in_vault_data(inner_obj, field_name)
            }
            _ => None,
        }),
    }
}
/// Contract for the end-to-end token-injection flow: interpolate a template
/// against vault data, dispatch the result to the connector, and return the
/// connector's response.
#[async_trait]
trait TokenInjector {
    /// Processes `request` and returns the connector response (or an
    /// [`InjectorError`] wrapped in an `error_stack` report).
    async fn injector_core(
        &self,
        request: InjectorRequest,
    ) -> error_stack::Result<InjectorResponse, InjectorError>;
}
/// Stateless engine that performs template interpolation and forwards the
/// resulting payload to the connector over HTTP.
pub struct Injector;
impl Injector {
    /// Creates a new injector instance (zero-sized; holds no configuration).
    pub fn new() -> Self {
        Self
    }
    /// Processes a string template and replaces token references with vault data
    ///
    /// Every `{{$field}}` reference found in `template` is resolved against
    /// `vault_data` and textually substituted into the result.
    #[instrument(skip_all)]
    fn interpolate_string_template_with_vault_data(
        &self,
        template: String,
        vault_data: &Value,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<String, InjectorError> {
        // Find all tokens using nom parser
        let tokens = find_all_tokens(&template);
        let mut result = template;
        for token_ref in tokens.into_iter() {
            let extracted_field_value = self.extract_field_from_vault_data(
                vault_data,
                &token_ref.field,
                vault_connector,
            )?;
            // String values are inserted verbatim; any other JSON value is
            // inserted as its JSON serialization.
            let token_str = match extracted_field_value {
                Value::String(token_value) => token_value,
                _ => serde_json::to_string(&extracted_field_value).unwrap_or_default(),
            };
            // Replace the token in the result string
            // NOTE(review): the pattern is rebuilt in canonical form without
            // whitespace, so a token written as `{{ $field }}` parses (the
            // parser tolerates whitespace) but would not be replaced here —
            // confirm whether such input can occur in practice.
            let token_pattern = format!("{{{{${}}}}}", token_ref.field);
            result = result.replace(&token_pattern, &token_str);
        }
        Ok(result)
    }
    /// Recursively walks a JSON value, interpolating every string it contains.
    ///
    /// Objects are rebuilt key by key; strings are run through
    /// `interpolate_string_template_with_vault_data`; all other value kinds
    /// (numbers, booleans, nulls, arrays) pass through unchanged.
    #[instrument(skip_all)]
    fn interpolate_token_references_with_vault_data(
        &self,
        value: Value,
        vault_data: &Value,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<Value, InjectorError> {
        match value {
            Value::Object(obj) => {
                let new_obj = obj
                    .into_iter()
                    .map(|(key, val)| {
                        self.interpolate_token_references_with_vault_data(
                            val,
                            vault_data,
                            vault_connector,
                        )
                        .map(|processed| (key, processed))
                    })
                    .collect::<error_stack::Result<serde_json::Map<_, _>, InjectorError>>()?;
                Ok(Value::Object(new_obj))
            }
            Value::String(s) => {
                let processed_string = self.interpolate_string_template_with_vault_data(
                    s,
                    vault_data,
                    vault_connector,
                )?;
                Ok(Value::String(processed_string))
            }
            // Note: strings nested inside arrays are not interpolated — only
            // object values and top-level strings are visited.
            _ => Ok(value),
        }
    }
    /// Looks up `field_name` anywhere inside `vault_data` (recursive search)
    /// and applies the vault-specific transformation to the value found.
    ///
    /// Errors with `TokenReplacementFailed` when the field is absent or when
    /// `vault_data` is not a JSON object.
    #[instrument(skip_all)]
    fn extract_field_from_vault_data(
        &self,
        vault_data: &Value,
        field_name: &str,
        vault_connector: &injector_types::VaultConnectors,
    ) -> error_stack::Result<Value, InjectorError> {
        logger::debug!(
            "Extracting field '{}' from vault data using vault type {:?}",
            field_name,
            vault_connector
        );
        match vault_data {
            Value::Object(obj) => {
                let raw_value = find_field_recursively_in_vault_data(obj, field_name)
                    .ok_or_else(|| {
                        error_stack::Report::new(InjectorError::TokenReplacementFailed(
                            format!("Field '{field_name}' not found"),
                        ))
                    })?;
                // Apply vault-specific token transformation
                self.apply_vault_specific_transformation(raw_value, vault_connector, field_name)
            }
            _ => Err(error_stack::Report::new(
                InjectorError::TokenReplacementFailed(
                    "Vault data is not a valid JSON object".to_string(),
                ),
            )),
        }
    }
    /// Hook for per-vault rewriting of an extracted value; VGS uses the
    /// stored token verbatim, so the value is returned unchanged.
    #[instrument(skip_all)]
    fn apply_vault_specific_transformation(
        &self,
        extracted_field_value: Value,
        vault_connector: &injector_types::VaultConnectors,
        field_name: &str,
    ) -> error_stack::Result<Value, InjectorError> {
        match vault_connector {
            injector_types::VaultConnectors::VGS => {
                logger::debug!(
                    "VGS vault: Using direct token replacement for field '{}'",
                    field_name
                );
                Ok(extracted_field_value)
            }
        }
    }
    /// Sends `payload` to the connector endpoint described by `config`.
    ///
    /// Steps: validate/parse the endpoint URL, convert headers and body into
    /// the framework request type, pull proxy + CA certificate out of the
    /// vault metadata header when present, attach certificates, and dispatch
    /// via [`send_request`], converting the reqwest response into an
    /// [`InjectorResponse`].
    #[instrument(skip_all)]
    async fn make_http_request(
        &self,
        config: &injector_types::ConnectionConfig,
        payload: &str,
        content_type: &ContentType,
    ) -> error_stack::Result<InjectorResponse, InjectorError> {
        logger::info!(
            method = ?config.http_method,
            endpoint = %config.endpoint,
            content_type = ?content_type,
            payload_length = payload.len(),
            headers_count = config.headers.len(),
            "Making HTTP request to connector"
        );
        // Validate inputs first
        if config.endpoint.is_empty() {
            logger::error!("Endpoint URL is empty");
            Err(error_stack::Report::new(InjectorError::InvalidTemplate(
                "Endpoint URL cannot be empty".to_string(),
            )))?;
        }
        // Parse and validate the complete endpoint URL
        let url = reqwest::Url::parse(&config.endpoint).map_err(|e| {
            logger::error!("Failed to parse endpoint URL: {}", e);
            error_stack::Report::new(InjectorError::InvalidTemplate(format!(
                "Invalid endpoint URL: {e}"
            )))
        })?;
        logger::debug!("Constructed URL: {}", url);
        // Convert headers to common_utils Headers format safely
        let headers: Vec<(String, masking::Maskable<String>)> = config
            .headers
            .clone()
            .into_iter()
            .map(|(k, v)| (k, masking::Maskable::new_normal(v.expose().clone())))
            .collect();
        // Determine method and request content
        let method = Method::from(config.http_method);
        // Determine request content based on content type with error handling
        let request_content = match content_type {
            ContentType::ApplicationJson => {
                // Try to parse as JSON, fallback to raw string
                match serde_json::from_str::<Value>(payload) {
                    Ok(json) => Some(RequestContent::Json(Box::new(json))),
                    Err(e) => {
                        logger::debug!(
                            "Failed to parse payload as JSON: {}, falling back to raw bytes",
                            e
                        );
                        Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
                    }
                }
            }
            ContentType::ApplicationXWwwFormUrlencoded => {
                // Parse form data safely
                let form_data: HashMap<String, String> =
                    url::form_urlencoded::parse(payload.as_bytes())
                        .into_owned()
                        .collect();
                Some(RequestContent::FormUrlEncoded(Box::new(form_data)))
            }
            ContentType::ApplicationXml | ContentType::TextXml => {
                Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
            }
            ContentType::TextPlain => {
                Some(RequestContent::RawBytes(payload.as_bytes().to_vec()))
            }
        };
        // Extract vault metadata directly from headers using existing functions
        // (parsed into a throwaway config so only proxy URL and CA cert are
        // taken from it; failures fall back to (None, None) without aborting).
        let (vault_proxy_url, vault_ca_cert) = if config
            .headers
            .contains_key(crate::consts::EXTERNAL_VAULT_METADATA_HEADER)
        {
            let mut temp_config = injector_types::ConnectionConfig::new(
                config.endpoint.clone(),
                config.http_method,
            );
            // Use existing vault metadata extraction with fallback
            if temp_config.extract_and_apply_vault_metadata_with_fallback(&config.headers) {
                (temp_config.proxy_url, temp_config.ca_cert)
            } else {
                (None, None)
            }
        } else {
            (None, None)
        };
        // Build request safely with certificate configuration
        let mut request_builder = RequestBuilder::new()
            .method(method)
            .url(url.as_str())
            .headers(headers);
        if let Some(content) = request_content {
            request_builder = request_builder.set_body(content);
        }
        // Create final config with vault CA certificate if available
        // (vault-provided CA cert overrides the one on the original config).
        let mut final_config = config.clone();
        let has_vault_ca_cert = vault_ca_cert.is_some();
        if has_vault_ca_cert {
            final_config.ca_cert = vault_ca_cert;
        }
        // Log certificate configuration (but not the actual content)
        logger::info!(
            has_client_cert = final_config.client_cert.is_some(),
            has_client_key = final_config.client_key.is_some(),
            has_ca_cert = final_config.ca_cert.is_some(),
            has_vault_ca_cert = has_vault_ca_cert,
            insecure = final_config.insecure.unwrap_or(false),
            cert_format = ?final_config.cert_format,
            "Certificate configuration applied"
        );
        // Build request with certificate configuration applied
        let request = build_request_with_certificates(request_builder, &final_config);
        // Determine which proxy to use: vault metadata > backup > none
        let final_proxy_url = vault_proxy_url.or_else(|| config.backup_proxy_url.clone());
        let proxy = if let Some(proxy_url) = final_proxy_url {
            let proxy_url_str = proxy_url.expose();
            // Set proxy URL for both HTTP and HTTPS traffic
            Proxy {
                http_url: Some(proxy_url_str.clone()),
                https_url: Some(proxy_url_str),
                idle_pool_connection_timeout: Some(90),
                bypass_proxy_hosts: None,
            }
        } else {
            Proxy::default()
        };
        // Send request using local standalone http client
        let response = send_request(&proxy, request, None).await?;
        // Convert reqwest::Response to InjectorResponse using trait
        response
            .into_injector_response()
            .await
            .map_err(|e| error_stack::Report::new(e))
    }
}
impl Default for Injector {
fn default() -> Self {
Self::new()
}
}
#[async_trait]
impl TokenInjector for Injector {
    /// End-to-end flow: interpolate the template with vault data, pick a
    /// content type from the headers, send the payload to the connector, and
    /// return its response with timing/size metrics logged.
    #[instrument(skip_all)]
    async fn injector_core(
        &self,
        request: InjectorRequest,
    ) -> error_stack::Result<InjectorResponse, InjectorError> {
        let started_at = std::time::Instant::now();
        // The vault data lives inside a SecretSerdeValue; expose it once so
        // field lookups can borrow it.
        let vault_data = request.token_data.specific_token_data.expose().clone();
        logger::debug!(
            template_length = request.connector_payload.template.len(),
            vault_connector = ?request.token_data.vault_connector,
            "Processing token injection request"
        );
        // Replace every {{$field}} reference in the template.
        let processed_payload = self.interpolate_string_template_with_vault_data(
            request.connector_payload.template,
            &vault_data,
            &request.token_data.vault_connector,
        )?;
        logger::debug!(
            processed_payload_length = processed_payload.len(),
            "Token replacement completed"
        );
        // Map the Content-Type header onto a known ContentType, defaulting to
        // form-urlencoded when the header is absent or unrecognized.
        let content_type = request
            .connection_config
            .headers
            .get("Content-Type")
            .and_then(|ct| match ct.clone().expose().as_str() {
                "application/json" => Some(ContentType::ApplicationJson),
                "application/x-www-form-urlencoded" => {
                    Some(ContentType::ApplicationXWwwFormUrlencoded)
                }
                "application/xml" => Some(ContentType::ApplicationXml),
                "text/xml" => Some(ContentType::TextXml),
                "text/plain" => Some(ContentType::TextPlain),
                _ => None,
            })
            .unwrap_or(ContentType::ApplicationXWwwFormUrlencoded);
        // Dispatch the interpolated payload to the connector.
        let response = self
            .make_http_request(
                &request.connection_config,
                &processed_payload,
                &content_type,
            )
            .await?;
        logger::info!(
            duration_ms = started_at.elapsed().as_millis(),
            status_code = response.status_code,
            response_size = serde_json::to_string(&response.response)
                .map(|s| s.len())
                .unwrap_or(0),
            headers_count = response.headers.as_ref().map(|h| h.len()).unwrap_or(0),
            "Token injection completed successfully"
        );
        Ok(response)
    }
}
}
// Re-export all items
pub use core::*;
#[cfg(test)]
#[allow(clippy::unwrap_used)]
mod tests {
    use std::collections::HashMap;
    use router_env::logger;
    use crate::*;
    /// Integration test for the full injection flow against a live endpoint.
    #[tokio::test]
    #[ignore = "Integration test that requires network access"]
    async fn test_injector_core_integration() {
        // Create test request
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            masking::Secret::new("application/x-www-form-urlencoded".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            masking::Secret::new("Bearer Test".to_string()),
        );
        let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({
            "card_number": "TEST_123",
            "cvv": "123",
            "exp_month": "12",
            "exp_year": "25"
        }));
        let request = InjectorRequest {
            connector_payload: ConnectorPayload {
                // Fix: the template previously contained the mojibake `¤cy`
                // (an HTML-entity mangling of `&curren`); restored to
                // `&currency`.
                template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(),
            },
            token_data: TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data,
            },
            connection_config: ConnectionConfig {
                endpoint: "https://api.stripe.com/v1/payment_intents".to_string(),
                http_method: HttpMethod::POST,
                headers,
                proxy_url: None, // Remove proxy that was causing issues
                backup_proxy_url: None,
                // Certificate fields (None for basic test)
                client_cert: None,
                client_key: None,
                ca_cert: None, // Empty CA cert for testing
                insecure: None,
                cert_password: None,
                cert_format: None,
                max_response_size: None, // Use default
            },
        };
        // Test the core function - this makes a real HTTP request to the
        // configured endpoint.
        let result = injector_core(request).await;
        // The request should succeed (the endpoint should be reachable)
        if let Err(ref e) = result {
            logger::info!("Error: {e:?}");
        }
        assert!(
            result.is_ok(),
            "injector_core should succeed with valid request: {result:?}"
        );
        let response = result.unwrap();
        // Print the actual response for demonstration
        logger::info!("=== HTTP RESPONSE FROM HTTPBIN.ORG ===");
        logger::info!(
            "{}",
            serde_json::to_string_pretty(&response).unwrap_or_default()
        );
        logger::info!("=======================================");
        // Response should have a proper status code and response data
        assert!(
            response.status_code >= 200 && response.status_code < 300,
            "Response should have successful status code: {}",
            response.status_code
        );
        assert!(
            response.response.is_object() || response.response.is_string(),
            "Response data should be JSON object or string"
        );
    }
    /// Integration test verifying basic dispatch (no certificates configured)
    /// and that tokens are replaced in the echoed form data.
    #[tokio::test]
    // Fix: this test performs a real network call to httpbin.org but, unlike
    // its sibling above, was not marked ignored — making the default test run
    // network-dependent and flaky.
    #[ignore = "Integration test that requires network access"]
    async fn test_certificate_configuration() {
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            masking::Secret::new("application/x-www-form-urlencoded".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            masking::Secret::new("Bearer TEST".to_string()),
        );
        let specific_token_data = common_utils::pii::SecretSerdeValue::new(serde_json::json!({
            "card_number": "4242429789164242",
            "cvv": "123",
            "exp_month": "12",
            "exp_year": "25"
        }));
        // Basic request without certificates (all TLS fields left as None)
        let request = InjectorRequest {
            connector_payload: ConnectorPayload {
                // Fix: same `¤cy` mojibake restored to `&currency` here.
                template: "card_number={{$card_number}}&cvv={{$cvv}}&expiry={{$exp_month}}/{{$exp_year}}&amount=50&currency=USD&transaction_type=purchase".to_string(),
            },
            token_data: TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data,
            },
            connection_config: ConnectionConfig {
                endpoint: "https://httpbin.org/post".to_string(),
                http_method: HttpMethod::POST,
                headers,
                proxy_url: None, // Remove proxy to make test work reliably
                backup_proxy_url: None,
                // Test without certificates for basic functionality
                client_cert: None,
                client_key: None,
                ca_cert: None,
                insecure: None,
                cert_password: None,
                cert_format: None,
                max_response_size: None,
            },
        };
        let result = injector_core(request).await;
        // Should succeed without any certificate configuration
        assert!(
            result.is_ok(),
            "Certificate test should succeed: {result:?}"
        );
        let response = result.unwrap();
        // Print the actual response for demonstration
        logger::info!("=== CERTIFICATE TEST RESPONSE ===");
        logger::info!(
            "{}",
            serde_json::to_string_pretty(&response).unwrap_or_default()
        );
        logger::info!("================================");
        // Should succeed with proper status code
        assert!(
            response.status_code >= 200 && response.status_code < 300,
            "Certificate test should have successful status code: {}",
            response.status_code
        );
        // Verify the tokens were replaced correctly in the form data
        // httpbin.org returns the request data in the 'form' field
        let response_str = serde_json::to_string(&response.response).unwrap_or_default();
        // Check that our test tokens were replaced with the actual values from vault data
        let tokens_replaced = response_str.contains("4242429789164242") && // card_number
            response_str.contains("123") && // cvv
            response_str.contains("12/25"); // expiry
        assert!(
            tokens_replaced,
            "Response should contain replaced tokens (card_number, cvv, expiry): {}",
            serde_json::to_string_pretty(&response.response).unwrap_or_default()
        );
    }
}
// File: crates/injector/src/types.rs
/// Core request, response, and configuration types for the injector crate.
pub mod models {
use std::collections::HashMap;
use async_trait::async_trait;
use common_utils::pii::SecretSerdeValue;
use masking::Secret;
use router_env::logger;
use serde::{Deserialize, Serialize};
// Enums for the injector - making it standalone
/// Content types supported by the injector for HTTP requests
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ContentType {
    /// `application/json`
    ApplicationJson,
    /// `application/x-www-form-urlencoded`
    ApplicationXWwwFormUrlencoded,
    /// `application/xml`
    ApplicationXml,
    /// `text/xml`
    TextXml,
    /// `text/plain`
    TextPlain,
}
/// HTTP methods supported by the injector
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum HttpMethod {
    GET,
    POST,
    PUT,
    PATCH,
    DELETE,
}
/// Vault connectors supported by the injector for token management
///
/// Currently supports VGS as the primary vault connector. While only VGS is
/// implemented today, this enum structure is maintained for future extensibility
/// to support additional vault providers (e.g., Basis Theory, Skyflow, etc.)
/// without breaking API compatibility.
#[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Serialize, serde::Deserialize)]
#[serde(rename_all = "UPPERCASE")]
pub enum VaultConnectors {
    /// VGS (Very Good Security) vault connector
    VGS,
}
/// Token data containing vault-specific information for token replacement
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct TokenData {
    /// The specific token data retrieved from the vault
    pub specific_token_data: SecretSerdeValue,
    /// The type of vault connector being used (e.g., VGS)
    pub vault_connector: VaultConnectors,
}
/// Connector payload containing the template to be processed
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ConnectorPayload {
    /// Template string containing token references in the format {{$field_name}}
    pub template: String,
}
/// Configuration for HTTP connection to the external connector
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ConnectionConfig {
    /// Complete URL endpoint for the connector (e.g., "https://api.stripe.com/v1/payment_intents")
    pub endpoint: String,
    /// HTTP method to use for the request
    pub http_method: HttpMethod,
    /// HTTP headers to include in the request
    pub headers: HashMap<String, Secret<String>>,
    /// Optional proxy URL for routing the request through a proxy server
    pub proxy_url: Option<Secret<String>>,
    /// Optional backup proxy URL to use if vault metadata doesn't provide one
    #[serde(default)]
    pub backup_proxy_url: Option<Secret<String>>,
    /// Optional client certificate for mutual TLS authentication
    pub client_cert: Option<Secret<String>>,
    /// Optional client private key for mutual TLS authentication
    pub client_key: Option<Secret<String>>,
    /// Optional CA certificate for verifying the server certificate
    pub ca_cert: Option<Secret<String>>,
    /// Whether to skip certificate verification (for testing only)
    pub insecure: Option<bool>,
    /// Optional password for encrypted client certificate
    pub cert_password: Option<Secret<String>>,
    /// Format of the client certificate (e.g., "PEM")
    pub cert_format: Option<String>,
    /// Maximum response size in bytes (defaults to 10MB if not specified)
    pub max_response_size: Option<usize>,
}
/// Complete request structure for the injector service
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct InjectorRequest {
    /// Token data from the vault
    pub token_data: TokenData,
    /// Payload template to process
    pub connector_payload: ConnectorPayload,
    /// HTTP connection configuration
    pub connection_config: ConnectionConfig,
}
/// Response from the injector including status code and response data
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct InjectorResponse {
    /// HTTP status code from the connector response
    pub status_code: u16,
    /// Response headers from the connector (optional)
    pub headers: Option<HashMap<String, String>>,
    /// Response body from the connector
    pub response: serde_json::Value,
}
/// Trait for converting HTTP responses to InjectorResponse
#[async_trait]
pub trait IntoInjectorResponse {
    /// Convert to InjectorResponse with proper error handling
    async fn into_injector_response(
        self,
    ) -> Result<InjectorResponse, crate::injector::core::InjectorError>;
}
#[async_trait]
impl IntoInjectorResponse for reqwest::Response {
    /// Converts a reqwest response: copies status and (string-representable)
    /// headers, then reads the body as text and keeps it as parsed JSON when
    /// possible, falling back to a plain JSON string otherwise.
    async fn into_injector_response(
        self,
    ) -> Result<InjectorResponse, crate::injector::core::InjectorError> {
        let status_code = self.status().as_u16();
        logger::info!(
            status_code = status_code,
            "Converting reqwest::Response to InjectorResponse"
        );
        // Extract headers
        // (headers whose values are not valid UTF-8 strings are dropped; an
        // empty map is normalized to None)
        let headers: Option<HashMap<String, String>> = {
            let header_map: HashMap<String, String> = self
                .headers()
                .iter()
                .filter_map(|(name, value)| {
                    value
                        .to_str()
                        .ok()
                        .map(|v| (name.to_string(), v.to_string()))
                })
                .collect();
            if header_map.is_empty() {
                None
            } else {
                Some(header_map)
            }
        };
        let response_text = self
            .text()
            .await
            .map_err(|_| crate::injector::core::InjectorError::HttpRequestFailed)?;
        logger::debug!(
            response_length = response_text.len(),
            headers_count = headers.as_ref().map(|h| h.len()).unwrap_or(0),
            "Processing connector response"
        );
        // Non-JSON bodies are preserved verbatim as a JSON string value.
        let response_data = match serde_json::from_str::<serde_json::Value>(&response_text) {
            Ok(json) => json,
            Err(_e) => serde_json::Value::String(response_text),
        };
        Ok(InjectorResponse {
            status_code,
            headers,
            response: response_data,
        })
    }
}
impl InjectorRequest {
    /// Creates a new InjectorRequest
    ///
    /// `proxy_url` is stored as the *backup* proxy: vault metadata extracted
    /// later (in `make_http_request`) takes precedence over it. Certificate
    /// arguments only fill fields that are still unset on the fresh config.
    #[allow(clippy::too_many_arguments)]
    pub fn new(
        endpoint: String,
        http_method: HttpMethod,
        template: String,
        token_data: TokenData,
        headers: Option<HashMap<String, Secret<String>>>,
        proxy_url: Option<Secret<String>>,
        client_cert: Option<Secret<String>>,
        client_key: Option<Secret<String>>,
        ca_cert: Option<Secret<String>>,
    ) -> Self {
        let headers = headers.unwrap_or_default();
        let mut connection_config = ConnectionConfig::new(endpoint, http_method);
        // Keep vault metadata header for processing in make_http_request
        // Store backup proxy for make_http_request to use as fallback
        connection_config.backup_proxy_url = proxy_url;
        connection_config.client_cert = connection_config.client_cert.or(client_cert);
        connection_config.client_key = connection_config.client_key.or(client_key);
        connection_config.ca_cert = connection_config.ca_cert.or(ca_cert);
        connection_config.headers = headers;
        Self {
            token_data,
            connector_payload: ConnectorPayload { template },
            connection_config,
        }
    }
}
impl ConnectionConfig {
    /// Creates a new ConnectionConfig from basic parameters
    /// (all optional fields start as None/empty).
    pub fn new(endpoint: String, http_method: HttpMethod) -> Self {
        Self {
            endpoint,
            http_method,
            headers: HashMap::new(),
            proxy_url: None,
            backup_proxy_url: None,
            client_cert: None,
            client_key: None,
            ca_cert: None,
            insecure: None,
            cert_password: None,
            cert_format: None,
            max_response_size: None,
        }
    }
}
}
pub use models::*;
// File: crates/injector/src/lib.rs
pub mod consts;
pub mod injector;
pub mod types;
pub mod vault_metadata;
// Re-export all functionality
pub use consts::*;
pub use injector::*;
pub use types::*;
pub use vault_metadata::*;
// File: crates/injector/src/vault_metadata.rs
use std::collections::HashMap;
use base64::Engine;
use masking::{ExposeInterface, Secret};
use router_env::logger;
use url::Url;
use crate::{consts::EXTERNAL_VAULT_METADATA_HEADER, types::ConnectionConfig, VaultConnectors};
/// Standard (padded) base64 engine used for vault metadata headers and certificates.
const BASE64_ENGINE: base64::engine::GeneralPurpose = base64::engine::general_purpose::STANDARD;
/// Trait for different vault metadata processors
///
/// Implementors take parsed vault metadata and apply it to a
/// [`ConnectionConfig`] (proxy URL, CA certificate, etc.).
pub trait VaultMetadataProcessor: Send + Sync {
    /// Process vault metadata and return connection configuration updates
    /// (mutates `connection_config` in place).
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError>;
    /// Get the vault connector type
    fn vault_connector(&self) -> VaultConnectors;
}
/// Comprehensive errors related to vault metadata processing
///
/// Display text comes from the `#[error]` attributes via `thiserror`.
#[derive(Debug, thiserror::Error)]
pub enum VaultMetadataError {
    #[error("Failed to decode base64 vault metadata: {0}")]
    Base64DecodingFailed(String),
    #[error("Failed to parse vault metadata JSON: {0}")]
    JsonParsingFailed(String),
    #[error("Unsupported vault connector: {0}")]
    UnsupportedVaultConnector(String),
    #[error("Invalid URL in vault metadata: {0}")]
    InvalidUrl(String),
    #[error("Missing required field in vault metadata: {0}")]
    MissingRequiredField(String),
    #[error("Invalid certificate format: {0}")]
    InvalidCertificateFormat(String),
    #[error("Vault metadata header is empty or malformed")]
    EmptyOrMalformedHeader,
    // Structured variant; prefer constructing via
    // `VaultMetadataError::url_validation_failed`.
    #[error("URL validation failed for {field}: {url} - {reason}")]
    UrlValidationFailed {
        field: String,
        url: String,
        reason: String,
    },
    #[error("Certificate validation failed: {0}")]
    CertificateValidationFailed(String),
    #[error("Vault metadata processing failed for connector {connector}: {reason}")]
    ProcessingFailed { connector: String, reason: String },
}
impl VaultMetadataError {
    /// Create a URL validation error with context
    /// (convenience constructor for [`VaultMetadataError::UrlValidationFailed`]).
    pub fn url_validation_failed(field: &str, url: &str, reason: impl Into<String>) -> Self {
        let reason = reason.into();
        Self::UrlValidationFailed {
            field: field.to_owned(),
            url: url.to_owned(),
            reason,
        }
    }
}
/// External vault proxy metadata (moved from external_services)
///
/// Deserialized with `#[serde(untagged)]`: the JSON shape itself selects the
/// variant, so the payload carries no explicit discriminant.
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
#[serde(untagged)]
pub enum ExternalVaultProxyMetadata {
    /// VGS proxy data variant
    VgsMetadata(VgsMetadata),
}
/// VGS proxy data (moved from external_services)
#[derive(Debug, Clone, serde::Deserialize, serde::Serialize)]
pub struct VgsMetadata {
    /// External vault url
    pub proxy_url: Url,
    /// CA certificates to verify the vault server
    /// (may be raw PEM or base64-encoded PEM; see `process_metadata`)
    pub certificate: Secret<String>,
}
impl VaultMetadataProcessor for VgsMetadata {
    /// Applies VGS settings to the connection config: sets the proxy URL and
    /// installs the CA certificate (decoding it from base64 when it is not
    /// already raw PEM).
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        // Set proxy URL from VGS metadata
        // (fix: the string was previously cloned redundantly before being
        // moved into the Secret)
        let proxy_url_str = self.proxy_url.as_str().to_string();
        connection_config.proxy_url = Some(Secret::new(proxy_url_str));
        // Validate and decode certificate from VGS metadata
        let cert_content = self.certificate.clone().expose();
        // Check if certificate is base64 encoded and decode if necessary
        let decoded_cert = if cert_content.starts_with("-----BEGIN") {
            cert_content
        } else {
            match BASE64_ENGINE.decode(&cert_content) {
                Ok(decoded_bytes) => String::from_utf8(decoded_bytes).map_err(|e| {
                    VaultMetadataError::CertificateValidationFailed(format!(
                        "Certificate is not valid UTF-8 after base64 decoding: {e}"
                    ))
                })?,
                Err(e) => {
                    logger::error!(
                        error = %e,
                        "Failed to decode base64 certificate"
                    );
                    return Err(VaultMetadataError::CertificateValidationFailed(format!(
                        "Failed to decode base64 certificate: {e}"
                    )));
                }
            }
        };
        // (fix: `decoded_cert` was cloned here even though the original was
        // immediately dropped)
        connection_config.ca_cert = Some(Secret::new(decoded_cert));
        Ok(())
    }
    fn vault_connector(&self) -> VaultConnectors {
        VaultConnectors::VGS
    }
}
impl VaultMetadataProcessor for ExternalVaultProxyMetadata {
    /// Delegates processing to the concrete vault variant.
    fn process_metadata(
        &self,
        connection_config: &mut ConnectionConfig,
    ) -> Result<(), VaultMetadataError> {
        match self {
            Self::VgsMetadata(inner) => inner.process_metadata(connection_config),
        }
    }
    /// Reports which vault connector produced this metadata.
    fn vault_connector(&self) -> VaultConnectors {
        match self {
            Self::VgsMetadata(inner) => inner.vault_connector(),
        }
    }
}
/// Factory for creating vault metadata processors from different sources
pub struct VaultMetadataFactory;
impl VaultMetadataFactory {
    /// Create a vault metadata processor from base64 encoded header value with comprehensive validation
    ///
    /// Validation steps: non-empty (after trimming), valid base64, non-empty
    /// decoded payload no larger than 1MB, and valid metadata JSON.
    pub fn from_base64_header(
        base64_value: &str,
    ) -> Result<Box<dyn VaultMetadataProcessor>, VaultMetadataError> {
        // Validate input
        let trimmed = base64_value.trim();
        if trimmed.is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        // Decode base64 with detailed error context
        let payload = BASE64_ENGINE.decode(trimmed).map_err(|e| {
            logger::error!(
                error = %e,
                "Failed to decode base64 vault metadata header"
            );
            VaultMetadataError::Base64DecodingFailed(format!("Invalid base64 encoding: {e}"))
        })?;
        // Validate decoded size
        if payload.is_empty() {
            return Err(VaultMetadataError::EmptyOrMalformedHeader);
        }
        if payload.len() > 1_000_000 {
            return Err(VaultMetadataError::JsonParsingFailed(
                "Decoded vault metadata is too large (>1MB)".to_string(),
            ));
        }
        // Parse JSON with detailed error context
        let metadata: ExternalVaultProxyMetadata =
            serde_json::from_slice(&payload).map_err(|e| {
                logger::error!(
                    error = %e,
                    "Failed to parse vault metadata JSON"
                );
                VaultMetadataError::JsonParsingFailed(format!("Invalid JSON structure: {e}"))
            })?;
        logger::info!(
            vault_connector = ?metadata.vault_connector(),
            "Successfully parsed vault metadata from header"
        );
        Ok(Box::new(metadata))
    }
}
/// Trait for extracting vault metadata from various sources
pub trait VaultMetadataExtractor {
    /// Extract vault metadata from headers and apply to connection config
    ///
    /// A missing vault metadata header is not an error; implementations are
    /// expected to succeed without changes in that case.
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError>;
}
impl VaultMetadataExtractor for ConnectionConfig {
    /// Reads the external vault metadata header (if present), builds the
    /// matching processor, and lets it mutate this config. Absence of the
    /// header is a no-op success.
    fn extract_and_apply_vault_metadata(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> Result<(), VaultMetadataError> {
        // Nothing to do when the vault metadata header is absent.
        let Some(vault_metadata_header) = headers.get(EXTERNAL_VAULT_METADATA_HEADER) else {
            return Ok(());
        };
        let processor =
            VaultMetadataFactory::from_base64_header(&vault_metadata_header.clone().expose())
                .inspect_err(|e| {
                    logger::error!(
                        error = %e,
                        "Failed to create vault metadata processor from header"
                    );
                })?;
        processor.process_metadata(self).inspect_err(|e| {
            logger::error!(
                error = %e,
                vault_connector = ?processor.vault_connector(),
                "Failed to apply vault metadata to connection config"
            );
        })?;
        logger::info!(
            vault_connector = ?processor.vault_connector(),
            proxy_url_applied = self.proxy_url.is_some(),
            ca_cert_applied = self.ca_cert.is_some(),
            client_cert_applied = self.client_cert.is_some(),
            "Successfully applied vault metadata to connection configuration"
        );
        Ok(())
    }
}
/// Extended trait for graceful fallback handling
pub trait VaultMetadataExtractorExt {
    /// Extract vault metadata with graceful fallback (doesn't fail the entire request)
    ///
    /// Returns `true` when processing succeeded and `false` when it failed;
    /// failures are logged rather than propagated.
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool;

    /// Extract vault metadata from a single header value with graceful fallback
    ///
    /// Convenience wrapper for callers that hold only the raw header value
    /// instead of a full header map.
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool;
}
impl VaultMetadataExtractorExt for ConnectionConfig {
    /// Runs the strict extraction, logs the outcome either way, and reports
    /// success as a boolean instead of failing the caller.
    fn extract_and_apply_vault_metadata_with_fallback(
        &mut self,
        headers: &HashMap<String, Secret<String>>,
    ) -> bool {
        let outcome = self.extract_and_apply_vault_metadata(headers);
        match &outcome {
            Ok(()) => {
                logger::info!(
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    client_cert_set = self.client_cert.is_some(),
                    "Vault metadata processing completed successfully"
                );
            }
            Err(error) => {
                logger::warn!(
                    error = %error,
                    proxy_url_set = self.proxy_url.is_some(),
                    ca_cert_set = self.ca_cert.is_some(),
                    "Vault metadata processing failed, continuing without vault configuration"
                );
            }
        }
        outcome.is_ok()
    }

    /// Wraps a single header value in a one-entry map and delegates to the
    /// map-based fallback method.
    fn extract_and_apply_vault_metadata_with_fallback_from_header(
        &mut self,
        header_value: &str,
    ) -> bool {
        let temp_headers = HashMap::from([(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(header_value.to_string()),
        )]);
        self.extract_and_apply_vault_metadata_with_fallback(&temp_headers)
    }
}
#[cfg(test)]
#[allow(clippy::expect_used)]
mod tests {
    use std::collections::HashMap;

    use base64::Engine;
    use common_utils::pii::SecretSerdeValue;

    use super::*;
    use crate::types::{HttpMethod, InjectorRequest, TokenData, VaultConnectors};

    /// End-to-end check: vault metadata supplied via the dedicated header is
    /// decoded, applied to the connection config, and stripped from the
    /// regular header map.
    #[test]
    fn test_vault_metadata_processing() {
        // Create test VGS metadata with base64 encoded certificate
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);

        // Serialize and base64 encode (as it would come from the header)
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);

        // Create headers with vault metadata alongside ordinary request headers
        let mut headers = HashMap::new();
        headers.insert(
            "Content-Type".to_string(),
            Secret::new("application/json".to_string()),
        );
        headers.insert(
            "Authorization".to_string(),
            Secret::new("Bearer token123".to_string()),
        );
        headers.insert(
            EXTERNAL_VAULT_METADATA_HEADER.to_string(),
            Secret::new(base64_metadata),
        );

        // Constructing the request should apply the vault metadata automatically.
        let injector_request = InjectorRequest::new(
            "https://api.example.com/v1/payments".to_string(),
            HttpMethod::POST,
            "amount={{$amount}}&currency={{$currency}}".to_string(),
            TokenData {
                vault_connector: VaultConnectors::VGS,
                specific_token_data: SecretSerdeValue::new(serde_json::json!({
                    "amount": "1000",
                    "currency": "USD"
                })),
            },
            Some(headers),
            None, // No fallback proxy needed - vault metadata provides it
            None, // No fallback client cert
            None, // No fallback client key
            None, // No fallback CA cert
        );

        // Verify vault metadata was automatically applied
        assert!(injector_request.connection_config.proxy_url.is_some());
        assert!(injector_request.connection_config.ca_cert.is_some());
        assert_eq!(
            injector_request
                .connection_config
                .proxy_url
                .as_ref()
                .expect("Proxy URL should be set")
                .clone()
                .expose(),
            "https://vgs-proxy.example.com:8443/"
        );

        // Verify vault metadata header was removed from regular headers
        assert!(!injector_request
            .connection_config
            .headers
            .contains_key(EXTERNAL_VAULT_METADATA_HEADER));

        // Verify other headers are preserved
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Content-Type"));
        assert!(injector_request
            .connection_config
            .headers
            .contains_key("Authorization"));
    }

    /// The factory decodes a base64 header value into a processor that reports
    /// the correct vault connector.
    #[test]
    fn test_vault_metadata_factory() {
        let vgs_metadata = VgsMetadata {
            proxy_url: "https://vgs-proxy.example.com:8443"
                .parse()
                .expect("Valid test URL"),
            certificate: Secret::new("cert".to_string()),
        };
        let metadata = ExternalVaultProxyMetadata::VgsMetadata(vgs_metadata);
        let metadata_json =
            serde_json::to_vec(&metadata).expect("Metadata serialization should succeed");
        let base64_metadata = BASE64_ENGINE.encode(&metadata_json);

        // Test factory creation from base64
        let processor = VaultMetadataFactory::from_base64_header(&base64_metadata)
            .expect("Base64 decoding should succeed");
        assert_eq!(processor.vault_connector(), VaultConnectors::VGS);
    }
}
</crate>
|
{
"crate": "injector",
"file": null,
"files": [
"crates/injector/src/consts.rs",
"crates/injector/src/injector.rs",
"crates/injector/src/types.rs",
"crates/injector/src/lib.rs",
"crates/injector/src/vault_metadata.rs"
],
"module": null,
"num_files": 5,
"token_count": 11627
}
|
crate_4882760121450281612
|
clm
|
crate
|
<path>
Repository: hyperswitch
Crate: connector-template
Files: 3
</path>
<crate>
// File: connector-template/test.rs
use hyperswitch_domain_models::payment_method_data::{Card, PaymentMethodData};
use masking::Secret;
use router::{
types::{self, api, storage::enums,
}};
use crate::utils::{self, ConnectorActions};
use test_utils::connector_auth;
// Test connector marker type. The `{{project-name | ...}}` placeholders are
// substituted by the connector code generator when a new connector is
// scaffolded from this template.
#[derive(Clone, Copy)]
struct {{project-name | downcase | pascal_case}}Test;
impl ConnectorActions for {{project-name | downcase | pascal_case}}Test {}
impl utils::Connector for {{project-name | downcase | pascal_case}}Test {
    // Builds the connector data handed to the test harness.
    fn get_data(&self) -> api::ConnectorData {
        use router::connector::{{project-name | downcase | pascal_case}};
        utils::construct_connector_data_old(
            Box::new({{project-name | downcase | pascal_case}}::new()),
            // NOTE(review): placeholder variant — replace with the generated
            // connector's own `types::Connector` variant after scaffolding.
            types::Connector::Plaid,
            api::GetToken::Connector,
            None,
        )
    }
    // Reads the connector's credentials from the shared test auth configuration.
    fn get_auth_token(&self) -> types::ConnectorAuthType {
        utils::to_connector_auth_type(
            connector_auth::ConnectorAuthentication::new()
                .{{project-name | downcase}}
                .expect("Missing connector authentication configuration").into(),
        )
    }
    fn get_name(&self) -> String {
        "{{project-name | downcase}}".to_string()
    }
}
// Shared connector instance used by every test below.
static CONNECTOR: {{project-name | downcase | pascal_case}}Test = {{project-name | downcase | pascal_case}}Test {};
// Per-test payment info; `None` falls back to harness defaults.
fn get_default_payment_info() -> Option<utils::PaymentInfo> {
    None
}
// Authorize-request payload; `None` falls back to harness defaults.
fn payment_method_details() -> Option<types::PaymentsAuthorizeData> {
    None
}
// Cards Positive Tests
//
// The tests in this section exercise the manual capture flow: a payment is
// first authorized, then captured / voided / refunded in a separate step.

// Creates a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_only_authorize_payment() {
    let response = CONNECTOR
        .authorize_payment(payment_method_details(), get_default_payment_info())
        .await
        .expect("Authorize payment response");
    assert_eq!(response.status, enums::AttemptStatus::Authorized);
}
// Captures a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_capture_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_capture_payment(payment_method_details(), None, get_default_payment_info())
        .await
        .expect("Capture payment response");
    assert_eq!(response.status, enums::AttemptStatus::Charged);
}
// Partially captures a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_partially_capture_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_capture_payment(
            payment_method_details(),
            // Capture only part of the authorized amount.
            Some(types::PaymentsCaptureData {
                amount_to_capture: 50,
                ..utils::PaymentCaptureType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .expect("Capture payment response");
    assert_eq!(response.status, enums::AttemptStatus::Charged);
}
// Synchronizes a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_authorized_payment() {
    let authorize_response = CONNECTOR
        .authorize_payment(payment_method_details(), get_default_payment_info())
        .await
        .expect("Authorize payment response");
    let txn_id = utils::get_connector_transaction_id(authorize_response.response);
    // Poll PSync until the expected status is reported.
    let response = CONNECTOR
        .psync_retry_till_status_matches(
            enums::AttemptStatus::Authorized,
            Some(types::PaymentsSyncData {
                connector_transaction_id: types::ResponseId::ConnectorTransactionId(
                    txn_id.unwrap(),
                ),
                ..Default::default()
            }),
            get_default_payment_info(),
        )
        .await
        .expect("PSync response");
    assert_eq!(response.status, enums::AttemptStatus::Authorized,);
}
// Voids a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_void_authorized_payment() {
    let response = CONNECTOR
        .authorize_and_void_payment(
            payment_method_details(),
            Some(types::PaymentsCancelData {
                connector_transaction_id: String::from(""),
                cancellation_reason: Some("requested_by_customer".to_string()),
                ..Default::default()
            }),
            get_default_payment_info(),
        )
        .await
        .expect("Void payment response");
    assert_eq!(response.status, enums::AttemptStatus::Voided);
}
// Refunds a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_refund_manually_captured_payment() {
    let response = CONNECTOR
        .capture_payment_and_refund(payment_method_details(), None, None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Partially refunds a payment using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_partially_refund_manually_captured_payment() {
    let response = CONNECTOR
        .capture_payment_and_refund(
            payment_method_details(),
            None,
            // Refund only part of the captured amount.
            Some(types::RefundsData {
                refund_amount: 50,
                ..utils::PaymentRefundType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Synchronizes a refund using the manual capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_manually_captured_refund() {
    let refund_response = CONNECTOR
        .capture_payment_and_refund(payment_method_details(), None, None, get_default_payment_info())
        .await
        .unwrap();
    // Poll RSync until the refund is reported successful.
    let response = CONNECTOR
        .rsync_retry_till_status_matches(
            enums::RefundStatus::Success,
            refund_response.response.unwrap().connector_refund_id,
            None,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// The tests in this section exercise the automatic capture flow: the payment
// is captured in the same step as authorization.

// Creates a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_make_payment() {
    let authorize_response = CONNECTOR.make_payment(payment_method_details(), get_default_payment_info()).await.unwrap();
    assert_eq!(authorize_response.status, enums::AttemptStatus::Charged);
}
// Synchronizes a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_auto_captured_payment() {
    let authorize_response = CONNECTOR.make_payment(payment_method_details(), get_default_payment_info()).await.unwrap();
    assert_eq!(authorize_response.status, enums::AttemptStatus::Charged);
    let txn_id = utils::get_connector_transaction_id(authorize_response.response);
    assert_ne!(txn_id, None, "Empty connector transaction id");
    // Poll PSync until the charged status is reported.
    let response = CONNECTOR
        .psync_retry_till_status_matches(
            enums::AttemptStatus::Charged,
            Some(types::PaymentsSyncData {
                connector_transaction_id: types::ResponseId::ConnectorTransactionId(
                    txn_id.unwrap(),
                ),
                capture_method: Some(enums::CaptureMethod::Automatic),
                ..Default::default()
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(response.status, enums::AttemptStatus::Charged,);
}
// Refunds a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_refund_auto_captured_payment() {
    let response = CONNECTOR
        .make_payment_and_refund(payment_method_details(), None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Partially refunds a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_partially_refund_succeeded_payment() {
    let refund_response = CONNECTOR
        .make_payment_and_refund(
            payment_method_details(),
            // Refund only part of the charged amount.
            Some(types::RefundsData {
                refund_amount: 50,
                ..utils::PaymentRefundType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        refund_response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Creates multiple refunds against a payment using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_refund_succeeded_payment_multiple_times() {
    CONNECTOR
        .make_payment_and_multiple_refund(
            payment_method_details(),
            Some(types::RefundsData {
                refund_amount: 50,
                ..utils::PaymentRefundType::default().0
            }),
            get_default_payment_info(),
        )
        .await;
}
// Synchronizes a refund using the automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_sync_refund() {
    let refund_response = CONNECTOR
        .make_payment_and_refund(payment_method_details(), None, get_default_payment_info())
        .await
        .unwrap();
    // Poll RSync until the refund is reported successful.
    let response = CONNECTOR
        .rsync_retry_till_status_matches(
            enums::RefundStatus::Success,
            refund_response.response.unwrap().connector_refund_id,
            None,
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap().refund_status,
        enums::RefundStatus::Success,
    );
}
// Cards Negative scenarios
//
// NOTE(review): the expected error messages below are Stripe-style placeholder
// strings from the template — adjust them to the generated connector's actual
// error responses.

// Creates a payment with incorrect CVC.
#[actix_web::test]
async fn should_fail_payment_for_incorrect_cvc() {
    let response = CONNECTOR
        .make_payment(
            Some(types::PaymentsAuthorizeData {
                payment_method_data: PaymentMethodData::Card(Card {
                    card_cvc: Secret::new("12345".to_string()),
                    ..utils::CCardType::default().0
                }),
                ..utils::PaymentAuthorizeType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap_err().message,
        "Your card's security code is invalid.".to_string(),
    );
}
// Creates a payment with incorrect expiry month.
#[actix_web::test]
async fn should_fail_payment_for_invalid_exp_month() {
    let response = CONNECTOR
        .make_payment(
            Some(types::PaymentsAuthorizeData {
                payment_method_data: PaymentMethodData::Card(Card {
                    card_exp_month: Secret::new("20".to_string()),
                    ..utils::CCardType::default().0
                }),
                ..utils::PaymentAuthorizeType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap_err().message,
        "Your card's expiration month is invalid.".to_string(),
    );
}
// Creates a payment with incorrect expiry year.
#[actix_web::test]
async fn should_fail_payment_for_incorrect_expiry_year() {
    let response = CONNECTOR
        .make_payment(
            Some(types::PaymentsAuthorizeData {
                payment_method_data: PaymentMethodData::Card(Card {
                    card_exp_year: Secret::new("2000".to_string()),
                    ..utils::CCardType::default().0
                }),
                ..utils::PaymentAuthorizeType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap_err().message,
        "Your card's expiration year is invalid.".to_string(),
    );
}
// Voids a payment using automatic capture flow (Non 3DS).
#[actix_web::test]
async fn should_fail_void_payment_for_auto_capture() {
    let authorize_response = CONNECTOR.make_payment(payment_method_details(), get_default_payment_info()).await.unwrap();
    assert_eq!(authorize_response.status, enums::AttemptStatus::Charged);
    let txn_id = utils::get_connector_transaction_id(authorize_response.response);
    assert_ne!(txn_id, None, "Empty connector transaction id");
    // Voiding an already-captured payment must be rejected by the connector.
    let void_response = CONNECTOR
        .void_payment(txn_id.unwrap(), None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        void_response.response.unwrap_err().message,
        "You cannot cancel this PaymentIntent because it has a status of succeeded."
    );
}
// Captures a payment using invalid connector payment id.
#[actix_web::test]
async fn should_fail_capture_for_invalid_payment() {
    let capture_response = CONNECTOR
        .capture_payment("123456789".to_string(), None, get_default_payment_info())
        .await
        .unwrap();
    assert_eq!(
        capture_response.response.unwrap_err().message,
        String::from("No such payment_intent: '123456789'")
    );
}
// Refunds a payment with refund amount higher than payment amount.
#[actix_web::test]
async fn should_fail_for_refund_amount_higher_than_payment_amount() {
    let response = CONNECTOR
        .make_payment_and_refund(
            payment_method_details(),
            // 150 exceeds the default payment amount of 100.
            Some(types::RefundsData {
                refund_amount: 150,
                ..utils::PaymentRefundType::default().0
            }),
            get_default_payment_info(),
        )
        .await
        .unwrap();
    assert_eq!(
        response.response.unwrap_err().message,
        "Refund amount (₹1.50) is greater than charge amount (₹1.00)",
    );
}
// Connector-dependent test cases go here
// [#478]: add unit tests for non 3DS, wallets & webhooks in connector tests
// File: connector-template/transformers.rs
use common_enums::enums;
use serde::{Deserialize, Serialize};
use masking::Secret;
use common_utils::types::{StringMinorUnit};
use hyperswitch_domain_models::{
payment_method_data::PaymentMethodData,
router_data::{ConnectorAuthType, RouterData},
router_flow_types::refunds::{Execute, RSync},
router_request_types::ResponseId,
router_response_types::{PaymentsResponseData, RefundsResponseData},
types::{PaymentsAuthorizeRouterData, RefundsRouterData},
};
use hyperswitch_interfaces::errors;
use crate::types::{RefundsResponseRouterData, ResponseRouterData};
//TODO: Fill the struct with respective fields
/// Wrapper pairing the connector-specific amount representation with the
/// framework's router data for a given flow.
pub struct {{project-name | downcase | pascal_case}}RouterData<T> {
    pub amount: StringMinorUnit, // The type of amount that a connector accepts, for example, String, i64, f64, etc.
    pub router_data: T,
}

impl<T>
    From<(
        StringMinorUnit,
        T,
    )> for {{project-name | downcase | pascal_case}}RouterData<T>
{
    fn from(
        (amount, item): (
            StringMinorUnit,
            T,
        ),
    ) -> Self {
        //Todo : use utils to convert the amount to the type of amount that a connector accepts
        Self {
            amount,
            router_data: item,
        }
    }
}
//TODO: Fill the struct with respective fields
/// Request body sent to the connector for payment authorization.
#[derive(Default, Debug, Serialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}PaymentsRequest {
    amount: StringMinorUnit,
    card: {{project-name | downcase | pascal_case}}Card
}

/// Card details as expected by the connector API.
// NOTE(review): `cards::CardNumber` relies on the `cards` crate being in scope
// in the generated connector — confirm the import after scaffolding.
#[derive(Default, Debug, Serialize, Eq, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}Card {
    number: cards::CardNumber,
    expiry_month: Secret<String>,
    expiry_year: Secret<String>,
    cvc: Secret<String>,
    complete: bool,
}

impl TryFrom<&{{project-name | downcase | pascal_case}}RouterData<&PaymentsAuthorizeRouterData>> for {{project-name | downcase | pascal_case}}PaymentsRequest {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Template stub: every payment method currently maps to `NotImplemented`;
    // fill in the card branch when implementing the connector.
    fn try_from(item: &{{project-name | downcase | pascal_case}}RouterData<&PaymentsAuthorizeRouterData>) -> Result<Self,Self::Error> {
        match item.router_data.request.payment_method_data.clone() {
            PaymentMethodData::Card(_) => {
                Err(errors::ConnectorError::NotImplemented("Card payment method not implemented".to_string()).into())
            },
            _ => Err(errors::ConnectorError::NotImplemented("Payment method".to_string()).into()),
        }
    }
}
//TODO: Fill the struct with respective fields
// Auth Struct
/// Connector credentials extracted from the merchant's connector account.
pub struct {{project-name | downcase | pascal_case}}AuthType {
    pub(super) api_key: Secret<String>
}

impl TryFrom<&ConnectorAuthType> for {{project-name | downcase | pascal_case}}AuthType {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Only the single-key (`HeaderKey`) auth scheme is supported by the template.
    fn try_from(auth_type: &ConnectorAuthType) -> Result<Self, Self::Error> {
        match auth_type {
            ConnectorAuthType::HeaderKey { api_key } => Ok(Self {
                api_key: api_key.to_owned(),
            }),
            _ => Err(errors::ConnectorError::FailedToObtainAuthType.into()),
        }
    }
}
// PaymentsResponse
//TODO: Append the remaining status flags
/// Payment status values returned by the connector.
#[derive(Debug, Clone, Copy, Default, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "lowercase")]
pub enum {{project-name | downcase | pascal_case}}PaymentStatus {
    Succeeded,
    Failed,
    // Unknown / in-flight payments default to `Processing`.
    #[default]
    Processing,
}

// Maps connector statuses onto the framework's attempt statuses.
impl From<{{project-name | downcase | pascal_case}}PaymentStatus> for common_enums::AttemptStatus {
    fn from(item: {{project-name | downcase | pascal_case}}PaymentStatus) -> Self {
        match item {
            {{project-name | downcase | pascal_case}}PaymentStatus::Succeeded => Self::Charged,
            {{project-name | downcase | pascal_case}}PaymentStatus::Failed => Self::Failure,
            {{project-name | downcase | pascal_case}}PaymentStatus::Processing => Self::Authorizing,
        }
    }
}

//TODO: Fill the struct with respective fields
/// Payment response body received from the connector.
#[derive(Default, Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}PaymentsResponse {
    status: {{project-name | downcase | pascal_case}}PaymentStatus,
    id: String,
}

impl<F,T> TryFrom<ResponseRouterData<F, {{project-name | downcase | pascal_case}}PaymentsResponse, T, PaymentsResponseData>> for RouterData<F, T, PaymentsResponseData> {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Converts the connector response into framework router data, carrying the
    // mapped status and the connector transaction id.
    fn try_from(item: ResponseRouterData<F, {{project-name | downcase | pascal_case}}PaymentsResponse, T, PaymentsResponseData>) -> Result<Self,Self::Error> {
        Ok(Self {
            status: common_enums::AttemptStatus::from(item.response.status),
            response: Ok(PaymentsResponseData::TransactionResponse {
                resource_id: ResponseId::ConnectorTransactionId(item.response.id),
                redirection_data: Box::new(None),
                mandate_reference: Box::new(None),
                connector_metadata: None,
                network_txn_id: None,
                connector_response_reference_id: None,
                incremental_authorization_allowed: None,
                charges: None,
            }),
            ..item.data
        })
    }
}
//TODO: Fill the struct with respective fields
// REFUND :
// Type definition for RefundRequest
/// Refund request body sent to the connector.
#[derive(Default, Debug, Serialize)]
pub struct {{project-name | downcase | pascal_case}}RefundRequest {
    pub amount: StringMinorUnit
}

impl<F> TryFrom<&{{project-name | downcase | pascal_case}}RouterData<&RefundsRouterData<F>>> for {{project-name | downcase | pascal_case}}RefundRequest {
    type Error = error_stack::Report<errors::ConnectorError>;
    // Template stub: forwards only the pre-converted refund amount.
    fn try_from(item: &{{project-name | downcase | pascal_case}}RouterData<&RefundsRouterData<F>>) -> Result<Self,Self::Error> {
        Ok(Self {
            amount: item.amount.to_owned(),
        })
    }
}
// Type definition for Refund Response

/// Refund status values returned by the connector.
#[allow(dead_code)]
#[derive(Debug, Copy, Serialize, Default, Deserialize, Clone)]
pub enum RefundStatus {
    Succeeded,
    Failed,
    // Unknown / in-flight refunds default to `Processing`.
    #[default]
    Processing,
}

// Maps connector refund statuses onto the framework's refund statuses.
impl From<RefundStatus> for enums::RefundStatus {
    fn from(item: RefundStatus) -> Self {
        match item {
            RefundStatus::Succeeded => Self::Success,
            RefundStatus::Failed => Self::Failure,
            RefundStatus::Processing => Self::Pending,
            //TODO: Review mapping
        }
    }
}

//TODO: Fill the struct with respective fields
/// Refund response body received from the connector.
#[derive(Default, Debug, Clone, Serialize, Deserialize)]
pub struct RefundResponse {
    id: String,
    status: RefundStatus
}

// Converts the refund-execute response into framework router data.
impl TryFrom<RefundsResponseRouterData<Execute, RefundResponse>>
    for RefundsRouterData<Execute>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    fn try_from(
        item: RefundsResponseRouterData<Execute, RefundResponse>,
    ) -> Result<Self, Self::Error> {
        Ok(Self {
            response: Ok(RefundsResponseData {
                connector_refund_id: item.response.id.to_string(),
                refund_status: enums::RefundStatus::from(item.response.status),
            }),
            ..item.data
        })
    }
}

// Converts the refund-sync response into framework router data (same shape as
// the execute conversion above).
impl TryFrom<RefundsResponseRouterData<RSync, RefundResponse>> for RefundsRouterData<RSync>
{
    type Error = error_stack::Report<errors::ConnectorError>;
    fn try_from(item: RefundsResponseRouterData<RSync, RefundResponse>) -> Result<Self,Self::Error> {
        Ok(Self {
            response: Ok(RefundsResponseData {
                connector_refund_id: item.response.id.to_string(),
                refund_status: enums::RefundStatus::from(item.response.status),
            }),
            ..item.data
        })
    }
}
//TODO: Fill the struct with respective fields
/// Error response body received from the connector, deserialized by
/// `build_error_response` in the connector module.
#[derive(Default, Debug, Serialize, Deserialize, PartialEq)]
pub struct {{project-name | downcase | pascal_case}}ErrorResponse {
    pub status_code: u16,
    pub code: String,
    pub message: String,
    pub reason: Option<String>,
    pub network_advice_code: Option<String>,
    pub network_decline_code: Option<String>,
    pub network_error_message: Option<String>,
}
// File: connector-template/mod.rs
pub mod transformers;
use std::sync::LazyLock;

use common_enums::enums;
use common_utils::{
    errors::CustomResult,
    ext_traits::BytesExt,
    request::{Method, Request, RequestBuilder, RequestContent},
    types::{AmountConvertor, StringMinorUnit, StringMinorUnitForConnector},
};
use error_stack::{report, ResultExt};
use hyperswitch_domain_models::{
    payment_method_data::PaymentMethodData,
    router_data::{AccessToken, ConnectorAuthType, ErrorResponse, RouterData},
    router_flow_types::{
        access_token_auth::AccessTokenAuth,
        payments::{
            Authorize, Capture, PSync, PaymentMethodToken, Session,
            SetupMandate, Void,
        },
        refunds::{Execute, RSync},
    },
    router_request_types::{
        AccessTokenRequestData, PaymentMethodTokenizationData,
        PaymentsAuthorizeData, PaymentsCancelData, PaymentsCaptureData, PaymentsSessionData,
        PaymentsSyncData, RefundsData, SetupMandateRequestData,
    },
    router_response_types::{
        ConnectorInfo, PaymentsResponseData, RefundsResponseData, SupportedPaymentMethods,
    },
    types::{
        PaymentsAuthorizeRouterData,
        PaymentsCaptureRouterData, PaymentsSyncRouterData, RefundSyncRouterData, RefundsRouterData,
    },
};
use hyperswitch_interfaces::{
    api::{self, ConnectorCommon, ConnectorCommonExt, ConnectorIntegration, ConnectorSpecifications, ConnectorValidation},
    configs::Connectors,
    errors,
    events::connector_api_logs::ConnectorEvent,
    types::{self, Response},
    webhooks,
};
use masking::{ExposeInterface, Mask};

use crate::{
    constants::headers,
    types::ResponseRouterData,
    utils,
};
use transformers as {{project-name | downcase}};
/// Connector type holding the amount-conversion strategy used when building
/// requests for this connector.
#[derive(Clone)]
pub struct {{project-name | downcase | pascal_case}} {
    amount_converter: &'static (dyn AmountConvertor<Output = StringMinorUnit> + Sync)
}

impl {{project-name | downcase | pascal_case}} {
    // Returns a `'static` instance; the struct literal is promoted to a
    // constant, so no heap allocation takes place.
    pub fn new() -> &'static Self {
        &Self {
            amount_converter: &StringMinorUnitForConnector
        }
    }
}
// Marker trait implementations wiring this connector into the framework's
// payment, refund and tokenization flows.
impl api::Payment for {{project-name | downcase | pascal_case}} {}
impl api::PaymentSession for {{project-name | downcase | pascal_case}} {}
impl api::ConnectorAccessToken for {{project-name | downcase | pascal_case}} {}
impl api::MandateSetup for {{project-name | downcase | pascal_case}} {}
impl api::PaymentAuthorize for {{project-name | downcase | pascal_case}} {}
impl api::PaymentSync for {{project-name | downcase | pascal_case}} {}
impl api::PaymentCapture for {{project-name | downcase | pascal_case}} {}
impl api::PaymentVoid for {{project-name | downcase | pascal_case}} {}
impl api::Refund for {{project-name | downcase | pascal_case}} {}
impl api::RefundExecute for {{project-name | downcase | pascal_case}} {}
impl api::RefundSync for {{project-name | downcase | pascal_case}} {}
impl api::PaymentToken for {{project-name | downcase | pascal_case}} {}
// Payment-method tokenization flow: intentionally left unimplemented; the
// framework's default (empty) implementations apply.
impl
    ConnectorIntegration<
        PaymentMethodToken,
        PaymentMethodTokenizationData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    // Not Implemented (R)
}
impl<Flow, Request, Response> ConnectorCommonExt<Flow, Request, Response> for {{project-name | downcase | pascal_case}}
where
    Self: ConnectorIntegration<Flow, Request, Response>,
{
    // Builds the common header set for every flow: `Content-Type` first,
    // followed by the connector's authorization header(s).
    fn build_headers(
        &self,
        req: &RouterData<Flow, Request, Response>,
        _connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        let mut header = vec![(
            headers::CONTENT_TYPE.to_string(),
            self.get_content_type().to_string().into(),
        )];
        let mut api_key = self.get_auth_header(&req.connector_auth_type)?;
        header.append(&mut api_key);
        Ok(header)
    }
}
impl ConnectorCommon for {{project-name | downcase | pascal_case}} {
    // Stable connector identifier used throughout the framework.
    fn id(&self) -> &'static str {
        "{{project-name | downcase}}"
    }

    // Panics via `todo!()` until the connector author picks a currency unit.
    fn get_currency_unit(&self) -> api::CurrencyUnit {
        todo!()
        // TODO! Check connector documentation, on which unit they are processing the currency.
        // If the connector accepts amount in lower unit ( i.e cents for USD) then return api::CurrencyUnit::Minor,
        // if connector accepts amount in base unit (i.e dollars for USD) then return api::CurrencyUnit::Base
    }

    fn common_get_content_type(&self) -> &'static str {
        "application/json"
    }

    // Base URL comes from the deployment's connector configuration.
    fn base_url<'a>(&self, connectors: &'a Connectors) -> &'a str {
        connectors.{{project-name}}.base_url.as_ref()
    }

    // Converts the merchant's auth config into an `Authorization` header.
    fn get_auth_header(&self, auth_type:&ConnectorAuthType)-> CustomResult<Vec<(String,masking::Maskable<String>)>,errors::ConnectorError> {
        let auth = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}AuthType::try_from(auth_type)
            .change_context(errors::ConnectorError::FailedToObtainAuthType)?;
        Ok(vec![(headers::AUTHORIZATION.to_string(), auth.api_key.expose().into_masked())])
    }

    // Deserializes a connector error body and maps it into the framework's
    // `ErrorResponse`, logging the raw response for diagnostics.
    fn build_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>,
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        let response: {{project-name | downcase}}::{{project-name | downcase | pascal_case}}ErrorResponse = res
            .response
            .parse_struct("{{project-name | downcase | pascal_case}}ErrorResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;

        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);

        Ok(ErrorResponse {
            status_code: res.status_code,
            code: response.code,
            message: response.message,
            reason: response.reason,
            attempt_status: None,
            connector_transaction_id: None,
            network_advice_code: None,
            network_decline_code: None,
            network_error_message: None,
        })
    }
}
impl ConnectorValidation for {{project-name | downcase | pascal_case}}
{
    // Mandate payments: card mandates are rejected until implemented; all
    // other payment method types are accepted.
    fn validate_mandate_payment(
        &self,
        _pm_type: Option<enums::PaymentMethodType>,
        pm_data: PaymentMethodData,
    ) -> CustomResult<(), errors::ConnectorError> {
        match pm_data {
            PaymentMethodData::Card(_) => Err(errors::ConnectorError::NotImplemented(
                "validate_mandate_payment does not support cards".to_string(),
            )
            .into()),
            _ => Ok(()),
        }
    }

    // No PSync reference-id constraints for this connector.
    fn validate_psync_reference_id(
        &self,
        _data: &PaymentsSyncData,
        _is_three_ds: bool,
        _status: enums::AttemptStatus,
        _connector_meta_data: Option<common_utils::pii::SecretSerdeValue>,
    ) -> CustomResult<(), errors::ConnectorError> {
        Ok(())
    }
}
// The following flows are not implemented by the template; the framework's
// default (empty) `ConnectorIntegration` implementations apply.
impl
    ConnectorIntegration<
        Session,
        PaymentsSessionData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    //TODO: implement sessions flow
}

impl ConnectorIntegration<AccessTokenAuth, AccessTokenRequestData, AccessToken>
    for {{project-name | downcase | pascal_case}}
{
}

impl
    ConnectorIntegration<
        SetupMandate,
        SetupMandateRequestData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
}
// Authorize flow: builds the payment request, sends it, and converts the
// connector response back into framework router data.
impl
    ConnectorIntegration<
        Authorize,
        PaymentsAuthorizeData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}} {
    fn get_headers(&self, req: &PaymentsAuthorizeRouterData, connectors: &Connectors,) -> CustomResult<Vec<(String, masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }

    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }

    // Template stub: the connector author must supply the authorize endpoint.
    fn get_url(
        &self,
        _req: &PaymentsAuthorizeRouterData,
        _connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }

    // Converts the framework amount into the connector's unit, then builds the
    // JSON request body from the router data.
    fn get_request_body(&self, req: &PaymentsAuthorizeRouterData, _connectors: &Connectors,) -> CustomResult<RequestContent, errors::ConnectorError> {
        let amount = utils::convert_amount(
            self.amount_converter,
            req.request.minor_amount,
            req.request.currency,
        )?;

        let connector_router_data =
            {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::from((
                amount,
                req,
            ));
        let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsRequest::try_from(&connector_router_data)?;
        Ok(RequestContent::Json(Box::new(connector_req)))
    }

    // Assembles the full POST request (url + headers + body).
    fn build_request(
        &self,
        req: &PaymentsAuthorizeRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&types::PaymentsAuthorizeType::get_url(
                    self, req, connectors,
                )?)
                .attach_default_headers()
                .headers(types::PaymentsAuthorizeType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(types::PaymentsAuthorizeType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }

    // Parses the connector's authorize response and folds it back into the
    // router data via the `TryFrom` conversion in `transformers`.
    fn handle_response(
        &self,
        data: &PaymentsAuthorizeRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsAuthorizeRouterData,errors::ConnectorError> {
        let response: {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsResponse = res.response.parse_struct("{{project-name | downcase | pascal_case}} PaymentsAuthorizeResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }

    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
/// Payments Sync (PSync) flow: polls the connector for the current payment
/// status via a GET request and maps the response back into `RouterData`.
impl
    ConnectorIntegration<PSync, PaymentsSyncData, PaymentsResponseData>
    for {{project-name | downcase | pascal_case}}
{
    /// Delegates to the connector-common header builder.
    fn get_headers(
        &self,
        req: &PaymentsSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    /// Endpoint is connector-specific; the template deliberately leaves it
    /// unimplemented for the connector author to fill in.
    fn get_url(
        &self,
        _req: &PaymentsSyncRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<String, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }
    /// Assembles the status-poll GET request (no body).
    fn build_request(
        &self,
        req: &PaymentsSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Get)
                .url(&types::PaymentsSyncType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::PaymentsSyncType::get_headers(self, req, connectors)?)
                .build(),
        ))
    }
    /// Deserializes the sync response, records it on the connector event
    /// (when one is being built), logs it, and maps it into `RouterData`.
    fn handle_response(
        &self,
        data: &PaymentsSyncRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsSyncRouterData, errors::ConnectorError> {
        // Normalized to match the Authorize/Capture flows: no stray space
        // after `::` in the generated type path, and a PascalCase connector
        // name in the deserialization-error context label.
        let response: {{project-name | downcase}}::{{project-name | downcase | pascal_case}}PaymentsResponse = res
            .response
            .parse_struct("{{project-name | downcase | pascal_case}} PaymentsSyncResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    /// Delegates error mapping to the connector-common error builder.
    fn get_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
/// Payments Capture flow: builds the capture POST request and maps the
/// connector's response back into `RouterData`.
impl
    ConnectorIntegration<
        Capture,
        PaymentsCaptureData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{
    /// Delegates to the connector-common header builder.
    fn get_headers(
        &self,
        req: &PaymentsCaptureRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Vec<(String, masking::Maskable<String>)>, errors::ConnectorError> {
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    /// Endpoint is connector-specific; the template deliberately leaves it
    /// unimplemented for the connector author to fill in.
    fn get_url(
        &self,
        _req: &PaymentsCaptureRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<String, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }
    /// Request body is connector-specific and left unimplemented here.
    /// NOTE(review): `build_request` below calls this hook via
    /// `PaymentsCaptureType::get_request_body`, so the Capture flow errors
    /// until the connector author implements this — confirm that is intended
    /// scaffolding behavior.
    fn get_request_body(
        &self,
        _req: &PaymentsCaptureRouterData,
        _connectors: &Connectors,
    ) -> CustomResult<RequestContent, errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_request_body method".to_string()).into())
    }
    /// Assembles the outgoing capture POST from the url/header/body hooks.
    fn build_request(
        &self,
        req: &PaymentsCaptureRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Post)
                .url(&types::PaymentsCaptureType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::PaymentsCaptureType::get_headers(
                    self, req, connectors,
                )?)
                .set_body(types::PaymentsCaptureType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }
    /// Deserializes the capture response, records it on the connector event
    /// (when one is being built), logs it, and maps it into `RouterData`.
    fn handle_response(
        &self,
        data: &PaymentsCaptureRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<PaymentsCaptureRouterData, errors::ConnectorError> {
        let response: {{project-name | downcase }}::{{project-name | downcase | pascal_case}}PaymentsResponse = res
            .response
            .parse_struct("{{project-name | downcase | pascal_case}} PaymentsCaptureResponse")
            .change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    /// Delegates error mapping to the connector-common error builder.
    fn get_error_response(
        &self,
        res: Response,
        event_builder: Option<&mut ConnectorEvent>
    ) -> CustomResult<ErrorResponse, errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
/// Void (cancel) flow: left empty in the template — the default trait
/// implementations apply.
impl
    ConnectorIntegration<
        Void,
        PaymentsCancelData,
        PaymentsResponseData,
    > for {{project-name | downcase | pascal_case}}
{}
/// Refund Execute flow: builds the refund-creation POST request and maps the
/// connector's response back into `RouterData`.
impl
    ConnectorIntegration<
        Execute,
        RefundsData,
        RefundsResponseData,
    > for {{project-name | downcase | pascal_case}} {
    /// Delegates to the connector-common header builder.
    fn get_headers(&self, req: &RefundsRouterData<Execute>, connectors: &Connectors,) -> CustomResult<Vec<(String,masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    /// Endpoint is connector-specific; the template deliberately leaves it
    /// unimplemented for the connector author to fill in.
    fn get_url(
        &self,
        _req: &RefundsRouterData<Execute>,
        _connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }
    /// Converts the minor-unit refund amount via `amount_converter`, pairs it
    /// with the router data, and serializes the refund request as JSON.
    fn get_request_body(&self, req: &RefundsRouterData<Execute>, _connectors: &Connectors,) -> CustomResult<RequestContent, errors::ConnectorError> {
        let refund_amount = utils::convert_amount(
            self.amount_converter,
            req.request.minor_refund_amount,
            req.request.currency,
        )?;
        let connector_router_data =
            {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RouterData::from((
                refund_amount,
                req,
            ));
        let connector_req = {{project-name | downcase}}::{{project-name | downcase | pascal_case}}RefundRequest::try_from(&connector_router_data)?;
        Ok(RequestContent::Json(Box::new(connector_req)))
    }
    /// Assembles the outgoing refund POST from the url/header/body hooks.
    fn build_request(&self, req: &RefundsRouterData<Execute>, connectors: &Connectors,) -> CustomResult<Option<Request>,errors::ConnectorError> {
        let request = RequestBuilder::new()
            .method(Method::Post)
            .url(&types::RefundExecuteType::get_url(self, req, connectors)?)
            .attach_default_headers()
            .headers(types::RefundExecuteType::get_headers(self, req, connectors)?)
            .set_body(types::RefundExecuteType::get_request_body(self, req, connectors)?)
            .build();
        Ok(Some(request))
    }
    /// Deserializes the refund response, records it on the connector event
    /// (when one is being built), logs it, and maps it into `RouterData`.
    fn handle_response(
        &self,
        data: &RefundsRouterData<Execute>,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<RefundsRouterData<Execute>,errors::ConnectorError> {
        let response: {{project-name| downcase}}::RefundResponse = res.response.parse_struct("{{project-name | downcase}} RefundResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    /// Delegates error mapping to the connector-common error builder.
    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
/// Refund Sync (RSync) flow: polls the connector for refund status and maps
/// the response back into `RouterData`.
impl
    ConnectorIntegration<RSync, RefundsData, RefundsResponseData> for {{project-name | downcase | pascal_case}} {
    /// Delegates to the connector-common header builder.
    fn get_headers(&self, req: &RefundSyncRouterData,connectors: &Connectors,) -> CustomResult<Vec<(String, masking::Maskable<String>)>,errors::ConnectorError> {
        self.build_headers(req, connectors)
    }
    fn get_content_type(&self) -> &'static str {
        self.common_get_content_type()
    }
    /// Endpoint is connector-specific; the template deliberately leaves it
    /// unimplemented for the connector author to fill in.
    fn get_url(
        &self,
        _req: &RefundSyncRouterData,_connectors: &Connectors,) -> CustomResult<String,errors::ConnectorError> {
        Err(errors::ConnectorError::NotImplemented("get_url method".to_string()).into())
    }
    /// Assembles the refund-status GET request.
    /// NOTE(review): unlike PSync, this GET also attaches a request body via
    /// `set_body` — confirm whether a body on GET is intended for this flow.
    fn build_request(
        &self,
        req: &RefundSyncRouterData,
        connectors: &Connectors,
    ) -> CustomResult<Option<Request>, errors::ConnectorError> {
        Ok(Some(
            RequestBuilder::new()
                .method(Method::Get)
                .url(&types::RefundSyncType::get_url(self, req, connectors)?)
                .attach_default_headers()
                .headers(types::RefundSyncType::get_headers(self, req, connectors)?)
                .set_body(types::RefundSyncType::get_request_body(self, req, connectors)?)
                .build(),
        ))
    }
    /// Deserializes the refund-sync response, records it on the connector
    /// event (when one is being built), logs it, and maps it into `RouterData`.
    fn handle_response(
        &self,
        data: &RefundSyncRouterData,
        event_builder: Option<&mut ConnectorEvent>,
        res: Response,
    ) -> CustomResult<RefundSyncRouterData,errors::ConnectorError,> {
        let response: {{project-name | downcase}}::RefundResponse = res.response.parse_struct("{{project-name | downcase}} RefundSyncResponse").change_context(errors::ConnectorError::ResponseDeserializationFailed)?;
        event_builder.map(|i| i.set_response_body(&response));
        router_env::logger::info!(connector_response=?response);
        RouterData::try_from(ResponseRouterData {
            response,
            data: data.clone(),
            http_code: res.status_code,
        })
    }
    /// Delegates error mapping to the connector-common error builder.
    fn get_error_response(&self, res: Response, event_builder: Option<&mut ConnectorEvent>) -> CustomResult<ErrorResponse,errors::ConnectorError> {
        self.build_error_response(res, event_builder)
    }
}
/// Incoming-webhook handling: every hook is unimplemented in the template;
/// generated connectors must fill these in to enable webhooks.
#[async_trait::async_trait]
impl webhooks::IncomingWebhook for {{project-name | downcase | pascal_case}} {
    /// Extracts the payment/refund reference the webhook refers to.
    fn get_webhook_object_reference_id(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<api_models::webhooks::ObjectReferenceId, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
    /// Classifies the webhook into a framework event type.
    fn get_webhook_event_type(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<api_models::webhooks::IncomingWebhookEvent, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
    /// Returns the deserialized webhook payload for downstream processing.
    fn get_webhook_resource_object(
        &self,
        _request: &webhooks::IncomingWebhookRequestDetails<'_>,
    ) -> CustomResult<Box<dyn masking::ErasedMaskSerialize>, errors::ConnectorError> {
        Err(report!(errors::ConnectorError::WebhooksNotImplemented))
    }
}
/// Lazily-initialized registry of payment methods this connector supports.
/// The template starts with an empty registry; generated connectors add
/// their capabilities here.
static {{project-name | upcase}}_SUPPORTED_PAYMENT_METHODS: LazyLock<SupportedPaymentMethods> =
    LazyLock::new(SupportedPaymentMethods::new);
/// Static descriptive metadata for this connector (display name,
/// description, and category) surfaced via `ConnectorSpecifications`.
static {{project-name | upcase}}_CONNECTOR_INFO: ConnectorInfo = ConnectorInfo {
    display_name: "{{project-name | downcase | pascal_case}}",
    description: "{{project-name | downcase | pascal_case}} connector",
    connector_type: enums::HyperswitchConnectorCategory::PaymentGateway,
};
/// Webhook event classes this connector supports — empty until the
/// `IncomingWebhook` hooks above are implemented.
static {{project-name | upcase}}_SUPPORTED_WEBHOOK_FLOWS: [enums::EventClass; 0] = [];
/// Exposes the static connector metadata declared above to the framework.
impl ConnectorSpecifications for {{project-name | downcase | pascal_case}} {
    fn get_connector_about(&self) -> Option<&'static ConnectorInfo> {
        Some(&{{project-name | upcase}}_CONNECTOR_INFO)
    }
    fn get_supported_payment_methods(&self) -> Option<&'static SupportedPaymentMethods> {
        // `&*` dereferences through the LazyLock to yield a `'static` reference.
        Some(&*{{project-name | upcase}}_SUPPORTED_PAYMENT_METHODS)
    }
    fn get_supported_webhook_flows(&self) -> Option<&'static [enums::EventClass]> {
        Some(&{{project-name | upcase}}_SUPPORTED_WEBHOOK_FLOWS)
    }
}
</crate>
|
{
"crate": "connector-template",
"file": null,
"files": [
"connector-template/test.rs",
"connector-template/transformers.rs",
"connector-template/mod.rs"
],
"module": null,
"num_files": 3,
"token_count": 9951
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.