defmodule Compliance.Customer.Importer do
@moduledoc """
grouping for importing Individual and Business
"""
alias Compliance.Helpers.NameHelper
alias Compliance.Helpers.ValidationHelper
alias Compliance.Helpers.DateHelper
alias Compliance.CompanySetting
alias Compliance.Customer
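@doc """
Imports individuals from the first sheet of a Base64-encoded XLSX file.

Each row is expected to hold nine columns: name, ssn, mobile, email,
risk score, risk score comment, answered date, business ssn and business name.

A minimal usage sketch (the file name, company key and importing user below
are illustrative only, not real values):

    # illustrative values: any company key, importing user and Base64-encoded workbook
    file_base64 = "individuals.xlsx" |> File.read!() |> Base.encode64()

    Compliance.Customer.Importer.import_individuals("company-key", "imported-by", file_base64)

Returns `{:error, message}` when the workbook cannot be read. Otherwise
returns `{:ok, result}`, where `result` is either the list of per-row
outcomes or `{:error, errors}` when any row fails pre-validation.
"""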
def import_individuals(company_key, imported_by, file_base64) do
with {:ok, rows} <- get_rows_in_first_sheet(file_base64) do
{:ok, start_import(company_key, imported_by, rows)}
end
end
defp start_import(company_key, imported_by, [_header | rows]) do
risk_scores = CompanySetting.list_riskscores!(company_key)
requirements = CompanySetting.list_requirements(company_key)
validated_rows =
rows
# Index rows from 2 so error line numbers match the spreadsheet (row 1 is the header)
|> Enum.with_index(2)
|> Enum.map(&parse_and_validate_row(company_key, risk_scores, requirements, &1))
case Enum.all?(validated_rows, &match?({:ok, _}, &1)) do
# At least one row failed validation: collect every error and abort the import
false ->
error_list =
validated_rows
|> Enum.filter(&match?({:error, _}, &1))
|> Enum.map(fn {:error, errors} -> errors end)
|> List.flatten()
{:error, error_list}
true ->
Enum.map(validated_rows, fn {:ok,
%{
prepared_row: prepared_row,
risk_score: risk_score,
index: index
}} ->
# Finally process each row, which returns either an error with line numbers or a success tuple
process_row(
imported_by,
requirements,
risk_score,
prepared_row,
index
)
end)
end
end
defp parse_and_validate_row(company_key, risk_scores, requirements, {row, index}) do
case parse_row(row, index) do
{:error, errors} ->
{:error, errors}
{:ok, parsed_row} ->
prepared_row =
parsed_row
|> Map.put(:company_key, company_key)
|> Map.put(:signature_type, "Qualified")
|> add_business()
risk_score = CompanySetting.find_risk_score(risk_scores, prepared_row.risk_score)
case pre_validate(prepared_row, risk_score, index) do
{:error, errors} -> {:error, errors}
:ok -> {:ok, %{prepared_row: prepared_row, risk_score: risk_score, index: index}}
end
end
end
# Pre-validates a single row; any failure here aborts the whole import before anything is persisted
defp pre_validate(parsed_row, risk_score, index) do
changeset = Customer.change_individual(%Compliance.Schema.Individual{}, parsed_row)
cond do
risk_score == nil and parsed_row.risk_score != "" ->
{:error, format_risk_score_error(parsed_row.risk_score, index)}
changeset.valid? == false ->
{:error, format_validation_error(changeset.errors, parsed_row, index)}
true ->
:ok
end
end
defp format_validation_error(errors, parsed_row, index) do
field_errors = ValidationHelper.format_validation_error(%{valid: false, errors: errors})
# Attach the submitted value of each invalid field to the error entry for this line
errors_for_line =
Enum.reduce(field_errors, %{errors: field_errors, line_number: index}, fn x, acc ->
Map.put(acc, x.field.field_name, Map.get(parsed_row, x.field.field_name))
end)
[errors_for_line]
end
defp format_risk_score_error(risk_score, index) do
[
%{
risk_score: risk_score,
line_number: index,
errors: [
%{
message: "Could not find risk score with name: '#{risk_score}'",
field: %{error_message: "{:validation}", field_name: :risk_score}
}
]
}
]
end
# Row has no risk score: create the individual but skip results and scoring
defp process_row(_imported_by, _requirements, nil, %{risk_score: ""} = parsed_row, index) do
with {:ok, individual} <- create_individual(parsed_row, index) do
{:ok, %{line_number: index, individual: individual, fake_results: nil}}
end
end
defp process_row(
imported_by,
requirements,
risk_score,
parsed_row,
index
) do
with {:ok, individual} <- create_individual(parsed_row, index) do
fake_results =
add_fake_results(imported_by, individual, risk_score, requirements, parsed_row)
{:ok, %{line_number: index, individual: individual, fake_results: fake_results}}
end
end
# No business ssn on the row: nothing to create or link
defp add_business(%{business_ssn: ""} = attr), do: attr
defp add_business(attr) do
business =
case Customer.search_business(%{ssn: attr.business_ssn, company_key: attr.company_key}) do
# create it here
nil ->
{:ok, created_business} =
Customer.create_business(%{
active: true,
company_key: attr.company_key,
name: attr.business_name,
ssn: attr.business_ssn
})
created_business
found ->
# already exists
found
end
Map.put(attr, :business_id, business.id)
end
defp create_individual(attr, index) do
case Customer.search_one(attr) do
nil ->
# No matching customer found: create the individual
Customer.create_individual(attr)
_ ->
# Customer exists, abandon import for this customer
{:error,
%{
line_number: index,
errors: [
%{
message: "customer: #{attr.first_name} with ssn:#{attr.ssn}, already exists"
}
]
}}
end
|> case do
{:error, %Ecto.Changeset{} = validation} ->
{:error,
%{
line_number: index,
errors: ValidationHelper.format_validation_error(validation)
}}
# not a validation error
result ->
result
end
end
defp add_fake_results(imported_by, individual, risk_score, requirements, parsed_row) do
# For every compliance type required by the company (PEP checks excluded),
# create an imported result and score it against the selected risk score
requirements
|> Enum.filter(&(&1.compliance_type != :pep))
|> Enum.group_by(& &1.compliance_type)
|> Enum.map(fn {compliance_type, requirements_in_group} ->
{:ok, result} =
Customer.create_result(%{
# If a compliance type has several flows we cannot know which one applies, so use the first
flow_key: hd(requirements_in_group).flow_key,
compliance_type: compliance_type,
process_key: "imported",
signee_key: "imported",
meta: %{},
individual_id: individual.id,
status: :completed,
answered_date: parsed_row.answered_date,
imported: true
})
# and now add score
Customer.create_score(%{
note: parsed_row.risk_score_comment,
scored_by_user: imported_by,
risk_score_id: risk_score.id,
individual_id: individual.id,
imported: true
})
%{result_id: result.id}
end)
end
defp get_rows_in_first_sheet(file_base64) do
try do
with {:ok, package} <- Base.decode64!(file_base64) |> XlsxReader.open(source: :binary),
sheet_names when is_list(sheet_names) and length(sheet_names) > 0 <-
XlsxReader.sheet_names(package),
first_sheet = hd(sheet_names) do
XlsxReader.sheet(package, first_sheet, empty_rows: false, type_conversion: false)
else
{:error, reason} -> {:error, "Could not open excel file: #{inspect(reason)}"}
[] -> {:error, "The workbook does not contain any sheets"}
end
rescue
exception ->
Sentry.capture_exception(exception, __STACKTRACE__)
{:error, "Error occurred opening excel file: #{Exception.message(exception)}"}
end
end
defp parse_row(row, index) when length(row) >= 9 do
[
name,
ssn,
mobile,
email,
risk_score,
risk_score_comment,
answered_date,
business_ssn,
business_name
] = Enum.take(row, 9)
case DateHelper.parse_date_string(answered_date, NaiveDateTime.utc_now()) do
{:error, _msg} ->
{:error,
%{
answered_date: answered_date,
line_number: index,
errors: [
%{
message: "Could not parse date: '#{answered_date}'"
}
]
}}
{:ok, parsed_date} ->
case NameHelper.split_name(name) do
[first_name, middle_name, last_name] ->
{:ok,
%{
first_name: first_name,
middle_name: middle_name,
last_name: last_name,
ssn: format_string(ssn),
phone: format_string(mobile),
email: email |> String.replace(" ", ""),
risk_score: risk_score,
risk_score_comment: risk_score_comment,
imported: true,
business_ssn: business_ssn,
business_name: business_name,
answered_date: parsed_date
}}
_ ->
{:error,
%{
line_number: index,
errors: [%{message: "Full name should have three parts: first, middle, and last"}]
}}
end
rescue
exception -> {:error, %{line_number: index, errors: [%{message: Exception.message(exception)}]}}
end
defp parse_row(row, index) when length(row) < 9 do
{:error, %{line_number: index, errors: [%{message: "The row is missing columns"}]}}
end
defp format_string(string) do
string
|> String.replace(" ", "")
|> String.replace("-", "")
end
end