progress in persons
This commit is contained in:
parent c31240846f
commit af7aac7657

@@ -1,4 +1,4 @@
-from fastapi import APIRouter, Depends, HTTPException, Query, Request, Body
+from fastapi import APIRouter, Depends, HTTPException, Query, Request, Body, Form
 from fastapi import UploadFile, File
 from sqlalchemy.orm import Session
 from typing import Dict, Any, List, Optional

@@ -701,9 +701,9 @@ async def import_persons_excel(
     business_id: int,
     request: Request,
     file: UploadFile = File(...),
-    dry_run: bool = Body(default=True),
-    match_by: str = Body(default="code"),
-    conflict_policy: str = Body(default="upsert"),
+    dry_run: str = Form(default="true"),
+    match_by: str = Form(default="code"),
+    conflict_policy: str = Form(default="upsert"),
     auth_context: AuthContext = Depends(get_current_user),
     db: Session = Depends(get_db),
 ):

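The Body-to-Form switch above follows from how FastAPI parses multipart requests: once an endpoint declares an UploadFile, the other scalar parameters are read from the same multipart form and must be declared with Form(...); Body(...) would expect a JSON body, which cannot be combined with a file upload. Form values also arrive as strings, which is why dry_run becomes str and is parsed to a boolean inside the handler. A minimal standalone sketch of the pattern (illustrative route and names, not the project code):

    # Standalone sketch: scalar fields next to an UploadFile are Form fields,
    # and booleans arrive as strings that are parsed manually.
    from fastapi import FastAPI, File, Form, UploadFile

    app = FastAPI()

    @app.post("/import-sketch")
    async def import_sketch(
        file: UploadFile = File(...),
        dry_run: str = Form(default="true"),
    ):
        dry_run_bool = dry_run.lower() in ('true', '1', 'yes', 'on')
        return {"filename": file.filename, "dry_run": dry_run_bool}
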
@@ -712,137 +712,164 @@ async def import_persons_excel(
     import re
     from openpyxl import load_workbook
     from fastapi import HTTPException
-
-    if not file.filename or not file.filename.lower().endswith('.xlsx'):
-        raise HTTPException(status_code=400, detail="فرمت فایل معتبر نیست. تنها xlsx پشتیبانی میشود")
-
-    content = await file.read()
-    try:
-        wb = load_workbook(filename=io.BytesIO(content), data_only=True)
-    except Exception:
-        raise HTTPException(status_code=400, detail="امکان خواندن فایل وجود ندارد")
-
-    ws = wb.active
-    rows = list(ws.iter_rows(values_only=True))
-    if not rows:
-        return success_response(data={"summary": {"total": 0}}, request=request, message="فایل خالی است")
-
-    headers = [str(h).strip() if h is not None else "" for h in rows[0]]
-    data_rows = rows[1:]
-
-    # helper to map enum strings (fa/en) to internal value
-    def normalize_person_type(value: str) -> Optional[str]:
-        if not value:
-            return None
-        value = str(value).strip()
-        mapping = {
-            'customer': 'مشتری', 'marketer': 'بازاریاب', 'employee': 'کارمند', 'supplier': 'تامینکننده',
-            'partner': 'همکار', 'seller': 'فروشنده', 'shareholder': 'سهامدار'
-        }
-        for en, fa in mapping.items():
-            if value.lower() == en or value == fa:
-                return fa
-        return value # assume already fa
-
-    errors: list[dict] = []
-    valid_items: list[dict] = []
-
-    for idx, row in enumerate(data_rows, start=2):
-        item: dict[str, Any] = {}
-        row_errors: list[str] = []
-        for ci, key in enumerate(headers):
-            if not key:
-                continue
-            val = row[ci] if ci < len(row) else None
-            if isinstance(val, str):
-                val = val.strip()
-            item[key] = val
-        # normalize types
-        if 'person_type' in item and item['person_type']:
-            item['person_type'] = normalize_person_type(item['person_type'])
-        if 'person_types' in item and item['person_types']:
-            # split by comma
-            parts = [normalize_person_type(p.strip()) for p in str(item['person_types']).split(',') if str(p).strip()]
-            item['person_types'] = parts
-
-        # alias_name required
-        if not item.get('alias_name'):
-            row_errors.append('alias_name الزامی است')
-
-        # shareholder rule
-        if (item.get('person_type') == 'سهامدار') or (isinstance(item.get('person_types'), list) and 'سهامدار' in item.get('person_types', [])):
-            sc = item.get('share_count')
-            try:
-                sc_val = int(sc) if sc is not None and str(sc).strip() != '' else None
-            except Exception:
-                sc_val = None
-            if sc_val is None or sc_val <= 0:
-                row_errors.append('برای سهامدار share_count باید > 0 باشد')
-            else:
-                item['share_count'] = sc_val
-
-        if row_errors:
-            errors.append({"row": idx, "errors": row_errors})
-            continue
-
-        valid_items.append(item)
-
-    inserted = 0
-    updated = 0
-    skipped = 0
-
-    if not dry_run and valid_items:
-        # apply import with conflict policy
-        from adapters.db.models.person import Person
-        from sqlalchemy import and_
-
-        def find_existing(session: Session, data: dict) -> Optional[Person]:
-            if match_by == 'national_id' and data.get('national_id'):
-                return session.query(Person).filter(and_(Person.business_id == business_id, Person.national_id == data['national_id'])).first()
-            if match_by == 'email' and data.get('email'):
-                return session.query(Person).filter(and_(Person.business_id == business_id, Person.email == data['email'])).first()
-            if match_by == 'code' and data.get('code'):
-                try:
-                    code_int = int(data['code'])
-                    return session.query(Person).filter(and_(Person.business_id == business_id, Person.code == code_int)).first()
-                except Exception:
-                    return None
-            return None
-
-        for data in valid_items:
-            existing = find_existing(db, data)
-            if existing is None:
-                # create
-                try:
-                    create_person(db, business_id, PersonCreateRequest(**data))
-                    inserted += 1
-                except Exception:
-                    skipped += 1
-            else:
-                if conflict_policy == 'insert':
-                    skipped += 1
-                elif conflict_policy in ('update', 'upsert'):
-                    try:
-                        update_person(db, existing.id, business_id, PersonUpdateRequest(**data))
-                        updated += 1
-                    except Exception:
-                        skipped += 1
-
-    summary = {
-        "total": len(data_rows),
-        "valid": len(valid_items),
-        "invalid": len(errors),
-        "inserted": inserted,
-        "updated": updated,
-        "skipped": skipped,
-        "dry_run": dry_run,
-    }
-
-    return success_response(
-        data={
-            "summary": summary,
-            "errors": errors,
-        },
-        request=request,
-        message="نتیجه ایمپورت اشخاص",
-    )
+    import logging
+
+    logger = logging.getLogger(__name__)
+
+    try:
+        # Convert dry_run string to boolean
+        dry_run_bool = dry_run.lower() in ('true', '1', 'yes', 'on')
+
+        logger.info(f"Import request: business_id={business_id}, dry_run={dry_run_bool}, match_by={match_by}, conflict_policy={conflict_policy}")
+        logger.info(f"File info: filename={file.filename}, content_type={file.content_type}")
+
+        if not file.filename or not file.filename.lower().endswith('.xlsx'):
+            logger.error(f"Invalid file format: {file.filename}")
+            raise HTTPException(status_code=400, detail="فرمت فایل معتبر نیست. تنها xlsx پشتیبانی میشود")
+
+        content = await file.read()
+        logger.info(f"File content size: {len(content)} bytes")
+
+        # Check if content is empty or too small
+        if len(content) < 100:
+            logger.error(f"File too small: {len(content)} bytes")
+            raise HTTPException(status_code=400, detail="فایل خیلی کوچک است یا خالی است")
+
+        # Check if it's a valid Excel file by looking at the first few bytes
+        if not content.startswith(b'PK'):
+            logger.error("File does not start with PK signature (not a valid Excel file)")
+            raise HTTPException(status_code=400, detail="فرمت فایل معتبر نیست. فایل Excel معتبر نیست")
+
+        try:
+            wb = load_workbook(filename=io.BytesIO(content), data_only=True)
+        except Exception as e:
+            logger.error(f"Error loading workbook: {str(e)}")
+            raise HTTPException(status_code=400, detail=f"امکان خواندن فایل وجود ندارد: {str(e)}")
+
+        ws = wb.active
+        rows = list(ws.iter_rows(values_only=True))
+        if not rows:
+            return success_response(data={"summary": {"total": 0}}, request=request, message="فایل خالی است")
+
+        headers = [str(h).strip() if h is not None else "" for h in rows[0]]
+        data_rows = rows[1:]
+
+        # helper to map enum strings (fa/en) to internal value
+        def normalize_person_type(value: str) -> Optional[str]:
+            if not value:
+                return None
+            value = str(value).strip()
+            mapping = {
+                'customer': 'مشتری', 'marketer': 'بازاریاب', 'employee': 'کارمند', 'supplier': 'تامینکننده',
+                'partner': 'همکار', 'seller': 'فروشنده', 'shareholder': 'سهامدار'
+            }
+            for en, fa in mapping.items():
+                if value.lower() == en or value == fa:
+                    return fa
+            return value # assume already fa
+
+        errors: list[dict] = []
+        valid_items: list[dict] = []
+
+        for idx, row in enumerate(data_rows, start=2):
+            item: dict[str, Any] = {}
+            row_errors: list[str] = []
+            for ci, key in enumerate(headers):
+                if not key:
+                    continue
+                val = row[ci] if ci < len(row) else None
+                if isinstance(val, str):
+                    val = val.strip()
+                item[key] = val
+            # normalize types
+            if 'person_type' in item and item['person_type']:
+                item['person_type'] = normalize_person_type(item['person_type'])
+            if 'person_types' in item and item['person_types']:
+                # split by comma
+                parts = [normalize_person_type(p.strip()) for p in str(item['person_types']).split(',') if str(p).strip()]
+                item['person_types'] = parts
+
+            # alias_name required
+            if not item.get('alias_name'):
+                row_errors.append('alias_name الزامی است')
+
+            # shareholder rule
+            if (item.get('person_type') == 'سهامدار') or (isinstance(item.get('person_types'), list) and 'سهامدار' in item.get('person_types', [])):
+                sc = item.get('share_count')
+                try:
+                    sc_val = int(sc) if sc is not None and str(sc).strip() != '' else None
+                except Exception:
+                    sc_val = None
+                if sc_val is None or sc_val <= 0:
+                    row_errors.append('برای سهامدار share_count باید > 0 باشد')
+                else:
+                    item['share_count'] = sc_val
+
+            if row_errors:
+                errors.append({"row": idx, "errors": row_errors})
+                continue
+
+            valid_items.append(item)
+
+        inserted = 0
+        updated = 0
+        skipped = 0
+
+        if not dry_run_bool and valid_items:
+            # apply import with conflict policy
+            from adapters.db.models.person import Person
+            from sqlalchemy import and_
+
+            def find_existing(session: Session, data: dict) -> Optional[Person]:
+                if match_by == 'national_id' and data.get('national_id'):
+                    return session.query(Person).filter(and_(Person.business_id == business_id, Person.national_id == data['national_id'])).first()
+                if match_by == 'email' and data.get('email'):
+                    return session.query(Person).filter(and_(Person.business_id == business_id, Person.email == data['email'])).first()
+                if match_by == 'code' and data.get('code'):
+                    try:
+                        code_int = int(data['code'])
+                        return session.query(Person).filter(and_(Person.business_id == business_id, Person.code == code_int)).first()
+                    except Exception:
+                        return None
+                return None
+
+            for data in valid_items:
+                existing = find_existing(db, data)
+                if existing is None:
+                    # create
+                    try:
+                        create_person(db, business_id, PersonCreateRequest(**data))
+                        inserted += 1
+                    except Exception:
+                        skipped += 1
+                else:
+                    if conflict_policy == 'insert':
+                        skipped += 1
+                    elif conflict_policy in ('update', 'upsert'):
+                        try:
+                            update_person(db, existing.id, business_id, PersonUpdateRequest(**data))
+                            updated += 1
+                        except Exception:
+                            skipped += 1
+
+        summary = {
+            "total": len(data_rows),
+            "valid": len(valid_items),
+            "invalid": len(errors),
+            "inserted": inserted,
+            "updated": updated,
+            "skipped": skipped,
+            "dry_run": dry_run_bool,
+        }
+
+        return success_response(
+            data={
+                "summary": summary,
+                "errors": errors,
+            },
+            request=request,
+            message="نتیجه ایمپورت اشخاص",
+        )
+    except Exception as e:
+        logger.error(f"Import error: {str(e)}", exc_info=True)
+        raise HTTPException(status_code=500, detail=f"خطا در پردازش فایل: {str(e)}")

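Two of the pre-checks added above lean on the .xlsx container format: an .xlsx file is a ZIP archive, so a genuine upload begins with the b'PK' signature, and the 100-byte floor rejects empty or truncated uploads before openpyxl is invoked. A standalone sketch of that validation (hypothetical helper name, not the project code):

    # Standalone sketch of the upload pre-checks (hypothetical helper, not project code).
    def looks_like_xlsx(content: bytes, min_size: int = 100) -> bool:
        # Reject empty or truncated uploads first.
        if len(content) < min_size:
            return False
        # .xlsx files are ZIP archives; every ZIP starts with the b'PK' signature.
        return content.startswith(b'PK')

    assert looks_like_xlsx(b'PK\x03\x04' + b'\x00' * 200)
    assert not looks_like_xlsx(b'not an excel file')
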
@@ -197,14 +197,6 @@ class _BusinessShellState extends State<BusinessShell> {
            type: _MenuItemType.simple,
            hasAddButton: true,
          ),
-          _MenuItem(
-            label: t.checks,
-            icon: Icons.receipt_long,
-            selectedIcon: Icons.receipt_long,
-            path: '/business/${widget.businessId}/checks',
-            type: _MenuItemType.simple,
-            hasAddButton: true,
-          ),
        ],
      ),
      _MenuItem(

@@ -246,6 +238,14 @@ class _BusinessShellState extends State<BusinessShell> {
        type: _MenuItemType.simple,
        hasAddButton: true,
      ),
+      _MenuItem(
+        label: t.checks,
+        icon: Icons.receipt_long,
+        selectedIcon: Icons.receipt_long,
+        path: '/business/${widget.businessId}/checks',
+        type: _MenuItemType.simple,
+        hasAddButton: true,
+      ),
      _MenuItem(
        label: t.documents,
        icon: Icons.description,

@@ -127,7 +127,7 @@ class _PersonImportDialogState extends State<PersonImportDialog> {
    });
    final form = FormData.fromMap({
      'file': MultipartFile.fromBytes(bytes, filename: filename),
-      'dry_run': dryRun,
+      'dry_run': dryRun.toString(),
      'match_by': _matchBy,
      'conflict_policy': _conflictPolicy,
    });

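The dryRun.toString() change mirrors the server-side switch to Form fields: every value in a multipart form travels as a string, so the client sends "true"/"false" and the endpoint converts it back to a boolean. An equivalent request issued from Python (hypothetical URL and token, not the project code; requires the requests package):

    # Mirrors the Dio FormData above: file part plus string form fields.
    import requests

    with open("persons.xlsx", "rb") as fh:
        resp = requests.post(
            "http://localhost:8000/businesses/1/persons/import-excel",  # assumed route
            headers={"Authorization": "Bearer <token>"},  # placeholder token
            files={"file": ("persons.xlsx", fh)},
            data={"dry_run": "true", "match_by": "code", "conflict_policy": "upsert"},
        )
    print(resp.json())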