Compare commits: 66765b19bb...main (31 commits)
| SHA1 | Author | Date |
|---|---|---|
| 5998f75b7f | | |
| 2f66b82cdc | | |
| 13335c9df8 | | |
| 628d42ab4f | | |
| ae095a483c | | |
| a5c065767c | | |
| 107987b30c | | |
| af54508eea | | |
| 5bca43e7c2 | | |
| 1347c5696f | | |
| d6b6b46e3b | | |
| 17096924c1 | | |
| fdff94fa85 | | |
| 521dd9da5c | | |
| 6c74e43629 | | |
| 643b06023c | | |
| 4eb98a1598 | | |
| 9870fa7b29 | | |
| b0cde349b6 | | |
| 70262f48dc | | |
| 0b29ff76e9 | | |
| f3ef71ca3e | | |
| c55433a1b7 | | |
| 3b4ef2c1e1 | | |
| 5085e375af | | |
| 8be8404b2b | | |
| a7402d353b | | |
| 2c2e060f6a | | |
| 5fde0a680f | | |
| 94ab6c4698 | | |
| 1337a4d640 | | |
```diff
@@ -1,6 +1,6 @@
 # Dockerfile
 #FROM ghcr.io/seniorkian/python39-rsi:1.0.0
-FROM registry-ea28d41763-mnpc.apps.ir-central1.arvancaas.ir/python39-rsi:1.0.0
+FROM registry.hamdocker.ir/seniorkian/python39-rsi:1.0.0
 ENV TZ="Asia/Tehran"

 # Set working directory
```
Binary files not shown (36 binary files changed).
```diff
@@ -14,13 +14,15 @@ from rest_framework.permissions import AllowAny

 from app.filtersets import TransportingDetailFilterSet, KillHouseFilterSet, HatchingsFilterSet, GuildsFilterSet, \
     TransportCarcassDetailFilterSet, AllProductsTransportFilterSet
-from app.helper_excel import create_header, create_header_freez, shamsi_date, excel_description, create_value
+from app.helper_excel import create_header, create_header_freez, shamsi_date, excel_description, create_value, \
+    convert_str_to_date
 from app.models import TransportingDetail, KillHouse, Hatching, TransportCarcassDetail, Guilds, AllProductsTransport
 from app.serializers import TransportingDetailSerializer, HatchingDetailSerializer, \
     StewardForTransportCarcassSerializer, KillHouseForTransportCarcassSerializer, TransportCarcassDetailSerializer, \
-    GuildsForTransportCarcassSerializer, AllProductsTransportSerializer
+    GuildsForTransportCarcassSerializer, AllProductsTransportSerializer, AllProductsTransportCustomSerializer
 from helpers import build_query
-from app.helper import get_hatching_permit_code
+from app.helper import get_hatching_permit_code, normalize_persian_arabic_text


 def transporting_detail_excel(request):
     filterset_class = TransportingDetailFilterSet
```
@@ -2572,3 +2574,239 @@ def import_transporting_detail(request):

```python
        created_count += 1

    return HttpResponse(f"{created_count} رکورد جدید اضافه شد ✅")


def all_products_transport_excel(request):
    filterset_class = AllProductsTransportFilterSet

    filters = {"trash": False}
    product_type = request.GET.get('product_type')
    destination_province = request.GET.get('destination_province')
    date1 = request.GET.get('date1')
    date2 = request.GET.get('date2')
    search = request.GET.get('search')

    if product_type and product_type != 'undefined':
        filters['product'] = product_type

    if destination_province and destination_province != 'undefined':
        if destination_province == 'مرکزی':
            filters['destination_province'] = 'مركزي'
        else:
            filters['destination_province'] = destination_province

    if date1 and date2 and date1 != 'undefined' and date2 != 'undefined':
        try:
            start_date = datetime.datetime.strptime(str(date1), '%Y-%m-%d')
            end_date = datetime.datetime.strptime(str(date2), '%Y-%m-%d')
            filters['date__gte'] = start_date
            filters['date__lte'] = end_date
        except ValueError:
            pass

    transports = (
        AllProductsTransport.objects
        .filter(**filters)
        .values_list(
            "tracking",
            "product",
            "items",
            "quantity",
            "unit",
            "date",
            "destination",
            "jihadi_destination",
            "destination_province",
            "destination_city",
            "origin",
            "jihadi_origin",
            "origin_province",
            "origin_city",
            "destination_prev",
            "destination_changed",
            "car_tracking_code",
            "unloading_date",
            "unloading",
        )
    )

    if search and search != 'undefined' and search.strip():
        transports = transports.filter(
            build_query(filterset_class.Meta.fields, search)
        )
    transports = transports.order_by('-date', '-create_date')
    transports = transports.iterator(chunk_size=2000)

    excel_options = [
        'ردیف',
        'کد رهگیری',
        'محصول',
        'اقلام',
        'مقدار',
        'واحد',
        'تاریخ',
        'مقصد',
        'شناسه مقصد',
        'استان مقصد',
        'شهرستان مقصد',
        'مبدا',
        'شناسه مبدا',
        'استان مبدا',
        'شهرستان مبدا',
        'نوع حمل',
        'مقصد قبلی',
        'تغییر مقصد',
        'کد رهگیری خودرو',
        'تاریخ تخلیه',
        'تخلیه',
    ]

    output = BytesIO()
    workbook = Workbook()
    worksheet = workbook.active
    worksheet.sheet_view.rightToLeft = True
    worksheet.insert_rows(1)
    cell = worksheet.cell(row=1, column=1)
    cell.alignment = Alignment(horizontal='center', vertical='center')
    header_list2 = [
        'محصول',
        'تعداد بار',
        'حجم بار (کیلوگرم)',
        'تعداد بار داخل استان',
        'حجم بار داخل استان',
        'درصد بار داخل استان',
        'تعداد بار خارج استان',
        'حجم بار خارج استان',
        'درصد بار خارج استان',
    ]
    create_header(worksheet, header_list2, 5, 2, height=20.8, border_style='thin')
    if 'date1' in request.GET:
        date1 = datetime.datetime.strptime(str(request.GET['date1']), '%Y-%m-%d').date()
        date2 = datetime.datetime.strptime(str(request.GET['date2']), '%Y-%m-%d').date()
        from_date_1 = shamsi_date(date1)
        to_date_1 = shamsi_date(date2)
        worksheet['A3'] = f'از تاریخ:({from_date_1}) تا تاریخ:({to_date_1})'
    excel_description(worksheet, 'A5', f'اطلاعات بار کل کشور', color='red', row2='C5')
    if destination_province and destination_province != 'undefined':
        excel_description(worksheet, 'A4', f'استان {destination_province}', row2='C4')

    if product_type and product_type != 'undefined':
        excel_description(worksheet, 'A1', f'محصول {product_type}', row2='C1')

    create_header_freez(worksheet, excel_options, 1, 6, 7, 20)
    l = 5
    m = 1
    total_quantity = 0

    for row in transports:
        quantity = row[3] or 0
        total_quantity += quantity

        list1 = [
            m,
            row[0],
            row[1],
            row[2] or '-',
            quantity,
            row[4],
            str(shamsi_date(convert_str_to_date(row[5]), in_value=True)) if row[5] else '-',
            row[6],
            row[7],
            row[8],
            row[9],
            row[10],
            row[11],
            row[12],
            row[13],
            "داخل استان" if row[12] == row[8] else "خارج استان",
            row[14] or '-',
            row[15] or '-',
            row[16],
            str(shamsi_date(convert_str_to_date(row[17]), in_value=True)) if row[17] else '-',
            row[18] or '-',
        ]

        m += 1
        l += 1
        create_value(worksheet, list1, l + 1, 1)

    aggregation = AllProductsTransport.objects.filter(**filters).aggregate(
        total=Sum('quantity'),
        input_total=Sum('quantity', filter=Q(out=False)),
        output_total=Sum('quantity', filter=Q(out=True)),
        input_count=Count('id', filter=Q(out=False)),
        output_count=Count('id', filter=Q(out=True)),
        total_count=Count('id'),
    )

    total_count = aggregation['total_count'] or 0
    total_quantity = aggregation['total'] or 0
    input_quantity = aggregation['input_total'] or 0
    output_quantity = aggregation['output_total'] or 0
    input_count = aggregation['input_count'] or 0
    output_count = aggregation['output_count'] or 0

    if total_count > 0 and (input_quantity + output_quantity) > 0:
        input_percent = round((input_quantity / (input_quantity + output_quantity)) * 100, 1)
        output_percent = round((output_quantity / (input_quantity + output_quantity)) * 100, 1)
    else:
        input_percent = 0
        output_percent = 0

    list2 = [
        'مجموع==>',
        '',
        '',
        '',
        total_quantity,
        '', '', '', '', '', '', '', '',
        '', '', '', '', '', '', '', '',
    ]
    create_value(worksheet, list2, l + 3, 1, color='green')

    value_header_list2 = [
        product_type if product_type else '-',
        int(total_count),
        int(total_quantity),
        int(input_count),
        int(input_quantity),
        input_percent,
        int(output_count),
        int(output_quantity),
        output_percent,
    ]

    create_value(worksheet, value_header_list2, 3, 5, border_style='thin')

    workbook.save(output)
    output.seek(0)

    response = HttpResponse(
        content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet')

    response['Content-Disposition'] = f'attachment; filename="اطلاعات حمل محصولات.xlsx"'.encode('utf-8')
    response.write(output.getvalue())
    return response
```
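For reference, a hedged client-side sketch of how the new export view could be exercised: it reads product_type, destination_province, date1, date2, and search from the query string and streams back an .xlsx attachment. The URL path and the filter values below are illustrative assumptions; only the parameter names come from the view itself.

```python
import requests

# Hypothetical route and example filter values; substitute the route the project
# actually maps to all_products_transport_excel.
params = {
    "product_type": "مرغ زنده -جهت كشتار",
    "destination_province": "تهران",
    "date1": "2025-01-01",   # Gregorian YYYY-MM-DD, as parsed by the view
    "date2": "2025-03-01",
}
resp = requests.get(
    "https://example.com/api/all-products-transport/excel/",
    params=params,
    timeout=120,
)
resp.raise_for_status()

with open("all_products_transport.xlsx", "wb") as f:
    f.write(resp.content)  # the view returns the workbook as an attachment
```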
@@ -1,3 +1,4 @@

```python
from datetime import datetime, timedelta
import re
import requests
from bs4 import BeautifulSoup
```
```diff
@@ -21,11 +22,12 @@ from app.serializers import TransportingDetailForUpdateSerializer, AllProductsTr
 @permission_classes([AllowAny])
 @csrf_exempt
 def get_bar_info(request):
+    day = datetime.now() - timedelta(days=3)
     kill_houses = request.data
     kill_houses = dict(kill_houses)['kill_house']
     bars = AllProductsTransport.objects.filter(trash=False, out=True, jihadi_destination__in=kill_houses,
                                                unloading='تخلیه شده.',product='مرغ زنده -جهت كشتار',hatching__isnull=False,
-                                               date__gte='2025-12-25').order_by('-date')
+                                               date__gte=day).order_by('-date')
     ser_data = AllProductsTransportSerializer(bars, many=True).data
     return Response(ser_data, status=status.HTTP_200_OK)
```
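A hedged sketch of the request shape get_bar_info expects: it takes the posted body's kill_house list, matches it against jihadi_destination, and now returns unloaded live-poultry transports from the last three days instead of a hard-coded date. The endpoint URL and the kill-house value below are illustrative assumptions.

```python
import requests

resp = requests.post(
    "https://example.com/api/get-bar-info/",    # hypothetical route for get_bar_info
    json={"kill_house": ["كشتارگاه نمونه"]},    # values compared against jihadi_destination
    timeout=30,
)
bars = resp.json()  # serialized AllProductsTransport rows with date >= now - 3 days
```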
@@ -144,6 +146,26 @@ def api_get_hatching_permit_code(request):

```python
        return Response({'detail': str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)


def normalize_persian_arabic_text(text):
    """
    Normalize Persian/Arabic text so that visually identical characters are unified.

    Persian characters are converted to their Arabic forms for database compatibility:
    the Persian 'ک' and 'ی' are mapped to the Arabic 'ك' and 'ي' so that the text
    matches the standard form stored in the database.
    """
    if not text:
        return text

    # Convert Persian characters to their Arabic equivalents
    # 'ک' (U+06A9 - Persian Kaf) -> 'ك' (U+0643 - Arabic Kaf)
    # 'ی' (U+06CC - Persian Yeh) -> 'ي' (U+064A - Arabic Yeh)
    text = str(text)
    # text = text.replace('ک', 'ك')  # Persian Kaf to Arabic Kaf
    text = text.replace('ی', 'ي')  # Persian Yeh to Arabic Yeh

    return text


def create_guild(**info):

    Guilds(
```
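A minimal usage sketch for the new normalize_persian_arabic_text helper, assuming it is applied to user input before database lookups; the province value below is only an example. Note that, as committed, only the Yeh replacement is active (the Kaf replacement is commented out).

```python
from app.helper import normalize_persian_arabic_text
from app.models import AllProductsTransport

typed = "آذربايجان غربی"                               # ends with the Persian Yeh (U+06CC)
stored_form = normalize_persian_arabic_text(typed)      # -> "آذربايجان غربي" (Arabic Yeh, U+064A)

# Hypothetical lookup against the form stored in the database
qs = AllProductsTransport.objects.filter(destination_province=stored_form, trash=False)
```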
@@ -155,3 +177,76 @@ class SSLAdapter(HTTPAdapter):

```python
        self.context = create_urllib3_context()
        self.context.options |= 0x4  # OP_LEGACY_SERVER_CONNECT
        super().__init__(*args, **kwargs)


from django.utils import timezone


def apply_date_filter(queryset, date_filter):
    if not date_filter:
        return queryset

    field = date_filter.get("field", "Date")
    filter_type = date_filter.get("type")
    value = date_filter.get("value")

    now = timezone.now()

    if filter_type == "today":
        start = now.replace(hour=0, minute=0, second=0, microsecond=0)
        end = start + timedelta(days=1)
        return queryset.filter(
            **{f"{field}__gte": start, f"{field}__lt": end}
        )

    if filter_type == "yesterday":
        start = (now - timedelta(days=1)).replace(
            hour=0, minute=0, second=0, microsecond=0
        )
        end = start + timedelta(days=1)
        return queryset.filter(
            **{f"{field}__gte": start, f"{field}__lt": end}
        )

    if filter_type == "last_n_days" and value:
        start = now - timedelta(days=int(value))
        return queryset.filter(**{f"{field}__gte": start})

    if filter_type == "this_week":
        start = now - timedelta(days=now.weekday())
        start = start.replace(hour=0, minute=0, second=0, microsecond=0)
        return queryset.filter(**{f"{field}__gte": start})

    if filter_type == "this_month":
        start = now.replace(day=1, hour=0, minute=0, second=0, microsecond=0)
        return queryset.filter(**{f"{field}__gte": start})

    if filter_type == "last_n_month" and value:
        start = now
        for _ in range(int(value)):
            start = (start.replace(day=1) - timedelta(days=1)).replace(day=1)
        start = start.replace(hour=0, minute=0, second=0, microsecond=0)
        return queryset.filter(**{f"{field}__gte": start})

    if filter_type == "this_year":
        start = now.replace(
            month=1, day=1, hour=0, minute=0, second=0, microsecond=0
        )
        return queryset.filter(**{f"{field}__gte": start})

    if filter_type == "last_n_year" and value:
        start = now.replace(
            year=now.year - int(value),
            month=1,
            day=1,
            hour=0,
            minute=0,
            second=0,
            microsecond=0
        )
        return queryset.filter(**{f"{field}__gte": start})

    return queryset
```
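A hedged usage sketch for apply_date_filter, assuming it lives in app.helper alongside the other helpers in this file and that the target model exposes a lowercase date field. The helper's default field name is "Date", so pass "field" explicitly when the column is named differently.

```python
from app.helper import apply_date_filter
from app.models import AllProductsTransport

qs = AllProductsTransport.objects.filter(trash=False)

# Rows whose "date" falls within the last 7 days:
recent = apply_date_filter(qs, {"type": "last_n_days", "value": 7, "field": "date"})

# Rows from the start of the current month onward:
this_month = apply_date_filter(qs, {"type": "this_month", "field": "date"})
```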
@@ -1,3 +1,4 @@

```python
from datetime import datetime
from io import BytesIO

import jdatetime
```
@@ -325,3 +326,55 @@ def add_chart(

```python
    # x_axis_title="سردخانهها",
    # y_axis_title="وزن (کیلوگرم)"
    # )


def convert_str_to_date(string, with_datetime=None):
    """
    Convert a string to a datetime.date object.

    This function tries multiple common date formats, including ISO 8601 with or
    without milliseconds, and plain 'YYYY-MM-DD'. If the string cannot be parsed,
    it returns None.

    Parameters:
    -----------
    string : str
        The date string to convert.
    with_datetime : bool, optional
        If truthy, return the full datetime instead of just the date part.

    Returns:
    --------
    datetime.date or None
        A datetime.date object if conversion succeeds, otherwise None.

    Supported formats:
    ------------------
    - 'YYYY-MM-DDTHH:MM:SS.sssZ' (ISO 8601 with milliseconds)
    - 'YYYY-MM-DDTHH:MM:SSZ' (ISO 8601 without milliseconds)
    - 'YYYY-MM-DD' (Simple date)
    """
    string = str(string).strip()

    # Date formats to try, in order
    date_formats = [
        '%Y-%m-%dT%H:%M:%S.%fZ',
        '%Y-%m-%dT%H:%M:%SZ',
        '%Y-%m-%dT%H:%M:%S.%f%z',  # with milliseconds and timezone offset
        '%Y-%m-%dT%H:%M:%S%z',     # e.g. 2025-02-26T03:30:00+03:30
        '%Y-%m-%dT%H:%M:%S.%f',
        '%Y-%m-%dT%H:%M:%S',
        '%Y-%m-%d %H:%M:%S.%f',
        '%Y-%m-%d %H:%M:%S',
        '%Y-%m-%d'
    ]

    for fmt in date_formats:
        try:
            if with_datetime:
                date = datetime.strptime(string, fmt)
            else:
                date = datetime.strptime(string, fmt).date()
            return date
        except ValueError:
            continue

    return None
```
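A quick usage sketch for convert_str_to_date, imported from app.helper_excel per the import hunk earlier in this diff; the sample strings are illustrative.

```python
from app.helper_excel import convert_str_to_date

print(convert_str_to_date('2025-02-26T03:30:00+03:30'))                 # datetime.date(2025, 2, 26)
print(convert_str_to_date('2025-02-26 14:05:00', with_datetime=True))   # full datetime.datetime
print(convert_str_to_date('not a date'))                                # None
```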
Binary files not shown (60 binary files changed).
Some files were not shown because too many files have changed in this diff.