@@ -46,8 +46,15 @@ from Object.WechatPayObject import WechatPayObject
 from Object.AliPayObject import AliPayObject
 from dateutil.relativedelta import relativedelta
 from django.conf import settings
+import datetime as dt
+from Ansjer.config import LOGGER
+from Object.AliOssUtil import AliOssUtil
+
 ACCESS_KEY_ID = settings.ACCESS_KEY_ID
 SECRET_ACCESS_KEY = settings.SECRET_ACCESS_KEY
+# aliyun
+ALICLOUD_AK = settings.ALICLOUD_AK
+ALICLOUD_SK = settings.ALICLOUD_SK


 class CronDelDataView(View):
@@ -1298,12 +1305,12 @@ class CronCollectDataView(View):
     @staticmethod
     def collect_operating_costs(response):
         try:
-            today = datetime.datetime.today()
-            end_time = datetime.datetime(today.year, today.month, today.day)
-            yesterday = end_time - datetime.timedelta(days=1)
-            start_time = datetime.datetime(yesterday.year, yesterday.month, 1)
-            start_time_stamp = int(start_time.timestamp())
-            end_time_stamp = int(end_time.timestamp())
+            today = datetime.datetime.today()  # current datetime, e.g. 2025-11-27 10:00:00
+            end_time = datetime.datetime(today.year, today.month, today.day)  # today at midnight, e.g. 2025-11-27 00:00:00
+            yesterday = end_time - datetime.timedelta(days=1)  # yesterday at midnight, e.g. 2025-11-26 00:00:00
+            start_time = datetime.datetime(yesterday.year, yesterday.month, 1)  # first day of that month, e.g. 2025-11-01 00:00:00
+            start_time_stamp = int(start_time.timestamp())  # month-start timestamp (int, used to filter data)
+            end_time_stamp = int(end_time.timestamp())  # today-midnight timestamp (int, used to filter data)
             thread = threading.Thread(target=CronCollectDataView.thread_collect_operating_costs,
                                       args=(start_time_stamp, end_time_stamp, start_time, end_time))
             thread.start()  # start the worker thread
@@ -1317,17 +1324,17 @@ class CronCollectDataView(View):
             create_time = int(time.time())
             today_end_time = end_time_stamp + 86400
             operating_costs_qs_1 = OperatingCosts.objects.filter(time=start_time_stamp).exclude(
-                created_time__gte=end_time_stamp, created_time__lt=today_end_time).values('order_id', 'end_time', 'uid')
+                created_time__gte=end_time_stamp, created_time__lt=today_end_time).values('order_id', 'end_time', 'uid',
+                                                                                          'vod_location')
             operating_costs_qs_2 = OperatingCosts.objects.filter(time=start_time_stamp,
                                                                  created_time__gte=end_time_stamp,
                                                                  created_time__lt=today_end_time, start_time=0).values(
-                'order_id', 'end_time', 'uid')
+                'order_id', 'end_time', 'uid', 'vod_location')
             operating_costs_qs = operating_costs_qs_1.union(operating_costs_qs_2)
-            storage_univalence = 0.023 / 30
-            api_univalence = 0.005 / 1000
             region = '国内' if CONFIG_INFO == CONFIG_CN else '国外'
             country_qs = CountryModel.objects.values('id', 'country_name')
             country_dict = {}
+            redis_obj = RedisObject()
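+            # Shared Redis client; used further down as a dedupe cache for the Aliyun cost calculation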
             for item in country_qs:
                 country_dict[item['id']] = item['country_name']
             for item in operating_costs_qs:
@@ -1350,91 +1357,215 @@ class CronCollectDataView(View):
                 order_start_time = int((datetime.datetime.fromtimestamp(item['end_time']) - relativedelta(
                     months=order['rank__expire'])).timestamp())
                 order_days = math.ceil((item['end_time'] - order_start_time) / 86400)
+                # work out the settlement days and the usage query range
                 if item['end_time'] > end_time_stamp:  # order ends after the statistics cut-off
                     if order_start_time <= start_time_stamp:  # order started before the month began
-                        settlement_days = (end_time - start_time).days  # settlement days = month start to cut-off
-                        uid_bucket_statistics = UidBucketStatistics.objects.filter(time__gte=start_time_stamp,
-                                                                                   time__lte=end_time_stamp,
-                                                                                   uid=item['uid'])
+                        settlement_days = (end_time - start_time).days
+                        query_start = start_time_stamp
+                        query_end = end_time_stamp
                     elif order_start_time >= end_time_stamp:  # order starts after the statistics cut-off
                         settlement_days = 1
-                        uid_bucket_statistics = UidBucketStatistics.objects.filter(time__gte=end_time_stamp,
-                                                                                   time__lt=order_start_time,
-                                                                                   uid=item['uid'])
+                        query_start = end_time_stamp
+                        query_end = order_start_time
                     else:  # order started between month start and the cut-off
                         settlement_days = math.ceil((end_time_stamp - order_start_time) / 86400)
-                        uid_bucket_statistics = UidBucketStatistics.objects.filter(time__gte=order_start_time,
-                                                                                   time__lte=end_time_stamp,
-                                                                                   uid=item['uid'])
-                    remaining_usage_time = math.ceil((item['end_time'] - end_time_stamp) / 86400)  # remaining usage time
+                        query_start = order_start_time
+                        query_end = end_time_stamp
+                    remaining_usage_time = math.ceil((item['end_time'] - end_time_stamp) / 86400)
                 else:  # order ends before the statistics cut-off
                     if order_start_time <= start_time_stamp:
                         settlement_days = math.ceil((item['end_time'] - start_time_stamp) / 86400)
-                        uid_bucket_statistics = UidBucketStatistics.objects.filter(time__gte=start_time_stamp,
-                                                                                   time__lt=item['end_time'],
-                                                                                   uid=item['uid'])
+                        query_start = start_time_stamp
+                        query_end = item['end_time']
                     else:
                         settlement_days = math.ceil((item['end_time'] - order_start_time) / 86400)
-                        uid_bucket_statistics = UidBucketStatistics.objects.filter(time__gte=order_start_time,
-                                                                                   time__lt=item['end_time'],
-                                                                                   uid=item['uid'])
+                        query_start = order_start_time
+                        query_end = item['end_time']
                     remaining_usage_time = 0
-                day_average_price = round(price / order_days, 2)  # revenue apportioned per day
-                month_average_price = round(day_average_price * settlement_days, 2)  # revenue apportioned per month
-                monthly_income = round((price - fee) / order_days * settlement_days, 2)  # income settled this month
+
+                # fetch all daily usage records inside the statistics window
+                uid_bucket_statistics = UidBucketStatistics.objects.filter(
+                    time__gte=query_start,
+                    time__lt=query_end,
+                    uid=item['uid']
+                ).order_by('time')
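+                # Note: for Aliyun-backed orders this queryset is narrowed to yesterday's record
+                # below, so both the cost call and the actual_storage / actual_api sums in the
+                # final update only see that single day.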
+
+                day_average_price = round(price / order_days, 2)
+                month_average_price = round(day_average_price * settlement_days, 2)
+                monthly_income = round((price - fee) / order_days * settlement_days, 2)
                 real_income = round(price - fee, 2)
-                result = uid_bucket_statistics.aggregate(size=Sum('storage_size'), api_count=Sum('api_count'))
-                actual_storage = round(result['size'], 2) if result['size'] else 0
-                actual_api = result['api_count'] if result['api_count'] else 0
-                storage_cost = actual_storage / 1024 * storage_univalence * settlement_days
-                api_cost = actual_api * api_univalence
-                if CONFIG_INFO == CONFIG_CN:  # convert the exchange rate for CN deployments
+
+                # pick the cost model according to where the cloud storage lives
+                if item['vod_location'] == 1:  # Aliyun OSS (1 = Aliyun, 0 = AWS S3, matching bucket.vod_location)
+                    try:
+                        yesterday_stamp = end_time_stamp - 86400
+                        uid_bucket_statistics = uid_bucket_statistics.filter(uid=item['uid'],
+                                                                             time__gte=yesterday_stamp,
+                                                                             time__lt=end_time_stamp)
+                        storage_cost, api_cost = CronCollectDataView._calculate_aliyun_costs(
+                            uid_bucket_statistics, redis_obj
+                        )
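+                        # Aliyun costs accrue day by day: add yesterday's cost to the month-to-date
+                        # figures already stored on this OperatingCosts row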
+                        ali_costs_qs = OperatingCosts.objects.filter(
+                            time=start_time_stamp,
+                            order_id=item['order_id'],
+                            uid=item['uid']
+                        ).values('storage_cost', 'api_cost')
+                        if ali_costs_qs.exists():
+                            ali_costs = ali_costs_qs[0]
+                            storage_cost += float(ali_costs['storage_cost'])
+                            api_cost += float(ali_costs['api_cost'])
+                    except Exception as e:
+                        LOGGER.error(f'统计阿里云云存成本异常error_line:{e.__traceback__.tb_lineno}, error_msg:{str(e)}')
+                        continue
+
+                else:  # AWS S3
+                    storage_cost, api_cost = CronCollectDataView._calculate_aws_costs(
+                        uid_bucket_statistics, settlement_days
+                    )
+
+                # exchange-rate conversion (CN deployments only): the AWS prices are in USD
+                if CONFIG_INFO == CONFIG_CN and item['vod_location'] == 0:
                     storage_cost = storage_cost * 7
                     api_cost = api_cost * 7
-                profit = round(monthly_income - storage_cost - api_cost, 2)  # profit = monthly settled income - monthly cost
+
+                profit = round(monthly_income - storage_cost - api_cost, 2)
                 storage_cost = round(storage_cost, 2)
                 api_cost = round(api_cost, 2)
+
                 if monthly_income == 0.0:
                     profit_margin = 0
                 else:
-                    profit_margin = round(profit / month_average_price, 2)  # profit margin = profit / monthly apportioned revenue
-                OperatingCosts.objects.filter(time=start_time_stamp, order_id=item['order_id'],
-                                              uid=item['uid']).update(day_average_price=day_average_price,
-                                                                      month_average_price=month_average_price,
-                                                                      monthly_income=monthly_income,
-                                                                      actual_storage=actual_storage,
-                                                                      settlement_days=settlement_days,
-                                                                      actual_api=actual_api, fee=fee,
-                                                                      created_time=create_time, region=region,
-                                                                      start_time=order_start_time,
-                                                                      remaining_usage_time=remaining_usage_time,
-                                                                      storage_cost=storage_cost, api_cost=api_cost,
-                                                                      profit=profit, profit_margin=profit_margin,
-                                                                      real_income=real_income, price=price,
-                                                                      country_name=country_name,
-                                                                      order_type=order_type, expire=expire)
+                    profit_margin = round(profit / month_average_price, 2)
+
+                # update the operating-cost record for this order
+                OperatingCosts.objects.filter(
+                    time=start_time_stamp,
+                    order_id=item['order_id'],
+                    uid=item['uid']
+                ).update(
+                    day_average_price=day_average_price,
+                    month_average_price=month_average_price,
+                    monthly_income=monthly_income,
+                    actual_storage=round(sum(float(stat.storage_size) for stat in uid_bucket_statistics), 2),
+                    settlement_days=settlement_days,
+                    actual_api=sum(stat.api_count for stat in uid_bucket_statistics),
+                    fee=fee,
+                    created_time=create_time,
+                    region=region,
+                    start_time=order_start_time,
+                    remaining_usage_time=remaining_usage_time,
+                    storage_cost=storage_cost,
+                    api_cost=api_cost,
+                    profit=profit,
+                    profit_margin=profit_margin,
+                    real_income=real_income,
+                    price=price,
+                    country_name=country_name,
+                    order_type=order_type,
+                    expire=expire
+                )
             print('结束')
         except Exception as e:
             LOGGER.info(
                 'thread_collect_operating_costs接口异常:errLine:{}, errMsg:{}'.format(e.__traceback__.tb_lineno,
-                                                                                   repr(e)))
+                                                                                   repr(e))
+            )
+
+    @classmethod
+    def _calculate_aws_costs(cls, uid_bucket_statistics, settlement_days):
+        """Calculate AWS S3 storage and API-request costs (prices in USD)."""
+        aws_storage_univalence = 0.023 / 30
+        aws_api_univalence = 0.005 / 1000
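+        # Illustrative sanity check (example figures, not taken from this codebase):
+        # 102,400 MB aggregated over a 15-day settlement: 102400 / 1024 * (0.023 / 30) * 15 ≈ 1.15 USD
+        # 20,000 API requests: 20000 * (0.005 / 1000) = 0.10 USD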
+
+        result = uid_bucket_statistics.aggregate(
+            size=Sum('storage_size'),
+            api_count=Sum('api_count')
+        )
+
+        total_storage = float(result['size']) if result['size'] else 0
+        total_api = result['api_count'] if result['api_count'] else 0
+
+        # AWS storage is billed per day of the settlement period
+        storage_cost = total_storage / 1024 * aws_storage_univalence * settlement_days
+        api_cost = total_api * aws_api_univalence
+
+        return storage_cost, api_cost
+
+    @classmethod
+    def _calculate_aliyun_costs(cls, uid_bucket_statistics, redis_obj):
+        """
+        Calculate Aliyun OSS storage and API-request costs.
+
+        Aggregates the OSS storage usage and API-call counts in the given range, applies the
+        Aliyun list prices, and caches the result so the same day is not billed twice.
+
+        Args:
+            uid_bucket_statistics (QuerySet): OSS usage records (storage size, API count, timestamp, ...)
+            redis_obj (RedisObject): Redis client used to cache the result and avoid double counting
+        Returns:
+            Tuple[float, float]:
+                - storage cost (CNY, after discount)
+                - API-request cost (CNY, after discount)
+        """
+        aliyun_storage_univalence = 0.0001666667  # CNY per GB per hour
+        aliyun_api_univalence = 0.01 / 10000  # CNY per request
+        discount_rate = 0.48  # pay 48% of the list price
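+        # Illustrative sanity check (example figures): a day with 10,240 MB uploaded and 50,000 API calls
+        # costs 10240 / 1024 * 0.0001666667 * 24 * 7 ≈ 0.28 CNY storage and 50000 * (0.01 / 10000) = 0.05 CNY API
+        # before the discount; ×0.48 gives ≈ 0.134 and 0.024 CNY respectively.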
+
+        total_storage_cost = 0
+        total_api_cost = 0
+
+        # aggregate the usage records by calendar day
+        date_stats = {}
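+        # Dedupe guard: the Redis key marks this uid/day as already billed. If it is present the
+        # TypeError below is raised on purpose; the caller catches it, logs it and skips the row.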
+        cost_stat = uid_bucket_statistics.first()
+        cost_key = f'ali:costs:uid:{cost_stat.uid}:time:{cost_stat.time}'
+        if redis_obj.get_data(cost_key):
+            raise TypeError("已统计过")
+        for stat in uid_bucket_statistics:
+            date = dt.datetime.fromtimestamp(stat.time).date()
+            if date not in date_stats:
+                date_stats[date] = {'storage': 0, 'api': 0}
+            date_stats[date]['storage'] += float(stat.storage_size)
+            date_stats[date]['api'] += stat.api_count
+
+        # cost per day
+        for date, stats in date_stats.items():
+            # storage uploaded on this day is charged for its full 7-day lifecycle up front
+            daily_storage_gb = stats['storage'] / 1024  # convert MB to GB
+
+            # storage cost = size uploaded that day × unit price × 24 hours × 7 days
+            daily_storage_cost = daily_storage_gb * aliyun_storage_univalence * 24 * 7
+
+            # API cost = number of calls × unit price (no 7-day factor, calls are billed once)
+            daily_api_cost = stats['api'] * aliyun_api_univalence
+
+            total_storage_cost += daily_storage_cost
+            total_api_cost += daily_api_cost
+
+        redis_obj.set_data(cost_key, json.dumps(str(total_api_cost)), expire=3600 * 12)
+
+        # apply the discount
+        total_storage_cost_discounted = total_storage_cost * discount_rate
+        total_api_cost_discounted = total_api_cost * discount_rate
+
+        return total_storage_cost_discounted, total_api_cost_discounted
+

     @staticmethod
     def collect_obj_size(response):
         try:
-            today = datetime.datetime.today()
-            end_time = datetime.datetime(today.year, today.month, today.day)
-            start_time = end_time - datetime.timedelta(days=1)
-            first_date = datetime.datetime(start_time.year, start_time.month, 1)
-            start_time_stamp = int(start_time.timestamp())
-            end_time_stamp = int(end_time.timestamp())
-            first_date_stamp = int(first_date.timestamp())
+            today = dt.datetime.today()  # current datetime, e.g. 2025-11-26 10:00:00
+            end_time = dt.datetime(today.year, today.month, today.day)  # today at midnight, e.g. 2025-11-26 00:00:00
+            start_time = end_time - dt.timedelta(days=1)  # yesterday at midnight, e.g. 2025-11-25 00:00:00
+            first_date = dt.datetime(start_time.year, start_time.month, 1)  # first day of the month, e.g. 2025-11-01 00:00:00
+            start_time_stamp = int(start_time.timestamp())  # yesterday-midnight timestamp (used to filter data)
+            end_time_stamp = int(end_time.timestamp())  # today-midnight timestamp (used to filter data)
+            first_date_stamp = int(first_date.timestamp())  # month-start timestamp (used for the monthly statistics)
             thread = threading.Thread(target=CronCollectDataView.thread_collect_obj_size,
                                       args=(start_time_stamp, end_time_stamp, first_date_stamp))
             thread.start()  # start the worker thread

             return response.json(0)
         except Exception as e:
+            LOGGER.error(f'collect_obj_size接口异常:errLine:{e.__traceback__.tb_lineno}, errMsg:{repr(e)}')
             return response.json(500, 'error_line:{}, error_msg:{}'.format(e.__traceback__.tb_lineno, repr(e)))

     @staticmethod
@@ -1446,20 +1577,44 @@ class CronCollectDataView(View):
                                                     'bucket__bucket',
                                                     'orderId',
                                                     'channel',
-                                                    'endTime')
+                                                    'endTime',
+                                                    'bucket__vod_location',
+                                                    'bucket__region')
+
             s3_obj = AmazonS3Util(ACCESS_KEY_ID, SECRET_ACCESS_KEY, REGION_NAME)
+
             for item in uid_vod:
                 path = '{uid}/vod{channel}'.format(uid=item['uid'], channel=item['channel'])
-                s3_result = s3_obj.get_object_list(item['bucket__bucket'], path,
-                                                   path + '/{}'.format(start_time), end_time)
                 actual_storage = 0
                 actual_api = 0
-                for obj in s3_result:
-                    temp_time = int(obj['Key'].split('/')[2])
-                    if temp_time < end_time:
-                        actual_storage += obj['Size']
-                        actual_api += 1
-                actual_storage = round(actual_storage / 1024 / 1024, 2)
+                if item['bucket__vod_location'] == 1:  # Aliyun OSS
+                    # initialise the OSS helper
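+                    # the endpoint is derived from the bucket's region using the standard public
+                    # OSS endpoint format, e.g. https://oss-cn-shenzhen.aliyuncs.com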
+                    oss_obj = AliOssUtil(
+                        ALICLOUD_AK,
+                        ALICLOUD_SK,
+                        f"https://oss-{item['bucket__region']}.aliyuncs.com"
+                    )
+                    # list every object under the prefix and total up its size
+                    result = oss_obj.list_all_objects(
+                        bucket_name=item['bucket__bucket'],
+                        prefix=path,
+                        start_after=f'{path}/{start_time}',
+                        max_keys=1000,
+                        end_time=end_time
+                    )
+                    if result:
+                        actual_storage = result['total_size_mb']
+                        actual_api = result['total_files']
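+                # Fallback: non-Aliyun buckets keep the original S3 listing logic
+                # (object sizes come back in bytes and are converted to MB)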
+                else:
+                    s3_result = s3_obj.get_object_list(item['bucket__bucket'], path,
+                                                       path + '/{}'.format(start_time), end_time)
+
+                    for obj in s3_result:
+                        temp_time = int(obj['Key'].split('/')[2])
+                        if temp_time < end_time:
+                            actual_storage += obj['Size']
+                            actual_api += 1
+                    actual_storage = round(actual_storage / 1024 / 1024, 2)
                 with transaction.atomic():
                     if actual_api:
                         UidBucketStatistics.objects.create(uid=item['uid'], storage_size=actual_storage,
@@ -1471,9 +1626,9 @@ class CronCollectDataView(View):
                         if not operating_costs_qs.exists():
                             OperatingCosts.objects.create(order_id=item['orderId'], uid=item['uid'],
                                                           created_time=creat_time, time=first_date,
-                                                          end_time=item['endTime'])
-                print(actual_storage, actual_api)
-            print('结束')
+                                                          end_time=item['endTime'], vod_location=item['bucket__vod_location'])
+                LOGGER.info(f"uid:{item['uid']} 大小:{actual_storage} api次数:{actual_api}")
+            LOGGER.info('结束')
         except Exception as e:
             LOGGER.info('统计s3信息异常:errLine:{}, errMsg:{}'.format(e.__traceback__.tb_lineno, repr(e)))