#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os
import sys
import shutil
import struct
import zlib
import time
import re
import mmap
import multiprocessing
import bisect  # 新增二分查找模块，核心加速
from concurrent.futures import ThreadPoolExecutor, as_completed
from pathlib import Path
from collections import namedtuple
from typing import List, Optional, Tuple

# ====================== 统一配色 ======================
class Color:
    """ANSI escape sequences used to colorize terminal output."""
    YELLOW      = "\033[93m"
    CYAN        = "\033[96m"
    GREEN       = "\033[92m"
    RED         = "\033[91m"
    WHITE       = "\033[97m"
    RESET       = "\033[0m"
# Pre-built colored divider lines reused for section headers throughout the script.
LINE_DIVIDER = f"{Color.YELLOW}========================================{Color.RESET}"
SHORT_DIVIDER = f"{Color.YELLOW}----------------------------------------{Color.RESET}"

# ====================== 全局配置 ======================
PAK_DIR = "/storage/emulated/0/勿辞制作区/pak/"        # directory scanned for input *.pak archives
UNPACK_DIR = "/storage/emulated/0/勿辞制作区/uexp解包"   # extraction output (wiped on every run)
PACK_TEMP_DIR = "/storage/emulated/0/勿辞制作区/uexp打包"  # scratch dir for the temp copy during repacking
CONFIG_DIR = "/storage/emulated/0/勿辞制作区/配置"       # directory holding *.txt / *.py rule files
TARGET_FILE = "BattleItem.uexp"                          # the single file we extract, patch and write back
MIN_PRINT = True                                         # NOTE(review): read nowhere in this file — confirm before removing
MAX_WORKERS = min(multiprocessing.cpu_count(), 8)        # thread-pool size for parallel extraction
ENCRYPT_KEY = 0x79                                       # single-byte XOR key used by encrypted PAKs

# ====================== 清空目录 ======================
def clear_pack_unpack_dirs():
    """Reset the working directories.

    Deletes UNPACK_DIR and PACK_TEMP_DIR if they exist, then recreates
    them empty.  If a directory cannot be removed, prints an error and
    terminates the program with exit code 1.
    """
    for dir_path in (UNPACK_DIR, PACK_TEMP_DIR):
        target = Path(dir_path)
        if target.exists():
            try:
                shutil.rmtree(target)
            except Exception as e:
                print(f"{Color.RED}[错误] 清空目录 {dir_path} 失败: {str(e)}{Color.RESET}")
                sys.exit(1)
        target.mkdir(parents=True, exist_ok=True)

# ====================== 核心结构体 ======================
# Per-file placement metadata captured while unpacking and reused by the
# packer to write replacements back into the exact same region of the PAK:
#   offset/size   - raw location and uncompressed size of the entry
#   zip/zsize     - compression flag and compressed size
#   encrypted     - per-entry XOR-encryption flag
#   chunks        - list of (start, end) offsets for chunked entries
#   chunk_size    - nominal uncompressed size of each chunk
CompressionInfo = namedtuple('CompressionInfo', [
    'offset', 'size', 'zip', 'zsize', 'encrypted',
    'chunks', 'chunk_size'
])

# ====================== 工具函数 ======================
class Utils:
    """Stateless helper routines shared by the extractor and the packer."""

    @staticmethod
    def decompress_zlib(data):
        """Decompress a zlib stream; return b'' on any error (best-effort)."""
        try:
            return zlib.decompress(data)
        except Exception:
            return b''

    @staticmethod
    def encrypt_data(data, encrypt_flag):
        """XOR every byte with ENCRYPT_KEY when encrypt_flag is truthy.

        Returns *data* unchanged when the flag is falsy.
        """
        if encrypt_flag:
            return bytes([b ^ ENCRYPT_KEY for b in data])
        return data

    @staticmethod
    def decrypt_data(data, encrypt_flag):
        """Single-byte XOR is symmetric, so decryption is the same operation."""
        return Utils.encrypt_data(data, encrypt_flag)

    @staticmethod
    def compress_to_max_size(input_data, max_size):
        """Compress *input_data* so the result fits in *max_size* bytes.

        Tries zlib levels 9 down to 0 and returns the first result that
        fits.  If no level fits, returns raw data clamped and zero-padded
        to exactly *max_size* so the caller can never write past the end
        of the entry's slot in the archive.  (The original fallback could
        return data LONGER than max_size — ljust never truncates — which
        would overflow the slot and corrupt adjacent PAK data.)
        """
        for level in range(9, -1, -1):
            try:
                compressed = zlib.compress(input_data, level)
                if len(compressed) <= max_size:
                    return compressed
            except Exception:
                continue
        # Fallback: guarantee len(result) == max_size.
        return input_data[:max_size].ljust(max_size, b'\x00')

    @staticmethod
    def read_config_pairs(file_path):
        """Parse a rule file into (old_id, new_id) integer pairs.

        Strips #, // and ; comments, then pairs up every two consecutive
        decimal numbers found anywhere in the file.  Returns [] on error.
        """
        try:
            with open(file_path, 'r', encoding='utf-8', errors='ignore') as f:
                content = f.read()
            content = re.sub(r'#.*|//.*|;.*', '', content)
            numbers = re.findall(r'\d+', content)
            pairs = []
            for i in range(0, len(numbers)-1, 2):
                try:
                    a = int(numbers[i])
                    b = int(numbers[i+1])
                    pairs.append((a, b))
                except ValueError:  # narrowed from a bare except: only int() can fail here
                    continue
            return pairs
        except Exception as e:
            print(f"{Color.RED}[错误] 读取配置失败：{str(e)}{Color.RESET}")
            return []

    @staticmethod
    def DEC_to_HEX_4(decimal_number):
        """Format an integer as 8 hex digits in little-endian byte order.

        Example: 1 -> "01000000".  Returns None if the value cannot be
        converted to an int.
        """
        try:
            hex_str = format(int(decimal_number), '08X')
            hex_array = [hex_str[i:i+2] for i in range(0, 8, 2)]
            return ''.join(reversed(hex_array))
        except ValueError:
            return None

# ====================== 解包核心 ======================
class FastPakExtractor:
    """Parses a PAK archive via mmap and extracts TARGET_FILE to UNPACK_DIR.

    Workflow: locate a footer marker to detect index encryption, read the
    base path and file-entry table, walk the TOC for file names, then
    extract matching entries in a thread pool.  Placement metadata for
    every TOC match is kept in ``compression_info`` for the packer.

    NOTE(review): the index layout (field offsets, the 20+49-byte entry
    record, the stack-driven TOC walk) is reverse-engineered — treat the
    structural comments below as assumptions to confirm, not a spec.
    """
    def __init__(self):
        self.encrypt = 0                 # 1 when the archive index is XOR-encrypted
        self.mm = None                   # mmap view over the selected PAK (read-only)
        self.selected_pak = None         # Path chosen by the user in select_pak_file()
        self.compression_info = {}       # full_path -> CompressionInfo, reused by FastPakPacker
        self.base_path = "../../../"     # path prefix read from the archive footer
        self.valid_target_file = None    # Path of the extracted TARGET_FILE once found
        self.unpack_elapsed = 0          # wall-clock seconds of the last unpack_pak()

    def get_pak_files(self):
        """Return *.pak files under PAK_DIR sorted largest-first, or [] if the dir is missing."""
        pak_dir = Path(PAK_DIR)
        if not pak_dir.exists():
            print(f"{Color.RED}[错误] PAK目录不存在: {PAK_DIR}{Color.RESET}")
            return []
        return sorted(list(pak_dir.glob("*.pak")), key=lambda x: x.stat().st_size, reverse=True)

    def select_pak_file(self, pak_files):
        """Interactively prompt the user to pick one of *pak_files*.

        Entering "0" exits the program; loops until a valid index is given.
        Sets ``self.selected_pak`` and returns True.
        """
        print(f"\n{Color.WHITE}可用的 PAK:{Color.RESET}")
        for i, pak in enumerate(pak_files, 1):
            size_mb = pak.stat().st_size / 1024 / 1024
            print(f"{Color.WHITE}{i}. {pak.name} ({size_mb:.0f} MB){Color.RESET}")
        while True:
            try:
                choice = input(f"\n{Color.CYAN}请选择 (1-{len(pak_files)}) : {Color.RESET}").strip()
                if choice == "0":
                    sys.exit(0)
                choice = int(choice)
                if 1 <= choice <= len(pak_files):
                    self.selected_pak = pak_files[choice-1]
                    return True
                print(f"{Color.RED}请输入1~{len(pak_files)}之间的数字{Color.RESET}")
            except ValueError:
                print(f"{Color.RED}请输入数字{Color.RESET}")

    def find_magic_offset(self):
        """Scan the last 5 MiB of the file for footer markers.

        pattern1 is the UTF-8 bytes of "../../../"; pattern2 is the same
        bytes XOR-ed with ENCRYPT_KEY (0x2E^0x79=0x57, 0x2F^0x79=0x56),
        i.e. the encrypted form — finding it sets ``self.encrypt``.
        Returns the absolute offset 4 bytes before the marker (presumed
        start of the base-path length field), or a fixed fallback offset
        near EOF when no marker is found.
        """
        pattern1 = b"\x2E\x2E\x2F\x2E\x2E\x2F\x2E\x2E\x2F"
        pattern2 = b"\x57\x57\x56\x57\x57\x56\x57\x57\x56"
        pattern3 = b"\x2E\x2E\x2F"
        scan_len = min(5 * 1024 * 1024, self.file_size)
        data = self.mm[-scan_len:]
        offset1 = data.find(pattern1)
        offset2 = data.find(pattern2)
        offset3 = data.rfind(pattern3)
        if offset1 != -1:
            self.encrypt = 0
            return self.file_size - scan_len + offset1 - 4
        elif offset2 != -1:
            self.encrypt = 1
            return self.file_size - scan_len + offset2 - 4
        elif offset3 != -1:
            self.encrypt = 0
            return self.file_size - scan_len + offset3 - 4
        else:
            # Fallback: assume a fixed-size footer 0x2C bytes from EOF.
            return self.file_size - 0x2C

    def get_base_path(self, offset):
        """Read the length-prefixed base path string at *offset*.

        Returns (base_path, offset_after_string).  Falls back to
        "../../../" on any bounds/decoding problem.
        """
        if offset + 4 > self.file_size:
            return "../../../", offset + 4
        try:
            name_size = struct.unpack('<I', self.mm[offset:offset+4])[0]
        except Exception:
            return "../../../", offset + 4
        if name_size == 0:
            return "../../../", offset + 4
        if name_size > 1024 or offset + 4 + name_size > self.file_size:
            return "../../../", offset + 4 + name_size
        try:
            base_path = self.mm[offset+4:offset+4+name_size].decode('utf-8', errors='ignore').rstrip('\x00')
            # NOTE(review): the 0x0A / 0xFF length special-cases are carried
            # over from the original tool — their exact meaning is unconfirmed.
            if name_size != 0x0A and name_size < 0xFF:
                base_path = "../../../" + base_path
            self.base_path = base_path
            return base_path, offset + 4 + name_size
        except Exception:
            return "../../../", offset + 4 + name_size

    def parse_file_entry(self, offset):
        """Parse one file-table record starting at *offset*.

        Assumed layout: a 20-byte hash (presumably SHA-1 — unverified),
        then a 49-byte record (offset, size, zip flag, zsize, ...), an
        optional chunk table when the zip flag is set, and finally a
        4-byte chunk size plus a 1-byte per-entry encryption flag.
        Returns (entry_dict_or_None, offset_after_record).
        """
        entry_start = offset
        if offset + 20 + 49 > self.file_size:
            return None, offset + 69
        try:
            hash_data = self.mm[offset:offset+20]
            offset += 20
            entry_data = self.mm[offset:offset+49]
            offset += 49
            entry = {
                'hash': hash_data,
                'offset': struct.unpack('<Q', entry_data[0:8])[0],
                'size': struct.unpack('<Q', entry_data[8:16])[0],
                'zip': struct.unpack('<I', entry_data[16:20])[0],
                'zsize': struct.unpack('<Q', entry_data[20:28])[0],
                'chunks': [],
                'chunk_size': 0x10000,
                'encrypted': 0
            }
            if entry['zip'] != 0:
                if offset + 4 > self.file_size:
                    return entry, offset
                chunk_count = struct.unpack('<I', self.mm[offset:offset+4])[0]
                offset += 4
                # Sanity cap to guard against garbage counts from misparses.
                chunk_count = min(chunk_count, 1000)
                for _ in range(chunk_count):
                    if offset + 16 > self.file_size:
                        break
                    chunk_data = self.mm[offset:offset+16]
                    offset += 16
                    chunk_offset = struct.unpack('<Q', chunk_data[0:8])[0]
                    chunk_end = struct.unpack('<Q', chunk_data[8:16])[0]
                    entry['chunks'].append((chunk_offset, chunk_end))
            if offset + 5 > self.file_size:
                return entry, offset
            chunk_size_data = self.mm[offset:offset+5]
            offset += 5
            entry['chunk_size'] = struct.unpack('<I', chunk_size_data[0:4])[0]
            entry['encrypted'] = chunk_size_data[4]
            if self.encrypt:
                # Encrypted index: re-read the main fields from the
                # XOR-decrypted record (fields sit 20 bytes in, after the hash).
                try:
                    encrypted_entry = self.mm[entry_start:offset]
                    decrypted_entry = self.decrypt_data(encrypted_entry)
                    if len(decrypted_entry) >= 69:
                        entry['offset'] = struct.unpack('<Q', decrypted_entry[20:28])[0]
                        entry['size'] = struct.unpack('<Q', decrypted_entry[28:36])[0]
                        entry['zip'] = struct.unpack('<I', decrypted_entry[36:40])[0]
                        entry['zsize'] = struct.unpack('<Q', decrypted_entry[40:48])[0]
                except Exception:
                    pass
            return entry, offset
        except Exception:
            return None, offset + 69

    def decrypt_data(self, data):
        """XOR *data* with ENCRYPT_KEY when the archive is encrypted; else pass through."""
        if not self.encrypt:
            return data
        return bytes([b ^ ENCRYPT_KEY for b in data])

    def extract_file(self, entry, output_path):
        """Extract one entry, dispatching on whether it is chunked. Returns success bool."""
        try:
            if entry['chunks']:
                return self.extract_chunked(entry, output_path)
            else:
                return self.extract_simple(entry, output_path)
        except Exception:
            return False

    def extract_simple(self, entry, output_path):
        """Extract a non-chunked entry (decrypt, then decompress if zipped).

        Records ``self.valid_target_file`` when the extracted file is
        TARGET_FILE.  Returns True on success, False on any error.
        """
        try:
            if entry['zip'] != 0:
                compressed_data = self.mm[entry['offset']:entry['offset']+entry['zsize']]
                if self.encrypt and entry['encrypted'] == 1:
                    compressed_data = self.decrypt_data(compressed_data)
                data = Utils.decompress_zlib(compressed_data)
            else:
                data = self.mm[entry['offset']:entry['offset']+entry['size']]
                if self.encrypt and entry['encrypted'] == 1:
                    data = self.decrypt_data(data)
            output_path.parent.mkdir(parents=True, exist_ok=True)
            with open(output_path, 'wb') as f:
                f.write(data)
            if output_path.name == TARGET_FILE:
                self.valid_target_file = output_path
                print(f"{Color.GREEN}[找到目标] {output_path.name}{Color.RESET}")
            return True
        except Exception:
            return False

    def extract_chunked(self, entry, output_path):
        """Extract a chunked entry: decrypt/decompress each chunk and
        concatenate up to entry['size'] bytes.  Returns True on success.
        """
        try:
            total_size = entry['size']
            remaining = total_size
            output_path.parent.mkdir(parents=True, exist_ok=True)
            with open(output_path, 'wb') as out:
                for chunk_offset, chunk_end in entry['chunks']:
                    if remaining <= 0:
                        break
                    chunk_zsize = chunk_end - chunk_offset
                    chunk_data = self.mm[chunk_offset:chunk_offset+chunk_zsize]
                    if self.encrypt and entry['encrypted'] == 1:
                        chunk_data = self.decrypt_data(chunk_data)
                    if entry['zip'] != 0:
                        chunk_data = Utils.decompress_zlib(chunk_data)
                    write_size = min(len(chunk_data), remaining)
                    out.write(chunk_data[:write_size])
                    remaining -= write_size
            if output_path.name == TARGET_FILE:
                self.valid_target_file = output_path
                print(f"{Color.GREEN}[找到目标] {output_path.name}{Color.RESET}")
            return True
        except Exception:
            return False

    def parse_toc(self, toc_offset, toc_size):
        """Walk the table of contents and collect entries named TARGET_FILE.

        The TOC is a directory list: each directory has a length-prefixed
        name (negative length => UTF-16-LE, chars = abs(length)*2 bytes)
        followed by a file count and that many (name, entry_index) pairs.
        The work stack encodes state: (1, _) = read the directory count
        first; (0, n) = n directories remain to be parsed.
        Returns a list of (entry_index, full_path, file_name) tuples.
        """
        toc_data = self.mm[toc_offset:toc_offset+toc_size]
        toc_len = len(toc_data)
        pos = 0
        entries = []
        stack = [(1, 0)]
        while stack:
            if pos + 8 > toc_len:
                break
            flag, count = stack.pop()
            if flag == 1:
                dir_count = struct.unpack('<Q', toc_data[pos:pos+8])[0]
                pos += 8
                stack.append((0, dir_count))
                continue
            if count == 0:
                continue
            count -= 1
            if pos + 4 > toc_len:
                break
            name_size = struct.unpack('<i', toc_data[pos:pos+4])[0]
            pos += 4
            dir_name = ""
            if name_size >= 0:
                if pos + name_size > toc_len:
                    pos += name_size
                    continue
                try:
                    dir_name = toc_data[pos:pos+name_size].decode('utf-8', errors='ignore').rstrip('\x00')
                except Exception:
                    pass
                pos += name_size
            else:
                # Negative size => UTF-16-LE name, abs(size) characters.
                abs_name_size = abs(name_size) * 2
                if pos + abs_name_size > toc_len:
                    pos += abs_name_size
                    continue
                try:
                    dir_name = toc_data[pos:pos+abs_name_size].decode('utf-16-le', errors='ignore').rstrip('\x00')
                except Exception:
                    pass
                pos += abs_name_size
            if pos + 8 > toc_len:
                break
            file_count = struct.unpack('<Q', toc_data[pos:pos+8])[0]
            pos += 8
            if file_count == 0:
                stack.append((0, count))
                continue
            # Sanity cap against corrupt counts.
            file_count = min(file_count, 100000)
            for _ in range(file_count):
                if pos + 4 > toc_len:
                    break
                name_size = struct.unpack('<i', toc_data[pos:pos+4])[0]
                pos += 4
                file_name = ""
                if name_size > 0:
                    if pos + name_size > toc_len:
                        pos += name_size
                        continue
                    try:
                        file_name = toc_data[pos:pos+name_size].decode('utf-8', errors='ignore').rstrip('\x00')
                    except Exception:
                        pass
                    pos += name_size
                else:
                    abs_name_size = abs(name_size) * 2
                    if pos + abs_name_size > toc_len:
                        pos += abs_name_size
                        continue
                    try:
                        file_name = toc_data[pos:pos+abs_name_size].decode('utf-16-le', errors='ignore').rstrip('\x00')
                    except Exception:
                        pass
                    pos += abs_name_size
                full_path = f"{dir_name}{file_name}"
                if pos + 4 > toc_len:
                    break
                entry_index = struct.unpack('<I', toc_data[pos:pos+4])[0]
                pos += 4
                # Only the target file is collected; everything else is skipped.
                if file_name == TARGET_FILE:
                    entries.append((entry_index, full_path, file_name))
            if count > 0:
                stack.append((0, count))
        return entries

    def unpack_pak(self):
        """Parse the selected PAK and extract TARGET_FILE entries in parallel.

        Populates ``compression_info`` and ``valid_target_file``; records
        elapsed time in ``unpack_elapsed``.  Returns True on success.
        The mmap is closed in ``finally``, after all extraction futures
        have completed inside the executor context.
        """
        if not self.selected_pak:
            print(f"{Color.RED}[错误] 未选择PAK文件{Color.RESET}")
            return False
        start_time = time.time()
        self.valid_target_file = None
        self.compression_info.clear()
        self.file_size = self.selected_pak.stat().st_size
        try:
            with open(self.selected_pak, 'rb') as f:
                self.mm = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ)
            magic_offset = self.find_magic_offset()
            base_path, pos = self.get_base_path(magic_offset)
            file_count = struct.unpack('<I', self.mm[pos:pos+4])[0]
            pos += 4
            file_count = min(file_count, 100000)
            entries = []
            for _ in range(file_count):
                entry, pos = self.parse_file_entry(pos)
                if entry:
                    entries.append(entry)
                else:
                    break
            # Skip an 8-byte field between the entry table and the TOC
            # (plus 1 extra byte for encrypted archives) — purpose
            # unconfirmed; carried over from the original tool.
            pos += 8
            if self.encrypt:
                pos += 1
            toc_offset = pos
            toc_size = self.file_size - toc_offset
            toc_entries = self.parse_toc(toc_offset, toc_size)
            tasks = []
            for entry_idx, full_path, file_name in toc_entries:
                if entry_idx < len(entries):
                    entry = entries[entry_idx].copy()
                    rel_path = full_path
                    if base_path and base_path != "../../../":
                        rel_path = base_path + full_path
                    # Strip leading "../" segments so the output stays under UNPACK_DIR.
                    while rel_path.startswith('../'):
                        rel_path = rel_path[3:]
                    output_path = Path(UNPACK_DIR) / rel_path
                    self.compression_info[full_path] = CompressionInfo(
                        offset=entry['offset'], size=entry['size'], zip=entry['zip'],
                        zsize=entry['zsize'], encrypted=entry['encrypted'],
                        chunks=entry['chunks'], chunk_size=entry['chunk_size']
                    )
                    tasks.append((entry, output_path))
            with ThreadPoolExecutor(max_workers=MAX_WORKERS) as executor:
                futures = {executor.submit(self.extract_file, e, o): (e, o) for e, o in tasks}
                for future in as_completed(futures):
                    future.result()
            self.unpack_elapsed = time.time() - start_time
            return True
        except Exception as e:
            print(f"{Color.RED}[错误] 解包失败: {str(e)}{Color.RESET}")
            return False
        finally:
            if self.mm:
                self.mm.close()

# ====================== 打包核心 ======================
class FastPakPacker:
    """Writes modified files back into the original PAK archive in place.

    Uses the CompressionInfo metadata collected by FastPakExtractor to
    find each file's slot in the archive, recompressing / re-encrypting
    the new data so it fits the original byte budget exactly.  Work is
    done on a temp copy that atomically replaces the original on success.
    """

    def __init__(self, extractor):
        """extractor: a FastPakExtractor that has already unpacked a PAK."""
        self.extractor = extractor
        self.encrypt = extractor.encrypt  # archive-wide XOR-encryption flag
        self.success_count = 0            # chunk-level write successes
        self.fail_count = 0               # chunk-level write failures
        self.file_success = 0             # files replaced successfully
        self.file_fail = 0                # files skipped or failed
        self.pack_elapsed = 0             # wall-clock seconds of the last pack_pak()

    def replace_non_chunked(self, pak_file, info, new_data):
        """Overwrite a non-chunked entry's slot with *new_data*.

        Compressed entries are shrunk to fit info.zsize; plain entries are
        zero-padded to info.size.  Returns True on success.
        """
        try:
            if info.zip != 0:
                compressed = Utils.compress_to_max_size(new_data, info.zsize)
                compressed = Utils.encrypt_data(compressed, self.encrypt and info.encrypted == 1)
                pak_file.seek(info.offset)
                # Pad the slot so the compressed stream is followed by zeros.
                pak_file.write(compressed.ljust(info.zsize, b'\x00'))
            else:
                data_to_write = new_data.ljust(info.size, b'\x00')
                data_to_write = Utils.encrypt_data(data_to_write, self.encrypt and info.encrypted == 1)
                pak_file.seek(info.offset)
                pak_file.write(data_to_write)
            self.success_count += 1
            return True
        except Exception as e:
            print(f"{Color.RED}[错误] 写入失败: {str(e)[:50]}{Color.RESET}")
            self.fail_count += 1
            return False

    def replace_chunked(self, pak_file, info, new_data):
        """Overwrite a chunked entry chunk by chunk, keeping each chunk
        within its original on-disk span.  Returns True on success.
        """
        try:
            data_pos = 0
            total_size = info.size
            for i, (chunk_offset, chunk_end) in enumerate(info.chunks):
                max_chunk_size = chunk_end - chunk_offset
                # All chunks except the last carry up to info.chunk_size bytes
                # of uncompressed data; the last carries the remainder.
                if i < len(info.chunks) - 1:
                    current_chunk_size = min(info.chunk_size, total_size - data_pos)
                else:
                    current_chunk_size = total_size - data_pos
                if current_chunk_size <= 0:
                    break
                chunk_data = new_data[data_pos:data_pos + current_chunk_size]
                data_pos += current_chunk_size
                if info.zip != 0:
                    chunk_data = Utils.compress_to_max_size(chunk_data, max_chunk_size)
                chunk_data = Utils.encrypt_data(chunk_data, self.encrypt and info.encrypted == 1)
                if len(chunk_data) < max_chunk_size:
                    chunk_data = chunk_data.ljust(max_chunk_size, b'\x00')
                pak_file.seek(chunk_offset)
                pak_file.write(chunk_data)
            self.success_count += len(info.chunks)
            return True
        except Exception as e:
            print(f"{Color.RED}[错误] 分块写入失败: {str(e)[:50]}{Color.RESET}")
            self.fail_count += len(info.chunks)
            return False

    def pack_pak(self):
        """Replace every known entry in the PAK with its extracted/edited
        counterpart from UNPACK_DIR, then swap the temp copy over the
        original.  Prints statistics and returns True on success.
        """
        original_pak = self.extractor.selected_pak
        if not original_pak:
            print(f"{Color.RED}[错误] 未选择PAK文件{Color.RESET}")
            return False
        start_time = time.time()
        self.success_count = 0
        self.fail_count = 0
        self.file_success = 0
        self.file_fail = 0
        # Map lower-cased file names to their on-disk paths under UNPACK_DIR.
        file_map = {}
        for root, _, files in os.walk(UNPACK_DIR):
            for file in files:
                file_map[file.lower()] = Path(root) / file
        # BUG FIX: initialize before the try so the except handler never hits
        # an UnboundLocalError when an exception fires before assignment.
        temp_pak = None
        try:
            temp_pak = Path(PACK_TEMP_DIR) / f"temp_{original_pak.name}"
            Path(PACK_TEMP_DIR).mkdir(parents=True, exist_ok=True)
            shutil.copy2(original_pak, temp_pak)
            with open(temp_pak, 'r+b') as pak_file:
                for full_path, info in self.extractor.compression_info.items():
                    target_name = full_path.split('/')[-1].lower()
                    if target_name not in file_map:
                        self.file_fail += 1
                        continue
                    in_path = file_map[target_name]
                    if not in_path.exists():
                        self.file_fail += 1
                        continue
                    with open(in_path, 'rb') as df:
                        new_data = df.read()
                    if info.chunks:
                        if self.replace_chunked(pak_file, info, new_data):
                            self.file_success += 1
                        else:
                            self.file_fail += 1
                    else:
                        if self.replace_non_chunked(pak_file, info, new_data):
                            self.file_success += 1
                        else:
                            self.file_fail += 1
            # Atomically (on the same filesystem) swap the patched copy in.
            shutil.move(temp_pak, original_pak)
            self.pack_elapsed = time.time() - start_time

            total_chunks = self.success_count + self.fail_count
            total_files = self.file_success + self.file_fail

            print(f"{Color.GREEN}压缩成功: [{self.success_count}/{total_chunks}]{Color.RESET}")
            print(f"{Color.RED}压缩失败: [{self.fail_count}/{total_chunks}]{Color.RESET}")
            print(f"{Color.GREEN}文件成功: [{self.file_success}/{total_files}]{Color.RESET}")
            print(f"{Color.RED}文件失败: [{self.file_fail}/{total_files}]{Color.RESET}")
            return True
        except Exception as e:
            print(f"{Color.RED}[错误] 打包失败: {str(e)}{Color.RESET}")
            if temp_pak is not None and temp_pak.exists():
                temp_pak.unlink()
            return False

# ====================== 特征码提取 ======================
def extract_feature_code(file_data: bytes) -> Optional[str]:
    """Return the feature code embedded in *file_data*, or None.

    The feature code is the two bytes immediately following the SECOND
    occurrence of the marker A4 1E 06 00, rendered as upper-case hex.
    Prints an error and returns None when the marker appears fewer than
    twice or the trailing bytes are missing.
    """
    marker = bytes.fromhex("A41E0600")
    hits = []
    at = file_data.find(marker)
    while at != -1:
        hits.append(at)
        at = file_data.find(marker, at + len(marker))
    if len(hits) >= 2:
        tail_start = hits[1] + len(marker)
        if tail_start + 2 <= len(file_data):
            return file_data[tail_start:tail_start + 2].hex().upper()
    print(f"{Color.RED}[错误] 特征码提取失败{Color.RESET}")
    return None

# ====================== 极速优化：贴图修改核心 ======================
def modify_weapon_textures(data: bytearray, suffix: str, pairs: List[Tuple[int,int]]) -> int:
    """Swap texture references between weapon-id pairs inside *data*, in place.

    data:   mutable contents of the target .uexp file.
    suffix: the 4-hex-digit feature code extracted from the file
            (interpreted as a little-endian 16-bit value below).
    pairs:  (old_id, new_id) decimal id pairs from the config file.
    Returns the number of rule pairs successfully applied.

    NOTE(review): the +0x11 / +0x0D marker offsets and the "swap the
    8 bytes preceding each marker hit" layout are reverse-engineered
    assumptions about the asset format — confirm before changing them.
    """
    # 1. Precompute the fixed marker values (computed only once).
    def calc_single_fixed(suf):
        # Swap the two hex bytes (little-endian -> big-endian), add the
        # offset, then swap back to little-endian text form.
        big = suf[2:4] + suf[0:2]
        val = int(big, 16) + 0x11
        fx = format(val, '04X')
        return fx[2:4] + fx[0:2]

    fixed_texture = calc_single_fixed(suffix)
    big = suffix[2:4] + suffix[0:2]
    base_val = int(big, 16) + 0x0D
    fixed_gun = format(base_val, '04X')[2:4] + format(base_val, '04X')[0:2]

    # 2. Pre-scan every marker position once (key speedup: two linear
    #    scans up front, then binary search per rule instead of find()).
    def pre_scan_fx_positions(fx_hex):
        fx_bytes = bytes.fromhex(fx_hex)
        positions = []
        pos = 0
        while True:
            pos = data.find(fx_bytes, pos)
            if pos == -1:
                break
            positions.append(pos)
            pos += 1
        return positions

    fx_texture_pos = pre_scan_fx_positions(fixed_texture)
    fx_gun_pos = pre_scan_fx_positions(fixed_gun)

    # 3. Drop rules with unconvertible ids and pre-build the search patterns.
    valid_rules = []
    suffix_bytes = bytes.fromhex(suffix)
    for old_num, new_num in pairs:
        ah = Utils.DEC_to_HEX_4(old_num)
        bh = Utils.DEC_to_HEX_4(new_num)
        if not ah or not bh:
            continue
        # Pattern = little-endian 4-byte id followed by the suffix bytes.
        a_pattern = bytes.fromhex(ah) + suffix_bytes
        b_pattern = bytes.fromhex(bh) + suffix_bytes
        valid_rules.append((a_pattern, b_pattern))

    success_count = 0
    total_rules = len(valid_rules)
    if total_rules == 0:
        print(f"{Color.RED}[错误] 无有效规则{Color.RESET}")
        return 0

    print(f"\n{Color.CYAN}开始执行贴图批量替换（极速优化版）{Color.RESET}")
    for idx, (a_pattern, b_pattern) in enumerate(valid_rules, 1):
        print(SHORT_DIVIDER)
        print(f"{Color.CYAN}[提示] 【第 {idx}/{total_rules} 组】替换中...{Color.RESET}")

        # Locate the A and B id patterns (first occurrence of each).
        p1 = data.find(a_pattern)
        p2 = data.find(b_pattern)
        if p1 == -1 or p2 == -1:
            print(f"{Color.YELLOW}[警告] 未找到匹配位置，跳过{Color.RESET}")
            continue

        # Binary-search the first marker position AFTER pos
        # (replaces a linear data.find per rule).
        def find_next_fx(pos, fx_positions):
            idx = bisect.bisect_right(fx_positions, pos)
            if idx < len(fx_positions):
                return fx_positions[idx]
            return -1

        st1 = find_next_fx(p1, fx_texture_pos)
        st2 = find_next_fx(p2, fx_texture_pos)
        if st1 == -1 or st2 == -1:
            print(f"{Color.YELLOW}[警告] 未找到贴图FX，跳过{Color.RESET}")
            continue
        # Swap the 8 bytes immediately preceding each texture-marker hit.
        data[st1-8:st1], data[st2-8:st2] = data[st2-8:st2], data[st1-8:st1]

        # Gun texture: same swap against the gun marker, if both sides hit.
        st1_gun = find_next_fx(p1, fx_gun_pos)
        st2_gun = find_next_fx(p2, fx_gun_pos)
        if st1_gun != -1 and st2_gun != -1:
            data[st1_gun-8:st1_gun], data[st2_gun-8:st2_gun] = data[st2_gun-8:st2_gun], data[st1_gun-8:st1_gun]

        success_count += 1
        print(f"{Color.GREEN}[成功] 替换完成{Color.RESET}")

    print(SHORT_DIVIDER)
    print(f"{Color.GREEN}批量替换完成: 成功应用 {success_count}/{total_rules} 组规则{Color.RESET}")
    return success_count

# ====================== 配置选择 ======================
def select_config():
    """Interactively let the user pick a config file from CONFIG_DIR.

    Lists *.txt and *.py files; entering "0" exits the program.  Returns
    the chosen Path, or None when the directory or files are missing.
    """
    cfg_root = Path(CONFIG_DIR)
    if not cfg_root.exists():
        print(f"{Color.RED}[错误] 配置目录不存在{Color.RESET}")
        return None
    candidates = [*cfg_root.glob("*.txt"), *cfg_root.glob("*.py")]
    if not candidates:
        print(f"{Color.RED}[错误] 无配置文件{Color.RESET}")
        return None
    print(f"\n{Color.WHITE}可用配置：{Color.RESET}")
    for idx, cfg in enumerate(candidates, 1):
        print(f"{idx}. {cfg.name}")
    print("0. 退出")
    while True:
        try:
            selection = int(input(f"{Color.CYAN}选择配置：{Color.RESET}"))
        except ValueError:
            print(f"{Color.RED}请输入数字{Color.RESET}")
            continue
        if selection == 0:
            sys.exit(0)
        if 1 <= selection <= len(candidates):
            return candidates[selection - 1]
        print(f"{Color.RED}无效输入{Color.RESET}")

# ====================== 主流程 ======================
def main():
    """Run the full pipeline: pick a PAK, unpack it, patch TARGET_FILE
    according to the user-selected rule config, then repack in place.
    Prints progress and returns early on any failed stage.
    """
    os.environ['PYTHONIOENCODING'] = 'utf-8'
    clear_pack_unpack_dirs()
    print(LINE_DIVIDER)
    print(f"{Color.YELLOW}          贴图修改工具（极速优化版）{Color.RESET}")
    print(LINE_DIVIDER)

    # Stage 0: choose the archive to work on.
    extractor = FastPakExtractor()
    pak_list = extractor.get_pak_files()
    if not pak_list:
        print(f"{Color.RED}[错误] 无PAK文件{Color.RESET}")
        return
    extractor.select_pak_file(pak_list)

    # Stage 1: unpack, which also locates TARGET_FILE.
    print(f"\n{Color.GREEN}[1/3] 自动解包中...{Color.RESET}")
    if not extractor.unpack_pak():
        print(f"{Color.RED}[错误] 解包失败{Color.RESET}")
        return
    print(f"{Color.GREEN}[完成] 解包耗时：{extractor.unpack_elapsed:.2f}s{Color.RESET}")

    if not extractor.valid_target_file:
        print(f"{Color.RED}[错误] 未找到{TARGET_FILE}{Color.RESET}")
        return

    # Extract the per-archive feature code the patch rules are keyed to.
    with open(extractor.valid_target_file, 'rb') as f:
        raw = f.read()
    feat = extract_feature_code(raw)
    if not feat:
        return
    print(f"{Color.GREEN}[特征码] {feat}{Color.RESET}")

    # Load the (old_id, new_id) replacement rules.
    cfg = select_config()
    pairs = Utils.read_config_pairs(cfg)
    if not pairs:
        print(f"{Color.RED}[错误] 无有效规则{Color.RESET}")
        return
    print(f"{Color.CYAN}[加载] {len(pairs)} 组替换规则{Color.RESET}")

    # Apply the texture swaps in memory, then write the file back.
    data = bytearray(raw)
    modify_weapon_textures(data, feat, pairs)

    with open(extractor.valid_target_file, 'wb') as f:
        f.write(data)

    # Stage 2: write the patched file back into the PAK.
    print(f"\n{Color.GREEN}[2/3] 自动打包中...{Color.RESET}")
    packer = FastPakPacker(extractor)
    packer.pack_pak()
    print(f"{Color.GREEN}[完成] 打包耗时：{packer.pack_elapsed:.2f}s{Color.RESET}")

    print(LINE_DIVIDER)
    print(f"{Color.GREEN}✅ 全部流程完成！{Color.RESET}")
    print(LINE_DIVIDER)

if __name__ == "__main__":
    # Entry point: run the pipeline, turning Ctrl-C and any unexpected
    # error into a short colored message instead of a traceback.
    try:
        main()
    except KeyboardInterrupt:
        print(f"\n{Color.RED}[退出] 程序中断{Color.RESET}")
    except Exception as e:
        print(f"{Color.RED}[异常] {str(e)}{Color.RESET}")
