From b79e89ec69c57a5a4d575a34bdbe79c1444f5f6e Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:06:29 +0000 Subject: [PATCH 01/11] Update main.go --- cmd/evrtools/main.go | 48 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/cmd/evrtools/main.go b/cmd/evrtools/main.go index cea300d..6e49510 100644 --- a/cmd/evrtools/main.go +++ b/cmd/evrtools/main.go @@ -7,6 +7,7 @@ import ( "io" "os" "path/filepath" + "strings" "github.com/EchoTools/evrFileTools/pkg/manifest" ) @@ -20,6 +21,7 @@ var ( preserveGroups bool forceOverwrite bool useDecimalName bool + exportTypes string ) func init() { @@ -31,6 +33,7 @@ func init() { flag.BoolVar(&preserveGroups, "preserve-groups", false, "Preserve frame grouping in output") flag.BoolVar(&forceOverwrite, "force", false, "Allow non-empty output directory") flag.BoolVar(&useDecimalName, "decimal-names", false, "Use decimal format for filenames (default is hex)") + flag.StringVar(&exportTypes, "export", "", "Comma-separated list of types to export (textures, tints)") } func main() { @@ -134,11 +137,37 @@ func runExtract() error { } defer pkg.Close() + var filterTypes []int64 + if exportTypes != "" { + for _, t := range strings.Split(exportTypes, ",") { + switch strings.TrimSpace(t) { + case "textures": + // Use variables to avoid constant overflow checks for negative int64s + t1 := uint64(0xBEAC1969CB7B8861) + t2 := uint64(0x4A4C32C49300B8A0) + t3 := uint64(0xe2efe7289d5985b8) + t4 := uint64(0x489bb35d53ca50e9) + filterTypes = append(filterTypes, + int64(t1), // -4707359568332879775 + int64(t2), // 5353709876897953952 + int64(t3), // -2094201140079393352 + int64(t4), // 5231972605540061417 + ) + case "tints": + filterTypes = append(filterTypes, + int64(uint64(0x24CBFD54E9A7F2EA)), // Folder: 24cbfd54e9a7f2ea + int64(uint64(0x32f30fe361939dee)), // 3671295590506143214 + ) + } + } + } + fmt.Println("Extracting files...") if err := 
pkg.Extract( outputDir, manifest.WithPreserveGroups(preserveGroups), manifest.WithDecimalNames(useDecimalName), + manifest.WithTypeFilter(filterTypes), ); err != nil { return fmt.Errorf("extract: %w", err) } @@ -154,6 +183,14 @@ func runBuild() error { return fmt.Errorf("scan files: %w", err) } + // If dataDir is provided, we are in "repack" mode where we merge original files + if dataDir != "" { + manifestPath := filepath.Join(dataDir, "manifests", packageName) + if _, err := os.Stat(manifestPath); err == nil { + return runRepack(files) + } + } + totalFiles := 0 for _, group := range files { totalFiles += len(group) @@ -180,3 +217,14 @@ func runBuild() error { fmt.Printf("Build complete. Output written to %s\n", outputDir) return nil } + +func runRepack(inputFiles [][]manifest.ScannedFile) error { + fmt.Println("Loading original manifest for repacking...") + manifestPath := filepath.Join(dataDir, "manifests", packageName) + m, err := manifest.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("read manifest: %w", err) + } + + return manifest.Repack(m, inputFiles, outputDir, packageName, dataDir) +} From bfa207be9d9d0d5f93ba6ef1762dcb2991cff9f5 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:07:06 +0000 Subject: [PATCH 02/11] Update main.go added extra format options and improved detection or something i dont know --- cmd/texconv/main.go | 364 ++++++++++++++++++++++++++++++++++++-------- 1 file changed, 303 insertions(+), 61 deletions(-) diff --git a/cmd/texconv/main.go b/cmd/texconv/main.go index 633cdc0..8fd6827 100644 --- a/cmd/texconv/main.go +++ b/cmd/texconv/main.go @@ -25,6 +25,7 @@ import ( "image" "image/png" "io" + "math" "os" "path/filepath" "strings" @@ -64,8 +65,12 @@ const ( DXGIFormatBC6HSF16 = 96 DXGIFormatBC7Unorm = 98 // High quality DXGIFormatBC7UnormSRGB = 99 + DXGIFormatR8Unorm = 61 // Grayscale + DXGIFormatR11G11B10Float = 26 // Packed Float 
DXGIFormatR8G8B8A8Unorm = 28 // Uncompressed RGBA DXGIFormatR8G8B8A8UnormSRGB = 29 + DXGIFormatB8G8R8A8UnormSRGB = 91 // BGRA sRGB + DXGIFormatB8G8R8A8Typeless = 87 // BGRA Typeless ) // DDSHeader represents the main DDS file header (124 bytes) @@ -219,7 +224,7 @@ func decodeDDS(inputPath, outputPath string) error { } // Decompress to RGBA - rgba, err := decompressBC(compressedData, info) + img, err := decompressBC(compressedData, info) if err != nil { return fmt.Errorf("decompress: %w", err) } @@ -231,7 +236,7 @@ func decodeDDS(inputPath, outputPath string) error { } defer outFile.Close() - if err := png.Encode(outFile, rgba); err != nil { + if err := png.Encode(outFile, img); err != nil { return fmt.Errorf("encode png: %w", err) } @@ -465,6 +470,26 @@ func parseDDSHeader(r io.ReadSeeker) (*TextureInfo, error) { info.FormatName = "BC7" info.Compression = "BC7" info.BytesPerPixel = 1 + case DXGIFormatR8Unorm: + info.FormatName = "R8_UNORM" + info.Compression = "None" + info.BytesPerPixel = 1 + case DXGIFormatR11G11B10Float: + info.FormatName = "R11G11B10_FLOAT" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + info.FormatName = "RGBA8" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatB8G8R8A8UnormSRGB: + info.FormatName = "BGRA8" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatB8G8R8A8Typeless: + info.FormatName = "BGRA8_TYPELESS" + info.Compression = "None" + info.BytesPerPixel = 4 default: return nil, fmt.Errorf("unsupported DXGI format: %d", info.Format) } @@ -498,32 +523,56 @@ func calculateMipSize(width, height, format uint32) uint32 { DXGIFormatBC6HUF16, DXGIFormatBC6HSF16, DXGIFormatBC7Unorm, DXGIFormatBC7UnormSRGB: return blockW * blockH * 16 // 16 bytes per block + case DXGIFormatR8Unorm: + return width * height + case DXGIFormatR11G11B10Float: + return width * height * 4 + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + return width * 
height * 4 + case DXGIFormatB8G8R8A8UnormSRGB: + return width * height * 4 + case DXGIFormatB8G8R8A8Typeless: + return width * height * 4 default: return width * height * 4 // Fallback: uncompressed RGBA } } // decompressBC decompresses BC-compressed data to RGBA -func decompressBC(data []byte, info *TextureInfo) (*image.RGBA, error) { - rgba := image.NewRGBA(image.Rect(0, 0, int(info.Width), int(info.Height))) +func decompressBC(data []byte, info *TextureInfo) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, int(info.Width), int(info.Height))) + + isSRGB := info.Format == DXGIFormatBC1UnormSRGB || + info.Format == DXGIFormatBC3UnormSRGB || + info.Format == DXGIFormatBC7UnormSRGB switch info.Format { case DXGIFormatBC1Unorm, DXGIFormatBC1UnormSRGB: - return decompressBC1(data, int(info.Width), int(info.Height)) + return decompressBC1(data, int(info.Width), int(info.Height), isSRGB) case DXGIFormatBC3Unorm, DXGIFormatBC3UnormSRGB: - return decompressBC3(data, int(info.Width), int(info.Height)) + return decompressBC3(data, int(info.Width), int(info.Height), isSRGB) case DXGIFormatBC5Unorm, DXGIFormatBC5SNorm: return decompressBC5(data, int(info.Width), int(info.Height)) + case DXGIFormatR8Unorm: + return decompressR8(data, int(info.Width), int(info.Height)) + case DXGIFormatR11G11B10Float: + return decompressR11G11B10Float(data, int(info.Width), int(info.Height)) + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + return decompressRGBA(data, int(info.Width), int(info.Height)) + case DXGIFormatB8G8R8A8UnormSRGB: + return decompressBGRA(data, int(info.Width), int(info.Height)) + case DXGIFormatB8G8R8A8Typeless: + return decompressBGRA(data, int(info.Width), int(info.Height)) default: return nil, fmt.Errorf("decompression not implemented for format: %s", info.FormatName) } - return rgba, nil + return nrgba, nil } // decompressBC1 decompresses BC1/DXT1 to RGBA -func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { - rgba 
:= image.NewRGBA(image.Rect(0, 0, width, height)) +func decompressBC1(data []byte, width, height int, isSRGB bool) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) blockW := (width + 3) / 4 blockH := (height + 3) / 4 @@ -541,40 +590,73 @@ func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { offset += 4 // Decode RGB565 - r0 := uint8((c0 >> 11) * 255 / 31) - g0 := uint8(((c0 >> 5) & 0x3F) * 255 / 63) - b0 := uint8((c0 & 0x1F) * 255 / 31) - - r1 := uint8((c1 >> 11) * 255 / 31) - g1 := uint8(((c1 >> 5) & 0x3F) * 255 / 63) - b1 := uint8((c1 & 0x1F) * 255 / 31) + r0_5 := (c0 >> 11) & 0x1F + g0_6 := (c0 >> 5) & 0x3F + b0_5 := c0 & 0x1F + r0_8 := uint8((r0_5 << 3) | (r0_5 >> 2)) + g0_8 := uint8((g0_6 << 2) | (g0_6 >> 4)) + b0_8 := uint8((b0_5 << 3) | (b0_5 >> 2)) + + r1_5 := (c1 >> 11) & 0x1F + g1_6 := (c1 >> 5) & 0x3F + b1_5 := c1 & 0x1F + r1_8 := uint8((r1_5 << 3) | (r1_5 >> 2)) + g1_8 := uint8((g1_6 << 2) | (g1_6 >> 4)) + b1_8 := uint8((b1_5 << 3) | (b1_5 >> 2)) // Color palette var colors [4][4]uint8 - colors[0] = [4]uint8{r0, g0, b0, 255} - colors[1] = [4]uint8{r1, g1, b1, 255} - - if c0 > c1 { - colors[2] = [4]uint8{ - (2*r0 + r1) / 3, - (2*g0 + g1) / 3, - (2*b0 + b1) / 3, - 255, + + if isSRGB { + lr0 := srgbToLinear(r0_8) + lg0 := srgbToLinear(g0_8) + lb0 := srgbToLinear(b0_8) + lr1 := srgbToLinear(r1_8) + lg1 := srgbToLinear(g1_8) + lb1 := srgbToLinear(b1_8) + + var linearColors [4][3]float32 + linearColors[0] = [3]float32{lr0, lg0, lb0} + linearColors[1] = [3]float32{lr1, lg1, lb1} + + if c0 > c1 { + linearColors[2] = [3]float32{(2*lr0 + lr1) / 3, (2*lg0 + lg1) / 3, (2*lb0 + lb1) / 3} + linearColors[3] = [3]float32{(lr0 + 2*lr1) / 3, (lg0 + 2*lg1) / 3, (lb0 + 2*lb1) / 3} + } else { + linearColors[2] = [3]float32{(lr0 + lr1) / 2, (lg0 + lg1) / 2, (lb0 + lb1) / 2} + linearColors[3] = [3]float32{0, 0, 0} } - colors[3] = [4]uint8{ - (r0 + 2*r1) / 3, - (g0 + 2*g1) / 3, - (b0 + 2*b1) / 3, - 255, + + for i := 0; i < 
4; i++ { + colors[i][0] = linearToSrgb(linearColors[i][0]) + colors[i][1] = linearToSrgb(linearColors[i][1]) + colors[i][2] = linearToSrgb(linearColors[i][2]) + colors[i][3] = 255 + } + if c0 <= c1 { + colors[3][3] = 0 } } else { - colors[2] = [4]uint8{ - (r0 + r1) / 2, - (g0 + g1) / 2, - (b0 + b1) / 2, - 255, + colors[0] = [4]uint8{r0_8, g0_8, b0_8, 255} + colors[1] = [4]uint8{r1_8, g1_8, b1_8, 255} + + if c0 > c1 { + colors[2] = [4]uint8{ + (2*r0_8 + r1_8) / 3, + (2*g0_8 + g1_8) / 3, + (2*b0_8 + b1_8) / 3, + 255, + } + colors[3] = [4]uint8{ + (r0_8 + 2*r1_8) / 3, + (g0_8 + 2*g1_8) / 3, + (b0_8 + 2*b1_8) / 3, + 255, + } + } else { + colors[2] = [4]uint8{(r0_8 + r1_8) / 2, (g0_8 + g1_8) / 2, (b0_8 + b1_8) / 2, 255} + colors[3] = [4]uint8{0, 0, 0, 0} // Transparent } - colors[3] = [4]uint8{0, 0, 0, 0} // Transparent } // Read index bits @@ -594,22 +676,22 @@ func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { idx := (indices >> (2 * (py*4 + px))) & 3 color := colors[idx] - offset := rgba.PixOffset(x, y) - rgba.Pix[offset+0] = color[0] - rgba.Pix[offset+1] = color[1] - rgba.Pix[offset+2] = color[2] - rgba.Pix[offset+3] = color[3] + offset := nrgba.PixOffset(x, y) + nrgba.Pix[offset+0] = color[0] + nrgba.Pix[offset+1] = color[1] + nrgba.Pix[offset+2] = color[2] + nrgba.Pix[offset+3] = color[3] } } } } - return rgba, nil + return nrgba, nil } // decompressBC3 decompresses BC3/DXT5 to RGBA -func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { - rgba := image.NewRGBA(image.Rect(0, 0, width, height)) +func decompressBC3(data []byte, width, height int, isSRGB bool) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) blockW := (width + 3) / 4 blockH := (height + 3) / 4 @@ -651,19 +733,46 @@ func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { c1 := uint16(data[offset+2]) | uint16(data[offset+3])<<8 offset += 4 - r0 := uint8((c0 >> 11) * 255 / 31) - g0 := uint8(((c0 >> 5) & 0x3F) * 
255 / 63) - b0 := uint8((c0 & 0x1F) * 255 / 31) + r0_5 := (c0 >> 11) & 0x1F + g0_6 := (c0 >> 5) & 0x3F + b0_5 := c0 & 0x1F + r0_8 := uint8((r0_5 << 3) | (r0_5 >> 2)) + g0_8 := uint8((g0_6 << 2) | (g0_6 >> 4)) + b0_8 := uint8((b0_5 << 3) | (b0_5 >> 2)) - r1 := uint8((c1 >> 11) * 255 / 31) - g1 := uint8(((c1 >> 5) & 0x3F) * 255 / 63) - b1 := uint8((c1 & 0x1F) * 255 / 31) + r1_5 := (c1 >> 11) & 0x1F + g1_6 := (c1 >> 5) & 0x3F + b1_5 := c1 & 0x1F + r1_8 := uint8((r1_5 << 3) | (r1_5 >> 2)) + g1_8 := uint8((g1_6 << 2) | (g1_6 >> 4)) + b1_8 := uint8((b1_5 << 3) | (b1_5 >> 2)) var colors [4][3]uint8 - colors[0] = [3]uint8{r0, g0, b0} - colors[1] = [3]uint8{r1, g1, b1} - colors[2] = [3]uint8{(2*r0 + r1) / 3, (2*g0 + g1) / 3, (2*b0 + b1) / 3} - colors[3] = [3]uint8{(r0 + 2*r1) / 3, (g0 + 2*g1) / 3, (b0 + 2*b1) / 3} + if isSRGB { + lr0 := srgbToLinear(r0_8) + lg0 := srgbToLinear(g0_8) + lb0 := srgbToLinear(b0_8) + lr1 := srgbToLinear(r1_8) + lg1 := srgbToLinear(g1_8) + lb1 := srgbToLinear(b1_8) + + var linearColors [4][3]float32 + linearColors[0] = [3]float32{lr0, lg0, lb0} + linearColors[1] = [3]float32{lr1, lg1, lb1} + linearColors[2] = [3]float32{(2*lr0 + lr1) / 3, (2*lg0 + lg1) / 3, (2*lb0 + lb1) / 3} + linearColors[3] = [3]float32{(lr0 + 2*lr1) / 3, (lg0 + 2*lg1) / 3, (lb0 + 2*lb1) / 3} + + for i := 0; i < 4; i++ { + colors[i][0] = linearToSrgb(linearColors[i][0]) + colors[i][1] = linearToSrgb(linearColors[i][1]) + colors[i][2] = linearToSrgb(linearColors[i][2]) + } + } else { + colors[0] = [3]uint8{r0_8, g0_8, b0_8} + colors[1] = [3]uint8{r1_8, g1_8, b1_8} + colors[2] = [3]uint8{(2*r0_8 + r1_8) / 3, (2*g0_8 + g1_8) / 3, (2*b0_8 + b1_8) / 3} + colors[3] = [3]uint8{(r0_8 + 2*r1_8) / 3, (g0_8 + 2*g1_8) / 3, (b0_8 + 2*b1_8) / 3} + } colorIndices := uint32(data[offset]) | uint32(data[offset+1])<<8 | uint32(data[offset+2])<<16 | uint32(data[offset+3])<<24 @@ -685,26 +794,159 @@ func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { color := colors[colorIdx] 
alpha := alphas[alphaIdx] - pixOffset := rgba.PixOffset(x, y) - rgba.Pix[pixOffset+0] = color[0] - rgba.Pix[pixOffset+1] = color[1] - rgba.Pix[pixOffset+2] = color[2] - rgba.Pix[pixOffset+3] = alpha + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = color[0] + nrgba.Pix[pixOffset+1] = color[1] + nrgba.Pix[pixOffset+2] = color[2] + nrgba.Pix[pixOffset+3] = alpha } } } } - return rgba, nil + return nrgba, nil } // decompressBC5 decompresses BC5 (normal maps) to RGBA -func decompressBC5(data []byte, width, height int) (*image.RGBA, error) { +func decompressBC5(data []byte, width, height int) (*image.NRGBA, error) { // BC5 stores two channels (RG for normal maps) // We'll decode them and reconstruct Z = sqrt(1 - X^2 - Y^2) return nil, fmt.Errorf("BC5 decompression not yet implemented") } +// decompressR8 decompresses R8_UNORM (grayscale) to RGBA +func decompressR8(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height { + return nil, fmt.Errorf("data truncated") + } + + offset := 0 + for y := 0; y < height; y++ { + for x := 0; x < width; x++ { + v := data[offset] + offset++ + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = v + nrgba.Pix[pixOffset+1] = v + nrgba.Pix[pixOffset+2] = v + nrgba.Pix[pixOffset+3] = 255 + } + } + return nrgba, nil +} + +// decompressRGBA decompresses uncompressed RGBA to RGBA +func decompressRGBA(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + copy(nrgba.Pix, data[:width*height*4]) + return nrgba, nil +} + +// decompressBGRA decompresses uncompressed BGRA to RGBA +func decompressBGRA(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + + 
count := width * height + for i := 0; i < count; i++ { + offset := i * 4 + b := data[offset] + g := data[offset+1] + r := data[offset+2] + a := data[offset+3] + + nrgba.Pix[offset] = r + nrgba.Pix[offset+1] = g + nrgba.Pix[offset+2] = b + nrgba.Pix[offset+3] = a + } + return nrgba, nil +} + +// decompressR11G11B10Float decompresses packed float format to RGBA +func decompressR11G11B10Float(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + + offset := 0 + for y := 0; y < height; y++ { + for x := 0; x < width; x++ { + packed := uint32(data[offset]) | uint32(data[offset+1])<<8 | uint32(data[offset+2])<<16 | uint32(data[offset+3])<<24 + offset += 4 + + r := f11ToF32(packed & 0x7FF) + g := f11ToF32((packed >> 11) & 0x7FF) + b := f10ToF32((packed >> 22) & 0x3FF) + + // Clamp to 0-255 + r8 := uint8(math.Min(255, math.Max(0, float64(r)*255))) + g8 := uint8(math.Min(255, math.Max(0, float64(g)*255))) + b8 := uint8(math.Min(255, math.Max(0, float64(b)*255))) + + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = r8 + nrgba.Pix[pixOffset+1] = g8 + nrgba.Pix[pixOffset+2] = b8 + nrgba.Pix[pixOffset+3] = 255 + } + } + return nrgba, nil +} + +func f11ToF32(u uint32) float32 { + exponent := (u >> 6) & 0x1F + mantissa := u & 0x3F + if exponent == 0 { + if mantissa == 0 { + return 0.0 + } + return float32(mantissa) / 64.0 * (1.0 / 16384.0) + } else if exponent == 31 { + return 65504.0 + } + return float32(math.Pow(2, float64(exponent)-15)) * (1.0 + float32(mantissa)/64.0) +} + +func f10ToF32(u uint32) float32 { + exponent := (u >> 5) & 0x1F + mantissa := u & 0x1F + if exponent == 0 { + if mantissa == 0 { + return 0.0 + } + return float32(mantissa) / 32.0 * (1.0 / 16384.0) + } else if exponent == 31 { + return 65504.0 + } + return float32(math.Pow(2, float64(exponent)-15)) * (1.0 + float32(mantissa)/32.0) +} + +// srgbToLinear 
converts an sRGB byte value to a linear float32 value. +func srgbToLinear(c uint8) float32 { + v := float32(c) / 255.0 + if v <= 0.04045 { + return v / 12.92 + } + return float32(math.Pow(float64((v+0.055)/1.055), 2.4)) +} + +// linearToSrgb converts a linear float32 value to an sRGB byte value. +func linearToSrgb(v float32) uint8 { + if v <= 0.0031308 { + return uint8(math.Min(255, math.Max(0, float64(v)*12.92*255.0))) + } + srgb := 1.055*math.Pow(float64(v), 1.0/2.4) - 0.055 + return uint8(math.Min(255, math.Max(0, srgb*255.0))) +} + // writeDDSFile writes a complete DDS file with DX10 header func writeDDSFile(w io.Writer, width, height, mipCount, dxgiFormat uint32, compressedData []byte) error { // Calculate pitch/linear size From 38261b25135f304e5f8cd8956cd8ef55832b12df Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:10:21 +0000 Subject: [PATCH 03/11] I dont even remember what i changed there was too much so ill list what i remember; updated texconv to add extra format and better detection, added --export command in evrtools to extract just textures or tints added better repacking from my own version of evrfiletools --- EVR_Texture_Editor.py | 3065 +++++++++++++++++++++++++++++++++++++++ cmd/evrtools/main.go | 48 + cmd/texconv/main.go | 364 ++++- make | 0 pkg/manifest/builder.go | 61 +- pkg/manifest/package.go | 105 +- pkg/manifest/repack.go | 383 +++++ pkg/manifest/scanner.go | 53 +- 8 files changed, 3987 insertions(+), 92 deletions(-) create mode 100644 EVR_Texture_Editor.py create mode 100644 make create mode 100644 pkg/manifest/repack.go diff --git a/EVR_Texture_Editor.py b/EVR_Texture_Editor.py new file mode 100644 index 0000000..f3c321f --- /dev/null +++ b/EVR_Texture_Editor.py @@ -0,0 +1,3065 @@ +import os +import sys +import struct +import tkinter as tk +from tkinter import ttk, filedialog, messagebox, scrolledtext +import shutil +import tempfile +import subprocess +import 
threading +import json +import glob +import time +import zipfile +import urllib.request +import webbrowser +from pathlib import Path +from concurrent.futures import ThreadPoolExecutor, as_completed + +try: + from PIL import Image, ImageTk, ImageDraw, ImageFont + HAS_PIL = True +except ImportError: + HAS_PIL = False + messagebox.showerror("Missing Dependencies", "Pillow library is required but not installed.\nPlease install it manually: pip install Pillow") + sys.exit(1) + +# --- SETTINGS & PATH MANAGEMENT --- +SETTINGS_DIR_NAME = "Settings" + +def get_base_dir(): + if getattr(sys, 'frozen', False): + return os.path.dirname(sys.executable) + else: + return os.path.dirname(os.path.abspath(__file__)) + +def get_settings_path(filename): + base = get_base_dir() + settings_dir = os.path.join(base, SETTINGS_DIR_NAME) + if not os.path.exists(settings_dir): + try: + os.makedirs(settings_dir) + except: pass + return os.path.join(settings_dir, filename) + +def get_tool_path(tool_name): + # Check Settings folder first + settings_path = get_settings_path(tool_name) + if os.path.exists(settings_path): + return settings_path + + # Fallback to script dir + script_path = os.path.join(get_base_dir(), tool_name) + if os.path.exists(script_path): + return script_path + + return settings_path + +def get_cache_dir(): + # Check Settings folder first (Preferred) + settings_path = get_settings_path("texture_cache") + if os.path.exists(settings_path) and os.path.isdir(settings_path): + return settings_path + + base = get_base_dir() + # Check legacy/root location + legacy_path = os.path.join(base, "texture_cache") + if os.path.exists(legacy_path) and os.path.isdir(legacy_path): + return legacy_path + + # Default to Settings folder + return settings_path + +CONFIG_FILE = get_settings_path("config.json") +CACHE_DIR = get_cache_dir() # Store cache in Settings folder for persistence (or root if exists) +CACHE2_FILE = get_settings_path("cache2.json") +LEGACY_CACHE_FILE = 
get_settings_path("cache.json") +MAPPING_FILE = get_settings_path("texture_mapping.json") + +# App version for updates +APP_VERSION = "2.0.0" +GITHUB_REPO = "heisthecat31/EchoVR-Texture-Editor" +GITHUB_API_URL = f"https://api.github.com/repos/{GITHUB_REPO}/releases/latest" + +DECODE_CACHE = {} + + +def compare_versions(v1, v2): + """Compare two version strings (e.g., '1.0.0' vs '1.1.0'). Returns True if v2 > v1""" + try: + parts1 = [int(x) for x in v1.split('.')] + parts2 = [int(x) for x in v2.split('.')] + + # Pad with zeros + while len(parts1) < len(parts2): + parts1.append(0) + while len(parts2) < len(parts1): + parts2.append(0) + + for p1, p2 in zip(parts1, parts2): + if p2 > p1: + return True + elif p2 < p1: + return False + return False + except: + return False + + +def check_for_updates(): + """Check GitHub for latest release. Returns (has_update, latest_version, download_url) or (False, None, None)""" + try: + response = urllib.request.urlopen(GITHUB_API_URL, timeout=5) + data = json.loads(response.read().decode('utf-8')) + + if 'tag_name' in data: + latest_version = data['tag_name'].lstrip('v') # Remove 'v' prefix if present + download_url = data.get('html_url', '') # Link to releases page + + if compare_versions(APP_VERSION, latest_version): + return True, latest_version, download_url + except Exception as e: + pass # Silent fail - don't break if network unavailable + + return False, None, None + + +def _dir_nonempty(path): + """Return True if directory exists and has at least one entry (no full listdir).""" + try: + with os.scandir(path) as it: + return next(it, None) is not None + except (OSError, TypeError): + return False + + +def run_hidden_command(cmd, cwd=None, timeout=None, capture_output=True): + if sys.platform == 'win32': + startupinfo = subprocess.STARTUPINFO() + startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW + startupinfo.wShowWindow = subprocess.SW_HIDE + + if capture_output: + try: + result = subprocess.run( + cmd, + 
startupinfo=startupinfo, + capture_output=True, + text=True, + cwd=cwd, + timeout=timeout, + creationflags=subprocess.CREATE_NO_WINDOW + ) + return result + except subprocess.TimeoutExpired: + return subprocess.CompletedProcess(cmd, -1, "", "Timeout expired") + except Exception: + return subprocess.CompletedProcess(cmd, -1, "", "Command failed") + else: + try: + result = subprocess.run( + cmd, + startupinfo=startupinfo, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + cwd=cwd, + timeout=timeout, + creationflags=subprocess.CREATE_NO_WINDOW + ) + return result + except Exception: + return subprocess.CompletedProcess(cmd, -1) + else: + try: + if capture_output: + return subprocess.run(cmd, capture_output=True, text=True, cwd=cwd, timeout=timeout) + else: + return subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, cwd=cwd, timeout=timeout) + except subprocess.TimeoutExpired: + return subprocess.CompletedProcess(cmd, -1, "", "Timeout expired") + except Exception: + return subprocess.CompletedProcess(cmd, -1, "", "Command failed") + +# --- CACHE MANAGER --- +class TextureCacheManager: + @staticmethod + def load_cache(): + if os.path.exists(CACHE2_FILE): + try: + with open(CACHE2_FILE, 'r', encoding='utf-8') as f: + return json.load(f) + except Exception: + return {} + return {} + + @staticmethod + def save_cache(cache_data): + try: + with open(CACHE2_FILE, 'w', encoding='utf-8') as f: + json.dump(cache_data, f, indent=2) + except Exception: + pass + + @staticmethod + def get_cached_files(folder_path): + cache = TextureCacheManager.load_cache() + if not cache: return None + + norm_path = os.path.normpath(folder_path).lower() + for key in cache: + if os.path.normpath(key).lower() == norm_path: + return cache[key] + return None + + @staticmethod + def update_cache(folder_path, file_list): + cache = TextureCacheManager.load_cache() + cache[os.path.normpath(folder_path)] = file_list + TextureCacheManager.save_cache(cache) + +class 
ConfigManager: + @staticmethod + def load_config(): + base_dir = get_base_dir() + settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) + + if not os.path.exists(settings_dir): + try: + os.makedirs(settings_dir) + except: pass + + default_config = { + 'output_folder': None, + 'data_folder': None, + 'extracted_folder': os.path.join(settings_dir, "pcvr-extracted"), + 'repacked_folder': os.path.join(settings_dir, "output-both"), + 'pcvr_input_folder': os.path.join(settings_dir, "input-pcvr"), + 'quest_input_folder': os.path.join(settings_dir, "input-quest"), + 'backup_folder': None, + 'renderdoc_path': None + } + + try: + if os.path.exists(CONFIG_FILE): + with open(CONFIG_FILE, 'r', encoding='utf-8') as f: + loaded_config = json.load(f) + for key in default_config: + if key in loaded_config: + value = loaded_config[key] + if value is None: + continue + if isinstance(value, str) and (key.endswith('_folder') or key.endswith('_path')): + value = os.path.normpath(value) + if not os.path.exists(value) and key in ['repacked_folder', 'pcvr_input_folder', 'quest_input_folder']: + parent_path = os.path.join(os.path.dirname(value), os.path.basename(value)) + if os.path.exists(parent_path): + value = parent_path + default_config[key] = value + except Exception as e: + print(f"Config load error: {e}") + + return default_config + + @staticmethod + def save_config(**kwargs): + config = ConfigManager.load_config() + config.update(kwargs) + + try: + with open(CONFIG_FILE, 'w', encoding='utf-8') as f: + json.dump(config, f, indent=4) + except Exception as e: + print(f"Config save error: {e}") + +class TutorialPopup: + """Step-by-step guided tutorial with highlight boxes showing what to click in order.""" + HIGHLIGHT_BG = "#2d5a27" + HIGHLIGHT_BORDER = 4 + PANEL_BG = "#333333" + + @staticmethod + def _get_widget(app, attr): + try: + return getattr(app, attr, None) + except Exception: + return None + + @staticmethod + def show(parent, app=None): + if app is None: + app = parent + 
steps = [ + ("data_folder_btn", "Step 1: Data Folder", "Click the **Select** button next to Data Folder to choose your EchoVR game folder (the one containing 'manifests' and 'packages')."), + ("extracted_folder_btn", "Step 2: Extracted Folder", "Click **Select** next to Extracted Folder to choose where extracted textures will be saved (e.g. a new empty folder)."), + ("extract_btn", "Step 3: Extract Package", "Click **Extract Package**. Choose 'Textures Only' for a fast extract, or 'Full Package' if you need everything."), + ("file_list", "Step 4: Texture List", "After extraction, textures appear here. Click one or more (Ctrl/Shift for multi-select) to choose which texture to replace."), + ("replacement_canvas", "Step 5: Replacement Texture", "Click the **right canvas** (Replacement area) to open a file picker and choose your replacement image (PNG/DDS)."), + ("replace_btn", "Step 6: Replace Texture", "Click **Replace Texture** to apply your replacement image to all selected textures. Files go to input-pcvr or input-quest."), + ("repack_btn", "Step 7: Repack Modified", "After editing, click **Repack Modified** to build the output. Use the default 'output-both' folder when asked."), + ("push_quest_btn", "Step 8: Deploy", "Quest: use **Push Files To Quest** to deploy. 
PCVR: use **Update EchoVR** in the header to copy files into your game folder."), + ] + panel = tk.Toplevel(parent) + panel.title("Tutorial") + panel.configure(bg=TutorialPopup.PANEL_BG) + panel.resizable(False, False) + panel.geometry("340x165") + panel.transient(parent) + panel.attributes("-topmost", True) + try: + px = parent.winfo_rootx() + max(0, (parent.winfo_width() - 340) // 2) + py = parent.winfo_rooty() + parent.winfo_height() - 185 + if py < parent.winfo_rooty(): + py = parent.winfo_rooty() + 20 + panel.geometry(f"+{px}+{py}") + except Exception: + pass + current_step = [0] + saved_style = {} + + def _clear_highlight(): + w = saved_style.get("widget") + if w and w.winfo_exists(): + try: + for k, v in saved_style.get("config", {}).items(): + try: + w.config(**{k: v}) + except Exception: + pass + except Exception: + pass + saved_style.clear() + + def _apply_highlight(widget): + if not widget or not widget.winfo_exists(): + return + try: + orig = {} + for key in ("bg", "relief", "bd", "highlightbackground", "highlightthickness"): + try: + orig[key] = widget.cget(key) + except Exception: + pass + saved_style["widget"] = widget + saved_style["config"] = orig + for attr, value in [ + ("bg", TutorialPopup.HIGHLIGHT_BG), + ("relief", tk.SOLID), + ("bd", TutorialPopup.HIGHLIGHT_BORDER), + ("highlightbackground", "#4cd964"), + ("highlightthickness", TutorialPopup.HIGHLIGHT_BORDER), + ]: + try: + widget.config(**{attr: value}) + except Exception: + pass + except Exception: + saved_style.clear() + + def _go(step_index): + _clear_highlight() + current_step[0] = step_index + idx = current_step[0] + step_label.config(text=f"Step {idx + 1} of {len(steps)}") + title_label.config(text=steps[idx][1]) + desc_label.config(text=steps[idx][2]) + widget = TutorialPopup._get_widget(app, steps[idx][0]) + _apply_highlight(widget) + prev_btn.config(state=tk.NORMAL if idx > 0 else tk.DISABLED) + is_last = idx >= len(steps) - 1 + next_btn.config(state=tk.NORMAL, text="Close" if 
is_last else "Next →") + + def _next(): + if current_step[0] >= len(steps) - 1: + _skip() + else: + _go(current_step[0] + 1) + + def _prev(): + if current_step[0] > 0: + _go(current_step[0] - 1) + + def _skip(): + _clear_highlight() + panel.destroy() + + content = tk.Frame(panel, bg=TutorialPopup.PANEL_BG, padx=10, pady=8) + content.pack(fill=tk.BOTH, expand=True) + step_label = tk.Label(content, text=f"Step 1 of {len(steps)}", font=("Arial", 8), fg="#888888", bg=TutorialPopup.PANEL_BG) + step_label.pack(anchor="w") + title_label = tk.Label(content, text=steps[0][1], font=("Arial", 10, "bold"), fg="#4cd964", bg=TutorialPopup.PANEL_BG, anchor="w") + title_label.pack(fill=tk.X, pady=(2, 4)) + desc_label = tk.Label(content, text=steps[0][2], font=("Arial", 9), fg="#eeeeee", bg=TutorialPopup.PANEL_BG, justify=tk.LEFT, anchor="w", wraplength=310) + desc_label.pack(fill=tk.X) + btn_frame = tk.Frame(content, bg=TutorialPopup.PANEL_BG) + btn_frame.pack(fill=tk.X, pady=(8, 0)) + prev_btn = tk.Button(btn_frame, text="← Prev", command=_prev, state=tk.DISABLED, bg="#4a4a4a", fg="#ffffff", font=("Arial", 8), relief=tk.RAISED, bd=1, padx=6, pady=4) + prev_btn.pack(side=tk.LEFT, padx=(0, 6)) + next_btn = tk.Button(btn_frame, text="Next →", command=_next, bg="#4cd964", fg="#000000", font=("Arial", 8, "bold"), relief=tk.RAISED, bd=1, padx=6, pady=4) + next_btn.pack(side=tk.LEFT, padx=(0, 6)) + skip_btn = tk.Button(btn_frame, text="Skip", command=_skip, bg="#555555", fg="#ffffff", font=("Arial", 8), relief=tk.RAISED, bd=1, padx=6, pady=4) + skip_btn.pack(side=tk.RIGHT) + panel.protocol("WM_DELETE_WINDOW", _skip) + _go(0) + +class ProgressDialog: + """Simple progress dialog for long-running operations""" + def __init__(self, parent, title="Processing", message="Please wait...", show_bar=True): + self.dialog = tk.Toplevel(parent) + self.dialog.title(title) + height = 150 if show_bar else 100 + self.dialog.geometry(f"400x{height}") + self.dialog.configure(bg='#1a1a1a') + 
self.dialog.resizable(False, False) + self.dialog.transient(parent) + self.dialog.grab_set() + + # Center on parent + try: + x = parent.winfo_x() + (parent.winfo_width() - 400) // 2 + y = parent.winfo_y() + (parent.winfo_height() - 150) // 2 + self.dialog.geometry(f"+{x}+{y}") + except: + pass + + # Message label + tk.Label(self.dialog, text=message, font=("Arial", 11), fg="#ffffff", bg='#1a1a1a').pack(pady=(20, 10)) + + self.show_bar = show_bar + if show_bar: + # Progress bar + self.progress = ttk.Progressbar(self.dialog, length=300, mode='determinate', value=0) + self.progress.pack(pady=10, padx=50) + + # Status label + self.status_label = tk.Label(self.dialog, text="0%", font=("Arial", 9), fg="#4cd964", bg='#1a1a1a') + self.status_label.pack(pady=5) + else: + self.progress = None + self.status_label = None + + # Cancel button + self.cancel_requested = False + self.cancel_btn = tk.Button(self.dialog, text="Cancel", command=self.request_cancel, + bg='#ff3b30', fg='#ffffff', font=("Arial", 9, "bold"), + relief=tk.RAISED, bd=2, padx=20, pady=5) + self.cancel_btn.pack(pady=10) + + self.dialog.protocol("WM_DELETE_WINDOW", self.request_cancel) + + def update(self, current, total): + """Update progress (0-100)""" + if not self.dialog.winfo_exists(): + return False + if self.show_bar and self.progress and self.status_label: + percent = int((current / total) * 100) if total > 0 else 0 + self.progress['value'] = percent + self.status_label.config(text=f"{percent}%") + self.dialog.update_idletasks() + return not self.cancel_requested + + def request_cancel(self): + self.cancel_requested = True + self.cancel_btn.config(state=tk.DISABLED, text="Cancelling...") + self.dialog.update_idletasks() + + def close(self): + """Close the progress dialog""" + try: + self.dialog.destroy() + except: + pass + +class UpdateNotificationDialog: + """Dialog for notifying user about app updates""" + def __init__(self, parent, latest_version, download_url): + self.dialog = tk.Toplevel(parent) + 
self.dialog.title("📥 Update Available") + self.dialog.geometry("500x250") + self.dialog.configure(bg='#1a1a1a') + self.dialog.resizable(False, False) + self.dialog.transient(parent) + self.dialog.grab_set() + + # Center on parent + try: + x = parent.winfo_x() + (parent.winfo_width() - 500) // 2 + y = parent.winfo_y() + (parent.winfo_height() - 250) // 2 + self.dialog.geometry(f"+{x}+{y}") + except: + pass + + # Title + tk.Label(self.dialog, text="🎉 Update Available", font=("Arial", 14, "bold"), + fg="#4cd964", bg='#1a1a1a').pack(pady=(20, 10)) + + # Version info + info_text = f"A new version is available!\n\nCurrent: v{APP_VERSION}\nLatest: v{latest_version}\n\nClick 'Download' to visit the releases page." + tk.Label(self.dialog, text=info_text, font=("Arial", 10), fg="#cccccc", bg='#1a1a1a', justify=tk.LEFT).pack(pady=10, padx=20) + + # Buttons frame + btn_frame = tk.Frame(self.dialog, bg='#1a1a1a') + btn_frame.pack(pady=20) + + download_btn = tk.Button(btn_frame, text="📥 Download", command=self.download, + bg='#007aff', fg='#ffffff', font=("Arial", 10, "bold"), + relief=tk.RAISED, bd=2, padx=20, pady=8) + download_btn.pack(side=tk.LEFT, padx=5) + + remind_btn = tk.Button(btn_frame, text="Remind Later", command=self.dialog.destroy, + bg='#4a4a4a', fg='#ffffff', font=("Arial", 10), + relief=tk.RAISED, bd=2, padx=20, pady=8) + remind_btn.pack(side=tk.LEFT, padx=5) + + self.download_url = download_url + + def download(self): + """Open download page in default browser""" + try: + webbrowser.open(self.download_url) + self.dialog.destroy() + except: + messagebox.showerror("Error", "Could not open browser. 
Please visit:\n" + self.download_url) + +class UpdateEchoPopup: + def __init__(self, parent, app, config): + self.parent = parent + self.app = app + self.config = config + self.backup_location = None + + self.popup = tk.Toplevel(parent) + self.popup.title("⚠ Update EchoVR Game Files") + self.popup.geometry("850x500") + self.popup.configure(bg='#1a1a1a') + self.popup.resizable(False, False) + + self.popup.transient(parent) + self.popup.grab_set() + + self.popup.update_idletasks() + x = parent.winfo_x() + (parent.winfo_width() - self.popup.winfo_reqwidth()) // 2 + y = parent.winfo_y() + (parent.winfo_height() - self.popup.winfo_reqheight()) // 2 + self.popup.geometry(f"+{x}+{y}") + + self.setup_ui() + self.refresh_backup_status() + + def setup_ui(self): + title_frame = tk.Frame(self.popup, bg='#1a1a1a') + title_frame.pack(fill=tk.X, padx=20, pady=20) + + warning_icon = "⚠️" + title_label = tk.Label(title_frame, text=f"{warning_icon} WARNING: Update EchoVR", font=("Arial", 14, "bold"), fg="#ff6b6b", bg='#1a1a1a') + title_label.pack() + + warning_text = """This menu allows you to update your EchoVR installation. 
+Always create a backup before proceeding.""" + + warning_label = tk.Label(self.popup, text=warning_text, font=("Arial", 11), fg="#ffffff", bg='#1a1a1a', justify=tk.CENTER, wraplength=650) + warning_label.pack(padx=20, pady=10) + + data_folder = self.config.get('data_folder', 'Not selected') + data_frame = tk.Frame(self.popup, bg='#2a2a2a', relief=tk.RAISED, bd=1) + data_frame.pack(fill=tk.X, padx=20, pady=10) + + tk.Label(data_frame, text="Game Data Folder:", font=("Arial", 10, "bold"), fg="#4cd964", bg='#2a2a2a').pack(anchor="w", padx=10, pady=(10, 0)) + + folder_label = tk.Label(data_frame, text=data_folder, font=("Arial", 9), fg="#cccccc", bg='#2a2a2a', wraplength=620, justify=tk.LEFT) + folder_label.pack(fill=tk.X, padx=10, pady=(0, 10)) + + script_dir = os.path.dirname(os.path.abspath(__file__)) + output_folder = self.config.get('repacked_folder', os.path.join(script_dir, "output-both")) + output_frame = tk.Frame(self.popup, bg='#2a2a2a', relief=tk.RAISED, bd=1) + output_frame.pack(fill=tk.X, padx=20, pady=10) + + tk.Label(output_frame, text="Modified Files Source:", font=("Arial", 10, "bold"), fg="#4cd964", bg='#2a2a2a').pack(anchor="w", padx=10, pady=(10, 0)) + + output_label = tk.Label(output_frame, text=output_folder, font=("Arial", 9), fg="#cccccc", bg='#2a2a2a', wraplength=620, justify=tk.LEFT) + output_label.pack(fill=tk.X, padx=10, pady=(0, 10)) + + backup_frame = tk.Frame(self.popup, bg='#1a1a1a') + backup_frame.pack(fill=tk.X, padx=20, pady=10) + + btn_frame = tk.Frame(backup_frame, bg='#1a1a1a') + btn_frame.pack(pady=10) + + self.create_backup_btn = tk.Button(btn_frame, text="📁 Create Backup", command=self.create_backup, bg='#4a4a4a', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=10) + self.create_backup_btn.pack(side=tk.LEFT, padx=5) + + self.restore_backup_btn = tk.Button(btn_frame, text="🔄 Restore Backup", command=self.restore_backup, bg='#4a4a4a', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, 
bd=2, padx=15, pady=10, state=tk.DISABLED) + self.restore_backup_btn.pack(side=tk.LEFT, padx=5) + + self.update_pkg_btn = tk.Button(btn_frame, text="📦 Update Packages", command=self.start_update_thread, bg='#007aff', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=10) + self.update_pkg_btn.pack(side=tk.LEFT, padx=5) + + self.backup_status = tk.Label(backup_frame, text="Checking backup status...", font=("Arial", 9), fg="#ffcc00", bg='#1a1a1a') + self.backup_status.pack() + + close_frame = tk.Frame(self.popup, bg='#1a1a1a') + close_frame.pack(fill=tk.X, padx=20, pady=20) + + self.close_btn = tk.Button(close_frame, text="Close", command=self.popup.destroy, bg='#4a4a4a', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=30, pady=10) + self.close_btn.pack() + + def log_info(self, message): + if hasattr(self.app, 'log_info'): + self.app.log_info(message) + + def check_backup_exists(self): + backup_folder = self.config.get('backup_folder') + if backup_folder: + backup_folder = os.path.normpath(backup_folder) + if os.path.exists(backup_folder): + self.backup_location = backup_folder + return True + return False + + def refresh_backup_status(self): + if self.check_backup_exists(): + self.backup_status.config(text=f"✓ Backup found: {os.path.basename(self.backup_location)}", fg="#4cd964") + self.restore_backup_btn.config(state=tk.NORMAL) + else: + self.backup_status.config(text="No backup found - create one before updating", fg="#ffcc00") + self.restore_backup_btn.config(state=tk.DISABLED) + + def create_backup(self): + if not self.config.get('data_folder'): + messagebox.showerror("Error", "Please select game data folder first") + return + + backup_path = filedialog.askdirectory(title="Select Backup Location", initialdir=os.path.dirname(self.config['data_folder'])) + + if not backup_path: + return + + try: + timestamp = time.strftime("%Y%m%d_%H%M%S") + backup_folder = os.path.join(backup_path, f"EchoVR_Backup_{timestamp}") 
+ + self.backup_status.config(text="Creating backup...", fg="#ffcc00") + self.popup.update_idletasks() + + # Run in thread to prevent freeze + def backup_task(): + try: + shutil.copytree(self.config['data_folder'], backup_folder) + self.popup.after(0, lambda: self.on_backup_complete(True, backup_folder)) + except Exception as e: + self.popup.after(0, lambda: self.on_backup_complete(False, str(e))) + + threading.Thread(target=backup_task, daemon=True).start() + + except Exception as e: + messagebox.showerror("Error", f"Failed to start backup:\n{str(e)}") + + def on_backup_complete(self, success, result): + if success: + ConfigManager.save_config(backup_folder=result) + self.config['backup_folder'] = result + self.backup_location = result + self.refresh_backup_status() + self.log_info(f"✓ Backup created: {result}") + messagebox.showinfo("Success", f"Backup created successfully at:\n{result}") + else: + messagebox.showerror("Error", f"Failed to create backup:\n{result}") + self.backup_status.config(text="Backup failed", fg="#ff3b30") + + def restore_backup(self): + if not self.backup_location or not os.path.exists(self.backup_location): + messagebox.showerror("Error", "Backup not found") + return + + confirm = messagebox.askyesno("Confirm Restore", f"Restore game files from backup?\n\nBackup: {self.backup_location}\n\nThis will OVERWRITE your current game files.") + + if not confirm: + return + + self.backup_status.config(text="Restoring backup... 
(Do not close)", fg="#ffcc00") + self.restore_backup_btn.config(state=tk.DISABLED) + self.popup.update_idletasks() + + def restore_task(): + try: + if os.path.exists(self.config['data_folder']): + shutil.rmtree(self.config['data_folder']) + shutil.copytree(self.backup_location, self.config['data_folder']) + self.popup.after(0, lambda: self.on_restore_complete(True, self.backup_location)) + except Exception as e: + self.popup.after(0, lambda: self.on_restore_complete(False, str(e))) + + threading.Thread(target=restore_task, daemon=True).start() + + def on_restore_complete(self, success, result): + if success: + self.log_info(f"✓ Game files restored from backup: {result}") + messagebox.showinfo("Success", "Game files restored from backup!") + self.popup.destroy() + else: + messagebox.showerror("Error", f"Failed to restore backup:\n{result}") + self.backup_status.config(text="Restore failed", fg="#ff3b30") + self.restore_backup_btn.config(state=tk.NORMAL) + + def start_update_thread(self): + # Validation checks + script_dir = os.path.dirname(os.path.abspath(__file__)) + output_folder = self.config.get('repacked_folder') + if not output_folder: + output_folder = os.path.join(script_dir, "output-both") + + data_folder = self.config.get('data_folder') + + if not os.path.exists(output_folder): + messagebox.showerror("Error", f"Output folder not found:\n{output_folder}\n\nPlease repack your files first.") + return + + if not data_folder or not os.path.exists(data_folder): + messagebox.showerror("Error", "Game data folder not found.\nPlease select your EchoVR data folder first.") + return + + packages_path = os.path.join(output_folder, "packages") + manifests_path = os.path.join(output_folder, "manifests") + + if not os.path.exists(packages_path) or not os.path.exists(manifests_path): + messagebox.showerror("Error", f"Required folders not found in:\n{output_folder}\n\nPlease repack your files first.") + return + + if not self.backup_location: + warning_result = 
messagebox.askyesno("⚠ WARNING - No Backup Found", f"No backup found! This operation will OVERWRITE your game files.\n\nContinue WITHOUT a backup?") + if not warning_result: + return + + confirm = messagebox.askyesno("Update Game Files", f"This will UPDATE your EchoVR installation.\n\nSource: {output_folder}\nTarget: {data_folder}\n\nOperation:\n1. Move files from output-both to game folder\n2. Wipe output-both folder\n\nContinue?") + + if not confirm: + return + + # Disable buttons + self.update_pkg_btn.config(state=tk.DISABLED, text="Updating...") + self.close_btn.config(state=tk.DISABLED) + + # Show progress dialog + progress = ProgressDialog(self.popup, "Updating Game Files", "Moving files to game folder...") + + # Start Thread + threading.Thread(target=self.update_packages_thread, args=(output_folder, data_folder, progress), daemon=True).start() + + def update_packages_thread(self, output_folder, data_folder, progress): + try: + files_moved = 0 + total_files = 0 + + # Count total files first + for folder in ['packages', 'manifests']: + src_path = os.path.join(output_folder, folder) + if os.path.exists(src_path): + total_files += len([f for f in os.listdir(src_path) if os.path.isfile(os.path.join(src_path, f))]) + + if total_files == 0: + total_files = 1 # Avoid division by zero + + # Move files + for folder in ['packages', 'manifests']: + src_path = os.path.join(output_folder, folder) + dst_path = os.path.join(data_folder, folder) + + if os.path.exists(src_path): + os.makedirs(dst_path, exist_ok=True) + + for filename in os.listdir(src_path): + if not progress.update(files_moved, total_files): + self.popup.after(0, lambda: self.on_update_complete(False, "Operation cancelled")) + return + + src_file = os.path.join(src_path, filename) + dst_file = os.path.join(dst_path, filename) + + if os.path.isfile(src_file): + shutil.move(src_file, dst_file) + files_moved += 1 + + progress.update(total_files, total_files) + + try: + for folder in ['packages', 'manifests']: + 
folder_path = os.path.join(output_folder, folder) + if os.path.exists(folder_path): + shutil.rmtree(folder_path) + except Exception as wipe_error: + self.popup.after(0, lambda: self.log_info(f"⚠ Could not completely wipe output-both: {wipe_error}")) + + self.popup.after(0, lambda: self.on_update_complete(True, files_moved, progress)) + + except Exception as e: + self.popup.after(0, lambda: self.on_update_complete(False, str(e), progress)) + + def on_update_complete(self, success, result, progress=None): + if progress: + progress.close() + + self.update_pkg_btn.config(state=tk.NORMAL, text="📦 Update Packages") + self.close_btn.config(state=tk.NORMAL) + + if success: + self.log_info(f"✓ Moved {result} files to game folder") + self.log_info(f"✓ Wiped output-both folder") + messagebox.showinfo("Success", f"Successfully updated game files!\n\nFiles moved: {result}") + self.popup.destroy() + else: + messagebox.showerror("Error", f"Failed to update packages:\n{result}") + self.backup_status.config(text="Update failed", fg="#ff3b30") + +class ADBPlatformTools: + @staticmethod + def get_safe_install_directory(): + script_dir = os.path.dirname(os.path.abspath(__file__)) + install_dir = os.path.join(script_dir, "platform-tools") + return install_dir + + @staticmethod + def install_platform_tools(): + import platform + system = platform.system().lower() + + download_urls = { + 'windows': 'https://dl.google.com/android/repository/platform-tools-latest-windows.zip', + 'linux': 'https://dl.google.com/android/repository/platform-tools-latest-linux.zip', + 'darwin': 'https://dl.google.com/android/repository/platform-tools-latest-darwin.zip' + } + + url = download_urls.get(system) + if not url: + return False, f"Unsupported platform: {system}" + + script_dir = os.path.dirname(os.path.abspath(__file__)) + install_base = os.path.join(script_dir, "platform-tools") + download_path = os.path.join(script_dir, "platform-tools-download.zip") + + try: + os.makedirs(install_base, 
exist_ok=True) + + urllib.request.urlretrieve(url, download_path) + + with zipfile.ZipFile(download_path, 'r') as zip_ref: + zip_ref.extractall(install_base) + + try: + os.remove(download_path) + except: + pass + + adb_path = os.path.join(install_base, "platform-tools", "adb.exe" if system == 'windows' else "adb") + if not os.path.exists(adb_path): + adb_path = os.path.join(install_base, "adb.exe" if system == 'windows' else "adb") + + if os.path.exists(adb_path): + if system != 'windows': + try: + os.chmod(adb_path, 0o755) + except: + pass + + adb_dir = os.path.dirname(adb_path) + os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH'] + + return True, f"Platform Tools installed to: {adb_dir}" + else: + return False, "ADB executable not found after extraction" + + except Exception as e: + return False, f"Installation failed: {str(e)}" + +class ADBManager: + @staticmethod + def find_adb(): + safe_dir = ADBPlatformTools.get_safe_install_directory() + local_paths = [ + os.path.join(safe_dir, "platform-tools", "adb.exe"), + os.path.join(safe_dir, "platform-tools", "adb"), + os.path.join(safe_dir, "adb.exe"), + os.path.join(safe_dir, "adb") + ] + + script_dir = os.path.dirname(os.path.abspath(__file__)) + local_paths.extend([ + os.path.join(script_dir, "platform-tools", "adb.exe"), + os.path.join(script_dir, "platform-tools", "adb"), + os.path.join(script_dir, "adb.exe"), + os.path.join(script_dir, "adb") + ]) + + for path in local_paths: + if os.path.exists(path): + return path + + try: + result = run_hidden_command(['adb', 'version'], timeout=10) + if result.returncode == 0: + return 'adb' + except: + pass + + return None + + @staticmethod + def check_adb(): + adb_path = ADBManager.find_adb() + if not adb_path: + return False, "ADB not found", None + + try: + try: + run_hidden_command([adb_path, 'kill-server'], timeout=5) + except: + pass + + result = run_hidden_command([adb_path, 'devices'], timeout=10) + if result.returncode == 0: + lines = [line for line in 
result.stdout.strip().split('\n') if '\tdevice' in line] + if lines: + devices = [] + for line in lines: + device_id = line.split('\t')[0] + info_result = run_hidden_command([adb_path, '-s', device_id, 'shell', 'getprop', 'ro.product.model'], timeout=10) + model = info_result.stdout.strip() if info_result.returncode == 0 else "Unknown" + devices.append(f"{device_id} ({model})") + + return True, f"Connected: {', '.join(devices)}", adb_path + else: + return True, "No devices connected", adb_path + return False, "ADB command failed", adb_path + except subprocess.TimeoutExpired: + return False, "ADB timeout", adb_path + except Exception as e: + return False, f"ADB error: {str(e)}", adb_path + + @staticmethod + def push_to_quest(local_folder, quest_path): + adb_path = ADBManager.find_adb() + if not adb_path: + return False, "ADB not available" + + try: + # Optimize: Attempt to push the directory contents at once first + # "adb push local_folder/. remote_folder/" + # This is vastly faster than iterating files. + + # Ensure remote dir exists + run_hidden_command([adb_path, 'shell', 'mkdir', '-p', quest_path], timeout=30) + + # Use trailing /. 
to push contents + cmd = [adb_path, 'push', local_folder + "/.", quest_path + "/"] + result = run_hidden_command(cmd, timeout=600) # 10 minute timeout + + if result.returncode == 0: + return True, "Successfully pushed all items (Bulk Mode)" + + # Fallback to file-by-file if bulk fails (rare but safer) + success_count = 0 + total_count = 0 + errors = [] + + for item in os.listdir(local_folder): + item_path = os.path.join(local_folder, item) + if os.path.exists(item_path): + total_count += 1 + result = run_hidden_command([adb_path, 'push', item_path, quest_path], timeout=60) + + if result.returncode == 0: + success_count += 1 + else: + error_msg = result.stderr.strip() if result.stderr else "Unknown error" + errors.append(f"{item}: {error_msg}") + + if success_count == total_count: + return True, f"Successfully pushed all {success_count} items" + elif success_count > 0: + return True, f"Partially successful: {success_count}/{total_count}. Errors: {len(errors)}" + else: + return False, f"Failed to push items. 
Errors: {len(errors)}" + + except subprocess.TimeoutExpired: + return False, "Push operation timed out" + except Exception as push_error: + return False, f"Push error: {str(push_error)}" + + @staticmethod + def install_adb_tools(): + return ADBPlatformTools.install_platform_tools() + +class ASTCTools: + @staticmethod + def load_texture_mapping(mapping_file): + if not os.path.exists(mapping_file): + return {} + try: + with open(mapping_file, 'r', encoding='utf-8') as f: + mapping = json.load(f) + return mapping + except Exception as e: + print(f"Mapping load error: {e}") + return {} + + @staticmethod + def find_texture_info(texture_name, mapping): + if texture_name in mapping: + return mapping[texture_name] + suffixes = ['_d', '_n', '_s', '_e', '_a', '_r', '_m', '_h'] + for suffix in suffixes: + if texture_name.endswith(suffix): + base_name = texture_name[:-len(suffix)] + if base_name in mapping: + return mapping[base_name] + return None + + @staticmethod + def wrap_raw_astc(raw_path, wrapped_path, width, height, block_width=4, block_height=4): + try: + magic = struct.pack(" 1000: + if cache_key: + DECODE_CACHE[cache_key] = { + 'width': width, 'height': height, + 'block_w': block_w, 'block_h': block_h, + 'original_size': raw_file.stat().st_size + } + return True + else: + output_file.unlink() + return False + else: + if output_file.exists(): + output_file.unlink() + return False + except Exception: + if output_file.exists(): + output_file.unlink() + return False + finally: + if temp_astc and temp_astc.exists(): + try: temp_astc.unlink() + except: pass + + @staticmethod + def get_common_block_sizes(): + return [(4, 4), (8, 8), (6, 6), (5, 5), (10, 10), (12, 12), (5, 4), (6, 5), (8, 5), (8, 6), (10, 5), (10, 6), (10, 8)] + + @staticmethod + def decode_with_mapping(astcenc_path, texture_file, output_path, mapping): + texture_name = texture_file.stem + texture_info = ASTCTools.find_texture_info(texture_name, mapping) + if not texture_info: return False + + pcvr_width = 
texture_info['width'] + pcvr_height = texture_info['height'] + + for block_w, block_h in ASTCTools.get_common_block_sizes(): + output_file = output_path / f"{texture_file.stem}.png" + if ASTCTools.decode_with_config(astcenc_path, texture_file, output_file, pcvr_width, pcvr_height, block_w, block_h, texture_name): + return True + return False + + @staticmethod + def brute_force_decode(astcenc_path, texture_file, output_path): + configurations = [ + (2048, 1024, 8, 8, "2Kx1K_8x8"), (2048, 1024, 6, 6, "2Kx1K_6x6"), (2048, 1024, 4, 4, "2Kx1K_4x4"), + (1024, 512, 8, 8, "1Kx512_8x8"), (1024, 512, 6, 6, "1Kx512_6x6"), (1024, 512, 4, 4, "1Kx512_4x4"), + (2048, 2048, 8, 8, "2K_square_8x8"), (1024, 1024, 8, 8, "1K_square_8x8"), + ] + file_size = texture_file.stat().st_size + + for width, height, block_w, block_h, desc in configurations: + expected_size = ASTCTools.calculate_astc_size(width, height, block_w, block_h) + if abs(expected_size - file_size) > 100: + continue + output_file = output_path / f"{texture_file.stem}_BF_{desc}.png" + if ASTCTools.decode_with_config(astcenc_path, texture_file, output_file, width, height, block_w, block_h, texture_file.stem): + return True + return False + + @staticmethod + def calculate_astc_size(width, height, block_w, block_h): + blocks_x = (width + block_w - 1) // block_w + blocks_y = (height + block_h - 1) // block_h + return blocks_x * blocks_y * 16 + + @staticmethod + def pad_to_size(data, target_size): + current_size = len(data) + if current_size < target_size: + padding = b'\x00' * (target_size - current_size) + return data + padding + elif current_size > target_size: + return data[:target_size] + else: + return data + + @staticmethod + def encode_texture(astcenc_path, input_png, output_file, width, height, block_w, block_h, quality="medium", target_size=None): + temp_astc = None + try: + with tempfile.NamedTemporaryFile(suffix='.astc', delete=False) as f: + temp_astc = Path(f.name) + + result = run_hidden_command([ + 
str(astcenc_path), "-cl", str(input_png), str(temp_astc), f"{block_w}x{block_h}", f"-{quality}", "-silent" + ], timeout=30) + + if result.returncode != 0: return False + + with open(temp_astc, 'rb') as f: + astc_data = f.read() + + if len(astc_data) > 16 and astc_data[:4] == b'\x13\xAB\xA1\x5C': + raw_data = astc_data[16:] + else: + raw_data = astc_data + + if target_size: + expected_size = ASTCTools.calculate_astc_size(width, height, block_w, block_h) + if len(raw_data) != target_size: + raw_data = ASTCTools.pad_to_size(raw_data, target_size) + + output_file.write_bytes(raw_data) + return True + except subprocess.TimeoutExpired: + return False + except Exception: + return False + finally: + if temp_astc and temp_astc.exists(): + temp_astc.unlink(missing_ok=True) + + @staticmethod + def encode_with_cache(astcenc_path, input_png, output_file, texture_name, quality="medium"): + if texture_name not in DECODE_CACHE: return False + config = DECODE_CACHE[texture_name] + return ASTCTools.encode_texture(astcenc_path, input_png, output_file, config['width'], config['height'], config['block_w'], config['block_h'], quality, config['original_size']) + + @staticmethod + def save_decode_cache(cache_file): + try: + with open(cache_file, 'w', encoding='utf-8') as f: + json.dump(DECODE_CACHE, f, indent=2) + except: pass + + @staticmethod + def load_decode_cache(cache_file): + global DECODE_CACHE + if os.path.exists(cache_file): + try: + with open(cache_file, 'r', encoding='utf-8') as f: + DECODE_CACHE = json.load(f) + except: pass + +class EVRToolsManager: + def __init__(self): + self.tool_path = self.find_tool() + + def find_tool(self): + tool_names = ["evrFileTools.exe", "echoModifyFiles.exe", "echoFileTools.exe"] + for name in tool_names: + path = get_tool_path(name) + if os.path.exists(path): + return path + return None + + def extract_package(self, data_dir, package_name, output_dir, export_type=""): + if not self.tool_path: + return False, "evrFileTools.exe not found" + + 
try: + cmd = [ + self.tool_path, "-mode", "extract", "-package", package_name, + "-data", data_dir, "-output", output_dir, + "-force" + ] + if export_type: + cmd.extend(["--export", export_type]) + cmd.extend(["-export", export_type]) + + result = run_hidden_command(cmd, cwd=os.path.dirname(self.tool_path), timeout=2000) + + if result.returncode == 0: + return True, f"Extracted to {output_dir}" + else: + error_msg = result.stderr if result.stderr else result.stdout + return False, f"Extraction failed: {error_msg}" + except subprocess.TimeoutExpired: + return False, "Extraction timeout" + except Exception as e: + return False, f"Extraction error: {str(e)}" + + def repack_package(self, output_dir, package_name, data_dir, input_dir): + if not self.tool_path: + return False, "evrFileTools.exe not found" + + try: + cmd = [ + self.tool_path, "-mode", "build", + "-package", package_name, + "-data", data_dir, + "-input", input_dir, "-output", output_dir, + "-force" + ] + + result = run_hidden_command(cmd, cwd=os.path.dirname(self.tool_path), timeout=2000) + + if result.returncode == 0: + return True, f"Repacked to {output_dir}" + else: + error_msg = result.stderr if result.stderr else result.stdout + return False, f"Repacking failed: {error_msg}" + except subprocess.TimeoutExpired: + return False, "Repacking timeout" + except Exception as e: + return False, f"Repacking error: {str(e)}" + +class DDSHandler: + DXGI_FORMAT = { + 0: "DXGI_FORMAT_UNKNOWN", 26: "DXGI_FORMAT_R11G11B10_FLOAT", 61: "DXGI_FORMAT_R8_UNORM", + 71: "DXGI_FORMAT_BC1_UNORM", 77: "DXGI_FORMAT_BC3_UNORM", + 80: "DXGI_FORMAT_BC4_UNORM", 83: "DXGI_FORMAT_BC5_UNORM", + 91: "DXGI_FORMAT_B8G8R8A8_UNORM_SRGB", + 87: "DXGI_FORMAT_B8G8R8A8_TYPELESS", + } + + @staticmethod + def get_dds_info(file_path): + try: + with open(file_path, 'rb') as f: + signature = f.read(4) + if signature != b'DDS ': return None + header = f.read(124) + if len(header) < 124: return None + + height = struct.unpack('= 20: + format_code = 
struct.unpack(' ' + cmd = [texconv_path, "encode", temp_png, out_dds] + result = run_hidden_command(cmd, timeout=60) + + if result.returncode != 0: + return None, 0 # Conversion failed + + if not os.path.isfile(out_dds): + return None, 0 # Output file not created + + size = os.path.getsize(out_dds) + base = os.path.splitext(os.path.basename(source_path))[0] + final_path = os.path.join(tempfile.gettempdir(), f"pcvr_replace_{os.getpid()}_{base}.dds") + shutil.copy2(out_dds, final_path) + return final_path, size + except Exception: + return None, 0 + + @staticmethod + def hex_edit_file_size(file_path, new_size): + try: + with open(file_path, 'r+b') as f: + data = bytearray(f.read()) + if len(data) >= 248: + file_size_bytes = struct.pack(' {'width': int, 'height': int, 'pixels': int, 'size': int} + self.sort_mode = "name" # name, width, height, pixels + + self.setup_ui() + self.load_page(0) + + def setup_ui(self): + top_frame = tk.Frame(self.window, bg='#2a2a2a', height=60) + top_frame.pack(fill=tk.X) + + # Info and sort controls + info_label = tk.Label(top_frame, text="Click an image to select it", fg='#cccccc', bg='#2a2a2a', font=("Arial", 9)) + info_label.pack(side=tk.LEFT, padx=10, pady=5) + + sort_label = tk.Label(top_frame, text="Sort by:", fg='#ffffff', bg='#2a2a2a', font=("Arial", 9)) + sort_label.pack(side=tk.RIGHT, padx=(10, 5), pady=5) + + self.sort_var = tk.StringVar(value="name") + self.sort_dropdown = ttk.Combobox(top_frame, textvariable=self.sort_var, + values=["Name", "Pixels (Large to Small)", "Pixels (Small to Large)"], + state="readonly", width=20, font=("Arial", 9)) + self.sort_dropdown.pack(side=tk.RIGHT, padx=(0, 10), pady=5) + self.sort_dropdown.bind('<>', self.on_sort_change) + + # Navigation Frame (Bottom) + nav_frame = tk.Frame(self.window, bg='#2a2a2a', height=50) + nav_frame.pack(side=tk.BOTTOM, fill=tk.X) + + self.prev_btn = tk.Button(nav_frame, text="<< Previous", command=self.prev_page, + bg='#4a4a4a', fg='#ffffff', font=("Arial", 9, 
"bold"), relief=tk.RAISED, bd=1, state=tk.DISABLED) + self.prev_btn.pack(side=tk.LEFT, padx=20, pady=10) + + self.page_label = tk.Label(nav_frame, text=f"Page 1 / {self.total_pages}", font=("Arial", 10, "bold"), fg='#ffffff', bg='#2a2a2a') + self.page_label.pack(side=tk.LEFT, expand=True) + + self.next_btn = tk.Button(nav_frame, text="Next >>", command=self.next_page, + bg='#4a4a4a', fg='#ffffff', font=("Arial", 9, "bold"), relief=tk.RAISED, bd=1) + self.next_btn.pack(side=tk.RIGHT, padx=20, pady=10) + + self.canvas = tk.Canvas(self.window, bg='#1a1a1a', highlightthickness=0) + self.scrollbar = ttk.Scrollbar(self.window, orient="vertical", command=self.canvas.yview) + self.scroll_frame = tk.Frame(self.canvas, bg='#1a1a1a') + + self.scroll_frame.bind("", lambda e: self.canvas.configure(scrollregion=self.canvas.bbox("all"))) + self.canvas.create_window((0, 0), window=self.scroll_frame, anchor="nw") + self.canvas.configure(yscrollcommand=self.scrollbar.set) + + self.canvas.pack(side="left", fill="both", expand=True) + self.scrollbar.pack(side="right", fill="y") + self.canvas.bind_all("", self._on_mousewheel) + + def _on_mousewheel(self, event): + try: self.canvas.yview_scroll(int(-1*(event.delta/120)), "units") + except: pass + + def on_click(self, filename): + self.app.select_texture_by_name(filename) + self.parent.lift() + + def prev_page(self): + if self.current_page > 0: + self.load_page(self.current_page - 1) + + def next_page(self): + if self.current_page < self.total_pages - 1: + self.load_page(self.current_page + 1) + + def load_page(self, page_num): + self.current_page = page_num + self.loading_generation += 1 + current_gen = self.loading_generation + + # Update controls + self.page_label.config(text=f"Page {page_num + 1} / {self.total_pages}") + self.prev_btn.config(state=tk.NORMAL if page_num > 0 else tk.DISABLED) + self.next_btn.config(state=tk.NORMAL if page_num < self.total_pages - 1 else tk.DISABLED) + + # Clear grid + for widget in 
self.scroll_frame.winfo_children(): + widget.destroy() + self.loaded_images.clear() + self.canvas.yview_moveto(0) + + start_idx = page_num * self.TEXTURES_PER_PAGE + end_idx = min(start_idx + self.TEXTURES_PER_PAGE, len(self.image_files)) + + # Show loading indicator + loading_lbl = tk.Label(self.scroll_frame, text="Loading...", fg="white", bg="#1a1a1a") + loading_lbl.grid(row=0, column=0, columnspan=self.GRID_COLS, pady=20) + + threading.Thread(target=self._load_page_worker, args=(start_idx, end_idx, current_gen, loading_lbl), daemon=True).start() + + def _load_page_worker(self, start_idx, end_idx, generation, loading_lbl): + for idx in range(start_idx, end_idx): + if not self.window.winfo_exists() or self.loading_generation != generation: + return + + filename = self.image_files[idx] + file_path = os.path.join(self.folder_path, filename) + + try: + img = TextureLoader.load_texture(file_path, self.is_quest) + if img: + img.thumbnail(self.THUMB_SIZE) + + # Calculate row/col relative to this page + rel_idx = idx - start_idx + row = rel_idx // self.GRID_COLS + col = rel_idx % self.GRID_COLS + + self.window.after(0, lambda i=img, f=filename, r=row, c=col: self.add_thumbnail(i, f, r, c)) + except Exception: + pass + + self.window.after(0, lambda: loading_lbl.destroy()) + + def add_thumbnail(self, img, filename, row, col): + """Add a thumbnail to the grid""" + if not self.window.winfo_exists(): + return + + try: + # Store texture resolution info + self.texture_info[filename] = { + 'width': img.width, + 'height': img.height, + 'pixels': img.width * img.height, + 'size': os.path.getsize(os.path.join(self.folder_path, filename)) + } + + photo = ImageTk.PhotoImage(img) + self.loaded_images[filename] = photo + + frame = tk.Frame(self.scroll_frame, bg='#333333', bd=1, relief=tk.SOLID) + frame.grid(row=row, column=col, padx=4, pady=4, sticky='nsew') + + btn = tk.Button(frame, image=photo, command=lambda f=filename: self.on_click(f), bg='#1a1a1a', borderwidth=0) + btn.image = 
photo + btn.pack() + + label = tk.Label(frame, text=filename[:12]+"...", font=("Arial", 8), fg='#aaaaaa', bg='#333333') + label.pack(fill=tk.X) + except Exception: + pass + + def on_sort_change(self, event=None): + """Handle sort mode change""" + sort_selection = self.sort_var.get() + + # Sort image_files based on selected mode + if sort_selection == "Name": + self.image_files.sort() + elif sort_selection == "Pixels (Large to Small)": + self.image_files.sort(key=lambda f: self.texture_info.get(f, {}).get('pixels', 0), reverse=True) + elif sort_selection == "Pixels (Small to Large)": + self.image_files.sort(key=lambda f: self.texture_info.get(f, {}).get('pixels', 0), reverse=False) + + # Reload page 0 + self.load_page(0) + +class EchoVRTextureViewer: + def __init__(self, root): + self.root = root + self.root.title("EchoVR Texture Editor - PCVR & Quest Support") + self.root.geometry("1200x800") + self.root.minsize(800, 600) + + self.colors = { + 'bg_dark': '#0a0a0a', 'bg_medium': '#1a1a1a', 'bg_light': '#2a2a2a', + 'accent_green': '#4cd964', 'accent_blue': '#007aff', 'accent_orange': '#ff9500', + 'accent_red': '#ff3b30', 'text_light': '#ffffff', 'text_muted': '#cccccc', + 'success': '#4cd964', 'warning': '#ffcc00', 'error': '#ff3b30' + } + + self.root.configure(bg=self.colors['bg_dark']) + self.config = ConfigManager.load_config() + self.output_folder = self.config.get('output_folder') + self.pcvr_input_folder = self.config.get('pcvr_input_folder') + self.quest_input_folder = self.config.get('quest_input_folder') + self.data_folder = self.config.get('data_folder') + self.extracted_folder = self.config.get('extracted_folder') + self.repacked_folder = self.config.get('repacked_folder') + + self.package_name = None + self.evr_tools = EVRToolsManager() + self.textures_folder = None + self.corresponding_folder = None + self.current_texture = None + self.replacement_texture = None + self.original_info = None + self.replacement_info = None + self.replacement_size = None + 
self.is_quest_textures = False + self.is_pcvr_textures = False + self.texture_cache = {} + self.all_textures = [] + self.filtered_textures = [] + self.is_downloading = False + + self.ensure_settings_folders() + self.setup_ui() + self.auto_detect_folders() + self.check_external_tools() + + if self.output_folder and os.path.exists(self.output_folder): + self.set_output_folder(self.output_folder) + if self.data_folder and os.path.exists(self.data_folder): + self.set_data_folder(self.data_folder) + if self.extracted_folder and os.path.exists(self.extracted_folder): + self.set_extracted_folder(self.extracted_folder) + + # Save defaults to config if they were missing + ConfigManager.save_config(**self.config) + + def ensure_settings_folders(self): + base_dir = get_base_dir() + settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) + + folders = [ + "input-pcvr", "input-quest", + "pcvr-extracted", "quest-extracted", + "output-both", "texture_cache" + ] + + for folder in folders: + path = os.path.join(settings_dir, folder) + if not os.path.exists(path): + try: + os.makedirs(path) + except: pass + + def check_external_tools(self): + """Check if external tools are runnable and warn about missing DLLs""" + tools = [ + ("texconv.exe", "Texture Converter"), + ("evrtools.exe", "EVR Tools") + ] + + for tool_name, desc in tools: + path = get_tool_path(tool_name) + if os.path.exists(path): + try: + # Run with no args. texconv exits 1 normally. 
+ # If DLLs are missing, Windows returns 0xC0000135 (-1073741515) + cmd = [path] + if sys.platform == 'win32': + result = subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, + creationflags=subprocess.CREATE_NO_WINDOW) + + # Check for STATUS_DLL_NOT_FOUND + if result.returncode == 3221225781 or result.returncode == -1073741515: + self.log_info(f"❌ {desc} ({tool_name}) is missing DLLs!") + self.log_info(f" Please copy libsquish-0.dll, libstdc++-6.dll,") + self.log_info(f" and libgcc_s_seh-1.dll to the same folder as {tool_name}") + except Exception: + pass + + def auto_detect_folders(self): + base_dir = get_base_dir() + settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) + + pcvr_folder = os.path.join(settings_dir, "input-pcvr") + if os.path.exists(pcvr_folder): + self.pcvr_input_folder = pcvr_folder + self.log_info(f"Auto-detected PCVR input folder: {pcvr_folder}") + + quest_folder = os.path.join(settings_dir, "input-quest") + if os.path.exists(quest_folder): + self.quest_input_folder = quest_folder + self.log_info(f"Auto-detected Quest input folder: {quest_folder}") + + output_both = os.path.join(settings_dir, "output-both") + if os.path.exists(output_both): + self.repacked_folder = output_both + self.log_info(f"Auto-detected output-both folder: {output_both}") + + def setup_ui(self): + self.root.columnconfigure(0, weight=1) + self.root.rowconfigure(0, weight=1) + + main_frame = tk.Frame(self.root, bg=self.colors['bg_dark']) + main_frame.grid(row=0, column=0, sticky='nsew', padx=10, pady=10) + main_frame.columnconfigure(1, weight=1) + main_frame.rowconfigure(4, weight=1) + + header_frame = tk.Frame(main_frame, bg=self.colors['bg_dark']) + header_frame.grid(row=0, column=0, columnspan=3, sticky='ew', pady=(0, 10)) + + self.tutorial_btn = tk.Button(header_frame, text="📚 Tutorial", command=lambda: TutorialPopup.show(self.root, self), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, 
bd=2, padx=15, pady=8) + self.tutorial_btn.pack(side=tk.LEFT, padx=(0, 5)) + + self.check_updates_btn = tk.Button(header_frame, text="🔄 Check Updates", command=self.check_app_updates, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=12, pady=8) + self.check_updates_btn.pack(side=tk.LEFT, padx=(0, 10)) + + title_label = tk.Label(header_frame, text="ECHO VR TEXTURE EDITOR", font=("Arial", 16, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark']) + title_label.pack(side=tk.LEFT, expand=True) + + self.update_echo_btn = tk.Button(header_frame, text="⚠ Update EchoVR", command=lambda: UpdateEchoPopup(self.root, self, self.config), bg=self.colors['accent_red'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=8) + self.update_echo_btn.pack(side=tk.RIGHT, padx=(10, 0)) + + self.status_label = tk.Label(main_frame, text="Welcome to EchoVR Texture Editor", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) + self.status_label.grid(row=1, column=0, columnspan=3, sticky='ew', pady=(0, 10)) + + self.platform_label = tk.Label(main_frame, text="Platform: Not detected", font=("Arial", 10, "bold"), fg=self.colors['warning'], bg=self.colors['bg_dark']) + self.platform_label.grid(row=2, column=0, columnspan=3, sticky='ew', pady=(0, 10)) + + evr_frame = tk.LabelFrame(main_frame, text="EVR TOOLS INTEGRATION", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) + evr_frame.grid(row=3, column=0, columnspan=3, sticky='ew', pady=(0, 10)) + evr_frame.columnconfigure(1, weight=1) + + tk.Label(evr_frame, text="Data Folder:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).grid(row=0, column=0, sticky='w', padx=10, pady=5) + + self.data_folder_label = tk.Label(evr_frame, text="Not selected", font=("Arial", 9), fg=self.colors['text_muted'], 
bg=self.colors['bg_dark']) + self.data_folder_label.grid(row=0, column=1, sticky='w', padx=5, pady=5) + + self.data_folder_btn = tk.Button(evr_frame, text="Select", command=self.select_data_folder, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, padx=10, pady=3) + self.data_folder_btn.grid(row=0, column=2, padx=10, pady=5) + + tk.Label(evr_frame, text="Extracted Folder:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).grid(row=1, column=0, sticky='w', padx=10, pady=5) + + self.extracted_folder_label = tk.Label(evr_frame, text="Not selected", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) + self.extracted_folder_label.grid(row=1, column=1, sticky='w', padx=5, pady=5) + + self.extracted_folder_btn = tk.Button(evr_frame, text="Select", command=self.select_extracted_folder, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, padx=10, pady=3) + self.extracted_folder_btn.grid(row=1, column=2, padx=10, pady=5) + + button_frame = tk.Frame(evr_frame, bg=self.colors['bg_dark']) + button_frame.grid(row=2, column=0, columnspan=3, pady=10) + + self.extract_btn = tk.Button(button_frame, text="Extract Package", command=self.extract_package, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=20, pady=8, state=tk.DISABLED) + self.extract_btn.pack(side=tk.LEFT, padx=5) + + self.repack_btn = tk.Button(button_frame, text="Repack Modified", command=self.repack_package, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=20, pady=8, state=tk.DISABLED) + self.repack_btn.pack(side=tk.LEFT, padx=5) + + self.evr_status_label = tk.Label(evr_frame, text="Ready", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) + self.evr_status_label.grid(row=3, column=0, columnspan=3, pady=(0, 10)) + 
+ content_frame = tk.Frame(main_frame, bg=self.colors['bg_dark']) + content_frame.grid(row=4, column=0, columnspan=3, sticky='nsew') + content_frame.columnconfigure(0, weight=1) + content_frame.columnconfigure(1, weight=2) + content_frame.columnconfigure(2, weight=2) + content_frame.rowconfigure(0, weight=1) + + left_frame = tk.LabelFrame(content_frame, text="AVAILABLE TEXTURES", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) + left_frame.grid(row=0, column=0, sticky='nsew', padx=(0, 5)) + left_frame.columnconfigure(0, weight=1) + left_frame.rowconfigure(1, weight=1) + + search_frame = tk.Frame(left_frame, bg=self.colors['bg_dark']) + search_frame.grid(row=0, column=0, sticky='ew', padx=5, pady=5) + + tk.Label(search_frame, text="Search:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).pack(side=tk.LEFT, padx=(0, 5)) + + self.search_var = tk.StringVar() + self.search_entry = tk.Entry(search_frame, textvariable=self.search_var, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), insertbackground=self.colors['text_light']) + self.search_entry.pack(side=tk.LEFT, fill=tk.X, expand=True, padx=(0, 5)) + self.search_entry.bind('', self.filter_textures) + + clear_btn = tk.Button(search_frame, text="X", command=self.clear_search, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, width=3) + clear_btn.pack(side=tk.LEFT) + + # Grid View Button + self.grid_view_btn = tk.Button(left_frame, text="View Texture Grid", command=self.open_grid_view, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2) + self.grid_view_btn.grid(row=2, column=0, sticky='ew', padx=5, pady=5) + + list_frame = tk.Frame(left_frame, bg=self.colors['bg_dark']) + list_frame.grid(row=1, column=0, sticky='nsew', padx=5, pady=(0, 5)) + list_frame.columnconfigure(0, weight=1) + 
list_frame.rowconfigure(0, weight=1) + + # EXTENDED selectmode for multi-select + self.file_list = tk.Listbox(list_frame, bg=self.colors['bg_light'], fg=self.colors['text_light'], selectbackground=self.colors['accent_green'], selectforeground=self.colors['text_light'], font=("Arial", 9), relief=tk.SUNKEN, bd=1, selectmode=tk.EXTENDED) + + scrollbar = tk.Scrollbar(list_frame, bg=self.colors['bg_light']) + self.file_list.configure(yscrollcommand=scrollbar.set) + scrollbar.config(command=self.file_list.yview) + + self.file_list.grid(row=0, column=0, sticky='nsew') + scrollbar.grid(row=0, column=1, sticky='ns') + self.file_list.bind('<>', self.on_texture_selected) + self.file_list.bind('', self._on_listbox_scroll) + self.file_list.bind('', self._on_listbox_scroll) # Linux scroll up + self.file_list.bind('', self._on_listbox_scroll) # Linux scroll down + + # Track listbox scroll state for lazy loading + self.listbox_visible_end = 500 # Initial visible items + + middle_frame = tk.LabelFrame(content_frame, text="ORIGINAL TEXTURE", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) + middle_frame.grid(row=0, column=1, sticky='nsew', padx=5) + middle_frame.columnconfigure(0, weight=1) + middle_frame.rowconfigure(0, weight=1) + + self.original_canvas = tk.Canvas(middle_frame, bg=self.colors['bg_medium']) + self.original_canvas.grid(row=0, column=0, sticky='nsew') + + right_frame = tk.LabelFrame(content_frame, text="REPLACEMENT TEXTURE", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) + right_frame.grid(row=0, column=2, sticky='nsew', padx=(5, 0)) + right_frame.columnconfigure(0, weight=1) + right_frame.rowconfigure(0, weight=1) + + self.replacement_canvas = tk.Canvas(right_frame, bg=self.colors['bg_medium']) + self.replacement_canvas.grid(row=0, column=0, sticky='nsew') + self.replacement_canvas.bind("", self.browse_replacement_texture) + + button_panel = 
tk.Frame(main_frame, bg=self.colors['bg_dark']) + button_panel.grid(row=5, column=0, columnspan=3, sticky='ew', pady=(10, 0)) + + adb_frame = tk.Frame(button_panel, bg=self.colors['bg_dark']) + adb_frame.pack(side=tk.LEFT, fill=tk.Y) + + self.install_adb_btn = tk.Button(adb_frame, text="Install ADB Tools", command=self.install_adb_tools, bg=self.colors['accent_orange'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) + self.install_adb_btn.pack(side=tk.LEFT, padx=5) + + self.push_quest_btn = tk.Button(adb_frame, text="Push Files To Quest", command=self.push_to_quest, bg=self.colors['accent_orange'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) + self.push_quest_btn.pack(side=tk.LEFT, padx=5) + + action_frame = tk.Frame(button_panel, bg=self.colors['bg_dark']) + action_frame.pack(side=tk.RIGHT, fill=tk.Y) + + self.edit_btn = tk.Button(action_frame, text="Open in Editor", command=self.open_external_editor, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) + self.edit_btn.pack(side=tk.LEFT, padx=5) + + self.replace_btn = tk.Button(action_frame, text="Replace Texture", command=self.replace_texture, bg=self.colors['accent_green'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) + self.replace_btn.pack(side=tk.LEFT, padx=5) + + self.download_btn = tk.Button(action_frame, text="Download All Textures", command=self.download_textures, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) + self.download_btn.pack(side=tk.LEFT, padx=5) + + self.load_all_btn = tk.Button(action_frame, text="Load/Cache All", command=self.load_all_textures, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, 
"bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) + self.load_all_btn.pack(side=tk.LEFT, padx=5) + + self.resolution_status = tk.Label(button_panel, text="", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) + + info_frame = tk.LabelFrame(main_frame, text="TEXTURE INFORMATION", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) + info_frame.grid(row=6, column=0, columnspan=3, sticky='nsew', pady=(10, 0)) + info_frame.columnconfigure(0, weight=1) + info_frame.rowconfigure(0, weight=1) + + self.info_text = scrolledtext.ScrolledText(info_frame, height=6, wrap=tk.WORD, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.SUNKEN, bd=1) + self.info_text.grid(row=0, column=0, sticky='nsew', padx=2, pady=2) + + self.update_canvas_placeholder(self.original_canvas, "Select output folder to view textures") + self.update_canvas_placeholder(self.replacement_canvas, "Click to select replacement texture") + + def update_canvas_placeholder(self, canvas, text): + canvas.delete("all") + canvas_width = canvas.winfo_width() + canvas_height = canvas.winfo_height() + if canvas_width <= 1 or canvas_height <= 1: + canvas_width, canvas_height = 400, 300 + canvas.create_text(canvas_width//2, canvas_height//2, text=text, font=("Arial", 10), fill=self.colors['text_muted'], justify=tk.CENTER) + + def log_info(self, message): + self.info_text.insert(tk.END, message + "\n") + self.info_text.see(tk.END) + self.info_text.update_idletasks() + + def _on_listbox_scroll(self, event): + """Load more items as user scrolls near the bottom""" + try: + # Get the current visible range + visible_items = self.file_list.yview() + if visible_items[1] > 0.9: # Top 90% of the scrollbar + # Load more items if available + current_count = self.file_list.size() + total_available = len(self.filtered_textures) + if current_count < total_available: + # Load next chunk + chunk_size = 500 + next_items = 
min(current_count + chunk_size, total_available) + # Remove the "load more" indicator + if current_count > 0: + last_item = self.file_list.get(current_count - 1) + if "Scroll down to load" in last_item or "more items" in last_item: + self.file_list.delete(current_count - 1) + # Add more items + for i in range(current_count - 1, next_items): + if i >= 0: + self.file_list.insert(tk.END, self.filtered_textures[i]) + # Add indicator if more remain + if next_items < total_available: + remaining = total_available - next_items + self.file_list.insert(tk.END, f"[Loading {remaining} more items...]") + except: + pass + + + def select_data_folder(self): + path = filedialog.askdirectory(title="Select Data Folder (contains manifests and packages)") + if path: + self.set_data_folder(path) + + def set_data_folder(self, path): + self.data_folder = path + self.data_folder_label.config(text=os.path.basename(path), fg=self.colors['text_light']) + + manifests_path = os.path.join(path, "manifests") + packages_path = os.path.join(path, "packages") + + if not os.path.exists(manifests_path) or not os.path.exists(packages_path): + parent_path = os.path.dirname(path) + parent_manifests = os.path.join(parent_path, "manifests") + parent_packages = os.path.join(parent_path, "packages") + + if os.path.exists(parent_manifests) and os.path.exists(parent_packages): + path = parent_path + manifests_path = parent_manifests + packages_path = parent_packages + self.data_folder = path + self.data_folder_label.config(text=os.path.basename(path)) + + if os.path.exists(manifests_path) and os.path.exists(packages_path): + self._set_package_from_manifests(manifests_path) + self.log_info(f"✓ Data folder set: {path}") + else: + self.log_info("✗ Could not find manifests and packages folders") + + ConfigManager.save_config(data_folder=self.data_folder) + self.config['data_folder'] = self.data_folder + self.update_evr_buttons_state() + + def select_extracted_folder(self): + path = 
filedialog.askdirectory(title="Select Extracted Folder") + if path: + self.set_extracted_folder(path) + + def set_extracted_folder(self, path): + self.extracted_folder = path + self.extracted_folder_label.config(text=os.path.basename(path), fg=self.colors['text_light']) + self.set_output_folder(path) + self.update_evr_buttons_state() + ConfigManager.save_config(extracted_folder=self.extracted_folder) + self.config['extracted_folder'] = self.extracted_folder + self.log_info(f"✓ Extracted folder set: {path}") + + PACKAGE_TEXTURES = "48037dc70b0ecab2" + + def _set_package_from_manifests(self, manifests_path): + try: + packages = [] + packages_dir = os.path.join(os.path.dirname(manifests_path), "packages") + with os.scandir(manifests_path) as it: + for e in it: + if not e.is_file(): + continue + file_name = e.name + package_file = os.path.join(packages_dir, file_name) + package_file_0 = os.path.join(packages_dir, f"{file_name}_0") + if os.path.exists(package_file) or os.path.exists(package_file_0): + packages.append(file_name) + if self.PACKAGE_TEXTURES in packages: + self.package_name = self.PACKAGE_TEXTURES + elif packages: + self.package_name = packages[0] + else: + self.package_name = None + self.update_evr_buttons_state() + if packages: + self.log_info(f"Using package: {self.package_name}") + else: + self.log_info("No valid packages found") + except Exception as e: + self.log_info(f"Error reading manifests: {e}") + self.package_name = None + + def update_evr_buttons_state(self): + if self.data_folder and self.package_name and self.extracted_folder: + self.extract_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) + if os.path.exists(self.extracted_folder) and _dir_nonempty(self.extracted_folder): + self.repack_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) + else: + self.repack_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) + else: + self.extract_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) + 
self.repack_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) + + def extract_package(self): + if not all([self.data_folder, self.package_name, self.extracted_folder]): + messagebox.showerror("Error", "Please select data folder, package, and extraction folder first.") + return + + popup = tk.Toplevel(self.root) + popup.title("Extraction Mode") + popup.geometry("400x180") + popup.configure(bg=self.colors['bg_medium']) + popup.resizable(False, False) + popup.transient(self.root) + popup.grab_set() + + try: + x = self.root.winfo_x() + (self.root.winfo_width() - 400) // 2 + y = self.root.winfo_y() + (self.root.winfo_height() - 180) // 2 + popup.geometry(f"+{x}+{y}") + except: pass + + tk.Label(popup, text="Select Extraction Mode", font=("Arial", 12, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_medium']).pack(pady=(20, 10)) + tk.Label(popup, text="Full Package extraction is required for repacking.", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_medium']).pack(pady=(0, 20)) + tk.Label(popup, text="Texture mode is faster but only extracts texture files.", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_medium']).pack(pady=(0, 20)) + + btn_frame = tk.Frame(popup, bg=self.colors['bg_medium']) + btn_frame.pack(fill=tk.X, padx=20) + + def do_extract(textures_only): + popup.destroy() + self._run_extraction(textures_only) + + tk.Button(btn_frame, text="Extract Full Package (For Repacking)", command=lambda: do_extract(False), bg=self.colors['accent_green'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED).pack(fill=tk.X, pady=5) + tk.Button(btn_frame, text="Extract Textures Only (For Viewing)", command=lambda: do_extract(True), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED).pack(fill=tk.X, pady=5) + tk.Button(btn_frame, text="Extract Textures Only (Fast)", command=lambda: do_extract(True), bg=self.colors['accent_green'], 
fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED).pack(fill=tk.X, pady=5) + tk.Button(btn_frame, text="Extract Full Package (Slow)", command=lambda: do_extract(False), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED).pack(fill=tk.X, pady=5) + + def _run_extraction(self, textures_only): + os.makedirs(self.extracted_folder, exist_ok=True) + mode_text = "Textures Only" if textures_only else "Full Package" + + # Show progress dialog + progress = ProgressDialog(self.root, "Extracting Package", f"Extracting {mode_text}...\n\nThis may take a few minutes...", show_bar=False) + + self.evr_status_label.config(text=f"Extracting package ({mode_text})...", fg=self.colors['accent_green']) + self.root.update_idletasks() + + def extraction_thread(): + export_type = "textures" if textures_only else "" + success, message = self.evr_tools.extract_package(self.data_folder, self.package_name, self.extracted_folder, export_type=export_type) + self.root.after(0, lambda: self.on_extraction_complete(success, message, progress)) + + threading.Thread(target=extraction_thread, daemon=True).start() + + def on_extraction_complete(self, success, message, progress=None): + if progress: + progress.close() + + if success: + self.evr_status_label.config(text="Extraction successful!", fg=self.colors['success']) + self.log_info(f"✓ EXTRACTION: {message}") + extracted_textures_path = self.find_extracted_textures(self.extracted_folder) + if extracted_textures_path: + self.set_output_folder(extracted_textures_path) + else: + self.set_output_folder(self.extracted_folder) + self.repack_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) + else: + self.evr_status_label.config(text="Extraction failed", fg=self.colors['error']) + self.log_info(f"✗ EXTRACTION FAILED: {message}") + messagebox.showerror("Extraction Error", message) + + def find_extracted_textures(self, base_dir): + target_names = {"-4707359568332879775", 
"5231972605540061417"} + target_names = {"beac1969cb7b8861", "489b7b69cb19e0e9"} + for root, dirs, _ in os.walk(base_dir): + for d in dirs: + if d in target_names: + return root + return None + + def repack_package(self): + if not all([self.data_folder, self.package_name, self.extracted_folder]): + messagebox.showerror("Error", "Please select data folder, package, and extraction folder first.") + return + + input_folder = self.extracted_folder + if not input_folder or not os.path.exists(input_folder): + messagebox.showerror("Error", "Extracted folder not set or found. Please perform a full extraction first.") + if self.is_quest_textures and self.quest_input_folder: + input_folder = self.quest_input_folder + self.log_info("🎯 Using Quest input folder for repacking") + elif self.is_pcvr_textures and self.pcvr_input_folder: + input_folder = self.pcvr_input_folder + self.log_info("🎮 Using PCVR input folder for repacking") + else: + messagebox.showerror("Error", "Input folder not found. Please check input-pcvr/input-quest folders.") + return + + self.log_info(f"📦 Using '{os.path.basename(input_folder)}' as input for repacking.") + + script_dir = os.path.dirname(os.path.abspath(__file__)) + output_dir = self.repacked_folder + + confirm = messagebox.askyesno("Confirm Repack", f"Repack modified files to:\n{output_dir}\n\nContinue?") + if not confirm: return + + # Show progress dialog + progress = ProgressDialog(self.root, "Repacking Package", "Rebuilding package files...\n\nThis may take a few minutes...", show_bar=False) + + self.evr_status_label.config(text="Repacking package...", fg=self.colors['accent_green']) + self.root.update_idletasks() + + def repacking_thread(): + success, message = self.evr_tools.repack_package(output_dir, self.package_name, self.data_folder, input_folder) + self.root.after(0, lambda: self.on_repacking_complete(success, message, output_dir, progress)) + + threading.Thread(target=repacking_thread, daemon=True).start() + + def 
# Methods of EchoVRTextureViewer (class header is outside this view) plus the
# module-level entry point. Fixes applied:
#   * removed the accidentally duplicated icon-setup/app/mainloop trailer that
#     followed the first `if __name__ == '__main__'` guard (it re-ran at module
#     level and referenced undefined names `icon_path`/`root`),
#   * removed dead decimal-ID path assignments in set_output_folder that were
#     immediately overwritten by the hex-ID paths,
#   * restored the trailing newline at end of file.
# All runtime strings, paths and UI text are unchanged.

def on_repacking_complete(self, success, message, output_dir, progress=None):
    """Handle completion of a repack job: update status UI and verify output layout."""
    if progress:
        progress.close()

    if success:
        self.evr_status_label.config(text="Repacking successful!", fg=self.colors['success'])
        self.log_info(f"✓ REPACKING: {message}")
        packages_path = os.path.join(output_dir, "packages")
        manifests_path = os.path.join(output_dir, "manifests")
        if os.path.exists(packages_path) and os.path.exists(manifests_path):
            self.log_info(f"✓ Packages and manifests created in: {output_dir}")
            self.update_quest_push_button()
        else:
            self.log_info("⚠ Packages or manifests folders not found in output directory")
    else:
        self.evr_status_label.config(text="Repacking failed", fg=self.colors['error'])
        self.log_info(f"✗ REPACKING FAILED: {message}")
    messagebox.showinfo("Repacking Result", message)

def check_app_updates(self):
    """Check for app updates on GitHub (network call runs off the UI thread)."""
    self.log_info("🔄 Checking for updates...")
    self.check_updates_btn.config(state=tk.DISABLED, text="Checking...")
    self.root.update_idletasks()

    def check_thread():
        has_update, latest_version, download_url = check_for_updates()
        # Marshal the result back onto the Tk main loop.
        self.root.after(0, lambda: self.on_update_check_complete(has_update, latest_version, download_url))

    threading.Thread(target=check_thread, daemon=True).start()

def on_update_check_complete(self, has_update, latest_version, download_url):
    """UI-thread callback for the update check."""
    self.check_updates_btn.config(state=tk.NORMAL, text="🔄 Check Updates")

    if has_update:
        self.log_info(f"✅ Update available: v{latest_version}")
        UpdateNotificationDialog(self.root, latest_version, download_url)
    else:
        self.log_info(f"✅ You are running the latest version (v{APP_VERSION})")
        messagebox.showinfo("Updates", f"You are running the latest version!\n\nCurrent: v{APP_VERSION}")

def install_adb_tools(self):
    """Install ADB platform tools in a background thread."""
    self.log_info("Installing ADB Platform Tools...")

    def install_thread():
        success, message = ADBManager.install_adb_tools()
        self.root.after(0, lambda: self.on_adb_install_complete(success, message))

    threading.Thread(target=install_thread, daemon=True).start()

def on_adb_install_complete(self, success, message):
    """UI-thread callback for ADB installation; retests the connection on success."""
    if success:
        self.log_info(f"✅ ADB Tools installed: {message}")
        messagebox.showinfo("Success", "ADB Platform Tools installed successfully!")
        self.test_adb_connection()
    else:
        self.log_info(f"❌ ADB installation failed: {message}")
        messagebox.showerror("Error", f"ADB installation failed: {message}")

def test_adb_connection(self):
    """Probe the ADB connection in a background thread."""
    def test_thread():
        success, message, adb_path = ADBManager.check_adb()
        self.root.after(0, lambda: self.on_adb_test_complete(success, message))
    threading.Thread(target=test_thread, daemon=True).start()

def on_adb_test_complete(self, success, message):
    """Enable/disable the Quest push button based on the ADB probe result."""
    if success:
        self.log_info(f"✅ ADB: {message}")
        if self.is_quest_textures:
            self.push_quest_btn.config(state=tk.NORMAL, bg=self.colors['accent_orange'])
    else:
        self.log_info(f"❌ ADB: {message}")
        self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])

def update_quest_push_button(self):
    """Refresh push-button availability; pushing only makes sense for Quest output."""
    if self.is_quest_textures and self.output_folder:
        self.test_adb_connection()
    else:
        self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])

def push_to_quest(self):
    """Push the output (preferring a repacked folder) to the Quest via ADB."""
    if not self.output_folder:
        messagebox.showerror("Error", "Please select output folder first")
        return
    success, message, _ = ADBManager.check_adb()
    if not success:
        messagebox.showerror("ADB Error", f"Cannot connect to Quest:\n{message}")
        return

    result = messagebox.askyesno("Push to Quest", "This will push files to your Quest headset.\n\nContinue?", icon='warning')
    if not result:
        return

    self.log_info("🚀 Starting Quest file push...")
    self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'], text="Pushing...")
    self.root.update_idletasks()

    def push_thread():
        try:
            push_folder = self.output_folder
            # Prefer the repacked folder when it looks like a valid build output.
            if self.repacked_folder and os.path.exists(self.repacked_folder):
                if (os.path.exists(os.path.join(self.repacked_folder, "manifests")) or os.path.exists(os.path.join(self.repacked_folder, "packages"))):
                    push_folder = self.repacked_folder
                    self.log_info("📦 Using repacked folder")

            quest_dest_path = "/sdcard/readyatdawn/files/_data/5932408047/rad15/android"
            success, message = ADBManager.push_to_quest(push_folder, quest_dest_path)
            self.root.after(0, lambda: self.on_quest_push_complete(success, message))
        except Exception as thread_error:
            error_message = f"Push thread error: {str(thread_error)}"
            self.root.after(0, lambda: self.on_quest_push_complete(False, error_message))

    threading.Thread(target=push_thread, daemon=True).start()

def on_quest_push_complete(self, success, message):
    """UI-thread callback after the ADB push finishes."""
    if success:
        messagebox.showinfo("Success", f"Files pushed to Quest!\n\n{message}")
        self.log_info(f"✅ QUEST PUSH: {message}")
    else:
        messagebox.showerror("Error", f"Failed to push files:\n{message}")
        self.log_info(f"❌ QUEST PUSH FAILED: {message}")
    self.push_quest_btn.config(state=tk.NORMAL, bg=self.colors['accent_orange'], text="Push Files To Quest")
    self.update_quest_push_button()

def set_output_folder(self, path):
    """Select the working output folder and detect the platform (Quest vs PCVR).

    Platform is inferred from the folder name, falling back to probing the
    hex-named texture subfolders. (Fixed: removed dead decimal-ID path
    assignments that were immediately overwritten by the hex-ID ones.)
    """
    self.output_folder = path
    folder_name = os.path.basename(path).lower()
    if "quest" in folder_name:
        self.is_quest_textures = True
        self.is_pcvr_textures = False
        self.textures_folder = os.path.join(path, "489b7b69cb19e0e9")
        self.corresponding_folder = os.path.join(path, "e2ef0854d0cd69b8")
        self.platform_label.config(text="Platform: Quest (ASTC)", fg=self.colors['success'])
        self.log_info("🎯 Switched to Quest mode")
    elif "pcvr" in folder_name:
        self.is_quest_textures = False
        self.is_pcvr_textures = True
        self.textures_folder = os.path.join(path, "beac1969cb7b8861")
        self.corresponding_folder = os.path.join(path, "4a4c32c49300b8a0")
        self.platform_label.config(text="Platform: PCVR (DDS)", fg=self.colors['accent_blue'])
        self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])
        self.log_info("🎮 Switched to PCVR mode")
    else:
        # Folder name is ambiguous: probe for the platform-specific subfolders.
        quest_textures_folder = os.path.join(path, "489b7b69cb19e0e9")
        pcvr_textures_folder = os.path.join(path, "beac1969cb7b8861")
        if getattr(sys, 'frozen', False):
            # Frozen (PyInstaller) builds may resolve relative paths oddly; retry
            # against the parent directory. TODO confirm this layout assumption.
            parent_dir = os.path.dirname(os.path.dirname(path))
            if not os.path.exists(quest_textures_folder):
                quest_textures_folder = os.path.join(parent_dir, os.path.basename(path), "489b7b69cb19e0e9")
            if not os.path.exists(pcvr_textures_folder):
                pcvr_textures_folder = os.path.join(parent_dir, os.path.basename(path), "beac1969cb7b8861")

        if os.path.exists(quest_textures_folder):
            self.textures_folder = quest_textures_folder
            self.corresponding_folder = os.path.join(path, "e2ef0854d0cd69b8")
            self.is_quest_textures = True
            self.is_pcvr_textures = False
            self.platform_label.config(text="Platform: Quest (ASTC)", fg=self.colors['success'])
            self.log_info("🎯 Auto-detected Quest textures")
        elif os.path.exists(pcvr_textures_folder):
            self.textures_folder = pcvr_textures_folder
            self.corresponding_folder = os.path.join(path, "4a4c32c49300b8a0")
            self.is_quest_textures = False
            self.is_pcvr_textures = True
            self.platform_label.config(text="Platform: PCVR (DDS)", fg=self.colors['accent_blue'])
            self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])
            self.log_info("🎮 Auto-detected PCVR textures")
        else:
            self.textures_folder = path
            self.log_info("⚠ Could not determine platform structure, using root folder")

    if os.path.exists(self.textures_folder):
        platform_text = "Quest" if self.is_quest_textures else "PCVR"
        self.status_label.config(text=f"Output folder: {os.path.basename(path)} ({platform_text})")
        self.log_info(f"Output folder set: {path} ({platform_text})")
        self.load_textures()
        ConfigManager.save_config(output_folder=self.output_folder)
        self.config['output_folder'] = self.output_folder
        self.update_quest_push_button()

def filter_textures(self, event=None):
    """Filter the listbox by the search text (case-insensitive substring match)."""
    search_text = self.search_var.get().lower()
    if not search_text:
        self.filtered_textures = self.all_textures.copy()
    else:
        self.filtered_textures = [texture for texture in self.all_textures if search_text in texture.lower()]
    self.file_list.delete(0, tk.END)
    # Load textures in chunks to avoid UI freeze
    if self.filtered_textures:
        chunk_size = 500
        for i in range(0, min(len(self.filtered_textures), chunk_size)):
            self.file_list.insert(tk.END, self.filtered_textures[i])

        # Show indicator if there are more
        if len(self.filtered_textures) > chunk_size:
            self.file_list.insert(tk.END, f"... ({len(self.filtered_textures) - chunk_size} more items - scroll to load)")

def clear_search(self):
    """Reset the search box and re-apply the (now empty) filter."""
    self.search_var.set("")
    self.filter_textures()

def load_textures(self):
    """Kick off an asynchronous scan of the textures folder."""
    self.file_list.delete(0, tk.END)
    self.file_list.insert(tk.END, "Loading textures...")
    self.update_canvas_placeholder(self.original_canvas, "Loading textures...")
    self.root.update_idletasks()
    threading.Thread(target=self._load_textures_worker, daemon=True).start()

def _is_valid_texture_file(self, file_path):
    """Heuristically decide whether a file is a texture for the current platform.

    PCVR expects a DDS magic; Quest accepts several container magics and falls
    back to a 16-byte-alignment heuristic. Best-effort: any I/O error → False.
    """
    try:
        if not os.path.isfile(file_path):
            return False
        size = os.path.getsize(file_path)
        if size == 0:
            return False

        if not self.is_pcvr_textures and not self.is_quest_textures:
            return True

        with open(file_path, 'rb') as f:
            header = f.read(16)

        if self.is_pcvr_textures:
            return header.startswith(b'DDS ')

        if self.is_quest_textures:
            if header.startswith(b'\x13\xAB\xA1\x5C'):
                return True
            if header.startswith(b'\xABKTX 11') or header.startswith(b'\xABKTX 20'):
                return True
            if b'BcBP' in header:
                return True
            if header.startswith(b'PVR'):
                return True

            if size % 16 == 0:
                # Reject obvious text/JSON/XML payloads that happen to align.
                if header.strip().startswith(b'{') or header.strip().startswith(b'<'):
                    return False
                return True
            return False

        return True
    except:
        return False

def _load_textures_worker(self):
    """Background scan of the textures folder (uses a directory cache when valid)."""
    if not self.textures_folder or not os.path.exists(self.textures_folder):
        self.root.after(0, lambda: self._on_textures_loaded([], 0))
        return

    cached_files = TextureCacheManager.get_cached_files(self.textures_folder)
    if cached_files is not None:
        self.root.after(0, lambda: self._on_textures_loaded(cached_files, len(cached_files)))
        return

    valid_files = []
    try:
        with os.scandir(self.textures_folder) as it:
            for e in it:
                if e.is_file() and self._is_valid_texture_file(e.path):
                    valid_files.append(e.name)

        TextureCacheManager.update_cache(self.textures_folder, valid_files)
        self.root.after(0, lambda: self._on_textures_loaded(valid_files, len(valid_files)))
    except Exception as e:
        print(f"Scan Error: {e}")
        self.root.after(0, lambda: self._on_textures_loaded([], 0))

def _on_textures_loaded(self, files, count):
    """UI-thread callback: populate the list (chunked) and update status."""
    self.all_textures = sorted(files)
    self.filtered_textures = self.all_textures.copy()
    self.file_list.delete(0, tk.END)
    if self.filtered_textures:
        # Load first batch to avoid UI freeze with large texture counts
        chunk_size = 500
        for i in range(0, min(len(self.filtered_textures), chunk_size)):
            self.file_list.insert(tk.END, self.filtered_textures[i])

        # Show indicator if there are more items
        if len(self.filtered_textures) > chunk_size:
            remaining = len(self.filtered_textures) - chunk_size
            self.file_list.insert(tk.END, f"[Scroll down to load {remaining} more items]")

    # Cleanup cache to prevent disk bloat
    TextureLoader.cleanup_cache()

    platform_text = "Quest" if self.is_quest_textures else "PCVR"
    status_text = f"Found {count} {platform_text} texture files"
    self.status_label.config(text=status_text)
    self.log_info(f"Found {count} {platform_text} texture files")
    if count == 0:
        self.log_info("No texture files found.")
        self.update_canvas_placeholder(self.original_canvas, "No textures found")
    else:
        self.update_canvas_placeholder(self.original_canvas, "Select a texture to view")

def on_texture_selected(self, event):
    """Listbox selection handler; loads the selected texture asynchronously."""
    if not self.file_list.curselection():
        return

    # Multi-select: Show count if multiple
    selection = self.file_list.curselection()
    if len(selection) > 1:
        self.update_canvas_placeholder(self.original_canvas, f"{len(selection)} files selected")
        self.replace_btn.config(state=tk.NORMAL, bg=self.colors['accent_green'], text=f"Replace {len(selection)} Files")
        self.edit_btn.config(state=tk.DISABLED)
        return

    index = selection[0]
    texture_name = self.filtered_textures[index]
    self.current_texture = os.path.join(self.textures_folder, texture_name)
    self.replace_btn.config(text="Replace Texture")

    try:
        self.update_canvas_placeholder(self.original_canvas, "Loading texture...")
        self.root.update_idletasks()

        def load_texture_thread():
            try:
                image = TextureLoader.load_texture(self.current_texture, self.is_quest_textures)
                self.root.after(0, lambda: self.display_texture_result(image))
            except Exception as e:
                self.root.after(0, lambda: self.display_texture_error(e))

        threading.Thread(target=load_texture_thread, daemon=True).start()
    except Exception as e:
        self.log_info(f"Error loading texture: {e}")
        self.update_canvas_placeholder(self.original_canvas, "Error loading texture")

def display_texture_result(self, image):
    """Render a loaded texture and collect its metadata for the info panel."""
    if image:
        self.display_image_on_canvas(image, self.original_canvas)
        if self.is_quest_textures:
            self.original_info = {
                'file_size': os.path.getsize(self.current_texture),
                'format': 'ASTC', 'width': image.width, 'height': image.height
            }
        else:
            self.original_info = DDSHandler.get_dds_info(self.current_texture)
            if self.original_info is None:
                try:
                    size = os.path.getsize(self.current_texture)
                except:
                    size = 0
                self.original_info = {
                    'file_size': size,
                    'format': 'DDS/Raw',
                    'width': image.width,
                    'height': image.height
                }

        self.update_texture_info()
        self.edit_btn.config(state=tk.NORMAL, bg=self.colors['accent_blue'])
        self.replace_btn.config(state=tk.NORMAL, bg=self.colors['accent_green'])
    else:
        self.update_canvas_placeholder(self.original_canvas, "Failed to load texture")
        self.edit_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])
        self.replace_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])

def display_texture_error(self, error):
    """UI-thread callback when texture loading fails."""
    self.log_info(f"Error loading texture: {error}")
    self.update_canvas_placeholder(self.original_canvas, "Error loading texture")
    self.edit_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])
    self.replace_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'])

def browse_replacement_texture(self, event):
    """Pick a replacement image from disk and preview it asynchronously."""
    if not self.current_texture and len(self.file_list.curselection()) == 0:
        messagebox.showinfo("Info", "Please select an original texture first")
        return

    file_types = [("PNG files", "*.png"), ("DDS files", "*.dds"), ("All files", "*.*")]
    if self.is_quest_textures:
        file_types = [("PNG files", "*.png"), ("All files", "*.*")]

    file_path = filedialog.askopenfilename(title="Select Replacement Texture", filetypes=file_types)

    if file_path:
        self.replacement_texture = file_path
        try:
            def load_replacement_thread():
                try:
                    if self.is_quest_textures:
                        image = Image.open(file_path).convert("RGBA")
                    elif file_path.lower().endswith(".png"):
                        image = Image.open(file_path).convert("RGBA")
                    else:
                        image = TextureLoader.load_texture(file_path, False)
                    self.root.after(0, lambda: self.display_replacement_result(image, file_path))
                except Exception as e:
                    self.root.after(0, lambda: self.display_replacement_error(e))
            threading.Thread(target=load_replacement_thread, daemon=True).start()
        except Exception as e:
            self.log_info(f"Error loading replacement texture: {e}")
            self.update_canvas_placeholder(self.replacement_canvas, "Error loading replacement")

def display_replacement_result(self, image, file_path):
    """Render a loaded replacement and record its metadata / target size."""
    if image:
        self.display_image_on_canvas(image, self.replacement_canvas)
        if self.is_quest_textures:
            self.replacement_info = {
                'file_size': os.path.getsize(file_path),
                'format': 'PNG', 'width': image.width, 'height': image.height
            }
            self.replacement_size = None
        else:
            self.replacement_info = DDSHandler.get_dds_info(file_path)
            if self.replacement_info is None:
                self.replacement_info = {
                    'format': 'PNG', 'width': image.width, 'height': image.height,
                    'file_size': os.path.getsize(file_path)
                }
                self.replacement_size = None
            else:
                self.replacement_size = self.replacement_info.get('file_size')
        self.update_texture_info()
        self.check_resolution_match()
        self.log_info(f"Replacement loaded: {os.path.basename(file_path)}")
    else:
        self.update_canvas_placeholder(self.replacement_canvas, "Failed to load replacement")

def display_replacement_error(self, error):
    """UI-thread callback when the replacement preview fails to load."""
    self.log_info(f"Error loading replacement texture: {error}")
    self.update_canvas_placeholder(self.replacement_canvas, "Error loading replacement")

def display_image_on_canvas(self, image, canvas):
    """Fit an image into a canvas, centered, preserving aspect ratio."""
    canvas.delete("all")
    canvas_width = canvas.winfo_width()
    canvas_height = canvas.winfo_height()
    if canvas_width <= 1 or canvas_height <= 1:
        # Canvas not laid out yet; fall back to a sane default size.
        canvas_width, canvas_height = 400, 300

    img_width, img_height = image.size
    ratio = min(canvas_width / img_width, canvas_height / img_height)
    new_size = (int(img_width * ratio), int(img_height * ratio))

    resized_image = image.resize(new_size, Image.Resampling.LANCZOS)
    photo = ImageTk.PhotoImage(resized_image)
    x_pos = (canvas_width - new_size[0]) // 2
    y_pos = (canvas_height - new_size[1]) // 2
    canvas.create_image(x_pos, y_pos, anchor=tk.NW, image=photo)
    # Keep a reference so Tk does not garbage-collect the PhotoImage.
    canvas.image = photo

def update_texture_info(self):
    """Rebuild the info panel from the original/replacement metadata dicts."""
    info = ""
    if self.original_info:
        platform_text = "Quest" if self.is_quest_textures else "PCVR"
        info += f"=== ORIGINAL ({platform_text}) ===\n"
        info += f"File: {os.path.basename(self.current_texture)}\n"
        info += f"Size: {self.original_info['file_size']:,} bytes\n"
        if 'width' in self.original_info:
            info += f"Dim: {self.original_info['width']} x {self.original_info['height']}\n"
        info += f"Format: {self.original_info['format']}\n\n"

    if self.replacement_info:
        info += "=== REPLACEMENT ===\n"
        info += f"File: {os.path.basename(self.replacement_texture)}\n"
        if 'width' in self.replacement_info:
            info += f"Dim: {self.replacement_info['width']} x {self.replacement_info['height']}\n"
        info += f"Format: {self.replacement_info['format']}\n"

    self.info_text.delete(1.0, tk.END)
    self.info_text.insert(tk.END, info)

def check_resolution_match(self):
    """Warn when the replacement's resolution differs from the original's."""
    if self.original_info and self.replacement_info and 'width' in self.original_info and 'width' in self.replacement_info:
        ow, oh = self.original_info['width'], self.original_info['height']
        rw, rh = self.replacement_info['width'], self.replacement_info['height']
        if ow == rw and oh == rh:
            self.resolution_status.config(text="✓ Resolutions match", fg=self.colors['success'])
        else:
            self.resolution_status.config(
                text=f"⚠ Resolution will be adjusted to {ow}×{oh} when replacing",
                fg=self.colors['warning']
            )
    else:
        self.resolution_status.config(text="")

def open_external_editor(self):
    """Open the selected texture with the OS default application."""
    if not self.current_texture:
        return
    try:
        if sys.platform == 'win32':
            os.startfile(self.current_texture)
        elif sys.platform == 'darwin':
            subprocess.call(('open', self.current_texture))
        else:
            subprocess.call(('xdg-open', self.current_texture))
    except Exception as e:
        messagebox.showerror("Error", f"Could not open external editor: {str(e)}")

def replace_texture(self):
    """Replace the selected texture(s) with the chosen image (parallel for >3)."""
    if not self.replacement_texture or not self.output_folder:
        return

    selection = self.file_list.curselection()
    if not selection:
        return

    if len(selection) > 1:
        confirm = messagebox.askyesno("Multi-Replace", f"Are you sure you want to replace {len(selection)} textures with the selected image?")
        if not confirm:
            return

    replacement_size = None
    if not self.is_quest_textures and self.replacement_info and 'file_size' in self.replacement_info:
        replacement_size = self.replacement_info.get('file_size')

    def do_one(index):
        # Replace a single texture; returns (name, (success, message)).
        texture_name = self.filtered_textures[index]
        current_texture_path = os.path.join(self.textures_folder, texture_name)
        if self.is_quest_textures:
            return texture_name, TextureReplacer.replace_quest_texture(self.extracted_folder, current_texture_path, self.replacement_texture, self.texture_cache)
        return texture_name, TextureReplacer.replace_pcvr_texture(self.extracted_folder, current_texture_path, self.replacement_texture, replacement_size)

    results = []
    if len(selection) > 3:
        max_workers = min(4, len(selection), (os.cpu_count() or 2) + 1)
        with ThreadPoolExecutor(max_workers=max_workers) as ex:
            futures = [ex.submit(do_one, idx) for idx in selection]
            for f in as_completed(futures):
                try:
                    results.append(f.result())
                except Exception as e:
                    results.append((None, (False, str(e))))
    else:
        for index in selection:
            results.append(do_one(index))

    for texture_name, (success, message) in results:
        if texture_name is None:
            continue
        if success:
            self.log_info(f"✓ Replaced {texture_name}")
        else:
            self.log_info(f"✗ Failed {texture_name}: {message}")

    ok = sum(1 for _, (s, _) in results if s)
    fail = len(results) - ok
    msg = f"Replaced {ok} texture(s)." + (f" {fail} failed." if fail else "")
    messagebox.showinfo("Complete", msg)
    if len(selection) == 1:
        self.on_texture_selected(None)

def download_textures(self):
    """Download the prebuilt texture cache archive after user confirmation."""
    if self.is_downloading:
        self.log_info("Download already in progress...")
        return
    confirm = messagebox.askyesno("Download Textures", "Download texture cache archive (~400MB)?")
    if not confirm:
        return
    self.is_downloading = True
    self.download_btn.config(state=tk.DISABLED, text="Downloading...", bg=self.colors['accent_orange'])
    threading.Thread(target=self._download_worker, daemon=True).start()

def _download_worker(self):
    """Background download + safe extraction of the texture cache zip.

    Entries are extracted one-by-one: the leading 'texture_cache/' prefix is
    flattened, path-traversal entries are skipped, and existing files are
    never overwritten.
    """
    url = "https://github.com/heisthecat31/EchoVR-Texture-Editor/releases/download/quest/texture_cache.zip"
    if getattr(sys, 'frozen', False):
        application_path = os.path.dirname(sys.executable)
    else:
        application_path = os.path.dirname(os.path.abspath(__file__))
    # Extract into the persistent settings cache directory and protect existing files
    extract_to_path = CACHE_DIR
    temp_zip_path = os.path.join(tempfile.gettempdir(), "texture_cache.zip")
    try:
        self.root.after(0, lambda: self.log_info(f"Downloading from: {url}"))
        urllib.request.urlretrieve(url, temp_zip_path)
        self.root.after(0, lambda: self.log_info("✓ Download complete. Extracting..."))
        # Ensure cache dir exists
        os.makedirs(extract_to_path, exist_ok=True)

        # Safely extract zip entries one-by-one and do NOT overwrite existing files
        with zipfile.ZipFile(temp_zip_path, 'r') as zip_ref:
            for member in zip_ref.infolist():
                # Skip directories
                if member.is_dir():
                    continue

                # Flatten any leading 'texture_cache/' from the zip entry path
                member_path = member.filename
                if member_path.startswith('texture_cache/'):
                    member_path = member_path[len('texture_cache/'):]
                if member_path.startswith('/') or member_path.startswith('\\') or member_path == '':
                    continue

                # Normalize the target path and avoid path traversal
                target_path = os.path.normpath(os.path.join(extract_to_path, member_path))
                if not target_path.startswith(os.path.normpath(extract_to_path) + os.sep) and os.path.normpath(extract_to_path) != os.path.normpath(target_path):
                    # Unsafe path - skip
                    continue

                target_dir = os.path.dirname(target_path)
                if not os.path.exists(target_dir):
                    try:
                        os.makedirs(target_dir, exist_ok=True)
                    except:
                        pass

                # If file already exists, skip extracting to avoid overwrite
                if os.path.exists(target_path):
                    continue

                # Extract this single file
                try:
                    with zip_ref.open(member, 'r') as source, open(target_path, 'wb') as target:
                        shutil.copyfileobj(source, target)
                except Exception:
                    # If extraction of this member fails, skip it and continue
                    continue
        try:
            os.remove(temp_zip_path)
        except:
            pass
        self.root.after(0, lambda: self._on_download_finished(True, "Texture cache downloaded successfully!"))
    except Exception as e:
        self.root.after(0, lambda: self._on_download_finished(False, f"Download failed: {str(e)}"))

def _on_download_finished(self, success, message):
    """UI-thread callback once the cache download completes or fails."""
    self.is_downloading = False
    self.download_btn.config(state=tk.NORMAL, text="Download All Textures", bg=self.colors['accent_blue'])
    if success:
        messagebox.showinfo("Success", message)
        self.log_info(f"✅ {message}")
    else:
        messagebox.showerror("Error", message)
        self.log_info(f"❌ {message}")

# NEW METHODS FOR GRID VIEW
def open_grid_view(self):
    """Open the thumbnail grid popup for the currently filtered textures."""
    if not self.textures_folder:
        messagebox.showerror("Error", "No textures loaded.")
        return
    TextureGridPopup(self.root, self, self.filtered_textures, self.textures_folder, self.is_quest_textures)

def select_texture_by_name(self, filename):
    """Select a texture in the listbox by filename (used by the grid popup)."""
    if filename in self.filtered_textures:
        idx = self.filtered_textures.index(filename)
        self.file_list.selection_clear(0, tk.END)
        self.file_list.selection_set(idx)
        self.file_list.see(idx)
        self.on_texture_selected(None)

def load_all_textures(self):
    """Pre-generate PNG previews for every texture (long-running, with progress)."""
    if not self.textures_folder or not self.all_textures:
        messagebox.showinfo("Info", "No textures found to load.")
        return

    confirm = messagebox.askyesno("Load All Textures", f"This will load and cache {len(self.all_textures)} textures.\nThis process converts textures to PNG for previewing.\nIt may take a while depending on the number of files.\n\nContinue?")
    if not confirm:
        return

    self.load_all_btn.config(state=tk.DISABLED)
    progress = ProgressDialog(self.root, "Caching Textures", "Generating texture cache...", show_bar=True)

    threading.Thread(target=self._load_all_worker, args=(progress,), daemon=True).start()

def _load_all_worker(self, progress):
    """Background worker that caches every texture, honoring cancellation."""
    total = len(self.all_textures)
    failed = []
    skipped = 0
    success = 0

    for i, texture_name in enumerate(self.all_textures):
        if progress.cancel_requested:
            break

        full_path = os.path.join(self.textures_folder, texture_name)
        try:
            # Check if already cached to avoid unnecessary loading/decoding
            cache_path = TextureLoader.get_cache_path(full_path)
            if os.path.exists(cache_path) and os.path.getsize(cache_path) > 0:
                skipped += 1
            else:
                img = TextureLoader.load_texture(full_path, self.is_quest_textures)
                if img:
                    success += 1
                else:
                    # Determine format for report
                    fmt = "ASTC" if self.is_quest_textures else "Unknown"
                    if not self.is_quest_textures:
                        info = DDSHandler.get_dds_info(full_path)
                        if info:
                            fmt = info.get('format', 'Unknown')
                    failed.append(f"{texture_name} ({fmt})")
        except Exception as e:
            failed.append(f"{texture_name} (Error: {str(e)})")

        if not progress.update(i + 1, total):
            break

    self.root.after(0, lambda: self._on_load_all_complete(progress, success, skipped, failed))

def _on_load_all_complete(self, progress, success, skipped, failed):
    """UI-thread callback summarizing the bulk-cache run (writes a failure log)."""
    progress.close()
    self.load_all_btn.config(state=tk.NORMAL)

    msg = f"Processing Complete.\n\nCached: {success}\nSkipped (Already Cached): {skipped}\nFailed: {len(failed)}"
    if failed:
        msg += "\n\nFailures (First 20):\n" + "\n".join(failed[:20])
        if len(failed) > 20:
            msg += f"\n...and {len(failed)-20} more."

        try:
            with open("texture_load_failures.txt", "w") as f:
                f.write("Failed Textures:\n" + "\n".join(failed))
            msg += "\n\nFull list saved to texture_load_failures.txt"
        except:
            pass
        messagebox.showwarning("Load Results", msg)
    else:
        messagebox.showinfo("Load Results", msg)

def main():
    """Build the Tk root, resolve the app icon, and run the main loop."""
    root = tk.Tk()

    # Set app icon
    icon_path = os.path.join(get_base_dir(), "icon.ico")

    # Check if running as PyInstaller bundle (onefile) where resources are in _MEIPASS
    if hasattr(sys, '_MEIPASS'):
        bundled_icon = os.path.join(sys._MEIPASS, "icon.ico")
        if os.path.exists(bundled_icon):
            icon_path = bundled_icon

    if os.path.exists(icon_path):
        try:
            root.iconbitmap(icon_path)
        except Exception:
            pass

    app = EchoVRTextureViewer(root)
    root.mainloop()

if __name__ == '__main__':
    main()
newline at end of file diff --git a/cmd/evrtools/main.go b/cmd/evrtools/main.go index cea300d..6e49510 100644 --- a/cmd/evrtools/main.go +++ b/cmd/evrtools/main.go @@ -7,6 +7,7 @@ import ( "io" "os" "path/filepath" + "strings" "github.com/EchoTools/evrFileTools/pkg/manifest" ) @@ -20,6 +21,7 @@ var ( preserveGroups bool forceOverwrite bool useDecimalName bool + exportTypes string ) func init() { @@ -31,6 +33,7 @@ func init() { flag.BoolVar(&preserveGroups, "preserve-groups", false, "Preserve frame grouping in output") flag.BoolVar(&forceOverwrite, "force", false, "Allow non-empty output directory") flag.BoolVar(&useDecimalName, "decimal-names", false, "Use decimal format for filenames (default is hex)") + flag.StringVar(&exportTypes, "export", "", "Comma-separated list of types to export (textures, tints)") } func main() { @@ -134,11 +137,37 @@ func runExtract() error { } defer pkg.Close() + var filterTypes []int64 + if exportTypes != "" { + for _, t := range strings.Split(exportTypes, ",") { + switch strings.TrimSpace(t) { + case "textures": + // Use variables to avoid constant overflow checks for negative int64s + t1 := uint64(0xBEAC1969CB7B8861) + t2 := uint64(0x4A4C32C49300B8A0) + t3 := uint64(0xe2efe7289d5985b8) + t4 := uint64(0x489bb35d53ca50e9) + filterTypes = append(filterTypes, + int64(t1), // -4707359568332879775 + int64(t2), // 5353709876897953952 + int64(t3), // -2094201140079393352 + int64(t4), // 5231972605540061417 + ) + case "tints": + filterTypes = append(filterTypes, + int64(uint64(0x24CBFD54E9A7F2EA)), // Folder: 24cbfd54e9a7f2ea + int64(uint64(0x32f30fe361939dee)), // 3671295590506143214 + ) + } + } + } + fmt.Println("Extracting files...") if err := pkg.Extract( outputDir, manifest.WithPreserveGroups(preserveGroups), manifest.WithDecimalNames(useDecimalName), + manifest.WithTypeFilter(filterTypes), ); err != nil { return fmt.Errorf("extract: %w", err) } @@ -154,6 +183,14 @@ func runBuild() error { return fmt.Errorf("scan files: %w", err) } + // 
If dataDir is provided, we are in "repack" mode where we merge original files + if dataDir != "" { + manifestPath := filepath.Join(dataDir, "manifests", packageName) + if _, err := os.Stat(manifestPath); err == nil { + return runRepack(files) + } + } + totalFiles := 0 for _, group := range files { totalFiles += len(group) @@ -180,3 +217,14 @@ func runBuild() error { fmt.Printf("Build complete. Output written to %s\n", outputDir) return nil } + +func runRepack(inputFiles [][]manifest.ScannedFile) error { + fmt.Println("Loading original manifest for repacking...") + manifestPath := filepath.Join(dataDir, "manifests", packageName) + m, err := manifest.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("read manifest: %w", err) + } + + return manifest.Repack(m, inputFiles, outputDir, packageName, dataDir) +} diff --git a/cmd/texconv/main.go b/cmd/texconv/main.go index 633cdc0..8fd6827 100644 --- a/cmd/texconv/main.go +++ b/cmd/texconv/main.go @@ -25,6 +25,7 @@ import ( "image" "image/png" "io" + "math" "os" "path/filepath" "strings" @@ -64,8 +65,12 @@ const ( DXGIFormatBC6HSF16 = 96 DXGIFormatBC7Unorm = 98 // High quality DXGIFormatBC7UnormSRGB = 99 + DXGIFormatR8Unorm = 61 // Grayscale + DXGIFormatR11G11B10Float = 26 // Packed Float DXGIFormatR8G8B8A8Unorm = 28 // Uncompressed RGBA DXGIFormatR8G8B8A8UnormSRGB = 29 + DXGIFormatB8G8R8A8UnormSRGB = 91 // BGRA sRGB + DXGIFormatB8G8R8A8Typeless = 87 // BGRA Typeless ) // DDSHeader represents the main DDS file header (124 bytes) @@ -219,7 +224,7 @@ func decodeDDS(inputPath, outputPath string) error { } // Decompress to RGBA - rgba, err := decompressBC(compressedData, info) + img, err := decompressBC(compressedData, info) if err != nil { return fmt.Errorf("decompress: %w", err) } @@ -231,7 +236,7 @@ func decodeDDS(inputPath, outputPath string) error { } defer outFile.Close() - if err := png.Encode(outFile, rgba); err != nil { + if err := png.Encode(outFile, img); err != nil { return fmt.Errorf("encode png: %w", 
err) } @@ -465,6 +470,26 @@ func parseDDSHeader(r io.ReadSeeker) (*TextureInfo, error) { info.FormatName = "BC7" info.Compression = "BC7" info.BytesPerPixel = 1 + case DXGIFormatR8Unorm: + info.FormatName = "R8_UNORM" + info.Compression = "None" + info.BytesPerPixel = 1 + case DXGIFormatR11G11B10Float: + info.FormatName = "R11G11B10_FLOAT" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + info.FormatName = "RGBA8" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatB8G8R8A8UnormSRGB: + info.FormatName = "BGRA8" + info.Compression = "None" + info.BytesPerPixel = 4 + case DXGIFormatB8G8R8A8Typeless: + info.FormatName = "BGRA8_TYPELESS" + info.Compression = "None" + info.BytesPerPixel = 4 default: return nil, fmt.Errorf("unsupported DXGI format: %d", info.Format) } @@ -498,32 +523,56 @@ func calculateMipSize(width, height, format uint32) uint32 { DXGIFormatBC6HUF16, DXGIFormatBC6HSF16, DXGIFormatBC7Unorm, DXGIFormatBC7UnormSRGB: return blockW * blockH * 16 // 16 bytes per block + case DXGIFormatR8Unorm: + return width * height + case DXGIFormatR11G11B10Float: + return width * height * 4 + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + return width * height * 4 + case DXGIFormatB8G8R8A8UnormSRGB: + return width * height * 4 + case DXGIFormatB8G8R8A8Typeless: + return width * height * 4 default: return width * height * 4 // Fallback: uncompressed RGBA } } // decompressBC decompresses BC-compressed data to RGBA -func decompressBC(data []byte, info *TextureInfo) (*image.RGBA, error) { - rgba := image.NewRGBA(image.Rect(0, 0, int(info.Width), int(info.Height))) +func decompressBC(data []byte, info *TextureInfo) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, int(info.Width), int(info.Height))) + + isSRGB := info.Format == DXGIFormatBC1UnormSRGB || + info.Format == DXGIFormatBC3UnormSRGB || + info.Format == DXGIFormatBC7UnormSRGB switch info.Format { case 
DXGIFormatBC1Unorm, DXGIFormatBC1UnormSRGB: - return decompressBC1(data, int(info.Width), int(info.Height)) + return decompressBC1(data, int(info.Width), int(info.Height), isSRGB) case DXGIFormatBC3Unorm, DXGIFormatBC3UnormSRGB: - return decompressBC3(data, int(info.Width), int(info.Height)) + return decompressBC3(data, int(info.Width), int(info.Height), isSRGB) case DXGIFormatBC5Unorm, DXGIFormatBC5SNorm: return decompressBC5(data, int(info.Width), int(info.Height)) + case DXGIFormatR8Unorm: + return decompressR8(data, int(info.Width), int(info.Height)) + case DXGIFormatR11G11B10Float: + return decompressR11G11B10Float(data, int(info.Width), int(info.Height)) + case DXGIFormatR8G8B8A8Unorm, DXGIFormatR8G8B8A8UnormSRGB: + return decompressRGBA(data, int(info.Width), int(info.Height)) + case DXGIFormatB8G8R8A8UnormSRGB: + return decompressBGRA(data, int(info.Width), int(info.Height)) + case DXGIFormatB8G8R8A8Typeless: + return decompressBGRA(data, int(info.Width), int(info.Height)) default: return nil, fmt.Errorf("decompression not implemented for format: %s", info.FormatName) } - return rgba, nil + return nrgba, nil } // decompressBC1 decompresses BC1/DXT1 to RGBA -func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { - rgba := image.NewRGBA(image.Rect(0, 0, width, height)) +func decompressBC1(data []byte, width, height int, isSRGB bool) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) blockW := (width + 3) / 4 blockH := (height + 3) / 4 @@ -541,40 +590,73 @@ func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { offset += 4 // Decode RGB565 - r0 := uint8((c0 >> 11) * 255 / 31) - g0 := uint8(((c0 >> 5) & 0x3F) * 255 / 63) - b0 := uint8((c0 & 0x1F) * 255 / 31) - - r1 := uint8((c1 >> 11) * 255 / 31) - g1 := uint8(((c1 >> 5) & 0x3F) * 255 / 63) - b1 := uint8((c1 & 0x1F) * 255 / 31) + r0_5 := (c0 >> 11) & 0x1F + g0_6 := (c0 >> 5) & 0x3F + b0_5 := c0 & 0x1F + r0_8 := uint8((r0_5 << 3) | (r0_5 >> 
2)) + g0_8 := uint8((g0_6 << 2) | (g0_6 >> 4)) + b0_8 := uint8((b0_5 << 3) | (b0_5 >> 2)) + + r1_5 := (c1 >> 11) & 0x1F + g1_6 := (c1 >> 5) & 0x3F + b1_5 := c1 & 0x1F + r1_8 := uint8((r1_5 << 3) | (r1_5 >> 2)) + g1_8 := uint8((g1_6 << 2) | (g1_6 >> 4)) + b1_8 := uint8((b1_5 << 3) | (b1_5 >> 2)) // Color palette var colors [4][4]uint8 - colors[0] = [4]uint8{r0, g0, b0, 255} - colors[1] = [4]uint8{r1, g1, b1, 255} - - if c0 > c1 { - colors[2] = [4]uint8{ - (2*r0 + r1) / 3, - (2*g0 + g1) / 3, - (2*b0 + b1) / 3, - 255, + + if isSRGB { + lr0 := srgbToLinear(r0_8) + lg0 := srgbToLinear(g0_8) + lb0 := srgbToLinear(b0_8) + lr1 := srgbToLinear(r1_8) + lg1 := srgbToLinear(g1_8) + lb1 := srgbToLinear(b1_8) + + var linearColors [4][3]float32 + linearColors[0] = [3]float32{lr0, lg0, lb0} + linearColors[1] = [3]float32{lr1, lg1, lb1} + + if c0 > c1 { + linearColors[2] = [3]float32{(2*lr0 + lr1) / 3, (2*lg0 + lg1) / 3, (2*lb0 + lb1) / 3} + linearColors[3] = [3]float32{(lr0 + 2*lr1) / 3, (lg0 + 2*lg1) / 3, (lb0 + 2*lb1) / 3} + } else { + linearColors[2] = [3]float32{(lr0 + lr1) / 2, (lg0 + lg1) / 2, (lb0 + lb1) / 2} + linearColors[3] = [3]float32{0, 0, 0} } - colors[3] = [4]uint8{ - (r0 + 2*r1) / 3, - (g0 + 2*g1) / 3, - (b0 + 2*b1) / 3, - 255, + + for i := 0; i < 4; i++ { + colors[i][0] = linearToSrgb(linearColors[i][0]) + colors[i][1] = linearToSrgb(linearColors[i][1]) + colors[i][2] = linearToSrgb(linearColors[i][2]) + colors[i][3] = 255 + } + if c0 <= c1 { + colors[3][3] = 0 } } else { - colors[2] = [4]uint8{ - (r0 + r1) / 2, - (g0 + g1) / 2, - (b0 + b1) / 2, - 255, + colors[0] = [4]uint8{r0_8, g0_8, b0_8, 255} + colors[1] = [4]uint8{r1_8, g1_8, b1_8, 255} + + if c0 > c1 { + colors[2] = [4]uint8{ + (2*r0_8 + r1_8) / 3, + (2*g0_8 + g1_8) / 3, + (2*b0_8 + b1_8) / 3, + 255, + } + colors[3] = [4]uint8{ + (r0_8 + 2*r1_8) / 3, + (g0_8 + 2*g1_8) / 3, + (b0_8 + 2*b1_8) / 3, + 255, + } + } else { + colors[2] = [4]uint8{(r0_8 + r1_8) / 2, (g0_8 + g1_8) / 2, (b0_8 + b1_8) / 2, 255} + 
colors[3] = [4]uint8{0, 0, 0, 0} // Transparent } - colors[3] = [4]uint8{0, 0, 0, 0} // Transparent } // Read index bits @@ -594,22 +676,22 @@ func decompressBC1(data []byte, width, height int) (*image.RGBA, error) { idx := (indices >> (2 * (py*4 + px))) & 3 color := colors[idx] - offset := rgba.PixOffset(x, y) - rgba.Pix[offset+0] = color[0] - rgba.Pix[offset+1] = color[1] - rgba.Pix[offset+2] = color[2] - rgba.Pix[offset+3] = color[3] + offset := nrgba.PixOffset(x, y) + nrgba.Pix[offset+0] = color[0] + nrgba.Pix[offset+1] = color[1] + nrgba.Pix[offset+2] = color[2] + nrgba.Pix[offset+3] = color[3] } } } } - return rgba, nil + return nrgba, nil } // decompressBC3 decompresses BC3/DXT5 to RGBA -func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { - rgba := image.NewRGBA(image.Rect(0, 0, width, height)) +func decompressBC3(data []byte, width, height int, isSRGB bool) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) blockW := (width + 3) / 4 blockH := (height + 3) / 4 @@ -651,19 +733,46 @@ func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { c1 := uint16(data[offset+2]) | uint16(data[offset+3])<<8 offset += 4 - r0 := uint8((c0 >> 11) * 255 / 31) - g0 := uint8(((c0 >> 5) & 0x3F) * 255 / 63) - b0 := uint8((c0 & 0x1F) * 255 / 31) + r0_5 := (c0 >> 11) & 0x1F + g0_6 := (c0 >> 5) & 0x3F + b0_5 := c0 & 0x1F + r0_8 := uint8((r0_5 << 3) | (r0_5 >> 2)) + g0_8 := uint8((g0_6 << 2) | (g0_6 >> 4)) + b0_8 := uint8((b0_5 << 3) | (b0_5 >> 2)) - r1 := uint8((c1 >> 11) * 255 / 31) - g1 := uint8(((c1 >> 5) & 0x3F) * 255 / 63) - b1 := uint8((c1 & 0x1F) * 255 / 31) + r1_5 := (c1 >> 11) & 0x1F + g1_6 := (c1 >> 5) & 0x3F + b1_5 := c1 & 0x1F + r1_8 := uint8((r1_5 << 3) | (r1_5 >> 2)) + g1_8 := uint8((g1_6 << 2) | (g1_6 >> 4)) + b1_8 := uint8((b1_5 << 3) | (b1_5 >> 2)) var colors [4][3]uint8 - colors[0] = [3]uint8{r0, g0, b0} - colors[1] = [3]uint8{r1, g1, b1} - colors[2] = [3]uint8{(2*r0 + r1) / 3, (2*g0 + g1) / 3, 
(2*b0 + b1) / 3} - colors[3] = [3]uint8{(r0 + 2*r1) / 3, (g0 + 2*g1) / 3, (b0 + 2*b1) / 3} + if isSRGB { + lr0 := srgbToLinear(r0_8) + lg0 := srgbToLinear(g0_8) + lb0 := srgbToLinear(b0_8) + lr1 := srgbToLinear(r1_8) + lg1 := srgbToLinear(g1_8) + lb1 := srgbToLinear(b1_8) + + var linearColors [4][3]float32 + linearColors[0] = [3]float32{lr0, lg0, lb0} + linearColors[1] = [3]float32{lr1, lg1, lb1} + linearColors[2] = [3]float32{(2*lr0 + lr1) / 3, (2*lg0 + lg1) / 3, (2*lb0 + lb1) / 3} + linearColors[3] = [3]float32{(lr0 + 2*lr1) / 3, (lg0 + 2*lg1) / 3, (lb0 + 2*lb1) / 3} + + for i := 0; i < 4; i++ { + colors[i][0] = linearToSrgb(linearColors[i][0]) + colors[i][1] = linearToSrgb(linearColors[i][1]) + colors[i][2] = linearToSrgb(linearColors[i][2]) + } + } else { + colors[0] = [3]uint8{r0_8, g0_8, b0_8} + colors[1] = [3]uint8{r1_8, g1_8, b1_8} + colors[2] = [3]uint8{(2*r0_8 + r1_8) / 3, (2*g0_8 + g1_8) / 3, (2*b0_8 + b1_8) / 3} + colors[3] = [3]uint8{(r0_8 + 2*r1_8) / 3, (g0_8 + 2*g1_8) / 3, (b0_8 + 2*b1_8) / 3} + } colorIndices := uint32(data[offset]) | uint32(data[offset+1])<<8 | uint32(data[offset+2])<<16 | uint32(data[offset+3])<<24 @@ -685,26 +794,159 @@ func decompressBC3(data []byte, width, height int) (*image.RGBA, error) { color := colors[colorIdx] alpha := alphas[alphaIdx] - pixOffset := rgba.PixOffset(x, y) - rgba.Pix[pixOffset+0] = color[0] - rgba.Pix[pixOffset+1] = color[1] - rgba.Pix[pixOffset+2] = color[2] - rgba.Pix[pixOffset+3] = alpha + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = color[0] + nrgba.Pix[pixOffset+1] = color[1] + nrgba.Pix[pixOffset+2] = color[2] + nrgba.Pix[pixOffset+3] = alpha } } } } - return rgba, nil + return nrgba, nil } // decompressBC5 decompresses BC5 (normal maps) to RGBA -func decompressBC5(data []byte, width, height int) (*image.RGBA, error) { +func decompressBC5(data []byte, width, height int) (*image.NRGBA, error) { // BC5 stores two channels (RG for normal maps) // We'll decode them and reconstruct Z = 
sqrt(1 - X^2 - Y^2) return nil, fmt.Errorf("BC5 decompression not yet implemented") } +// decompressR8 decompresses R8_UNORM (grayscale) to RGBA +func decompressR8(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height { + return nil, fmt.Errorf("data truncated") + } + + offset := 0 + for y := 0; y < height; y++ { + for x := 0; x < width; x++ { + v := data[offset] + offset++ + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = v + nrgba.Pix[pixOffset+1] = v + nrgba.Pix[pixOffset+2] = v + nrgba.Pix[pixOffset+3] = 255 + } + } + return nrgba, nil +} + +// decompressRGBA decompresses uncompressed RGBA to RGBA +func decompressRGBA(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + copy(nrgba.Pix, data[:width*height*4]) + return nrgba, nil +} + +// decompressBGRA decompresses uncompressed BGRA to RGBA +func decompressBGRA(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + + count := width * height + for i := 0; i < count; i++ { + offset := i * 4 + b := data[offset] + g := data[offset+1] + r := data[offset+2] + a := data[offset+3] + + nrgba.Pix[offset] = r + nrgba.Pix[offset+1] = g + nrgba.Pix[offset+2] = b + nrgba.Pix[offset+3] = a + } + return nrgba, nil +} + +// decompressR11G11B10Float decompresses packed float format to RGBA +func decompressR11G11B10Float(data []byte, width, height int) (*image.NRGBA, error) { + nrgba := image.NewNRGBA(image.Rect(0, 0, width, height)) + if len(data) < width*height*4 { + return nil, fmt.Errorf("data truncated") + } + + offset := 0 + for y := 0; y < height; y++ { + for x := 0; x < width; x++ { + packed := uint32(data[offset]) | 
uint32(data[offset+1])<<8 | uint32(data[offset+2])<<16 | uint32(data[offset+3])<<24 + offset += 4 + + r := f11ToF32(packed & 0x7FF) + g := f11ToF32((packed >> 11) & 0x7FF) + b := f10ToF32((packed >> 22) & 0x3FF) + + // Clamp to 0-255 + r8 := uint8(math.Min(255, math.Max(0, float64(r)*255))) + g8 := uint8(math.Min(255, math.Max(0, float64(g)*255))) + b8 := uint8(math.Min(255, math.Max(0, float64(b)*255))) + + pixOffset := nrgba.PixOffset(x, y) + nrgba.Pix[pixOffset+0] = r8 + nrgba.Pix[pixOffset+1] = g8 + nrgba.Pix[pixOffset+2] = b8 + nrgba.Pix[pixOffset+3] = 255 + } + } + return nrgba, nil +} + +func f11ToF32(u uint32) float32 { + exponent := (u >> 6) & 0x1F + mantissa := u & 0x3F + if exponent == 0 { + if mantissa == 0 { + return 0.0 + } + return float32(mantissa) / 64.0 * (1.0 / 16384.0) + } else if exponent == 31 { + return 65504.0 + } + return float32(math.Pow(2, float64(exponent)-15)) * (1.0 + float32(mantissa)/64.0) +} + +func f10ToF32(u uint32) float32 { + exponent := (u >> 5) & 0x1F + mantissa := u & 0x1F + if exponent == 0 { + if mantissa == 0 { + return 0.0 + } + return float32(mantissa) / 32.0 * (1.0 / 16384.0) + } else if exponent == 31 { + return 65504.0 + } + return float32(math.Pow(2, float64(exponent)-15)) * (1.0 + float32(mantissa)/32.0) +} + +// srgbToLinear converts an sRGB byte value to a linear float32 value. +func srgbToLinear(c uint8) float32 { + v := float32(c) / 255.0 + if v <= 0.04045 { + return v / 12.92 + } + return float32(math.Pow(float64((v+0.055)/1.055), 2.4)) +} + +// linearToSrgb converts a linear float32 value to an sRGB byte value. 
+func linearToSrgb(v float32) uint8 { + if v <= 0.0031308 { + return uint8(math.Min(255, math.Max(0, float64(v)*12.92*255.0))) + } + srgb := 1.055*math.Pow(float64(v), 1.0/2.4) - 0.055 + return uint8(math.Min(255, math.Max(0, srgb*255.0))) +} + // writeDDSFile writes a complete DDS file with DX10 header func writeDDSFile(w io.Writer, width, height, mipCount, dxgiFormat uint32, compressedData []byte) error { // Calculate pitch/linear size diff --git a/make b/make new file mode 100644 index 0000000..e69de29 diff --git a/pkg/manifest/builder.go b/pkg/manifest/builder.go index 20c523c..b120aeb 100644 --- a/pkg/manifest/builder.go +++ b/pkg/manifest/builder.go @@ -6,6 +6,7 @@ import ( "math" "os" "path/filepath" + "strings" "github.com/DataDog/zstd" ) @@ -91,24 +92,29 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { } for _, file := range group { - data, err := os.ReadFile(file.Path) - if err != nil { - return nil, fmt.Errorf("read file %s: %w", file.Path, err) + var data []byte + var err error + + if file.Path != "" { + data, err = os.ReadFile(file.Path) + } else if file.SrcPackage != nil && file.SrcContent != nil { + data, err = file.SrcPackage.ReadContent(file.SrcContent) + if err != nil && strings.Contains(err.Error(), "too short") { + fmt.Printf("Warning: skipping corrupted file %x/%x: %v\n", file.TypeSymbol, file.FileSymbol, err) + data = []byte{} + err = nil + } + } else { + err = fmt.Errorf("no source for file %x/%x", file.TypeSymbol, file.FileSymbol) } - manifest.FrameContents = append(manifest.FrameContents, FrameContent{ - TypeSymbol: file.TypeSymbol, - FileSymbol: file.FileSymbol, - FrameIndex: frameIndex, - DataOffset: currentOffset, - Size: uint32(len(data)), - Alignment: 1, - }) + if err != nil { + return nil, fmt.Errorf("read file %x/%x: %w", file.TypeSymbol, file.FileSymbol, err) + } - manifest.Metadata = append(manifest.Metadata, FileMetadata{ - TypeSymbol: file.TypeSymbol, - FileSymbol: file.FileSymbol, - }) + if 
!file.SkipManifest { + b.addFileToManifest(manifest, file, frameIndex, currentOffset) + } currentFrame.Write(data) currentOffset += uint32(len(data)) @@ -131,12 +137,33 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { return manifest, nil } +func (b *Builder) addFileToManifest(manifest *Manifest, file ScannedFile, frameIndex, offset uint32) { + alignment := uint32(1) + + manifest.FrameContents = append(manifest.FrameContents, FrameContent{ + TypeSymbol: file.TypeSymbol, + FileSymbol: file.FileSymbol, + FrameIndex: frameIndex, + DataOffset: offset, + Size: file.Size, + Alignment: alignment, + }) + + manifest.Metadata = append(manifest.Metadata, FileMetadata{ + TypeSymbol: file.TypeSymbol, + FileSymbol: file.FileSymbol, + }) +} + func (b *Builder) writeFrame(manifest *Manifest, data *bytes.Buffer, index uint32) error { compressed, err := zstd.CompressLevel(nil, data.Bytes(), b.compressionLevel) if err != nil { return fmt.Errorf("compress frame %d: %w", index, err) } + return b.writeCompressedFrame(manifest, compressed, uint32(data.Len())) +} +func (b *Builder) writeCompressedFrame(manifest *Manifest, compressed []byte, uncompressedSize uint32) error { packageIndex := manifest.Header.PackageCount - 1 packagePath := filepath.Join(b.outputDir, "packages", fmt.Sprintf("%s_%d", b.packageName, packageIndex)) @@ -162,14 +189,14 @@ func (b *Builder) writeFrame(manifest *Manifest, data *bytes.Buffer, index uint3 defer f.Close() if _, err := f.Write(compressed); err != nil { - return fmt.Errorf("write frame %d: %w", index, err) + return fmt.Errorf("write compressed data: %w", err) } manifest.Frames = append(manifest.Frames, Frame{ PackageIndex: packageIndex, Offset: offset, CompressedSize: uint32(len(compressed)), - Length: uint32(data.Len()), + Length: uncompressedSize, }) b.incrementSection(&manifest.Header.Frames, 1) diff --git a/pkg/manifest/package.go b/pkg/manifest/package.go index de00e26..e11feb3 100644 --- a/pkg/manifest/package.go +++ 
b/pkg/manifest/package.go @@ -14,6 +14,10 @@ import ( type Package struct { manifest *Manifest files []packageFile + + // Decompression cache + lastFrameIdx uint32 + lastFrameData []byte } type packageFile interface { @@ -31,8 +35,9 @@ func OpenPackage(manifest *Manifest, basePath string) (*Package, error) { count := manifest.PackageCount() pkg := &Package{ - manifest: manifest, - files: make([]packageFile, count), + manifest: manifest, + files: make([]packageFile, count), + lastFrameIdx: ^uint32(0), // Invalid index } for i := range count { @@ -58,6 +63,7 @@ func (p *Package) Close() error { } } } + p.lastFrameData = nil return lastErr } @@ -66,6 +72,84 @@ func (p *Package) Manifest() *Manifest { return p.manifest } +// ReadContent reads the data for a specific file content. +func (p *Package) ReadContent(fc *FrameContent) ([]byte, error) { + // Check cache + if p.lastFrameData != nil && p.lastFrameIdx == fc.FrameIndex { + if uint32(len(p.lastFrameData)) < fc.DataOffset+fc.Size { + return nil, fmt.Errorf("frame data too short for content") + } + return p.lastFrameData[fc.DataOffset : fc.DataOffset+fc.Size], nil + } + + // Load frame + if int(fc.FrameIndex) >= len(p.manifest.Frames) { + return nil, fmt.Errorf("invalid frame index %d", fc.FrameIndex) + } + frame := p.manifest.Frames[fc.FrameIndex] + + if frame.Length == 0 { + return nil, nil + } + + // Read compressed data + if int(frame.PackageIndex) >= len(p.files) { + return nil, fmt.Errorf("invalid package index %d", frame.PackageIndex) + } + file := p.files[frame.PackageIndex] + if _, err := file.Seek(int64(frame.Offset), io.SeekStart); err != nil { + return nil, fmt.Errorf("seek frame %d: %w", fc.FrameIndex, err) + } + + compressed := make([]byte, frame.CompressedSize) + if _, err := io.ReadFull(file, compressed); err != nil { + return nil, fmt.Errorf("read frame %d: %w", fc.FrameIndex, err) + } + + // Decompress + decompressed, err := zstd.Decompress(nil, compressed) + if err != nil { + return nil, 
fmt.Errorf("decompress frame %d: %w", fc.FrameIndex, err) + } + + // Update cache + p.lastFrameIdx = fc.FrameIndex + p.lastFrameData = decompressed + + if uint32(len(decompressed)) < fc.DataOffset+fc.Size { + return nil, fmt.Errorf("decompressed frame too short") + } + + return decompressed[fc.DataOffset : fc.DataOffset+fc.Size], nil +} + +// ReadRawFrame reads the raw compressed data for a specific frame. +func (p *Package) ReadRawFrame(frameIndex uint32) ([]byte, error) { + if int(frameIndex) >= len(p.manifest.Frames) { + return nil, fmt.Errorf("invalid frame index %d", frameIndex) + } + frame := p.manifest.Frames[frameIndex] + + if frame.Length == 0 { + return nil, nil + } + + if int(frame.PackageIndex) >= len(p.files) { + return nil, fmt.Errorf("invalid package index %d", frame.PackageIndex) + } + file := p.files[frame.PackageIndex] + if _, err := file.Seek(int64(frame.Offset), io.SeekStart); err != nil { + return nil, fmt.Errorf("seek frame %d: %w", frameIndex, err) + } + + compressed := make([]byte, frame.CompressedSize) + if _, err := io.ReadFull(file, compressed); err != nil { + return nil, fmt.Errorf("read frame %d: %w", frameIndex, err) + } + + return compressed, nil +} + // Extract extracts all files from the package to the output directory. 
func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { cfg := &extractConfig{} @@ -117,6 +201,10 @@ func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { // Extract files from this frame using pre-built index contents := frameIndex[uint32(frameIdx)] for _, fc := range contents { + if len(cfg.allowedTypes) > 0 && !cfg.allowedTypes[fc.TypeSymbol] { + continue + } + var fileName string if cfg.decimalNames { fileName = strconv.FormatInt(fc.FileSymbol, 10) @@ -154,6 +242,7 @@ func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { type extractConfig struct { preserveGroups bool decimalNames bool + allowedTypes map[int64]bool } // ExtractOption configures extraction behavior. @@ -172,3 +261,15 @@ func WithDecimalNames(decimal bool) ExtractOption { c.decimalNames = decimal } } + +// WithTypeFilter configures extraction to only include specific file types. +func WithTypeFilter(types []int64) ExtractOption { + return func(c *extractConfig) { + if len(types) > 0 { + c.allowedTypes = make(map[int64]bool, len(types)) + for _, t := range types { + c.allowedTypes[t] = true + } + } + } +} diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go new file mode 100644 index 0000000..9792994 --- /dev/null +++ b/pkg/manifest/repack.go @@ -0,0 +1,383 @@ +package manifest + +import ( + "bytes" + "encoding/binary" + "fmt" + "math" + "os" + "path/filepath" + "runtime" + "sort" + "sync" + + "github.com/DataDog/zstd" +) + +// Pools to eliminate GC overhead +var ( + readPool = sync.Pool{New: func() interface{} { return make([]byte, 0, 1024*1024) }} + decompPool = sync.Pool{New: func() interface{} { return make([]byte, 0, 4*1024*1024) }} + compPool = sync.Pool{New: func() interface{} { return make([]byte, 0, 1024*1024) }} + constructionPool = sync.Pool{New: func() interface{} { return bytes.NewBuffer(make([]byte, 0, 4*1024*1024)) }} +) + +type frameResult struct { + index int + data []byte + err error + decompressedSize uint32 + 
isModified bool + shouldSkip bool + rawReadBuf []byte + decompBuf []byte +} + +type fcWrapper struct { + index int + fc FrameContent +} + +type packageWriter struct { + fileHandle *os.File + pkgIndex uint32 + outputDir string + pkgName string + created map[uint32]bool +} + +func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize uint32) error { + os.MkdirAll(fmt.Sprintf("%s/packages", pw.outputDir), 0777) + + cEntry := Frame{} + activePackageNum := uint32(0) + if len(manifest.Frames) > 0 { + cEntry = manifest.Frames[len(manifest.Frames)-1] + activePackageNum = cEntry.PackageIndex + } + + if int64(cEntry.Offset)+int64(cEntry.CompressedSize)+int64(len(data)) > math.MaxInt32 { + activePackageNum++ + manifest.Header.PackageCount = activePackageNum + 1 + } + + if pw.fileHandle == nil || pw.pkgIndex != activePackageNum { + if pw.fileHandle != nil { + pw.fileHandle.Close() + } + + currentPackagePath := fmt.Sprintf("%s/packages/%s_%d", pw.outputDir, pw.pkgName, activePackageNum) + flags := os.O_RDWR | os.O_CREATE | os.O_APPEND + + if !pw.created[activePackageNum] { + flags = os.O_RDWR | os.O_CREATE | os.O_TRUNC + pw.created[activePackageNum] = true + } + + f, err := os.OpenFile(currentPackagePath, flags, 0777) + if err != nil { + return err + } + pw.fileHandle = f + pw.pkgIndex = activePackageNum + } + + if _, err := pw.fileHandle.Write(data); err != nil { + return err + } + + newEntry := Frame{ + PackageIndex: activePackageNum, + Offset: cEntry.Offset + cEntry.CompressedSize, + CompressedSize: uint32(len(data)), + Length: decompressedSize, + } + if int64(newEntry.Offset)+int64(newEntry.CompressedSize) > math.MaxInt32 { + newEntry.Offset = 0 + } + + manifest.Frames = append(manifest.Frames, newEntry) + incrementSection(&manifest.Header.Frames, 1) + + return nil +} + +func (pw *packageWriter) close() { + if pw.fileHandle != nil { + pw.fileHandle.Close() + pw.fileHandle = nil + } +} + +func incrementSection(s *Section, count int) { + s.Count += 
uint64(count) + s.ElementCount += uint64(count) + s.Length += s.ElementSize * uint64(count) +} + +func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, dataDir string) error { + fmt.Println("Mapping modified files...") + + totalFiles := 0 + for _, chunk := range fileMap { + totalFiles += len(chunk) + } + + modifiedFilesLookupTable := make(map[[128]byte]ScannedFile, totalFiles) + frameContentsLookupTable := make(map[[128]byte]FrameContent, manifest.Header.FrameContents.ElementCount) + modifiedFrames := make(map[uint32]bool) + + for _, v := range manifest.FrameContents { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(v.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(v.FileSymbol)) + frameContentsLookupTable[buf] = v + } + + for _, fileGroup := range fileMap { + for _, v := range fileGroup { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(v.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(v.FileSymbol)) + + if content, ok := frameContentsLookupTable[buf]; ok { + modifiedFrames[content.FrameIndex] = true + modifiedFilesLookupTable[buf] = v + } + } + } + fmt.Printf("Mapped %d files to modify.\n", len(modifiedFilesLookupTable)) + + contentsByFrame := make(map[uint32][]fcWrapper) + for k, v := range manifest.FrameContents { + contentsByFrame[v.FrameIndex] = append(contentsByFrame[v.FrameIndex], fcWrapper{index: k, fc: v}) + } + + newManifest := *manifest + newManifest.Frames = make([]Frame, 0) + origFramesHeader := manifest.Header.Frames + newManifest.Header.PackageCount = 1 + newManifest.Header.Frames = Section{ + Unk1: origFramesHeader.Unk1, + Unk2: origFramesHeader.Unk2, + ElementSize: 16, + } + + packages := make(map[uint32]*os.File) + for i := 0; i < int(manifest.Header.PackageCount); i++ { + pFilePath := fmt.Sprintf("%s/packages/%s_%d", dataDir, packageName, i) + f, err := os.Open(pFilePath) + if err != nil { + return fmt.Errorf("failed to open package %s: %v", 
pFilePath, err) + } + packages[uint32(i)] = f + defer f.Close() + } + + totalFrames := int(manifest.Header.Frames.ElementCount) + lookaheadSize := runtime.NumCPU() * 16 + futureResults := make(chan chan frameResult, lookaheadSize) + writer := &packageWriter{outputDir: outputDir, pkgName: packageName, created: make(map[uint32]bool)} + defer writer.close() + + go func() { + defer close(futureResults) + for i := 0; i < totalFrames; i++ { + resultChan := make(chan frameResult, 1) + futureResults <- resultChan + + go func(idx int, ch chan frameResult) { + v := manifest.Frames[idx] + isMod := modifiedFrames[uint32(idx)] + res := frameResult{index: idx, isModified: isMod, decompressedSize: v.Length} + + rawReadBuf := readPool.Get().([]byte) + if cap(rawReadBuf) < int(v.CompressedSize) { + rawReadBuf = make([]byte, int(v.CompressedSize)) + } else { + rawReadBuf = rawReadBuf[:v.CompressedSize] + } + res.rawReadBuf = rawReadBuf + + activeFile := packages[v.PackageIndex] + if v.CompressedSize > 0 { + if _, err := activeFile.ReadAt(rawReadBuf, int64(v.Offset)); err != nil { + if v.Length == 0 { + res.shouldSkip = true + ch <- res + return + } + res.err = err + ch <- res + return + } + } + + if !isMod { + res.data = rawReadBuf + ch <- res + return + } + + decompBuf := decompPool.Get().([]byte) + decompBytes, err := zstd.Decompress(decompBuf[:0], rawReadBuf) + if err != nil { + res.err = err + ch <- res + return + } + res.decompBuf = decompBytes + + bufObj := constructionPool.Get() + constructionBuf := bufObj.(*bytes.Buffer) + constructionBuf.Reset() + defer constructionPool.Put(bufObj) + + sorted := make([]fcWrapper, 0) + if contents, ok := contentsByFrame[uint32(idx)]; ok { + sorted = append(sorted, contents...) 
+ } + sort.Slice(sorted, func(a, b int) bool { + return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset + }) + + for j := 0; j < len(sorted); j++ { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + + if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { + modData, err := os.ReadFile(modFile.Path) + if err != nil { + res.err = err + ch <- res + return + } + constructionBuf.Write(modData) + } else { + start := sorted[j].fc.DataOffset + end := start + sorted[j].fc.Size + constructionBuf.Write(decompBytes[start:end]) + } + } + + compBuf := compPool.Get().([]byte) + encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + res.data = encodedData + res.decompressedSize = uint32(constructionBuf.Len()) + + ch <- res + }(i, resultChan) + } + }() + + fmt.Println("Starting repack...") + for resultCh := range futureResults { + res := <-resultCh + if res.err != nil { + return res.err + } + + if res.shouldSkip { + if res.rawReadBuf != nil { + readPool.Put(res.rawReadBuf) + } + if res.decompBuf != nil { + decompPool.Put(res.decompBuf) + } + if res.isModified && res.data != nil { + compPool.Put(res.data) + } + continue + } + + if res.isModified { + sorted := make([]fcWrapper, 0) + if contents, ok := contentsByFrame[uint32(res.index)]; ok { + sorted = append(sorted, contents...) 
+ } + sort.Slice(sorted, func(a, b int) bool { + return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset + }) + + currentOffset := uint32(0) + for j := 0; j < len(sorted); j++ { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + + size := sorted[j].fc.Size + if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { + size = modFile.Size + } + + newManifest.FrameContents[sorted[j].index] = FrameContent{ + TypeSymbol: sorted[j].fc.TypeSymbol, + FileSymbol: sorted[j].fc.FileSymbol, + FrameIndex: sorted[j].fc.FrameIndex, + DataOffset: currentOffset, + Size: size, + Alignment: sorted[j].fc.Alignment, + } + currentOffset += size + } + } + + if err := writer.write(&newManifest, res.data, res.decompressedSize); err != nil { + return err + } + + if res.isModified { + if res.rawReadBuf != nil { + readPool.Put(res.rawReadBuf) + } + if res.decompBuf != nil { + decompPool.Put(res.decompBuf) + } + if res.data != nil { + compPool.Put(res.data) + } + } else { + if res.data != nil { + readPool.Put(res.data) + } + } + } + + writer.close() + + actualPkgCount := uint32(0) + for { + path := fmt.Sprintf("%s/packages/%s_%d", outputDir, packageName, actualPkgCount) + if _, err := os.Stat(path); err != nil { + break + } + actualPkgCount++ + } + newManifest.Header.PackageCount = actualPkgCount + + for i := uint32(0); i < newManifest.Header.PackageCount; i++ { + path := fmt.Sprintf("%s/packages/%s_%d", outputDir, packageName, i) + stats, err := os.Stat(path) + if err != nil { + continue + } + newEntry := Frame{ + PackageIndex: i, + Offset: uint32(stats.Size()), + CompressedSize: 0, Length: 0, + } + newManifest.Frames = append(newManifest.Frames, newEntry) + incrementSection(&newManifest.Header.Frames, 1) + } + + newManifest.Frames = append(newManifest.Frames, Frame{}) + incrementSection(&newManifest.Header.Frames, 1) + + manifestDir := 
filepath.Join(outputDir, "manifests") + if err := os.MkdirAll(manifestDir, 0755); err != nil { + return fmt.Errorf("create manifest dir: %w", err) + } + + return WriteFile(filepath.Join(manifestDir, packageName), &newManifest) +} diff --git a/pkg/manifest/scanner.go b/pkg/manifest/scanner.go index 5e7ceaa..1618644 100644 --- a/pkg/manifest/scanner.go +++ b/pkg/manifest/scanner.go @@ -14,6 +14,11 @@ type ScannedFile struct { FileSymbol int64 Path string Size uint32 + + // Source for repacking (optional) + SrcPackage *Package + SrcContent *FrameContent + SkipManifest bool } // ScanFiles walks the input directory and returns files grouped by chunk number. @@ -29,26 +34,50 @@ func ScanFiles(inputDir string) ([][]ScannedFile, error) { return nil } - // Parse directory structure - dir := filepath.Dir(path) - parts := strings.Split(filepath.ToSlash(dir), "/") - if len(parts) < 3 { - return fmt.Errorf("invalid path structure: %s", path) + relPath, err := filepath.Rel(inputDir, path) + if err != nil { + return fmt.Errorf("failed to get relative path: %w", err) } - chunkNum, err := strconv.ParseInt(parts[len(parts)-3], 10, 64) - if err != nil { - return fmt.Errorf("parse chunk number: %w", err) + // Normalize separators + relPath = filepath.ToSlash(relPath) + parts := strings.Split(relPath, "/") + + var chunkNum int64 = 0 + var typeStr, fileStr string + + if len(parts) == 3 { + if c, err := strconv.ParseInt(parts[0], 10, 64); err == nil { + chunkNum = c + typeStr = parts[1] + fileStr = parts[2] + } else { + typeStr = parts[1] + fileStr = parts[2] + } + } else if len(parts) == 2 { + typeStr = parts[0] + fileStr = parts[1] + } else { + return nil // Skip } - typeSymbol, err := strconv.ParseInt(parts[len(parts)-2], 10, 64) + parseSymbol := func(s string) (int64, error) { + s = strings.TrimSuffix(s, filepath.Ext(s)) + if u, err := strconv.ParseUint(s, 16, 64); err == nil { + return int64(u), nil + } + return strconv.ParseInt(s, 10, 64) + } + + typeSymbol, err := 
parseSymbol(typeStr) if err != nil { - return fmt.Errorf("parse type symbol: %w", err) + return nil } - fileSymbol, err := strconv.ParseInt(filepath.Base(path), 10, 64) + fileSymbol, err := parseSymbol(fileStr) if err != nil { - return fmt.Errorf("parse file symbol: %w", err) + return nil } size := info.Size() From 75aedeeb5ff539979ab21967c76fbf6076da450f Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Fri, 27 Feb 2026 16:12:56 +0000 Subject: [PATCH 04/11] Delete EVR_Texture_Editor.py --- EVR_Texture_Editor.py | 3065 ----------------------------------------- 1 file changed, 3065 deletions(-) delete mode 100644 EVR_Texture_Editor.py diff --git a/EVR_Texture_Editor.py b/EVR_Texture_Editor.py deleted file mode 100644 index f3c321f..0000000 --- a/EVR_Texture_Editor.py +++ /dev/null @@ -1,3065 +0,0 @@ -import os -import sys -import struct -import tkinter as tk -from tkinter import ttk, filedialog, messagebox, scrolledtext -import shutil -import tempfile -import subprocess -import threading -import json -import glob -import time -import zipfile -import urllib.request -import webbrowser -from pathlib import Path -from concurrent.futures import ThreadPoolExecutor, as_completed - -try: - from PIL import Image, ImageTk, ImageDraw, ImageFont - HAS_PIL = True -except ImportError: - HAS_PIL = False - messagebox.showerror("Missing Dependencies", "Pillow library is required but not installed.\nPlease install it manually: pip install Pillow") - sys.exit(1) - -# --- SETTINGS & PATH MANAGEMENT --- -SETTINGS_DIR_NAME = "Settings" - -def get_base_dir(): - if getattr(sys, 'frozen', False): - return os.path.dirname(sys.executable) - else: - return os.path.dirname(os.path.abspath(__file__)) - -def get_settings_path(filename): - base = get_base_dir() - settings_dir = os.path.join(base, SETTINGS_DIR_NAME) - if not os.path.exists(settings_dir): - try: - os.makedirs(settings_dir) - except: pass - return os.path.join(settings_dir, filename) 
- -def get_tool_path(tool_name): - # Check Settings folder first - settings_path = get_settings_path(tool_name) - if os.path.exists(settings_path): - return settings_path - - # Fallback to script dir - script_path = os.path.join(get_base_dir(), tool_name) - if os.path.exists(script_path): - return script_path - - return settings_path - -def get_cache_dir(): - # Check Settings folder first (Preferred) - settings_path = get_settings_path("texture_cache") - if os.path.exists(settings_path) and os.path.isdir(settings_path): - return settings_path - - base = get_base_dir() - # Check legacy/root location - legacy_path = os.path.join(base, "texture_cache") - if os.path.exists(legacy_path) and os.path.isdir(legacy_path): - return legacy_path - - # Default to Settings folder - return settings_path - -CONFIG_FILE = get_settings_path("config.json") -CACHE_DIR = get_cache_dir() # Store cache in Settings folder for persistence (or root if exists) -CACHE2_FILE = get_settings_path("cache2.json") -LEGACY_CACHE_FILE = get_settings_path("cache.json") -MAPPING_FILE = get_settings_path("texture_mapping.json") - -# App version for updates -APP_VERSION = "2.0.0" -GITHUB_REPO = "heisthecat31/EchoVR-Texture-Editor" -GITHUB_API_URL = f"https://api.github.com/repos/{GITHUB_REPO}/releases/latest" - -DECODE_CACHE = {} - - -def compare_versions(v1, v2): - """Compare two version strings (e.g., '1.0.0' vs '1.1.0'). Returns True if v2 > v1""" - try: - parts1 = [int(x) for x in v1.split('.')] - parts2 = [int(x) for x in v2.split('.')] - - # Pad with zeros - while len(parts1) < len(parts2): - parts1.append(0) - while len(parts2) < len(parts1): - parts2.append(0) - - for p1, p2 in zip(parts1, parts2): - if p2 > p1: - return True - elif p2 < p1: - return False - return False - except: - return False - - -def check_for_updates(): - """Check GitHub for latest release. 
Returns (has_update, latest_version, download_url) or (False, None, None)""" - try: - response = urllib.request.urlopen(GITHUB_API_URL, timeout=5) - data = json.loads(response.read().decode('utf-8')) - - if 'tag_name' in data: - latest_version = data['tag_name'].lstrip('v') # Remove 'v' prefix if present - download_url = data.get('html_url', '') # Link to releases page - - if compare_versions(APP_VERSION, latest_version): - return True, latest_version, download_url - except Exception as e: - pass # Silent fail - don't break if network unavailable - - return False, None, None - - -def _dir_nonempty(path): - """Return True if directory exists and has at least one entry (no full listdir).""" - try: - with os.scandir(path) as it: - return next(it, None) is not None - except (OSError, TypeError): - return False - - -def run_hidden_command(cmd, cwd=None, timeout=None, capture_output=True): - if sys.platform == 'win32': - startupinfo = subprocess.STARTUPINFO() - startupinfo.dwFlags |= subprocess.STARTF_USESHOWWINDOW - startupinfo.wShowWindow = subprocess.SW_HIDE - - if capture_output: - try: - result = subprocess.run( - cmd, - startupinfo=startupinfo, - capture_output=True, - text=True, - cwd=cwd, - timeout=timeout, - creationflags=subprocess.CREATE_NO_WINDOW - ) - return result - except subprocess.TimeoutExpired: - return subprocess.CompletedProcess(cmd, -1, "", "Timeout expired") - except Exception: - return subprocess.CompletedProcess(cmd, -1, "", "Command failed") - else: - try: - result = subprocess.run( - cmd, - startupinfo=startupinfo, - stdout=subprocess.DEVNULL, - stderr=subprocess.DEVNULL, - cwd=cwd, - timeout=timeout, - creationflags=subprocess.CREATE_NO_WINDOW - ) - return result - except Exception: - return subprocess.CompletedProcess(cmd, -1) - else: - try: - if capture_output: - return subprocess.run(cmd, capture_output=True, text=True, cwd=cwd, timeout=timeout) - else: - return subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, 
cwd=cwd, timeout=timeout) - except subprocess.TimeoutExpired: - return subprocess.CompletedProcess(cmd, -1, "", "Timeout expired") - except Exception: - return subprocess.CompletedProcess(cmd, -1, "", "Command failed") - -# --- CACHE MANAGER --- -class TextureCacheManager: - @staticmethod - def load_cache(): - if os.path.exists(CACHE2_FILE): - try: - with open(CACHE2_FILE, 'r', encoding='utf-8') as f: - return json.load(f) - except Exception: - return {} - return {} - - @staticmethod - def save_cache(cache_data): - try: - with open(CACHE2_FILE, 'w', encoding='utf-8') as f: - json.dump(cache_data, f, indent=2) - except Exception: - pass - - @staticmethod - def get_cached_files(folder_path): - cache = TextureCacheManager.load_cache() - if not cache: return None - - norm_path = os.path.normpath(folder_path).lower() - for key in cache: - if os.path.normpath(key).lower() == norm_path: - return cache[key] - return None - - @staticmethod - def update_cache(folder_path, file_list): - cache = TextureCacheManager.load_cache() - cache[os.path.normpath(folder_path)] = file_list - TextureCacheManager.save_cache(cache) - -class ConfigManager: - @staticmethod - def load_config(): - base_dir = get_base_dir() - settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) - - if not os.path.exists(settings_dir): - try: - os.makedirs(settings_dir) - except: pass - - default_config = { - 'output_folder': None, - 'data_folder': None, - 'extracted_folder': os.path.join(settings_dir, "pcvr-extracted"), - 'repacked_folder': os.path.join(settings_dir, "output-both"), - 'pcvr_input_folder': os.path.join(settings_dir, "input-pcvr"), - 'quest_input_folder': os.path.join(settings_dir, "input-quest"), - 'backup_folder': None, - 'renderdoc_path': None - } - - try: - if os.path.exists(CONFIG_FILE): - with open(CONFIG_FILE, 'r', encoding='utf-8') as f: - loaded_config = json.load(f) - for key in default_config: - if key in loaded_config: - value = loaded_config[key] - if value is None: - continue - if 
isinstance(value, str) and (key.endswith('_folder') or key.endswith('_path')): - value = os.path.normpath(value) - if not os.path.exists(value) and key in ['repacked_folder', 'pcvr_input_folder', 'quest_input_folder']: - parent_path = os.path.join(os.path.dirname(value), os.path.basename(value)) - if os.path.exists(parent_path): - value = parent_path - default_config[key] = value - except Exception as e: - print(f"Config load error: {e}") - - return default_config - - @staticmethod - def save_config(**kwargs): - config = ConfigManager.load_config() - config.update(kwargs) - - try: - with open(CONFIG_FILE, 'w', encoding='utf-8') as f: - json.dump(config, f, indent=4) - except Exception as e: - print(f"Config save error: {e}") - -class TutorialPopup: - """Step-by-step guided tutorial with highlight boxes showing what to click in order.""" - HIGHLIGHT_BG = "#2d5a27" - HIGHLIGHT_BORDER = 4 - PANEL_BG = "#333333" - - @staticmethod - def _get_widget(app, attr): - try: - return getattr(app, attr, None) - except Exception: - return None - - @staticmethod - def show(parent, app=None): - if app is None: - app = parent - steps = [ - ("data_folder_btn", "Step 1: Data Folder", "Click the **Select** button next to Data Folder to choose your EchoVR game folder (the one containing 'manifests' and 'packages')."), - ("extracted_folder_btn", "Step 2: Extracted Folder", "Click **Select** next to Extracted Folder to choose where extracted textures will be saved (e.g. a new empty folder)."), - ("extract_btn", "Step 3: Extract Package", "Click **Extract Package**. Choose 'Textures Only' for a fast extract, or 'Full Package' if you need everything."), - ("file_list", "Step 4: Texture List", "After extraction, textures appear here. 
Click one or more (Ctrl/Shift for multi-select) to choose which texture to replace."), - ("replacement_canvas", "Step 5: Replacement Texture", "Click the **right canvas** (Replacement area) to open a file picker and choose your replacement image (PNG/DDS)."), - ("replace_btn", "Step 6: Replace Texture", "Click **Replace Texture** to apply your replacement image to all selected textures. Files go to input-pcvr or input-quest."), - ("repack_btn", "Step 7: Repack Modified", "After editing, click **Repack Modified** to build the output. Use the default 'output-both' folder when asked."), - ("push_quest_btn", "Step 8: Deploy", "Quest: use **Push Files To Quest** to deploy. PCVR: use **Update EchoVR** in the header to copy files into your game folder."), - ] - panel = tk.Toplevel(parent) - panel.title("Tutorial") - panel.configure(bg=TutorialPopup.PANEL_BG) - panel.resizable(False, False) - panel.geometry("340x165") - panel.transient(parent) - panel.attributes("-topmost", True) - try: - px = parent.winfo_rootx() + max(0, (parent.winfo_width() - 340) // 2) - py = parent.winfo_rooty() + parent.winfo_height() - 185 - if py < parent.winfo_rooty(): - py = parent.winfo_rooty() + 20 - panel.geometry(f"+{px}+{py}") - except Exception: - pass - current_step = [0] - saved_style = {} - - def _clear_highlight(): - w = saved_style.get("widget") - if w and w.winfo_exists(): - try: - for k, v in saved_style.get("config", {}).items(): - try: - w.config(**{k: v}) - except Exception: - pass - except Exception: - pass - saved_style.clear() - - def _apply_highlight(widget): - if not widget or not widget.winfo_exists(): - return - try: - orig = {} - for key in ("bg", "relief", "bd", "highlightbackground", "highlightthickness"): - try: - orig[key] = widget.cget(key) - except Exception: - pass - saved_style["widget"] = widget - saved_style["config"] = orig - for attr, value in [ - ("bg", TutorialPopup.HIGHLIGHT_BG), - ("relief", tk.SOLID), - ("bd", TutorialPopup.HIGHLIGHT_BORDER), - 
("highlightbackground", "#4cd964"), - ("highlightthickness", TutorialPopup.HIGHLIGHT_BORDER), - ]: - try: - widget.config(**{attr: value}) - except Exception: - pass - except Exception: - saved_style.clear() - - def _go(step_index): - _clear_highlight() - current_step[0] = step_index - idx = current_step[0] - step_label.config(text=f"Step {idx + 1} of {len(steps)}") - title_label.config(text=steps[idx][1]) - desc_label.config(text=steps[idx][2]) - widget = TutorialPopup._get_widget(app, steps[idx][0]) - _apply_highlight(widget) - prev_btn.config(state=tk.NORMAL if idx > 0 else tk.DISABLED) - is_last = idx >= len(steps) - 1 - next_btn.config(state=tk.NORMAL, text="Close" if is_last else "Next →") - - def _next(): - if current_step[0] >= len(steps) - 1: - _skip() - else: - _go(current_step[0] + 1) - - def _prev(): - if current_step[0] > 0: - _go(current_step[0] - 1) - - def _skip(): - _clear_highlight() - panel.destroy() - - content = tk.Frame(panel, bg=TutorialPopup.PANEL_BG, padx=10, pady=8) - content.pack(fill=tk.BOTH, expand=True) - step_label = tk.Label(content, text=f"Step 1 of {len(steps)}", font=("Arial", 8), fg="#888888", bg=TutorialPopup.PANEL_BG) - step_label.pack(anchor="w") - title_label = tk.Label(content, text=steps[0][1], font=("Arial", 10, "bold"), fg="#4cd964", bg=TutorialPopup.PANEL_BG, anchor="w") - title_label.pack(fill=tk.X, pady=(2, 4)) - desc_label = tk.Label(content, text=steps[0][2], font=("Arial", 9), fg="#eeeeee", bg=TutorialPopup.PANEL_BG, justify=tk.LEFT, anchor="w", wraplength=310) - desc_label.pack(fill=tk.X) - btn_frame = tk.Frame(content, bg=TutorialPopup.PANEL_BG) - btn_frame.pack(fill=tk.X, pady=(8, 0)) - prev_btn = tk.Button(btn_frame, text="← Prev", command=_prev, state=tk.DISABLED, bg="#4a4a4a", fg="#ffffff", font=("Arial", 8), relief=tk.RAISED, bd=1, padx=6, pady=4) - prev_btn.pack(side=tk.LEFT, padx=(0, 6)) - next_btn = tk.Button(btn_frame, text="Next →", command=_next, bg="#4cd964", fg="#000000", font=("Arial", 8, "bold"), 
relief=tk.RAISED, bd=1, padx=6, pady=4) - next_btn.pack(side=tk.LEFT, padx=(0, 6)) - skip_btn = tk.Button(btn_frame, text="Skip", command=_skip, bg="#555555", fg="#ffffff", font=("Arial", 8), relief=tk.RAISED, bd=1, padx=6, pady=4) - skip_btn.pack(side=tk.RIGHT) - panel.protocol("WM_DELETE_WINDOW", _skip) - _go(0) - -class ProgressDialog: - """Simple progress dialog for long-running operations""" - def __init__(self, parent, title="Processing", message="Please wait...", show_bar=True): - self.dialog = tk.Toplevel(parent) - self.dialog.title(title) - height = 150 if show_bar else 100 - self.dialog.geometry(f"400x{height}") - self.dialog.configure(bg='#1a1a1a') - self.dialog.resizable(False, False) - self.dialog.transient(parent) - self.dialog.grab_set() - - # Center on parent - try: - x = parent.winfo_x() + (parent.winfo_width() - 400) // 2 - y = parent.winfo_y() + (parent.winfo_height() - 150) // 2 - self.dialog.geometry(f"+{x}+{y}") - except: - pass - - # Message label - tk.Label(self.dialog, text=message, font=("Arial", 11), fg="#ffffff", bg='#1a1a1a').pack(pady=(20, 10)) - - self.show_bar = show_bar - if show_bar: - # Progress bar - self.progress = ttk.Progressbar(self.dialog, length=300, mode='determinate', value=0) - self.progress.pack(pady=10, padx=50) - - # Status label - self.status_label = tk.Label(self.dialog, text="0%", font=("Arial", 9), fg="#4cd964", bg='#1a1a1a') - self.status_label.pack(pady=5) - else: - self.progress = None - self.status_label = None - - # Cancel button - self.cancel_requested = False - self.cancel_btn = tk.Button(self.dialog, text="Cancel", command=self.request_cancel, - bg='#ff3b30', fg='#ffffff', font=("Arial", 9, "bold"), - relief=tk.RAISED, bd=2, padx=20, pady=5) - self.cancel_btn.pack(pady=10) - - self.dialog.protocol("WM_DELETE_WINDOW", self.request_cancel) - - def update(self, current, total): - """Update progress (0-100)""" - if not self.dialog.winfo_exists(): - return False - if self.show_bar and self.progress and 
self.status_label: - percent = int((current / total) * 100) if total > 0 else 0 - self.progress['value'] = percent - self.status_label.config(text=f"{percent}%") - self.dialog.update_idletasks() - return not self.cancel_requested - - def request_cancel(self): - self.cancel_requested = True - self.cancel_btn.config(state=tk.DISABLED, text="Cancelling...") - self.dialog.update_idletasks() - - def close(self): - """Close the progress dialog""" - try: - self.dialog.destroy() - except: - pass - -class UpdateNotificationDialog: - """Dialog for notifying user about app updates""" - def __init__(self, parent, latest_version, download_url): - self.dialog = tk.Toplevel(parent) - self.dialog.title("📥 Update Available") - self.dialog.geometry("500x250") - self.dialog.configure(bg='#1a1a1a') - self.dialog.resizable(False, False) - self.dialog.transient(parent) - self.dialog.grab_set() - - # Center on parent - try: - x = parent.winfo_x() + (parent.winfo_width() - 500) // 2 - y = parent.winfo_y() + (parent.winfo_height() - 250) // 2 - self.dialog.geometry(f"+{x}+{y}") - except: - pass - - # Title - tk.Label(self.dialog, text="🎉 Update Available", font=("Arial", 14, "bold"), - fg="#4cd964", bg='#1a1a1a').pack(pady=(20, 10)) - - # Version info - info_text = f"A new version is available!\n\nCurrent: v{APP_VERSION}\nLatest: v{latest_version}\n\nClick 'Download' to visit the releases page." 
- tk.Label(self.dialog, text=info_text, font=("Arial", 10), fg="#cccccc", bg='#1a1a1a', justify=tk.LEFT).pack(pady=10, padx=20) - - # Buttons frame - btn_frame = tk.Frame(self.dialog, bg='#1a1a1a') - btn_frame.pack(pady=20) - - download_btn = tk.Button(btn_frame, text="📥 Download", command=self.download, - bg='#007aff', fg='#ffffff', font=("Arial", 10, "bold"), - relief=tk.RAISED, bd=2, padx=20, pady=8) - download_btn.pack(side=tk.LEFT, padx=5) - - remind_btn = tk.Button(btn_frame, text="Remind Later", command=self.dialog.destroy, - bg='#4a4a4a', fg='#ffffff', font=("Arial", 10), - relief=tk.RAISED, bd=2, padx=20, pady=8) - remind_btn.pack(side=tk.LEFT, padx=5) - - self.download_url = download_url - - def download(self): - """Open download page in default browser""" - try: - webbrowser.open(self.download_url) - self.dialog.destroy() - except: - messagebox.showerror("Error", "Could not open browser. Please visit:\n" + self.download_url) - -class UpdateEchoPopup: - def __init__(self, parent, app, config): - self.parent = parent - self.app = app - self.config = config - self.backup_location = None - - self.popup = tk.Toplevel(parent) - self.popup.title("⚠ Update EchoVR Game Files") - self.popup.geometry("850x500") - self.popup.configure(bg='#1a1a1a') - self.popup.resizable(False, False) - - self.popup.transient(parent) - self.popup.grab_set() - - self.popup.update_idletasks() - x = parent.winfo_x() + (parent.winfo_width() - self.popup.winfo_reqwidth()) // 2 - y = parent.winfo_y() + (parent.winfo_height() - self.popup.winfo_reqheight()) // 2 - self.popup.geometry(f"+{x}+{y}") - - self.setup_ui() - self.refresh_backup_status() - - def setup_ui(self): - title_frame = tk.Frame(self.popup, bg='#1a1a1a') - title_frame.pack(fill=tk.X, padx=20, pady=20) - - warning_icon = "⚠️" - title_label = tk.Label(title_frame, text=f"{warning_icon} WARNING: Update EchoVR", font=("Arial", 14, "bold"), fg="#ff6b6b", bg='#1a1a1a') - title_label.pack() - - warning_text = """This menu allows 
you to update your EchoVR installation. -Always create a backup before proceeding.""" - - warning_label = tk.Label(self.popup, text=warning_text, font=("Arial", 11), fg="#ffffff", bg='#1a1a1a', justify=tk.CENTER, wraplength=650) - warning_label.pack(padx=20, pady=10) - - data_folder = self.config.get('data_folder', 'Not selected') - data_frame = tk.Frame(self.popup, bg='#2a2a2a', relief=tk.RAISED, bd=1) - data_frame.pack(fill=tk.X, padx=20, pady=10) - - tk.Label(data_frame, text="Game Data Folder:", font=("Arial", 10, "bold"), fg="#4cd964", bg='#2a2a2a').pack(anchor="w", padx=10, pady=(10, 0)) - - folder_label = tk.Label(data_frame, text=data_folder, font=("Arial", 9), fg="#cccccc", bg='#2a2a2a', wraplength=620, justify=tk.LEFT) - folder_label.pack(fill=tk.X, padx=10, pady=(0, 10)) - - script_dir = os.path.dirname(os.path.abspath(__file__)) - output_folder = self.config.get('repacked_folder', os.path.join(script_dir, "output-both")) - output_frame = tk.Frame(self.popup, bg='#2a2a2a', relief=tk.RAISED, bd=1) - output_frame.pack(fill=tk.X, padx=20, pady=10) - - tk.Label(output_frame, text="Modified Files Source:", font=("Arial", 10, "bold"), fg="#4cd964", bg='#2a2a2a').pack(anchor="w", padx=10, pady=(10, 0)) - - output_label = tk.Label(output_frame, text=output_folder, font=("Arial", 9), fg="#cccccc", bg='#2a2a2a', wraplength=620, justify=tk.LEFT) - output_label.pack(fill=tk.X, padx=10, pady=(0, 10)) - - backup_frame = tk.Frame(self.popup, bg='#1a1a1a') - backup_frame.pack(fill=tk.X, padx=20, pady=10) - - btn_frame = tk.Frame(backup_frame, bg='#1a1a1a') - btn_frame.pack(pady=10) - - self.create_backup_btn = tk.Button(btn_frame, text="📁 Create Backup", command=self.create_backup, bg='#4a4a4a', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=10) - self.create_backup_btn.pack(side=tk.LEFT, padx=5) - - self.restore_backup_btn = tk.Button(btn_frame, text="🔄 Restore Backup", command=self.restore_backup, bg='#4a4a4a', fg='#ffffff', 
font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=10, state=tk.DISABLED) - self.restore_backup_btn.pack(side=tk.LEFT, padx=5) - - self.update_pkg_btn = tk.Button(btn_frame, text="📦 Update Packages", command=self.start_update_thread, bg='#007aff', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=10) - self.update_pkg_btn.pack(side=tk.LEFT, padx=5) - - self.backup_status = tk.Label(backup_frame, text="Checking backup status...", font=("Arial", 9), fg="#ffcc00", bg='#1a1a1a') - self.backup_status.pack() - - close_frame = tk.Frame(self.popup, bg='#1a1a1a') - close_frame.pack(fill=tk.X, padx=20, pady=20) - - self.close_btn = tk.Button(close_frame, text="Close", command=self.popup.destroy, bg='#4a4a4a', fg='#ffffff', font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=30, pady=10) - self.close_btn.pack() - - def log_info(self, message): - if hasattr(self.app, 'log_info'): - self.app.log_info(message) - - def check_backup_exists(self): - backup_folder = self.config.get('backup_folder') - if backup_folder: - backup_folder = os.path.normpath(backup_folder) - if os.path.exists(backup_folder): - self.backup_location = backup_folder - return True - return False - - def refresh_backup_status(self): - if self.check_backup_exists(): - self.backup_status.config(text=f"✓ Backup found: {os.path.basename(self.backup_location)}", fg="#4cd964") - self.restore_backup_btn.config(state=tk.NORMAL) - else: - self.backup_status.config(text="No backup found - create one before updating", fg="#ffcc00") - self.restore_backup_btn.config(state=tk.DISABLED) - - def create_backup(self): - if not self.config.get('data_folder'): - messagebox.showerror("Error", "Please select game data folder first") - return - - backup_path = filedialog.askdirectory(title="Select Backup Location", initialdir=os.path.dirname(self.config['data_folder'])) - - if not backup_path: - return - - try: - timestamp = time.strftime("%Y%m%d_%H%M%S") - backup_folder = 
os.path.join(backup_path, f"EchoVR_Backup_{timestamp}") - - self.backup_status.config(text="Creating backup...", fg="#ffcc00") - self.popup.update_idletasks() - - # Run in thread to prevent freeze - def backup_task(): - try: - shutil.copytree(self.config['data_folder'], backup_folder) - self.popup.after(0, lambda: self.on_backup_complete(True, backup_folder)) - except Exception as e: - self.popup.after(0, lambda: self.on_backup_complete(False, str(e))) - - threading.Thread(target=backup_task, daemon=True).start() - - except Exception as e: - messagebox.showerror("Error", f"Failed to start backup:\n{str(e)}") - - def on_backup_complete(self, success, result): - if success: - ConfigManager.save_config(backup_folder=result) - self.config['backup_folder'] = result - self.backup_location = result - self.refresh_backup_status() - self.log_info(f"✓ Backup created: {result}") - messagebox.showinfo("Success", f"Backup created successfully at:\n{result}") - else: - messagebox.showerror("Error", f"Failed to create backup:\n{result}") - self.backup_status.config(text="Backup failed", fg="#ff3b30") - - def restore_backup(self): - if not self.backup_location or not os.path.exists(self.backup_location): - messagebox.showerror("Error", "Backup not found") - return - - confirm = messagebox.askyesno("Confirm Restore", f"Restore game files from backup?\n\nBackup: {self.backup_location}\n\nThis will OVERWRITE your current game files.") - - if not confirm: - return - - self.backup_status.config(text="Restoring backup... 
(Do not close)", fg="#ffcc00") - self.restore_backup_btn.config(state=tk.DISABLED) - self.popup.update_idletasks() - - def restore_task(): - try: - if os.path.exists(self.config['data_folder']): - shutil.rmtree(self.config['data_folder']) - shutil.copytree(self.backup_location, self.config['data_folder']) - self.popup.after(0, lambda: self.on_restore_complete(True, self.backup_location)) - except Exception as e: - self.popup.after(0, lambda: self.on_restore_complete(False, str(e))) - - threading.Thread(target=restore_task, daemon=True).start() - - def on_restore_complete(self, success, result): - if success: - self.log_info(f"✓ Game files restored from backup: {result}") - messagebox.showinfo("Success", "Game files restored from backup!") - self.popup.destroy() - else: - messagebox.showerror("Error", f"Failed to restore backup:\n{result}") - self.backup_status.config(text="Restore failed", fg="#ff3b30") - self.restore_backup_btn.config(state=tk.NORMAL) - - def start_update_thread(self): - # Validation checks - script_dir = os.path.dirname(os.path.abspath(__file__)) - output_folder = self.config.get('repacked_folder') - if not output_folder: - output_folder = os.path.join(script_dir, "output-both") - - data_folder = self.config.get('data_folder') - - if not os.path.exists(output_folder): - messagebox.showerror("Error", f"Output folder not found:\n{output_folder}\n\nPlease repack your files first.") - return - - if not data_folder or not os.path.exists(data_folder): - messagebox.showerror("Error", "Game data folder not found.\nPlease select your EchoVR data folder first.") - return - - packages_path = os.path.join(output_folder, "packages") - manifests_path = os.path.join(output_folder, "manifests") - - if not os.path.exists(packages_path) or not os.path.exists(manifests_path): - messagebox.showerror("Error", f"Required folders not found in:\n{output_folder}\n\nPlease repack your files first.") - return - - if not self.backup_location: - warning_result = 
messagebox.askyesno("⚠ WARNING - No Backup Found", f"No backup found! This operation will OVERWRITE your game files.\n\nContinue WITHOUT a backup?") - if not warning_result: - return - - confirm = messagebox.askyesno("Update Game Files", f"This will UPDATE your EchoVR installation.\n\nSource: {output_folder}\nTarget: {data_folder}\n\nOperation:\n1. Move files from output-both to game folder\n2. Wipe output-both folder\n\nContinue?") - - if not confirm: - return - - # Disable buttons - self.update_pkg_btn.config(state=tk.DISABLED, text="Updating...") - self.close_btn.config(state=tk.DISABLED) - - # Show progress dialog - progress = ProgressDialog(self.popup, "Updating Game Files", "Moving files to game folder...") - - # Start Thread - threading.Thread(target=self.update_packages_thread, args=(output_folder, data_folder, progress), daemon=True).start() - - def update_packages_thread(self, output_folder, data_folder, progress): - try: - files_moved = 0 - total_files = 0 - - # Count total files first - for folder in ['packages', 'manifests']: - src_path = os.path.join(output_folder, folder) - if os.path.exists(src_path): - total_files += len([f for f in os.listdir(src_path) if os.path.isfile(os.path.join(src_path, f))]) - - if total_files == 0: - total_files = 1 # Avoid division by zero - - # Move files - for folder in ['packages', 'manifests']: - src_path = os.path.join(output_folder, folder) - dst_path = os.path.join(data_folder, folder) - - if os.path.exists(src_path): - os.makedirs(dst_path, exist_ok=True) - - for filename in os.listdir(src_path): - if not progress.update(files_moved, total_files): - self.popup.after(0, lambda: self.on_update_complete(False, "Operation cancelled")) - return - - src_file = os.path.join(src_path, filename) - dst_file = os.path.join(dst_path, filename) - - if os.path.isfile(src_file): - shutil.move(src_file, dst_file) - files_moved += 1 - - progress.update(total_files, total_files) - - try: - for folder in ['packages', 'manifests']: - 
folder_path = os.path.join(output_folder, folder) - if os.path.exists(folder_path): - shutil.rmtree(folder_path) - except Exception as wipe_error: - self.popup.after(0, lambda: self.log_info(f"⚠ Could not completely wipe output-both: {wipe_error}")) - - self.popup.after(0, lambda: self.on_update_complete(True, files_moved, progress)) - - except Exception as e: - self.popup.after(0, lambda: self.on_update_complete(False, str(e), progress)) - - def on_update_complete(self, success, result, progress=None): - if progress: - progress.close() - - self.update_pkg_btn.config(state=tk.NORMAL, text="📦 Update Packages") - self.close_btn.config(state=tk.NORMAL) - - if success: - self.log_info(f"✓ Moved {result} files to game folder") - self.log_info(f"✓ Wiped output-both folder") - messagebox.showinfo("Success", f"Successfully updated game files!\n\nFiles moved: {result}") - self.popup.destroy() - else: - messagebox.showerror("Error", f"Failed to update packages:\n{result}") - self.backup_status.config(text="Update failed", fg="#ff3b30") - -class ADBPlatformTools: - @staticmethod - def get_safe_install_directory(): - script_dir = os.path.dirname(os.path.abspath(__file__)) - install_dir = os.path.join(script_dir, "platform-tools") - return install_dir - - @staticmethod - def install_platform_tools(): - import platform - system = platform.system().lower() - - download_urls = { - 'windows': 'https://dl.google.com/android/repository/platform-tools-latest-windows.zip', - 'linux': 'https://dl.google.com/android/repository/platform-tools-latest-linux.zip', - 'darwin': 'https://dl.google.com/android/repository/platform-tools-latest-darwin.zip' - } - - url = download_urls.get(system) - if not url: - return False, f"Unsupported platform: {system}" - - script_dir = os.path.dirname(os.path.abspath(__file__)) - install_base = os.path.join(script_dir, "platform-tools") - download_path = os.path.join(script_dir, "platform-tools-download.zip") - - try: - os.makedirs(install_base, 
exist_ok=True) - - urllib.request.urlretrieve(url, download_path) - - with zipfile.ZipFile(download_path, 'r') as zip_ref: - zip_ref.extractall(install_base) - - try: - os.remove(download_path) - except: - pass - - adb_path = os.path.join(install_base, "platform-tools", "adb.exe" if system == 'windows' else "adb") - if not os.path.exists(adb_path): - adb_path = os.path.join(install_base, "adb.exe" if system == 'windows' else "adb") - - if os.path.exists(adb_path): - if system != 'windows': - try: - os.chmod(adb_path, 0o755) - except: - pass - - adb_dir = os.path.dirname(adb_path) - os.environ['PATH'] = adb_dir + os.pathsep + os.environ['PATH'] - - return True, f"Platform Tools installed to: {adb_dir}" - else: - return False, "ADB executable not found after extraction" - - except Exception as e: - return False, f"Installation failed: {str(e)}" - -class ADBManager: - @staticmethod - def find_adb(): - safe_dir = ADBPlatformTools.get_safe_install_directory() - local_paths = [ - os.path.join(safe_dir, "platform-tools", "adb.exe"), - os.path.join(safe_dir, "platform-tools", "adb"), - os.path.join(safe_dir, "adb.exe"), - os.path.join(safe_dir, "adb") - ] - - script_dir = os.path.dirname(os.path.abspath(__file__)) - local_paths.extend([ - os.path.join(script_dir, "platform-tools", "adb.exe"), - os.path.join(script_dir, "platform-tools", "adb"), - os.path.join(script_dir, "adb.exe"), - os.path.join(script_dir, "adb") - ]) - - for path in local_paths: - if os.path.exists(path): - return path - - try: - result = run_hidden_command(['adb', 'version'], timeout=10) - if result.returncode == 0: - return 'adb' - except: - pass - - return None - - @staticmethod - def check_adb(): - adb_path = ADBManager.find_adb() - if not adb_path: - return False, "ADB not found", None - - try: - try: - run_hidden_command([adb_path, 'kill-server'], timeout=5) - except: - pass - - result = run_hidden_command([adb_path, 'devices'], timeout=10) - if result.returncode == 0: - lines = [line for line in 
result.stdout.strip().split('\n') if '\tdevice' in line] - if lines: - devices = [] - for line in lines: - device_id = line.split('\t')[0] - info_result = run_hidden_command([adb_path, '-s', device_id, 'shell', 'getprop', 'ro.product.model'], timeout=10) - model = info_result.stdout.strip() if info_result.returncode == 0 else "Unknown" - devices.append(f"{device_id} ({model})") - - return True, f"Connected: {', '.join(devices)}", adb_path - else: - return True, "No devices connected", adb_path - return False, "ADB command failed", adb_path - except subprocess.TimeoutExpired: - return False, "ADB timeout", adb_path - except Exception as e: - return False, f"ADB error: {str(e)}", adb_path - - @staticmethod - def push_to_quest(local_folder, quest_path): - adb_path = ADBManager.find_adb() - if not adb_path: - return False, "ADB not available" - - try: - # Optimize: Attempt to push the directory contents at once first - # "adb push local_folder/. remote_folder/" - # This is vastly faster than iterating files. - - # Ensure remote dir exists - run_hidden_command([adb_path, 'shell', 'mkdir', '-p', quest_path], timeout=30) - - # Use trailing /. 
to push contents - cmd = [adb_path, 'push', local_folder + "/.", quest_path + "/"] - result = run_hidden_command(cmd, timeout=600) # 10 minute timeout - - if result.returncode == 0: - return True, "Successfully pushed all items (Bulk Mode)" - - # Fallback to file-by-file if bulk fails (rare but safer) - success_count = 0 - total_count = 0 - errors = [] - - for item in os.listdir(local_folder): - item_path = os.path.join(local_folder, item) - if os.path.exists(item_path): - total_count += 1 - result = run_hidden_command([adb_path, 'push', item_path, quest_path], timeout=60) - - if result.returncode == 0: - success_count += 1 - else: - error_msg = result.stderr.strip() if result.stderr else "Unknown error" - errors.append(f"{item}: {error_msg}") - - if success_count == total_count: - return True, f"Successfully pushed all {success_count} items" - elif success_count > 0: - return True, f"Partially successful: {success_count}/{total_count}. Errors: {len(errors)}" - else: - return False, f"Failed to push items. 
Errors: {len(errors)}" - - except subprocess.TimeoutExpired: - return False, "Push operation timed out" - except Exception as push_error: - return False, f"Push error: {str(push_error)}" - - @staticmethod - def install_adb_tools(): - return ADBPlatformTools.install_platform_tools() - -class ASTCTools: - @staticmethod - def load_texture_mapping(mapping_file): - if not os.path.exists(mapping_file): - return {} - try: - with open(mapping_file, 'r', encoding='utf-8') as f: - mapping = json.load(f) - return mapping - except Exception as e: - print(f"Mapping load error: {e}") - return {} - - @staticmethod - def find_texture_info(texture_name, mapping): - if texture_name in mapping: - return mapping[texture_name] - suffixes = ['_d', '_n', '_s', '_e', '_a', '_r', '_m', '_h'] - for suffix in suffixes: - if texture_name.endswith(suffix): - base_name = texture_name[:-len(suffix)] - if base_name in mapping: - return mapping[base_name] - return None - - @staticmethod - def wrap_raw_astc(raw_path, wrapped_path, width, height, block_width=4, block_height=4): - try: - magic = struct.pack(" 1000: - if cache_key: - DECODE_CACHE[cache_key] = { - 'width': width, 'height': height, - 'block_w': block_w, 'block_h': block_h, - 'original_size': raw_file.stat().st_size - } - return True - else: - output_file.unlink() - return False - else: - if output_file.exists(): - output_file.unlink() - return False - except Exception: - if output_file.exists(): - output_file.unlink() - return False - finally: - if temp_astc and temp_astc.exists(): - try: temp_astc.unlink() - except: pass - - @staticmethod - def get_common_block_sizes(): - return [(4, 4), (8, 8), (6, 6), (5, 5), (10, 10), (12, 12), (5, 4), (6, 5), (8, 5), (8, 6), (10, 5), (10, 6), (10, 8)] - - @staticmethod - def decode_with_mapping(astcenc_path, texture_file, output_path, mapping): - texture_name = texture_file.stem - texture_info = ASTCTools.find_texture_info(texture_name, mapping) - if not texture_info: return False - - pcvr_width = 
texture_info['width'] - pcvr_height = texture_info['height'] - - for block_w, block_h in ASTCTools.get_common_block_sizes(): - output_file = output_path / f"{texture_file.stem}.png" - if ASTCTools.decode_with_config(astcenc_path, texture_file, output_file, pcvr_width, pcvr_height, block_w, block_h, texture_name): - return True - return False - - @staticmethod - def brute_force_decode(astcenc_path, texture_file, output_path): - configurations = [ - (2048, 1024, 8, 8, "2Kx1K_8x8"), (2048, 1024, 6, 6, "2Kx1K_6x6"), (2048, 1024, 4, 4, "2Kx1K_4x4"), - (1024, 512, 8, 8, "1Kx512_8x8"), (1024, 512, 6, 6, "1Kx512_6x6"), (1024, 512, 4, 4, "1Kx512_4x4"), - (2048, 2048, 8, 8, "2K_square_8x8"), (1024, 1024, 8, 8, "1K_square_8x8"), - ] - file_size = texture_file.stat().st_size - - for width, height, block_w, block_h, desc in configurations: - expected_size = ASTCTools.calculate_astc_size(width, height, block_w, block_h) - if abs(expected_size - file_size) > 100: - continue - output_file = output_path / f"{texture_file.stem}_BF_{desc}.png" - if ASTCTools.decode_with_config(astcenc_path, texture_file, output_file, width, height, block_w, block_h, texture_file.stem): - return True - return False - - @staticmethod - def calculate_astc_size(width, height, block_w, block_h): - blocks_x = (width + block_w - 1) // block_w - blocks_y = (height + block_h - 1) // block_h - return blocks_x * blocks_y * 16 - - @staticmethod - def pad_to_size(data, target_size): - current_size = len(data) - if current_size < target_size: - padding = b'\x00' * (target_size - current_size) - return data + padding - elif current_size > target_size: - return data[:target_size] - else: - return data - - @staticmethod - def encode_texture(astcenc_path, input_png, output_file, width, height, block_w, block_h, quality="medium", target_size=None): - temp_astc = None - try: - with tempfile.NamedTemporaryFile(suffix='.astc', delete=False) as f: - temp_astc = Path(f.name) - - result = run_hidden_command([ - 
str(astcenc_path), "-cl", str(input_png), str(temp_astc), f"{block_w}x{block_h}", f"-{quality}", "-silent" - ], timeout=30) - - if result.returncode != 0: return False - - with open(temp_astc, 'rb') as f: - astc_data = f.read() - - if len(astc_data) > 16 and astc_data[:4] == b'\x13\xAB\xA1\x5C': - raw_data = astc_data[16:] - else: - raw_data = astc_data - - if target_size: - expected_size = ASTCTools.calculate_astc_size(width, height, block_w, block_h) - if len(raw_data) != target_size: - raw_data = ASTCTools.pad_to_size(raw_data, target_size) - - output_file.write_bytes(raw_data) - return True - except subprocess.TimeoutExpired: - return False - except Exception: - return False - finally: - if temp_astc and temp_astc.exists(): - temp_astc.unlink(missing_ok=True) - - @staticmethod - def encode_with_cache(astcenc_path, input_png, output_file, texture_name, quality="medium"): - if texture_name not in DECODE_CACHE: return False - config = DECODE_CACHE[texture_name] - return ASTCTools.encode_texture(astcenc_path, input_png, output_file, config['width'], config['height'], config['block_w'], config['block_h'], quality, config['original_size']) - - @staticmethod - def save_decode_cache(cache_file): - try: - with open(cache_file, 'w', encoding='utf-8') as f: - json.dump(DECODE_CACHE, f, indent=2) - except: pass - - @staticmethod - def load_decode_cache(cache_file): - global DECODE_CACHE - if os.path.exists(cache_file): - try: - with open(cache_file, 'r', encoding='utf-8') as f: - DECODE_CACHE = json.load(f) - except: pass - -class EVRToolsManager: - def __init__(self): - self.tool_path = self.find_tool() - - def find_tool(self): - tool_names = ["evrFileTools.exe", "echoModifyFiles.exe", "echoFileTools.exe"] - for name in tool_names: - path = get_tool_path(name) - if os.path.exists(path): - return path - return None - - def extract_package(self, data_dir, package_name, output_dir, export_type=""): - if not self.tool_path: - return False, "evrFileTools.exe not found" - - 
try: - cmd = [ - self.tool_path, "-mode", "extract", "-package", package_name, - "-data", data_dir, "-output", output_dir, - "-force" - ] - if export_type: - cmd.extend(["--export", export_type]) - cmd.extend(["-export", export_type]) - - result = run_hidden_command(cmd, cwd=os.path.dirname(self.tool_path), timeout=2000) - - if result.returncode == 0: - return True, f"Extracted to {output_dir}" - else: - error_msg = result.stderr if result.stderr else result.stdout - return False, f"Extraction failed: {error_msg}" - except subprocess.TimeoutExpired: - return False, "Extraction timeout" - except Exception as e: - return False, f"Extraction error: {str(e)}" - - def repack_package(self, output_dir, package_name, data_dir, input_dir): - if not self.tool_path: - return False, "evrFileTools.exe not found" - - try: - cmd = [ - self.tool_path, "-mode", "build", - "-package", package_name, - "-data", data_dir, - "-input", input_dir, "-output", output_dir, - "-force" - ] - - result = run_hidden_command(cmd, cwd=os.path.dirname(self.tool_path), timeout=2000) - - if result.returncode == 0: - return True, f"Repacked to {output_dir}" - else: - error_msg = result.stderr if result.stderr else result.stdout - return False, f"Repacking failed: {error_msg}" - except subprocess.TimeoutExpired: - return False, "Repacking timeout" - except Exception as e: - return False, f"Repacking error: {str(e)}" - -class DDSHandler: - DXGI_FORMAT = { - 0: "DXGI_FORMAT_UNKNOWN", 26: "DXGI_FORMAT_R11G11B10_FLOAT", 61: "DXGI_FORMAT_R8_UNORM", - 71: "DXGI_FORMAT_BC1_UNORM", 77: "DXGI_FORMAT_BC3_UNORM", - 80: "DXGI_FORMAT_BC4_UNORM", 83: "DXGI_FORMAT_BC5_UNORM", - 91: "DXGI_FORMAT_B8G8R8A8_UNORM_SRGB", - 87: "DXGI_FORMAT_B8G8R8A8_TYPELESS", - } - - @staticmethod - def get_dds_info(file_path): - try: - with open(file_path, 'rb') as f: - signature = f.read(4) - if signature != b'DDS ': return None - header = f.read(124) - if len(header) < 124: return None - - height = struct.unpack('= 20: - format_code = 
struct.unpack(' ' - cmd = [texconv_path, "encode", temp_png, out_dds] - result = run_hidden_command(cmd, timeout=60) - - if result.returncode != 0: - return None, 0 # Conversion failed - - if not os.path.isfile(out_dds): - return None, 0 # Output file not created - - size = os.path.getsize(out_dds) - base = os.path.splitext(os.path.basename(source_path))[0] - final_path = os.path.join(tempfile.gettempdir(), f"pcvr_replace_{os.getpid()}_{base}.dds") - shutil.copy2(out_dds, final_path) - return final_path, size - except Exception: - return None, 0 - - @staticmethod - def hex_edit_file_size(file_path, new_size): - try: - with open(file_path, 'r+b') as f: - data = bytearray(f.read()) - if len(data) >= 248: - file_size_bytes = struct.pack(' {'width': int, 'height': int, 'pixels': int, 'size': int} - self.sort_mode = "name" # name, width, height, pixels - - self.setup_ui() - self.load_page(0) - - def setup_ui(self): - top_frame = tk.Frame(self.window, bg='#2a2a2a', height=60) - top_frame.pack(fill=tk.X) - - # Info and sort controls - info_label = tk.Label(top_frame, text="Click an image to select it", fg='#cccccc', bg='#2a2a2a', font=("Arial", 9)) - info_label.pack(side=tk.LEFT, padx=10, pady=5) - - sort_label = tk.Label(top_frame, text="Sort by:", fg='#ffffff', bg='#2a2a2a', font=("Arial", 9)) - sort_label.pack(side=tk.RIGHT, padx=(10, 5), pady=5) - - self.sort_var = tk.StringVar(value="name") - self.sort_dropdown = ttk.Combobox(top_frame, textvariable=self.sort_var, - values=["Name", "Pixels (Large to Small)", "Pixels (Small to Large)"], - state="readonly", width=20, font=("Arial", 9)) - self.sort_dropdown.pack(side=tk.RIGHT, padx=(0, 10), pady=5) - self.sort_dropdown.bind('<>', self.on_sort_change) - - # Navigation Frame (Bottom) - nav_frame = tk.Frame(self.window, bg='#2a2a2a', height=50) - nav_frame.pack(side=tk.BOTTOM, fill=tk.X) - - self.prev_btn = tk.Button(nav_frame, text="<< Previous", command=self.prev_page, - bg='#4a4a4a', fg='#ffffff', font=("Arial", 9, 
"bold"), relief=tk.RAISED, bd=1, state=tk.DISABLED) - self.prev_btn.pack(side=tk.LEFT, padx=20, pady=10) - - self.page_label = tk.Label(nav_frame, text=f"Page 1 / {self.total_pages}", font=("Arial", 10, "bold"), fg='#ffffff', bg='#2a2a2a') - self.page_label.pack(side=tk.LEFT, expand=True) - - self.next_btn = tk.Button(nav_frame, text="Next >>", command=self.next_page, - bg='#4a4a4a', fg='#ffffff', font=("Arial", 9, "bold"), relief=tk.RAISED, bd=1) - self.next_btn.pack(side=tk.RIGHT, padx=20, pady=10) - - self.canvas = tk.Canvas(self.window, bg='#1a1a1a', highlightthickness=0) - self.scrollbar = ttk.Scrollbar(self.window, orient="vertical", command=self.canvas.yview) - self.scroll_frame = tk.Frame(self.canvas, bg='#1a1a1a') - - self.scroll_frame.bind("", lambda e: self.canvas.configure(scrollregion=self.canvas.bbox("all"))) - self.canvas.create_window((0, 0), window=self.scroll_frame, anchor="nw") - self.canvas.configure(yscrollcommand=self.scrollbar.set) - - self.canvas.pack(side="left", fill="both", expand=True) - self.scrollbar.pack(side="right", fill="y") - self.canvas.bind_all("", self._on_mousewheel) - - def _on_mousewheel(self, event): - try: self.canvas.yview_scroll(int(-1*(event.delta/120)), "units") - except: pass - - def on_click(self, filename): - self.app.select_texture_by_name(filename) - self.parent.lift() - - def prev_page(self): - if self.current_page > 0: - self.load_page(self.current_page - 1) - - def next_page(self): - if self.current_page < self.total_pages - 1: - self.load_page(self.current_page + 1) - - def load_page(self, page_num): - self.current_page = page_num - self.loading_generation += 1 - current_gen = self.loading_generation - - # Update controls - self.page_label.config(text=f"Page {page_num + 1} / {self.total_pages}") - self.prev_btn.config(state=tk.NORMAL if page_num > 0 else tk.DISABLED) - self.next_btn.config(state=tk.NORMAL if page_num < self.total_pages - 1 else tk.DISABLED) - - # Clear grid - for widget in 
self.scroll_frame.winfo_children(): - widget.destroy() - self.loaded_images.clear() - self.canvas.yview_moveto(0) - - start_idx = page_num * self.TEXTURES_PER_PAGE - end_idx = min(start_idx + self.TEXTURES_PER_PAGE, len(self.image_files)) - - # Show loading indicator - loading_lbl = tk.Label(self.scroll_frame, text="Loading...", fg="white", bg="#1a1a1a") - loading_lbl.grid(row=0, column=0, columnspan=self.GRID_COLS, pady=20) - - threading.Thread(target=self._load_page_worker, args=(start_idx, end_idx, current_gen, loading_lbl), daemon=True).start() - - def _load_page_worker(self, start_idx, end_idx, generation, loading_lbl): - for idx in range(start_idx, end_idx): - if not self.window.winfo_exists() or self.loading_generation != generation: - return - - filename = self.image_files[idx] - file_path = os.path.join(self.folder_path, filename) - - try: - img = TextureLoader.load_texture(file_path, self.is_quest) - if img: - img.thumbnail(self.THUMB_SIZE) - - # Calculate row/col relative to this page - rel_idx = idx - start_idx - row = rel_idx // self.GRID_COLS - col = rel_idx % self.GRID_COLS - - self.window.after(0, lambda i=img, f=filename, r=row, c=col: self.add_thumbnail(i, f, r, c)) - except Exception: - pass - - self.window.after(0, lambda: loading_lbl.destroy()) - - def add_thumbnail(self, img, filename, row, col): - """Add a thumbnail to the grid""" - if not self.window.winfo_exists(): - return - - try: - # Store texture resolution info - self.texture_info[filename] = { - 'width': img.width, - 'height': img.height, - 'pixels': img.width * img.height, - 'size': os.path.getsize(os.path.join(self.folder_path, filename)) - } - - photo = ImageTk.PhotoImage(img) - self.loaded_images[filename] = photo - - frame = tk.Frame(self.scroll_frame, bg='#333333', bd=1, relief=tk.SOLID) - frame.grid(row=row, column=col, padx=4, pady=4, sticky='nsew') - - btn = tk.Button(frame, image=photo, command=lambda f=filename: self.on_click(f), bg='#1a1a1a', borderwidth=0) - btn.image = 
photo - btn.pack() - - label = tk.Label(frame, text=filename[:12]+"...", font=("Arial", 8), fg='#aaaaaa', bg='#333333') - label.pack(fill=tk.X) - except Exception: - pass - - def on_sort_change(self, event=None): - """Handle sort mode change""" - sort_selection = self.sort_var.get() - - # Sort image_files based on selected mode - if sort_selection == "Name": - self.image_files.sort() - elif sort_selection == "Pixels (Large to Small)": - self.image_files.sort(key=lambda f: self.texture_info.get(f, {}).get('pixels', 0), reverse=True) - elif sort_selection == "Pixels (Small to Large)": - self.image_files.sort(key=lambda f: self.texture_info.get(f, {}).get('pixels', 0), reverse=False) - - # Reload page 0 - self.load_page(0) - -class EchoVRTextureViewer: - def __init__(self, root): - self.root = root - self.root.title("EchoVR Texture Editor - PCVR & Quest Support") - self.root.geometry("1200x800") - self.root.minsize(800, 600) - - self.colors = { - 'bg_dark': '#0a0a0a', 'bg_medium': '#1a1a1a', 'bg_light': '#2a2a2a', - 'accent_green': '#4cd964', 'accent_blue': '#007aff', 'accent_orange': '#ff9500', - 'accent_red': '#ff3b30', 'text_light': '#ffffff', 'text_muted': '#cccccc', - 'success': '#4cd964', 'warning': '#ffcc00', 'error': '#ff3b30' - } - - self.root.configure(bg=self.colors['bg_dark']) - self.config = ConfigManager.load_config() - self.output_folder = self.config.get('output_folder') - self.pcvr_input_folder = self.config.get('pcvr_input_folder') - self.quest_input_folder = self.config.get('quest_input_folder') - self.data_folder = self.config.get('data_folder') - self.extracted_folder = self.config.get('extracted_folder') - self.repacked_folder = self.config.get('repacked_folder') - - self.package_name = None - self.evr_tools = EVRToolsManager() - self.textures_folder = None - self.corresponding_folder = None - self.current_texture = None - self.replacement_texture = None - self.original_info = None - self.replacement_info = None - self.replacement_size = None - 
self.is_quest_textures = False - self.is_pcvr_textures = False - self.texture_cache = {} - self.all_textures = [] - self.filtered_textures = [] - self.is_downloading = False - - self.ensure_settings_folders() - self.setup_ui() - self.auto_detect_folders() - self.check_external_tools() - - if self.output_folder and os.path.exists(self.output_folder): - self.set_output_folder(self.output_folder) - if self.data_folder and os.path.exists(self.data_folder): - self.set_data_folder(self.data_folder) - if self.extracted_folder and os.path.exists(self.extracted_folder): - self.set_extracted_folder(self.extracted_folder) - - # Save defaults to config if they were missing - ConfigManager.save_config(**self.config) - - def ensure_settings_folders(self): - base_dir = get_base_dir() - settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) - - folders = [ - "input-pcvr", "input-quest", - "pcvr-extracted", "quest-extracted", - "output-both", "texture_cache" - ] - - for folder in folders: - path = os.path.join(settings_dir, folder) - if not os.path.exists(path): - try: - os.makedirs(path) - except: pass - - def check_external_tools(self): - """Check if external tools are runnable and warn about missing DLLs""" - tools = [ - ("texconv.exe", "Texture Converter"), - ("evrtools.exe", "EVR Tools") - ] - - for tool_name, desc in tools: - path = get_tool_path(tool_name) - if os.path.exists(path): - try: - # Run with no args. texconv exits 1 normally. 
- # If DLLs are missing, Windows returns 0xC0000135 (-1073741515) - cmd = [path] - if sys.platform == 'win32': - result = subprocess.run(cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL, - creationflags=subprocess.CREATE_NO_WINDOW) - - # Check for STATUS_DLL_NOT_FOUND - if result.returncode == 3221225781 or result.returncode == -1073741515: - self.log_info(f"❌ {desc} ({tool_name}) is missing DLLs!") - self.log_info(f" Please copy libsquish-0.dll, libstdc++-6.dll,") - self.log_info(f" and libgcc_s_seh-1.dll to the same folder as {tool_name}") - except Exception: - pass - - def auto_detect_folders(self): - base_dir = get_base_dir() - settings_dir = os.path.join(base_dir, SETTINGS_DIR_NAME) - - pcvr_folder = os.path.join(settings_dir, "input-pcvr") - if os.path.exists(pcvr_folder): - self.pcvr_input_folder = pcvr_folder - self.log_info(f"Auto-detected PCVR input folder: {pcvr_folder}") - - quest_folder = os.path.join(settings_dir, "input-quest") - if os.path.exists(quest_folder): - self.quest_input_folder = quest_folder - self.log_info(f"Auto-detected Quest input folder: {quest_folder}") - - output_both = os.path.join(settings_dir, "output-both") - if os.path.exists(output_both): - self.repacked_folder = output_both - self.log_info(f"Auto-detected output-both folder: {output_both}") - - def setup_ui(self): - self.root.columnconfigure(0, weight=1) - self.root.rowconfigure(0, weight=1) - - main_frame = tk.Frame(self.root, bg=self.colors['bg_dark']) - main_frame.grid(row=0, column=0, sticky='nsew', padx=10, pady=10) - main_frame.columnconfigure(1, weight=1) - main_frame.rowconfigure(4, weight=1) - - header_frame = tk.Frame(main_frame, bg=self.colors['bg_dark']) - header_frame.grid(row=0, column=0, columnspan=3, sticky='ew', pady=(0, 10)) - - self.tutorial_btn = tk.Button(header_frame, text="📚 Tutorial", command=lambda: TutorialPopup.show(self.root, self), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, 
bd=2, padx=15, pady=8) - self.tutorial_btn.pack(side=tk.LEFT, padx=(0, 5)) - - self.check_updates_btn = tk.Button(header_frame, text="🔄 Check Updates", command=self.check_app_updates, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=12, pady=8) - self.check_updates_btn.pack(side=tk.LEFT, padx=(0, 10)) - - title_label = tk.Label(header_frame, text="ECHO VR TEXTURE EDITOR", font=("Arial", 16, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark']) - title_label.pack(side=tk.LEFT, expand=True) - - self.update_echo_btn = tk.Button(header_frame, text="⚠ Update EchoVR", command=lambda: UpdateEchoPopup(self.root, self, self.config), bg=self.colors['accent_red'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=8) - self.update_echo_btn.pack(side=tk.RIGHT, padx=(10, 0)) - - self.status_label = tk.Label(main_frame, text="Welcome to EchoVR Texture Editor", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) - self.status_label.grid(row=1, column=0, columnspan=3, sticky='ew', pady=(0, 10)) - - self.platform_label = tk.Label(main_frame, text="Platform: Not detected", font=("Arial", 10, "bold"), fg=self.colors['warning'], bg=self.colors['bg_dark']) - self.platform_label.grid(row=2, column=0, columnspan=3, sticky='ew', pady=(0, 10)) - - evr_frame = tk.LabelFrame(main_frame, text="EVR TOOLS INTEGRATION", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) - evr_frame.grid(row=3, column=0, columnspan=3, sticky='ew', pady=(0, 10)) - evr_frame.columnconfigure(1, weight=1) - - tk.Label(evr_frame, text="Data Folder:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).grid(row=0, column=0, sticky='w', padx=10, pady=5) - - self.data_folder_label = tk.Label(evr_frame, text="Not selected", font=("Arial", 9), fg=self.colors['text_muted'], 
bg=self.colors['bg_dark']) - self.data_folder_label.grid(row=0, column=1, sticky='w', padx=5, pady=5) - - self.data_folder_btn = tk.Button(evr_frame, text="Select", command=self.select_data_folder, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, padx=10, pady=3) - self.data_folder_btn.grid(row=0, column=2, padx=10, pady=5) - - tk.Label(evr_frame, text="Extracted Folder:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).grid(row=1, column=0, sticky='w', padx=10, pady=5) - - self.extracted_folder_label = tk.Label(evr_frame, text="Not selected", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) - self.extracted_folder_label.grid(row=1, column=1, sticky='w', padx=5, pady=5) - - self.extracted_folder_btn = tk.Button(evr_frame, text="Select", command=self.select_extracted_folder, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, padx=10, pady=3) - self.extracted_folder_btn.grid(row=1, column=2, padx=10, pady=5) - - button_frame = tk.Frame(evr_frame, bg=self.colors['bg_dark']) - button_frame.grid(row=2, column=0, columnspan=3, pady=10) - - self.extract_btn = tk.Button(button_frame, text="Extract Package", command=self.extract_package, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=20, pady=8, state=tk.DISABLED) - self.extract_btn.pack(side=tk.LEFT, padx=5) - - self.repack_btn = tk.Button(button_frame, text="Repack Modified", command=self.repack_package, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED, bd=2, padx=20, pady=8, state=tk.DISABLED) - self.repack_btn.pack(side=tk.LEFT, padx=5) - - self.evr_status_label = tk.Label(evr_frame, text="Ready", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) - self.evr_status_label.grid(row=3, column=0, columnspan=3, pady=(0, 10)) - 
- content_frame = tk.Frame(main_frame, bg=self.colors['bg_dark']) - content_frame.grid(row=4, column=0, columnspan=3, sticky='nsew') - content_frame.columnconfigure(0, weight=1) - content_frame.columnconfigure(1, weight=2) - content_frame.columnconfigure(2, weight=2) - content_frame.rowconfigure(0, weight=1) - - left_frame = tk.LabelFrame(content_frame, text="AVAILABLE TEXTURES", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) - left_frame.grid(row=0, column=0, sticky='nsew', padx=(0, 5)) - left_frame.columnconfigure(0, weight=1) - left_frame.rowconfigure(1, weight=1) - - search_frame = tk.Frame(left_frame, bg=self.colors['bg_dark']) - search_frame.grid(row=0, column=0, sticky='ew', padx=5, pady=5) - - tk.Label(search_frame, text="Search:", font=("Arial", 9), fg=self.colors['text_light'], bg=self.colors['bg_dark']).pack(side=tk.LEFT, padx=(0, 5)) - - self.search_var = tk.StringVar() - self.search_entry = tk.Entry(search_frame, textvariable=self.search_var, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), insertbackground=self.colors['text_light']) - self.search_entry.pack(side=tk.LEFT, fill=tk.X, expand=True, padx=(0, 5)) - self.search_entry.bind('', self.filter_textures) - - clear_btn = tk.Button(search_frame, text="X", command=self.clear_search, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED, bd=1, width=3) - clear_btn.pack(side=tk.LEFT) - - # Grid View Button - self.grid_view_btn = tk.Button(left_frame, text="View Texture Grid", command=self.open_grid_view, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2) - self.grid_view_btn.grid(row=2, column=0, sticky='ew', padx=5, pady=5) - - list_frame = tk.Frame(left_frame, bg=self.colors['bg_dark']) - list_frame.grid(row=1, column=0, sticky='nsew', padx=5, pady=(0, 5)) - list_frame.columnconfigure(0, weight=1) - 
list_frame.rowconfigure(0, weight=1) - - # EXTENDED selectmode for multi-select - self.file_list = tk.Listbox(list_frame, bg=self.colors['bg_light'], fg=self.colors['text_light'], selectbackground=self.colors['accent_green'], selectforeground=self.colors['text_light'], font=("Arial", 9), relief=tk.SUNKEN, bd=1, selectmode=tk.EXTENDED) - - scrollbar = tk.Scrollbar(list_frame, bg=self.colors['bg_light']) - self.file_list.configure(yscrollcommand=scrollbar.set) - scrollbar.config(command=self.file_list.yview) - - self.file_list.grid(row=0, column=0, sticky='nsew') - scrollbar.grid(row=0, column=1, sticky='ns') - self.file_list.bind('<>', self.on_texture_selected) - self.file_list.bind('', self._on_listbox_scroll) - self.file_list.bind('', self._on_listbox_scroll) # Linux scroll up - self.file_list.bind('', self._on_listbox_scroll) # Linux scroll down - - # Track listbox scroll state for lazy loading - self.listbox_visible_end = 500 # Initial visible items - - middle_frame = tk.LabelFrame(content_frame, text="ORIGINAL TEXTURE", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) - middle_frame.grid(row=0, column=1, sticky='nsew', padx=5) - middle_frame.columnconfigure(0, weight=1) - middle_frame.rowconfigure(0, weight=1) - - self.original_canvas = tk.Canvas(middle_frame, bg=self.colors['bg_medium']) - self.original_canvas.grid(row=0, column=0, sticky='nsew') - - right_frame = tk.LabelFrame(content_frame, text="REPLACEMENT TEXTURE", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) - right_frame.grid(row=0, column=2, sticky='nsew', padx=(5, 0)) - right_frame.columnconfigure(0, weight=1) - right_frame.rowconfigure(0, weight=1) - - self.replacement_canvas = tk.Canvas(right_frame, bg=self.colors['bg_medium']) - self.replacement_canvas.grid(row=0, column=0, sticky='nsew') - self.replacement_canvas.bind("", self.browse_replacement_texture) - - button_panel = 
tk.Frame(main_frame, bg=self.colors['bg_dark']) - button_panel.grid(row=5, column=0, columnspan=3, sticky='ew', pady=(10, 0)) - - adb_frame = tk.Frame(button_panel, bg=self.colors['bg_dark']) - adb_frame.pack(side=tk.LEFT, fill=tk.Y) - - self.install_adb_btn = tk.Button(adb_frame, text="Install ADB Tools", command=self.install_adb_tools, bg=self.colors['accent_orange'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) - self.install_adb_btn.pack(side=tk.LEFT, padx=5) - - self.push_quest_btn = tk.Button(adb_frame, text="Push Files To Quest", command=self.push_to_quest, bg=self.colors['accent_orange'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) - self.push_quest_btn.pack(side=tk.LEFT, padx=5) - - action_frame = tk.Frame(button_panel, bg=self.colors['bg_dark']) - action_frame.pack(side=tk.RIGHT, fill=tk.Y) - - self.edit_btn = tk.Button(action_frame, text="Open in Editor", command=self.open_external_editor, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) - self.edit_btn.pack(side=tk.LEFT, padx=5) - - self.replace_btn = tk.Button(action_frame, text="Replace Texture", command=self.replace_texture, bg=self.colors['accent_green'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5, state=tk.DISABLED) - self.replace_btn.pack(side=tk.LEFT, padx=5) - - self.download_btn = tk.Button(action_frame, text="Download All Textures", command=self.download_textures, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, "bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) - self.download_btn.pack(side=tk.LEFT, padx=5) - - self.load_all_btn = tk.Button(action_frame, text="Load/Cache All", command=self.load_all_textures, bg=self.colors['accent_blue'], fg=self.colors['text_light'], font=("Arial", 9, 
"bold"), relief=tk.RAISED, bd=2, padx=15, pady=5) - self.load_all_btn.pack(side=tk.LEFT, padx=5) - - self.resolution_status = tk.Label(button_panel, text="", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_dark']) - - info_frame = tk.LabelFrame(main_frame, text="TEXTURE INFORMATION", font=("Arial", 10, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_dark'], relief=tk.RAISED, bd=2) - info_frame.grid(row=6, column=0, columnspan=3, sticky='nsew', pady=(10, 0)) - info_frame.columnconfigure(0, weight=1) - info_frame.rowconfigure(0, weight=1) - - self.info_text = scrolledtext.ScrolledText(info_frame, height=6, wrap=tk.WORD, bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.SUNKEN, bd=1) - self.info_text.grid(row=0, column=0, sticky='nsew', padx=2, pady=2) - - self.update_canvas_placeholder(self.original_canvas, "Select output folder to view textures") - self.update_canvas_placeholder(self.replacement_canvas, "Click to select replacement texture") - - def update_canvas_placeholder(self, canvas, text): - canvas.delete("all") - canvas_width = canvas.winfo_width() - canvas_height = canvas.winfo_height() - if canvas_width <= 1 or canvas_height <= 1: - canvas_width, canvas_height = 400, 300 - canvas.create_text(canvas_width//2, canvas_height//2, text=text, font=("Arial", 10), fill=self.colors['text_muted'], justify=tk.CENTER) - - def log_info(self, message): - self.info_text.insert(tk.END, message + "\n") - self.info_text.see(tk.END) - self.info_text.update_idletasks() - - def _on_listbox_scroll(self, event): - """Load more items as user scrolls near the bottom""" - try: - # Get the current visible range - visible_items = self.file_list.yview() - if visible_items[1] > 0.9: # Top 90% of the scrollbar - # Load more items if available - current_count = self.file_list.size() - total_available = len(self.filtered_textures) - if current_count < total_available: - # Load next chunk - chunk_size = 500 - next_items = 
min(current_count + chunk_size, total_available) - # Remove the "load more" indicator - if current_count > 0: - last_item = self.file_list.get(current_count - 1) - if "Scroll down to load" in last_item or "more items" in last_item: - self.file_list.delete(current_count - 1) - # Add more items - for i in range(current_count - 1, next_items): - if i >= 0: - self.file_list.insert(tk.END, self.filtered_textures[i]) - # Add indicator if more remain - if next_items < total_available: - remaining = total_available - next_items - self.file_list.insert(tk.END, f"[Loading {remaining} more items...]") - except: - pass - - - def select_data_folder(self): - path = filedialog.askdirectory(title="Select Data Folder (contains manifests and packages)") - if path: - self.set_data_folder(path) - - def set_data_folder(self, path): - self.data_folder = path - self.data_folder_label.config(text=os.path.basename(path), fg=self.colors['text_light']) - - manifests_path = os.path.join(path, "manifests") - packages_path = os.path.join(path, "packages") - - if not os.path.exists(manifests_path) or not os.path.exists(packages_path): - parent_path = os.path.dirname(path) - parent_manifests = os.path.join(parent_path, "manifests") - parent_packages = os.path.join(parent_path, "packages") - - if os.path.exists(parent_manifests) and os.path.exists(parent_packages): - path = parent_path - manifests_path = parent_manifests - packages_path = parent_packages - self.data_folder = path - self.data_folder_label.config(text=os.path.basename(path)) - - if os.path.exists(manifests_path) and os.path.exists(packages_path): - self._set_package_from_manifests(manifests_path) - self.log_info(f"✓ Data folder set: {path}") - else: - self.log_info("✗ Could not find manifests and packages folders") - - ConfigManager.save_config(data_folder=self.data_folder) - self.config['data_folder'] = self.data_folder - self.update_evr_buttons_state() - - def select_extracted_folder(self): - path = 
filedialog.askdirectory(title="Select Extracted Folder") - if path: - self.set_extracted_folder(path) - - def set_extracted_folder(self, path): - self.extracted_folder = path - self.extracted_folder_label.config(text=os.path.basename(path), fg=self.colors['text_light']) - self.set_output_folder(path) - self.update_evr_buttons_state() - ConfigManager.save_config(extracted_folder=self.extracted_folder) - self.config['extracted_folder'] = self.extracted_folder - self.log_info(f"✓ Extracted folder set: {path}") - - PACKAGE_TEXTURES = "48037dc70b0ecab2" - - def _set_package_from_manifests(self, manifests_path): - try: - packages = [] - packages_dir = os.path.join(os.path.dirname(manifests_path), "packages") - with os.scandir(manifests_path) as it: - for e in it: - if not e.is_file(): - continue - file_name = e.name - package_file = os.path.join(packages_dir, file_name) - package_file_0 = os.path.join(packages_dir, f"{file_name}_0") - if os.path.exists(package_file) or os.path.exists(package_file_0): - packages.append(file_name) - if self.PACKAGE_TEXTURES in packages: - self.package_name = self.PACKAGE_TEXTURES - elif packages: - self.package_name = packages[0] - else: - self.package_name = None - self.update_evr_buttons_state() - if packages: - self.log_info(f"Using package: {self.package_name}") - else: - self.log_info("No valid packages found") - except Exception as e: - self.log_info(f"Error reading manifests: {e}") - self.package_name = None - - def update_evr_buttons_state(self): - if self.data_folder and self.package_name and self.extracted_folder: - self.extract_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) - if os.path.exists(self.extracted_folder) and _dir_nonempty(self.extracted_folder): - self.repack_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) - else: - self.repack_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - else: - self.extract_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - 
self.repack_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - - def extract_package(self): - if not all([self.data_folder, self.package_name, self.extracted_folder]): - messagebox.showerror("Error", "Please select data folder, package, and extraction folder first.") - return - - popup = tk.Toplevel(self.root) - popup.title("Extraction Mode") - popup.geometry("400x180") - popup.configure(bg=self.colors['bg_medium']) - popup.resizable(False, False) - popup.transient(self.root) - popup.grab_set() - - try: - x = self.root.winfo_x() + (self.root.winfo_width() - 400) // 2 - y = self.root.winfo_y() + (self.root.winfo_height() - 180) // 2 - popup.geometry(f"+{x}+{y}") - except: pass - - tk.Label(popup, text="Select Extraction Mode", font=("Arial", 12, "bold"), fg=self.colors['text_light'], bg=self.colors['bg_medium']).pack(pady=(20, 10)) - tk.Label(popup, text="Full Package extraction is required for repacking.", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_medium']).pack(pady=(0, 20)) - tk.Label(popup, text="Texture mode is faster but only extracts texture files.", font=("Arial", 9), fg=self.colors['text_muted'], bg=self.colors['bg_medium']).pack(pady=(0, 20)) - - btn_frame = tk.Frame(popup, bg=self.colors['bg_medium']) - btn_frame.pack(fill=tk.X, padx=20) - - def do_extract(textures_only): - popup.destroy() - self._run_extraction(textures_only) - - tk.Button(btn_frame, text="Extract Full Package (For Repacking)", command=lambda: do_extract(False), bg=self.colors['accent_green'], fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED).pack(fill=tk.X, pady=5) - tk.Button(btn_frame, text="Extract Textures Only (For Viewing)", command=lambda: do_extract(True), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED).pack(fill=tk.X, pady=5) - tk.Button(btn_frame, text="Extract Textures Only (Fast)", command=lambda: do_extract(True), bg=self.colors['accent_green'], 
fg=self.colors['text_light'], font=("Arial", 10, "bold"), relief=tk.RAISED).pack(fill=tk.X, pady=5) - tk.Button(btn_frame, text="Extract Full Package (Slow)", command=lambda: do_extract(False), bg=self.colors['bg_light'], fg=self.colors['text_light'], font=("Arial", 9), relief=tk.RAISED).pack(fill=tk.X, pady=5) - - def _run_extraction(self, textures_only): - os.makedirs(self.extracted_folder, exist_ok=True) - mode_text = "Textures Only" if textures_only else "Full Package" - - # Show progress dialog - progress = ProgressDialog(self.root, "Extracting Package", f"Extracting {mode_text}...\n\nThis may take a few minutes...", show_bar=False) - - self.evr_status_label.config(text=f"Extracting package ({mode_text})...", fg=self.colors['accent_green']) - self.root.update_idletasks() - - def extraction_thread(): - export_type = "textures" if textures_only else "" - success, message = self.evr_tools.extract_package(self.data_folder, self.package_name, self.extracted_folder, export_type=export_type) - self.root.after(0, lambda: self.on_extraction_complete(success, message, progress)) - - threading.Thread(target=extraction_thread, daemon=True).start() - - def on_extraction_complete(self, success, message, progress=None): - if progress: - progress.close() - - if success: - self.evr_status_label.config(text="Extraction successful!", fg=self.colors['success']) - self.log_info(f"✓ EXTRACTION: {message}") - extracted_textures_path = self.find_extracted_textures(self.extracted_folder) - if extracted_textures_path: - self.set_output_folder(extracted_textures_path) - else: - self.set_output_folder(self.extracted_folder) - self.repack_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) - else: - self.evr_status_label.config(text="Extraction failed", fg=self.colors['error']) - self.log_info(f"✗ EXTRACTION FAILED: {message}") - messagebox.showerror("Extraction Error", message) - - def find_extracted_textures(self, base_dir): - target_names = {"-4707359568332879775", 
"5231972605540061417"} - target_names = {"beac1969cb7b8861", "489b7b69cb19e0e9"} - for root, dirs, _ in os.walk(base_dir): - for d in dirs: - if d in target_names: - return root - return None - - def repack_package(self): - if not all([self.data_folder, self.package_name, self.extracted_folder]): - messagebox.showerror("Error", "Please select data folder, package, and extraction folder first.") - return - - input_folder = self.extracted_folder - if not input_folder or not os.path.exists(input_folder): - messagebox.showerror("Error", "Extracted folder not set or found. Please perform a full extraction first.") - if self.is_quest_textures and self.quest_input_folder: - input_folder = self.quest_input_folder - self.log_info("🎯 Using Quest input folder for repacking") - elif self.is_pcvr_textures and self.pcvr_input_folder: - input_folder = self.pcvr_input_folder - self.log_info("🎮 Using PCVR input folder for repacking") - else: - messagebox.showerror("Error", "Input folder not found. Please check input-pcvr/input-quest folders.") - return - - self.log_info(f"📦 Using '{os.path.basename(input_folder)}' as input for repacking.") - - script_dir = os.path.dirname(os.path.abspath(__file__)) - output_dir = self.repacked_folder - - confirm = messagebox.askyesno("Confirm Repack", f"Repack modified files to:\n{output_dir}\n\nContinue?") - if not confirm: return - - # Show progress dialog - progress = ProgressDialog(self.root, "Repacking Package", "Rebuilding package files...\n\nThis may take a few minutes...", show_bar=False) - - self.evr_status_label.config(text="Repacking package...", fg=self.colors['accent_green']) - self.root.update_idletasks() - - def repacking_thread(): - success, message = self.evr_tools.repack_package(output_dir, self.package_name, self.data_folder, input_folder) - self.root.after(0, lambda: self.on_repacking_complete(success, message, output_dir, progress)) - - threading.Thread(target=repacking_thread, daemon=True).start() - - def 
on_repacking_complete(self, success, message, output_dir, progress=None): - if progress: - progress.close() - - if success: - self.evr_status_label.config(text="Repacking successful!", fg=self.colors['success']) - self.log_info(f"✓ REPACKING: {message}") - packages_path = os.path.join(output_dir, "packages") - manifests_path = os.path.join(output_dir, "manifests") - if os.path.exists(packages_path) and os.path.exists(manifests_path): - self.log_info(f"✓ Packages and manifests created in: {output_dir}") - self.update_quest_push_button() - else: - self.log_info("⚠ Packages or manifests folders not found in output directory") - else: - self.evr_status_label.config(text="Repacking failed", fg=self.colors['error']) - self.log_info(f"✗ REPACKING FAILED: {message}") - messagebox.showinfo("Repacking Result", message) - - def check_app_updates(self): - """Check for app updates on GitHub""" - self.log_info("🔄 Checking for updates...") - self.check_updates_btn.config(state=tk.DISABLED, text="Checking...") - self.root.update_idletasks() - - def check_thread(): - has_update, latest_version, download_url = check_for_updates() - self.root.after(0, lambda: self.on_update_check_complete(has_update, latest_version, download_url)) - - threading.Thread(target=check_thread, daemon=True).start() - - def on_update_check_complete(self, has_update, latest_version, download_url): - self.check_updates_btn.config(state=tk.NORMAL, text="🔄 Check Updates") - - if has_update: - self.log_info(f"✅ Update available: v{latest_version}") - UpdateNotificationDialog(self.root, latest_version, download_url) - else: - self.log_info(f"✅ You are running the latest version (v{APP_VERSION})") - messagebox.showinfo("Updates", f"You are running the latest version!\n\nCurrent: v{APP_VERSION}") - - def install_adb_tools(self): - self.log_info("Installing ADB Platform Tools...") - def install_thread(): - success, message = ADBManager.install_adb_tools() - self.root.after(0, lambda: 
self.on_adb_install_complete(success, message)) - threading.Thread(target=install_thread, daemon=True).start() - - def on_adb_install_complete(self, success, message): - if success: - self.log_info(f"✅ ADB Tools installed: {message}") - messagebox.showinfo("Success", "ADB Platform Tools installed successfully!") - self.test_adb_connection() - else: - self.log_info(f"❌ ADB installation failed: {message}") - messagebox.showerror("Error", f"ADB installation failed: {message}") - - def test_adb_connection(self): - def test_thread(): - success, message, adb_path = ADBManager.check_adb() - self.root.after(0, lambda: self.on_adb_test_complete(success, message)) - threading.Thread(target=test_thread, daemon=True).start() - - def on_adb_test_complete(self, success, message): - if success: - self.log_info(f"✅ ADB: {message}") - if self.is_quest_textures: - self.push_quest_btn.config(state=tk.NORMAL, bg=self.colors['accent_orange']) - else: - self.log_info(f"❌ ADB: {message}") - self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - - def update_quest_push_button(self): - if self.is_quest_textures and self.output_folder: - self.test_adb_connection() - else: - self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - - def push_to_quest(self): - if not self.output_folder: - messagebox.showerror("Error", "Please select output folder first") - return - success, message, _ = ADBManager.check_adb() - if not success: - messagebox.showerror("ADB Error", f"Cannot connect to Quest:\n{message}") - return - - result = messagebox.askyesno("Push to Quest", "This will push files to your Quest headset.\n\nContinue?", icon='warning') - if not result: return - - self.log_info("🚀 Starting Quest file push...") - self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light'], text="Pushing...") - self.root.update_idletasks() - - def push_thread(): - try: - push_folder = self.output_folder - if self.repacked_folder and 
os.path.exists(self.repacked_folder): - if (os.path.exists(os.path.join(self.repacked_folder, "manifests")) or os.path.exists(os.path.join(self.repacked_folder, "packages"))): - push_folder = self.repacked_folder - self.log_info("📦 Using repacked folder") - - quest_dest_path = "/sdcard/readyatdawn/files/_data/5932408047/rad15/android" - success, message = ADBManager.push_to_quest(push_folder, quest_dest_path) - self.root.after(0, lambda: self.on_quest_push_complete(success, message)) - except Exception as thread_error: - error_message = f"Push thread error: {str(thread_error)}" - self.root.after(0, lambda: self.on_quest_push_complete(False, error_message)) - - threading.Thread(target=push_thread, daemon=True).start() - - def on_quest_push_complete(self, success, message): - if success: - messagebox.showinfo("Success", f"Files pushed to Quest!\n\n{message}") - self.log_info(f"✅ QUEST PUSH: {message}") - else: - messagebox.showerror("Error", f"Failed to push files:\n{message}") - self.log_info(f"❌ QUEST PUSH FAILED: {message}") - self.push_quest_btn.config(state=tk.NORMAL, bg=self.colors['accent_orange'], text="Push Files To Quest") - self.update_quest_push_button() - - def set_output_folder(self, path): - self.output_folder = path - folder_name = os.path.basename(path).lower() - if "quest" in folder_name: - self.is_quest_textures = True - self.is_pcvr_textures = False - self.textures_folder = os.path.join(path, "5231972605540061417") - self.corresponding_folder = os.path.join(path, "-2094201140079393352") - self.textures_folder = os.path.join(path, "489b7b69cb19e0e9") - self.corresponding_folder = os.path.join(path, "e2ef0854d0cd69b8") - self.platform_label.config(text="Platform: Quest (ASTC)", fg=self.colors['success']) - self.log_info("🎯 Switched to Quest mode") - elif "pcvr" in folder_name: - self.is_quest_textures = False - self.is_pcvr_textures = True - self.textures_folder = os.path.join(path, "-4707359568332879775") - self.corresponding_folder = 
os.path.join(path, "5353709876897953952") - self.textures_folder = os.path.join(path, "beac1969cb7b8861") - self.corresponding_folder = os.path.join(path, "4a4c32c49300b8a0") - self.platform_label.config(text="Platform: PCVR (DDS)", fg=self.colors['accent_blue']) - self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - self.log_info("🎮 Switched to PCVR mode") - else: - quest_textures_folder = os.path.join(path, "5231972605540061417") - pcvr_textures_folder = os.path.join(path, "-4707359568332879775") - quest_textures_folder = os.path.join(path, "489b7b69cb19e0e9") - pcvr_textures_folder = os.path.join(path, "beac1969cb7b8861") - if getattr(sys, 'frozen', False): - parent_dir = os.path.dirname(os.path.dirname(path)) - if not os.path.exists(quest_textures_folder): - quest_textures_folder = os.path.join(parent_dir, os.path.basename(path), "5231972605540061417") - quest_textures_folder = os.path.join(parent_dir, os.path.basename(path), "489b7b69cb19e0e9") - if not os.path.exists(pcvr_textures_folder): - pcvr_textures_folder = os.path.join(parent_dir, os.path.basename(path), "-4707359568332879775") - pcvr_textures_folder = os.path.join(parent_dir, os.path.basename(path), "beac1969cb7b8861") - - if os.path.exists(quest_textures_folder): - self.textures_folder = quest_textures_folder - self.corresponding_folder = os.path.join(path, "-2094201140079393352") - self.corresponding_folder = os.path.join(path, "e2ef0854d0cd69b8") - self.is_quest_textures = True - self.is_pcvr_textures = False - self.platform_label.config(text="Platform: Quest (ASTC)", fg=self.colors['success']) - self.log_info("🎯 Auto-detected Quest textures") - elif os.path.exists(pcvr_textures_folder): - self.textures_folder = pcvr_textures_folder - self.corresponding_folder = os.path.join(path, "5353709876897953952") - self.corresponding_folder = os.path.join(path, "4a4c32c49300b8a0") - self.is_quest_textures = False - self.is_pcvr_textures = True - 
self.platform_label.config(text="Platform: PCVR (DDS)", fg=self.colors['accent_blue']) - self.push_quest_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - self.log_info("🎮 Auto-detected PCVR textures") - else: - self.textures_folder = path - self.log_info("⚠ Could not determine platform structure, using root folder") - - if os.path.exists(self.textures_folder): - platform_text = "Quest" if self.is_quest_textures else "PCVR" - self.status_label.config(text=f"Output folder: {os.path.basename(path)} ({platform_text})") - self.log_info(f"Output folder set: {path} ({platform_text})") - self.load_textures() - ConfigManager.save_config(output_folder=self.output_folder) - self.config['output_folder'] = self.output_folder - self.update_quest_push_button() - - def filter_textures(self, event=None): - search_text = self.search_var.get().lower() - if not search_text: - self.filtered_textures = self.all_textures.copy() - else: - self.filtered_textures = [texture for texture in self.all_textures if search_text in texture.lower()] - self.file_list.delete(0, tk.END) - # Load textures in chunks to avoid UI freeze - if self.filtered_textures: - chunk_size = 500 - for i in range(0, min(len(self.filtered_textures), chunk_size)): - self.file_list.insert(tk.END, self.filtered_textures[i]) - - # Show indicator if there are more - if len(self.filtered_textures) > chunk_size: - self.file_list.insert(tk.END, f"... 
({len(self.filtered_textures) - chunk_size} more items - scroll to load)") - - def clear_search(self): - self.search_var.set("") - self.filter_textures() - - def load_textures(self): - self.file_list.delete(0, tk.END) - self.file_list.insert(tk.END, "Loading textures...") - self.update_canvas_placeholder(self.original_canvas, "Loading textures...") - self.root.update_idletasks() - threading.Thread(target=self._load_textures_worker, daemon=True).start() - - def _is_valid_texture_file(self, file_path): - try: - if not os.path.isfile(file_path): return False - size = os.path.getsize(file_path) - if size == 0: return False - - if not self.is_pcvr_textures and not self.is_quest_textures: - return True - - with open(file_path, 'rb') as f: - header = f.read(16) - - if self.is_pcvr_textures: - return header.startswith(b'DDS ') - - if self.is_quest_textures: - if header.startswith(b'\x13\xAB\xA1\x5C'): return True - if header.startswith(b'\xABKTX 11') or header.startswith(b'\xABKTX 20'): return True - if b'BcBP' in header: return True - if header.startswith(b'PVR'): return True - - if size % 16 == 0: - if header.strip().startswith(b'{') or header.strip().startswith(b'<'): - return False - return True - return False - - return True - except: - return False - - def _load_textures_worker(self): - if not self.textures_folder or not os.path.exists(self.textures_folder): - self.root.after(0, lambda: self._on_textures_loaded([], 0)) - return - - cached_files = TextureCacheManager.get_cached_files(self.textures_folder) - if cached_files is not None: - self.root.after(0, lambda: self._on_textures_loaded(cached_files, len(cached_files))) - return - - valid_files = [] - try: - with os.scandir(self.textures_folder) as it: - for e in it: - if e.is_file() and self._is_valid_texture_file(e.path): - valid_files.append(e.name) - - TextureCacheManager.update_cache(self.textures_folder, valid_files) - self.root.after(0, lambda: self._on_textures_loaded(valid_files, len(valid_files))) - except 
Exception as e: - print(f"Scan Error: {e}") - self.root.after(0, lambda: self._on_textures_loaded([], 0)) - - def _on_textures_loaded(self, files, count): - self.all_textures = sorted(files) - self.filtered_textures = self.all_textures.copy() - self.file_list.delete(0, tk.END) - if self.filtered_textures: - # Load first batch to avoid UI freeze with large texture counts - chunk_size = 500 - for i in range(0, min(len(self.filtered_textures), chunk_size)): - self.file_list.insert(tk.END, self.filtered_textures[i]) - - # Show indicator if there are more items - if len(self.filtered_textures) > chunk_size: - remaining = len(self.filtered_textures) - chunk_size - self.file_list.insert(tk.END, f"[Scroll down to load {remaining} more items]") - - # Cleanup cache to prevent disk bloat - TextureLoader.cleanup_cache() - - platform_text = "Quest" if self.is_quest_textures else "PCVR" - status_text = f"Found {count} {platform_text} texture files" - self.status_label.config(text=status_text) - self.log_info(f"Found {count} {platform_text} texture files") - if count == 0: - self.log_info("No texture files found.") - self.update_canvas_placeholder(self.original_canvas, "No textures found") - else: - self.update_canvas_placeholder(self.original_canvas, "Select a texture to view") - - def on_texture_selected(self, event): - if not self.file_list.curselection(): return - - # Multi-select: Show count if multiple - selection = self.file_list.curselection() - if len(selection) > 1: - self.update_canvas_placeholder(self.original_canvas, f"{len(selection)} files selected") - self.replace_btn.config(state=tk.NORMAL, bg=self.colors['accent_green'], text=f"Replace {len(selection)} Files") - self.edit_btn.config(state=tk.DISABLED) - return - - index = selection[0] - texture_name = self.filtered_textures[index] - self.current_texture = os.path.join(self.textures_folder, texture_name) - self.replace_btn.config(text="Replace Texture") - - try: - 
self.update_canvas_placeholder(self.original_canvas, "Loading texture...") - self.root.update_idletasks() - def load_texture_thread(): - try: - image = TextureLoader.load_texture(self.current_texture, self.is_quest_textures) - self.root.after(0, lambda: self.display_texture_result(image)) - except Exception as e: - self.root.after(0, lambda: self.display_texture_error(e)) - threading.Thread(target=load_texture_thread, daemon=True).start() - except Exception as e: - self.log_info(f"Error loading texture: {e}") - self.update_canvas_placeholder(self.original_canvas, "Error loading texture") - - def display_texture_result(self, image): - if image: - self.display_image_on_canvas(image, self.original_canvas) - if self.is_quest_textures: - self.original_info = { - 'file_size': os.path.getsize(self.current_texture), - 'format': 'ASTC', 'width': image.width, 'height': image.height - } - else: - self.original_info = DDSHandler.get_dds_info(self.current_texture) - if self.original_info is None: - try: - size = os.path.getsize(self.current_texture) - except: - size = 0 - self.original_info = { - 'file_size': size, - 'format': 'DDS/Raw', - 'width': image.width, - 'height': image.height - } - - self.update_texture_info() - self.edit_btn.config(state=tk.NORMAL, bg=self.colors['accent_blue']) - self.replace_btn.config(state=tk.NORMAL, bg=self.colors['accent_green']) - else: - self.update_canvas_placeholder(self.original_canvas, "Failed to load texture") - self.edit_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - self.replace_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - - def display_texture_error(self, error): - self.log_info(f"Error loading texture: {error}") - self.update_canvas_placeholder(self.original_canvas, "Error loading texture") - self.edit_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - self.replace_btn.config(state=tk.DISABLED, bg=self.colors['bg_light']) - - def browse_replacement_texture(self, event): - if not 
self.current_texture and len(self.file_list.curselection()) == 0: - messagebox.showinfo("Info", "Please select an original texture first") - return - - file_types = [("PNG files", "*.png"), ("DDS files", "*.dds"), ("All files", "*.*")] - if self.is_quest_textures: - file_types = [("PNG files", "*.png"), ("All files", "*.*")] - - file_path = filedialog.askopenfilename(title="Select Replacement Texture", filetypes=file_types) - - if file_path: - self.replacement_texture = file_path - try: - def load_replacement_thread(): - try: - if self.is_quest_textures: - image = Image.open(file_path).convert("RGBA") - elif file_path.lower().endswith(".png"): - image = Image.open(file_path).convert("RGBA") - else: - image = TextureLoader.load_texture(file_path, False) - self.root.after(0, lambda: self.display_replacement_result(image, file_path)) - except Exception as e: - self.root.after(0, lambda: self.display_replacement_error(e)) - threading.Thread(target=load_replacement_thread, daemon=True).start() - except Exception as e: - self.log_info(f"Error loading replacement texture: {e}") - self.update_canvas_placeholder(self.replacement_canvas, "Error loading replacement") - - def display_replacement_result(self, image, file_path): - if image: - self.display_image_on_canvas(image, self.replacement_canvas) - if self.is_quest_textures: - self.replacement_info = { - 'file_size': os.path.getsize(file_path), - 'format': 'PNG', 'width': image.width, 'height': image.height - } - self.replacement_size = None - else: - self.replacement_info = DDSHandler.get_dds_info(file_path) - if self.replacement_info is None: - self.replacement_info = { - 'format': 'PNG', 'width': image.width, 'height': image.height, - 'file_size': os.path.getsize(file_path) - } - self.replacement_size = None - else: - self.replacement_size = self.replacement_info.get('file_size') - self.update_texture_info() - self.check_resolution_match() - self.log_info(f"Replacement loaded: {os.path.basename(file_path)}") - else: - 
self.update_canvas_placeholder(self.replacement_canvas, "Failed to load replacement") - - def display_replacement_error(self, error): - self.log_info(f"Error loading replacement texture: {error}") - self.update_canvas_placeholder(self.replacement_canvas, "Error loading replacement") - - def display_image_on_canvas(self, image, canvas): - canvas.delete("all") - canvas_width = canvas.winfo_width() - canvas_height = canvas.winfo_height() - if canvas_width <= 1 or canvas_height <= 1: - canvas_width, canvas_height = 400, 300 - - img_width, img_height = image.size - ratio = min(canvas_width / img_width, canvas_height / img_height) - new_size = (int(img_width * ratio), int(img_height * ratio)) - - resized_image = image.resize(new_size, Image.Resampling.LANCZOS) - photo = ImageTk.PhotoImage(resized_image) - x_pos = (canvas_width - new_size[0]) // 2 - y_pos = (canvas_height - new_size[1]) // 2 - canvas.create_image(x_pos, y_pos, anchor=tk.NW, image=photo) - canvas.image = photo - - def update_texture_info(self): - info = "" - if self.original_info: - platform_text = "Quest" if self.is_quest_textures else "PCVR" - info += f"=== ORIGINAL ({platform_text}) ===\n" - info += f"File: {os.path.basename(self.current_texture)}\n" - info += f"Size: {self.original_info['file_size']:,} bytes\n" - if 'width' in self.original_info: - info += f"Dim: {self.original_info['width']} x {self.original_info['height']}\n" - info += f"Format: {self.original_info['format']}\n\n" - - if self.replacement_info: - info += "=== REPLACEMENT ===\n" - info += f"File: {os.path.basename(self.replacement_texture)}\n" - if 'width' in self.replacement_info: - info += f"Dim: {self.replacement_info['width']} x {self.replacement_info['height']}\n" - info += f"Format: {self.replacement_info['format']}\n" - - self.info_text.delete(1.0, tk.END) - self.info_text.insert(tk.END, info) - - def check_resolution_match(self): - if self.original_info and self.replacement_info and 'width' in self.original_info and 'width' in 
self.replacement_info: - ow, oh = self.original_info['width'], self.original_info['height'] - rw, rh = self.replacement_info['width'], self.replacement_info['height'] - if ow == rw and oh == rh: - self.resolution_status.config(text="✓ Resolutions match", fg=self.colors['success']) - else: - self.resolution_status.config( - text=f"⚠ Resolution will be adjusted to {ow}×{oh} when replacing", - fg=self.colors['warning'] - ) - else: - self.resolution_status.config(text="") - - def open_external_editor(self): - if not self.current_texture: return - try: - if sys.platform == 'win32': os.startfile(self.current_texture) - elif sys.platform == 'darwin': subprocess.call(('open', self.current_texture)) - else: subprocess.call(('xdg-open', self.current_texture)) - except Exception as e: - messagebox.showerror("Error", f"Could not open external editor: {str(e)}") - - def replace_texture(self): - if not self.replacement_texture or not self.output_folder: - return - - selection = self.file_list.curselection() - if not selection: - return - - if len(selection) > 1: - confirm = messagebox.askyesno("Multi-Replace", f"Are you sure you want to replace {len(selection)} textures with the selected image?") - if not confirm: - return - - replacement_size = None - if not self.is_quest_textures and self.replacement_info and 'file_size' in self.replacement_info: - replacement_size = self.replacement_info.get('file_size') - - def do_one(index): - texture_name = self.filtered_textures[index] - current_texture_path = os.path.join(self.textures_folder, texture_name) - if self.is_quest_textures: - return texture_name, TextureReplacer.replace_quest_texture(self.extracted_folder, current_texture_path, self.replacement_texture, self.texture_cache) - return texture_name, TextureReplacer.replace_pcvr_texture(self.extracted_folder, current_texture_path, self.replacement_texture, replacement_size) - - results = [] - if len(selection) > 3: - max_workers = min(4, len(selection), (os.cpu_count() or 2) + 1) 
- with ThreadPoolExecutor(max_workers=max_workers) as ex: - futures = [ex.submit(do_one, idx) for idx in selection] - for f in as_completed(futures): - try: - results.append(f.result()) - except Exception as e: - results.append((None, (False, str(e)))) - else: - for index in selection: - results.append(do_one(index)) - - for texture_name, (success, message) in results: - if texture_name is None: - continue - if success: - self.log_info(f"✓ Replaced {texture_name}") - else: - self.log_info(f"✗ Failed {texture_name}: {message}") - - ok = sum(1 for _, (s, _) in results if s) - fail = len(results) - ok - msg = f"Replaced {ok} texture(s)." + (f" {fail} failed." if fail else "") - messagebox.showinfo("Complete", msg) - if len(selection) == 1: - self.on_texture_selected(None) - - def download_textures(self): - if self.is_downloading: - self.log_info("Download already in progress...") - return - confirm = messagebox.askyesno("Download Textures", "Download texture cache archive (~400MB)?") - if not confirm: return - self.is_downloading = True - self.download_btn.config(state=tk.DISABLED, text="Downloading...", bg=self.colors['accent_orange']) - threading.Thread(target=self._download_worker, daemon=True).start() - - def _download_worker(self): - url = "https://github.com/heisthecat31/EchoVR-Texture-Editor/releases/download/quest/texture_cache.zip" - if getattr(sys, 'frozen', False): - application_path = os.path.dirname(sys.executable) - else: - application_path = os.path.dirname(os.path.abspath(__file__)) - # Extract into the persistent settings cache directory and protect existing files - extract_to_path = CACHE_DIR - temp_zip_path = os.path.join(tempfile.gettempdir(), "texture_cache.zip") - try: - self.root.after(0, lambda: self.log_info(f"Downloading from: {url}")) - urllib.request.urlretrieve(url, temp_zip_path) - self.root.after(0, lambda: self.log_info("✓ Download complete. 
Extracting...")) - # Ensure cache dir exists - os.makedirs(extract_to_path, exist_ok=True) - - # Safely extract zip entries one-by-one and do NOT overwrite existing files - with zipfile.ZipFile(temp_zip_path, 'r') as zip_ref: - for member in zip_ref.infolist(): - # Skip directories - if member.is_dir(): - continue - - # Flatten any leading 'texture_cache/' from the zip entry path - member_path = member.filename - if member_path.startswith('texture_cache/'): - member_path = member_path[len('texture_cache/'):] - if member_path.startswith('/') or member_path.startswith('\\') or member_path == '': - continue - - # Normalize the target path and avoid path traversal - target_path = os.path.normpath(os.path.join(extract_to_path, member_path)) - if not target_path.startswith(os.path.normpath(extract_to_path) + os.sep) and os.path.normpath(extract_to_path) != os.path.normpath(target_path): - # Unsafe path - skip - continue - - target_dir = os.path.dirname(target_path) - if not os.path.exists(target_dir): - try: - os.makedirs(target_dir, exist_ok=True) - except: - pass - - # If file already exists, skip extracting to avoid overwrite - if os.path.exists(target_path): - continue - - # Extract this single file - try: - with zip_ref.open(member, 'r') as source, open(target_path, 'wb') as target: - shutil.copyfileobj(source, target) - except Exception: - # If extraction of this member fails, skip it and continue - continue - try: os.remove(temp_zip_path) - except: pass - self.root.after(0, lambda: self._on_download_finished(True, "Texture cache downloaded successfully!")) - except Exception as e: - self.root.after(0, lambda: self._on_download_finished(False, f"Download failed: {str(e)}")) - - def _on_download_finished(self, success, message): - self.is_downloading = False - self.download_btn.config(state=tk.NORMAL, text="Download All Textures", bg=self.colors['accent_blue']) - if success: - messagebox.showinfo("Success", message) - self.log_info(f"✅ {message}") - else: - 
messagebox.showerror("Error", message) - self.log_info(f"❌ {message}") - - # NEW METHODS FOR GRID VIEW - def open_grid_view(self): - if not self.textures_folder: - messagebox.showerror("Error", "No textures loaded.") - return - TextureGridPopup(self.root, self, self.filtered_textures, self.textures_folder, self.is_quest_textures) - - def select_texture_by_name(self, filename): - if filename in self.filtered_textures: - idx = self.filtered_textures.index(filename) - self.file_list.selection_clear(0, tk.END) - self.file_list.selection_set(idx) - self.file_list.see(idx) - self.on_texture_selected(None) - - def load_all_textures(self): - if not self.textures_folder or not self.all_textures: - messagebox.showinfo("Info", "No textures found to load.") - return - - confirm = messagebox.askyesno("Load All Textures", f"This will load and cache {len(self.all_textures)} textures.\nThis process converts textures to PNG for previewing.\nIt may take a while depending on the number of files.\n\nContinue?") - if not confirm: return - - self.load_all_btn.config(state=tk.DISABLED) - progress = ProgressDialog(self.root, "Caching Textures", "Generating texture cache...", show_bar=True) - - threading.Thread(target=self._load_all_worker, args=(progress,), daemon=True).start() - - def _load_all_worker(self, progress): - total = len(self.all_textures) - failed = [] - skipped = 0 - success = 0 - - for i, texture_name in enumerate(self.all_textures): - if progress.cancel_requested: - break - - full_path = os.path.join(self.textures_folder, texture_name) - try: - # Check if already cached to avoid unnecessary loading/decoding - cache_path = TextureLoader.get_cache_path(full_path) - if os.path.exists(cache_path) and os.path.getsize(cache_path) > 0: - skipped += 1 - else: - img = TextureLoader.load_texture(full_path, self.is_quest_textures) - if img: - success += 1 - else: - # Determine format for report - fmt = "ASTC" if self.is_quest_textures else "Unknown" - if not self.is_quest_textures: - 
info = DDSHandler.get_dds_info(full_path) - if info: fmt = info.get('format', 'Unknown') - failed.append(f"{texture_name} ({fmt})") - except Exception as e: - failed.append(f"{texture_name} (Error: {str(e)})") - - if not progress.update(i + 1, total): - break - - self.root.after(0, lambda: self._on_load_all_complete(progress, success, skipped, failed)) - - def _on_load_all_complete(self, progress, success, skipped, failed): - progress.close() - self.load_all_btn.config(state=tk.NORMAL) - - msg = f"Processing Complete.\n\nCached: {success}\nSkipped (Already Cached): {skipped}\nFailed: {len(failed)}" - if failed: - msg += "\n\nFailures (First 20):\n" + "\n".join(failed[:20]) - if len(failed) > 20: msg += f"\n...and {len(failed)-20} more." - - try: - with open("texture_load_failures.txt", "w") as f: - f.write("Failed Textures:\n" + "\n".join(failed)) - msg += "\n\nFull list saved to texture_load_failures.txt" - except: pass - messagebox.showwarning("Load Results", msg) - else: - messagebox.showinfo("Load Results", msg) - -def main(): - root = tk.Tk() - - # Set app icon - icon_path = os.path.join(get_base_dir(), "icon.ico") - - # Check if running as PyInstaller bundle (onefile) where resources are in _MEIPASS - if hasattr(sys, '_MEIPASS'): - bundled_icon = os.path.join(sys._MEIPASS, "icon.ico") - if os.path.exists(bundled_icon): - icon_path = bundled_icon - - if os.path.exists(icon_path): - try: - root.iconbitmap(icon_path) - except Exception: - pass - - app = EchoVRTextureViewer(root) - root.mainloop() - -if __name__ == '__main__': - main() - # Check if running as PyInstaller bundle (onefile) where resources are in _MEIPASS - if hasattr(sys, '_MEIPASS'): - bundled_icon = os.path.join(sys._MEIPASS, "icon.ico") - if os.path.exists(bundled_icon): - icon_path = bundled_icon - - if os.path.exists(icon_path): - try: - root.iconbitmap(icon_path) - except Exception: - pass - - app = EchoVRTextureViewer(root) - root.mainloop() - -if __name__ == '__main__': - main() \ No 
newline at end of file From 100d0a67329244c9554e8f2dc2942c060f3e6e77 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Sat, 28 Feb 2026 17:27:24 +0000 Subject: [PATCH 05/11] --quick has been added for repacking # Changelog ## New Feature: Quick Repack (`--quick`) ### Purpose The `--quick` flag (Quick Repack) is designed to drastically reduce the time required to apply mods to Echo VR. Instead of rebuilding the entire package set (which can take several minutes and requires significant disk space), Quick Repack modifies the game files in-place by appending only the changed data. ### How it Works 1. **Manifest Backup & Restoration**: * On the first run, the tool creates a backup of the original manifest (`.bak`). * On subsequent runs, it **always loads the manifest from the backup**. This ensures that every repack operation starts from the clean, original game state. This prevents the game packages from growing indefinitely or creating an endless chain of new package files (e.g., `_3`, `_4`, `_5`...) when applying mods multiple times. 2. **Smart Package Management**: * The tool identifies the "safe" base packages (e.g., `_0`, `_1`, `_2`) that contain the original game data. * It targets the next available package index (e.g., `_3`) for writing modified files. * If a previous mod package exists (e.g., `_3`), the tool effectively overwrites/updates it by resetting the manifest state to the backup before appending new data. 3. **In-Place Modification**: * Modified files are compressed and appended to the end of the active package file. * The manifest is updated to point to these new locations for the modified files, while keeping references to original files unchanged. * This avoids the need to read, decompress, and re-write the gigabytes of unmodified game data. 4. 
**Safety Mechanisms**: * **Truncation**: If the manifest references package files that do not exist on disk (e.g., from a previous failed run or manual deletion), the tool automatically truncates the manifest to match the actual files present. * **Size Checks**: The tool calculates offsets based on the actual file size on disk to ensure data integrity. ### Usage ```bash evrtools -mode build -package -data "" -input "" -output "dummy" -quick -force --- cmd/evrtools/main.go | 9 + pkg/manifest/repack.go | 388 ++++++++++++++++++++++++++++++++++++++--- 2 files changed, 373 insertions(+), 24 deletions(-) diff --git a/cmd/evrtools/main.go b/cmd/evrtools/main.go index 6e49510..a6cf782 100644 --- a/cmd/evrtools/main.go +++ b/cmd/evrtools/main.go @@ -22,6 +22,7 @@ var ( forceOverwrite bool useDecimalName bool exportTypes string + quickMode bool ) func init() { @@ -34,6 +35,7 @@ func init() { flag.BoolVar(&forceOverwrite, "force", false, "Allow non-empty output directory") flag.BoolVar(&useDecimalName, "decimal-names", false, "Use decimal format for filenames (default is hex)") flag.StringVar(&exportTypes, "export", "", "Comma-separated list of types to export (textures, tints)") + flag.BoolVar(&quickMode, "quick", false, "Quick swap mode (modifies game files in-place)") } func main() { @@ -187,6 +189,13 @@ func runBuild() error { if dataDir != "" { manifestPath := filepath.Join(dataDir, "manifests", packageName) if _, err := os.Stat(manifestPath); err == nil { + if quickMode { + m, err := manifest.ReadFile(manifestPath) + if err != nil { + return fmt.Errorf("read manifest: %w", err) + } + return manifest.QuickRepack(m, files, dataDir, packageName) + } return runRepack(files) } } diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index 9792994..25fffe3 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -39,47 +39,79 @@ type fcWrapper struct { } type packageWriter struct { - fileHandle *os.File - pkgIndex uint32 - outputDir string - pkgName string - 
created map[uint32]bool + fileHandle *os.File + pkgIndex uint32 + outputDir string + pkgName string + created map[uint32]bool + currentOffset int64 + minPkgIndex uint32 } func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize uint32) error { os.MkdirAll(fmt.Sprintf("%s/packages", pw.outputDir), 0777) cEntry := Frame{} - activePackageNum := uint32(0) if len(manifest.Frames) > 0 { cEntry = manifest.Frames[len(manifest.Frames)-1] - activePackageNum = cEntry.PackageIndex } + activePackageNum := cEntry.PackageIndex - if int64(cEntry.Offset)+int64(cEntry.CompressedSize)+int64(len(data)) > math.MaxInt32 { - activePackageNum++ + // Ensure we don't write to protected original packages + if activePackageNum < pw.minPkgIndex { + activePackageNum = pw.minPkgIndex + } + + // Ensure manifest knows about this package + if manifest.Header.PackageCount <= activePackageNum { manifest.Header.PackageCount = activePackageNum + 1 } - if pw.fileHandle == nil || pw.pkgIndex != activePackageNum { - if pw.fileHandle != nil { - pw.fileHandle.Close() + // Check if the current frame forces a rotation, BUT only if we are still in the same package. + // If we moved to a new package (activePackageNum > cEntry.PackageIndex), the offset of cEntry is irrelevant. 
+ if activePackageNum == cEntry.PackageIndex { + if int64(cEntry.Offset)+int64(cEntry.CompressedSize)+int64(len(data)) > math.MaxInt32 { + activePackageNum++ + manifest.Header.PackageCount = activePackageNum + 1 } + } - currentPackagePath := fmt.Sprintf("%s/packages/%s_%d", pw.outputDir, pw.pkgName, activePackageNum) - flags := os.O_RDWR | os.O_CREATE | os.O_APPEND + // Open file and verify size constraints (handling existing files or rotation) + for { + if pw.fileHandle == nil || pw.pkgIndex != activePackageNum { + if pw.fileHandle != nil { + pw.fileHandle.Close() + } + + currentPackagePath := fmt.Sprintf("%s/packages/%s_%d", pw.outputDir, pw.pkgName, activePackageNum) + flags := os.O_RDWR | os.O_CREATE | os.O_APPEND - if !pw.created[activePackageNum] { - flags = os.O_RDWR | os.O_CREATE | os.O_TRUNC - pw.created[activePackageNum] = true + if !pw.created[activePackageNum] { + flags = os.O_RDWR | os.O_CREATE | os.O_TRUNC + pw.created[activePackageNum] = true + } + + f, err := os.OpenFile(currentPackagePath, flags, 0777) + if err != nil { + return err + } + pw.fileHandle = f + pw.pkgIndex = activePackageNum + + stat, err := pw.fileHandle.Stat() + if err != nil { + return fmt.Errorf("stat package file: %w", err) + } + pw.currentOffset = stat.Size() } - f, err := os.OpenFile(currentPackagePath, flags, 0777) - if err != nil { - return err + // Check if data fits in the current package + if pw.currentOffset+int64(len(data)) > math.MaxInt32 { + activePackageNum++ + manifest.Header.PackageCount = activePackageNum + 1 + continue // Retry with next package } - pw.fileHandle = f - pw.pkgIndex = activePackageNum + break // Fits } if _, err := pw.fileHandle.Write(data); err != nil { @@ -88,7 +120,7 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize newEntry := Frame{ PackageIndex: activePackageNum, - Offset: cEntry.Offset + cEntry.CompressedSize, + Offset: uint32(pw.currentOffset), CompressedSize: uint32(len(data)), Length: decompressedSize, } 
@@ -98,6 +130,7 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize manifest.Frames = append(manifest.Frames, newEntry) incrementSection(&manifest.Header.Frames, 1) + pw.currentOffset += int64(len(data)) return nil } @@ -262,7 +295,12 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, } compBuf := compPool.Get().([]byte) - encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + encodedData, err := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + if err != nil { + res.err = fmt.Errorf("compress frame: %w", err) + ch <- res + return + } res.data = encodedData res.decompressedSize = uint32(constructionBuf.Len()) @@ -381,3 +419,305 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return WriteFile(filepath.Join(manifestDir, packageName), &newManifest) } + +// QuickRepack modifies the existing package files in-place by appending new frames +// and updating the manifest. This avoids rewriting the entire package set. +func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageName string) error { + manifestPath := filepath.Join(dataDir, "manifests", packageName) + originalManifestPath := manifestPath + ".bak" + + // 1. 
Backup/Restore Logic: Ensure we have a clean original manifest + // Check for legacy backup first + if _, err := os.Stat(manifestPath + "_original"); err == nil { + if _, err := os.Stat(originalManifestPath); os.IsNotExist(err) { + os.Rename(manifestPath+"_original", originalManifestPath) + } + } + + if _, err := os.Stat(originalManifestPath); err == nil { + // Backup exists, load it as the source of truth + fmt.Println("Loading original manifest from backup...") + origM, err := ReadFile(originalManifestPath) + if err != nil { + return fmt.Errorf("failed to read backup manifest: %w", err) + } + *manifest = *origM + } else { + // No backup, create one from current (assumed original) + fmt.Println("Creating backup of original manifest...") + input, err := os.ReadFile(manifestPath) + if err == nil { + os.WriteFile(originalManifestPath, input, 0644) + } + } + + minSafePackageIndex := manifest.Header.PackageCount + + // 2. Open Package + pkgPath := filepath.Join(dataDir, "packages", packageName) + srcPkg, err := OpenPackage(manifest, pkgPath) + if err != nil { + return fmt.Errorf("failed to open source package: %w", err) + } + defer srcPkg.Close() + + fmt.Println("Starting Quick Swap (In-Place Modification)...") + + totalFiles := 0 + for _, chunk := range fileMap { + totalFiles += len(chunk) + } + + modifiedFilesLookupTable := make(map[[128]byte]ScannedFile, totalFiles) + frameContentsLookupTable := make(map[[128]byte]FrameContent, manifest.Header.FrameContents.ElementCount) + + for _, v := range manifest.FrameContents { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(v.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(v.FileSymbol)) + frameContentsLookupTable[buf] = v + } + + for _, fileGroup := range fileMap { + for _, v := range fileGroup { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(v.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(v.FileSymbol)) + + if _, ok := 
frameContentsLookupTable[buf]; ok { + modifiedFilesLookupTable[buf] = v + } + } + } + + fmt.Println("Checking for identical files...") + type checkItem struct { + key [128]byte + fc FrameContent + mod ScannedFile + } + var checks []checkItem + for key, modFile := range modifiedFilesLookupTable { + if fc, ok := frameContentsLookupTable[key]; ok { + checks = append(checks, checkItem{key, fc, modFile}) + } + } + + sort.Slice(checks, func(i, j int) bool { + if checks[i].fc.FrameIndex != checks[j].fc.FrameIndex { + return checks[i].fc.FrameIndex < checks[j].fc.FrameIndex + } + return checks[i].fc.DataOffset < checks[j].fc.DataOffset + }) + + skippedCount := 0 + for _, item := range checks { + newData, err := os.ReadFile(item.mod.Path) + if err != nil { + return fmt.Errorf("read input %s: %w", item.mod.Path, err) + } + + if uint32(len(newData)) == item.fc.Size { + oldData, err := srcPkg.ReadContent(&item.fc) + if err == nil && bytes.Equal(newData, oldData) { + delete(modifiedFilesLookupTable, item.key) + skippedCount++ + } + } + } + + if skippedCount > 0 { + fmt.Printf("Skipped %d identical files.\n", skippedCount) + } + + if len(modifiedFilesLookupTable) == 0 { + fmt.Println("No files changed. 
Nothing to repack.") + return nil + } + + affectedFrames := make(map[uint32]bool) + for key := range modifiedFilesLookupTable { + if fc, ok := frameContentsLookupTable[key]; ok { + affectedFrames[fc.FrameIndex] = true + } + } + fmt.Printf("Mapped %d files to modify across %d frames.\n", len(modifiedFilesLookupTable), len(affectedFrames)) + + contentsByFrame := make(map[uint32][]fcWrapper) + for k, v := range manifest.FrameContents { + if affectedFrames[v.FrameIndex] { + contentsByFrame[v.FrameIndex] = append(contentsByFrame[v.FrameIndex], fcWrapper{index: k, fc: v}) + } + } + + createdMap := make(map[uint32]bool) + for i := uint32(0); i < manifest.Header.PackageCount; i++ { + createdMap[i] = true + } + + writer := &packageWriter{ + outputDir: dataDir, + pkgName: packageName, + created: createdMap, + minPkgIndex: minSafePackageIndex, + } + defer writer.close() + + var framesToProcess []int + for idx := range affectedFrames { + framesToProcess = append(framesToProcess, int(idx)) + } + sort.Ints(framesToProcess) + + lookaheadSize := runtime.NumCPU() * 4 + futureResults := make(chan chan frameResult, lookaheadSize) + + go func() { + defer close(futureResults) + for _, idx := range framesToProcess { + resultChan := make(chan frameResult, 1) + futureResults <- resultChan + + go func(idx int, ch chan frameResult) { + v := manifest.Frames[idx] + res := frameResult{index: idx, isModified: true, decompressedSize: v.Length} + + rawReadBuf := readPool.Get().([]byte) + if cap(rawReadBuf) < int(v.CompressedSize) { + rawReadBuf = make([]byte, int(v.CompressedSize)) + } else { + rawReadBuf = rawReadBuf[:v.CompressedSize] + } + res.rawReadBuf = rawReadBuf + + if int(v.PackageIndex) >= len(srcPkg.files) { + res.err = fmt.Errorf("invalid package index %d", v.PackageIndex) + ch <- res + return + } + activeFile := srcPkg.files[v.PackageIndex] + + if v.CompressedSize > 0 { + if _, err := activeFile.ReadAt(rawReadBuf, int64(v.Offset)); err != nil { + res.err = err + ch <- res + return + 
} + } + + decompBuf := decompPool.Get().([]byte) + decompBytes, err := zstd.Decompress(decompBuf[:0], rawReadBuf) + if err != nil { + res.err = err + ch <- res + return + } + res.decompBuf = decompBytes + + bufObj := constructionPool.Get() + constructionBuf := bufObj.(*bytes.Buffer) + constructionBuf.Reset() + defer constructionPool.Put(bufObj) + + sorted := make([]fcWrapper, 0) + if contents, ok := contentsByFrame[uint32(idx)]; ok { + sorted = append(sorted, contents...) + } + sort.Slice(sorted, func(a, b int) bool { + return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset + }) + + for j := 0; j < len(sorted); j++ { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + + if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { + modData, err := os.ReadFile(modFile.Path) + if err != nil { + res.err = err + ch <- res + return + } + constructionBuf.Write(modData) + } else { + start := sorted[j].fc.DataOffset + end := start + sorted[j].fc.Size + if end > uint32(len(decompBytes)) { + res.err = fmt.Errorf("frame content out of bounds") + ch <- res + return + } + constructionBuf.Write(decompBytes[start:end]) + } + } + + compBuf := compPool.Get().([]byte) + encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + res.data = encodedData + res.decompressedSize = uint32(constructionBuf.Len()) + + ch <- res + }(idx, resultChan) + } + }() + + fmt.Println("Writing modified frames...") + for resultCh := range futureResults { + res := <-resultCh + if res.err != nil { + return res.err + } + + newFrameIndex := len(manifest.Frames) + + sorted := make([]fcWrapper, 0) + if contents, ok := contentsByFrame[uint32(res.index)]; ok { + sorted = append(sorted, contents...) 
+ } + sort.Slice(sorted, func(a, b int) bool { + return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset + }) + + currentOffset := uint32(0) + for j := 0; j < len(sorted); j++ { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + + size := sorted[j].fc.Size + if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { + size = modFile.Size + } + + manifest.FrameContents[sorted[j].index] = FrameContent{ + TypeSymbol: sorted[j].fc.TypeSymbol, + FileSymbol: sorted[j].fc.FileSymbol, + FrameIndex: uint32(newFrameIndex), + DataOffset: currentOffset, + Size: size, + Alignment: sorted[j].fc.Alignment, + } + currentOffset += size + } + + if err := writer.write(manifest, res.data, res.decompressedSize); err != nil { + return err + } + + if res.rawReadBuf != nil { + readPool.Put(res.rawReadBuf) + } + if res.decompBuf != nil { + decompPool.Put(res.decompBuf) + } + if res.data != nil { + compPool.Put(res.data) + } + } + + writer.close() + + fmt.Printf("Updating manifest: %s\n", manifestPath) + return WriteFile(manifestPath, manifest) +} From 8796eebf610cddcc7a982dcdef3f62689ae980d7 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Sat, 28 Feb 2026 17:53:35 +0000 Subject: [PATCH 06/11] fixed corrupted data block issue --- pkg/manifest/builder.go | 24 ++++++++++++++++++++---- 1 file changed, 20 insertions(+), 4 deletions(-) diff --git a/pkg/manifest/builder.go b/pkg/manifest/builder.go index b120aeb..7a0523d 100644 --- a/pkg/manifest/builder.go +++ b/pkg/manifest/builder.go @@ -17,6 +17,11 @@ const ( // MaxPackageSize is the maximum size of a single package file. MaxPackageSize = math.MaxInt32 + + // MaxFrameSize is the maximum size of a single uncompressed frame. 
+ // This prevents frames from becoming too large when grouping files, + // which can cause memory issues or overflows during decompression. + MaxFrameSize = 1 * 1024 * 1024 ) // Builder constructs packages and manifests from a set of files. @@ -112,6 +117,17 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { return nil, fmt.Errorf("read file %x/%x: %w", file.TypeSymbol, file.FileSymbol, err) } + // Check if adding this file would exceed max frame size + // We only split if the frame is not empty to ensure we don't loop infinitely on large files + if currentFrame.Len() > 0 && currentFrame.Len()+len(data) > MaxFrameSize { + if err := b.writeFrame(manifest, ¤tFrame, frameIndex); err != nil { + return nil, err + } + frameIndex++ + currentFrame.Reset() + currentOffset = 0 + } + if !file.SkipManifest { b.addFileToManifest(manifest, file, frameIndex, currentOffset) } @@ -168,14 +184,14 @@ func (b *Builder) writeCompressedFrame(manifest *Manifest, compressed []byte, un packagePath := filepath.Join(b.outputDir, "packages", fmt.Sprintf("%s_%d", b.packageName, packageIndex)) // Check if we need a new package file + // We use os.Stat to get the actual file size to ensure the manifest offset is correct var offset uint32 - if len(manifest.Frames) > 0 { - lastFrame := manifest.Frames[len(manifest.Frames)-1] - offset = lastFrame.Offset + lastFrame.CompressedSize + if info, err := os.Stat(packagePath); err == nil { + offset = uint32(info.Size()) } maxSize := int64(MaxPackageSize) - if int64(offset) >= maxSize || int64(offset)+int64(len(compressed)) > maxSize { + if int64(offset)+int64(len(compressed)) > maxSize { manifest.Header.PackageCount++ packageIndex++ packagePath = filepath.Join(b.outputDir, "packages", fmt.Sprintf("%s_%d", b.packageName, packageIndex)) From 4f3038d4db6f0ea0fccaf1bc0bb6dcab3c80518e Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Sun, 8 Mar 2026 13:18:06 +0000 Subject: [PATCH 
07/11] Fixed repack issue potentially # Updates 1. **`pkg/manifest/repack.go` (`QuickRepack`)**: Fixed out-of-bounds `manifest.Frames` truncation logic. Previously, `QuickRepack` incorrectly determined `lastUsedFrameIdx` by iterating over `FrameContents` and then truncated `manifest.Frames` to that specific index + 1. This accidentally dropped approximately 1,200 valid but unlinked frames from the back of the manifest structure. - Deleting the unlinked frames is what caused the manifest to shrink by ~20KB on repack. - Truncating these frames shifted the indices, causing the runtime engine to decompress the 0-byte package terminators where it expected valid game block allocations. Feeding zero bytes to ZStandard triggers the `Corrupted block detected` and `Unknown frame descriptor` errors in `czstdcompression.cpp`. - Replaced this behavior with a safe loop that checks and drops only the `CompressedData == 0` terminators starting from the end of the `manifest.Frames` slice. 2. **`pkg/manifest/repack.go` (`Repack`)**: Fixed out-of-bounds EOF skip behavior on `Len=0` pseudo frames. In the original `48037dc70b0ecab2` game manifest, there are exactly 3 out-of-bounds padding/dummy frames around index 10301 with length parameters set to zero but large offset and compressed size definitions. - When building a new package via `evrtools build` these 3 frames failed the data boundary checks in `ReadAt` and were therefore assigned `res.shouldSkip = true`. Dropping these frames deleted 48 bytes from the uncompressed manifest binary layout and re-aligned internal indexes contrary to the game's strict data design format. - Changed the behavior to intercept `v.Length == 0` blocks prior to discarding and pass them silently with empty definitions. This faithfully preserves the out-of-bounds indices so Echo VR processes the manifest identically. 
--- pkg/manifest/builder.go | 23 +++- pkg/manifest/manifest.go | 23 +++- pkg/manifest/package.go | 183 +++++++++++++++++++--------- pkg/manifest/repack.go | 254 ++++++++++++++++++++++++++++++++------- pkg/manifest/scanner.go | 11 +- 5 files changed, 376 insertions(+), 118 deletions(-) diff --git a/pkg/manifest/builder.go b/pkg/manifest/builder.go index 7a0523d..601df67 100644 --- a/pkg/manifest/builder.go +++ b/pkg/manifest/builder.go @@ -57,12 +57,15 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { PackageCount: 1, FrameContents: Section{ ElementSize: 32, + Unk2: 4294967296, }, Metadata: Section{ ElementSize: 40, + Unk2: 4294967296, }, Frames: Section{ ElementSize: 16, + Unk2: 4294967296, }, }, FrameContents: make([]FrameContent, 0, totalFiles), @@ -96,6 +99,7 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { currentOffset = 0 } + addedInGroup := 0 for _, file := range group { var data []byte var err error @@ -117,6 +121,14 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { return nil, fmt.Errorf("read file %x/%x: %w", file.TypeSymbol, file.FileSymbol, err) } + // Align file data within the frame (typically 8 or 16 bytes) + align := uint32(8) + padding := (align - (currentOffset % align)) % align + if padding > 0 { + currentFrame.Write(make([]byte, padding)) + currentOffset += padding + } + // Check if adding this file would exceed max frame size // We only split if the frame is not empty to ensure we don't loop infinitely on large files if currentFrame.Len() > 0 && currentFrame.Len()+len(data) > MaxFrameSize { @@ -129,15 +141,16 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { } if !file.SkipManifest { - b.addFileToManifest(manifest, file, frameIndex, currentOffset) + b.addFileToManifest(manifest, file, frameIndex, currentOffset, align) + addedInGroup++ } currentFrame.Write(data) currentOffset += uint32(len(data)) } - 
b.incrementSection(&manifest.Header.FrameContents, len(group)) - b.incrementSection(&manifest.Header.Metadata, len(group)) + b.incrementSection(&manifest.Header.FrameContents, addedInGroup) + b.incrementSection(&manifest.Header.Metadata, addedInGroup) } // Write final frame @@ -153,9 +166,7 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { return manifest, nil } -func (b *Builder) addFileToManifest(manifest *Manifest, file ScannedFile, frameIndex, offset uint32) { - alignment := uint32(1) - +func (b *Builder) addFileToManifest(manifest *Manifest, file ScannedFile, frameIndex, offset, alignment uint32) { manifest.FrameContents = append(manifest.FrameContents, FrameContent{ TypeSymbol: file.TypeSymbol, FileSymbol: file.FileSymbol, diff --git a/pkg/manifest/manifest.go b/pkg/manifest/manifest.go index 1f28443..7f6591e 100644 --- a/pkg/manifest/manifest.go +++ b/pkg/manifest/manifest.go @@ -11,15 +11,15 @@ import ( // Binary sizes for manifest structures const ( - HeaderSize = 192 // Fixed header size: + HeaderSize = 192 // Fixed header size: // 4 (PackageCount) + 4 (Unk1) + 8 (Unk2) // + SectionSize (FrameContents) + 16 bytes padding // + SectionSize (Metadata) + 16 bytes padding // + SectionSize (Frames) - SectionSize = 48 // 6 * 8 bytes (Section has 6 uint64 fields) - FrameContentSize = 32 // 8 + 8 + 4 + 4 + 4 + 4 bytes - FileMetadataSize = 40 // 5 * 8 bytes - FrameSize = 16 // 4 * 4 bytes + SectionSize = 48 // 6 * 8 bytes (Section has 6 uint64 fields) + FrameContentSize = 32 // 8 + 8 + 4 + 4 + 4 + 4 bytes + FileMetadataSize = 40 // 5 * 8 bytes + FrameSize = 16 // 4 * 4 bytes ) // Manifest represents a parsed EVR manifest file. @@ -269,6 +269,19 @@ func ReadFile(path string) (*Manifest, error) { // WriteFile writes a manifest to a file. 
func WriteFile(path string, m *Manifest) error { + // Synchronize header counts with actual slice lengths to prevent size discrepancies + m.Header.FrameContents.Count = uint64(len(m.FrameContents)) + m.Header.FrameContents.ElementCount = uint64(len(m.FrameContents)) + m.Header.FrameContents.Length = m.Header.FrameContents.ElementSize * m.Header.FrameContents.Count + + m.Header.Metadata.Count = uint64(len(m.Metadata)) + m.Header.Metadata.ElementCount = uint64(len(m.Metadata)) + m.Header.Metadata.Length = m.Header.Metadata.ElementSize * m.Header.Metadata.Count + + m.Header.Frames.Count = uint64(len(m.Frames)) + m.Header.Frames.ElementCount = uint64(len(m.Frames)) + m.Header.Frames.Length = m.Header.Frames.ElementSize * m.Header.Frames.Count + data, err := m.MarshalBinary() if err != nil { return fmt.Errorf("marshal manifest: %w", err) diff --git a/pkg/manifest/package.go b/pkg/manifest/package.go index e11feb3..64cb448 100644 --- a/pkg/manifest/package.go +++ b/pkg/manifest/package.go @@ -5,7 +5,9 @@ import ( "io" "os" "path/filepath" + "runtime" "strconv" + "sync" "github.com/DataDog/zstd" ) @@ -163,79 +165,140 @@ func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { frameIndex[fc.FrameIndex] = append(frameIndex[fc.FrameIndex], fc) } - ctx := zstd.NewCtx() - compressed := make([]byte, 32*1024*1024) - decompressed := make([]byte, 32*1024*1024) - // Pre-create directory cache to avoid repeated MkdirAll calls + var dirMu sync.Mutex createdDirs := make(map[string]struct{}) - for frameIdx, frame := range p.manifest.Frames { - if frame.Length == 0 || frame.CompressedSize == 0 { - continue - } - - // Ensure buffers are large enough - if int(frame.CompressedSize) > len(compressed) { - compressed = make([]byte, frame.CompressedSize) - } - if int(frame.Length) > len(decompressed) { - decompressed = make([]byte, frame.Length) - } - - // Read compressed data - file := p.files[frame.PackageIndex] - if _, err := file.Seek(int64(frame.Offset), io.SeekStart); 
err != nil { - return fmt.Errorf("seek frame %d: %w", frameIdx, err) - } + // Worker pool for parallel extraction + numWorkers := runtime.NumCPU() + type job struct { + index int + frame Frame + } + jobs := make(chan job, numWorkers) + errs := make(chan error, 1) + var wg sync.WaitGroup + + for i := 0; i < numWorkers; i++ { + wg.Add(1) + go func() { + defer wg.Done() + + // Thread-local buffers and context + ctx := zstd.NewCtx() + var compressed []byte + var decompressed []byte + + for j := range jobs { + frame := j.frame + frameIdx := j.index + + // Ensure buffers are large enough + if int(frame.CompressedSize) > cap(compressed) { + compressed = make([]byte, frame.CompressedSize) + } + compressed = compressed[:frame.CompressedSize] - if _, err := io.ReadFull(file, compressed[:frame.CompressedSize]); err != nil { - return fmt.Errorf("read frame %d: %w", frameIdx, err) - } + if int(frame.Length) > cap(decompressed) { + decompressed = make([]byte, frame.Length) + } + decompressed = decompressed[:frame.Length] + + // Read compressed data using ReadAt (thread-safe) + file := p.files[frame.PackageIndex] + if _, err := file.ReadAt(compressed, int64(frame.Offset)); err != nil { + select { + case errs <- fmt.Errorf("read frame %d: %w", frameIdx, err): + default: + } + return + } - // Decompress - if _, err := ctx.Decompress(decompressed[:frame.Length], compressed[:frame.CompressedSize]); err != nil { - return fmt.Errorf("decompress frame %d: %w", frameIdx, err) - } + // Decompress + var err error + decompressed, err = ctx.Decompress(decompressed[:0], compressed) + if err != nil { + select { + case errs <- fmt.Errorf("decompress frame %d: %w", frameIdx, err): + default: + } + return + } - // Extract files from this frame using pre-built index - contents := frameIndex[uint32(frameIdx)] - for _, fc := range contents { - if len(cfg.allowedTypes) > 0 && !cfg.allowedTypes[fc.TypeSymbol] { - continue + // Extract files from this frame + contents := frameIndex[uint32(frameIdx)] + 
for _, fc := range contents { + if len(cfg.allowedTypes) > 0 && !cfg.allowedTypes[fc.TypeSymbol] { + continue + } + + var fileName string + if cfg.decimalNames { + fileName = strconv.FormatInt(fc.FileSymbol, 10) + } else { + fileName = strconv.FormatUint(uint64(fc.FileSymbol), 16) + } + fileType := strconv.FormatUint(uint64(fc.TypeSymbol), 16) + + var basePath string + if cfg.preserveGroups { + basePath = filepath.Join(outputDir, strconv.FormatUint(uint64(fc.FrameIndex), 10), fileType) + } else { + basePath = filepath.Join(outputDir, fileType) + } + + // Thread-safe directory creation + dirMu.Lock() + if _, exists := createdDirs[basePath]; !exists { + if err := os.MkdirAll(basePath, 0755); err != nil { + dirMu.Unlock() + select { + case errs <- fmt.Errorf("create dir %s: %w", basePath, err): + default: + } + return + } + createdDirs[basePath] = struct{}{} + } + dirMu.Unlock() + + filePath := filepath.Join(basePath, fileName) + if err := os.WriteFile(filePath, decompressed[fc.DataOffset:fc.DataOffset+fc.Size], 0644); err != nil { + select { + case errs <- fmt.Errorf("write file %s: %w", filePath, err): + default: + } + return + } + } } + }() + } - var fileName string - if cfg.decimalNames { - fileName = strconv.FormatInt(fc.FileSymbol, 10) - } else { - fileName = strconv.FormatUint(uint64(fc.FileSymbol), 16) + // Feed jobs + go func() { + for frameIdx, frame := range p.manifest.Frames { + if frame.Length == 0 || frame.CompressedSize == 0 { + continue } - fileType := strconv.FormatUint(uint64(fc.TypeSymbol), 16) - - var basePath string - if cfg.preserveGroups { - basePath = filepath.Join(outputDir, strconv.FormatUint(uint64(fc.FrameIndex), 10), fileType) - } else { - basePath = filepath.Join(outputDir, fileType) + select { + case jobs <- job{frameIdx, frame}: + case <-errs: + close(jobs) + return } + } + close(jobs) + }() - // Only create directory if not already created - if _, exists := createdDirs[basePath]; !exists { - if err := os.MkdirAll(basePath, 0755); err 
!= nil { - return fmt.Errorf("create dir %s: %w", basePath, err) - } - createdDirs[basePath] = struct{}{} - } + wg.Wait() - filePath := filepath.Join(basePath, fileName) - if err := os.WriteFile(filePath, decompressed[fc.DataOffset:fc.DataOffset+fc.Size], 0644); err != nil { - return fmt.Errorf("write file %s: %w", filePath, err) - } - } + select { + case err := <-errs: + return err + default: + return nil } - - return nil } // extractConfig holds extraction options. diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index 25fffe3..e36f956 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -22,6 +22,8 @@ var ( constructionPool = sync.Pool{New: func() interface{} { return bytes.NewBuffer(make([]byte, 0, 4*1024*1024)) }} ) +const MaxRepackFrameSize = 1 * 1024 * 1024 + type frameResult struct { index int data []byte @@ -49,7 +51,7 @@ type packageWriter struct { } func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize uint32) error { - os.MkdirAll(fmt.Sprintf("%s/packages", pw.outputDir), 0777) + os.MkdirAll(fmt.Sprintf("%s/packages", pw.outputDir), 0755) cEntry := Frame{} if len(manifest.Frames) > 0 { @@ -91,7 +93,7 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize pw.created[activePackageNum] = true } - f, err := os.OpenFile(currentPackagePath, flags, 0777) + f, err := os.OpenFile(currentPackagePath, flags, 0644) if err != nil { return err } @@ -124,9 +126,6 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize CompressedSize: uint32(len(data)), Length: decompressedSize, } - if int64(newEntry.Offset)+int64(newEntry.CompressedSize) > math.MaxInt32 { - newEntry.Offset = 0 - } manifest.Frames = append(manifest.Frames, newEntry) incrementSection(&manifest.Header.Frames, 1) @@ -159,6 +158,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, modifiedFilesLookupTable := make(map[[128]byte]ScannedFile, totalFiles) 
frameContentsLookupTable := make(map[[128]byte]FrameContent, manifest.Header.FrameContents.ElementCount) modifiedFrames := make(map[uint32]bool) + newFiles := make([]ScannedFile, 0) for _, v := range manifest.FrameContents { buf := [128]byte{} @@ -176,6 +176,8 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, if content, ok := frameContentsLookupTable[buf]; ok { modifiedFrames[content.FrameIndex] = true modifiedFilesLookupTable[buf] = v + } else { + newFiles = append(newFiles, v) } } } @@ -236,7 +238,8 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, if v.CompressedSize > 0 { if _, err := activeFile.ReadAt(rawReadBuf, int64(v.Offset)); err != nil { if v.Length == 0 { - res.shouldSkip = true + // For out-of-bounds dummy frames with Len:0, preserve them without skipping to match exact engine structure. + res.data = []byte{} // Pass empty data ch <- res return } @@ -274,7 +277,18 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset }) + currentOffset := uint32(0) for j := 0; j < len(sorted); j++ { + align := sorted[j].fc.Alignment + if align == 0 { + align = 8 + } + padding := (align - (currentOffset % align)) % align + if padding > 0 { + constructionBuf.Write(make([]byte, padding)) + currentOffset += padding + } + buf := [128]byte{} binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) @@ -287,10 +301,12 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return } constructionBuf.Write(modData) + currentOffset += uint32(len(modData)) } else { start := sorted[j].fc.DataOffset end := start + sorted[j].fc.Size constructionBuf.Write(decompBytes[start:end]) + currentOffset += sorted[j].fc.Size } } @@ -329,34 +345,42 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, 
packageName, continue } - if res.isModified { - sorted := make([]fcWrapper, 0) - if contents, ok := contentsByFrame[uint32(res.index)]; ok { - sorted = append(sorted, contents...) - } + newFrameIdx := uint32(len(newManifest.Frames)) + + // Update all contents belonging to this frame (modified or not) to account for shifts + if contents, ok := contentsByFrame[uint32(res.index)]; ok { + sorted := make([]fcWrapper, len(contents)) + copy(sorted, contents) sort.Slice(sorted, func(a, b int) bool { return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset }) currentOffset := uint32(0) for j := 0; j < len(sorted); j++ { - buf := [128]byte{} - binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) - binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + fc := &newManifest.FrameContents[sorted[j].index] - size := sorted[j].fc.Size - if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { - size = modFile.Size + align := fc.Alignment + if align == 0 { + align = 8 } + padding := (align - (currentOffset % align)) % align + currentOffset += padding - newManifest.FrameContents[sorted[j].index] = FrameContent{ - TypeSymbol: sorted[j].fc.TypeSymbol, - FileSymbol: sorted[j].fc.FileSymbol, - FrameIndex: sorted[j].fc.FrameIndex, - DataOffset: currentOffset, - Size: size, - Alignment: sorted[j].fc.Alignment, + size := fc.Size + if res.isModified { + buf := [128]byte{} + binary.LittleEndian.PutUint64(buf[0:64], uint64(fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(fc.FileSymbol)) + if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { + size = modFile.Size + } } + + fc.FrameIndex = newFrameIdx + fc.DataOffset = currentOffset + fc.Size = size + fc.Alignment = align + currentOffset += size } } @@ -382,17 +406,110 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, } } - writer.close() + // Update Header PackageCount to match what 
was actually written + if len(newManifest.Frames) > 0 { + newManifest.Header.PackageCount = newManifest.Frames[len(newManifest.Frames)-1].PackageIndex + 1 + } - actualPkgCount := uint32(0) - for { - path := fmt.Sprintf("%s/packages/%s_%d", outputDir, packageName, actualPkgCount) - if _, err := os.Stat(path); err != nil { - break + // Process new files (append to end of package) + if len(newFiles) > 0 { + fmt.Printf("Adding %d new files...\n", len(newFiles)) + + sort.Slice(newFiles, func(i, j int) bool { + if newFiles[i].TypeSymbol != newFiles[j].TypeSymbol { + return newFiles[i].TypeSymbol < newFiles[j].TypeSymbol + } + return newFiles[i].FileSymbol < newFiles[j].FileSymbol + }) + + var currentFrame bytes.Buffer + var currentFrameFiles []ScannedFile + + flushFrame := func() error { + if currentFrame.Len() == 0 { + return nil + } + + compBuf := compPool.Get().([]byte) + encodedData, err := zstd.CompressLevel(compBuf[:0], currentFrame.Bytes(), zstd.BestSpeed) + if err != nil { + return err + } + + if err := writer.write(&newManifest, encodedData, uint32(currentFrame.Len())); err != nil { + return err + } + + frameIdx := uint32(len(newManifest.Frames) - 1) + currentOffset := uint32(0) + + for _, file := range currentFrameFiles { + align := uint32(8) + padding := (align - (currentOffset % align)) % align + currentOffset += padding + + newManifest.FrameContents = append(newManifest.FrameContents, FrameContent{ + TypeSymbol: file.TypeSymbol, + FileSymbol: file.FileSymbol, + FrameIndex: frameIdx, + DataOffset: currentOffset, + Size: file.Size, + Alignment: align, + }) + + newManifest.Metadata = append(newManifest.Metadata, FileMetadata{ + TypeSymbol: file.TypeSymbol, + FileSymbol: file.FileSymbol, + }) + + currentOffset += file.Size + } + + // Align file data within the frame + align := uint32(8) + padding := (align - (uint32(currentFrame.Len()) % align)) % align + if padding > 0 { + currentFrame.Write(make([]byte, padding)) + } + + compPool.Put(encodedData) + 
currentFrame.Reset() + currentFrameFiles = nil + return nil } - actualPkgCount++ + + for _, file := range newFiles { + data, err := os.ReadFile(file.Path) + if err != nil { + return fmt.Errorf("read new file %s: %w", file.Path, err) + } + + align := uint32(8) + padding := (align - (uint32(currentFrame.Len()) % align)) % align + + if currentFrame.Len() > 0 && currentFrame.Len()+int(padding)+len(data) > MaxRepackFrameSize { + if err := flushFrame(); err != nil { + return err + } + padding = 0 + } + + if padding > 0 { + currentFrame.Write(make([]byte, padding)) + } + + currentFrame.Write(data) + currentFrameFiles = append(currentFrameFiles, file) + } + if err := flushFrame(); err != nil { + return err + } + + incrementSection(&newManifest.Header.FrameContents, len(newFiles)) + incrementSection(&newManifest.Header.Metadata, len(newFiles)) } - newManifest.Header.PackageCount = actualPkgCount + + writer.close() for i := uint32(0); i < newManifest.Header.PackageCount; i++ { path := fmt.Sprintf("%s/packages/%s_%d", outputDir, packageName, i) @@ -543,6 +660,17 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa } fmt.Printf("Mapped %d files to modify across %d frames.\n", len(modifiedFilesLookupTable), len(affectedFrames)) + // Truncate Frames to remove old terminators and null frames from the end of the package before appending new ones + for len(manifest.Frames) > 0 { + lastIdx := len(manifest.Frames) - 1 + f := manifest.Frames[lastIdx] + if f.CompressedSize == 0 && f.Length == 0 { + manifest.Frames = manifest.Frames[:lastIdx] + } else { + break + } + } + contentsByFrame := make(map[uint32][]fcWrapper) for k, v := range manifest.FrameContents { if affectedFrames[v.FrameIndex] { @@ -628,6 +756,15 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa }) for j := 0; j < len(sorted); j++ { + align := sorted[j].fc.Alignment + if align == 0 { + align = 8 + } + padding := (align - (uint32(constructionBuf.Len()) % 
align)) % align + if padding > 0 { + constructionBuf.Write(make([]byte, padding)) + } + buf := [128]byte{} binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) @@ -653,7 +790,12 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa } compBuf := compPool.Get().([]byte) - encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + encodedData, err := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) + if err != nil { + res.err = err + ch <- res + return + } res.data = encodedData res.decompressedSize = uint32(constructionBuf.Len()) @@ -681,23 +823,29 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa currentOffset := uint32(0) for j := 0; j < len(sorted); j++ { + fc := &manifest.FrameContents[sorted[j].index] + + align := fc.Alignment + if align == 0 { + align = 8 + } + padding := (align - (currentOffset % align)) % align + currentOffset += padding + buf := [128]byte{} - binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) - binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) + binary.LittleEndian.PutUint64(buf[0:64], uint64(fc.TypeSymbol)) + binary.LittleEndian.PutUint64(buf[64:128], uint64(fc.FileSymbol)) - size := sorted[j].fc.Size + size := fc.Size if modFile, exists := modifiedFilesLookupTable[buf]; exists && modFile.FileSymbol != 0 { size = modFile.Size } - manifest.FrameContents[sorted[j].index] = FrameContent{ - TypeSymbol: sorted[j].fc.TypeSymbol, - FileSymbol: sorted[j].fc.FileSymbol, - FrameIndex: uint32(newFrameIndex), - DataOffset: currentOffset, - Size: size, - Alignment: sorted[j].fc.Alignment, - } + fc.FrameIndex = uint32(newFrameIndex) + fc.DataOffset = currentOffset + fc.Size = size + fc.Alignment = align + currentOffset += size } @@ -718,6 +866,24 @@ func QuickRepack(manifest *Manifest, 
fileMap [][]ScannedFile, dataDir, packageNa writer.close() + // Re-add terminators for all packages + for i := uint32(0); i < manifest.Header.PackageCount; i++ { + path := filepath.Join(dataDir, "packages", fmt.Sprintf("%s_%d", packageName, i)) + stats, err := os.Stat(path) + if err != nil { + continue + } + manifest.Frames = append(manifest.Frames, Frame{ + PackageIndex: i, + Offset: uint32(stats.Size()), + }) + incrementSection(&manifest.Header.Frames, 1) + } + + // Final null frame + manifest.Frames = append(manifest.Frames, Frame{}) + incrementSection(&manifest.Header.Frames, 1) + fmt.Printf("Updating manifest: %s\n", manifestPath) return WriteFile(manifestPath, manifest) } diff --git a/pkg/manifest/scanner.go b/pkg/manifest/scanner.go index 1618644..1f2592a 100644 --- a/pkg/manifest/scanner.go +++ b/pkg/manifest/scanner.go @@ -2,7 +2,7 @@ package manifest import ( "fmt" - "os" + "io/fs" "path/filepath" "strconv" "strings" @@ -26,11 +26,11 @@ type ScannedFile struct { func ScanFiles(inputDir string) ([][]ScannedFile, error) { var files [][]ScannedFile - err := filepath.Walk(inputDir, func(path string, info os.FileInfo, err error) error { + err := filepath.WalkDir(inputDir, func(path string, d fs.DirEntry, err error) error { if err != nil { return err } - if info.IsDir() { + if d.IsDir() { return nil } @@ -80,6 +80,11 @@ func ScanFiles(inputDir string) ([][]ScannedFile, error) { return nil } + info, err := d.Info() + if err != nil { + return fmt.Errorf("get file info %s: %w", path, err) + } + size := info.Size() const maxUint32 = int64(^uint32(0)) if size < 0 || size > maxUint32 { From 15abc8c1e77b11f4cb548d7e449c4155f48eb773 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Sun, 22 Mar 2026 18:33:12 +0000 Subject: [PATCH 08/11] Fixed issue with offset being off from 1 to 8 Manifest offset was set to 1 by default but packages set to 8, no clue how i messed up this bad. 
--- pkg/archive/archive_test.go | 29 ++++++++++++++++++++++ pkg/archive/header.go | 15 +++++++++--- pkg/manifest/builder.go | 2 +- pkg/manifest/repack.go | 49 ++++++++++++++++++++++++++++--------- 4 files changed, 79 insertions(+), 16 deletions(-) diff --git a/pkg/archive/archive_test.go b/pkg/archive/archive_test.go index 3a68de6..35210f2 100644 --- a/pkg/archive/archive_test.go +++ b/pkg/archive/archive_test.go @@ -2,6 +2,7 @@ package archive import ( "bytes" + "encoding/binary" "testing" ) @@ -52,6 +53,34 @@ func TestHeader(t *testing.T) { t.Error("expected error for zero length") } }) + + t.Run("HeaderLength24", func(t *testing.T) { + h := &Header{ + Magic: Magic, + HeaderLength: 24, + Length: 1024, + CompressedLength: 512, + } + if err := h.Validate(); err != nil { + t.Errorf("unexpected error for header length 24: %v", err) + } + + // Test UnmarshalBinary with 24-byte header data (Total 32 bytes) + data := make([]byte, 32) + copy(data[0:4], Magic[:]) + binary.LittleEndian.PutUint32(data[4:8], 24) + binary.LittleEndian.PutUint64(data[8:16], 1024) + binary.LittleEndian.PutUint64(data[16:24], 512) + + decoded := &Header{} + if err := decoded.UnmarshalBinary(data); err != nil { + t.Fatalf("unmarshal header length 24: %v", err) + } + + if decoded.HeaderLength != 24 { + t.Errorf("expected header length 24, got %d", decoded.HeaderLength) + } + }) } func TestReadWrite(t *testing.T) { diff --git a/pkg/archive/header.go b/pkg/archive/header.go index 721fd82..f761ccd 100644 --- a/pkg/archive/header.go +++ b/pkg/archive/header.go @@ -30,8 +30,8 @@ func (h *Header) Validate() error { if h.Magic != Magic { return fmt.Errorf("invalid magic: expected %x, got %x", Magic, h.Magic) } - if h.HeaderLength != 16 { - return fmt.Errorf("invalid header length: expected 16, got %d", h.HeaderLength) + if h.HeaderLength != 16 && h.HeaderLength != 24 { + return fmt.Errorf("invalid header length: expected 16 or 24, got %d", h.HeaderLength) } if h.Length == 0 { return 
fmt.Errorf("uncompressed size is zero") @@ -62,9 +62,16 @@ func (h *Header) EncodeTo(buf []byte) { // UnmarshalBinary decodes the header from binary format. // Uses direct decoding to avoid allocations. func (h *Header) UnmarshalBinary(data []byte) error { - if len(data) < HeaderSize { - return fmt.Errorf("header data too short: need %d, got %d", HeaderSize, len(data)) + if len(data) < 8 { + return fmt.Errorf("header data too short: need 8 for length, got %d", len(data)) } + h.HeaderLength = binary.LittleEndian.Uint32(data[4:8]) + + requiredSize := 8 + int(h.HeaderLength) + if len(data) < requiredSize { + return fmt.Errorf("header data too short for HeaderLength %d: need %d, got %d", h.HeaderLength, requiredSize, len(data)) + } + h.DecodeFrom(data) return h.Validate() } diff --git a/pkg/manifest/builder.go b/pkg/manifest/builder.go index 601df67..ab2b9e6 100644 --- a/pkg/manifest/builder.go +++ b/pkg/manifest/builder.go @@ -122,7 +122,7 @@ func (b *Builder) Build(fileGroups [][]ScannedFile) (*Manifest, error) { } // Align file data within the frame (typically 8 or 16 bytes) - align := uint32(8) + align := uint32(1) padding := (align - (currentOffset % align)) % align if padding > 0 { currentFrame.Write(make([]byte, padding)) diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index e36f956..c655c68 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -4,6 +4,7 @@ import ( "bytes" "encoding/binary" "fmt" + "io" "math" "os" "path/filepath" @@ -86,10 +87,10 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize } currentPackagePath := fmt.Sprintf("%s/packages/%s_%d", pw.outputDir, pw.pkgName, activePackageNum) - flags := os.O_RDWR | os.O_CREATE | os.O_APPEND + flags := os.O_RDWR | os.O_CREATE if !pw.created[activePackageNum] { - flags = os.O_RDWR | os.O_CREATE | os.O_TRUNC + flags |= os.O_TRUNC pw.created[activePackageNum] = true } @@ -100,11 +101,22 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, 
decompressedSize pw.fileHandle = f pw.pkgIndex = activePackageNum - stat, err := pw.fileHandle.Stat() + // Get the actual size/offset + size, err := f.Seek(0, io.SeekEnd) if err != nil { - return fmt.Errorf("stat package file: %w", err) + return fmt.Errorf("seek to end of package: %w", err) } - pw.currentOffset = stat.Size() + + // Add 1-byte alignment padding (effectively no padding) + if size > 0 && size%1 != 0 { + padding := 1 - (size % 1) + if _, err := f.Write(make([]byte, padding)); err != nil { + return fmt.Errorf("pad package for alignment: %w", err) + } + size += padding + } + + pw.currentOffset = size } // Check if data fits in the current package @@ -281,7 +293,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, for j := 0; j < len(sorted); j++ { align := sorted[j].fc.Alignment if align == 0 { - align = 8 + align = 1 } padding := (align - (currentOffset % align)) % align if padding > 0 { @@ -361,7 +373,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, align := fc.Alignment if align == 0 { - align = 8 + align = 1 } padding := (align - (currentOffset % align)) % align currentOffset += padding @@ -444,7 +456,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, currentOffset := uint32(0) for _, file := range currentFrameFiles { - align := uint32(8) + align := uint32(1) padding := (align - (currentOffset % align)) % align currentOffset += padding @@ -758,7 +770,7 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa for j := 0; j < len(sorted); j++ { align := sorted[j].fc.Alignment if align == 0 { - align = 8 + align = 1 } padding := (align - (uint32(constructionBuf.Len()) % align)) % align if padding > 0 { @@ -827,7 +839,7 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa align := fc.Alignment if align == 0 { - align = 8 + align = 1 } padding := (align - (currentOffset % align)) % align 
currentOffset += padding @@ -866,13 +878,28 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa writer.close() - // Re-add terminators for all packages + // Re-add terminators for all packages at their current positions for i := uint32(0); i < manifest.Header.PackageCount; i++ { path := filepath.Join(dataDir, "packages", fmt.Sprintf("%s_%d", packageName, i)) stats, err := os.Stat(path) if err != nil { continue } + // The RAD engine expects the terminator frame Offset to be exactly at the file end. + // Align the file end to 1 byte first for consistency if we modified this package. + if stats.Size()%1 != 0 { + f, err := os.OpenFile(path, os.O_RDWR, 0644) + if err == nil { + size, _ := f.Seek(0, io.SeekEnd) + if size%1 != 0 { + padding := 1 - (size % 1) + f.Write(make([]byte, int(padding))) + } + f.Close() + stats, _ = os.Stat(path) // Re-stat for new size + } + } + manifest.Frames = append(manifest.Frames, Frame{ PackageIndex: i, Offset: uint32(stats.Size()), From 1de1469e55089cc8064cd57e1218f2ad873de2e9 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Thu, 26 Mar 2026 01:00:54 +0000 Subject: [PATCH 09/11] Removed terminator frames --- pkg/manifest/repack.go | 57 ++++++++++++++++-------------------------- 1 file changed, 22 insertions(+), 35 deletions(-) diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index c655c68..cda394a 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -580,9 +580,26 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa } } - minSafePackageIndex := manifest.Header.PackageCount + // 2. Find original package sizes from terminator frames and truncate + // packages back to their original sizes (removes data from previous QuickRepack runs). 
+ originalPkgSizes := make(map[uint32]int64) + for _, f := range manifest.Frames { + if f.CompressedSize == 0 && f.Length == 0 && f.Offset > 0 { + originalPkgSizes[f.PackageIndex] = int64(f.Offset) + } + } - // 2. Open Package + for pkgIdx, origSize := range originalPkgSizes { + pkgFilePath := filepath.Join(dataDir, "packages", fmt.Sprintf("%s_%d", packageName, pkgIdx)) + if info, err := os.Stat(pkgFilePath); err == nil && info.Size() > origSize { + fmt.Printf("Truncating package %d to original size %d (was %d)\n", pkgIdx, origSize, info.Size()) + if err := os.Truncate(pkgFilePath, origSize); err != nil { + return fmt.Errorf("truncate package %d: %w", pkgIdx, err) + } + } + } + + // 3. Open Package pkgPath := filepath.Join(dataDir, "packages", packageName) srcPkg, err := OpenPackage(manifest, pkgPath) if err != nil { @@ -690,6 +707,9 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa } } + // Mark all original packages as already created (don't truncate them on open). + // New frames go into new package files to avoid modifying originals. + minSafePackageIndex := manifest.Header.PackageCount createdMap := make(map[uint32]bool) for i := uint32(0); i < manifest.Header.PackageCount; i++ { createdMap[i] = true @@ -878,39 +898,6 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa writer.close() - // Re-add terminators for all packages at their current positions - for i := uint32(0); i < manifest.Header.PackageCount; i++ { - path := filepath.Join(dataDir, "packages", fmt.Sprintf("%s_%d", packageName, i)) - stats, err := os.Stat(path) - if err != nil { - continue - } - // The RAD engine expects the terminator frame Offset to be exactly at the file end. - // Align the file end to 1 byte first for consistency if we modified this package. 
- if stats.Size()%1 != 0 { - f, err := os.OpenFile(path, os.O_RDWR, 0644) - if err == nil { - size, _ := f.Seek(0, io.SeekEnd) - if size%1 != 0 { - padding := 1 - (size % 1) - f.Write(make([]byte, int(padding))) - } - f.Close() - stats, _ = os.Stat(path) // Re-stat for new size - } - } - - manifest.Frames = append(manifest.Frames, Frame{ - PackageIndex: i, - Offset: uint32(stats.Size()), - }) - incrementSection(&manifest.Header.Frames, 1) - } - - // Final null frame - manifest.Frames = append(manifest.Frames, Frame{}) - incrementSection(&manifest.Header.Frames, 1) - fmt.Printf("Updating manifest: %s\n", manifestPath) return WriteFile(manifestPath, manifest) } From f5705048be7f099597e78d4c68174e96e7fb10ab Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Sat, 28 Mar 2026 00:06:04 +0000 Subject: [PATCH 10/11] Fixed issue with padding --- pkg/manifest/repack.go | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index cda394a..f1018aa 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -788,10 +788,10 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa }) for j := 0; j < len(sorted); j++ { - align := sorted[j].fc.Alignment - if align == 0 { - align = 1 - } + // Original game engine completely ignores the FrameContent.Alignment + // property when packing frames (tightly packs all files with 0 padding). + align := uint32(1) + padding := (align - (uint32(constructionBuf.Len()) % align)) % align if padding > 0 { constructionBuf.Write(make([]byte, padding)) @@ -857,10 +857,9 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa for j := 0; j < len(sorted); j++ { fc := &manifest.FrameContents[sorted[j].index] - align := fc.Alignment - if align == 0 { - align = 1 - } + // Original game tightly packs frames unconditionally, ignoring fc.Alignment. 
+ align := uint32(1) + padding := (align - (currentOffset % align)) % align currentOffset += padding @@ -876,7 +875,8 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa fc.FrameIndex = uint32(newFrameIndex) fc.DataOffset = currentOffset fc.Size = size - fc.Alignment = align + // Retain original alignment metadata for memory allocation + fc.Alignment = sorted[j].fc.Alignment currentOffset += size } From cbe12911034394cf1ed60b4dacf53c758dff4587 Mon Sep 17 00:00:00 2001 From: he_is_the_cat <125207670+heisthecat31@users.noreply.github.com> Date: Fri, 3 Apr 2026 01:29:25 +0100 Subject: [PATCH 11/11] Repack ensures framegroup name is preserved --- cmd/evrtools/main.go | 8 +- go.mod | 2 +- pkg/archive/benchmark_test.go | 188 ---------------------------------- pkg/archive/reader.go | 66 ++++++++++-- pkg/archive/writer.go | 17 ++- pkg/manifest/builder.go | 4 +- pkg/manifest/manifest.go | 7 +- pkg/manifest/package.go | 6 +- pkg/manifest/repack.go | 180 +++++++++++++++++++------------- 9 files changed, 192 insertions(+), 286 deletions(-) delete mode 100644 pkg/archive/benchmark_test.go diff --git a/cmd/evrtools/main.go b/cmd/evrtools/main.go index a6cf782..5e67fef 100644 --- a/cmd/evrtools/main.go +++ b/cmd/evrtools/main.go @@ -34,8 +34,8 @@ func init() { flag.BoolVar(&preserveGroups, "preserve-groups", false, "Preserve frame grouping in output") flag.BoolVar(&forceOverwrite, "force", false, "Allow non-empty output directory") flag.BoolVar(&useDecimalName, "decimal-names", false, "Use decimal format for filenames (default is hex)") - flag.StringVar(&exportTypes, "export", "", "Comma-separated list of types to export (textures, tints)") - flag.BoolVar(&quickMode, "quick", false, "Quick swap mode (modifies game files in-place)") + flag.StringVar(&exportTypes, "export", "", "Comma-separated list of types to export (textures, tints, audio)") + flag.BoolVar(&quickMode, "quick", false, "Quick swap mode (appends new package files, updates manifest 
in-place)") } func main() { @@ -160,6 +160,10 @@ func runExtract() error { int64(uint64(0x24CBFD54E9A7F2EA)), // Folder: 24cbfd54e9a7f2ea int64(uint64(0x32f30fe361939dee)), // 3671295590506143214 ) + case "audio": + filterTypes = append(filterTypes, + int64(uint64(0x6d358eef7bb85a98)), // Audio folder + ) } } } diff --git a/go.mod b/go.mod index 558c3a2..f700d31 100644 --- a/go.mod +++ b/go.mod @@ -1,5 +1,5 @@ module github.com/EchoTools/evrFileTools -go 1.22.0 +go 1.24 require github.com/DataDog/zstd v1.5.7 diff --git a/pkg/archive/benchmark_test.go b/pkg/archive/benchmark_test.go deleted file mode 100644 index b2a082d..0000000 --- a/pkg/archive/benchmark_test.go +++ /dev/null @@ -1,188 +0,0 @@ -package archive - -import ( - "bytes" - "testing" - - "github.com/DataDog/zstd" -) - -// BenchmarkCompression benchmarks compression with different configurations. -func BenchmarkCompression(b *testing.B) { - data := make([]byte, 256*1024) // 256KB - for i := range data { - data[i] = byte(i % 256) - } - - b.Run("Compress_BestSpeed", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, err := zstd.CompressLevel(nil, data, zstd.BestSpeed) - if err != nil { - b.Fatal(err) - } - } - }) - - b.Run("Compress_Default", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, err := zstd.CompressLevel(nil, data, zstd.DefaultCompression) - if err != nil { - b.Fatal(err) - } - } - }) -} - -// BenchmarkDecompression benchmarks decompression with context reuse. 
-func BenchmarkDecompression(b *testing.B) { - original := make([]byte, 64*1024) // 64KB - for i := range original { - original[i] = byte(i % 256) - } - - compressed, _ := zstd.Compress(nil, original) - - b.Run("WithoutContext", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, err := zstd.Decompress(nil, compressed) - if err != nil { - b.Fatal(err) - } - } - }) - - b.Run("WithContext", func(b *testing.B) { - ctx := zstd.NewCtx() - dst := make([]byte, len(original)) - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, err := ctx.Decompress(dst, compressed) - if err != nil { - b.Fatal(err) - } - } - }) -} - -// BenchmarkHeader benchmarks header operations. -func BenchmarkHeader(b *testing.B) { - header := &Header{ - Magic: Magic, - HeaderLength: 16, - Length: 1024 * 1024, - CompressedLength: 512 * 1024, - } - - b.Run("Marshal", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - _, err := header.MarshalBinary() - if err != nil { - b.Fatal(err) - } - } - }) - - b.Run("EncodeTo", func(b *testing.B) { - buf := make([]byte, HeaderSize) - b.ResetTimer() - for i := 0; i < b.N; i++ { - header.EncodeTo(buf) - } - }) - - data, _ := header.MarshalBinary() - - b.Run("Unmarshal", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - h := &Header{} - err := h.UnmarshalBinary(data) - if err != nil { - b.Fatal(err) - } - } - }) - - b.Run("DecodeFrom", func(b *testing.B) { - h := &Header{} - b.ResetTimer() - for i := 0; i < b.N; i++ { - h.DecodeFrom(data) - } - }) -} - -// BenchmarkEncodeDecode benchmarks full encode/decode cycle. 
-func BenchmarkEncodeDecode(b *testing.B) { - data := make([]byte, 1024*1024) // 1MB - for i := range data { - data[i] = byte(i % 256) - } - - b.Run("Encode", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - var buf bytes.Buffer - ws := &benchSeekableBuffer{Buffer: &buf} - if err := Encode(ws, data); err != nil { - b.Fatal(err) - } - } - }) - - // Pre-encode for decode benchmark - var buf bytes.Buffer - ws := &benchSeekableBuffer{Buffer: &buf} - _ = Encode(ws, data) - encoded := buf.Bytes() - - b.Run("Decode", func(b *testing.B) { - b.ResetTimer() - for i := 0; i < b.N; i++ { - rs := bytes.NewReader(encoded) - _, err := ReadAll(rs) - if err != nil { - b.Fatal(err) - } - } - }) -} - -type benchSeekableBuffer struct { - *bytes.Buffer - pos int64 -} - -func (s *benchSeekableBuffer) Seek(offset int64, whence int) (int64, error) { - switch whence { - case 0: - s.pos = offset - case 1: - s.pos += offset - case 2: - s.pos = int64(s.Buffer.Len()) + offset - } - return s.pos, nil -} - -func (s *benchSeekableBuffer) Write(p []byte) (n int, err error) { - for int64(s.Buffer.Len()) < s.pos { - s.Buffer.WriteByte(0) - } - if s.pos < int64(s.Buffer.Len()) { - data := s.Buffer.Bytes() - n = copy(data[s.pos:], p) - if n < len(p) { - m, _ := s.Buffer.Write(p[n:]) - n += m - } - } else { - n, err = s.Buffer.Write(p) - } - s.pos += int64(n) - return n, err -} diff --git a/pkg/archive/reader.go b/pkg/archive/reader.go index 663185e..de47803 100644 --- a/pkg/archive/reader.go +++ b/pkg/archive/reader.go @@ -3,12 +3,41 @@ package archive import ( "fmt" "io" - "github.com/DataDog/zstd" ) +// fastDecodeAll reads all bytes from r, skips the archive header, and uses +// DecodeAll for bulk decompression. This is ~1000x faster than streaming +// for the game's manifest files (which use non-single-segment zstd frames). 
+func fastDecodeAll(r io.Reader) ([]byte, error) { + raw, err := io.ReadAll(r) + if err != nil { + return nil, fmt.Errorf("read file: %w", err) + } + if len(raw) < HeaderSize { + return nil, fmt.Errorf("file too short for archive header") + } + + hdr := &Header{} + if err := hdr.UnmarshalBinary(raw[:HeaderSize]); err != nil { + return nil, fmt.Errorf("parse header: %w", err) + } + + payloadEnd := uint64(HeaderSize) + hdr.CompressedLength + if uint64(len(raw)) < payloadEnd { + return nil, fmt.Errorf("file too short: need %d, have %d", payloadEnd, len(raw)) + } + compressed := raw[HeaderSize:payloadEnd] + + data, err := zstd.Decompress(nil, compressed) + if err != nil { + return nil, fmt.Errorf("decompress: %w", err) + } + return data, nil +} + const ( - // DefaultCompressionLevel is the default compression level for encoding. + // DefaultCompressionLevel is the default compression level for encoding (SpeedDefault). DefaultCompressionLevel = zstd.BestSpeed ) @@ -50,7 +79,8 @@ func (r *Reader) Read(p []byte) (n int, err error) { // Close closes the reader. func (r *Reader) Close() error { - return r.zReader.Close() + r.zReader.Close() + return nil } // Length returns the uncompressed data length. @@ -64,18 +94,32 @@ func (r *Reader) CompressedLength() int { } // ReadAll reads the entire decompressed content from an archive. +// Uses fastDecodeAll (bulk DecodeAll) to avoid hangs with the game's +// non-single-segment zstd frames. func ReadAll(r io.ReadSeeker) ([]byte, error) { - reader, err := NewReader(r) - if err != nil { - return nil, err + return fastDecodeAll(r) +} + +// DecodeRaw decompresses raw archive bytes (already loaded into memory). +// Equivalent to ReadAll but takes a []byte instead of io.ReadSeeker, +// avoiding an extra file-read step when the caller already has the bytes. 
+func DecodeRaw(raw []byte) ([]byte, error) { + if len(raw) < HeaderSize { + return nil, fmt.Errorf("archive too short") + } + hdr := &Header{} + if err := hdr.UnmarshalBinary(raw[:HeaderSize]); err != nil { + return nil, fmt.Errorf("parse header: %w", err) } - defer reader.Close() + payloadEnd := uint64(HeaderSize) + hdr.CompressedLength + if uint64(len(raw)) < payloadEnd { + return nil, fmt.Errorf("archive truncated: need %d, have %d", payloadEnd, len(raw)) + } + compressed := raw[HeaderSize:payloadEnd] - data := make([]byte, reader.Length()) - _, err = io.ReadFull(reader, data) + data, err := zstd.Decompress(nil, compressed) if err != nil { - return nil, fmt.Errorf("read content: %w", err) + return nil, fmt.Errorf("decompress: %w", err) } - return data, nil } diff --git a/pkg/archive/writer.go b/pkg/archive/writer.go index 3f942ad..1733579 100644 --- a/pkg/archive/writer.go +++ b/pkg/archive/writer.go @@ -3,7 +3,6 @@ package archive import ( "fmt" "io" - "github.com/DataDog/zstd" ) @@ -19,7 +18,11 @@ type Writer struct { // WriterOption configures a Writer. type WriterOption func(*Writer) -// WithCompressionLevel sets the compression level for the writer. 
+const ( + // DefaultCompression is the default compression level (Level 3) + DefaultCompression = zstd.BestSpeed +) + func WithCompressionLevel(level int) WriterOption { return func(w *Writer) { w.level = level @@ -31,7 +34,7 @@ func WithCompressionLevel(level int) WriterOption { func NewWriter(dst io.WriteSeeker, uncompressedSize uint64, opts ...WriterOption) (*Writer, error) { w := &Writer{ dst: dst, - level: DefaultCompressionLevel, + level: DefaultCompression, header: &Header{ Magic: Magic, HeaderLength: 16, @@ -54,6 +57,14 @@ func NewWriter(dst io.WriteSeeker, uncompressedSize uint64, opts ...WriterOption return w, nil } +func (w *Writer) writeFrame(data []byte) (uint32, error) { + compressed, err := zstd.CompressLevel(nil, data, w.level) + if err != nil { + return 0, fmt.Errorf("compress frame: %w", err) + } + return uint32(len(compressed)), nil +} + // Write writes compressed data. func (w *Writer) Write(p []byte) (n int, err error) { return w.zWriter.Write(p) diff --git a/pkg/manifest/builder.go b/pkg/manifest/builder.go index ab2b9e6..642a20f 100644 --- a/pkg/manifest/builder.go +++ b/pkg/manifest/builder.go @@ -12,7 +12,7 @@ import ( ) const ( - // DefaultCompressionLevel is the compression level used for building packages. + // DefaultCompressionLevel is the compression level used for building packages (Level 3). DefaultCompressionLevel = zstd.BestSpeed // MaxPackageSize is the maximum size of a single package file. 
@@ -185,7 +185,7 @@ func (b *Builder) addFileToManifest(manifest *Manifest, file ScannedFile, frameI func (b *Builder) writeFrame(manifest *Manifest, data *bytes.Buffer, index uint32) error { compressed, err := zstd.CompressLevel(nil, data.Bytes(), b.compressionLevel) if err != nil { - return fmt.Errorf("compress frame %d: %w", index, err) + return fmt.Errorf("compress frame: %w", err) } return b.writeCompressedFrame(manifest, compressed, uint32(data.Len())) } diff --git a/pkg/manifest/manifest.go b/pkg/manifest/manifest.go index 7f6591e..c523864 100644 --- a/pkg/manifest/manifest.go +++ b/pkg/manifest/manifest.go @@ -247,14 +247,14 @@ func encodeSection(s *Section, buf []byte) { } // ReadFile reads and parses a manifest from a file. +// Uses os.ReadFile + bulk decompression for speed (streaming hangs on large manifests). func ReadFile(path string) (*Manifest, error) { - f, err := os.Open(path) + raw, err := os.ReadFile(path) if err != nil { return nil, fmt.Errorf("open manifest: %w", err) } - defer f.Close() - data, err := archive.ReadAll(f) + data, err := archive.DecodeRaw(raw) if err != nil { return nil, fmt.Errorf("read archive: %w", err) } @@ -267,6 +267,7 @@ func ReadFile(path string) (*Manifest, error) { return manifest, nil } + // WriteFile writes a manifest to a file. 
func WriteFile(path string, m *Manifest) error { // Synchronize header counts with actual slice lengths to prevent size discrepancies diff --git a/pkg/manifest/package.go b/pkg/manifest/package.go index 64cb448..54eaa20 100644 --- a/pkg/manifest/package.go +++ b/pkg/manifest/package.go @@ -8,7 +8,6 @@ import ( "runtime" "strconv" "sync" - "github.com/DataDog/zstd" ) @@ -184,8 +183,7 @@ func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { go func() { defer wg.Done() - // Thread-local buffers and context - ctx := zstd.NewCtx() + // Thread-local buffers var compressed []byte var decompressed []byte @@ -216,7 +214,7 @@ func (p *Package) Extract(outputDir string, opts ...ExtractOption) error { // Decompress var err error - decompressed, err = ctx.Decompress(decompressed[:0], compressed) + decompressed, err = zstd.Decompress(decompressed[:0], compressed) if err != nil { select { case errs <- fmt.Errorf("decompress frame %d: %w", frameIdx, err): diff --git a/pkg/manifest/repack.go b/pkg/manifest/repack.go index f1018aa..21b2319 100644 --- a/pkg/manifest/repack.go +++ b/pkg/manifest/repack.go @@ -11,11 +11,9 @@ import ( "runtime" "sort" "sync" - "github.com/DataDog/zstd" ) -// Pools to eliminate GC overhead var ( readPool = sync.Pool{New: func() interface{} { return make([]byte, 0, 1024*1024) }} decompPool = sync.Pool{New: func() interface{} { return make([]byte, 0, 4*1024*1024) }} @@ -107,15 +105,8 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize return fmt.Errorf("seek to end of package: %w", err) } - // Add 1-byte alignment padding (effectively no padding) - if size > 0 && size%1 != 0 { - padding := 1 - (size % 1) - if _, err := f.Write(make([]byte, padding)); err != nil { - return fmt.Errorf("pad package for alignment: %w", err) - } - size += padding - } - + // Maintain 1-byte alignment (essentially no padding between frames) + // This matches original engine expectations for tight packing. 
pw.currentOffset = size } @@ -146,6 +137,58 @@ func (pw *packageWriter) write(manifest *Manifest, data []byte, decompressedSize return nil } +// writeRaw writes compressed data to the current package and returns where it +// was written (packageIndex, byteOffset) WITHOUT touching manifest.Frames. +// Used by QuickRepack to do true in-place frame updates. +func (pw *packageWriter) writeRaw(data []byte) (pkgIdx uint32, offset uint32, err error) { + if err = os.MkdirAll(fmt.Sprintf("%s/packages", pw.outputDir), 0755); err != nil { + return 0, 0, err + } + + activePackageNum := pw.pkgIndex + if pw.fileHandle == nil || activePackageNum < pw.minPkgIndex { + activePackageNum = pw.minPkgIndex + } + + for { + if pw.fileHandle == nil || pw.pkgIndex != activePackageNum { + if pw.fileHandle != nil { + pw.fileHandle.Close() + } + pkgPath := fmt.Sprintf("%s/packages/%s_%d", pw.outputDir, pw.pkgName, activePackageNum) + flags := os.O_RDWR | os.O_CREATE + if !pw.created[activePackageNum] { + flags |= os.O_TRUNC + pw.created[activePackageNum] = true + } + f, ferr := os.OpenFile(pkgPath, flags, 0644) + if ferr != nil { + return 0, 0, ferr + } + pw.fileHandle = f + pw.pkgIndex = activePackageNum + size, serr := f.Seek(0, io.SeekEnd) + if serr != nil { + return 0, 0, fmt.Errorf("seek to end: %w", serr) + } + pw.currentOffset = size + } + if pw.currentOffset+int64(len(data)) > math.MaxInt32 { + activePackageNum++ + continue + } + break + } + + writeOffset := uint32(pw.currentOffset) + if _, werr := pw.fileHandle.Write(data); werr != nil { + return 0, 0, werr + } + pw.pkgIndex = activePackageNum + pw.currentOffset += int64(len(data)) + return activePackageNum, writeOffset, nil +} + func (pw *packageWriter) close() { if pw.fileHandle != nil { pw.fileHandle.Close() @@ -289,18 +332,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return sorted[a].fc.DataOffset < sorted[b].fc.DataOffset }) - currentOffset := uint32(0) for j := 0; j < len(sorted); j++ { - 
align := sorted[j].fc.Alignment - if align == 0 { - align = 1 - } - padding := (align - (currentOffset % align)) % align - if padding > 0 { - constructionBuf.Write(make([]byte, padding)) - currentOffset += padding - } - buf := [128]byte{} binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) @@ -313,22 +345,15 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return } constructionBuf.Write(modData) - currentOffset += uint32(len(modData)) } else { start := sorted[j].fc.DataOffset end := start + sorted[j].fc.Size constructionBuf.Write(decompBytes[start:end]) - currentOffset += sorted[j].fc.Size } } compBuf := compPool.Get().([]byte) - encodedData, err := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) - if err != nil { - res.err = fmt.Errorf("compress frame: %w", err) - ch <- res - return - } + encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) res.data = encodedData res.decompressedSize = uint32(constructionBuf.Len()) @@ -371,13 +396,6 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, for j := 0; j < len(sorted); j++ { fc := &newManifest.FrameContents[sorted[j].index] - align := fc.Alignment - if align == 0 { - align = 1 - } - padding := (align - (currentOffset % align)) % align - currentOffset += padding - size := fc.Size if res.isModified { buf := [128]byte{} @@ -391,7 +409,9 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, fc.FrameIndex = newFrameIdx fc.DataOffset = currentOffset fc.Size = size - fc.Alignment = align + // Retain original alignment metadata - engine uses this for memory allocation, + // but files are packed tightly (no padding bytes written between them). 
+ fc.Alignment = sorted[j].fc.Alignment currentOffset += size } @@ -443,10 +463,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, } compBuf := compPool.Get().([]byte) - encodedData, err := zstd.CompressLevel(compBuf[:0], currentFrame.Bytes(), zstd.BestSpeed) - if err != nil { - return err - } + encodedData, _ := zstd.CompressLevel(compBuf[:0], currentFrame.Bytes(), zstd.BestSpeed) if err := writer.write(&newManifest, encodedData, uint32(currentFrame.Len())); err != nil { return err @@ -477,8 +494,8 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, currentOffset += file.Size } - // Align file data within the frame - align := uint32(8) + // No padding between files in frame (game engine tightly packs) + align := uint32(1) padding := (align - (uint32(currentFrame.Len()) % align)) % align if padding > 0 { currentFrame.Write(make([]byte, padding)) @@ -496,7 +513,7 @@ func Repack(manifest *Manifest, fileMap [][]ScannedFile, outputDir, packageName, return fmt.Errorf("read new file %s: %w", file.Path, err) } - align := uint32(8) + align := uint32(1) padding := (align - (uint32(currentFrame.Len()) % align)) % align if currentFrame.Len() > 0 && currentFrame.Len()+int(padding)+len(data) > MaxRepackFrameSize { @@ -788,15 +805,6 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa }) for j := 0; j < len(sorted); j++ { - // Original game engine completely ignores the FrameContent.Alignment - // property when packing frames (tightly packs all files with 0 padding). 
- align := uint32(1) - - padding := (align - (uint32(constructionBuf.Len()) % align)) % align - if padding > 0 { - constructionBuf.Write(make([]byte, padding)) - } - buf := [128]byte{} binary.LittleEndian.PutUint64(buf[0:64], uint64(sorted[j].fc.TypeSymbol)) binary.LittleEndian.PutUint64(buf[64:128], uint64(sorted[j].fc.FileSymbol)) @@ -822,12 +830,7 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa } compBuf := compPool.Get().([]byte) - encodedData, err := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) - if err != nil { - res.err = err - ch <- res - return - } + encodedData, _ := zstd.CompressLevel(compBuf[:0], constructionBuf.Bytes(), zstd.BestSpeed) res.data = encodedData res.decompressedSize = uint32(constructionBuf.Len()) @@ -843,8 +846,9 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa return res.err } - newFrameIndex := len(manifest.Frames) - + // Update FrameContent DataOffsets for this frame (sizes may have changed). + // We do NOT change FrameIndex — we update the existing frame entry in-place + // so the frame grouping stays identical to the original manifest. sorted := make([]fcWrapper, 0) if contents, ok := contentsByFrame[uint32(res.index)]; ok { sorted = append(sorted, contents...) @@ -857,12 +861,6 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa for j := 0; j < len(sorted); j++ { fc := &manifest.FrameContents[sorted[j].index] - // Original game tightly packs frames unconditionally, ignoring fc.Alignment. 
- align := uint32(1) - - padding := (align - (currentOffset % align)) % align - currentOffset += padding - buf := [128]byte{} binary.LittleEndian.PutUint64(buf[0:64], uint64(fc.TypeSymbol)) binary.LittleEndian.PutUint64(buf[64:128], uint64(fc.FileSymbol)) @@ -872,17 +870,26 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa size = modFile.Size } - fc.FrameIndex = uint32(newFrameIndex) + // FrameIndex stays the same — only DataOffset and Size may change fc.DataOffset = currentOffset fc.Size = size - // Retain original alignment metadata for memory allocation fc.Alignment = sorted[j].fc.Alignment currentOffset += size } - if err := writer.write(manifest, res.data, res.decompressedSize); err != nil { - return err + // Write compressed data to a new package and update the frame entry + // IN-PLACE at its original index. This preserves the exact frame + // structure (same frame count, same FrameIndex values) the engine expects. + pkgIdx, offset, err := writer.writeRaw(res.data) + if err != nil { + return fmt.Errorf("write frame %d: %w", res.index, err) + } + manifest.Frames[res.index] = Frame{ + PackageIndex: pkgIdx, + Offset: offset, + CompressedSize: uint32(len(res.data)), + Length: res.decompressedSize, } if res.rawReadBuf != nil { @@ -898,6 +905,35 @@ func QuickRepack(manifest *Manifest, fileMap [][]ScannedFile, dataDir, packageNa writer.close() + // Determine the highest package index actually used (original + any new ones) + highestPkg := manifest.Header.PackageCount - 1 + for _, f := range manifest.Frames { + if f.CompressedSize > 0 && f.PackageIndex > highestPkg { + highestPkg = f.PackageIndex + } + } + manifest.Header.PackageCount = highestPkg + 1 + + // Re-add terminator frames for ALL packages (original + newly created) + for i := uint32(0); i <= highestPkg; i++ { + path := fmt.Sprintf("%s/packages/%s_%d", dataDir, packageName, i) + stats, err := os.Stat(path) + if err != nil { + continue + } + manifest.Frames = 
append(manifest.Frames, Frame{ + PackageIndex: i, + Offset: uint32(stats.Size()), + CompressedSize: 0, + Length: 0, + }) + } + + // Final global null terminator + manifest.Frames = append(manifest.Frames, Frame{}) + fmt.Printf("Updating manifest: %s\n", manifestPath) return WriteFile(manifestPath, manifest) } + +