We can obtain models but no textures

The *.pages files contain all the textures, but they are compressed.
Could someone take a look at these *.pages files, please?
Here are two .pages files, one of 13 MB and the other of 26 MB.
Thanks.
Code: Select all
offset type description
----------------------------
0x00 int[4] 0x77339904
0x04 int[4] size in tiles
0x08 int[4] ?
0x0c int[4] log2(size in tiles)
0x10 char[2] 'UU'
0x12 char[6] ?
0x18 int[4] size of the file in bytes
0x1c int[4] 0x0
Code: Select all
// Compression schemes a page payload may use (reverse-engineered names).
// Values are explicit here but identical to the implicit 0..7 sequence.
enum pageCompression_t : int
{
    COMP_INVALID = 0,          // unset / invalid
    COMP_NONE = 1,             // raw, uncompressed payload
    COMP_DXT = 2,              // DXT block compression
    COMP_LZW = 3,              // LZW
    COMP_DCT = 4,              // DCT-based codec
    COMP_HDP = 5,              // HD Photo / JPEG XR -- the value observed in sample files
    COMP_JXR = 6,              // JPEG XR
    COMP_MAX_COMPRESSIONS = 7  // sentinel: one past the last valid scheme
};
// Bit flags stored in HDPHeader_t::flags for HDP-compressed pages.
// NOTE(review): names come from reverse engineering; semantics are
// inferred from the identifiers -- confirm against an actual decoder.
enum HDPFlags_t : int
{
HDP_SPECULAR_SHIFT_MASK = 0x3, // low two bits: specular shift amount (presumably)
HDP_MONO_SPECULAR = 0x4, // specular map is single-channel (presumably)
HDP_HAS_COVER = 0x8 // page carries an extra cover/alpha layer (presumably)
};
// One quadtree node ("page") header inside a *.pages file.
// Layout: 4 + 4 + 16 + 8 + 2*4 = 40 bytes. Field meanings are
// reverse-engineered; see the annotated 010 template variant of this
// struct elsewhere in this thread for observed values.
struct pageHeader_t
{
int magic; // per-page signature word (0xCABFED04 observed in samples)
pageCompression_t pageCompression; // how this page's payload is compressed (0x05 = HDP observed)
unsigned int finerDiskOffset[4]; // child (finer-level) page offsets from file start, divided by diskOffsetScale
unsigned short finerDiskLength[4]; // child page lengths, also divided by diskOffsetScale
unsigned short x; // page coordinate; deeper nodes show larger values (quadrant position?) -- unverified
unsigned short y; // page coordinate; deeper nodes show larger values (quadrant position?) -- unverified
unsigned short level; // depth of this node in the page quadtree
unsigned short reserved; // padding / unused
};
// Top-level header of a *.pages file (reverse-engineered).
// A root pageHeader_t follows this header directly on disk.
struct pageFileHeader_t
{
int magic; // file signature, observed as 0x77339904
int pagesWide; // page count along one side (the file is square in pages)
int installedMipAndOffset; // purpose unclear -- TODO reverse further
short numLevels; // number of levels in the page quadtree
short layoutVersion; // observed as 0 in sample files
int totalPages; // total page count; observed values look inaccurate (often 0x155555)
int diskOffsetScale; // multiplier applied to page disk offsets/lengths
long long totalFileSize; // size of the whole file in bytes
//pageHeader_t subRoot; // root page header follows immediately on disk
};
// Per-page HDP payload header, 16 bytes, stored BIG-endian on disk
// (the extraction scripts in this thread switch endianness to read it).
// The *Size fields are byte lengths of each map's bitstream; observed
// dumps show their sum equals the page payload size.
struct HDPHeader_t
{
unsigned char qualityDiffuse; // encode quality of the diffuse map
unsigned char qualityNormal; // encode quality of the normal map
unsigned char qualitySpecular; // encode quality of the specular map
unsigned char qualityPower; // encode quality of the power (gloss?) map -- unverified
unsigned char flags; // HDPFlags_t bits
unsigned char pad; // alignment padding
unsigned short diffuseSize; // diffuse bitstream length in bytes
unsigned short normalSize; // normal bitstream length in bytes
unsigned short specularSize; // specular bitstream length in bytes
unsigned short powerSize; // power bitstream length in bytes
unsigned short alphaSize; // alpha bitstream length in bytes (0 when absent)
};
Code: Select all
//--------------------------------------
//--- 010 Editor v5.0 Binary Template
//
// File:
// Author:
// Revision:
// Purpose:
//--------------------------------------
// Default byte order for the template: little-endian (the HDP header
// sections below temporarily switch to big-endian).
LittleEndian();

// Page payload compression scheme; mirrors the engine-side enum.
// Explicit values are identical to the implicit 0..7 sequence.
enum pageCompression_t
{
    COMP_INVALID = 0,
    COMP_NONE = 1,
    COMP_DXT = 2,
    COMP_LZW = 3,
    COMP_DCT = 4,
    COMP_HDP = 5,   // HD Photo / JPEG XR -- the value seen in sample files
    COMP_JXR = 6,
    COMP_MAX_COMPRESSIONS = 7
};
// Flag bits found in HDPHeader_t.flags (semantics inferred from names).
enum HDPFlags_t
{
HDP_SPECULAR_SHIFT_MASK = 0x3, // low two bits: specular shift amount (presumably)
HDP_MONO_SPECULAR = 0x4, // single-channel specular (presumably)
HDP_HAS_COVER = 0x8 // extra cover/alpha layer present (presumably)
};
// HDP payload header: per-map quality levels followed by per-map
// bitstream byte lengths. 16 bytes total; read big-endian (see the
// BigEndian() calls at each use site below).
// FIX: the original wrote `typedef struct HDPHeader_t { ... };` -- a
// typedef with no declared name; corrected to the standard typedef form
// so the bare type name HDPHeader_t is actually declared.
typedef struct
{
    unsigned char qualityDiffuse;  // diffuse map encode quality
    unsigned char qualityNormal;   // normal map encode quality
    unsigned char qualitySpecular; // specular map encode quality
    unsigned char qualityPower;    // power (gloss?) map quality -- unverified
    unsigned char flags;           // HDPFlags_t bits
    unsigned char pad;             // alignment padding
    unsigned short diffuseSize;    // diffuse bitstream length in bytes
    unsigned short normalSize;     // normal bitstream length in bytes
    unsigned short specularSize;   // specular bitstream length in bytes
    unsigned short powerSize;      // power bitstream length in bytes
    unsigned short alphaSize;      // alpha bitstream length in bytes (0 if none)
} HDPHeader_t;
// One quadtree page node, 40 bytes. Duplicate declarations of this type
// below accumulate into the SubRoot[] array (an 010 Editor feature).
// FIX: the original wrote `typedef struct pageHeader_t { ... };` -- a
// typedef with no declared name; corrected to the standard typedef form.
typedef struct
{
    uint magic_signature;              // signature ID, observed 0xCABFED04
    pageCompression_t pageCompression; // compression type (observed 0x05, HDP)
    uint PageDiskOffset[4];            // child node offsets from start of file, divided by DiskOffsetScale
    ushort PageDiskLength[4];          // child node lengths, divided by DiskOffsetScale
    ushort x;                          // deeper nodes have larger values (quadrant location?) -- unverified
    ushort y;                          // deeper nodes have larger values (quadrant location?) -- unverified
    ushort QuadTreeLevel;              // node's level in the quadtree
    ushort reserved;                   // unused
} pageHeader_t;
// 010 Editor template body for a *.pages MegaTexture file.
// NOTE(review): repeating a declaration name (SubRoot, test, compdataN)
// is an 010 Editor feature -- each repeat appends an element to an
// array, so SubRoot[0] is the root node, SubRoot[1] its first child,
// and so on. The walk below descends 8 levels, always following the
// first populated child branch (PageDiskOffset[0]).
struct Pages_File {
char Signature[4]; // Signature ID 0x77339904
ulong pageswide; // # of pages wide (always square so also # of pages tall) Possibly needs to be multiplied by DiskOffsetScale.
ulong installedMipAndOffset; // purpose unclear
ushort QuadtreeLevelCount; // How many levels in the Quadtree
ushort LayoutVer; // always zero so far?
ulong totalPages; // Number seems inaccurate, almost always 0x155555
ulong DiskOffsetScale; // Factor to scale the various disk offsets and lengths by.
uint64 totalFileSize; // Size of the file in bytes.
// Root node header follows the file header directly -> SubRoot[0]
pageHeader_t SubRoot;
// subnodes
pageHeader_t SubRoot; // Read leaf header -> SubRoot[1]
BigEndian(); // the HDP payload header is stored big-endian
HDPHeader_t test; // Get DCT/HDP header data -> test[0]
LittleEndian();
// Payload for this leaf: parent-declared length minus the bytes already
// consumed past the leaf's start offset (headers read above).
char compdata1[SubRoot[0].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[0].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[1].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[2]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata2[SubRoot[1].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[1].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[2].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[3]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata3[SubRoot[2].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[2].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[3].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[4]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata4[SubRoot[3].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[3].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[4].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[5]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata5[SubRoot[4].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[4].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[5].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[6]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata6[SubRoot[5].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[5].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[6].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[7]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata7[SubRoot[6].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[6].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[7].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[8]
BigEndian();
HDPHeader_t test;
LittleEndian();
// Get data for leaf (based on offset/length the parent leaf specified)
char compdata8[SubRoot[7].PageDiskLength[0]*DiskOffsetScale-(FTell()-SubRoot[7].PageDiskOffset[0]*DiskOffsetScale)];
// Move to first subleaf of the leaf we just read.
FSeek(SubRoot[8].PageDiskOffset[0]*DiskOffsetScale);
pageHeader_t SubRoot; // -> SubRoot[9] (header only; no payload read at this depth)
} MegaTexture;
Code: Select all
0x71 0x00 0xA3 0xC6 0x0A 0x3C 0x60 0xA3 0xC6 0x00 0xFF 0x00 0x00
It's scripted in 010 Editor: http://www.010editor.com

ZerOHearth wrote: Nice work, many thanks!
How can I load this script to test it?
Code: Select all
# QuickBMS script: dumps 3 pages of data, displaying the HDP header for
# each page that is dumped.
endian little
# Print_Header: reads the 16-byte HDPHeader_t at the current file
# position and prints its fields (per-map quality bytes, flags, then
# the per-map bitstream sizes). Restores little-endian before returning.
# NOTE(review): variable names are interpolated into the print strings
# below (%Name%), so they must not be renamed independently.
startfunction Print_Header
endian big # the HDP header is stored Big Endian, unlike the rest of the file
print "---"
get DiffuseQuality byte
print "DiffuseMap Quality: %DiffuseQuality%"
get NormalQuality byte
print "NormalMap Quality: %NormalQuality%"
get SpecularQuality byte
print "SpecularMap Quality: %SpecularQuality%"
get PowerQuality byte
print "PowerMap Quality: %PowerQuality%"
get Flags byte
print "Flags: %Flags%"
get padding byte
print "padding ignore?: %padding%"
get DiffuseSize short
print "DiffuseMap DataSize: %DiffuseSize%"
get NormalSize short
print "NormalMap DataSize: %NormalSize%"
get SpecularSize short
print "SpecularMap DataSize: %SpecularSize%"
get PowerSize short
print "PowerMap DataSize: %PowerSize%"
get AlphaSize short
print "AlphaMap DataSize: %AlphaSize%"
print "---------------------------"
endian little # back to little for the rest of the file
endfunction
# Main: walk three levels of the page quadtree, dumping each page's
# payload to Page_BlockN.dat.
# Layout recap (from the reverse-engineered structs):
#   file header is 0x20 bytes; DiskOffsetScale sits at 0x14.
#   The root pageHeader_t (40 bytes) follows at 0x20, so its first
#   child offset is at 0x28 and first child length at 0x38.
#   Within a page: pageHeader_t (40) + HDPHeader_t (16) = 56 bytes of
#   headers before the compressed payload.
goto 0x14
get DiskOffsetScale long
print "Diskoffsetscale:%DiskOffsetScale%"
goto 0x28
get Block0_Offset long
goto 0x38
get Block0_Length short
# On-disk offsets/lengths are stored pre-divided by DiskOffsetScale.
math Block0_Offset * DiskOffsetScale
math Block0_Length * DiskOffsetScale
print "-- Page_Block0.dat ------------------------"
# +56 / -56: skip past pageHeader_t (40) + HDPHeader_t (16) to the payload.
xmath Block0_Offset_skipheader "Block0_Offset + 56"
xmath Block0_Length_skipheader "Block0_Length - 56"
print "Block0_Offset:%Block0_Offset_skipheader%"
print "Block0_Length:%Block0_Length_skipheader%"
# +40: skip pageHeader_t to land on the HDP header, and print it.
xmath Block0_Offset_atheader "Block0_Offset + 40"
goto Block0_Offset_atheader
callfunction Print_Header
# dump Page_block zero's payload
log "Page_Block0.dat" Block0_Offset_skipheader Block0_Length_skipheader
math Block0_Offset + 8 # +8: skip magic + compression to the child offset table
goto Block0_Offset
# get all four child branches for testing
get Block1_Offset1 long
get Block1_Offset2 long
get Block1_Offset3 long
get Block1_Offset4 long
get Block1_Length1 short
get Block1_Length2 short
get Block1_Length3 short
get Block1_Length4 short
# Find a branch that's populated (the first may not always be at the top);
# an offset of 0 means that child page is absent.
if Block1_Offset1 != 0
print "Branch 1 chosen."
math Block1_Offset = Block1_Offset1
math Block1_Length = Block1_Length1
elif Block1_Offset2 != 0
print "Branch 2 chosen."
math Block1_Offset = Block1_Offset2
math Block1_Length = Block1_Length2
elif Block1_Offset3 != 0
print "Branch 3 chosen."
math Block1_Offset = Block1_Offset3
math Block1_Length = Block1_Length3
elif Block1_Offset4 != 0
print "Branch 4 chosen."
math Block1_Offset = Block1_Offset4
math Block1_Length = Block1_Length4
endif
math Block1_Offset * DiskOffsetScale
math Block1_Length * DiskOffsetScale
print "-- Page_Block1.dat ------------------------"
xmath Block1_Offset_skipheader "Block1_Offset + 56"
xmath Block1_Length_skipheader "Block1_Length - 56"
print "Block1_Offset:%Block1_Offset_skipheader%"
print "Block1_Length:%Block1_Length_skipheader%"
# skip to the HDP header and print it
xmath Block1_Offset_atheader "Block1_Offset + 40"
goto Block1_Offset_atheader
callfunction Print_Header
# dump Page_block one's payload
log "Page_Block1.dat" Block1_Offset_skipheader Block1_Length_skipheader
math Block1_Offset + 8 # skip to the child offset table again
goto Block1_Offset
# NOTE(review): unlike the level above, only branch 1 is read here --
# an unpopulated first branch would yield offset 0 and a bad dump.
get Block2_Offset long
math Block1_Offset + 16 # +16: jump over the 4x4-byte offset table to the lengths
goto Block1_Offset
get Block2_Length short
math Block2_Offset * DiskOffsetScale
math Block2_Length * DiskOffsetScale
print "-- Page_Block2.dat ------------------------"
xmath Block2_Offset_skipheader "Block2_Offset + 56"
xmath Block2_Length_skipheader "Block2_Length - 56"
print "Block2_Offset:%Block2_Offset_skipheader%"
print "Block2_Length:%Block2_Length_skipheader%"
# skip to the HDP header and print it
xmath Block2_Offset_atheader "Block2_Offset + 40"
goto Block2_Offset_atheader
callfunction Print_Header
# dump Page_block two's payload
log "Page_Block2.dat" Block2_Offset_skipheader Block2_Length_skipheader
cleanexit
There are some interesting patterns in these numbers. For one, the sum of the different *Map DataSizes adds up to the whole block size. For example, on Block 2: 4655+4633+3514+238+0 = 13040, which you'll notice is the size of that block (Block2_Length).

Diskoffsetscale:2
-- Page_Block0.dat ------------------------
Block0_Offset:128
Block0_Length:5870
---
DiffuseMap Quality: 30
NormalMap Quality: 13
SpecularMap Quality: 25
PowerMap Quality: 40
Flags: 6
padding ignore?: 0
DiffuseMap DataSize: 1720
NormalMap DataSize: 3218
SpecularMap DataSize: 803
PowerMap DataSize: 128
AlphaMap DataSize: 0
---------------------------
00000080 5870 Page_Block0.dat
Branch 2 chosen.
-- Page_Block1.dat ------------------------
Block1_Offset:6054
Block1_Length:9170
---
DiffuseMap Quality: 30
NormalMap Quality: 13
SpecularMap Quality: 25
PowerMap Quality: 40
Flags: 6
padding ignore?: 0
DiffuseMap DataSize: 3043
NormalMap DataSize: 4004
SpecularMap DataSize: 1951
PowerMap DataSize: 171
AlphaMap DataSize: 0
---------------------------
000017a6 9170 Page_Block1.dat
-- Page_Block2.dat ------------------------
Block2_Offset:21758
Block2_Length:13040
---
DiffuseMap Quality: 30
NormalMap Quality: 13
SpecularMap Quality: 25
PowerMap Quality: 40
Flags: 5
padding ignore?: 0
DiffuseMap DataSize: 4655
NormalMap DataSize: 4633
SpecularMap DataSize: 3514
PowerMap DataSize: 238
AlphaMap DataSize: 0
---------------------------
000054fe 13040 Page_Block2.dat
- 3 files found in 0 seconds
coverage file 0 0% 28168 13563938
Yes, I've been pursuing that as well, though using the JPEG XR specification (which is materially identical): http://www.itu.int/rec/T-REC-T.832

m0xf wrote: Data blocks represent the HDPhoto bitstream without the IMAGE_HEADER (see "HDPhoto_Bitstream_Spec_1.0.doc"). It's a very complex format. I tried to adapt an HD Photo decoding library, but without results.