Thread: The Chad Way To Decode Idx/Dat Cache Files

Results 1 to 6 of 6
  1. #1 The Chad Way To Decode Idx/Dat Cache Files 
    WVWVWVWVWVWVWVW

    _jordan's Avatar
    Join Date
    Nov 2012
    Posts
    3,046
    Thanks given
    111
    Thanks received
    1,848
    Rep Power
    5000

    Code:
        public byte[] read(int var1) {
          BufferedFile var2 = this.datFile;
          synchronized(this.datFile) {
             try {
                Object var10000;
                if (this.idxFile.length() < (long)(var1 * 6 + 6)) {
                   var10000 = null;
                   return (byte[])var10000;
                } else {
                   this.idxFile.seek((long)(var1 * 6));
                   this.idxFile.read(ArchiveDisk_buffer, 0, 6);
                   int var3 = ((ArchiveDisk_buffer[0] & 255) << 16) + (ArchiveDisk_buffer[2] & 255) + ((ArchiveDisk_buffer[1] & 255) << 8);
                   int var4 = (ArchiveDisk_buffer[5] & 255) + ((ArchiveDisk_buffer[3] & 255) << 16) + ((ArchiveDisk_buffer[4] & 255) << 8);
                   if (var3 < 0 || var3 > this.maxEntrySize) {
                      var10000 = null;
                      return (byte[])var10000;
                   } else if (var4 <= 0 || (long)var4 > this.datFile.length() / 520L) {
                      var10000 = null;
                      return (byte[])var10000;
                   } else {
                      byte[] var5 = new byte[var3];
                      int var6 = 0;
    
                      for(int var7 = 0; var6 < var3; ++var7) {
                         if (var4 == 0) {
                            var10000 = null;
                            return (byte[])var10000;
                         }
    
                         this.datFile.seek(520L * (long)var4);
                         int var8 = var3 - var6;
                         int var9;
                         int var10;
                         int var11;
                         int var12;
                         byte var13;
                         if (var1 > 65535) {
                            if (var8 > 510) {
                               var8 = 510;
                            }
    
                            var13 = 10;
                            this.datFile.read(ArchiveDisk_buffer, 0, var8 + var13);
                            var9 = ((ArchiveDisk_buffer[1] & 255) << 16) + ((ArchiveDisk_buffer[0] & 255) << 24) + (ArchiveDisk_buffer[3] & 255) + ((ArchiveDisk_buffer[2] & 255) << 8);
                            var10 = (ArchiveDisk_buffer[5] & 255) + ((ArchiveDisk_buffer[4] & 255) << 8);
                            var11 = (ArchiveDisk_buffer[8] & 255) + ((ArchiveDisk_buffer[7] & 255) << 8) + ((ArchiveDisk_buffer[6] & 255) << 16);
                            var12 = ArchiveDisk_buffer[9] & 255;
                         } else {
                            if (var8 > 512) {
                               var8 = 512;
                            }
    
                            var13 = 8;
                            this.datFile.read(ArchiveDisk_buffer, 0, var8 + var13);
                            var9 = (ArchiveDisk_buffer[1] & 255) + ((ArchiveDisk_buffer[0] & 255) << 8);
                            var10 = (ArchiveDisk_buffer[3] & 255) + ((ArchiveDisk_buffer[2] & 255) << 8);
                            var11 = ((ArchiveDisk_buffer[5] & 255) << 8) + ((ArchiveDisk_buffer[4] & 255) << 16) + (ArchiveDisk_buffer[6] & 255);
                            var12 = ArchiveDisk_buffer[7] & 255;
                         }
    
                         if (var9 != var1 || var10 != var7 || var12 != this.archive) {
                            var10000 = null;
                            return (byte[])var10000;
                         }
    
                         if (var11 < 0 || (long)var11 > this.datFile.length() / 520L) {
                            var10000 = null;
                            return (byte[])var10000;
                         }
    
                         int var14 = var13 + var8;
    
                         for(int var15 = var13; var15 < var14; ++var15) {
                            var5[var6++] = ArchiveDisk_buffer[var15];
                         }
    
                         var4 = var11;
                      }
    
                      byte[] var20 = var5;
                      return var20;
                   }
                }
             } catch (IOException var18) {
                return null;
             }
          }
       }
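
    For reference, this is the on-disk layout that decompiled read() is walking. A quick summary sketch of the constants implied by the reads above (the names are mine, not the client's):
    Code:
        // Layout implied by the decompiled read() above; names are illustrative only.
        object CacheLayout {
            const val IDX_ENTRY_SIZE = 6  // 3-byte entry length + 3-byte starting sector
            const val SECTOR_SIZE = 520   // every block in the .dat file is 520 bytes
            const val SMALL_HEADER = 8    // 2-byte archive id, 2-byte part, 3-byte next sector, 1-byte index id
            const val LARGE_HEADER = 10   // same fields, but a 4-byte archive id for ids > 0xFFFF
            const val SMALL_PAYLOAD = SECTOR_SIZE - SMALL_HEADER // 512 data bytes per sector
            const val LARGE_PAYLOAD = SECTOR_SIZE - LARGE_HEADER // 510 data bytes per sector
        }
    The Kotlin version below follows the same layout.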


    Code:
            val idxBytes = idxFilePath.toFile().readBytes()
            val datBytes = datFilePath.toFile().readBytes()
    Code:
            val offset = idxId * 6
            val idxBuffer = idxBytes.copyOfRange(offset, offset + 6).toByteBuffer()
            val idxSize = idxBuffer.readUnsignedMedium()
            val idxSector = idxBuffer.readUnsignedMedium()
            if (idxSize < 0) {
                throw IdxFileException("Invalid size for sector Size=$idxSize Sector=$idxSector")
            }
            if (idxSector <= 0 || idxSector > datBytes.size / 520) return ByteArray(0)
            return ByteBuffer.allocate(idxSize).decode(id, idxId, idxSize, idxSector)
    Code:
        private tailrec fun ByteBuffer.decode(
            id: Int,
            referenceTableId: Int,
            size: Int,
            sector: Int,
            bytes: Int = 0,
            part: Int = 0
        ): ByteArray {
            if (size <= bytes) return array()
    
            if (sector == 0) {
                throw EndOfDatFileException("Unexpected end of file. Id=[$id] Length=[$size]")
            }
    
            val offset = 520 * sector
            val large = referenceTableId > 0xFFFF
            val headerSize = if (large) 10 else 8
            val blockSize = getSizeAdjusted(size - bytes, headerSize)
            val header = datBytes.copyOfRange(offset, offset + headerSize + blockSize).toByteBuffer()
    
            val currentReferenceTableId = if (large) header.int else header.readUnsignedShort()
            val currentPart = header.readUnsignedShort()
            val nextSector = header.readUnsignedMedium()
            val currentId = header.readUnsignedByte()
    
            if (referenceTableId != currentReferenceTableId || currentPart != part || id != currentId) {
                throw DatFileException("DatFile mismatch Id=$currentId != $id, ReferenceTableId=$currentReferenceTableId != $referenceTableId, CurrentPart=$currentPart != $part")
            }
            if (nextSector < 0 || nextSector > datBytes.size / 520) {
                throw DatFileException("Invalid next sector $nextSector")
            }
    
            put(header.array(), headerSize, blockSize)
            return decode(id, referenceTableId, size, nextSector, bytes + blockSize, part + 1)
        }
    
        private fun getSizeAdjusted(byteAmount: Int, headerSize: Int): Int = if (byteAmount <= 520 - headerSize) byteAmount else 520 - headerSize
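
    The snippets above lean on a few ByteBuffer extension helpers (toByteBuffer(), readUnsignedMedium(), etc.) that aren't shown here. If you're following along, something close to this minimal sketch will do — treat it as an assumption about those helpers, not the exact code from my project:
    Code:
        import java.nio.ByteBuffer

        // Minimal sketch of the extension helpers assumed by the snippets above.
        fun ByteArray.toByteBuffer(): ByteBuffer = ByteBuffer.wrap(this)

        fun ByteBuffer.readUnsignedByte(): Int = get().toInt() and 0xFF

        fun ByteBuffer.readUnsignedShort(): Int = short.toInt() and 0xFFFF

        // 3-byte big-endian value: idx sizes/sectors and the "next sector" field use this.
        fun ByteBuffer.readUnsignedMedium(): Int =
            (readUnsignedByte() shl 16) or (readUnsignedByte() shl 8) or readUnsignedByte()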

  2. #2  
    🖤RuneScape Veteran(2004)🖤 0117be's Avatar
    Join Date
    Jan 2012
    Posts
    136
    Thanks given
    1
    Thanks received
    18
    Rep Power
    1
    Clutch
    19 Years of NO XP WASTE.

  3. #3  
    Registered Member
    Tyluur's Avatar
    Join Date
    Jun 2010
    Age
    26
    Posts
    5,103
    Thanks given
    1,818
    Thanks received
    1,767
    Rep Power
    2438
    Very nice work!

    Perhaps this should be a snippet though?
    Not quite informative!

    Quote Originally Posted by blakeman8192 View Post
    Keep trying. Quitting is the only true failure.

  4. #4  
    Registered Member
    Tyluur's Avatar
    Join Date
    Jun 2010
    Age
    26
    Posts
    5,103
    Thanks given
    1,818
    Thanks received
    1,767
    Rep Power
    2438
    Quote Originally Posted by _jordan View Post

    Gave this a run through on ChatGPT...

    The function takes in the following parameters:

    id: an integer representing the identifier of the data buffer
    referenceTableId: an integer representing the identifier of the reference table used to look up the data buffer
    size: an integer representing the size of the data buffer
    sector: an integer representing the sector index of the data buffer
    bytes: an integer representing the number of bytes that have been read so far from the data buffer (default value is 0)
    part: an integer representing the part index of the data buffer (default value is 0)
    The function uses tail recursion to read blocks from the dat file until all bytes have been accumulated. At each recursive call, it first checks whether the full size has been read, and if so, returns the byte array backing the buffer it has been filling. Otherwise, it proceeds to read the header of the next data block.

    The header and payload are obtained by copying headerSize + blockSize bytes of datBytes, starting at the offset derived from the sector index (520 * sector). The header size is either 8 or 10 bytes depending on whether the reference table identifier is larger than 0xFFFF, while the block size is calculated from the remaining number of bytes to read and the header size. The copied range is then wrapped in a ByteBuffer for parsing.

    The header contains four fields: the reference table identifier, the part index, the next sector index, and the index (archive) identifier. The function checks that the reference table identifier, part index, and archive identifier match the values expected from the parameters, and that the next sector index lies within the bounds of the dat file; if any check fails, a DatFileException is thrown.

    Finally, the function appends the decoded block to the output byte array and recursively calls itself with updated parameters to read the next block. The updated parameters are the same as the original ones, except that the sector index is updated to the next sector specified in the header, the number of bytes read so far is incremented by the block size, and the part index is incremented by 1.
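
    To put rough numbers on that block math: with the 8-byte header each sector carries 512 payload bytes, and with the 10-byte header only 510, so a 2048-byte entry takes ceil(2048 / 512) = 4 sectors for a small archive id but ceil(2048 / 510) = 5 for a large one. A quick illustrative sketch of that arithmetic:
    Code:
        // Illustrative only: how many 520-byte sectors an entry of `size` bytes spans.
        fun sectorsNeeded(size: Int, largeArchiveId: Boolean): Int {
            val payloadPerSector = 520 - if (largeArchiveId) 10 else 8  // 510 if large ids, else 512
            return (size + payloadPerSector - 1) / payloadPerSector     // ceiling division
        }

        fun main() {
            println(sectorsNeeded(2048, largeArchiveId = false)) // 4
            println(sectorsNeeded(2048, largeArchiveId = true))  // 5
        }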
    Also, here's its ELI5 take:

    This code is like a recipe for making a cake from different pieces. Each piece has a number and a label on it. The pieces are stored in a big box called datBytes. The recipe tells you how to find the right piece and put it together with the other pieces until you have a whole cake.

    The first thing the recipe does is check if you have all the pieces you need. If you do, then you are done and you can enjoy your cake. If not, then you need to find the next piece.

    The next thing the recipe does is check if there is a piece left in the box. If there is no piece left, then something went wrong and you can’t make the cake. You have to tell someone that there is an error.

    The next thing the recipe does is look for the piece with the right number and label in the box. The number tells you where in the box to look for it. The label tells you which part of the cake it belongs to. The piece also has some information on it, like how big it is and where to find the next piece.

    The next thing the recipe does is check if the piece matches what you expect. If it doesn’t match, then something went wrong and you can’t make the cake. You have to tell someone that there is an error.

    The next thing the recipe does is add the piece to your cake. It cuts off any extra bits that don’t fit in your cake.

    The last thing the recipe does is repeat this process with the next piece until you have all of them.

    That’s how this code works! It’s like a puzzle that makes a cake from different pieces.
    Quote Originally Posted by blakeman8192 View Post
    Keep trying. Quitting is the only true failure.

  5. #5  
    WVWVWVWVWVWVWVW

    _jordan's Avatar
    Join Date
    Nov 2012
    Posts
    3,046
    Thanks given
    111
    Thanks received
    1,848
    Rep Power
    5000
    Quote Originally Posted by Tyluur View Post
    Gave this a run through on ChatGPT...



    Also, here's its ELI5 take:



    Here's the RuneLite implementation as well:

    Code:
    	public synchronized IndexEntry read(int id) throws IOException
    	{
    		idx.seek(id * INDEX_ENTRY_LEN);
    		int i = idx.read(buffer);
    		if (i != INDEX_ENTRY_LEN)
    		{
    			logger.debug("short read for id {} on index {}: {}", id, indexFileId, i);
    			return null;
    		}
    
    		int length = ((buffer[0] & 0xFF) << 16) | ((buffer[1] & 0xFF) << 8) | (buffer[2] & 0xFF);
    		int sector = ((buffer[3] & 0xFF) << 16) | ((buffer[4] & 0xFF) << 8) | (buffer[5] & 0xFF);
    
    		if (length <= 0 || sector <= 0)
    		{
    			logger.debug("invalid length or sector {}/{}", length, sector);
    			return null;
    		}
    
    		return new IndexEntry(this, id, sector, length);
    	}
    Code:
    	public synchronized byte[] read(int indexId, int archiveId, int sector, int size) throws IOException
    	{
    		if (sector <= 0L || dat.length() / SECTOR_SIZE < (long) sector)
    		{
    			logger.warn("bad read, dat length {}, requested sector {}", dat.length(), sector);
    			return null;
    		}
    
    		byte[] readBuffer = new byte[SECTOR_SIZE];
    		ByteBuffer buffer = ByteBuffer.allocate(size);
    
    		for (int part = 0, readBytesCount = 0, nextSector;
    			size > readBytesCount;
    			sector = nextSector)
    		{
    			if (sector == 0)
    			{
    				logger.warn("Unexpected end of file");
    				return null;
    			}
    
    			dat.seek(SECTOR_SIZE * sector);
    
    			int dataBlockSize = size - readBytesCount;
    			byte headerSize;
    			int currentIndex;
    			int currentPart;
    			int currentArchive;
    			if (archiveId > 0xFFFF)
    			{
    				headerSize = 10;
    				if (dataBlockSize > SECTOR_SIZE - headerSize)
    				{
    					dataBlockSize = SECTOR_SIZE - headerSize;
    				}
    
    				int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
    				if (i != headerSize + dataBlockSize)
    				{
    					logger.warn("Short read when reading file data for {}/{}", indexId, archiveId);
    					return null;
    				}
    
    				currentArchive = ((readBuffer[0] & 0xFF) << 24)
    					| ((readBuffer[1] & 0xFF) << 16)
    					| ((readBuffer[2] & 0xFF) << 8)
    					| (readBuffer[3] & 0xFF);
    				currentPart = ((readBuffer[4] & 0xFF) << 8) + (readBuffer[5] & 0xFF);
    				nextSector = ((readBuffer[6] & 0xFF) << 16)
    					| ((readBuffer[7] & 0xFF) << 8)
    					| (readBuffer[8] & 0xFF);
    				currentIndex = readBuffer[9] & 0xFF;
    			}
    			else
    			{
    				headerSize = 8;
    				if (dataBlockSize > SECTOR_SIZE - headerSize)
    				{
    					dataBlockSize = SECTOR_SIZE - headerSize;
    				}
    
    				int i = dat.read(readBuffer, 0, headerSize + dataBlockSize);
    				if (i != headerSize + dataBlockSize)
    				{
    					logger.warn("short read");
    					return null;
    				}
    
    				currentArchive = ((readBuffer[0] & 0xFF) << 8)
    					| (readBuffer[1] & 0xFF);
    				currentPart = ((readBuffer[2] & 0xFF) << 8)
    					| (readBuffer[3] & 0xFF);
    				nextSector = ((readBuffer[4] & 0xFF) << 16)
    					| ((readBuffer[5] & 0xFF) << 8)
    					| (readBuffer[6] & 0xFF);
    				currentIndex = readBuffer[7] & 0xFF;
    			}
    
    			if (archiveId != currentArchive || currentPart != part || indexId != currentIndex)
    			{
    				logger.warn("data mismatch {} != {}, {} != {}, {} != {}",
    					archiveId, currentArchive,
    					part, currentPart,
    					indexId, currentIndex);
    				return null;
    			}
    
    			if (nextSector < 0 || dat.length() / SECTOR_SIZE < (long) nextSector)
    			{
    				logger.warn("Invalid next sector");
    				return null;
    			}
    
    			buffer.put(readBuffer, headerSize, dataBlockSize);
    			readBytesCount += dataBlockSize;
    
    			++part;
    		}
    
    		buffer.flip();
    		return buffer.array();
    	}
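
    For anyone wiring this up, the two reads chain together like this — a minimal hedged sketch with placeholder types, not RuneLite's actual classes:
    Code:
        // Placeholder types mirroring the two methods shown above (not RuneLite's real API).
        data class IndexEntry(val sector: Int, val length: Int)

        fun interface IdxReader { fun read(id: Int): IndexEntry? }
        fun interface DatReader { fun read(indexId: Int, archiveId: Int, sector: Int, size: Int): ByteArray? }

        // The idx entry gives the starting sector + total length; the dat read walks the sector chain.
        fun readArchive(idx: IdxReader, dat: DatReader, indexId: Int, archiveId: Int): ByteArray? {
            val entry = idx.read(archiveId) ?: return null
            return dat.read(indexId, archiveId, entry.sector, entry.length)
        }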

  6. #6  
    Registered Member

    Join Date
    Feb 2010
    Posts
    3,253
    Thanks given
    1,145
    Thanks received
    909
    Rep Power
    2081
    Nice, we've had this since 2004.
