diff --git a/Server/src/org/crandor/game/content/skill/free/fishing/FishingPulse.java b/Server/src/org/crandor/game/content/skill/free/fishing/FishingPulse.java index d906ac227..a83e5dd03 100644 --- a/Server/src/org/crandor/game/content/skill/free/fishing/FishingPulse.java +++ b/Server/src/org/crandor/game/content/skill/free/fishing/FishingPulse.java @@ -73,7 +73,8 @@ public final class FishingPulse extends SkillPulse { if (option == null) { return false; } - if (!player.getInventory().containsItem(option.getTool()) && !hasBarbTail() && isBareHanded(player)) { + player.debug(String.valueOf(player.getInventory().containsItem(option.getTool()))); + if (!player.getInventory().containsItem(option.getTool()) && !hasBarbTail()) { //System.out.println(isBareHanded(player)); player.getDialogueInterpreter().sendDialogue("You need a " + option.getTool().getName().toLowerCase() + " to catch these fish."); stop(); diff --git a/Server/src/org/crandor/game/world/GameWorld.java b/Server/src/org/crandor/game/world/GameWorld.java index 953e3e7d8..58465cfd3 100644 --- a/Server/src/org/crandor/game/world/GameWorld.java +++ b/Server/src/org/crandor/game/world/GameWorld.java @@ -312,6 +312,7 @@ public final class GameWorld { LandscapeParser.addGameObject(new GameObject(724, new Location(2341, 3693, 0), 10, 0)); //LandscapeParser.addGameObject(new GameObject(6097, new Location(2343, 3690, 0), 10, 0)); LandscapeParser.addGameObject(new GameObject(1317, new Location(2343, 3690, 0), 10, 3)); + LandscapeParser.addGameObject(new GameObject(1814, new Location(3090, 3475, 0), 4, 0));//edgeville lever NPC[] npcs = new NPC[]{new NPC(494, new Location(2327, 3687, 0))}; for (NPC npc : npcs) { npc.setDirection(Direction.EAST); diff --git a/Server/src/org/crandor/net/IoEventHandler.java b/Server/src/org/crandor/net/IoEventHandler.java index 1fe483b09..c7061672d 100644 --- a/Server/src/org/crandor/net/IoEventHandler.java +++ b/Server/src/org/crandor/net/IoEventHandler.java @@ -58,6 +58,7 @@ public 
class IoEventHandler { ByteBuffer buffer = ByteBuffer.allocate(100_000); IoSession session = (IoSession) key.attachment(); if (channel.read(buffer) == -1) { + //just when a client closes their client, nothing to worry about. throw new IOException("An existing connection was disconnected!"); } buffer.flip(); diff --git a/Server/src/org/crandor/net/registry/AccountRegister.java b/Server/src/org/crandor/net/registry/AccountRegister.java index 5186b919a..f42e88aeb 100644 --- a/Server/src/org/crandor/net/registry/AccountRegister.java +++ b/Server/src/org/crandor/net/registry/AccountRegister.java @@ -1,5 +1,15 @@ package org.crandor.net.registry; +import java.nio.ByteBuffer; +import java.sql.Connection; +import java.sql.Date; +import java.sql.PreparedStatement; +import java.sql.SQLException; +import java.sql.Timestamp; +import java.util.regex.Matcher; +import java.util.regex.Pattern; + +import org.crandor.ServerConstants; import org.crandor.cache.misc.buffer.ByteBufferUtils; import org.crandor.game.node.entity.player.info.portal.PlayerSQLManager; import org.crandor.game.system.SystemManager; @@ -9,12 +19,6 @@ import org.crandor.game.system.task.TaskExecutor; import org.crandor.net.Constants; import org.crandor.net.IoSession; import org.crandor.net.event.LoginReadEvent; -import org.keldagrim.ServerConstants; - -import java.nio.ByteBuffer; -import java.sql.*; -import java.util.regex.Matcher; -import java.util.regex.Pattern; /** * Handles the registry of new accounts. 
@@ -163,11 +167,7 @@ public class AccountRegister extends SQLEntryHandler { //If the management server's settings register new users with the server's clan chat //I believe if there was no entry there would be errors during the registration, hence a null entry if the setting is off - if (ServerConstants.NEW_PLAYER_DEFAULT_CLAN == true){ - statement.setString(7,ServerConstants.SERVER_NAME); - }else{ - statement.setString(7,null); - } + statement.executeUpdate(); SQLManager.close(statement.getConnection()); } diff --git a/Server/src/plugin/interaction/object/WildernessLeverPlugin.java b/Server/src/plugin/interaction/object/WildernessLeverPlugin.java index 682fa6d1f..d04aaadf2 100644 --- a/Server/src/plugin/interaction/object/WildernessLeverPlugin.java +++ b/Server/src/plugin/interaction/object/WildernessLeverPlugin.java @@ -90,6 +90,7 @@ public final class WildernessLeverPlugin extends OptionHandler { } } }, + MAGE_BANK("mage's cave", 5959, 5960, Location.create(3090, 3956, 0), Location.create(2539, 4712, 0)), ARENA("arena", 9706, 9707, Location.create(3105, 3956, 0), Location.create(3105, 3951, 0)) { @Override public boolean canPull(Player player, GameObject object) { diff --git a/Server/src/plugin/tutorial/MasterChefDialogue.java b/Server/src/plugin/tutorial/MasterChefDialogue.java index 66ba807b1..00678759e 100644 --- a/Server/src/plugin/tutorial/MasterChefDialogue.java +++ b/Server/src/plugin/tutorial/MasterChefDialogue.java @@ -43,16 +43,26 @@ public class MasterChefDialogue extends DialoguePlugin { Component.setUnclosable(player, interpreter.sendDialogues(npc, FacialExpression.NO_EXPRESSION, "Ahh! Welcome, newcomer. I am the Master Chef, Lev. 
It", "is here I will teach you how to cook food truly fit for a", "king.")); break; case 20: - Component.setUnclosable(player, interpreter.sendDialogues(npc, FacialExpression.NO_EXPRESSION, "I see you have lost your pot of flour and bucket of water,", "No worries i will supply you with more.")); - if (player.getInventory().freeSlots() >= 2) { - player.getInventory().add(new Item(1933)); - player.getInventory().add(new Item(1929)); + if (player.getInventory().containsAll(1933,1929)) { + Component.setUnclosable(player, interpreter.sendDialogues(npc, FacialExpression.NO_EXPRESSION, "Mix together the flour and water to form a dough.")); + stage = 1; + + } else if (player.getInventory().containsItem(new Item(2307))) { + Component.setUnclosable(player, interpreter.sendDialogues(npc, FacialExpression.NO_EXPRESSION, "You already have some dough, no need", "to make more.")); stage = 1; } else { - Component.setUnclosable(player, interpreter.sendDialogue("You don't have enough inventory space.")); - stage = 99; + Component.setUnclosable(player, interpreter.sendDialogues(npc, FacialExpression.NO_EXPRESSION, "I see you have lost your pot of flour and bucket of water,", "No worries i will supply you with more.")); + if (player.getInventory().freeSlots() >= 2) { + player.getInventory().add(new Item(1933)); + player.getInventory().add(new Item(1929)); + stage = 1; + } else { + Component.setUnclosable(player, interpreter.sendDialogue("You don't have enough inventory space.")); + stage = 99; + } + break; } - break; + case 19: if (!player.getInventory().contains(1929, 1) && !player.getInventory().containItems(1933)) { if (player.getInventory().hasSpaceFor(new Item(1929, 1)) && player.getInventory().hasSpaceFor(new Item(1933, 1))) { diff --git a/Server/src/plugin/tutorial/TutorialCompletionDialogue.java b/Server/src/plugin/tutorial/TutorialCompletionDialogue.java index c097c59c6..a350780d1 100644 --- a/Server/src/plugin/tutorial/TutorialCompletionDialogue.java +++ 
b/Server/src/plugin/tutorial/TutorialCompletionDialogue.java @@ -1,6 +1,5 @@ package plugin.tutorial; -import org.crandor.plugin.InitializablePlugin; import org.crandor.game.content.dialogue.DialoguePlugin; import org.crandor.game.content.dialogue.FacialExpression; import org.crandor.game.content.global.tutorial.TutorialSession; @@ -15,7 +14,7 @@ import org.crandor.game.world.GameWorld; import org.crandor.game.world.map.Location; import org.crandor.net.amsc.MSPacketRepository; import org.crandor.net.amsc.WorldCommunicator; -import org.keldagrim.ServerConstants; +import org.crandor.plugin.InitializablePlugin; /** * Handles the tutorial completition dialogue (skippy, magic instructor) @@ -306,12 +305,7 @@ public class TutorialCompletionDialogue extends DialoguePlugin { //Appending the welcome message and some other stuff player.getPacketDispatch().sendMessage("Welcome to " + GameWorld.getName() + "."); - //If the management server's settings register new users with the server's clan chat, we would have to simulate joining a clan - if (ServerConstants.NEW_PLAYER_DEFAULT_CLAN == true) { - player.getPacketDispatch().sendMessage("Attempting to join channel...:clan:"); - player.getPacketDispatch().sendMessage("Now talking in clan channel " + ServerConstants.SERVER_NAME + ":clan:"); - player.getPacketDispatch().sendMessage("To talk, start each line of chat with the / symbol.:clan:"); - } + player.unlock(); TutorialSession.getExtension(player).setStage(TutorialSession.MAX_STAGE + 1); diff --git a/Tools/Arios Editor/.classpath b/Tools/Arios Editor/.classpath new file mode 100644 index 000000000..6251336b2 --- /dev/null +++ b/Tools/Arios Editor/.classpath @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/Tools/Arios Editor/.project b/Tools/Arios Editor/.project new file mode 100644 index 000000000..e69b63f9e --- /dev/null +++ b/Tools/Arios Editor/.project @@ -0,0 +1,17 @@ + + + Arios Editor + + + + + + org.eclipse.jdt.core.javabuilder + + + + + + 
org.eclipse.jdt.core.javanature + + diff --git a/Tools/Arios Editor/.settings/org.eclipse.jdt.core.prefs b/Tools/Arios Editor/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 000000000..8b9e3c71b --- /dev/null +++ b/Tools/Arios Editor/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,10 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=13 +org.eclipse.jdt.core.compiler.compliance=13 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning +org.eclipse.jdt.core.compiler.release=enabled +org.eclipse.jdt.core.compiler.source=13 diff --git a/Tools/Arios Editor/Main.bat b/Tools/Arios Editor/Main.bat new file mode 100644 index 000000000..8aadb0854 --- /dev/null +++ b/Tools/Arios Editor/Main.bat @@ -0,0 +1,3 @@ +@echo off +java -server -Xms512m -Xmx1536m -XX:NewSize=32m -XX:MaxPermSize=128m -XX:+UseConcMarkSweepGC -XX:+ExplicitGCInvokesConcurrent -XX:+AggressiveOpts -cp bin;data/libs/*; org.arios.Runner +pause \ No newline at end of file diff --git a/Tools/Arios Editor/bin/org/arios/Runner.class b/Tools/Arios Editor/bin/org/arios/Runner.class new file mode 100644 index 000000000..2ab19623b Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/Runner.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/Cache.class b/Tools/Arios Editor/bin/org/arios/cache/Cache.class new file mode 100644 index 000000000..ddf66e2b2 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/Cache.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/CacheFile.class b/Tools/Arios Editor/bin/org/arios/cache/CacheFile.class new file mode 100644 index 000000000..0a5eab8d3 Binary files /dev/null and b/Tools/Arios 
Editor/bin/org/arios/cache/CacheFile.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/CacheFileManager.class b/Tools/Arios Editor/bin/org/arios/cache/CacheFileManager.class new file mode 100644 index 000000000..59932fef5 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/CacheFileManager.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/ServerStore.class b/Tools/Arios Editor/bin/org/arios/cache/ServerStore.class new file mode 100644 index 000000000..299c09a90 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/ServerStore.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/StoreFile.class b/Tools/Arios Editor/bin/org/arios/cache/StoreFile.class new file mode 100644 index 000000000..3032cd6a4 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/StoreFile.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/Definition.class b/Tools/Arios Editor/bin/org/arios/cache/def/Definition.class new file mode 100644 index 000000000..2c8425603 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/Definition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/impl/AnimationDefinition.class b/Tools/Arios Editor/bin/org/arios/cache/def/impl/AnimationDefinition.class new file mode 100644 index 000000000..af2b0b6e0 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/impl/AnimationDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/impl/GraphicDefinition.class b/Tools/Arios Editor/bin/org/arios/cache/def/impl/GraphicDefinition.class new file mode 100644 index 000000000..55cffbac0 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/impl/GraphicDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/impl/ItemDefinition.class b/Tools/Arios Editor/bin/org/arios/cache/def/impl/ItemDefinition.class new file mode 100644 index 000000000..79485b7f5 Binary files 
/dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/impl/ItemDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/impl/NPCDefinition.class b/Tools/Arios Editor/bin/org/arios/cache/def/impl/NPCDefinition.class new file mode 100644 index 000000000..4930a13ee Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/impl/NPCDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/def/impl/ObjectDefinition.class b/Tools/Arios Editor/bin/org/arios/cache/def/impl/ObjectDefinition.class new file mode 100644 index 000000000..3343fa180 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/def/impl/ObjectDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2BlockEntry.class b/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2BlockEntry.class new file mode 100644 index 000000000..3533f8b3c Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2BlockEntry.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2Decompressor.class b/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2Decompressor.class new file mode 100644 index 000000000..26f13f59a Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/BZip2Decompressor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/ByteBufferUtils.class b/Tools/Arios Editor/bin/org/arios/cache/misc/ByteBufferUtils.class new file mode 100644 index 000000000..81f48103f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/ByteBufferUtils.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/Container.class b/Tools/Arios Editor/bin/org/arios/cache/misc/Container.class new file mode 100644 index 000000000..bd5f9412e Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/Container.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/ContainersInformation.class b/Tools/Arios 
Editor/bin/org/arios/cache/misc/ContainersInformation.class new file mode 100644 index 000000000..62657a42e Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/ContainersInformation.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/DefinitionSize.class b/Tools/Arios Editor/bin/org/arios/cache/misc/DefinitionSize.class new file mode 100644 index 000000000..40a40cf4d Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/DefinitionSize.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/FilesContainer.class b/Tools/Arios Editor/bin/org/arios/cache/misc/FilesContainer.class new file mode 100644 index 000000000..eecdb97a5 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/FilesContainer.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/GZipCompressor.class b/Tools/Arios Editor/bin/org/arios/cache/misc/GZipCompressor.class new file mode 100644 index 000000000..e3232b23c Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/GZipCompressor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/GZipDecompressor.class b/Tools/Arios Editor/bin/org/arios/cache/misc/GZipDecompressor.class new file mode 100644 index 000000000..21d33b10c Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/GZipDecompressor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/LandscapeCache.class b/Tools/Arios Editor/bin/org/arios/cache/misc/LandscapeCache.class new file mode 100644 index 000000000..a1c89b5fa Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/LandscapeCache.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/Stream.class b/Tools/Arios Editor/bin/org/arios/cache/misc/Stream.class new file mode 100644 index 000000000..d72eb5210 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/Stream.class differ diff --git a/Tools/Arios 
Editor/bin/org/arios/cache/misc/StringUtils.class b/Tools/Arios Editor/bin/org/arios/cache/misc/StringUtils.class new file mode 100644 index 000000000..4bf9c789f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/StringUtils.class differ diff --git a/Tools/Arios Editor/bin/org/arios/cache/misc/XTEACryption.class b/Tools/Arios Editor/bin/org/arios/cache/misc/XTEACryption.class new file mode 100644 index 000000000..7eb54ba1f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/cache/misc/XTEACryption.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame$1.class b/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame$1.class new file mode 100644 index 000000000..e1b5434ee Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame.class b/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame.class new file mode 100644 index 000000000..46b5a8e85 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/WorkFrame.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/WorkLogger.class b/Tools/Arios Editor/bin/org/arios/workspace/WorkLogger.class new file mode 100644 index 000000000..49ff889c1 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/WorkLogger.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/WorkSettings.class b/Tools/Arios Editor/bin/org/arios/workspace/WorkSettings.class new file mode 100644 index 000000000..6526865f8 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/WorkSettings.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/WorkSpace.class b/Tools/Arios Editor/bin/org/arios/workspace/WorkSpace.class new file mode 100644 index 000000000..42ddf4261 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/WorkSpace.class differ diff --git a/Tools/Arios 
Editor/bin/org/arios/workspace/editor/EditorTab.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/EditorTab.class new file mode 100644 index 000000000..891a66364 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/EditorTab.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/EditorType.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/EditorType.class new file mode 100644 index 000000000..f4af0f88c Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/EditorType.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$1.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$1.class new file mode 100644 index 000000000..ffbe57ef5 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$2.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$2.class new file mode 100644 index 000000000..37234359b Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor$2.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor.class new file mode 100644 index 000000000..9e9d114e4 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeEditor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList$ListSelectionHandler.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList$ListSelectionHandler.class new file mode 100644 index 000000000..b509dbc28 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList$ListSelectionHandler.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList$NodeListRenderer.class b/Tools/Arios 
Editor/bin/org/arios/workspace/editor/NodeList$NodeListRenderer.class new file mode 100644 index 000000000..42f5ec57f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList$NodeListRenderer.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList.class new file mode 100644 index 000000000..fe5657492 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeList.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel$1.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel$1.class new file mode 100644 index 000000000..397948ca5 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel.class new file mode 100644 index 000000000..3bdf4bed4 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodePanel.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeTable.class b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeTable.class new file mode 100644 index 000000000..1575d99b8 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/editor/NodeTable.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/Configuration.class b/Tools/Arios Editor/bin/org/arios/workspace/node/Configuration.class new file mode 100644 index 000000000..c29382f28 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/Configuration.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/Node.class b/Tools/Arios Editor/bin/org/arios/workspace/node/Node.class new file mode 100644 index 000000000..0d581cc74 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/Node.class 
differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$1.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$1.class new file mode 100644 index 000000000..9f3f1f050 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$2.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$2.class new file mode 100644 index 000000000..b7fe8ee2b Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$2.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$3.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$3.class new file mode 100644 index 000000000..62e1c5fdd Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$3.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$4.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$4.class new file mode 100644 index 000000000..1ac1d4803 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$4.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$5.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$5.class new file mode 100644 index 000000000..0589037e7 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$5.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$6.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$6.class new file mode 100644 index 000000000..f04304750 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$6.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$7.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$7.class new file mode 100644 index 000000000..99b1d9daf Binary files 
/dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$7.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$8.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$8.class new file mode 100644 index 000000000..b7cb7e42f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item$8.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item.class new file mode 100644 index 000000000..f6a5f7b72 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/Item.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemEditor.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemEditor.class new file mode 100644 index 000000000..e1d4eb863 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemEditor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemWrapper.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemWrapper.class new file mode 100644 index 000000000..a7faa16f5 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/ItemWrapper.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop$ShopDefinition.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop$ShopDefinition.class new file mode 100644 index 000000000..6ea3efee9 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop$ShopDefinition.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop.class new file mode 100644 index 000000000..30dfe4618 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/Shop.class differ diff --git a/Tools/Arios 
Editor/bin/org/arios/workspace/node/item/shop/ShopEditor.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopEditor.class new file mode 100644 index 000000000..678e0a320 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopEditor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopManager.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopManager.class new file mode 100644 index 000000000..6d5f7fbe7 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopManager.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopPanel.class b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopPanel.class new file mode 100644 index 000000000..a5401f77f Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/item/shop/ShopPanel.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/DropFrequency.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/DropFrequency.class new file mode 100644 index 000000000..3e462f545 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/DropFrequency.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC$1.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC$1.class new file mode 100644 index 000000000..6d9973761 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC.class new file mode 100644 index 000000000..20fa3c99a Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPC.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDrop.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDrop.class new file mode 
100644 index 000000000..98789d18b Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDrop.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager$DropTable.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager$DropTable.class new file mode 100644 index 000000000..ef58f1a92 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager$DropTable.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager.class new file mode 100644 index 000000000..31578a8cc Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropManager.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$1.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$1.class new file mode 100644 index 000000000..eea6d83a7 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$1.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$2.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$2.class new file mode 100644 index 000000000..10c9f6784 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$2.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$3.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$3.class new file mode 100644 index 000000000..452652390 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable$3.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable.class b/Tools/Arios 
Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable.class new file mode 100644 index 000000000..73056d2e8 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel$DropTable.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel.class new file mode 100644 index 000000000..5d7c9f320 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCDropPanel.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCEditor.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCEditor.class new file mode 100644 index 000000000..fc5d3b1e0 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/NPCEditor.class differ diff --git a/Tools/Arios Editor/bin/org/arios/workspace/node/npc/TableType.class b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/TableType.class new file mode 100644 index 000000000..e12df25a0 Binary files /dev/null and b/Tools/Arios Editor/bin/org/arios/workspace/node/npc/TableType.class differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-05-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-05-57.arios new file mode 100644 index 000000000..d37f2548c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-05-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-07-41.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-07-41.arios new file mode 100644 index 000000000..6146f59b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-07-41.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-08-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-08-10.arios new file mode 100644 index 000000000..a3e87b4ae Binary files /dev/null and 
b/Tools/Arios Editor/data/backup/static_cache_2014-08-17 11-08-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-00-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-00-13.arios new file mode 100644 index 000000000..fe38903f8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-00-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-01-12.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-01-12.arios new file mode 100644 index 000000000..ed8220541 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-01-12.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-03.arios new file mode 100644 index 000000000..e8b525af6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-40.arios new file mode 100644 index 000000000..e8b525af6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-02-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-04-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-04-59.arios new file mode 100644 index 000000000..f477de227 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-04-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-05-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-05-47.arios new file mode 100644 index 000000000..e7dc7efe3 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-05-47.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-18 01-06-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-06-55.arios new file mode 100644 index 000000000..c4a499d26 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-06-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-07-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-07-22.arios new file mode 100644 index 000000000..e1bc3df48 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-07-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-11.arios new file mode 100644 index 000000000..d8919c31e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-45.arios new file mode 100644 index 000000000..6d45960d9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-10-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-11-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-11-15.arios new file mode 100644 index 000000000..277e4919c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-11-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-01.arios new file mode 100644 index 000000000..89c22e0cb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-32.arios new file mode 
100644 index 000000000..aa039130d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-56.arios new file mode 100644 index 000000000..5b0690a0c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-12-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-13-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-13-04.arios new file mode 100644 index 000000000..5b0690a0c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-13-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-14-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-14-36.arios new file mode 100644 index 000000000..be96f5505 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-14-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-15-46.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-15-46.arios new file mode 100644 index 000000000..0cec8159b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-15-46.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-17-33.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-17-33.arios new file mode 100644 index 000000000..5e4eeacf9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-17-33.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-18-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-18-34.arios new file mode 100644 index 000000000..b059bfb0b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-18-34.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-20-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-20-27.arios new file mode 100644 index 000000000..a2f73a5b8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-20-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-21-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-21-23.arios new file mode 100644 index 000000000..bc4018ed6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-21-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-22-37.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-22-37.arios new file mode 100644 index 000000000..b7f6609d6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-22-37.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-24-28.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-24-28.arios new file mode 100644 index 000000000..9f7fc5fc0 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-24-28.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-26-58.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-26-58.arios new file mode 100644 index 000000000..0c062b7f2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-26-58.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-28-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-28-43.arios new file mode 100644 index 000000000..892bf9ee9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-28-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-29-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 
01-29-15.arios new file mode 100644 index 000000000..cc332e25a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-29-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-07.arios new file mode 100644 index 000000000..225cacbd4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-39.arios new file mode 100644 index 000000000..7519e0dac Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-31-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-32-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-32-32.arios new file mode 100644 index 000000000..cc332e25a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-32-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-51-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-51-50.arios new file mode 100644 index 000000000..9583a0456 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 01-51-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-01-09.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-01-09.arios new file mode 100644 index 000000000..6146f59b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-01-09.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-01-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-01-55.arios new file mode 100644 index 000000000..ffe5a2aa4 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-08-18 12-01-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-03-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-03-27.arios new file mode 100644 index 000000000..ffe5a2aa4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-03-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-11-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-11-27.arios new file mode 100644 index 000000000..ffe5a2aa4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-11-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-13-52.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-13-52.arios new file mode 100644 index 000000000..d44973628 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-13-52.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-34-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-34-29.arios new file mode 100644 index 000000000..a26b55b77 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-34-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-40-31.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-40-31.arios new file mode 100644 index 000000000..4aef3d01a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-40-31.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-45-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-45-59.arios new file mode 100644 index 000000000..fc06aaea8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-45-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 
12-46-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-46-05.arios new file mode 100644 index 000000000..fc06aaea8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-46-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-46-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-46-11.arios new file mode 100644 index 000000000..fc06aaea8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-46-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-47-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-47-26.arios new file mode 100644 index 000000000..fc06aaea8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-47-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-11.arios new file mode 100644 index 000000000..f4aac2ae2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-53.arios new file mode 100644 index 000000000..e180eb8d6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-48-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-49-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-49-43.arios new file mode 100644 index 000000000..d9a6e7af0 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-49-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-50-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-50-08.arios new file mode 100644 index 000000000..caf74d185 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-50-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-51-33.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-51-33.arios new file mode 100644 index 000000000..5524c4d93 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-51-33.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-54-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-54-22.arios new file mode 100644 index 000000000..b8dd0343f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-54-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-55-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-55-47.arios new file mode 100644 index 000000000..170ae7025 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-55-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-57-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-57-13.arios new file mode 100644 index 000000000..99617565e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-57-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-58-24.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-58-24.arios new file mode 100644 index 000000000..bf26e2658 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-58-24.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-59-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-59-03.arios new file mode 100644 index 000000000..6187ccb05 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-18 12-59-03.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-19 01-17-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 01-17-50.arios new file mode 100644 index 000000000..cce61f82f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 01-17-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 01-22-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 01-22-57.arios new file mode 100644 index 000000000..e945b7540 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 01-22-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-29-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-29-17.arios new file mode 100644 index 000000000..ab5108eeb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-29-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-30-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-30-20.arios new file mode 100644 index 000000000..d3bbe536b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-30-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-32-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-32-50.arios new file mode 100644 index 000000000..97b602384 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-32-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-42-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-42-45.arios new file mode 100644 index 000000000..97b602384 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-42-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-23.arios new file mode 
100644 index 000000000..8b8f60899 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-30.arios new file mode 100644 index 000000000..91351649e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-45-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-59-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-59-36.arios new file mode 100644 index 000000000..97b602384 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 10-59-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-20.arios new file mode 100644 index 000000000..f87cab2db Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-34.arios new file mode 100644 index 000000000..f1daab0f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-00-34.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-03-52.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-03-52.arios new file mode 100644 index 000000000..737bf6719 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-03-52.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-02.arios new file mode 100644 index 000000000..6d20d6263 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-02.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-10.arios new file mode 100644 index 000000000..6bf662d5e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-05-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-07-41.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-07-41.arios new file mode 100644 index 000000000..6bf662d5e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-07-41.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-08.arios new file mode 100644 index 000000000..b1d1bf954 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-36.arios new file mode 100644 index 000000000..f582b88ab Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-08-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-09-42.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-09-42.arios new file mode 100644 index 000000000..929e0333b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-09-42.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-17-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-17-45.arios new file mode 100644 index 000000000..29915cea2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-17-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-21-12.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 
11-21-12.arios new file mode 100644 index 000000000..c122873a2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-21-12.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-38.arios new file mode 100644 index 000000000..942f86845 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-51.arios new file mode 100644 index 000000000..5857503f3 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-23-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-41-42.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-41-42.arios new file mode 100644 index 000000000..5857503f3 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 11-41-42.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-08-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-08-26.arios new file mode 100644 index 000000000..cc332e25a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-08-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-50-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-50-20.arios new file mode 100644 index 000000000..3b9e82912 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-50-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-58-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-58-17.arios new file mode 100644 index 000000000..abc2b3bf6 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-08-19 12-58-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-59-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-59-18.arios new file mode 100644 index 000000000..816371722 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-19 12-59-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-03-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-03-54.arios new file mode 100644 index 000000000..7cf84995b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-03-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-02.arios new file mode 100644 index 000000000..546ee2805 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-10.arios new file mode 100644 index 000000000..ebca3a200 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-04-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-06-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-06-04.arios new file mode 100644 index 000000000..29c82e183 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-06-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-16-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-16-27.arios new file mode 100644 index 000000000..a6d2c1fbc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 01-16-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 
04-36-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-36-39.arios new file mode 100644 index 000000000..4aeaa4aee Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-36-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-37-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-37-11.arios new file mode 100644 index 000000000..6761bbc29 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-37-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-38-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-38-02.arios new file mode 100644 index 000000000..6761bbc29 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-38-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-40-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-40-32.arios new file mode 100644 index 000000000..6761bbc29 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-40-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-42-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-42-06.arios new file mode 100644 index 000000000..3ee01515b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-42-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-46-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-46-08.arios new file mode 100644 index 000000000..a1982fe2e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-46-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-48-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-48-08.arios new file mode 100644 index 000000000..d4ebfc8c3 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-48-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-49-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-49-08.arios new file mode 100644 index 000000000..0237fcf96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-49-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-04.arios new file mode 100644 index 000000000..c9d7cfcc2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-08.arios new file mode 100644 index 000000000..320666fc1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-51-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-55-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-55-45.arios new file mode 100644 index 000000000..178846e2d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-55-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-56-49.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-56-49.arios new file mode 100644 index 000000000..2da59bdc4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-56-49.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-57-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-57-01.arios new file mode 100644 index 000000000..1fa99663d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-57-01.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-20 04-58-58.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-58-58.arios new file mode 100644 index 000000000..fb1fc7206 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-58-58.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-59-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-59-04.arios new file mode 100644 index 000000000..2dd272aa7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 04-59-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-00-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-00-40.arios new file mode 100644 index 000000000..bff8dd704 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-00-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-11.arios new file mode 100644 index 000000000..1a219aa2f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-23.arios new file mode 100644 index 000000000..fba2d38c7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-37.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-37.arios new file mode 100644 index 000000000..fba2d38c7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-37.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-46.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-46.arios new file mode 
100644 index 000000000..da6502236 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 05-01-46.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-09-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-09-54.arios new file mode 100644 index 000000000..5fa6eea07 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-09-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-14-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-14-47.arios new file mode 100644 index 000000000..066831502 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-14-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-15-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-15-36.arios new file mode 100644 index 000000000..65fbe7648 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 07-15-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 08-49-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 08-49-04.arios new file mode 100644 index 000000000..d9fbed7f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 08-49-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-03.arios new file mode 100644 index 000000000..8b167ee3e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-47.arios new file mode 100644 index 000000000..9b9d876bb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-02-47.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-17-31.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-17-31.arios new file mode 100644 index 000000000..dc63cbd01 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-17-31.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-38-00.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-38-00.arios new file mode 100644 index 000000000..7d30cd98f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 09-38-00.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-04.arios new file mode 100644 index 000000000..d8c714b20 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-40.arios new file mode 100644 index 000000000..a6d2c1fbc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 10-38-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 11-48-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 11-48-18.arios new file mode 100644 index 000000000..93d7ea1fc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 11-48-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-02-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-02-01.arios new file mode 100644 index 000000000..92b57d86e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-02-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-02-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 
12-02-05.arios new file mode 100644 index 000000000..b0e79b861 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-02-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-22.arios new file mode 100644 index 000000000..b0e79b861 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-59.arios new file mode 100644 index 000000000..e44bd027f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-04-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-50-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-50-43.arios new file mode 100644 index 000000000..73fa688ce Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-50-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-52-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-52-30.arios new file mode 100644 index 000000000..abd6fbbe5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-52-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-53-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-53-17.arios new file mode 100644 index 000000000..bf39aa4cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-20 12-53-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 01-03-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 01-03-50.arios new file mode 100644 index 000000000..1809edce4 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-08-21 01-03-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-26-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-26-30.arios new file mode 100644 index 000000000..d33612a15 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-26-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-27-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-27-22.arios new file mode 100644 index 000000000..0b23e8859 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-27-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-33-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-33-51.arios new file mode 100644 index 000000000..c1093a37f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-33-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-48-42.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-48-42.arios new file mode 100644 index 000000000..ca4ec07fa Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 02-48-42.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-09.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-09.arios new file mode 100644 index 000000000..5c19242e2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-09.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-20.arios new file mode 100644 index 000000000..7f97f16fb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 
08-27-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-36.arios new file mode 100644 index 000000000..e18c616c9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 08-27-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-44-58.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-44-58.arios new file mode 100644 index 000000000..b39fab749 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-44-58.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-45-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-45-51.arios new file mode 100644 index 000000000..82727e693 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-45-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-22.arios new file mode 100644 index 000000000..afc15f3c6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-54.arios new file mode 100644 index 000000000..722bbd475 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-46-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-47-41.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-47-41.arios new file mode 100644 index 000000000..87ac448ba Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-47-41.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-51-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-51-35.arios new file mode 100644 index 000000000..ca4ec07fa Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 10-51-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-21 11-03-48.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 11-03-48.arios new file mode 100644 index 000000000..bba9a0dad Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-21 11-03-48.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-30-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-30-25.arios new file mode 100644 index 000000000..397eb2f78 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-30-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-31-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-31-53.arios new file mode 100644 index 000000000..f5315e5f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-31-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-18.arios new file mode 100644 index 000000000..8801ff30b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-37.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-37.arios new file mode 100644 index 000000000..319601aac Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 01-32-37.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-22 09-34-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 09-34-01.arios new file mode 100644 index 000000000..d593b181f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-22 09-34-01.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-23 08-21-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 08-21-05.arios new file mode 100644 index 000000000..360cf1259 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 08-21-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-10.arios new file mode 100644 index 000000000..6cafd6748 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-17.arios new file mode 100644 index 000000000..7aecdfc47 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-24.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-24.arios new file mode 100644 index 000000000..7aecdfc47 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-24.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-29.arios new file mode 100644 index 000000000..047b77778 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-51.arios new file mode 100644 index 000000000..89f6e969c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-52.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-52.arios new file mode 
100644 index 000000000..862206d4d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-23-52.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-01.arios new file mode 100644 index 000000000..862206d4d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-07.arios new file mode 100644 index 000000000..b2392d736 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 09-24-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-35-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-35-55.arios new file mode 100644 index 000000000..f988b060b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-35-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-36-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-36-10.arios new file mode 100644 index 000000000..770981d8d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-36-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-37-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-37-04.arios new file mode 100644 index 000000000..9c5bd0a02 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-23 12-37-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-01.arios new file mode 100644 index 000000000..1d37c9bd1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-01.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-15.arios new file mode 100644 index 000000000..b432419cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 10-59-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-18.arios new file mode 100644 index 000000000..b432419cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-34.arios new file mode 100644 index 000000000..b432419cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-00-34.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-23-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-23-25.arios new file mode 100644 index 000000000..b432419cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-23-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-24-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-24-38.arios new file mode 100644 index 000000000..d10017705 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-24-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-25-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-25-07.arios new file mode 100644 index 000000000..b432419cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-25-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-25-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 
11-25-15.arios new file mode 100644 index 000000000..fb18b06be Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-25-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-43.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-44.arios new file mode 100644 index 000000000..5f4f1ea5d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-36-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-15.arios new file mode 100644 index 000000000..5f4f1ea5d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-44.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-37-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-51.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-53.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-08-24 11-38-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-58.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-58.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-38-58.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-40-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-40-39.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-40-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-41-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-41-56.arios new file mode 100644 index 000000000..60a054b18 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-41-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-42-52.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-42-52.arios new file mode 100644 index 000000000..2c6e749b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-42-52.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-04.arios new file mode 100644 index 000000000..6f3de22f2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-30.arios new file mode 100644 index 000000000..583609aea Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-43-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 
11-44-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-44-39.arios new file mode 100644 index 000000000..c34867cdc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-44-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-45-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-45-03.arios new file mode 100644 index 000000000..7b59ecf0d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-45-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-38.arios new file mode 100644 index 000000000..d4d2403a7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-51.arios new file mode 100644 index 000000000..8d903be89 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-52-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-53-12.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-53-12.arios new file mode 100644 index 000000000..0a36ebcbd Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-53-12.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-54-14.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-54-14.arios new file mode 100644 index 000000000..9cac9927f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-54-14.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-01.arios new file mode 100644 index 000000000..924c80ec2 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-19.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-19.arios new file mode 100644 index 000000000..c8c77fbe8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-19.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-36.arios new file mode 100644 index 000000000..6e1f054e8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-46.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-46.arios new file mode 100644 index 000000000..a32ca874c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-46.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-59.arios new file mode 100644 index 000000000..87369362c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-55-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-18.arios new file mode 100644 index 000000000..9226cbc7d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-27.arios new file mode 100644 index 000000000..f03beb0f7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-27.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-24 11-56-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-35.arios new file mode 100644 index 000000000..cd732f37c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-47.arios new file mode 100644 index 000000000..de3d5a305 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-24 11-56-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-03.arios new file mode 100644 index 000000000..d50edcb73 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-22.arios new file mode 100644 index 000000000..1de405c19 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-11-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-16-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-16-36.arios new file mode 100644 index 000000000..730c0f2a7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-16-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-26.arios new file mode 100644 index 000000000..74ed904b5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-36.arios new file mode 
100644 index 000000000..8b09a1a6f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-59.arios new file mode 100644 index 000000000..8ef8655da Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-17-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-18-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-18-29.arios new file mode 100644 index 000000000..8ef8655da Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-25 12-18-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-45.arios new file mode 100644 index 000000000..8dc66fd66 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-59.arios new file mode 100644 index 000000000..8dc66fd66 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-32-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-03.arios new file mode 100644 index 000000000..8dc66fd66 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-51.arios new file mode 100644 index 000000000..8dc66fd66 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-34-51.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-06.arios new file mode 100644 index 000000000..210409c4f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-44.arios new file mode 100644 index 000000000..210409c4f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 09-35-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-42-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-42-35.arios new file mode 100644 index 000000000..c1f1fc4b6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-42-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-06.arios new file mode 100644 index 000000000..be26c08e2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-54.arios new file mode 100644 index 000000000..e02be1567 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-43-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-16.arios new file mode 100644 index 000000000..acfa77142 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 
10-44-22.arios new file mode 100644 index 000000000..144d32a69 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-30.arios new file mode 100644 index 000000000..32de226d5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-55.arios new file mode 100644 index 000000000..b966e15f4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-44-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-22.arios new file mode 100644 index 000000000..b966e15f4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-43.arios new file mode 100644 index 000000000..de1f20bc7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-49-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-50-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-50-30.arios new file mode 100644 index 000000000..52dca1a56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-50-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-51-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-51-16.arios new file mode 100644 index 000000000..6d1964487 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-08-26 10-51-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-06.arios new file mode 100644 index 000000000..642ad2d7f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-22.arios new file mode 100644 index 000000000..1d70fbbbc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-28.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-28.arios new file mode 100644 index 000000000..7f8dc034f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-52-28.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-06.arios new file mode 100644 index 000000000..79319c458 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-18.arios new file mode 100644 index 000000000..dcb2b26ec Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-56.arios new file mode 100644 index 000000000..bd8c763b9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-53-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 
10-54-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-04.arios new file mode 100644 index 000000000..17242be84 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-13.arios new file mode 100644 index 000000000..9baf947b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-15.arios new file mode 100644 index 000000000..9baf947b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 10-54-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-06-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-06-18.arios new file mode 100644 index 000000000..9baf947b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-06-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-28.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-28.arios new file mode 100644 index 000000000..0ea4e0e43 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-28.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-40.arios new file mode 100644 index 000000000..475eadc74 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-18-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-29.arios new file mode 100644 index 000000000..a3dbbb4a7 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-56.arios new file mode 100644 index 000000000..8765032a4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-20-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-21-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-21-05.arios new file mode 100644 index 000000000..1bd148638 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-21-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-19.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-19.arios new file mode 100644 index 000000000..1bd148638 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-19.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-25.arios new file mode 100644 index 000000000..0798227da Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-35.arios new file mode 100644 index 000000000..45e2bcef0 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-26 11-42-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-27 01-54-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-27 01-54-43.arios new file mode 100644 index 000000000..eaa8c9ee7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-27 01-54-43.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-08-30 02-12-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 02-12-29.arios new file mode 100644 index 000000000..9685f5fe1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 02-12-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 02-14-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 02-14-39.arios new file mode 100644 index 000000000..8c93626f7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 02-14-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-18-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-18-56.arios new file mode 100644 index 000000000..4d49a840a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-18-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-07.arios new file mode 100644 index 000000000..639e0c5ad Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-54.arios new file mode 100644 index 000000000..1a1794b85 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-19-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-27-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-27-18.arios new file mode 100644 index 000000000..24190625a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-27-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-38-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-38-35.arios new file mode 
100644 index 000000000..24190625a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-38-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-40-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-40-16.arios new file mode 100644 index 000000000..24190625a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-40-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-42-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-42-50.arios new file mode 100644 index 000000000..2614bd1dc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-42-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-43-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-43-02.arios new file mode 100644 index 000000000..d0557b59b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-43-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-52-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-52-45.arios new file mode 100644 index 000000000..329c3b5d4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 11-52-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-48.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-48.arios new file mode 100644 index 000000000..980a72846 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-48.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-55.arios new file mode 100644 index 000000000..c63bb0fcf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-35-55.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-36-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-36-05.arios new file mode 100644 index 000000000..c63bb0fcf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-36-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-39-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-39-06.arios new file mode 100644 index 000000000..a37806054 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-08-30 12-39-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-11-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-11-51.arios new file mode 100644 index 000000000..28cd7cbf5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-11-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-12-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-12-08.arios new file mode 100644 index 000000000..1a9095fe0 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-12-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-01.arios new file mode 100644 index 000000000..32b4053a7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-29.arios new file mode 100644 index 000000000..d1020c469 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 04-14-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-24-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 
10-24-25.arios new file mode 100644 index 000000000..d1020c469 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-24-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-26-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-26-07.arios new file mode 100644 index 000000000..50f7146b1 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-26-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-28-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-28-03.arios new file mode 100644 index 000000000..014ca592a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-28-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-29-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-29-51.arios new file mode 100644 index 000000000..23fbf8651 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-29-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-04.arios new file mode 100644 index 000000000..fde3a09bc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-39.arios new file mode 100644 index 000000000..7f4badb58 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-32-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-34-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-34-25.arios new file mode 100644 index 000000000..f38c73c69 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-09-04 10-34-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-36-48.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-36-48.arios new file mode 100644 index 000000000..7f4badb58 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 10-36-48.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-10.arios new file mode 100644 index 000000000..017c8628c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-27.arios new file mode 100644 index 000000000..bee107eaa Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-41-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-25.arios new file mode 100644 index 000000000..fbe0b303b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-31.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-31.arios new file mode 100644 index 000000000..bed5e1fdc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-04 11-42-31.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-05-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-05-32.arios new file mode 100644 index 000000000..ca9df952e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-05-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-05 
12-09-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-09-25.arios new file mode 100644 index 000000000..ca9df952e Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-09-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-10-28.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-10-28.arios new file mode 100644 index 000000000..e604413b6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-05 12-10-28.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-01-14.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-01-14.arios new file mode 100644 index 000000000..181bd5185 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-01-14.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-05-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-05-16.arios new file mode 100644 index 000000000..6507806ff Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-05-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-06-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-06-17.arios new file mode 100644 index 000000000..438e0f407 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-06-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-00.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-00.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-00.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-13.arios new file mode 100644 index 000000000..de68395f9 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-07-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-09-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-09-34.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-09-34.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-17.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-42.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-42.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-11-42.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-02.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-21.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-21.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-21.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-47.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-12-47.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 01-13-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-13-11.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-13-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-18.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-41.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-41.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-14-41.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-08.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-59.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-15-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-09.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-09.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-09.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-10.arios new file mode 
100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-43.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-44.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-47.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-53.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-16-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-03.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-20.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-20.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-21.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-21.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-21.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-23.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-37.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-37.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-17-37.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-18-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-18-29.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-18-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-21-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-21-11.arios new file mode 100644 index 000000000..5ab963eb5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-21-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-22-09.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-22-09.arios new file mode 100644 index 000000000..6171b4a74 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-22-09.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-22-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
01-22-20.arios new file mode 100644 index 000000000..6d0b01b9a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-22-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-26-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-26-39.arios new file mode 100644 index 000000000..6d0b01b9a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-26-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-31-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-31-44.arios new file mode 100644 index 000000000..2bda311e6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-31-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-32-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-32-15.arios new file mode 100644 index 000000000..6a48e5e44 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-32-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-05.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-05.arios new file mode 100644 index 000000000..e4094a753 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-05.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-16.arios new file mode 100644 index 000000000..86b93d6ac Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-33-53.arios new file mode 100644 index 000000000..32ce31e5e Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 01-33-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-35-22.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-35-22.arios new file mode 100644 index 000000000..ca07ec14f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-35-22.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-36-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-36-55.arios new file mode 100644 index 000000000..4775d4a96 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-36-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-40.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-41.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-41.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-41.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-43.arios new file mode 100644 index 000000000..fd808b596 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-54.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
01-37-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-55.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-37-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-02.arios new file mode 100644 index 000000000..f64140054 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-38.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-39.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-44.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-38-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-13.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-40.arios new file mode 100644 index 000000000..bc41fc340 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-47.arios new file mode 100644 index 000000000..de68395f9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-39-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-42-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-42-34.arios new file mode 100644 index 000000000..5ce3bfdda Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-42-34.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-43-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-43-32.arios new file mode 100644 index 000000000..fdd4b28b8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-43-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-44-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-44-39.arios new file mode 100644 index 000000000..cc7298615 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-44-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-03.arios new file mode 100644 index 000000000..1c21c1426 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-43.arios new file mode 100644 index 000000000..2e462c67b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-45-43.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 01-47-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-47-06.arios new file mode 100644 index 000000000..aee58b34a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-47-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-27.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-29.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-50-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-51-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-51-01.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-51-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-45.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-51.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-51.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-52-51.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-13.arios new file mode 
100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-18.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-26.arios new file mode 100644 index 000000000..0f9c2a395 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-53-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-08.arios new file mode 100644 index 000000000..85946fb4f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-45.arios new file mode 100644 index 000000000..6507806ff Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-54-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-55-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-55-08.arios new file mode 100644 index 000000000..331631418 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-55-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-56-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-56-50.arios new file mode 100644 index 000000000..45727c6f7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 01-56-50.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-01-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-01-57.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-01-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-02-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-02-11.arios new file mode 100644 index 000000000..2a9e55caf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-02-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-05-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-05-44.arios new file mode 100644 index 000000000..a30d7fdfa Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-05-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-06-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-06-57.arios new file mode 100644 index 000000000..d28ac9295 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-06-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-27.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-27.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-27.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-45.arios new file mode 100644 index 000000000..2dfd8c927 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-09-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-11-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
02-11-01.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-11-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-43.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-57.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-12-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-13-24.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-13-24.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-13-24.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-14-34.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-14-34.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-14-34.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-16-24.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-16-24.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-16-24.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-19-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-19-39.arios new file mode 100644 index 000000000..7c9155815 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 02-19-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-21-33.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-21-33.arios new file mode 100644 index 000000000..0b55510bb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-21-33.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-22-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-22-08.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-22-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-24-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-24-23.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-24-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-25-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-25-53.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-25-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-27-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-27-40.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-27-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-30-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-30-32.arios new file mode 100644 index 000000000..64606ef56 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-30-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
02-30-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-30-44.arios new file mode 100644 index 000000000..967ad85d7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-30-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-01.arios new file mode 100644 index 000000000..2210f4e7d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-03.arios new file mode 100644 index 000000000..3a1f65cdf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-37.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-37.arios new file mode 100644 index 000000000..3a1f65cdf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-31-37.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-32-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-32-30.arios new file mode 100644 index 000000000..6392ca33d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 02-32-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 04-38-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 04-38-20.arios new file mode 100644 index 000000000..06465f6d4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 04-38-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-48-44.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-48-44.arios new file mode 100644 index 000000000..3a259ad36 Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-48-44.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-26.arios new file mode 100644 index 000000000..e3ea6fff0 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-45.arios new file mode 100644 index 000000000..48c0911e8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 05-59-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-04.arios new file mode 100644 index 000000000..c896a7776 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-10.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-10.arios new file mode 100644 index 000000000..c3837fd7b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 06-00-10.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-19-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-19-50.arios new file mode 100644 index 000000000..3239f9806 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-19-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-20-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-20-08.arios new file mode 100644 index 000000000..c3837fd7b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-20-08.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 07-20-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-20-32.arios new file mode 100644 index 000000000..c3837fd7b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-20-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-21-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-21-29.arios new file mode 100644 index 000000000..d738ccf85 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-21-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-22-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-22-16.arios new file mode 100644 index 000000000..e1be31de9 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-22-16.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-24-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-24-40.arios new file mode 100644 index 000000000..492a17eb7 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-24-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-25-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-25-56.arios new file mode 100644 index 000000000..ba5e316ac Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 07-25-56.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-07.arios new file mode 100644 index 000000000..7a6e1db0c Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-30.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-30.arios new file mode 
100644 index 000000000..60c7c7314 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-30.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-59.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-59.arios new file mode 100644 index 000000000..60c7c7314 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-02-59.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-08.arios new file mode 100644 index 000000000..a5242b827 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-32.arios new file mode 100644 index 000000000..a5242b827 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-54.arios new file mode 100644 index 000000000..60c7c7314 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-10-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-20.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-20.arios new file mode 100644 index 000000000..60c7c7314 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-20.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-56.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-56.arios new file mode 100644 index 000000000..a34997058 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-11-56.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-13-23.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-13-23.arios new file mode 100644 index 000000000..fd1d29016 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-13-23.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-14-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-14-07.arios new file mode 100644 index 000000000..fd1d29016 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-14-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-15-21.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-15-21.arios new file mode 100644 index 000000000..e761d7a14 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-15-21.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-22-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-22-07.arios new file mode 100644 index 000000000..2275e026b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-22-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-24-35.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-24-35.arios new file mode 100644 index 000000000..c8b7951dc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-24-35.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-04.arios new file mode 100644 index 000000000..fb0d444f2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-49.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
08-25-49.arios new file mode 100644 index 000000000..221c81d80 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-49.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-54.arios new file mode 100644 index 000000000..787c8221b Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-25-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-01.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-01.arios new file mode 100644 index 000000000..708d05729 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-01.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-11.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-11.arios new file mode 100644 index 000000000..a43f217b2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-26-11.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-31.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-31.arios new file mode 100644 index 000000000..50ed809b6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-31.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-45.arios new file mode 100644 index 000000000..50ed809b6 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-28-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-30-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-30-13.arios new file mode 100644 index 000000000..50ed809b6 Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 08-30-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-13.arios new file mode 100644 index 000000000..4919860bf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-47.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-47.arios new file mode 100644 index 000000000..4919860bf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-33-47.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-04.arios new file mode 100644 index 000000000..4919860bf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-04.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-54.arios new file mode 100644 index 000000000..4919860bf Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-34-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-12.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-12.arios new file mode 100644 index 000000000..099b33e45 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-12.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-57.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-57.arios new file mode 100644 index 000000000..a8ad51ea4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-35-57.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
08-39-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-39-38.arios new file mode 100644 index 000000000..a8ad51ea4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-39-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-40-53.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-40-53.arios new file mode 100644 index 000000000..504187ee5 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-40-53.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-43-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-43-07.arios new file mode 100644 index 000000000..61397a0fc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-43-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-44-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-44-03.arios new file mode 100644 index 000000000..10e430a53 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-44-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-45-29.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-45-29.arios new file mode 100644 index 000000000..80ce82c74 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-45-29.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-48-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-48-08.arios new file mode 100644 index 000000000..45a5069a2 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-48-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-49-14.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-49-14.arios new file mode 100644 index 000000000..8ea5c71cc Binary 
files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-49-14.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-19.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-19.arios new file mode 100644 index 000000000..8ea5c71cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-19.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-38.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-38.arios new file mode 100644 index 000000000..8ea5c71cc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-50-38.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-52-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-52-13.arios new file mode 100644 index 000000000..02c64c49d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-52-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-53-45.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-53-45.arios new file mode 100644 index 000000000..02c64c49d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-53-45.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-54-17.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-54-17.arios new file mode 100644 index 000000000..b581c822d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-54-17.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-57-16.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-57-16.arios new file mode 100644 index 000000000..b581c822d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-57-16.arios differ diff --git a/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 08-58-54.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-58-54.arios new file mode 100644 index 000000000..07bc041e8 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-58-54.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-59-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-59-50.arios new file mode 100644 index 000000000..23a4aed2f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 08-59-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-02-33.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-02-33.arios new file mode 100644 index 000000000..f11c0f5e3 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-02-33.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-00.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-00.arios new file mode 100644 index 000000000..0f8d9a1c4 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-00.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-25.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-25.arios new file mode 100644 index 000000000..4406a0638 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-05-25.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-15-43.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-15-43.arios new file mode 100644 index 000000000..4406a0638 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-15-43.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-39.arios new file mode 
100644 index 000000000..faba6e144 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-55.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-55.arios new file mode 100644 index 000000000..87141da36 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-17-55.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-03.arios new file mode 100644 index 000000000..87141da36 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-03.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-13.arios new file mode 100644 index 000000000..87141da36 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-26.arios new file mode 100644 index 000000000..87141da36 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-49.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-49.arios new file mode 100644 index 000000000..ffc94dc50 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-18-49.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-19-03.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-19-03.arios new file mode 100644 index 000000000..ffc94dc50 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-19-03.arios differ diff 
--git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-14.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-14.arios new file mode 100644 index 000000000..ffc94dc50 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-14.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-24.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-24.arios new file mode 100644 index 000000000..ffc94dc50 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-24.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-36.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-36.arios new file mode 100644 index 000000000..ffc94dc50 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-20-36.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-21-32.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-21-32.arios new file mode 100644 index 000000000..f3716a98f Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-21-32.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-06.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-06.arios new file mode 100644 index 000000000..f9e9ca97d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-06.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-40.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-40.arios new file mode 100644 index 000000000..f9e9ca97d Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-23-40.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-33-18.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
09-33-18.arios new file mode 100644 index 000000000..a240c8974 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 09-33-18.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-11-00.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-11-00.arios new file mode 100644 index 000000000..18a730091 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-11-00.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-19-08.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-19-08.arios new file mode 100644 index 000000000..12ad2fc03 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-19-08.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-28-46.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-28-46.arios new file mode 100644 index 000000000..5b3831336 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-28-46.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-15.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-15.arios new file mode 100644 index 000000000..6d61ba966 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-15.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-58.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-58.arios new file mode 100644 index 000000000..25bc04808 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-31-58.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-32-02.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-32-02.arios new file mode 100644 index 000000000..47144a90a Binary files /dev/null and b/Tools/Arios 
Editor/data/backup/static_cache_2014-09-06 11-32-02.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-32-39.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-32-39.arios new file mode 100644 index 000000000..47144a90a Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-32-39.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-34-07.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-34-07.arios new file mode 100644 index 000000000..1cd4230fb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 11-34-07.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-43-26.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-43-26.arios new file mode 100644 index 000000000..ac4c80beb Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-43-26.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-44-50.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-44-50.arios new file mode 100644 index 000000000..4088515fc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-44-50.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-49-13.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-49-13.arios new file mode 100644 index 000000000..811bd8381 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-49-13.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-54-21.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-54-21.arios new file mode 100644 index 000000000..4088515fc Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-54-21.arios differ diff --git a/Tools/Arios Editor/data/backup/static_cache_2014-09-06 
12-55-04.arios b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-55-04.arios new file mode 100644 index 000000000..811bd8381 Binary files /dev/null and b/Tools/Arios Editor/data/backup/static_cache_2014-09-06 12-55-04.arios differ diff --git a/Tools/Arios Editor/data/libs/mysql-connector-java-5.1.30-bin.jar b/Tools/Arios Editor/data/libs/mysql-connector-java-5.1.30-bin.jar new file mode 100644 index 000000000..afef9b2d0 Binary files /dev/null and b/Tools/Arios Editor/data/libs/mysql-connector-java-5.1.30-bin.jar differ diff --git a/Tools/Arios Editor/data/store/dynamic_cache.arios b/Tools/Arios Editor/data/store/dynamic_cache.arios new file mode 100644 index 000000000..88d2d70c8 Binary files /dev/null and b/Tools/Arios Editor/data/store/dynamic_cache.arios differ diff --git a/Tools/Arios Editor/data/store/static_cache_2014-09-05 12-05-32.arios b/Tools/Arios Editor/data/store/static_cache_2014-09-05 12-05-32.arios new file mode 100644 index 000000000..ca9df952e Binary files /dev/null and b/Tools/Arios Editor/data/store/static_cache_2014-09-05 12-05-32.arios differ diff --git a/Tools/Arios Editor/src/org/arios/Runner.java b/Tools/Arios Editor/src/org/arios/Runner.java new file mode 100644 index 000000000..f65f6c6f4 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/Runner.java @@ -0,0 +1,62 @@ +package org.arios; + +import org.arios.cache.def.impl.ItemDefinition; +import org.arios.workspace.WorkLogger; +import org.arios.workspace.WorkSpace; +import org.arios.workspace.editor.EditorType; +import org.arios.workspace.node.Node; + +/** + * The class used to run the program. + * @author Vexias + * + */ +public final class Runner { + + /** + * The main method. + * @param args the arguments. + * @throws Throwable if thrown. + */ + public static void main(String...args) throws Throwable { + System.setOut(new WorkLogger(System.out)); + WorkSpace.getWorkSpace().init(); + //setNonSpawnables(); + } + + /** + * Sets the non spawnable. 
+ */ + public static void setNonSpawnables() { + String[] names = new String[] {"godsword", "verac", "ahrim", "dharock", "torag", "guthin", "karil", "armadyl", "bandos", "bandos godsword", "armadyl godsword", "zamorak godsword", "saradomin godsword", "saradomin hilt", "zamorak hilt", "bandos hilt", "armadyl hilt", "3rd age", "clue scroll", "casket"}; + for (Node i : EditorType.ITEM.getTab().getNodes().values()) { + if ((boolean)i.getConfigValue("spawnable") == false) { + if (i.getName().equals("Clue scroll") || i.getName().equals("Casket")) { + continue; + } + //System.err.println("" + i.toString() + " is not spawnable!"); + continue; + } + if (!(((ItemDefinition) i.getDefinition()).isUnnoted())) { + continue; + } + if (i.getName().equals("Whoopsie")) { + continue; + } + if (i.getName().startsWith("D'hide body") || i.getName().startsWith("Elegant") || i.getName().contains("ele'")|| i.getName().endsWith("scroll") || i.getName().contains("pouch") || i.getName().endsWith("charm") || i.getName().startsWith("Pouch") || i.getName().equals("Zamorak robe") || i.getName().equals("Cape of legends") || i.getName().startsWith("Rock-shell") || i.getName().startsWith("Yak-hide") || i.getName().equals("Chompy bird hat") || i.getName().equals("Hat") || i.getName().equals("Boots") || i.getName().equals("Robe bottoms") || i.getName().equals("Robe top") || i.getName().toLowerCase().contains("heraldic") || i.getName().startsWith("D'hide") || (i.getName().equals("Gloves") && i.getId() < 7461) || i.getName().startsWith("Ghostly") || i.getName().startsWith("Fremennik")) { + i.setConfig("trade-override", true); + } + if (!((ItemDefinition)i.getDefinition()).getConfiguration("tradeable", false) && ((ItemDefinition)i.getDefinition()).hasAction("wear") && !((ItemDefinition)i.getDefinition()).getConfiguration("trade-override", false)) { + //System.err.println("Item=" + i); + } + for (String name : names) { + if (i.getName().toLowerCase().startsWith(name)) { + i.setConfig("spawnable", false); 
+ System.err.println("Setting non spawnable for - " + i.toString() + "!"); + break; + } + } + } + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/Cache.java b/Tools/Arios Editor/src/org/arios/cache/Cache.java new file mode 100644 index 000000000..443a6d41f --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/Cache.java @@ -0,0 +1,145 @@ +package org.arios.cache; + +import java.io.File; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; + +import org.arios.workspace.WorkSpace; + +/** + * A cache reader. + * @author Emperor + * @author Dragonkk + */ +public final class Cache { + + /** + * The cache file manager. + */ + private static CacheFileManager[] cacheFileManagers; + + /** + * The container cache file informer. + */ + private static CacheFile referenceFile; + + /** + * Construct a new instance. + */ + private Cache(String location) { + try { + init(location); + } catch (Throwable e) { + e.printStackTrace(); + } + } + + /** + * Initialize the cache reader. + * @param path The cache path. + * @throws Throwable When an exception occurs. 
+ */ + public static final void init(String path) throws Throwable { + byte[] cacheFileBuffer = new byte[520]; + RandomAccessFile containersInformFile = new RandomAccessFile(path + "/main_file_cache.idx255", "r"); + RandomAccessFile dataFile = new RandomAccessFile(path + "/main_file_cache.dat2", "r"); + referenceFile = new CacheFile(255, containersInformFile, dataFile, 500000, cacheFileBuffer); + int length = (int) (containersInformFile.length() / 6); + cacheFileManagers = new CacheFileManager[length]; + for (int i = 0; i < length; i++) { + File f = new File(path + "/main_file_cache.idx" + i); + if (f.exists() && f.length() > 0) { + cacheFileManagers[i] = new CacheFileManager(new CacheFile(i, new RandomAccessFile(f, "r"), dataFile, 1000000, cacheFileBuffer), true); + if (cacheFileManagers[i].getInformation() == null) { + System.out.println("Error loading cache index " + i + ": no information."); + cacheFileManagers[i] = null; + } + } + } + } + + /** + * Initaizes the cache. + * @throws Throwable when an exception occurs. + */ + public static final void init() throws Throwable { + init(WorkSpace.getWorkSpace().getSettings().getCachePath()); + } + + /** + * Gets the archive buffer for the grab requests. + * @param index The index id. + * @param archive The archive id. + * @param priority The priority. + * @param encryptionValue The current encryption value. + * @return The byte buffer. + */ + public static ByteBuffer getArchiveData(int index, int archive, boolean priority, int encryptionValue) { + byte[] data = index == 255 ? + referenceFile.getContainerData(archive) : + cacheFileManagers[index].getCacheFile().getContainerData(archive); + if (data == null) { + return null; + } + int compression = data[0] & 0xff; + int length = ((data[1] & 0xff) << 24) + ((data[2] & 0xff) << 16) + ((data[3] & 0xff) << 8) + (data[4] & 0xff); + int settings = compression; + if (!priority) { + settings |= 0x80; + } + int realLength = compression != 0 ? 
length + 4 : length; + ByteBuffer buffer = ByteBuffer.allocate((realLength + 5) + (realLength / 512) + 10); + buffer.put((byte) index); + buffer.putShort((short) archive); + buffer.put((byte) settings); + buffer.putInt(length); + for (int i = 5; i < realLength + 5; i++) { + if (buffer.position() % 512 == 0) { + buffer.put((byte) 255); + } + buffer.put(data[i]); + } + if (encryptionValue != 0) { + for (int i = 0; i < buffer.position(); i++) { + buffer.put(i, (byte) (buffer.get(i) ^ encryptionValue)); + } + } + buffer.flip(); + return buffer; + } + + /** + * Generate the reference data for the cache files. + * @return The reference data byte array. + */ + public static final byte[] generateReferenceData() { + ByteBuffer buffer = ByteBuffer.allocate(cacheFileManagers.length * 8); + for (int index = 0; index < cacheFileManagers.length; index++) { + if (cacheFileManagers[index] == null) { + buffer.putInt(index == 24 ? 609698396 : 0); + buffer.putInt(0); + continue; + } + buffer.putInt(cacheFileManagers[index].getInformation().getInformationContainer().getCrc()); + buffer.putInt(cacheFileManagers[index].getInformation().getRevision()); + } + return buffer.array(); + } + + /** + * Get the cache file managers. + * @return The cache file managers. + */ + public static final CacheFileManager[] getIndexes() { + return cacheFileManagers; + } + + /** + * Get the container cache file informer. + * @return The container cache file informer. 
+ */ + public static final CacheFile getReferenceFile() { + return referenceFile; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/CacheFile.java b/Tools/Arios Editor/src/org/arios/cache/CacheFile.java new file mode 100644 index 000000000..e4b7b8b33 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/CacheFile.java @@ -0,0 +1,149 @@ +package org.arios.cache; + +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; + +import org.arios.cache.misc.ContainersInformation; +import org.arios.cache.misc.XTEACryption; + +/** + * A cache file. + * @author Dragonkk + */ +public final class CacheFile { + + /** + * The index file id. + */ + private int indexFileId; + + /** + * The cache file buffer. + */ + private byte[] cacheFileBuffer; + + /** + * The maximum container size. + */ + private int maxContainerSize; + + /** + * The index file. + */ + private RandomAccessFile indexFile; + + /** + * The data file. + */ + private RandomAccessFile dataFile; + + /** + * Construct a new cache file. + * @param indexFileId The index file id. + * @param indexFile The index file. + * @param dataFile The data file. + * @param maxContainerSize The maximum container size. + * @param cacheFileBuffer The cache file buffer. + */ + public CacheFile(int indexFileId, RandomAccessFile indexFile, RandomAccessFile dataFile, int maxContainerSize, byte[] cacheFileBuffer) { + this.cacheFileBuffer = cacheFileBuffer; + this.indexFileId = indexFileId; + this.maxContainerSize = maxContainerSize; + this.indexFile = indexFile; + this.dataFile = dataFile; + } + + /** + * Get the unpacked container data. + * @param containerId The container id. + * @param xteaKeys The container keys. + * @return The unpacked container data. 
+ */ + public final byte[] getContainerUnpackedData(int containerId, int[] xteaKeys) { + byte[] packedData = getContainerData(containerId); + if (packedData == null) { + return null; + } + if (xteaKeys != null && (xteaKeys[0] != 0 || xteaKeys[1] != 0 || xteaKeys[2] != 0 || xteaKeys[3] != 0)) { + packedData = XTEACryption.decrypt(xteaKeys, ByteBuffer.wrap(packedData), 5, packedData.length).array(); + } + return ContainersInformation.unpackCacheContainer(packedData); + } + + /** + * Get the container data for the specified container id. + * @param containerId The container id. + * @return The container data. + */ + public final byte[] getContainerData(int containerId) { + synchronized(dataFile) { + try { + if (indexFile.length() < (6 * containerId + 6)) { + return null; + } + indexFile.seek(6 * containerId); + indexFile.read(cacheFileBuffer, 0, 6); + int containerSize = (cacheFileBuffer[2] & 0xff) + (((0xff & cacheFileBuffer[0]) << 16) + (cacheFileBuffer[1] << 8 & 0xff00)); + int sector = ((cacheFileBuffer[3] & 0xff) << 16)- (-(0xff00 & cacheFileBuffer[4] << 8) - (cacheFileBuffer[5] & 0xff)); + if (containerSize < 0 || containerSize > maxContainerSize) { + return null; + } + if (sector <= 0 || dataFile.length() / 520L < sector) { + return null; + } + byte data[] = new byte[containerSize]; + int dataReadCount = 0; + int part = 0; + while (containerSize > dataReadCount) { + if (sector == 0) { + return null; + } + dataFile.seek(520 * sector); + int dataToReadCount = containerSize - dataReadCount; + if (dataToReadCount > 512) { + dataToReadCount = 512; + } + dataFile.read(cacheFileBuffer, 0, 8 + dataToReadCount); + int currentContainerId = (0xff & cacheFileBuffer[1]) + (0xff00 & cacheFileBuffer[0] << 8); + int currentPart = ((cacheFileBuffer[2] & 0xff) << 8) + (0xff & cacheFileBuffer[3]); + int nextSector = (cacheFileBuffer[6] & 0xff) + (0xff00 & cacheFileBuffer[5] << 8) + ((0xff & cacheFileBuffer[4]) << 16); + int currentIndexFileId = cacheFileBuffer[7] & 0xff; + if 
(containerId != currentContainerId || currentPart != part|| indexFileId != currentIndexFileId) { + return null; + } + if (nextSector < 0 || (dataFile.length() / 520L) < nextSector) { + return null; + } + for (int index = 0; dataToReadCount > index; index++) { + data[dataReadCount++] = cacheFileBuffer[8 + index]; + } + part++; + sector = nextSector; + } + return data; + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } + } + + /** + * Get the index file id. + * @return + */ + public int getIndexFileId() { + return indexFileId; + } + + /** + * Get the unpacked container data. + * @param containerId The container id. + * @return The unpacked container data. + */ + public final byte[] getContainerUnpackedData(int containerId) { + return getContainerUnpackedData(containerId, null); + } +} + diff --git a/Tools/Arios Editor/src/org/arios/cache/CacheFileManager.java b/Tools/Arios Editor/src/org/arios/cache/CacheFileManager.java new file mode 100644 index 000000000..a2e29bc4a --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/CacheFileManager.java @@ -0,0 +1,246 @@ +package org.arios.cache; + +import java.nio.ByteBuffer; + +import org.arios.cache.misc.ContainersInformation; +import org.arios.cache.misc.StringUtils; + +/** + * A cache file manager. + * @author Dragonkk + * + */ +public final class CacheFileManager { + + /** + * The cache file. + */ + private CacheFile cacheFile; + + /** + * The containers information. + */ + private ContainersInformation information; + + /** + * Discard a files data. + */ + private boolean discardFilesData; + + /** + * A array holding file data. + */ + private byte[][][] filesData; + + /** + * Construct a new cache file manager. + * @param cacheFile The cache file. + * @param discardFilesData To discard a files data. 
+ */ + public CacheFileManager(CacheFile cacheFile, boolean discardFilesData) { + this.cacheFile = cacheFile; + this.discardFilesData = discardFilesData; + byte[] informContainerPackedData = Cache.getReferenceFile().getContainerData(cacheFile.getIndexFileId()); + if (informContainerPackedData == null) { + return; + } + information = new ContainersInformation(informContainerPackedData); + resetFilesData(); + } + + /** + * Get the cache file. + * @return The cache file. + */ + public CacheFile getCacheFile() { + return cacheFile; + } + + /** + * Get the containers size. + * @return The containers size. + */ + public int getContainersSize() { + return information.getContainers().length; + } + + /** + * Get the files size. + * @param containerId The container id. + * @return The files size. + */ + public int getFilesSize(int containerId) { + if (!validContainer(containerId)) { + return -1; + } + return information.getContainers()[containerId].getFiles().length; + } + + /** + * Reset the file data. + */ + public void resetFilesData() { + filesData = new byte[information.getContainers().length][][]; + } + + /** + * Check if a file is valid. + * @param containerId The container id. + * @param fileId The file id. + * @return If the file is valid {@code true}. + */ + public boolean validFile(int containerId, int fileId) { + if (!validContainer(containerId)) { + return false; + } + if (fileId < 0 || information.getContainers()[containerId].getFiles().length <= fileId) { + return false; + } + return true; + + } + + /** + * If a container is valid. + * @param containerId The container id. + * @return If the container is valid {@code true}. + */ + public boolean validContainer(int containerId) { + if (containerId < 0 || information.getContainers().length <= containerId) { + return false; + } + return true; + } + + /** + * Get the file ids. + * @param containerId The container id. + * @return The file ids. 
+ */ + public int[] getFileIds(int containerId) { + if (!validContainer(containerId)) { + return null; + } + return information.getContainers()[containerId].getFilesIndexes(); + } + + /** + * Get the archive id. + * @param name The archive name. + * @return The archive id. + */ + public int getArchiveId(String name) { + if (name == null) { + return -1; + } + int hash = StringUtils.getNameHash(name); + for (int containerIndex = 0; containerIndex < information.getContainersIndexes().length; containerIndex++) { + if (information.getContainers()[information.getContainersIndexes()[containerIndex]].getNameHash() == hash) { + return information.getContainersIndexes()[containerIndex]; + } + } + return -1; + } + + /** + * Get the file data. + * @param containerId The container id. + * @param fileId The file id. + * @return The get file data. + */ + public byte[] getFileData(int containerId, int fileId) { + return getFileData(containerId, fileId, null); + } + + /** + * Load the file data. + * @param archiveId The container id. + * @param keys The container keys. + * @return If the file data is loaded {@code true}. 
+ */ + public boolean loadFilesData(int archiveId, int[] keys) { + byte[] data = cacheFile.getContainerUnpackedData(archiveId, keys); + if (data == null) { + return false; + } + if(filesData[archiveId] == null) { + if(information.getContainers()[archiveId] == null) { + return false; //container inform doesnt exist anymore + } + filesData[archiveId] = new byte[information.getContainers()[archiveId].getFiles().length][]; + } + if (information.getContainers()[archiveId].getFilesIndexes().length == 1) { + int fileId = information.getContainers()[archiveId].getFilesIndexes()[0]; + filesData[archiveId][fileId] = data; + } else { + int readPosition = data.length; + int amtOfLoops = data[--readPosition] & 0xff; + readPosition -= amtOfLoops * (information.getContainers()[archiveId].getFilesIndexes().length * 4); + ByteBuffer buffer = ByteBuffer.wrap(data); + int filesSize[] = new int[information.getContainers()[archiveId].getFilesIndexes().length]; + buffer.position(readPosition); + for (int loop = 0; loop < amtOfLoops; loop++) { + int offset = 0; + for (int fileIndex = 0; fileIndex < information.getContainers()[archiveId].getFilesIndexes().length; fileIndex++) { + filesSize[fileIndex] += offset += buffer.getInt(); + } + } + byte[][] filesBufferData = new byte[information.getContainers()[archiveId].getFilesIndexes().length][]; + for (int fileIndex = 0; fileIndex < information.getContainers()[archiveId].getFilesIndexes().length; fileIndex++) { + filesBufferData[fileIndex] = new byte[filesSize[fileIndex]]; + filesSize[fileIndex] = 0; + } + buffer.position(readPosition); + int sourceOffset = 0; + for (int loop = 0; loop < amtOfLoops; loop++) { + int dataRead = 0; + for (int fileIndex = 0; fileIndex < information.getContainers()[archiveId].getFilesIndexes().length; fileIndex++) { + dataRead += buffer.getInt(); + System.arraycopy(data, sourceOffset, filesBufferData[fileIndex], filesSize[fileIndex],dataRead); + sourceOffset += dataRead; + filesSize[fileIndex] += dataRead; + } + } 
+ for (int fileIndex = 0; fileIndex < information.getContainers()[archiveId].getFilesIndexes().length; fileIndex++) { + filesData[archiveId][information.getContainers()[archiveId].getFilesIndexes()[fileIndex]] = filesBufferData[fileIndex]; + } + } + return true; + + } + + /** + * Get the file data. + * @param containerId The container id. + * @param fileId The file id. + * @param xteaKeys The container keys. + * @return The file data. + */ + public byte[] getFileData(int containerId, int fileId, int[] xteaKeys) { + if (!validFile(containerId, fileId)) { + return null; + } + if (filesData[containerId] == null || filesData[containerId][fileId] == null) { + if (!loadFilesData(containerId, xteaKeys)) { + return null; + } + } + byte[] data = filesData[containerId][fileId]; + if (discardFilesData) { + if (filesData[containerId].length == 1) { + filesData[containerId] = null; + } else { + filesData[containerId][fileId] = null; + } + } + return data; + } + + /** + * Get the containers information. + * @return The containers information. + */ + public ContainersInformation getInformation() { + return information; + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/ServerStore.java b/Tools/Arios Editor/src/org/arios/cache/ServerStore.java new file mode 100644 index 000000000..d3b5b4d3b --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/ServerStore.java @@ -0,0 +1,206 @@ +package org.arios.cache; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.channels.FileChannel.MapMode; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.misc.ByteBufferUtils; +import org.arios.workspace.WorkSpace; + +/** + * The server data storage. + * @author Emperor + * + */ +public final class ServerStore { + + /** + * The storage. + */ + private static Map storage = new HashMap<>(); + + /** + * If the store has initialized. 
+ */ + private static boolean initialized; + + /** + * Initializes the store. + */ + public static void init() { + storage = new HashMap<>(); + File file = new File(WorkSpace.getWorkSpace().getSettings().getStorePath() + "/static_cache.arios"); + if (file.exists()) { + System.out.println("Loaded store " + file.getAbsolutePath() + "..."); + try (RandomAccessFile raf = new RandomAccessFile(file, "rw")) { + FileChannel channel = raf.getChannel(); + ByteBuffer buffer = channel.map(MapMode.READ_WRITE, 0, channel.size()); + int size = buffer.getShort() & 0xFFFF; + for (int i = 0; i < size; i++) { + StoreFile store = new StoreFile(); + String archive = ByteBufferUtils.getString(buffer); + if (storage.containsKey(archive)) { + throw new IllegalStateException("Duplicate archive found - archive=" + archive + "!"); + } + byte[] data = new byte[buffer.getInt()]; + buffer.get(data); + store.setData(data); + storage.put(archive, store); + } + if (buffer.hasRemaining()) { + throw new IllegalStateException("Unable to read full static store! " + buffer.remaining()); + } + channel.close(); + raf.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + initialized = true; + } + + /** + * Used for writing the static store. + * @param path The path. + */ + public static void createStaticStore(String path) { + write(path + "/static_cache.arios", false); + } + + /** + * Used for writing the static store. + */ + public static void createStaticStore() { + write(WorkSpace.getWorkSpace().getSettings().getStorePath(), false); + } + + /** + * Writes all the dynamic storage files (on server termination). + * @param path The path. + */ + public static void dump(String path) { + write(path + "/dynamic_cache.arios", true); + } + + /** + * Writes the store file to the given file path. + * @param filePath The file path. + * @param dynamic If the dynamic store is being written. 
+ */ + public static void write(String filePath, boolean dynamic) { + if (!initialized) { + throw new IllegalStateException("Server store has not been initialized!"); + } + File f = new File(filePath); + if (f.exists()) { + f.delete(); + } + ByteBuffer buffer = ByteBuffer.allocate(1 << 28); + buffer.putShort((short) 0); + int size = 0; + for (String archive : storage.keySet()) { + StoreFile file = storage.get(archive); + if (file.isDynamic() != dynamic) { + continue; + } + size++; + ByteBuffer buf = file.data(); + ByteBufferUtils.putString(archive, buffer); + buffer.putInt(buf.remaining()); + buffer.put(buf); + } + buffer.putShort(0, (short) size); + buffer.flip(); + try (RandomAccessFile raf = new RandomAccessFile(f, "rw")) { + FileChannel channel = raf.getChannel(); + channel.write(buffer); + channel.close(); + raf.close(); + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Sets the archive data. + * @param archive The archive id. + * @param buffer The readable buffer. + */ + public static void setArchive(String archive, ByteBuffer buffer) { + setArchive(archive, buffer, true); + } + + /** + * Sets the archive data. + * @param archive The archive id. + * @param buffer The readable buffer. + * @param dynamic If the data changes during server runtime. + */ + public static void setArchive(String archive, ByteBuffer buffer, boolean dynamic) { + byte[] data = new byte[buffer.remaining()]; + buffer.get(data); + setArchive(archive, data, dynamic, true); + } + + /** + * Sets the archive data. + * @param archive The archive index. + * @param data The archive data. + * @param dynamic If the data changes during server runtime. + */ + public static void setArchive(String archive, byte[] data, boolean dynamic) { + setArchive(archive, data, dynamic, true); + } + + + /** + * Sets the archive data. + * @param archive The archive index. + * @param data The archive data. + * @param dynamic If the data changes during server runtime. 
+ * @param overwrite If the archive should be overwritten. + */ + public static void setArchive(String archive, byte[] data, boolean dynamic, boolean overwrite) { + StoreFile file = storage.get(archive); + if (file == null) { + storage.put(archive, file = new StoreFile()); + } + else if (!overwrite) { + throw new IllegalStateException("Already contained archive " + archive + "!"); + } + file.setDynamic(dynamic); + file.setData(data); + } + + /** + * Gets the archive data for the given archive id. + * @param archive The archive index. + * @return The archive data. + */ + public static ByteBuffer getArchive(String archive) { + return get(archive).data(); + } + + /** + * Sets the archive file. + * @param archive The archive. + * @param file The file. + */ + public static void set(String archive, StoreFile file) { + storage.put(archive, file); + } + + /** + * Gets the store file for the given archive. + * @param archive The archive id. + * @return The store file. + */ + public static StoreFile get(String archive) { + return storage.get(archive); + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/StoreFile.java b/Tools/Arios Editor/src/org/arios/cache/StoreFile.java new file mode 100644 index 000000000..ad0d3a58f --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/StoreFile.java @@ -0,0 +1,73 @@ +package org.arios.cache; + +import java.nio.ByteBuffer; + +/** + * Represents a file used in the server store. + * @author Emperor + * + */ +public final class StoreFile { + + /** + * If the data can change during server runtime. + */ + private boolean dynamic; + + /** + * The file data. + */ + private byte[] data; + + /** + * Constructs a new {@code StoreFile} {@code Object}. + */ + public StoreFile() { + /* + * empty. + */ + } + + /** + * Puts the data on the buffer. + * @param buffer The buffer. 
+ */ + public void put(ByteBuffer buffer) { + byte[] data = new byte[buffer.remaining()]; + buffer.get(data); + this.data = data; + } + + /** + * Creates a byte buffer containing the file data. + * @return The buffer. + */ + public ByteBuffer data() { + return ByteBuffer.wrap(data); + } + + /** + * Sets the data. + * @param data The data. + */ + public void setData(byte[] data) { + this.data = data; + } + + /** + * Gets the dynamic. + * @return The dynamic. + */ + public boolean isDynamic() { + return dynamic; + } + + /** + * Sets the dynamic. + * @param dynamic The dynamic to set. + */ + public void setDynamic(boolean dynamic) { + this.dynamic = dynamic; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/Definition.java b/Tools/Arios Editor/src/org/arios/cache/def/Definition.java new file mode 100644 index 000000000..b8b7d6f7d --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/Definition.java @@ -0,0 +1,161 @@ +package org.arios.cache.def; + +import java.util.HashMap; +import java.util.Map; + +import org.arios.workspace.node.Configuration; + +/** + * Represent's a node's definitions. + * @author Emperor + * + */ +public class Definition { + + /** + * The node id. + */ + protected int id; + + /** + * The name. + */ + protected String name = "null"; + + /** + * The examine info. + */ + protected String examine; + + /** + * The options. + */ + protected String[] options; + + /** + * The configurations. + */ + protected final Map> configurations = new HashMap>(); + + /** + * Constructs a new {@code Definition} {@code Object}. + */ + public Definition() { + /* + * empty. + */ + } + + /** + * Checks if this node has options. + * @return {@code True} if so. 
+ */ + public boolean hasOptions() { + if (options == null) { + return false; + } + for (String option : options) { + if (option != null && !option.equals("null")) { + return true; + } + } + return false; + } + + /** + * Gets a configuration of this item's definitions. + * @param key The key. + * @return The configuration value. + */ + @SuppressWarnings("unchecked") + public V getConfiguration(String key) { + return (V) configurations.get(key); + } + + /** + * Gets a configuration from this item's definitions. + * @param key The key. + * @param fail The object to return if there was no value found for this key. + * @return The value, or the fail object. + */ + @SuppressWarnings("unchecked") + public V getConfiguration(String key, V fail) { + Configuration config = configurations.get(key); + if (config == null) { + return fail; + } + return (V) config.getValue(); + } + + /** + * Gets the id. + * @return The id. + */ + public int getId() { + return id; + } + + /** + * Sets the id. + * @param id The id to set. + */ + public void setId(int id) { + this.id = id; + } + + /** + * Gets the name. + * @return The name. + */ + public String getName() { + return name; + } + + /** + * Sets the name. + * @param name The name to set. + */ + public void setName(String name) { + this.name = name; + } + + /** + * Gets the examine. + * @return The examine. + */ + public String getExamine() { + return examine; + } + + /** + * Sets the examine. + * @param examine The examine to set. + */ + public void setExamine(String examine) { + this.examine = examine; + } + + /** + * Gets the options. + * @return The options. + */ + public String[] getOptions() { + return options; + } + + /** + * Sets the options. + * @param options The options to set. + */ + public void setOptions(String[] options) { + this.options = options; + } + + /** + * Gets the configurations. + * @return The configurations. 
+ */ + public Map> getConfigurations() { + return configurations; + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/impl/AnimationDefinition.java b/Tools/Arios Editor/src/org/arios/cache/def/impl/AnimationDefinition.java new file mode 100644 index 000000000..709177acd --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/impl/AnimationDefinition.java @@ -0,0 +1,200 @@ +package org.arios.cache.def.impl; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.Cache; +import org.arios.cache.misc.ByteBufferUtils; + +/** + * Represents an animation's definitions. + * @author Emperor + */ +public final class AnimationDefinition { + + public int anInt2136; + public int anInt2137; + public int[] anIntArray2139; + public int anInt2140; + public boolean aBoolean2141 = false; + public int anInt2142; + public int emoteItem; + public int anInt2144 = -1; + public int[][] handledSounds; + public boolean[] aBooleanArray2149; + public int[] anIntArray2151; + public boolean aBoolean2152; + public int[] durations; + public int anInt2155; + public boolean aBoolean2158; + public boolean aBoolean2159; + public int anInt2162; + public int anInt2163; + boolean newHeader; + + // added + public int[] soundMinDelay; + public int[] soundMaxDelay; + public int[] anIntArray1362; + public boolean effect2Sound; + + private static final Map animDefs = new HashMap<>(); + + public static final AnimationDefinition forId(int emoteId) { + try { + AnimationDefinition defs = animDefs.get(emoteId); + if (defs != null) { + return defs; + } + byte[] data = Cache.getIndexes()[20].getFileData(emoteId >>> 7, emoteId & 0x7f); + defs = new AnimationDefinition(); + if (data != null) { + defs.readValueLoop(ByteBuffer.wrap(data)); + } + defs.method2394(); + animDefs.put(emoteId, defs); + return defs; + } catch (Throwable t) { + return null; + } + } + + private void readValueLoop(ByteBuffer buffer) { + for (;;) { + 
int opcode = buffer.get() & 0xFF; + if (opcode == 0) { + break; + } + readValues(buffer, opcode); + } + } + + /** + * Gets the duration of this animation in milliseconds. + * @return The duration. + */ + public int getDuration() { + if (durations == null) { + return 0; + } + int duration = 0; + for (int i : durations) { + if (i > 100) { + continue; + } + duration += i * 20; + } + return duration; + } + + /** + * Gets the duration of this animation in (600ms) ticks. + * @return The duration in ticks. + */ + public int getDurationTicks() { + int ticks = getDuration() / 600; + return ticks < 1 ? 1 : ticks; + } + + private void readValues(ByteBuffer buffer, int opcode) { + if (opcode == 1) { + int length = buffer.getShort() & 0xFFFF; + durations = new int[length]; + for (int i = 0; i < length; i++) { + durations[i] = buffer.getShort() & 0xFFFF; + } + anIntArray2139 = new int[length]; + for (int i = 0; i < length; i++) { + anIntArray2139[i] = buffer.getShort() & 0xFFFF; + } + for (int i = 0; i < length; i++) { + anIntArray2139[i] = ((buffer.getShort() & 0xFFFF << 16) + anIntArray2139[i]); + } + } else if (opcode != 2) { + if (opcode != 3) { + if (opcode == 4) + aBoolean2152 = true; + else if (opcode == 5) + anInt2142 = buffer.get() & 0xFF; + else if (opcode != 6) { + if (opcode == 7) + emoteItem = buffer.getShort() & 0xFFFF; + else if ((opcode ^ 0xffffffff) != -9) { + if (opcode != 9) { + if (opcode != 10) { + if (opcode == 11) + anInt2155 = buffer.get() & 0xFF; + else if (opcode == 12) { + int i = buffer.get() & 0xFF; + anIntArray2151 = new int[i]; + for (int i_19_ = 0; ((i_19_ ^ 0xffffffff) > (i ^ 0xffffffff)); i_19_++) + anIntArray2151[i_19_] = buffer.getShort() & 0xFFFF; + for (int i_20_ = 0; i > i_20_; i_20_++) + anIntArray2151[i_20_] = ((buffer.getShort() & 0xFFFF << 16) + anIntArray2151[i_20_]); + } else if (opcode == 13) { + // opcode 13 + int i = buffer.getShort() & 0xFFFF; + handledSounds = new int[i][]; + for (int i_21_ = 0; i_21_ < i; i_21_++) { + int i_22_ 
= buffer.get() & 0xFF; + if ((i_22_ ^ 0xffffffff) < -1) { + handledSounds[i_21_] = new int[i_22_]; + handledSounds[i_21_][0] = ByteBufferUtils.getTriByte(buffer); + for (int i_23_ = 1; ((i_22_ ^ 0xffffffff) < (i_23_ ^ 0xffffffff)); i_23_++) { + handledSounds[i_21_][i_23_] = buffer.getShort() & 0xFFFF; + } + } + } + } else if (opcode == 14) { + aBoolean2141 = true; + } else { + System.out.println("Unhandled animation opcode " + opcode); + } + } else + anInt2162 = buffer.get() & 0xFF; + } else + anInt2140 = buffer.get() & 0xFF; + } else + anInt2136 = buffer.get() & 0xFF; + } else + anInt2144 = buffer.getShort() & 0xFFFF; + } else { + aBooleanArray2149 = new boolean[256]; + int length = buffer.get() & 0xFF; + for (int i = 0; i < length; i++) { + aBooleanArray2149[buffer.get() & 0xFF] = true; + } + } + } else + anInt2163 = buffer.getShort() & 0xFFFF; + } + + public void method2394() { + if (anInt2140 == -1) { + if (aBooleanArray2149 == null) + anInt2140 = 0; + else + anInt2140 = 2; + } + if (anInt2162 == -1) { + if (aBooleanArray2149 == null) + anInt2162 = 0; + else + anInt2162 = 2; + } + } + + public AnimationDefinition() { + anInt2136 = 99; + emoteItem = -1; + anInt2140 = -1; + aBoolean2152 = false; + anInt2142 = 5; + aBoolean2159 = false; + anInt2163 = -1; + anInt2155 = 2; + aBoolean2158 = false; + anInt2162 = -1; + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/impl/GraphicDefinition.java b/Tools/Arios Editor/src/org/arios/cache/def/impl/GraphicDefinition.java new file mode 100644 index 000000000..9adbc8d9b --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/impl/GraphicDefinition.java @@ -0,0 +1,187 @@ +package org.arios.cache.def.impl; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.Cache; + +/** + * Represents a Graphic's definition. 
+ * @author Jagex + * + */ +public class GraphicDefinition { + + public short[] aShortArray1435; + public short[] aShortArray1438; + public int anInt1440; + public boolean aBoolean1442; + public int defaultModel; + public int anInt1446; + public boolean aBoolean1448 = false; + public int anInt1449; + public int animationId; + public int anInt1451; + public int graphicsId; + public int anInt1454; + public short[] aShortArray1455; + public short[] aShortArray1456; + + // added + public byte byteValue; + // added + public int intValue; + + /** + * The definitions mapping. + */ + private static final Map graphicDefinitions = new HashMap<>(); + + /** + * Gets the graphic definition for the given graphic id. + * @param gfxId The graphic id. + * @return The definition. + */ + public static final GraphicDefinition forId(int gfxId) { + GraphicDefinition def = graphicDefinitions.get(gfxId); + if (def != null) { + return def; + } + byte[] data = Cache.getIndexes()[21].getFileData(gfxId >>> 735411752, gfxId & 0xff); + def = new GraphicDefinition(); + def.graphicsId = gfxId; + if (data != null) { + def.readValueLoop(ByteBuffer.wrap(data)); + } + graphicDefinitions.put(gfxId, def); + return def; + } + + /** + * The main method, used for running a graphic definition search. + * @param s The arguments cast on runtime. + */ + public static final void main(String... s) { + + //5046 - 5050 are related anims & 2148 + GraphicDefinition d = GraphicDefinition.forId(803); + System.out.println("Graphic " + d.graphicsId + " anim id = " + d.animationId + ", " + d.defaultModel + "."); + for (int i = 0; i < 5000; i++) { + GraphicDefinition def = GraphicDefinition.forId(i); + if (def == null) { + continue; + } + if ((def.animationId > 2000 && def.animationId < 2200) || (def.defaultModel >= 1300 && def.defaultModel < 1500)) { + System.out.println("Possible match [id=" + i + ", anim=" + + def.animationId + "]."); + } + } + } + + /** + * Reads and handles all data from the input stream. 
+ * @param buffer The input stream. + */ + private void readValueLoop(ByteBuffer buffer) { + for (;;) { + int opcode = buffer.get() & 0xFF; + if (opcode == 0) { + break; + } + readValues(buffer, opcode); + } + } + + /** + * Reads the opcode values from the input stream. + * @param buffer The input stream. + * @param opcode The opcode to handle. + */ + public void readValues(ByteBuffer buffer, int opcode) { + if (opcode != 1) { + if ((opcode ^ 0xffffffff) == -3) + animationId = buffer.getShort(); + else if (opcode == 4) + anInt1446 = buffer.getShort() & 0xFFFF; + else if (opcode != 5) { + if ((opcode ^ 0xffffffff) != -7) { + if (opcode == 7) + anInt1440 = buffer.get() & 0xFF; + else if ((opcode ^ 0xffffffff) == -9) + anInt1451 = buffer.get() & 0xFF; + else if (opcode != 9) { + if (opcode != 10) { + if (opcode == 11) { // added opcode + // aBoolean1442 = true; + byteValue = (byte) 1; + } else if (opcode == 12) { // added opcode + // aBoolean1442 = true; + byteValue = (byte) 4; + } else if (opcode == 13) { // added opcode + // aBoolean1442 = true; + byteValue = (byte) 5; + } else if (opcode == 14) { // added opcode + // aBoolean1442 = true; + // aByte2856 = 2; + byteValue = (byte) 2; + intValue = (buffer.get() & 0xFF) * 256; + } else if (opcode == 15) { + // aByte2856 = 3; + byteValue = (byte) 3; + intValue = buffer.getShort() & 0xFFFF; + } else if (opcode == 16) { + // aByte2856 = 3; + byteValue = (byte) 3; + intValue = buffer.getInt(); + } else if (opcode != 40) { + if ((opcode ^ 0xffffffff) == -42) { + int i = buffer.get() & 0xFF; + aShortArray1455 = new short[i]; + aShortArray1435 = new short[i]; + for (int i_0_ = 0; i > i_0_; i_0_++) { + aShortArray1455[i_0_] = (short) (buffer.getShort() & 0xFFFF); + aShortArray1435[i_0_] = (short) (buffer.getShort() & 0xFFFF); + } + } + } else { + int i = buffer.get() & 0xFF; + aShortArray1438 = new short[i]; + aShortArray1456 = new short[i]; + for (int i_1_ = 0; ((i ^ 0xffffffff) < (i_1_ ^ 0xffffffff)); i_1_++) { + 
aShortArray1438[i_1_] = (short) (buffer.getShort() & 0xFFFF); + aShortArray1456[i_1_] = (short) (buffer.getShort() & 0xFFFF); + } + } + } else + aBoolean1448 = true; + } else { + // aBoolean1442 = true; + byteValue = (byte) 3; + intValue = 8224; + } + } else + anInt1454 = buffer.getShort() & 0xFFFF; + } else + anInt1449 = buffer.getShort() & 0xFFFF; + } else + defaultModel = buffer.getShort(); + } + + /** + * Constructs a new {@code GraphicDefinition} {@code Object}. + * + */ + public GraphicDefinition() { + byteValue = 0; + intValue = -1; + anInt1446 = 128; + aBoolean1442 = false; + anInt1449 = 128; + anInt1451 = 0; + animationId = -1; + anInt1454 = 0; + anInt1440 = 0; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/impl/ItemDefinition.java b/Tools/Arios Editor/src/org/arios/cache/def/impl/ItemDefinition.java new file mode 100644 index 000000000..7f5f050e8 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/impl/ItemDefinition.java @@ -0,0 +1,1282 @@ +package org.arios.cache.def.impl; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.Cache; +import org.arios.cache.def.Definition; +import org.arios.cache.misc.ByteBufferUtils; +import org.arios.workspace.node.Configuration; + +/** + * Represents an item's definitions. + * @author Jagex + * @author Emperor + */ +public class ItemDefinition extends Definition { + + /** + * The item definitions mapping. + */ + private static final Map DEFINITIONS = new HashMap<>(); + + /** + * The interface model id. + */ + private int interfaceModelId; + + /** + * The model zoom. + */ + private int modelZoom; + + /** + * The model rotation. + */ + private int modelRotation1; + + /** + * The model rotation. + */ + private int modelRotation2; + + /** + * The model offset. + */ + private int modelOffset1; + + /** + * The model offset. + */ + private int modelOffset2; + + /** + * If item is stackable. 
+ */ + private boolean stackable; + + /** + * The item value. + */ + private int value; + + /** + * If item is members only. + */ + private boolean membersOnly; + + /** + * The male model wear id. + */ + private int maleWornModelId1 = -1; + + /** + * The female model wear id. + */ + private int femaleWornModelId1; + + /** + * The male model wear id. + */ + private int maleWornModelId2 = -1; + + /** + * The female wear model id. + */ + private int femaleWornModelId2; + + /** + * The ground actions. + */ + private String[] groundActions; + + /** + * The original model colors. + */ + private short[] originalModelColors; + + /** + * The modified model colors. + */ + private short[] modifiedModelColors; + + /** + * The texture color 1. + */ + private short[] textureColour1; + + /** + * The texture color 2. + */ + private short[] textureColour2; + + /** + * A unknown byte array. + */ + private byte[] unknownArray1; + + /** + * A unknown integer array. + */ + private int[] unknownArray2; + + /** + * A unknown integer array. + */ + private int[][] unknownArray3; + + /** + * If item is noted. + */ + private boolean unnoted = true; + + /** + * The colour equipment. + */ + private int colourEquip1; + + /** + * The colour equipment. + */ + private int colourEquip2; + + /** + * The note item. + */ + private int noteId = -1; + + /** + * The note template id. + */ + private int noteTemplateId = -1; + + /** + * The stackable ids. + */ + private int[] stackIds; + + /** + * The stackable amounts. + */ + private int[] stackAmounts; + + /** + * The team id. + */ + private int teamId; + + /** + * The lend id. + */ + private int lendId = -1; + + /** + * The lend template id. + */ + private int lendTemplateId = -1; + + /** + * The recolour id. + */ + private int recolourId = -1; + + /** + * The recolour template id. + */ + private int recolourTemplateId = -1; + + /** + * The equip id. 
+ */ + private int equipId; + + /** + * The item requirements + */ + private HashMap itemRequirements; + + /** + * The clientscript data. + */ + private HashMap clientScriptData; + + /** + * Constructs a new {@code ItemDefinition} {@code Object}. + */ + public ItemDefinition() { + groundActions = new String[] { null, null, "take", null, null }; + options = new String[] { null, null, null, null, "drop" }; + } + + /** + * Gets an item definition. + * @param itemId The item's id. + * @return The item definition. + */ + public static ItemDefinition forId(int itemId) { + ItemDefinition def = DEFINITIONS.get(itemId); + if (def == null) { + byte[] data = Cache.getIndexes()[19].getFileData(itemId >>> 8, itemId & 0xFF); + if (data == null) { + if (itemId != -1) { + //System.out.println("Failed loading NPC " + id + "."); + } + } else { + def = parseDefinition(itemId, ByteBuffer.wrap(data)); + } + if (def == null) { + def = new ItemDefinition(); + } + DEFINITIONS.put(itemId, def); + } + return def; + } + + /** + * Parses an item's definitions. + * @param itemId The item id. + * @param buffer The buffer. + * @return The item definition. + */ + public static ItemDefinition parseDefinition(int itemId, ByteBuffer buffer) { + ItemDefinition def = new ItemDefinition(); + def.id = itemId; + while (true) { + int opcode = buffer.get() & 0xFF; + if (opcode == 0) { + break; + } else if (opcode == 1) { + def.interfaceModelId = buffer.getShort() & 0xFFFF; + } else if (opcode == 2) { + def.name = ByteBufferUtils.getString(buffer); + } else if (opcode == 3) { + def.configurations.put("examine", new Configuration(7, ByteBufferUtils.getString(buffer))); //Examine info. 
+ } else if (opcode == 4) { + def.modelZoom = buffer.getShort() & 0xFFFF; + } else if (opcode == 5) { + def.modelRotation1 = buffer.getShort() & 0xFFFF; + } else if (opcode == 6) { + def.modelRotation2 = buffer.getShort() & 0xFFFF; + } else if (opcode == 7) { + def.modelOffset1 = buffer.getShort() & 0xFFFF; + if (def.modelOffset1 > 32767) + def.modelOffset1 -= 65536; + } else if (opcode == 8) { + def.modelOffset2 = buffer.getShort() & 0xFFFF; + if (def.modelOffset2 > 32767) { + def.modelOffset2 -= 65536; + } + } else if (opcode == 10) { + buffer.getShort(); + } else if (opcode == 11) { + def.stackable = true; + } else if (opcode == 12) { + def.value = buffer.getInt(); + if (def.value == 0) { + def.value = 1; + } + } else if (opcode == 16) { + def.membersOnly = true; + } else if (opcode == 18) { + buffer.getShort(); + } else if (opcode == 23) { + def.maleWornModelId1 = buffer.getShort() & 0xFFFF; + buffer.get(); + } else if (opcode == 24) { + def.femaleWornModelId1 = buffer.getShort() & 0xFFFF; + } else if (opcode == 25) { + def.maleWornModelId2 = buffer.getShort() & 0xFFFF; + buffer.get(); + } else if (opcode == 26) { + def.femaleWornModelId2 = buffer.getShort() & 0xFFFF; + } else if (opcode >= 30 && opcode < 35) { + def.groundActions[opcode - 30] = ByteBufferUtils.getString(buffer); + } else if (opcode >= 35 && opcode < 40) { + def.options[opcode - 35] = ByteBufferUtils.getString(buffer); + } else if (opcode == 40) { + int length = buffer.get() & 0xFF; + def.originalModelColors = new short[length]; + def.modifiedModelColors = new short[length]; + for (int index = 0; index < length; index++) { + def.originalModelColors[index] = buffer.getShort(); + def.modifiedModelColors[index] = buffer.getShort(); + } + } else if (opcode == 41) { + int length = buffer.get() & 0xFF; + def.textureColour1 = new short[length]; + def.textureColour2 = new short[length]; + for (int index = 0; index < length; index++) { + def.textureColour1[index] = buffer.getShort(); + 
def.textureColour2[index] = buffer.getShort(); + } + } else if (opcode == 42) { + int length = buffer.get() & 0xFF; + def.unknownArray1 = new byte[length]; + for (int index = 0; index < length; index++) + def.unknownArray1[index] = buffer.get(); + } else if (opcode == 65) { + def.unnoted = true; + } else if (opcode == 78) { + def.colourEquip1 = buffer.getShort() & 0xFFFF; + } else if (opcode == 79) { + def.colourEquip2 = buffer.getShort() & 0xFFFF; + } else if (opcode == 90) { + buffer.getShort(); + } else if (opcode == 91) { + buffer.getShort(); + } else if (opcode == 92) { + buffer.getShort(); + } else if (opcode == 93) { + buffer.getShort(); + } else if (opcode == 95) { + buffer.getShort(); + } else if (opcode == 96) { + buffer.get(); + } else if (opcode == 97) { + def.noteId = buffer.getShort() & 0xFFFF; + } else if (opcode == 98) { + def.noteTemplateId = buffer.getShort() & 0xFFFF; + } else if (opcode >= 100 && opcode < 110) { + if (def.stackIds == null) { + def.stackIds = new int[10]; + def.stackAmounts = new int[10]; + } + def.stackIds[opcode - 100] = buffer.getShort() & 0xFFFF; + def.stackAmounts[opcode - 100] = buffer.getShort() & 0xFFFF; + } else if (opcode == 110) { + buffer.getShort(); + } else if (opcode == 111) { + buffer.getShort(); + } else if (opcode == 112) { + buffer.getShort(); + } else if (opcode == 113) { + buffer.get(); + } else if (opcode == 114) { + buffer.get(); + } else if (opcode == 115) { + def.teamId = buffer.get(); + } else if (opcode == 121) { + def.lendId = buffer.getShort() & 0xFFFF; + } else if (opcode == 122) { + def.lendTemplateId = buffer.getShort() & 0xFFFF; + } else if (opcode == 124) { + if (def.unknownArray3 == null) { + def.unknownArray3 = new int[11][]; + } + int slot = buffer.get(); + def.unknownArray3[slot] = new int[6]; + for (int i = 0; i < 6; i++) { + def.unknownArray3[slot][i] = buffer.getShort(); + } + } else if (opcode == 125) { + buffer.get(); + buffer.get(); + buffer.get(); + } else if (opcode == 126) { + 
buffer.get(); + buffer.get(); + buffer.get(); + } else if (opcode == 127) { + buffer.get(); + buffer.getShort(); + } else if (opcode == 128) { + buffer.get(); + buffer.getShort(); + } else if (opcode == 129) { + buffer.get(); + buffer.getShort(); + } else if (opcode == 130) { + buffer.get(); + buffer.getShort(); + } else if (opcode == 132) { + int length = buffer.get() & 0xFF; + def.unknownArray2 = new int[length]; + for (int index = 0; index < length; index++) { + def.unknownArray2[index] = buffer.getShort() & 0xFFFF; + } + } else if (opcode == 134) { + buffer.get(); + } else if (opcode == 139) { + def.recolourId = buffer.getShort() & 0xFFFF; + } else if (opcode == 140) { + def.recolourTemplateId = buffer.getShort() & 0xFFFF; + } else if (opcode == 249) { + int length = buffer.get() & 0xFF; + if (def.clientScriptData == null) { + def.clientScriptData = new HashMap(); + } + for (int index = 0; index < length; index++) { + boolean string = (buffer.get() & 0xFF) == 1; + int key = ByteBufferUtils.getTriByte(buffer); + Object value = string ? ByteBufferUtils.getString(buffer) : buffer.getInt(); + def.clientScriptData.put(key, value); + System.out.println(key + " " + value); + } + } else { + System.out.println("Unhandled opcode! opcode: " + opcode); + break; + } + } + return def; + } + + /** + * Defines the definitions for noted, lending and recolored items. 
+ */ + public static void defineTemplates() { + int equipId = 0; + for (int i = 0; i < DEFINITIONS.size(); i++) { + ItemDefinition def = forId(i); + if (def != null && (def.maleWornModelId1 >= 0 || def.maleWornModelId2 >= 0)) { + def.equipId = equipId++; +// if (i < 3200) +// System.out.println("Item [id=" + i + ", name=" + def.getName() + "] equip id: " + (equipId - 1) + " [models=" + def.maleWornModelId1 + ", " + def.maleWornModelId2 + ", " + def.femaleWornModelId1 + ", " + def.femaleWornModelId2 + "."); + } + if (def.noteTemplateId != -1) { + def.transferNoteDefinition(forId(def.noteId), forId(def.noteTemplateId)); + } + if (def.lendTemplateId != -1) { + def.transferLendDefinition(forId(def.lendId), forId(def.lendTemplateId)); + } + if (def.recolourTemplateId != -1) { + def.transferRecolourDefinition(forId(def.recolourId), forId(def.recolourTemplateId)); + } + } + } + + /** + * Transfers definitions for noted items. + * @param reference The reference definitions. + * @param templateReference The template definitions. + */ + public void transferNoteDefinition(ItemDefinition reference, ItemDefinition templateReference) { + membersOnly = reference.membersOnly; + interfaceModelId = templateReference.interfaceModelId; + originalModelColors = templateReference.originalModelColors; + name = reference.name; + modelOffset2 = templateReference.modelOffset2; + textureColour1 = templateReference.textureColour1; + value = reference.value; + modelRotation2 = templateReference.modelRotation2; + stackable = true; + unnoted = false; + modifiedModelColors = templateReference.modifiedModelColors; + modelRotation1 = templateReference.modelRotation1; + modelZoom = templateReference.modelZoom; + textureColour1 = templateReference.textureColour1; + } + + /** + * Transfers definitions for lending items. + * @param reference The reference definitions. + * @param templateReference The template definitions. 
+ */ + public void transferLendDefinition(ItemDefinition reference, ItemDefinition templateReference) { + femaleWornModelId1 = reference.femaleWornModelId1; + maleWornModelId2 = reference.maleWornModelId2; + membersOnly = reference.membersOnly; + interfaceModelId = templateReference.interfaceModelId; + textureColour2 = reference.textureColour2; + groundActions = reference.groundActions; + unknownArray1 = reference.unknownArray1; + modelRotation1 = templateReference.modelRotation1; + modelRotation2 = templateReference.modelRotation2; + originalModelColors = reference.originalModelColors; + name = reference.name; + maleWornModelId1 = reference.maleWornModelId1; + colourEquip1 = reference.colourEquip1; + teamId = reference.teamId; + modelOffset2 = templateReference.modelOffset2; + clientScriptData = reference.clientScriptData; + modifiedModelColors = reference.modifiedModelColors; + colourEquip2 = reference.colourEquip2; + modelOffset1 = templateReference.modelOffset1; + textureColour1 = reference.textureColour1; + value = 0; + modelZoom = templateReference.modelZoom; + options = new String[5]; + femaleWornModelId2 = reference.femaleWornModelId2; + if (reference.options != null) { + options = reference.options.clone(); + } + } + + /** + * Transfers definitions for recolored items. + * @param reference The reference definitions. + * @param templateReference The template definitions. 
+ */ + public void transferRecolourDefinition(ItemDefinition reference, ItemDefinition templateReference) { + femaleWornModelId2 = reference.femaleWornModelId2; + options = new String[5]; + modelRotation2 = templateReference.modelRotation2; + name = reference.name; + maleWornModelId1 = reference.maleWornModelId1; + modelOffset2 = templateReference.modelOffset2; + femaleWornModelId1 = reference.femaleWornModelId1; + maleWornModelId2 = reference.maleWornModelId2; + modelOffset1 = templateReference.modelOffset1; + unknownArray1 = reference.unknownArray1; + stackable = reference.stackable; + modelRotation1 = templateReference.modelRotation1; + textureColour1 = reference.textureColour1; + colourEquip1 = reference.colourEquip1; + textureColour2 = reference.textureColour2; + modifiedModelColors = reference.modifiedModelColors; + modelZoom = templateReference.modelZoom; + colourEquip2 = reference.colourEquip2; + teamId = reference.teamId; + value = 0; + groundActions = reference.groundActions; + originalModelColors = reference.originalModelColors; + membersOnly = reference.membersOnly; + clientScriptData = reference.clientScriptData; + interfaceModelId = templateReference.interfaceModelId; + if (reference.options != null) { + options = reference.options.clone(); + } + } + + /** + * Gets the id. + * @return The id. + */ + @Override + public int getId() { + return id; + } + + /** + * Sets the id. + * @param id The id to set. + */ + @Override + public void setId(int id) { + this.id = id; + } + + /** + * Gets the interfaceModelId. + * @return The interfaceModelId. + */ + public int getInterfaceModelId() { + return interfaceModelId; + } + + /** + * Sets the interfaceModelId. + * @param interfaceModelId The interfaceModelId to set. + */ + public void setInterfaceModelId(int interfaceModelId) { + this.interfaceModelId = interfaceModelId; + } + + /** + * Gets the name. + * @return The name. + */ + @Override + public String getName() { + return name; + } + + /** + * Sets the name. 
+ * @param name The name to set. + */ + @Override + public void setName(String name) { + this.name = name; + } + + /** + * Gets the modelZoom. + * @return The modelZoom. + */ + public int getModelZoom() { + return modelZoom; + } + + /** + * Sets the modelZoom. + * @param modelZoom The modelZoom to set. + */ + public void setModelZoom(int modelZoom) { + this.modelZoom = modelZoom; + } + + /** + * Gets the modelRotation1. + * @return The modelRotation1. + */ + public int getModelRotation1() { + return modelRotation1; + } + + /** + * Sets the modelRotation1. + * @param modelRotation1 The modelRotation1 to set. + */ + public void setModelRotation1(int modelRotation1) { + this.modelRotation1 = modelRotation1; + } + + /** + * Gets the modelRotation2. + * @return The modelRotation2. + */ + public int getModelRotation2() { + return modelRotation2; + } + + /** + * Sets the modelRotation2. + * @param modelRotation2 The modelRotation2 to set. + */ + public void setModelRotation2(int modelRotation2) { + this.modelRotation2 = modelRotation2; + } + + /** + * Gets the modelOffset1. + * @return The modelOffset1. + */ + public int getModelOffset1() { + return modelOffset1; + } + + /** + * Sets the modelOffset1. + * @param modelOffset1 The modelOffset1 to set. + */ + public void setModelOffset1(int modelOffset1) { + this.modelOffset1 = modelOffset1; + } + + /** + * Gets the modelOffset2. + * @return The modelOffset2. + */ + public int getModelOffset2() { + return modelOffset2; + } + + /** + * Sets the modelOffset2. + * @param modelOffset2 The modelOffset2 to set. + */ + public void setModelOffset2(int modelOffset2) { + this.modelOffset2 = modelOffset2; + } + + /** + * Gets the stackable. + * @return The stackable. + */ + public boolean isStackable() { + return stackable || !this.unnoted; + } + + /** + * Sets the stackable. + * @param stackable The stackable to set. + */ + public void setStackable(boolean stackable) { + this.stackable = stackable; + } + + /** + * Gets the value. 
+ * @return The value. + */ + public int getValue() { + if (value == 0) { + return 1; + } + return value; + } + /** + *@return The value. + */ + public int getMaxValue() { + if ((int) (value * 1.05) <= 0) { + return 1; + } + return (int) (value * 1.05); + } + /** + *@return The value. + */ + public int getMinValue() { + if ((int) (value * .95) <= 0) { + return 1; + } + return (int) (value * .95); + } + /** + * Sets the value. + * @param value The value to set. + */ + public void setValue(int value) { + this.value = value; + } + + /** + * Gets the membersOnly. + * @return The membersOnly. + */ + public boolean isMembersOnly() { + return membersOnly; + } + + /** + * Sets the membersOnly. + * @param membersOnly The membersOnly to set. + */ + public void setMembersOnly(boolean membersOnly) { + this.membersOnly = membersOnly; + } + + /** + * Gets the maleWornModelId1. + * @return The maleWornModelId1. + */ + public int getMaleWornModelId1() { + return maleWornModelId1; + } + + /** + * Sets the maleWornModelId1. + * @param maleWornModelId1 The maleWornModelId1 to set. + */ + public void setMaleWornModelId1(int maleWornModelId1) { + this.maleWornModelId1 = maleWornModelId1; + } + + /** + * Gets the femaleWornModelId1. + * @return The femaleWornModelId1. + */ + public int getFemaleWornModelId1() { + return femaleWornModelId1; + } + + /** + * Sets the femaleWornModelId1. + * @param femaleWornModelId1 The femaleWornModelId1 to set. + */ + public void setFemaleWornModelId1(int femaleWornModelId1) { + this.femaleWornModelId1 = femaleWornModelId1; + } + + /** + * Gets the maleWornModelId2. + * @return The maleWornModelId2. + */ + public int getMaleWornModelId2() { + return maleWornModelId2; + } + + /** + * Sets the maleWornModelId2. + * @param maleWornModelId2 The maleWornModelId2 to set. + */ + public void setMaleWornModelId2(int maleWornModelId2) { + this.maleWornModelId2 = maleWornModelId2; + } + + /** + * Gets the femaleWornModelId2. + * @return The femaleWornModelId2. 
+ */ + public int getFemaleWornModelId2() { + return femaleWornModelId2; + } + + /** + * Sets the femaleWornModelId2. + * @param femaleWornModelId2 The femaleWornModelId2 to set. + */ + public void setFemaleWornModelId2(int femaleWornModelId2) { + this.femaleWornModelId2 = femaleWornModelId2; + } + + /** + * Gets the groundOptions. + * @return The groundOptions. + */ + public String[] getGroundOptions() { + return groundActions; + } + + /** + * Sets the groundOptions. + * @param groundOptions The groundOptions to set. + */ + public void setGroundOptions(String[] groundOptions) { + this.groundActions = groundOptions; + } + + /** + * Gets the inventoryOptions. + * @return The inventoryOptions. + */ + public String[] getInventoryOptions() { + return options; + } + + /** + * Sets the inventoryOptions. + * @param inventoryOptions The inventoryOptions to set. + */ + public void setInventoryOptions(String[] inventoryOptions) { + this.options = inventoryOptions; + } + + /** + * Gets the originalModelColors. + * @return The originalModelColors. + */ + public short[] getOriginalModelColors() { + return originalModelColors; + } + + /** + * Sets the originalModelColors. + * @param originalModelColors The originalModelColors to set. + */ + public void setOriginalModelColors(short[] originalModelColors) { + this.originalModelColors = originalModelColors; + } + + /** + * Gets the modifiedModelColors. + * @return The modifiedModelColors. + */ + public short[] getModifiedModelColors() { + return modifiedModelColors; + } + + /** + * Sets the modifiedModelColors. + * @param modifiedModelColors The modifiedModelColors to set. + */ + public void setModifiedModelColors(short[] modifiedModelColors) { + this.modifiedModelColors = modifiedModelColors; + } + + /** + * Gets the textureColour1. + * @return The textureColour1. + */ + public short[] getTextureColour1() { + return textureColour1; + } + + /** + * Sets the textureColour1. + * @param textureColour1 The textureColour1 to set. 
+ */ + public void setTextureColour1(short[] textureColour1) { + this.textureColour1 = textureColour1; + } + + /** + * Gets the textureColour2. + * @return The textureColour2. + */ + public short[] getTextureColour2() { + return textureColour2; + } + + /** + * Sets the textureColour2. + * @param textureColour2 The textureColour2 to set. + */ + public void setTextureColour2(short[] textureColour2) { + this.textureColour2 = textureColour2; + } + + /** + * Gets the unknownArray1. + * @return The unknownArray1. + */ + public byte[] getUnknownArray1() { + return unknownArray1; + } + + /** + * Sets the unknownArray1. + * @param unknownArray1 The unknownArray1 to set. + */ + public void setUnknownArray1(byte[] unknownArray1) { + this.unknownArray1 = unknownArray1; + } + + /** + * Gets the unknownArray2. + * @return The unknownArray2. + */ + public int[] getUnknownArray2() { + return unknownArray2; + } + + /** + * Sets the unknownArray2. + * @param unknownArray2 The unknownArray2 to set. + */ + public void setUnknownArray2(int[] unknownArray2) { + this.unknownArray2 = unknownArray2; + } + + /** + * Gets the unnoted. + * @return The unnoted. + */ + public boolean isUnnoted() { + return unnoted; + } + + /** + * Sets the unnoted. + * @param unnoted The unnoted to set. + */ + public void setUnnoted(boolean unnoted) { + this.unnoted = unnoted; + } + + /** + * Gets the colourEquip1. + * @return The colourEquip1. + */ + public int getColourEquip1() { + return colourEquip1; + } + + /** + * Sets the colourEquip1. + * @param colourEquip1 The colourEquip1 to set. + */ + public void setColourEquip1(int colourEquip1) { + this.colourEquip1 = colourEquip1; + } + + /** + * Gets the colourEquip2. + * @return The colourEquip2. + */ + public int getColourEquip2() { + return colourEquip2; + } + + /** + * Sets the colourEquip2. + * @param colourEquip2 The colourEquip2 to set. 
+ */ + public void setColourEquip2(int colourEquip2) { + this.colourEquip2 = colourEquip2; + } + + /** + * Gets the noteId. + * @return The noteId. + */ + public int getNoteId() { + return noteId; + } + + /** + * Sets the noteId. + * @param noteId The noteId to set. + */ + public void setNoteId(int noteId) { + this.noteId = noteId; + } + + /** + * Gets the noteTemplateId. + * @return The noteTemplateId. + */ + public int getNoteTemplateId() { + return noteTemplateId; + } + + /** + * Sets the noteTemplateId. + * @param noteTemplateId The noteTemplateId to set. + */ + public void setNoteTemplateId(int noteTemplateId) { + this.noteTemplateId = noteTemplateId; + } + + /** + * Gets the stackIds. + * @return The stackIds. + */ + public int[] getStackIds() { + return stackIds; + } + + /** + * Sets the stackIds. + * @param stackIds The stackIds to set. + */ + public void setStackIds(int[] stackIds) { + this.stackIds = stackIds; + } + + /** + * Gets the stackAmounts. + * @return The stackAmounts. + */ + public int[] getStackAmounts() { + return stackAmounts; + } + + /** + * Sets the stackAmounts. + * @param stackAmounts The stackAmounts to set. + */ + public void setStackAmounts(int[] stackAmounts) { + this.stackAmounts = stackAmounts; + } + + /** + * Gets the teamId. + * @return The teamId. + */ + public int getTeamId() { + return teamId; + } + + /** + * Sets the teamId. + * @param teamId The teamId to set. + */ + public void setTeamId(int teamId) { + this.teamId = teamId; + } + + /** + * Gets the lendId. + * @return The lendId. + */ + public int getLendId() { + return lendId; + } + + /** + * Sets the lendId. + * @param lendId The lendId to set. + */ + public void setLendId(int lendId) { + this.lendId = lendId; + } + + /** + * Gets the lendTemplateId. + * @return The lendTemplateId. + */ + public int getLendTemplateId() { + return lendTemplateId; + } + + /** + * Sets the lendTemplateId. + * @param lendTemplateId The lendTemplateId to set. 
+ */ + public void setLendTemplateId(int lendTemplateId) { + this.lendTemplateId = lendTemplateId; + } + + /** + * Gets the recolourId. + * @return The recolourId. + */ + public int getRecolourId() { + return recolourId; + } + + /** + * Sets the recolourId. + * @param recolourId The recolourId to set. + */ + public void setRecolourId(int recolourId) { + this.recolourId = recolourId; + } + + /** + * Gets the recolourTemplateId. + * @return The recolourTemplateId. + */ + public int getRecolourTemplateId() { + return recolourTemplateId; + } + + /** + * Sets the recolourTemplateId. + * @param recolourTemplateId The recolourTemplateId to set. + */ + public void setRecolourTemplateId(int recolourTemplateId) { + this.recolourTemplateId = recolourTemplateId; + } + + /** + * Gets the equipId. + * @return The equipId. + */ + public int getEquipId() { + return equipId; + } + + /** + * Sets the equipId. + * @param equipId The equipId to set. + */ + public void setEquipId(int equipId) { + this.equipId = equipId; + } + + /** + * Gets the clientScriptData. + * @return The clientScriptData. + */ + public HashMap getClientScriptData() { + return clientScriptData; + } + + /** + * Sets the clientScriptData. + * @param clientScriptData The clientScriptData to set. + */ + public void setClientScriptData(HashMap clientScriptData) { + this.clientScriptData = clientScriptData; + } + + /** + * If the item has the specified item. + * @param optionName The action. + * @return If the item has the specified action {@code true}. + */ + public boolean hasAction(String optionName) { + if (options == null) { + return false; + } + for (String action : options) { + if (action == null) { + continue; + } + if (action.equalsIgnoreCase(optionName)) { + return true; + } + } + return false; + } + + /** + * If the item has the destroy action. + * @return If the item has the destroy action {@code true}. 
+ */ + public boolean hasDestroyAction() { + if (options == null) { + return false; + } + for (String action : options) { + if (action == null) { + continue; + } + if (action.equalsIgnoreCase("Destroy")) { + return true; + } + } + return false; + } + + /** + * If the item has the wear action. + * @return If the item has the wear action {@code true}. + */ + public boolean hasWearAction() { + if (options == null) { + return false; + } + for (String action : options) { + if (action == null) { + continue; + } + if (action.equalsIgnoreCase("wield") || action.equalsIgnoreCase("wear") || action.equalsIgnoreCase("equip")) { + return true; + } + } + return false; + } + + /** + * If the item has a special bar. + * @return If the item has a special bar {@code true}. + */ + public boolean hasSpecialBar() { + if (clientScriptData == null) { + return false; + } + Object specialBar = clientScriptData.get(686); + if (specialBar != null && specialBar instanceof Integer) { + return (Integer) specialBar == 1; + } + return false; + } + + /** + * Get the quest id for the item. + * @return The quest id. + */ + public int getQuestId() { + if (clientScriptData == null) { + return -1; + } + Object questId = clientScriptData.get(861); + if (questId != null && questId instanceof Integer) { + return (Integer) questId; + } + return -1; + } + + /** + * Get the archive id. + * @return The archive id. + */ + public int getArchiveId() { + return id >>> 8; + } + + /** + * Get the file id. + * @return The file id. + */ + public int getFileId() { + return 0xff & id; + } + + /** + * Gets the definitions. + * @return The definitions. + */ + public static Map getDefinitions() { + return DEFINITIONS; + } + + /** + * @return the itemRequirements. + */ + public HashMap getItemRequirements() { + return itemRequirements; + } + + /** + * @param itemRequirements the itemRequirements to set. 
+ */ + public void setItemRequirements(HashMap itemRequirements) { + this.itemRequirements = itemRequirements; + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/impl/NPCDefinition.java b/Tools/Arios Editor/src/org/arios/cache/def/impl/NPCDefinition.java new file mode 100644 index 000000000..f13182b71 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/impl/NPCDefinition.java @@ -0,0 +1,876 @@ +package org.arios.cache.def.impl; + +import java.awt.Graphics; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.Cache; +import org.arios.cache.def.Definition; +import org.arios.cache.misc.ByteBufferUtils; + +/** + * Represents an NPC's definitions. + * @author Emperor + * + */ +public final class NPCDefinition extends Definition { + + /** + * The definitions. + */ + private static final Map DEFINITIONS = new HashMap<>(); + + /** + * The size. + */ + public int size = 1; + + /** + * The combat level. + */ + private int combatLevel; + + /** + * The head icons. + */ + public int headIcons; + + /** + * If the NPC can be seen on the minimap. + */ + public boolean isVisibleOnMap; + + /** + * The examine option value + */ + public String examine; + + /** + * Unidentified variables. 
+ */ + public int anInt833; + public int anInt836; + public int anInt837; + public boolean aBoolean841; + public int anInt842; + public int configFileId; + public int[] childNPCIds; + public int anInt846; + public int anInt850; + public byte aByte851; + public boolean aBoolean852; + public int anInt853; + public byte aByte854; + public boolean aBoolean856; + public boolean aBoolean857; + public short[] aShortArray859; + public byte[] aByteArray861; + public short aShort862; + public boolean aBoolean863; + public int anInt864; + public short[] aShortArray866; + public int[] anIntArray868; + public int anInt869; + public int anInt870; + public int anInt871; + public int anInt872; + public int anInt874; + public int anInt875; + public int anInt876; + public int anInt879; + public short[] aShortArray880; + public int anInt884; + public int configId; + public int anInt889; + public int[] anIntArray892; + public short aShort894; + public short[] aShortArray896; + public int anInt897; + public int anInt899; + public int anInt901; + public int standAnimation; + public int walkAnimation; + + /** + * The minimum combat distance (0 uses default distances). + */ + private int combatDistance; + + /** + * The combat graphics. + */ + private Graphics[] combatGraphics = new Graphics[3]; + + /** + * The turning animation. + */ + private int turnAnimation; + + + /** + * The turn 180� animation. + */ + private int turn180Animation; + + /** + * The turn clock-wise animation. + */ + private int turnCWAnimation; + + /** + * The turn counter clock-wise animation. + */ + private int turnCCWAnimation; + + /** + * Constructs a new {@code NPCDefinition} {@code Object}. + * @param id The NPC id. 
+ */ + public NPCDefinition(int id) { + this.id = id; + anInt842 = -1; + configFileId = -1; + anInt837 = -1; + anInt846 = -1; + anInt853 = 32; + standAnimation = -1; + walkAnimation = -1; + combatLevel = -1; + anInt836 = -1; + name = "null"; + anInt869 = 0; + anInt850 = 255; + anInt871 = -1; + aBoolean852 = true; + aShort862 = (short) 0; + anInt876 = -1; + aByte851 = (byte) -96; + anInt875 = 0; + anInt872 = -1; + aBoolean857 = true; + anInt870 = -1; + anInt874 = -1; + anInt833 = -1; + anInt864 = 128; + headIcons = -1; + aBoolean856 = false; + configId = -1; + aByte854 = (byte) -16; + aBoolean863 = false; + isVisibleOnMap = true; + anInt889 = -1; + anInt884 = -1; + aBoolean841 = true; + anInt879 = -1; + anInt899 = 128; + aShort894 = (short) 0; + options = new String[5]; + anInt897 = 0; + anInt901 = -1; + anIntArray868 = new int[0]; + } + + /** + * Gets the NPC definition for this id. + * @param id The NPC id. + * @return The NPC definition object. + */ + public static NPCDefinition forId(int id) { + NPCDefinition def = DEFINITIONS.get(id); + if (def == null) { + def = new NPCDefinition(id); + byte[] data = Cache.getIndexes()[18].getFileData(id >>> 134238215, id & 0x7f); + if (data == null) { + if (id != -1) { + //System.out.println("Failed loading NPC " + id + "."); + } + } else { + def.parse(ByteBuffer.wrap(data)); + } + DEFINITIONS.put(id, def); + } + return def; + } + + /** + * Parses the data. + * @param buffer The data input stream. + */ + private void parse(ByteBuffer buffer) { + while (true) { + int opcode = buffer.get() & 0xFF; + if (opcode == 0) { + break; + } + parseOpcode(buffer, opcode); + } + } + + /** + * Parses an opcode. + * @param buffer The data input stream. + * @param opcode The opcode to parse. 
+ */ + private void parseOpcode(ByteBuffer buffer, int opcode) { + switch (opcode) { + case 1: + int length = buffer.get() & 0xFF; + anIntArray868 = new int[length]; + for (int i_66_ = 0; i_66_ < length; i_66_++) { + anIntArray868[i_66_] = buffer.getShort() & 0xFFFF; + if ((anIntArray868[i_66_] ^ 0xffffffff) == -65536) + anIntArray868[i_66_] = -1; + } + break; + case 2: + name = ByteBufferUtils.getString(buffer); + break; + case 12: + size = buffer.get() & 0xFF; + break; + case 13: + standAnimation = buffer.getShort(); + break; + case 14: + walkAnimation = buffer.getShort(); + break; + case 15: + turnAnimation = buffer.getShort(); + break; + case 16: + buffer.getShort(); //Another turn animation + break; + case 17: + walkAnimation = buffer.getShort(); + turn180Animation = buffer.getShort(); + turnCWAnimation = buffer.getShort(); + turnCCWAnimation = buffer.getShort(); + break; + case 30: + case 31: + case 32: + case 33: + case 34: + options[opcode - 30] = ByteBufferUtils.getString(buffer); + break; + case 40: + length = buffer.get() & 0xFF; + aShortArray859 = new short[length]; + aShortArray896 = new short[length]; + for (int i_65_ = 0; (length ^ 0xffffffff) < (i_65_ ^ 0xffffffff); i_65_++) { + aShortArray896[i_65_] = (short) (buffer.getShort() & 0xFFFF); + aShortArray859[i_65_] = (short) (buffer.getShort() & 0xFFFF); + } + break; + case 41: + length = buffer.get() & 0xFF; + aShortArray880 = new short[length]; + aShortArray866 = new short[length]; + for (int i_54_ = 0; (i_54_ ^ 0xffffffff) > (length ^ 0xffffffff); i_54_++) { + aShortArray880[i_54_] = (short) (buffer.getShort() & 0xFFFF); + aShortArray866[i_54_] = (short) (buffer.getShort() & 0xFFFF); + } + break; + case 42: + length = buffer.get() & 0xFF; + aByteArray861 = new byte[length]; + for (int i_55_ = 0; length > i_55_; i_55_++) { + aByteArray861[i_55_] = (byte) buffer.get(); + } + break; + case 60: + length = buffer.get() & 0xFF; + anIntArray892 = new int[length]; + for (int i_64_ = 0; (i_64_ ^ 0xffffffff) 
> (length ^ 0xffffffff); i_64_++) { + anIntArray892[i_64_] = buffer.getShort() & 0xFFFF; + } + break; + case 93: + isVisibleOnMap = false; + break; + case 95: + setCombatLevel(buffer.getShort() & 0xFFFF); + break; + case 97: + anInt864 = buffer.getShort() & 0xFFFF; + break; + case 98: + anInt899 = buffer.getShort() & 0xFFFF; + break; + case 99: + aBoolean863 = true; + break; + case 100: + anInt869 = buffer.get(); + break; + case 101: + anInt897 = buffer.get() * 5; + break; + case 102: + headIcons = buffer.getShort() & 0xFFFF; + break; + case 103: + anInt853 = buffer.getShort() & 0xFFFF; + break; + case 106: + case 118: + configFileId = buffer.getShort() & 0xFFFF; + if (configFileId == 65535) { + configFileId = -1; + } + configId = buffer.getShort() & 0xFFFF; + if (configId == 65535) { + configId = -1; + } + int defaultValue = -1; + if ((opcode ^ 0xffffffff) == -119) { + defaultValue = buffer.getShort() & 0xFFFF; + if (defaultValue == 65535) { + defaultValue = -1; + } + } + length = buffer.get() & 0xFF; + childNPCIds = new int[2 + length]; + for (int i = 0; length >= i; i++) { + childNPCIds[i] = buffer.getShort() & 0xFFFF; + if (childNPCIds[i] == 65535) { + childNPCIds[i] = -1; + } + } + childNPCIds[length + 1] = defaultValue; + break; + case 107: + aBoolean841 = false; + break; + case 109: + aBoolean852 = false; + break; + case 111: + aBoolean857 = false; + break; + case 113: + aShort862 = (short) (buffer.getShort() & 0xFFFF); + aShort894 = (short) (buffer.getShort() & 0xFFFF); + break; + case 114: + aByte851 = (byte) (buffer.get()); + aByte854 = (byte) (buffer.get()); + break; + case 115: + buffer.get();// & 0xFF; + buffer.get();// & 0xFF; + break; + default: + System.out.println("Unhandled NPC definition opcode: " + opcode); + } + if (id == 3074) { + setName("Monk"); + options[0] = "Talk-to"; + } + if (id == 2020) { + setName("Vanstrom Klause"); + } + if (id == 381) { + setName("Captain Barnaby"); + options[0] = "Talk-to"; + options[2] = "Pay-fare"; + } + if (id 
== 3820) { + setName("Wise Old Man"); + options[0] = "Talk-to"; + } + if (id == 4495) { + setName("Grubfoot"); + options[0] = "Talk-to"; + } + if (id == 7707) { + setName("Drezel"); + } + } + + /** + * Checks if this NPC has an attack option. + * @return {@code True} if so. + */ + public boolean hasAttackOption() { + for (String option : options) { + if (option != null && option.equalsIgnoreCase("attack")) { + return true; + } + } + return false; + } + + public boolean hasAction(String optionName) { + if (options == null) { + return false; + } + for (String action : options) { + if (action == null) { + continue; + } + if (action.equalsIgnoreCase(optionName)) { + return true; + } + } + return false; + } + + /** + * Gets the definitions mapping. + * @return The mapping. + */ + public static final Map getDefinitions() { + return DEFINITIONS; + } + + /** + * Method sets the value for 'examine' + * @param examine the examine to set. + */ + public final void setExamine(String examine) { + this.examine = examine; + } + + /** + * Gets the size. + * @return The size. + */ + public int getSize() { + return size; + } + + /** + * Gets the headIcons. + * @return The headIcons. + */ + public int getHeadIcons() { + return headIcons; + } + + /** + * Gets the isVisibleOnMap. + * @return The isVisibleOnMap. + */ + public boolean isVisibleOnMap() { + return isVisibleOnMap; + } + + /** + * Gets the anInt833. + * @return The anInt833. + */ + public int getAnInt833() { + return anInt833; + } + + /** + * Gets the anInt836. + * @return The anInt836. + */ + public int getAnInt836() { + return anInt836; + } + + /** + * Gets the anInt837. + * @return The anInt837. + */ + public int getAnInt837() { + return anInt837; + } + + /** + * Gets the aBoolean841. + * @return The aBoolean841. + */ + public boolean isaBoolean841() { + return aBoolean841; + } + + /** + * Gets the anInt842. + * @return The anInt842. 
+ */ + public int getAnInt842() { + return anInt842; + } + + /** + * Gets the configFileId. + * @return The configFileId. + */ + public int getConfigFileId() { + return configFileId; + } + + /** + * Gets the childNPCIds. + * @return The childNPCIds. + */ + public int[] getChildNPCIds() { + return childNPCIds; + } + + /** + * Gets the anInt846. + * @return The anInt846. + */ + public int getAnInt846() { + return anInt846; + } + + /** + * Gets the anInt850. + * @return The anInt850. + */ + public int getAnInt850() { + return anInt850; + } + + /** + * Gets the aByte851. + * @return The aByte851. + */ + public byte getaByte851() { + return aByte851; + } + + /** + * Gets the aBoolean852. + * @return The aBoolean852. + */ + public boolean isaBoolean852() { + return aBoolean852; + } + + /** + * Gets the anInt853. + * @return The anInt853. + */ + public int getAnInt853() { + return anInt853; + } + + /** + * Gets the aByte854. + * @return The aByte854. + */ + public byte getaByte854() { + return aByte854; + } + + /** + * Gets the aBoolean856. + * @return The aBoolean856. + */ + public boolean isaBoolean856() { + return aBoolean856; + } + + /** + * Gets the aBoolean857. + * @return The aBoolean857. + */ + public boolean isaBoolean857() { + return aBoolean857; + } + + /** + * Gets the aShortArray859. + * @return The aShortArray859. + */ + public short[] getaShortArray859() { + return aShortArray859; + } + + /** + * Gets the aByteArray861. + * @return The aByteArray861. + */ + public byte[] getaByteArray861() { + return aByteArray861; + } + + /** + * Gets the aShort862. + * @return The aShort862. + */ + public short getaShort862() { + return aShort862; + } + + /** + * Gets the aBoolean863. + * @return The aBoolean863. + */ + public boolean isaBoolean863() { + return aBoolean863; + } + + /** + * Gets the anInt864. + * @return The anInt864. + */ + public int getAnInt864() { + return anInt864; + } + + /** + * Gets the aShortArray866. + * @return The aShortArray866. 
+ */ + public short[] getaShortArray866() { + return aShortArray866; + } + + /** + * Gets the anIntArray868. + * @return The anIntArray868. + */ + public int[] getAnIntArray868() { + return anIntArray868; + } + + /** + * Gets the anInt869. + * @return The anInt869. + */ + public int getAnInt869() { + return anInt869; + } + + /** + * Gets the anInt870. + * @return The anInt870. + */ + public int getAnInt870() { + return anInt870; + } + + /** + * Gets the anInt871. + * @return The anInt871. + */ + public int getAnInt871() { + return anInt871; + } + + /** + * Gets the anInt872. + * @return The anInt872. + */ + public int getAnInt872() { + return anInt872; + } + + /** + * Gets the anInt874. + * @return The anInt874. + */ + public int getAnInt874() { + return anInt874; + } + + /** + * Gets the anInt875. + * @return The anInt875. + */ + public int getAnInt875() { + return anInt875; + } + + /** + * Gets the anInt876. + * @return The anInt876. + */ + public int getAnInt876() { + return anInt876; + } + + /** + * Gets the anInt879. + * @return The anInt879. + */ + public int getAnInt879() { + return anInt879; + } + + /** + * Gets the aShortArray880. + * @return The aShortArray880. + */ + public short[] getaShortArray880() { + return aShortArray880; + } + + /** + * Gets the anInt884. + * @return The anInt884. + */ + public int getAnInt884() { + return anInt884; + } + + /** + * Gets the configId. + * @return The configId. + */ + public int getConfigId() { + return configId; + } + + /** + * Gets the anInt889. + * @return The anInt889. + */ + public int getAnInt889() { + return anInt889; + } + + /** + * Gets the anIntArray892. + * @return The anIntArray892. + */ + public int[] getAnIntArray892() { + return anIntArray892; + } + + /** + * Gets the aShort894. + * @return The aShort894. + */ + public short getaShort894() { + return aShort894; + } + + /** + * Gets the aShortArray896. + * @return The aShortArray896. 
+ */ + public short[] getaShortArray896() { + return aShortArray896; + } + + /** + * Gets the anInt897. + * @return The anInt897. + */ + public int getAnInt897() { + return anInt897; + } + + /** + * Gets the anInt899. + * @return The anInt899. + */ + public int getAnInt899() { + return anInt899; + } + + /** + * Gets the anInt901. + * @return The anInt901. + */ + public int getAnInt901() { + return anInt901; + } + + /** + * Gets the standAnimation. + * @return The standAnimation. + */ + public int getStandAnimation() { + return standAnimation; + } + + /** + * Gets the walkAnimation. + * @return The walkAnimation. + */ + public int getWalkAnimation() { + return walkAnimation; + } + + /** + * Gets the turnAnimation. + * @return The turnAnimation. + */ + public int getTurnAnimation() { + return turnAnimation; + } + + /** + * Gets the turn180Animation. + * @return The turn180Animation. + */ + public int getTurn180Animation() { + return turn180Animation; + } + + /** + * Gets the turnCWAnimation. + * @return The turnCWAnimation. + */ + public int getTurnCWAnimation() { + return turnCWAnimation; + } + + /** + * Gets the turnCCWAnimation. + * @return The turnCCWAnimation. + */ + public int getTurnCCWAnimation() { + return turnCCWAnimation; + } + + /** + * Gets the combatLevel. + * @return The combatLevel. + */ + public int getCombatLevel() { + return combatLevel; + } + + /** + * Sets the combatLevel. + * @param combatLevel The combatLevel to set. + */ + public void setCombatLevel(int combatLevel) { + this.combatLevel = combatLevel; + } + + /** + * Gets the combatDistance. + * @return The combatDistance. + */ + public int getCombatDistance() { + return combatDistance; + } + + /** + * Sets the combatDistance. + * @param combatDistance The combatDistance to set. + */ + public void setCombatDistance(int combatDistance) { + this.combatDistance = combatDistance; + } + + /** + * Gets the combatGraphics. + * @return The combatGraphics. 
+ */ + public Graphics[] getCombatGraphics() { + return combatGraphics; + } + + /** + * Sets the combatGraphics. + * @param combatGraphics The combatGraphics to set. + */ + public void setCombatGraphics(Graphics[] combatGraphics) { + this.combatGraphics = combatGraphics; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/def/impl/ObjectDefinition.java b/Tools/Arios Editor/src/org/arios/cache/def/impl/ObjectDefinition.java new file mode 100644 index 000000000..2f0eb53e9 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/def/impl/ObjectDefinition.java @@ -0,0 +1,1512 @@ +package org.arios.cache.def.impl; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.def.Definition; +import org.arios.cache.misc.ByteBufferUtils; + +/** + * Represents an object's definition. + * @author Emperor + */ +public class ObjectDefinition extends Definition { + + /** + * The item definitions mapping. + */ + private static final Map DEFINITIONS = new HashMap(); + + /** + * The original model colors. + */ + private short[] originalColors; + + /** + * The children ids. + */ + public int[] childrenIds; + + /** + * The model ids. + */ + private int[] modelIds; + + /** + * The model configuration. + */ + private int[] modelConfiguration; + + /** + * A unknown integer. + */ + static int anInt3832; + + /** + * A unkown integer array. + */ + int[] anIntArray3833 = null; + + /** + * A unknown integer. + */ + private int anInt3834; + + /** + * A unknown integer. + */ + int anInt3835; + + /** + * A unknown integer. + */ + static int anInt3836; + + /** + * A unknown byte. + */ + private byte aByte3837; + + /** + * A unknown integer. + */ + int anInt3838 = -1; + + /** + * A unknown boolean. + */ + boolean aBoolean3839; + + /** + * A unknown integer. + */ + private int anInt3840; + + /** + * A unknown integer. + */ + private int anInt3841; + + /** + * A unknown integer. 
+ */ + static int anInt3842; + + /** + * A unknown integer. + */ + static int anInt3843; + + /** + * A unknown integer. + */ + int anInt3844; + + /** + * A unknown boolean. + */ + boolean aBoolean3845; + + /** + * A unknown integer. + */ + static int anInt3846; + + /** + * A unknown byte. + */ + private byte aByte3847; + + /** + * A unknown byte. + */ + private byte aByte3849; + + /** + * A unknown integer. + */ + int anInt3850; + + /** + * A unknown integer. + */ + int anInt3851; + + /** + * The second boolean. + */ + public boolean secondBool; + + /** + * A unknown boolean. + */ + public boolean aBoolean3853; + + /** + * A unknown integer. + */ + int anInt3855; + + /** + * The first boolean. + */ + public boolean notClipped; + + /** + * A unknown integer. + */ + int anInt3857; + + /** + * A unknown byte array. + */ + private byte[] aByteArray3858; + + /** + * A unknown integer array. + */ + int[] anIntArray3859; + + /** + * A unknown integer. + */ + int anInt3860; + + /** + * The config file id. + */ + int configFileId; + + /** + * The modified colors. + */ + private short[] modifiedColors; + + /** + * A unknown integer. + */ + int anInt3865; + + /** + * A unknown boolean. + */ + boolean aBoolean3866; + + /** + * A unknown boolean. + */ + boolean aBoolean3867; + + /** + * The solid. + */ + public boolean projectileClipped; + + /** + * A unknown integer array. + */ + private int[] anIntArray3869; + + /** + * A unknown boolean. + */ + boolean aBoolean3870; + + /** + * The y-size. + */ + public int sizeY; + + /** + * A unknown boolean. + */ + boolean aBoolean3872; + + /** + * A unknown boolean. + */ + boolean membersOnly; + + /** + * The third integer. + */ + public boolean boolean1; + + /** + * A unknown integer. + */ + private int anInt3875; + + /** + * The add object check. + */ + public int addObjectCheck; + + /** + * A unknown integer. + */ + private int anInt3877; + + /** + * A unknown integer. + */ + private int anInt3878; + + /** + * The clipping type. 
+ */ + public int clipType; + + /** + * A unknown integer. + */ + private int anInt3881; + + /** + * A unknown integer. + */ + private int anInt3882; + + /** + * A unknown integer. + */ + private int anInt3883; + + /** + * The loader. + */ + Object loader; + + /** + * A unknown integer. + */ + private int anInt3889; + + /** + * The x-size. + */ + public int sizeX; + + /** + * A unknown boolean. + */ + public boolean aBoolean3891; + + /** + * A unknown integer. + */ + int anInt3892; + + /** + * The second integer. + */ + public int secondInt; + + /** + * A unknown boolean. + */ + boolean aBoolean3894; + + /** + * A unknown boolean. + */ + boolean aBoolean3895; + + /** + * A unknown integer. + */ + int anInt3896; + + /** + * The configuration id. + */ + int configId; + + /** + * A unknown byte array. + */ + private byte[] aByteArray3899; + + /** + * A unknown integer. + */ + int anInt3900; + + /** + * A unknown integer. + */ + private int anInt3902; + + /** + * A unknown integer. + */ + int anInt3904; + + /** + * A unknown integer. + */ + int anInt3905; + + /** + * A unknown boolean. + */ + boolean aBoolean3906; + + /** + * A unknown integer array. + */ + int[] anIntArray3908; + + /** + * A unknown byte. + */ + private byte aByte3912; + + /** + * A unknown integer. + */ + int anInt3913; + + /** + * A unknown byte. + */ + private byte aByte3914; + + /** + * A unknown integer. + */ + private int anInt3915; + + /** + * A unknown integer array. + */ + private int[][] anIntArrayArray3916; + + /** + * A unknown integer. + */ + private int anInt3917; + + /** + * A unknown short array. + */ + private short[] aShortArray3919; + + /** + * A unknown short array. + */ + private short[] aShortArray3920; + + /** + * A unknown integer. + */ + int anInt3921; + + /** + * A unknown object. + */ + private Object aClass194_3922; + + /** + * A unknown integer. + */ + boolean aBoolean3923; + + /** + * A unknown integer. + */ + boolean aBoolean3924; + + /** + * The walking flag. 
 */
int walkingFlag;

/**
 * If the object has hidden options.
 */
private boolean hasHiddenOptions;

/**
 * Construct a new {@code ObjectDefinition} {@code Object}.
 * Initializes every field to its cache default; parseDefinition overrides
 * fields per-opcode when present in the definition data (-1 generally means
 * "unset"/"none").
 */
public ObjectDefinition() {
	anInt3835 = -1;
	anInt3860 = -1;
	configFileId = -1;
	aBoolean3866 = false;
	anInt3851 = -1;
	anInt3865 = 255;
	aBoolean3845 = false;
	aBoolean3867 = false;
	anInt3850 = 0;
	anInt3844 = -1;
	anInt3881 = 0;
	anInt3857 = -1;
	aBoolean3872 = true;
	anInt3882 = -1;
	anInt3834 = 0;
	options = new String[5];
	anInt3875 = 0;
	aBoolean3839 = false;
	anIntArray3869 = null;
	sizeY = 1;
	boolean1 = false;
	projectileClipped = true;
	anInt3883 = 0;
	aBoolean3895 = true;
	anInt3840 = 0;
	aBoolean3870 = false;
	anInt3889 = 0;
	aBoolean3853 = true;
	secondBool = false;
	clipType = 2; // default: full clipping
	anInt3855 = -1;
	anInt3878 = 0;
	anInt3904 = 0;
	sizeX = 1;
	addObjectCheck = -1;
	notClipped = false;
	aBoolean3891 = false;
	anInt3905 = 0;
	name = "null";
	anInt3913 = -1;
	aBoolean3906 = false;
	membersOnly = false;
	aByte3914 = (byte) 0;
	anInt3915 = 0;
	anInt3900 = 0;
	secondInt = -1; // -1 = "not set"; configureObject infers a value later
	aBoolean3894 = false;
	aByte3912 = (byte) 0;
	anInt3921 = 0;
	anInt3902 = 128;
	configId = -1;
	anInt3877 = 0;
	walkingFlag = 0;
	anInt3892 = 64;
	aBoolean3923 = false;
	aBoolean3924 = false;
	anInt3841 = 128;
	anInt3917 = 128;
}

/**
 * Gets an object definition.
 * Returns {@code null} when the id has not been loaded into the mapping.
 * NOTE(review): DEFINITIONS is declared as a raw Map (generics were likely
 * lost in extraction); restoring {@code Map<Integer, ObjectDefinition>} would
 * make this compile without a cast - verify against the original source.
 * @param objectId The object's id.
 * @return The object definition.
 */
public static ObjectDefinition forId(int objectId) {
	return DEFINITIONS.get(objectId);
}

/**
 * Parses an object's definitions.
 * @param objectId The object id.
 * @param buffer The buffer.
 * @return The object definition.
+ */ + public static ObjectDefinition parseDefinition(int objectId, ByteBuffer buffer) { + ObjectDefinition def = new ObjectDefinition(); + def.id = objectId; + while (true) { + int opcode = buffer.get() & 0xFF; + if (opcode == 1 || opcode == 5) { + int length = buffer.get() & 0xff; + if (def.modelIds == null) { + def.modelIds = new int[length]; + if (opcode == 1) { + def.modelConfiguration = new int[length]; + } + for (int i = 0; i < length; i++) { + def.modelIds[i] = buffer.getShort() & 0xFFFF; + if (opcode == 1) { + def.modelConfiguration[i] = buffer.get() & 0xFF; + } + } +// if (objectId == 2618) { +// System.out.println(Arrays.toString(def.modelIds) + ", " + Arrays.toString(def.modelConfiguration)); +// } + } else { + buffer.position(buffer.position() + (length * (opcode == 1 ? 3 : 2))); + } + } else if (opcode == 2) { + def.name = ByteBufferUtils.getString(buffer); + } else if (opcode == 14) { + def.sizeX = buffer.get() & 0xFF; + } else if (opcode == 15) { + def.sizeY = buffer.get() & 0xFF; + } else if (opcode == 17) { + def.projectileClipped = false; + def.clipType = 0; + } else if (opcode == 18) { + def.projectileClipped = false; + } else if (opcode == 19) { + def.secondInt = buffer.get() & 0xFF; + } else if (opcode == 21) { + def.aByte3912 = (byte) 1; + } else if (opcode == 22) { + def.aBoolean3867 = true; + } else if (opcode == 23) { + def.boolean1 = true; + } else if (opcode == 24) { + def.addObjectCheck = buffer.getShort() & 0xFFFF; + if (def.addObjectCheck == 65535) { + def.addObjectCheck = -1; + } + } else if (opcode == 27) { + def.clipType = 1; + } else if (opcode == 28) { + def.anInt3892 = ((buffer.get() & 0xFF) << 2); + } else if (opcode == 29) { + def.anInt3878 = buffer.get(); + } else if (opcode == 39) { + def.anInt3840 = buffer.get() * 5; + } else if (opcode >= 30 && opcode < 35) { + def.options[opcode - 30] = ByteBufferUtils.getString(buffer); + if (def.options[opcode - 30].equals("Hidden")) { +// def.options[opcode - 30] = null; + 
def.hasHiddenOptions = true; + } + } else if (opcode == 40) { + int length = buffer.get() & 0xFF; + def.originalColors = new short[length]; + def.modifiedColors = new short[length]; + for (int i = 0; i < length; i++) { + def.originalColors[i] = buffer.getShort(); + def.modifiedColors[i] = buffer.getShort(); + } + } else if (opcode == 41) { + int length = buffer.get() & 0xFF; + def.aShortArray3920 = new short[length]; + def.aShortArray3919 = new short[length]; + for (int i = 0; i < length; i++) { + def.aShortArray3920[i] = buffer.getShort(); + def.aShortArray3919[i] = buffer.getShort(); + } + } else if (opcode == 42) { + int length = buffer.get() & 0xFF; + def.aByteArray3858 = new byte[length]; + for (int i = 0; i < length; i++) { + def.aByteArray3858[i] = buffer.get(); + } + } else if (opcode == 60) { + buffer.getShort(); + } else if (opcode == 62) { + def.aBoolean3839 = true; + } else if (opcode == 64) { + def.aBoolean3872 = false; + } else if (opcode == 65) { + def.anInt3902 = buffer.getShort() & 0xFFFF; + } else if (opcode == 66) { + def.anInt3841 = buffer.getShort() & 0xFFFF; + } else if (opcode == 67) { + def.anInt3917 = buffer.getShort() & 0xFFFF; + } else if (opcode == 68) { + buffer.getShort(); + } else if (opcode == 69) { + def.walkingFlag = buffer.get() & 0xFF; + } else if (opcode == 70) { + def.anInt3883 = buffer.getShort() << 2; + } else if (opcode == 71) { + def.anInt3889 = buffer.getShort() << 2; + } else if (opcode == 72) { + def.anInt3915 = buffer.getShort() << 2; + } else if (opcode == 73) { + def.secondBool = true; + } else if (opcode == 74) { + def.notClipped = true; + } else if (opcode == 75) { + def.anInt3855 = buffer.get() & 0xFF; + } else if (opcode == 77 || opcode == 92) { + def.configFileId = buffer.getShort() & 0xFFFF; + if (def.configFileId == 65535) { + def.configFileId = -1; + } + def.configId = buffer.getShort() & 0xFFFF; + if (def.configId == 65535) { + def.configId = -1; + } + int i_66_ = -1; + if (opcode == 92) { + i_66_ = 
buffer.getShort() & 0xFFFF; + if (i_66_ == 65535) { + i_66_ = -1; + } + } + int i_67_ = buffer.get() & 0xFF; + def.childrenIds = new int[i_67_ + 2]; + for (int i_68_ = 0; i_67_ >= i_68_; i_68_++) { + def.childrenIds[i_68_] = buffer.getShort() & 0xFFFF; + if (def.childrenIds[i_68_] == 65535) { + def.childrenIds[i_68_] = -1; + } + } + def.childrenIds[i_67_ + 1] = i_66_; + } else if (opcode == 78) { + def.anInt3860 = buffer.getShort() & 0xFFFF; + def.anInt3904 = buffer.get() & 0xFF; + } else if (opcode == 79) { + def.anInt3900 = buffer.getShort() & 0xFFFF; + def.anInt3905 = buffer.getShort() & 0xFFFF; + def.anInt3904 = buffer.get() & 0xFF; + int length = buffer.get() & 0xFF; + def.anIntArray3859 = new int[length]; + for (int i = 0; i < length; i++) { + def.anIntArray3859[i] = buffer.getShort() & 0xFFFF; + } + } else if (opcode == 81) { + def.aByte3912 = (byte) 2; + def.anInt3882 = 256 * buffer.get() & 0xFF; + } else if (opcode == 82 || opcode == 88) { + //Nothing. + } else if (opcode == 89) { + def.aBoolean3895 = false; + } else if (opcode == 90) { + def.aBoolean3870 = true; + } else if (opcode == 91) { + def.membersOnly = true; + } else if (opcode == 93) { + def.aByte3912 = (byte) 3; + def.anInt3882 = buffer.getShort() & 0xFFFF; + } else if (opcode == 94) { + def.aByte3912 = (byte) 4; + } else if (opcode == 95) { + def.aByte3912 = (byte) 5; + } else { + if (opcode != 0) { + System.err.println("Unhandled opcode: " + opcode); + } + break; + } + } + def.configureObject(); + if (def.notClipped) { + if (def.id == 23745 || def.id == 35343) { + System.out.println("Not clipped - " + def.id); + } + def.clipType = 0; + def.projectileClipped = false; + } + /*if (!Main.getGameWorld().getMainContext().isMembers() && def.membersOnly) { + def.options = null; + }*///dont need + return def; + } + + private static final int[] PIT_TRAPS = new int[] { 6632, 6633, 12602, 19227, 19260, 19261, 19262, 19263, 19264, 19265, 19266, 19267, 19268, 30082 }; + + /** + * Configures the object 
     * definitions.
     */
    final void configureObject() {
        // Hand-edited overrides applied after cache load: fixes names/options
        // for specific object ids, then derives defaulted fields.
        if (id == 4039) {
            name = "Trapdoor";
            options[0] = "Open";
        }
        if (id == 9260) {
            options[0] = "Take-seed";
        }
        if (id == 32836) {
            options[0] = "Walk-up";
        } else if (id == 2614) {
            options[0] = "Open";
        }
        // Hunter pit traps share one name/option override.
        for (int i : PIT_TRAPS) {
            if (i == id) {
                name = "Pit";
                options[0] = "Trap";
            }
        }
        if (id == 15042) {
            options[0] = "Take";
        }
        if (id == 5492) {
            options[0] = "Open";
            options[4] = "Pick-lock";
            options[2] = "Close";
            options[3] = "Climb-down";
        }
        if (id >= 2452 && id <= 2462) {
            name = "Mysterious ruins";
            options[0] = "Enter";
        }
        if (id == 7153 || id == 7143) {
            name = "Rock";
            options[0] = "Mine";
        }
        if (id == 7145 || id == 7151) {
            name = "Boil";
            options[0] = "Burn-down";
        }
        if (id == 7152 || id == 7144) {
            name = "Tendrils";
            options[0] = "Chop";
        }
        if (id == 7148) {
            name = "Passage";
            options[0] = "go-through";
        }
        if (id == 7149 || id == 7147) {
            name = "Gap";
            options[0] = "squeeze-through";
        }
        if (id == 7146 || id == 7150) {
            name = "eyes";
            options[0] = "distract";
        }
        if (id == 2464) {
            name = "Strange stones";
            options[0] = "Search";
        }
        if (id == 1781) {
            name = "Flour bin";
            options[0] = "Empty";
        }
        if (id == 12163 || id == 12164 || id == 12165 || id == 12166) {
            name = "Canoe station";
            options[0] = "Chop-down";
        }
        if (id >= 146 && id <= 151) {
            options[0] = "Pull";
            options[1] = "Inspect";
        }
        if (id <= 145 && id >= 137) {
            options[0] = "Open";
        }
        if (id == 17431) {//silverlight quest
            name = "Rusty key";
            options[0] = "Take";
        }
        if (id == 31759) {
            name = "Drain";
            options[0] = "Search";
        }
        // Derive secondInt when the cache left it unset (-1).
        // NOTE(review): the options[i] loop below would NPE if options were
        // null here — hasAction() guards against null, this path does not; confirm.
        if (secondInt == -1) {
            secondInt = 0;
            if (modelIds != null && (modelConfiguration == null || modelConfiguration[0] == 10)) {
                secondInt = 1;
            }
            for (int i = 0; i < 5; i++) {
                if (options[i] != null) {
                    secondInt = 1;
                    break;
                }
            }
        }
        // Default anInt3855 from the clipping type when unset.
        if (anInt3855 == -1) {
            anInt3855 = clipType == 0 ? 0 : 1;
        }
    }

    /** Gets aBoolean3839 (set by opcode 62). */
    public boolean isaBoolean3839() {
        return aBoolean3839;
    }

    /** Sets aBoolean3839. */
    public void setaBoolean3839(boolean aBoolean3839) {
        this.aBoolean3839 = aBoolean3839;
    }

    /** Gets the original model colors (populated by opcode 40). */
    public short[] getOriginalColors() {
        return originalColors;
    }

    /** Gets the children ids (populated by opcode 77/92; terminated by the opcode-92 default). */
    public int[] getChildrenIds() {
        return childrenIds;
    }

    /** Gets the static anInt3832. */
    public static int getAnInt3832() {
        return anInt3832;
    }

    /** Gets anIntArray3833. */
    public int[] getAnIntArray3833() {
        return anIntArray3833;
    }

    /** Gets anInt3834. */
    public int getAnInt3834() {
        return anInt3834;
    }

    /** Gets anInt3835. */
    public int getAnInt3835() {
        return anInt3835;
    }

    /** Gets the static anInt3836. */
    public static int getAnInt3836() {
        return anInt3836;
    }

    /** Gets aByte3837. */
    public byte getaByte3837() {
        return aByte3837;
    }

    /** Gets anInt3838. */
    public int getAnInt3838() {
        return anInt3838;
    }

    /** Gets anInt3840. */
    public int getAnInt3840() {
        return anInt3840;
    }

    /** Gets anInt3841 (set by opcode 66). */
    public int getAnInt3841() {
        return anInt3841;
    }

    /** Gets the static anInt3842. */
    public static int getAnInt3842() {
        return anInt3842;
    }

    /** Gets the static anInt3843. */
    public static int getAnInt3843() {
        return anInt3843;
    }

    /** Gets anInt3844. */
    public int getAnInt3844() {
        return anInt3844;
    }

    /** Gets aBoolean3845. */
    public boolean isaBoolean3845() {
        return aBoolean3845;
    }

    /** Gets the static anInt3846. */
    public static int getAnInt3846() {
        return anInt3846;
    }

    /** Gets aByte3847. */
    public byte getaByte3847() {
        return aByte3847;
    }

    /** Gets aByte3849. */
    public byte getaByte3849() {
        return aByte3849;
    }

    /** Gets anInt3850. */
    public int getAnInt3850() {
        return anInt3850;
    }

    /** Gets anInt3851. */
    public int getAnInt3851() {
        return anInt3851;
    }

    /** Gets secondBool (set by opcode 73). */
    public boolean isSecondBool() {
        return secondBool;
    }

    /** Gets aBoolean3853. */
    public boolean isaBoolean3853() {
        return aBoolean3853;
    }

    /** Gets anInt3855 (opcode 75, or derived in configureObject). */
    public int getAnInt3855() {
        return anInt3855;
    }

    /** NOTE(review): despite the name, this returns {@code notClipped} (opcode 74). */
    public boolean isFirstBool() {
        return notClipped;
    }

    /** Gets anInt3857. */
    public int getAnInt3857() {
        return anInt3857;
    }

    /** Gets aByteArray3858 (populated by opcode 42). */
    public byte[] getaByteArray3858() {
        return aByteArray3858;
    }

    /** Gets anIntArray3859 (populated by opcode 79). */
    public int[] getAnIntArray3859() {
        return anIntArray3859;
    }

    /** Gets anInt3860 (set by opcode 78). */
    public int getAnInt3860() {
        return anInt3860;
    }

    /** Gets the interaction options. */
    @Override
    public String[] getOptions() {
        return options;
    }

    /** Gets the config file id (varbit file; opcode 77/92, 65535 mapped to -1). */
    public int getConfigFileId() {
        return configFileId;
    }

    /** Gets the modified model colors (populated by opcode 40). */
    public short[] getModifiedColors() {
        return modifiedColors;
    }

    /** Gets anInt3865. */
    public int getAnInt3865() {
        return anInt3865;
    }

    /** Gets aBoolean3866. */
    public boolean isaBoolean3866() {
        return aBoolean3866;
    }

    /** Gets aBoolean3867. */
    public boolean isaBoolean3867() {
        return aBoolean3867;
    }

    /** Gets whether projectiles are clipped by this object. */
    public boolean isProjectileClipped() {
        return projectileClipped;
    }

    /** Gets anIntArray3869. */
    public int[] getAnIntArray3869() {
        return anIntArray3869;
    }

    /** Gets aBoolean3870 (set by opcode 90). */
    public boolean isaBoolean3870() {
        return aBoolean3870;
    }

    /** Gets the object size on the Y axis. */
    public int getSizeY() {
        return sizeY;
    }

    /** Gets aBoolean3872 (cleared by opcode 64). */
    public boolean isaBoolean3872() {
        return aBoolean3872;
    }

    /** NOTE(review): despite the name, this returns {@code membersOnly} (opcode 91). */
    public boolean isaBoolean3873() {
        return membersOnly;
    }

    /** NOTE(review): returns {@code boolean1}, not a "thirdInt" as the old doc claimed. */
    public boolean getThirdBoolean() {
        return boolean1;
    }

    /** Gets anInt3875. */
    public int getAnInt3875() {
        return anInt3875;
    }

    /** Gets the addObjectCheck flag. */
    public int getAddObjectCheck() {
        return addObjectCheck;
    }

    /** Gets anInt3877. */
    public int getAnInt3877() {
        return anInt3877;
    }

    /** Gets anInt3878. */
    public int getAnInt3878() {
        return anInt3878;
    }

    /** Gets the clipping type (zeroed when notClipped). */
    public int getClipType() {
        return clipType;
    }

    /** Gets anInt3881. */
    public int getAnInt3881() {
        return anInt3881;
    }

    /** Gets anInt3882 (opcode 81/93). */
    public int getAnInt3882() {
        return anInt3882;
    }

    /** Gets anInt3883 (opcode 70, stored pre-shifted left by 2). */
    public int getAnInt3883() {
        return anInt3883;
    }

    /** Gets the loader. */
    public Object getLoader() {
        return loader;
    }

    /** Gets anInt3889 (opcode 71, stored pre-shifted left by 2). */
    public int getAnInt3889() {
        return anInt3889;
    }

    /** Gets the object size on the X axis. */
    public int getSizeX() {
        return sizeX;
    }

    /** Gets aBoolean3891. */
    public boolean isaBoolean3891() {
        return aBoolean3891;
    }

    /** Gets anInt3892. */
    public int getAnInt3892() {
        return anInt3892;
    }

    /** Gets secondInt (derived in configureObject when unset). */
    public int getSecondInt() {
        return secondInt;
    }

    /** Gets aBoolean3894. */
    public boolean isaBoolean3894() {
        return aBoolean3894;
    }

    /** Gets aBoolean3895 (cleared by opcode 89). */
    public boolean isaBoolean3895() {
        return aBoolean3895;
    }

    /** Gets anInt3896. */
    public int getAnInt3896() {
        return anInt3896;
    }

    /** Gets the config (varp) id (opcode 77/92, 65535 mapped to -1). */
    public int getConfigId() {
        return configId;
    }

    /** Gets aByteArray3899. */
    public byte[] getaByteArray3899() {
        return aByteArray3899;
    }

    /** Gets anInt3900 (set by opcode 79). */
    public int getAnInt3900() {
        return anInt3900;
    }

    /** Gets the object name. */
    @Override
    public String getName() {
        return name;
    }

    /** Gets anInt3902 (set by opcode 65). */
    public int getAnInt3902() {
        return anInt3902;
    }

    /** Gets anInt3904 (set by opcode 78/79). */
    public int getAnInt3904() {
        return anInt3904;
    }

    /** Gets anInt3905 (set by opcode 79). */
    public int getAnInt3905() {
        return anInt3905;
    }

    /** Gets aBoolean3906. */
    public boolean isaBoolean3906() {
        return aBoolean3906;
    }

    /** Gets anIntArray3908. */
    public int[] getAnIntArray3908() {
        return anIntArray3908;
    }

    /** Gets aByte3912 (set by opcode 81/93/94/95). */
    public byte getaByte3912() {
        return aByte3912;
    }

    /** Gets anInt3913. */
    public int getAnInt3913() {
        return anInt3913;
    }

    /** Gets aByte3914. */
    public byte getaByte3914() {
        return aByte3914;
    }

    /** Gets anInt3915 (opcode 72, stored pre-shifted left by 2). */
    public int getAnInt3915() {
        return anInt3915;
    }

    /** Gets anIntArrayArray3916. */
    public int[][] getAnIntArrayArray3916() {
        return anIntArrayArray3916;
    }

    /** Gets anInt3917 (set by opcode 67). */
    public int getAnInt3917() {
        return anInt3917;
    }

    /** Gets aShortArray3919 (populated by opcode 41). */
    public short[] getaShortArray3919() {
        return aShortArray3919;
    }

    /** Gets aShortArray3920 (populated by opcode 41). */
    public short[] getaShortArray3920() {
        return aShortArray3920;
    }

    /** Gets anInt3921. */
    public int getAnInt3921() {
        return anInt3921;
    }

    /** Gets aClass194_3922. */
    public Object getaClass194_3922() {
        return aClass194_3922;
    }

    /** Gets aBoolean3923. */
    public boolean isaBoolean3923() {
        return aBoolean3923;
    }

    /** Gets aBoolean3924. */
    public boolean isaBoolean3924() {
        return aBoolean3924;
    }

    /**
     * Gets the object's model ids.
     * @return The model ids array.
     */
    public int[] getModelIds() {
        return modelIds;
    }

    /**
     * Checks if the object has the given interaction option (case-insensitive).
     * @param action The option to look for.
     * @return {@code true} if present.
     */
    public boolean hasAction(String action) {
        if (options == null) {
            return false;
        }
        for (String option : options) {
            if (option == null) {
                continue;
            }
            if (option.equalsIgnoreCase(action)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Gets the definitions mapping.
     * NOTE(review): raw {@code Map} — original signature preserved.
     * @return the definitions
     */
    public static Map getDefinitions() {
        return DEFINITIONS;
    }

    /** Gets the hasHiddenOptions flag (set by opcode 39, per the parser above). */
    public boolean isHasHiddenOptions() {
        return hasHiddenOptions;
    }

    /** Sets the hasHiddenOptions flag. */
    public void setHasHiddenOptions(boolean hasHiddenOptions) {
        this.hasHiddenOptions = hasHiddenOptions;
    }

    /**
     * Gets the walking flag.
     * @return The walking flag.
+ */ + public int getWalkingFlag() { + return walkingFlag; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/BZip2BlockEntry.java b/Tools/Arios Editor/src/org/arios/cache/misc/BZip2BlockEntry.java new file mode 100644 index 000000000..79cb9b9fc --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/BZip2BlockEntry.java @@ -0,0 +1,56 @@ +package org.arios.cache.misc; + +public class BZip2BlockEntry { + + boolean aBooleanArray2205[]; + boolean aBooleanArray2213[]; + byte aByte2201; + byte aByteArray2204[]; + byte aByteArray2211[]; + byte aByteArray2212[]; + byte aByteArray2214[]; + byte aByteArray2219[]; + byte aByteArray2224[]; + byte aByteArrayArray2229[][]; + int anInt2202; + int anInt2203; + int anInt2206; + int anInt2207; + int anInt2208; + int anInt2209; + int anInt2215; + int anInt2216; + int anInt2217; + int anInt2221; + int anInt2222; + int anInt2223; + int anInt2225; + int anInt2227; + int anInt2232; + int anIntArray2200[]; + int anIntArray2220[]; + int anIntArray2226[]; + int anIntArray2228[]; + int anIntArrayArray2210[][]; + int anIntArrayArray2218[][]; + int anIntArrayArray2230[][]; + + public BZip2BlockEntry() { + anIntArray2200 = new int[6]; + anInt2203 = 0; + aByteArray2204 = new byte[4096]; + aByteArray2211 = new byte[256]; + aByteArray2214 = new byte[18002]; + aByteArray2219 = new byte[18002]; + anIntArray2220 = new int[257]; + anIntArrayArray2218 = new int[6][258]; + aBooleanArray2205 = new boolean[16]; + aBooleanArray2213 = new boolean[256]; + anInt2209 = 0; + anIntArray2226 = new int[16]; + anIntArrayArray2210 = new int[6][258]; + aByteArrayArray2229 = new byte[6][258]; + anIntArrayArray2230 = new int[6][258]; + anIntArray2228 = new int[256]; + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/BZip2Decompressor.java b/Tools/Arios Editor/src/org/arios/cache/misc/BZip2Decompressor.java new file mode 100644 index 000000000..48cf40bd5 --- /dev/null +++ b/Tools/Arios 
// ==== File: Tools/Arios Editor/src/org/arios/cache/misc/BZip2Decompressor.java (new file in this diff) ====

package org.arios.cache.misc;

/**
 * Header-less BZip2 decompressor (deobfuscated client code). The RS cache
 * stores bzip2 streams without the "BZh" magic, so the stream is decoded
 * directly into a caller-provided output buffer.
 * <p>
 * NOT thread-safe beyond the synchronization on the shared
 * {@code entryInstance}; {@code anIntArray257} is a shared static work buffer.
 * Method names are from deobfuscation and are preserved.
 */
public class BZip2Decompressor {

    /** Shared BWT work buffer (one int per output byte; low byte = value, high bits = link). */
    private static int anIntArray257[];
    /** Shared scratch state; all calls serialize on this instance. */
    private static BZip2BlockEntry entryInstance = new BZip2BlockEntry();

    /**
     * Decompresses {@code packedData} into {@code decompressedData}.
     * @param decompressedData The output buffer (its length bounds the decode).
     * @param packedData The packed input.
     * @param containerSize Unused. NOTE(review): parameter is never read.
     * @param blockSize The start offset into {@code packedData}.
     */
    public static final void decompress(byte decompressedData[], byte packedData[], int containerSize, int blockSize) {
        synchronized (entryInstance) {
            entryInstance.aByteArray2224 = packedData;
            entryInstance.anInt2209 = blockSize;
            entryInstance.aByteArray2212 = decompressedData;
            entryInstance.anInt2203 = 0;
            entryInstance.anInt2206 = decompressedData.length;
            entryInstance.anInt2232 = 0;
            entryInstance.anInt2207 = 0;
            entryInstance.anInt2217 = 0;
            entryInstance.anInt2216 = 0;
            method1793(entryInstance);
            // Drop buffer references so the shared entry does not pin caller arrays.
            entryInstance.aByteArray2224 = null;
            entryInstance.aByteArray2212 = null;
        }
    }

    /**
     * Builds the used-symbol table: collects the indices flagged in
     * aBooleanArray2213 into aByteArray2211 and counts them in anInt2215.
     */
    private static final void method1785(BZip2BlockEntry entry) {
        entry.anInt2215 = 0;
        for (int i = 0; i < 256; i++) {
            if (entry.aBooleanArray2213[i]) {
                entry.aByteArray2211[entry.anInt2215] = (byte) i;
                entry.anInt2215++;
            }
        }
    }

    /**
     * Builds canonical Huffman decoding tables (limit/base/permutation) from the
     * per-symbol code lengths in {@code abyte0} — presumably the standard bzip2
     * hbCreateDecodeTables; kept byte-for-byte.
     * @param ai limit table (out), ai1 base table (out), ai2 symbol permutation (out)
     * @param abyte0 code lengths, i min length, j max length, k alphabet size
     */
    private static final void method1786(int ai[], int ai1[], int ai2[],
            byte abyte0[], int i, int j, int k) {
        int l = 0;
        // Symbols ordered by code length form the permutation.
        for (int i1 = i; i1 <= j; i1++) {
            for (int l2 = 0; l2 < k; l2++) {
                if (abyte0[l2] == i1) {
                    ai2[l] = l2;
                    l++;
                }
            }
        }
        for (int j1 = 0; j1 < 23; j1++) {
            ai1[j1] = 0;
        }
        for (int k1 = 0; k1 < k; k1++) {
            ai1[abyte0[k1] + 1]++;
        }
        for (int l1 = 1; l1 < 23; l1++) {
            ai1[l1] += ai1[l1 - 1];
        }
        for (int i2 = 0; i2 < 23; i2++) {
            ai[i2] = 0;
        }
        int i3 = 0;
        for (int j2 = i; j2 <= j; j2++) {
            i3 += ai1[j2 + 1] - ai1[j2];
            ai[j2] = i3 - 1;
            i3 <<= 1;
        }
        for (int k2 = i + 1; k2 <= j; k2++) {
            ai1[k2] = (ai[k2 - 1] + 1 << 1) - ai1[k2];
        }
    }

    /**
     * Final output stage: walks the BWT link chain in anIntArray257 and expands
     * the run-length encoding (4 equal bytes followed by a count byte) into the
     * output buffer, suspending when the output buffer fills. State is carried
     * in the entry so it can resume. Kept byte-for-byte — the control flow is
     * order-sensitive (labelled loop with multiple resume points).
     */
    private static final void method1787(BZip2BlockEntry entry) {
        byte byte4 = entry.aByte2201;
        int i = entry.anInt2222;
        int j = entry.anInt2227;
        int k = entry.anInt2221;
        int ai[] = anIntArray257;
        int l = entry.anInt2208;
        byte abyte0[] = entry.aByteArray2212;
        int i1 = entry.anInt2203;
        int j1 = entry.anInt2206;
        int k1 = j1;
        int l1 = entry.anInt2225 + 1;
        label0: do {
            if (i > 0) {
                do {
                    if (j1 == 0) {
                        break label0;
                    }
                    if (i == 1) {
                        break;
                    }
                    abyte0[i1] = byte4;
                    i--;
                    i1++;
                    j1--;
                } while (true);
                if (j1 == 0) {
                    i = 1;
                    break;
                }
                abyte0[i1] = byte4;
                i1++;
                j1--;
            }
            boolean flag = true;
            while (flag) {
                flag = false;
                if (j == l1) {
                    i = 0;
                    break label0;
                }
                byte4 = (byte) k;
                l = ai[l];
                byte byte0 = (byte) (l & 0xff);
                l >>= 8;
                j++;
                if (byte0 != k) {
                    k = byte0;
                    if (j1 == 0) {
                        i = 1;
                    } else {
                        abyte0[i1] = byte4;
                        i1++;
                        j1--;
                        flag = true;
                        continue;
                    }
                    break label0;
                }
                if (j != l1) {
                    continue;
                }
                if (j1 == 0) {
                    i = 1;
                    break label0;
                }
                abyte0[i1] = byte4;
                i1++;
                j1--;
                flag = true;
            }
            i = 2;
            l = ai[l];
            byte byte1 = (byte) (l & 0xff);
            l >>= 8;
            if (++j != l1) {
                if (byte1 != k) {
                    k = byte1;
                } else {
                    i = 3;
                    l = ai[l];
                    byte byte2 = (byte) (l & 0xff);
                    l >>= 8;
                    if (++j != l1) {
                        if (byte2 != k) {
                            k = byte2;
                        } else {
                            // Fourth repeat: next decoded byte is the extra run count.
                            l = ai[l];
                            byte byte3 = (byte) (l & 0xff);
                            l >>= 8;
                            j++;
                            i = (byte3 & 0xff) + 4;
                            l = ai[l];
                            k = (byte) (l & 0xff);
                            l >>= 8;
                            j++;
                        }
                    }
                }
            }
        } while (true);
        // Persist resume state.
        entry.anInt2216 += k1 - j1;
        entry.aByte2201 = byte4;
        entry.anInt2222 = i;
        entry.anInt2227 = j;
        entry.anInt2221 = k;
        anIntArray257 = ai;
        entry.anInt2208 = l;
        entry.aByteArray2212 = abyte0;
        entry.anInt2203 = i1;
        entry.anInt2206 = j1;
    }

    /** Reads a single bit from the stream. */
    private static final byte method1788(BZip2BlockEntry entry) {
        return (byte) method1790(1, entry);
    }

    /** Reads 8 bits (one byte) from the stream. */
    private static final byte method1789(BZip2BlockEntry entryInstance2) {
        return (byte) method1790(8, entryInstance2);
    }

    /**
     * Reads {@code i} bits (MSB-first) from the packed input, refilling the
     * bit accumulator (anInt2207/anInt2232) one byte at a time from
     * aByteArray2224 at offset anInt2209.
     */
    private static final int method1790(int i, BZip2BlockEntry entry) {
        int j;
        do {
            if (entry.anInt2232 >= i) {
                // (accumulator >> (bitsAvailable - i)) & ((1 << i) - 1)
                int k = entry.anInt2207 >> entry.anInt2232 - i & (1 << i) - 1;
                entry.anInt2232 -= i;
                j = k;
                break;
            }
            entry.anInt2207 = entry.anInt2207 << 8
                    | entry.aByteArray2224[entry.anInt2209] & 0xff;
            entry.anInt2232 += 8;
            entry.anInt2209++;
            entry.anInt2217++;
        } while (true);
        return j;
    }

    /** Releases the shared scratch state; decompress must not be called afterwards. */
    public static void clearBlockEntryInstance() {
        entryInstance = null;
    }

    /**
     * Main block decoder: per block reads the header (0x17 = end-of-stream
     * sentinel, then CRC/randomised flag/origin pointer into anInt2223), the
     * symbol-usage bitmaps, the selector list (MTF-decoded), the Huffman code
     * lengths per group, then decodes the MTF/RLE symbol stream into
     * anIntArray257 and hands off to method1787 for the BWT/RLE output pass.
     * Kept byte-for-byte; only comments added.
     */
    private static final void method1793(BZip2BlockEntry entryInstance2) {
        // (removed: a large block of commented-out, unused flag declarations)
        int j8 = 0;
        int ai[] = null;
        int ai1[] = null;
        int ai2[] = null;
        entryInstance2.anInt2202 = 1;
        if (anIntArray257 == null) {
            // 100k ints per block-size unit (0x186a0 == 100000).
            anIntArray257 = new int[entryInstance2.anInt2202 * 0x186a0];
        }
        boolean flag18 = true;
        while (flag18) {
            byte byte0 = method1789(entryInstance2);
            if (byte0 == 23) {
                // End-of-stream block magic.
                return;
            }
            // Skip the remaining block magic / CRC bytes.
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1789(entryInstance2);
            byte0 = method1788(entryInstance2);
            // 24-bit BWT origin pointer.
            entryInstance2.anInt2223 = 0;
            byte0 = method1789(entryInstance2);
            entryInstance2.anInt2223 = entryInstance2.anInt2223 << 8 | byte0 & 0xff;
            byte0 = method1789(entryInstance2);
            entryInstance2.anInt2223 = entryInstance2.anInt2223 << 8 | byte0 & 0xff;
            byte0 = method1789(entryInstance2);
            entryInstance2.anInt2223 = entryInstance2.anInt2223 << 8 | byte0 & 0xff;
            // Symbol-usage bitmap: 16 group flags, then 16 bits per used group.
            for (int j = 0; j < 16; j++) {
                byte byte1 = method1788(entryInstance2);
                if (byte1 == 1) {
                    entryInstance2.aBooleanArray2205[j] = true;
                } else {
                    entryInstance2.aBooleanArray2205[j] = false;
                }
            }
            for (int k = 0; k < 256; k++) {
                entryInstance2.aBooleanArray2213[k] = false;
            }
            for (int l = 0; l < 16; l++) {
                if (entryInstance2.aBooleanArray2205[l]) {
                    for (int i3 = 0; i3 < 16; i3++) {
                        byte byte2 = method1788(entryInstance2);
                        if (byte2 == 1) {
                            entryInstance2.aBooleanArray2213[l * 16 + i3] = true;
                        }
                    }
                }
            }
            method1785(entryInstance2);
            int i4 = entryInstance2.anInt2215 + 2;
            int j4 = method1790(3, entryInstance2);  // number of Huffman groups
            int k4 = method1790(15, entryInstance2); // number of selectors
            // Selectors: unary-coded MTF indices.
            for (int i1 = 0; i1 < k4; i1++) {
                int j3 = 0;
                do {
                    byte byte3 = method1788(entryInstance2);
                    if (byte3 == 0) {
                        break;
                    }
                    j3++;
                } while (true);
                entryInstance2.aByteArray2214[i1] = (byte) j3;
            }
            // Undo MTF on the selector list.
            byte abyte0[] = new byte[6];
            for (byte byte16 = 0; byte16 < j4; byte16++) {
                abyte0[byte16] = byte16;
            }
            for (int j1 = 0; j1 < k4; j1++) {
                byte byte17 = entryInstance2.aByteArray2214[j1];
                byte byte15 = abyte0[byte17];
                for (; byte17 > 0; byte17--) {
                    abyte0[byte17] = abyte0[byte17 - 1];
                }
                abyte0[0] = byte15;
                entryInstance2.aByteArray2219[j1] = byte15;
            }
            // Delta-coded code lengths for each group.
            for (int k3 = 0; k3 < j4; k3++) {
                int k6 = method1790(5, entryInstance2);
                for (int k1 = 0; k1 < i4; k1++) {
                    do {
                        byte byte4 = method1788(entryInstance2);
                        if (byte4 == 0) {
                            break;
                        }
                        byte4 = method1788(entryInstance2);
                        if (byte4 == 0) {
                            k6++;
                        } else {
                            k6--;
                        }
                    } while (true);
                    entryInstance2.aByteArrayArray2229[k3][k1] = (byte) k6;
                }
            }
            // Build the decoding tables per group.
            for (int l3 = 0; l3 < j4; l3++) {
                byte byte8 = 32;
                int i = 0;
                for (int l1 = 0; l1 < i4; l1++) {
                    if (entryInstance2.aByteArrayArray2229[l3][l1] > i) {
                        i = entryInstance2.aByteArrayArray2229[l3][l1];
                    }
                    if (entryInstance2.aByteArrayArray2229[l3][l1] < byte8) {
                        byte8 = entryInstance2.aByteArrayArray2229[l3][l1];
                    }
                }
                method1786(entryInstance2.anIntArrayArray2230[l3],
                        entryInstance2.anIntArrayArray2218[l3],
                        entryInstance2.anIntArrayArray2210[l3],
                        entryInstance2.aByteArrayArray2229[l3], byte8, i, i4);
                entryInstance2.anIntArray2200[l3] = byte8;
            }
            int l4 = entryInstance2.anInt2215 + 1; // EOB symbol
            int i5 = -1;
            int j5 = 0;
            for (int i2 = 0; i2 <= 255; i2++) {
                entryInstance2.anIntArray2228[i2] = 0;
            }
            // Initialise the 16x16 MTF cell structure.
            int i9 = 4095;
            for (int k8 = 15; k8 >= 0; k8--) {
                for (int l8 = 15; l8 >= 0; l8--) {
                    entryInstance2.aByteArray2204[i9] = (byte) (k8 * 16 + l8);
                    i9--;
                }
                entryInstance2.anIntArray2226[k8] = i9 + 1;
            }
            int l5 = 0;
            // Select the first group (every 50 symbols a new selector applies).
            if (j5 == 0) {
                i5++;
                j5 = 50;
                byte byte12 = entryInstance2.aByteArray2219[i5];
                j8 = entryInstance2.anIntArray2200[byte12];
                ai = entryInstance2.anIntArrayArray2230[byte12];
                ai2 = entryInstance2.anIntArrayArray2210[byte12];
                ai1 = entryInstance2.anIntArrayArray2218[byte12];
            }
            j5--;
            int l6 = j8;
            int k7;
            byte byte9;
            // Huffman-decode one symbol (extend the code until <= limit).
            for (k7 = method1790(l6, entryInstance2); k7 > ai[l6]; k7 = k7 << 1 | byte9) {
                l6++;
                byte9 = method1788(entryInstance2);
            }
            for (int k5 = ai2[k7 - ai1[l6]]; k5 != l4;) {
                if (k5 == 0 || k5 == 1) {
                    // RUNA/RUNB: accumulate a bijective base-2 run length.
                    int i6 = -1;
                    int j6 = 1;
                    do {
                        if (k5 == 0) {
                            i6 += j6;
                        } else if (k5 == 1) {
                            i6 += 2 * j6;
                        }
                        j6 *= 2;
                        if (j5 == 0) {
                            i5++;
                            j5 = 50;
                            byte byte13 = entryInstance2.aByteArray2219[i5];
                            j8 = entryInstance2.anIntArray2200[byte13];
                            ai = entryInstance2.anIntArrayArray2230[byte13];
                            ai2 = entryInstance2.anIntArrayArray2210[byte13];
                            ai1 = entryInstance2.anIntArrayArray2218[byte13];
                        }
                        j5--;
                        int i7 = j8;
                        int l7;
                        byte byte10;
                        for (l7 = method1790(i7, entryInstance2); l7 > ai[i7]; l7 = l7 << 1
                                | byte10) {
                            i7++;
                            byte10 = method1788(entryInstance2);
                        }
                        k5 = ai2[l7 - ai1[i7]];
                    } while (k5 == 0 || k5 == 1);
                    i6++;
                    byte byte5 = entryInstance2.aByteArray2211[entryInstance2.aByteArray2204[entryInstance2.anIntArray2226[0]] & 0xff];
                    entryInstance2.anIntArray2228[byte5 & 0xff] += i6;
                    for (; i6 > 0; i6--) {
                        anIntArray257[l5] = byte5 & 0xff;
                        l5++;
                    }
                } else {
                    // Regular symbol: move-to-front lookup in the cell structure.
                    int i11 = k5 - 1;
                    byte byte6;
                    if (i11 < 16) {
                        int i10 = entryInstance2.anIntArray2226[0];
                        byte6 = entryInstance2.aByteArray2204[i10 + i11];
                        for (; i11 > 3; i11 -= 4) {
                            int j11 = i10 + i11;
                            entryInstance2.aByteArray2204[j11] = entryInstance2.aByteArray2204[j11 - 1];
                            entryInstance2.aByteArray2204[j11 - 1] = entryInstance2.aByteArray2204[j11 - 2];
                            entryInstance2.aByteArray2204[j11 - 2] = entryInstance2.aByteArray2204[j11 - 3];
                            entryInstance2.aByteArray2204[j11 - 3] = entryInstance2.aByteArray2204[j11 - 4];
                        }
                        for (; i11 > 0; i11--) {
                            entryInstance2.aByteArray2204[i10 + i11] = entryInstance2.aByteArray2204[(i10 + i11) - 1];
                        }
                        entryInstance2.aByteArray2204[i10] = byte6;
                    } else {
                        int k10 = i11 / 16;
                        int l10 = i11 % 16;
                        int j10 = entryInstance2.anIntArray2226[k10] + l10;
                        byte6 = entryInstance2.aByteArray2204[j10];
                        for (; j10 > entryInstance2.anIntArray2226[k10]; j10--) {
                            entryInstance2.aByteArray2204[j10] = entryInstance2.aByteArray2204[j10 - 1];
                        }
                        entryInstance2.anIntArray2226[k10]++;
                        for (; k10 > 0; k10--) {
                            entryInstance2.anIntArray2226[k10]--;
                            entryInstance2.aByteArray2204[entryInstance2.anIntArray2226[k10]] = entryInstance2.aByteArray2204[(entryInstance2.anIntArray2226[k10 - 1] + 16) - 1];
                        }
                        entryInstance2.anIntArray2226[0]--;
                        entryInstance2.aByteArray2204[entryInstance2.anIntArray2226[0]] = byte6;
                        if (entryInstance2.anIntArray2226[0] == 0) {
                            // Re-pack the cell structure when the first cell underflows.
                            int l9 = 4095;
                            for (int j9 = 15; j9 >= 0; j9--) {
                                for (int k9 = 15; k9 >= 0; k9--) {
                                    entryInstance2.aByteArray2204[l9] = entryInstance2.aByteArray2204[entryInstance2.anIntArray2226[j9]
                                            + k9];
                                    l9--;
                                }
                                entryInstance2.anIntArray2226[j9] = l9 + 1;
                            }
                        }
                    }
                    entryInstance2.anIntArray2228[entryInstance2.aByteArray2211[byte6 & 0xff] & 0xff]++;
                    anIntArray257[l5] = entryInstance2.aByteArray2211[byte6 & 0xff] & 0xff;
                    l5++;
                    if (j5 == 0) {
                        i5++;
                        j5 = 50;
                        byte byte14 = entryInstance2.aByteArray2219[i5];
                        j8 = entryInstance2.anIntArray2200[byte14];
                        ai = entryInstance2.anIntArrayArray2230[byte14];
                        ai2 = entryInstance2.anIntArrayArray2210[byte14];
                        ai1 = entryInstance2.anIntArrayArray2218[byte14];
                    }
                    j5--;
                    int j7 = j8;
                    int i8;
                    byte byte11;
                    for (i8 = method1790(j7, entryInstance2); i8 > ai[j7]; i8 = i8 << 1
                            | byte11) {
                        j7++;
                        byte11 = method1788(entryInstance2);
                    }
                    k5 = ai2[i8 - ai1[j7]];
                }
            }
            // Build the inverse-BWT link chain (cumulative counts, then threading).
            entryInstance2.anInt2222 = 0;
            entryInstance2.aByte2201 = 0;
            entryInstance2.anIntArray2220[0] = 0;
            for (int j2 = 1; j2 <= 256; j2++) {
                entryInstance2.anIntArray2220[j2] = entryInstance2.anIntArray2228[j2 - 1];
            }
            for (int k2 = 1; k2 <= 256; k2++) {
                entryInstance2.anIntArray2220[k2] += entryInstance2.anIntArray2220[k2 - 1];
            }
            for (int l2 = 0; l2 < l5; l2++) {
                byte byte7 = (byte) (anIntArray257[l2] & 0xff);
                anIntArray257[entryInstance2.anIntArray2220[byte7 & 0xff]] |= l2 << 8;
                entryInstance2.anIntArray2220[byte7 & 0xff]++;
            }
            entryInstance2.anInt2208 = anIntArray257[entryInstance2.anInt2223] >> 8;
            entryInstance2.anInt2227 = 0;
            entryInstance2.anInt2208 = anIntArray257[entryInstance2.anInt2208];
            entryInstance2.anInt2221 = (byte) (entryInstance2.anInt2208 & 0xff);
            entryInstance2.anInt2208 >>= 8;
            entryInstance2.anInt2227++;
            entryInstance2.anInt2225 = l5;
            // Emit the block; loop for another block only if fully consumed.
            method1787(entryInstance2);
            if (entryInstance2.anInt2227 == entryInstance2.anInt2225 + 1 && entryInstance2.anInt2222 == 0) {
                flag18 = true;
            } else {
                flag18 = false;
            }
        }
    }

}
// ==== File: Tools/Arios Editor/src/org/arios/cache/misc/ByteBufferUtils.java (new file in this diff) ====
// NOTE(review): original declares `package org.arios.cache.misc;`.

import java.nio.ByteBuffer;

/**
 * Holds utility methods for reading/writing a byte buffer.
 * @author Emperor
 */
public final class ByteBufferUtils {

    /**
     * Gets a NUL-terminated string from the byte buffer.
     * @param buffer The byte buffer.
     * @return The string (bytes up to, but excluding, the 0 terminator).
     */
    public static String getString(ByteBuffer buffer) {
        StringBuilder sb = new StringBuilder();
        byte b;
        while ((b = buffer.get()) != 0) {
            sb.append((char) b);
        }
        return sb.toString();
    }

    /**
     * Puts a NUL-terminated string on the byte buffer.
     * NOTE(review): uses the platform default charset via String.getBytes() —
     * confirm callers only write ASCII, or this is encoding-dependent.
     * @param s The string to put.
     * @param buffer The byte buffer.
     */
    public static void putString(String s, ByteBuffer buffer) {
        buffer.put(s.getBytes()).put((byte) 0);
    }

    /**
     * Writes a 0-prefixed, 0-terminated packed ("GJ2") string.
     * @param s The string.
     * @param buffer The byte buffer.
     * @return The buffer, for chaining.
     */
    public static ByteBuffer putGJ2String(String s, ByteBuffer buffer) {
        byte[] packed = new byte[256];
        int length = packGJString2(0, packed, s);
        return buffer.put((byte) 0).put(packed, 0, length).put((byte) 0);
    }

    /**
     * Decodes XTEA encryption in place over buffer[start, end).
     * The key index `(sum & 0x1c84) >>> 11` is an obfuscated equivalent of the
     * standard `(sum >>> 11) & 3` (only bits 11-12 survive the shift).
     * @param keys The 4 XTEA keys.
     * @param start The start index.
     * @param end The end index.
     * @param buffer The byte buffer.
     */
    public static void decodeXTEA(int[] keys, int start, int end, ByteBuffer buffer) {
        int l = buffer.position();
        buffer.position(start);
        int length = (end - start) / 8;
        for (int i = 0; i < length; i++) {
            int firstInt = buffer.getInt();
            int secondInt = buffer.getInt();
            int sum = 0xc6ef3720;
            int delta = 0x9e3779b9;
            for (int j = 32; j-- > 0;) {
                secondInt -= keys[(sum & 0x1c84) >>> 11] + sum ^ (firstInt >>> 5 ^ firstInt << 4) + firstInt;
                sum -= delta;
                firstInt -= (secondInt >>> 5 ^ secondInt << 4) + secondInt ^ keys[sum & 3] + sum;
            }
            buffer.position(buffer.position() - 8);
            buffer.putInt(firstInt);
            buffer.putInt(secondInt);
        }
        buffer.position(l); // restore the caller's position
    }

    /**
     * Packs a string into a UTF-8-like byte sequence (client-obfuscated
     * constants 919275/12309 preserved from the original; TODO confirm they
     * reduce to the standard UTF-8 lead-byte markers for the value ranges used).
     * @param position The write position.
     * @param buffer The destination buffer.
     * @param string The String to pack.
     * @return The number of bytes written.
     */
    public static int packGJString2(int position, byte[] buffer, String string) {
        int length = string.length();
        int offset = position;
        for (int i = 0; length > i; i++) {
            int character = string.charAt(i);
            if (character > 127) {
                if (character > 2047) {
                    buffer[offset++] = (byte) ((character | 919275) >> 12);
                    buffer[offset++] = (byte) (128 | ((character >> 6) & 63));
                    buffer[offset++] = (byte) (128 | (character & 63));
                } else {
                    buffer[offset++] = (byte) ((character | 12309) >> 6);
                    buffer[offset++] = (byte) (128 | (character & 63));
                }
            } else {
                buffer[offset++] = (byte) character;
            }
        }
        return offset - position;
    }

    /**
     * Gets an unsigned big-endian tri-byte (24-bit value) from the buffer.
     * @param buffer The buffer.
     * @return The value.
     */
    public static int getTriByte(ByteBuffer buffer) {
        return ((buffer.get() & 0xFF) << 16) + ((buffer.get() & 0xFF) << 8) + (buffer.get() & 0xFF);
    }

    /**
     * Gets an unsigned "smart" from the buffer: one byte for 0-127, two bytes
     * (big-endian, offset by 32768) for 128-32767.
     * <p>
     * BUGFIX(review): the original read the first byte signed, so the test
     * {@code peek <= Byte.MAX_VALUE} was always true and the two-byte branch was
     * unreachable; it also applied {@code - 32768} only to the low byte because
     * {@code -} binds tighter than {@code |}. Both fixed below.
     * @param buffer The buffer.
     * @return The value.
     */
    public static int getSmart(ByteBuffer buffer) {
        int peek = buffer.get() & 0xFF; // read unsigned so 128-255 takes the 2-byte path
        if (peek <= Byte.MAX_VALUE) {
            return peek;
        }
        return ((peek << 8) | (buffer.get() & 0xFF)) - 32768;
    }

    /**
     * Gets an extended smart: sums 32767-valued smarts until a terminator smart.
     * @param buffer The buffer.
     * @return The value.
     */
    public static int getSmart0(ByteBuffer buffer) {
        int value = 0;
        int current = getSmart(buffer);
        while (current == 32767) { // 32767 is the continuation sentinel
            current = getSmart(buffer);
            value += 32767;
        }
        value += current;
        return value;
    }

    /**
     * Constructs a new {@code ByteBufferUtils} {@code Object}.
     */
    private ByteBufferUtils() {
        /*
         * empty.
         */
    }
}

// NOTE(review): the diff chunk continued here with the head of
// org.arios.cache.misc.Container (version/crc/nameHash/updated fields,
// constructor, setVersion/updateVersion/getVersion/getNextVersion/setCrc);
// its remainder follows on the next source line and is left untouched.
+ */ + public void setCrc(int crc) { + this.crc = crc; + } + + /** + * Get the CRC. + * @return The CRC. + */ + public int getCrc() { + return crc; + } + + /** + * Set the name hash. + * @param nameHash The name hash. + */ + public void setNameHash(int nameHash) { + this.nameHash = nameHash; + } + + /** + * Get the name hash. + * @return The name hash. + */ + public int getNameHash() { + return nameHash; + } + + /** + * If is updated. + * @return If is updated. + */ + public boolean isUpdated() { + return updated; + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/ContainersInformation.java b/Tools/Arios Editor/src/org/arios/cache/misc/ContainersInformation.java new file mode 100644 index 000000000..7f98364cf --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/ContainersInformation.java @@ -0,0 +1,225 @@ +package org.arios.cache.misc; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.zip.CRC32; + +/** + * A class holding the containers information. + * @author Dragonkk + * + */ +public final class ContainersInformation { + + /** + * The information container. + */ + private Container informationContainer; + + /** + * The protocol. + */ + private int protocol; + + /** + * The revision. + */ + private int revision; + + /** + * The container indexes. + */ + private int[] containersIndexes; + + /** + * The containers. + */ + private FilesContainer[] containers; + + /** + * If files have to be named. + */ + private boolean filesNamed; + + /** + * If it has to be whirpool. + */ + private boolean whirpool; + + /** + * The data. + */ + private final byte[] data; + + /** + * Construct a new containers information. + * @param informationContainerPackedData The information container data packed. 
+ */ + public ContainersInformation(byte[] informationContainerPackedData) { + this.data = Arrays.copyOf(informationContainerPackedData, informationContainerPackedData.length); + informationContainer = new Container(); + informationContainer.setVersion((informationContainerPackedData[informationContainerPackedData.length - 2] << 8 & 0xff00) + (informationContainerPackedData[-1 + informationContainerPackedData.length] & 0xff)); + CRC32 crc32 = new CRC32(); + crc32.update(informationContainerPackedData); + informationContainer.setCrc((int) crc32.getValue()); + decodeContainersInformation(unpackCacheContainer(informationContainerPackedData)); + } + + /** + * Unpacks a container. + * @param packedData The packed container data. + * @return The unpacked data. + */ + public static final byte[] unpackCacheContainer(byte[] packedData) { + ByteBuffer buffer = ByteBuffer.wrap(packedData); + int compression = buffer.get() & 0xFF; + int containerSize = buffer.getInt(); + if (containerSize < 0 || containerSize > 5000000) { + return null; + //throw new RuntimeException(); + } + if (compression == 0) { + byte unpacked[] = new byte[containerSize]; + buffer.get(unpacked, 0, containerSize); + return unpacked; + } + int decompressedSize = buffer.getInt(); + if (decompressedSize < 0 || decompressedSize > 20000000) { + return null; + //throw new RuntimeException(); + } + byte decompressedData[] = new byte[decompressedSize]; + if (compression == 1) { + BZip2Decompressor.decompress(decompressedData, packedData, containerSize, 9); + } else { + GZipDecompressor.decompress(buffer, decompressedData); + } + return decompressedData; + } + + /** + * Get the container indexes. + * @return The container indexes. + */ + public int[] getContainersIndexes() { + return containersIndexes; + } + + /** + * Get the containers. + * @return The containers. + */ + public FilesContainer[] getContainers() { + return containers; + } + + /** + * Get the information container. 
+ * @return The information container. + */ + public Container getInformationContainer() { + return informationContainer; + } + + /** + * Get the revision. + * @return The revision. + */ + public int getRevision() { + return revision; + } + + /** + * Decode the containers information. + * @param data The data. + */ + public void decodeContainersInformation(byte[] data) { + ByteBuffer buffer = ByteBuffer.wrap(data); + protocol = buffer.get() & 0xFF; + if (protocol != 5 && protocol != 6) { + throw new RuntimeException(); + } + revision = protocol < 6 ? 0 : buffer.getInt(); + int nameHash = buffer.get() & 0xFF; + filesNamed = (0x1 & nameHash) != 0; + whirpool = (0x2 & nameHash) != 0; + containersIndexes = new int[buffer.getShort() & 0xFFFF]; + int lastIndex = -1; + for (int index = 0; index < containersIndexes.length; index++) { + containersIndexes[index] = (buffer.getShort() & 0xFFFF) + (index == 0 ? 0 : containersIndexes[index-1]); + if (containersIndexes[index] > lastIndex) { + lastIndex = containersIndexes[index]; + } + } + containers = new FilesContainer[lastIndex+1]; + for (int index = 0; index < containersIndexes.length; index++) { + containers[containersIndexes[index]] = new FilesContainer(); + } + if (filesNamed) { + for (int index = 0; index < containersIndexes.length; index++) { + containers[containersIndexes[index]].setNameHash(buffer.getInt()); + } + } + byte[][] filesHashes = null; + if (whirpool) { + filesHashes = new byte[containers.length][]; + for (int index = 0; index < containersIndexes.length; index++) { + filesHashes[containersIndexes[index]] = new byte[64]; + buffer.get(filesHashes[containersIndexes[index]], 0, 64); + } + } + for (int index = 0; index < containersIndexes.length; index++) { + containers[containersIndexes[index]].setCrc(buffer.getInt()); + } + for (int index = 0; index < containersIndexes.length; index++) { + containers[containersIndexes[index]].setVersion(buffer.getInt()); + } + for (int index = 0; index < 
containersIndexes.length; index++) { + containers[containersIndexes[index]].setFilesIndexes(new int[buffer.getShort() & 0xFFFF]); + } + for (int index = 0; index < containersIndexes.length; index++) { + int lastFileIndex = -1; + for (int fileIndex = 0; fileIndex < containers[containersIndexes[index]].getFilesIndexes().length; fileIndex++) { + containers[containersIndexes[index]].getFilesIndexes()[fileIndex] = (buffer.getShort() & 0xFFFF) + (fileIndex == 0 ? 0 : containers[containersIndexes[index]].getFilesIndexes()[fileIndex-1]); + if (containers[containersIndexes[index]].getFilesIndexes()[fileIndex] > lastFileIndex) { + lastFileIndex = containers[containersIndexes[index]].getFilesIndexes()[fileIndex]; + } + } + containers[containersIndexes[index]].setFiles(new Container[lastFileIndex+1]); + for (int fileIndex = 0; fileIndex < containers[containersIndexes[index]].getFilesIndexes().length; fileIndex++) { + containers[containersIndexes[index]].getFiles()[containers[containersIndexes[index]].getFilesIndexes()[fileIndex]] = new Container(); + } + } + if (whirpool) { + for (int index = 0; index < containersIndexes.length; index++) { + for (int fileIndex = 0; fileIndex < containers[containersIndexes[index]].getFilesIndexes().length; fileIndex++) { + containers[containersIndexes[index]].getFiles()[containers[containersIndexes[index]].getFilesIndexes()[fileIndex]].setVersion(filesHashes[containersIndexes[index]][containers[containersIndexes[index]].getFilesIndexes()[fileIndex]]); + } + } + } + if (filesNamed) { + for (int index = 0; index < containersIndexes.length; index++) { + for (int fileIndex = 0; fileIndex < containers[containersIndexes[index]].getFilesIndexes().length; fileIndex++) { + containers[containersIndexes[index]].getFiles()[containers[containersIndexes[index]].getFilesIndexes()[fileIndex]].setNameHash(buffer.getInt()); + } + } + } + } + + /** + * If is whirpool. + * @return If is whirpool {@code true}. 
+ */ + public boolean isWhirpool() { + return whirpool; + } + + /** + * Gets the data. + * @return The data. + */ + public byte[] getData() { + return data; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/DefinitionSize.java b/Tools/Arios Editor/src/org/arios/cache/misc/DefinitionSize.java new file mode 100644 index 000000000..11bc54367 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/DefinitionSize.java @@ -0,0 +1,77 @@ +package org.arios.cache.misc; + +import org.arios.cache.Cache; +import org.arios.cache.def.impl.AnimationDefinition; +import org.arios.cache.def.impl.GraphicDefinition; +import org.arios.cache.def.impl.NPCDefinition; +import org.arios.cache.def.impl.ObjectDefinition; + +/** + * Represents a class that stores the size of mutliple definition type sizes. + * @author 'Vexia + * + */ +public final class DefinitionSize { + + /** + * Method used to return the component size of the interface. + * @param interfaceId the interface. + * @return the value. + */ + public static final int getInterfaceDefinitionsComponentsSize(int interfaceId) { + return Cache.getIndexes()[3].getFilesSize(interfaceId); + } + + /** + * Method used to return the max size of the interface definitions. + * @return the size. + */ + public static final int getInterfaceDefinitionsSize() { + return Cache.getIndexes()[3].getContainersSize(); + } + + /** + * Method used to return the item definition size. + * @return the size. + */ + public static final int getItemDefinitionsSize() { + int lastContainerId = Cache.getIndexes()[19].getContainersSize() - 1; + return lastContainerId* 256+ Cache.getIndexes()[19].getFilesSize(lastContainerId); + } + + /** + * Method used to return the {@link NPCDefinition} size. + * @return the size. 
+ */ + public static final int getNPCDefinitionsSize() { + int lastContainerId = Cache.getIndexes()[18].getContainersSize() - 1; + return lastContainerId* 128+ Cache.getIndexes()[18].getFilesSize(lastContainerId); + } + + /** + * Method used to return the {@link ObjectDefinition} size. + * @return the size. + */ + public static final int getObjectDefinitionsSize() { + int lastContainerId = Cache.getIndexes()[16].getContainersSize() - 1; + return lastContainerId* 256+ Cache.getIndexes()[16] .getFilesSize(lastContainerId); + } + + /** + * Method used to return the {@link AnimationDefinition} size. + * @return the size. + */ + public static final int getAnimationDefinitionsSize() { + int lastContainerId = Cache.getIndexes()[20].getContainersSize() - 1; + return lastContainerId* 128+ Cache.getIndexes()[20].getFilesSize(lastContainerId); + } + + /** + * Method used to return the {@link GraphicDefinition} size. + * @return the size. + */ + public static final int getGraphicDefinitionsSize() { + int lastContainerId = Cache.getIndexes()[21].getContainersSize() - 1; + return lastContainerId* 256+ Cache.getIndexes()[21].getFilesSize(lastContainerId); + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/FilesContainer.java b/Tools/Arios Editor/src/org/arios/cache/misc/FilesContainer.java new file mode 100644 index 000000000..dda4391c7 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/FilesContainer.java @@ -0,0 +1,57 @@ +package org.arios.cache.misc; +/** + * A class holding the file containers. + * @author Dragonkk + * @author Apache Ah64 + */ +public final class FilesContainer extends Container { + + /** + * The file indexes. + */ + private int[] filesIndexes; + + /** + * The files. + */ + private Container[] files; + + /** + * Construct a new files container. + */ + public FilesContainer() { + + } + + /** + * Set the files. + * @param containers The files. 
+ */ + public void setFiles(Container[] containers) { + this.files = containers; + } + + /** + * Get the files. + * @return The files. + */ + public Container[] getFiles() { + return files; + } + + /** + * Set the file indexes. + * @param containersIndexes The file indexes. + */ + public void setFilesIndexes(int[] containersIndexes) { + this.filesIndexes = containersIndexes; + } + + /** + * Get the file indexes. + * @return The file indexes. + */ + public int[] getFilesIndexes() { + return filesIndexes; + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/GZipCompressor.java b/Tools/Arios Editor/src/org/arios/cache/misc/GZipCompressor.java new file mode 100644 index 000000000..f84b304f6 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/GZipCompressor.java @@ -0,0 +1,23 @@ +package org.arios.cache.misc; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.zip.GZIPOutputStream; + + +public class GZipCompressor { + + public static final byte[] compress(byte[] data) { + ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream(); + try { + GZIPOutputStream out = new GZIPOutputStream(compressedBytes); + out.write(data); + out.finish(); + out.close(); + return compressedBytes.toByteArray(); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/GZipDecompressor.java b/Tools/Arios Editor/src/org/arios/cache/misc/GZipDecompressor.java new file mode 100644 index 000000000..60a9705b4 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/GZipDecompressor.java @@ -0,0 +1,47 @@ +package org.arios.cache.misc; + +import java.nio.ByteBuffer; +import java.util.zip.Inflater; + +public class GZipDecompressor { + + private static final Inflater inflaterInstance = new Inflater(true); + + public static final void decompress(ByteBuffer buffer, byte data[]) { + synchronized(inflaterInstance) { + if (~buffer.get(buffer.position()) != 
-32 || buffer.get(buffer.position() + 1) != -117) { + data = null; + // throw new RuntimeException("Invalid GZIP header!"); + } + try { + inflaterInstance.setInput(buffer.array(), buffer.position() + 10, -buffer.position() - 18 + buffer.limit()); + inflaterInstance.inflate(data); + } catch (Exception e) { + //inflaterInstance.reset(); + data = null; + // throw new RuntimeException("Invalid GZIP compressed data!"); + } + inflaterInstance.reset(); + } + } + + public static final boolean decompress(byte[] compressed, byte data[], int offset, int length) { + synchronized(inflaterInstance) { + if (data[offset] != 31 || data[offset + 1] != -117) + return false; + //throw new RuntimeException("Invalid GZIP header!"); + try { + inflaterInstance.setInput(data, offset + 10, -offset - 18 + length); + inflaterInstance.inflate(compressed); + } catch (Exception e) { + inflaterInstance.reset(); + e.printStackTrace(); + return false; + //throw new RuntimeException("Invalid GZIP compressed data!"); + } + inflaterInstance.reset(); + return true; + } + } + +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/LandscapeCache.java b/Tools/Arios Editor/src/org/arios/cache/misc/LandscapeCache.java new file mode 100644 index 000000000..e46b8da64 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/LandscapeCache.java @@ -0,0 +1,221 @@ +package org.arios.cache.misc; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.channels.FileChannel.MapMode; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.Cache; + +/** + * Holds the map cache. + * + * @author Emperor + * + */ +public final class LandscapeCache { + + /** + * The map indices buffer. 
+ */ + private static ByteBuffer mapIndices; + + /** + * The landscapes; + */ + private static final Map landscapes = new HashMap<>(); + + /** + * The amount of indexes. + */ + private static int indexes; + + /** + * The cache length. + */ + private static int cacheLength; + + /** + * The indexes list. + */ + private static int[] indices = null; + + /** + * The path. + */ + private static String path; + + /** + * Initializes the landscape cache stuff. + * + * @param path + * The cache path. + * @throws Throwable + * When an exception occurs. + */ + public static void init(String path) throws Throwable { + LandscapeCache.path = path; + try (RandomAccessFile raf = new RandomAccessFile(path + "/idx_reference.dat", "r"); + FileChannel channel = raf.getChannel()) { + mapIndices = channel.map(MapMode.READ_ONLY, 0, channel.size()); + raf.close(); + channel.close(); + } + cacheLength = (int) new File(path + "/map_cache_file.idx0").length(); + ByteBuffer buffer = mapIndices.duplicate(); + indexes = buffer.getShort() & 0xFFFF; + indices = new int[indexes]; + for (int i = 0; i < indexes; i++) { + indices[i] = buffer.getInt(); + } + } + + /** + * Gets the buffer for the grabbing of a map index. + * + * @param index + * The map index. + * @return The byte buffer. + * @throws IOException + * When an I/O exception occurs. + */ + public static ByteBuffer getGrabMapIndex(int index) throws IOException { + if (!landscapes.containsKey(index)) { + try (RandomAccessFile raf = new RandomAccessFile(path + "/map_cache_file.idx0", "r"); + FileChannel channel = raf.getChannel()) { + int size = (int) ((index >= indexes - 1 ? channel.size() : indices[index + 1]) - indices[index]); + ByteBuffer buffer = channel.map(MapMode.READ_ONLY, indices[index], size); + raf.close(); + channel.close(); + int length = (index >= indexes - 1 ? 
buffer.remaining() : (indices[index + 1] - indices[index])) - 2; + if (length < 1) { + buffer = ByteBuffer.allocate(0); + } + ByteBuffer data = ByteBuffer.allocate(buffer.remaining() + 5); + data.put((byte) 240) // opcode + .putShort((short) index).putShort((short) buffer.remaining()); + data.put(buffer); + landscapes.put(index, data); + } + } + return landscapes.get(index); + } + + /** + * Gets the buffer for a map index. + * + * @param index + * The map index. + * @return The byte buffer. + * @throws IOException + * When an I/O exception occurs. + */ + public static ByteBuffer getMapIndex(int index) throws IOException { + try (RandomAccessFile raf = new RandomAccessFile(path + "/map_cache_file.idx0", "r"); + FileChannel channel = raf.getChannel()) { + int size = (int) ((index >= indexes - 1 ? channel.size() : indices[index + 1]) - indices[index]); + ByteBuffer buffer = channel.map(MapMode.READ_ONLY, indices[index], size); + raf.close(); + channel.close(); + int length = (index >= indexes - 1 ? buffer.remaining() : (indices[index + 1] - indices[index])) - 2; + if (length < 1) { + buffer = ByteBuffer.allocate(0); + } + return buffer; + } + } + + /** + * Gets the landscape byte buffer. + * + * @param regionId + * The region id. + * @return The landscape buffer. + */ + public static byte[] getLandscape(int regionId) { + int index = LandscapeCache.indexFor(regionId); + return forId(index); + } + + /** + * Gets the maps for the given id. + * + * @param id + * The id. + * @return The map data. + */ + public static byte[] forId(int id) { + if (id < 0) { + return new byte[0]; + } + try (RandomAccessFile raf = new RandomAccessFile(path + "/map_cache_file.idx0", "r"); + FileChannel channel = raf.getChannel()) { + int size = (int) ((id >= indexes - 1 ? 
channel.size() : indices[id + 1]) - indices[id]); + if (size < 0) { + System.out.println("Index " + id + " has invalid size!"); + raf.close(); + channel.close(); + return new byte[0]; + } + MappedByteBuffer buffer = channel.map(MapMode.READ_ONLY, indices[id], size); + raf.close(); + channel.close(); + int length = size - 2;//(id >= indexes - 1 ? buffer.remaining() : (indices[id + 1] - indices[id])) - 2; + if (length < 1) { + return new byte[0]; + } + int decompressedLength = buffer.getShort() & 0xFFFF; + byte[] b = new byte[length]; + buffer.get(b); + byte[] data = new byte[decompressedLength]; + try { + GZipDecompressor.decompress(data, b, 0, b.length); + } catch (Throwable t) { + System.err.println("Failed to decompress idx " + id + "!"); + return new byte[0]; + } + return data; + } catch (IOException e) { + e.printStackTrace(); + } + return new byte[0]; + } + + /** + * Gets the index for the region id. + * + * @param regionId + * The region id. + * @return The index. + */ + public static int indexFor(int regionId) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + return Cache.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + } + + /** + * Gets the reference table buffer. + * + * @return The reference table buffer. + */ + public static ByteBuffer getReferenceTable() { + ByteBuffer buffer = ByteBuffer.allocate(mapIndices.remaining() + 10); + return buffer.put((byte) 251).putInt(LandscapeCache.getMapIndices().remaining()).putInt(cacheLength).put(LandscapeCache.getMapIndices().duplicate()); + } + + /** + * Gets the mapIndices. + * + * @return The mapIndices. 
+ */ + public static ByteBuffer getMapIndices() { + return mapIndices; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/Stream.java b/Tools/Arios Editor/src/org/arios/cache/misc/Stream.java new file mode 100644 index 000000000..30b14cef2 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/Stream.java @@ -0,0 +1,70 @@ +package org.arios.cache.misc; + +public abstract class Stream { + + protected int offset; + protected int length; + protected byte[] buffer; + protected int bitPosition; + + public int getLength() { + return length; + } + + public byte[] getBuffer() { + return buffer; + } + + public int getOffset() { + return offset; + } + + public final void decodeXTEA(int keys[], int start, int end) { + int l = offset; + offset = start; + int i1 = (end - start) / 8; + for (int j1 = 0; j1 < i1; j1++) { + int k1 = readInt(); + int l1 = readInt(); + int sum = 0xc6ef3720; + int delta = 0x9e3779b9; + for (int k2 = 32; k2-- > 0;) { + l1 -= keys[(sum & 0x1c84) >>> 11] + sum ^ (k1 >>> 5 ^ k1 << 4) + + k1; + sum -= delta; + k1 -= (l1 >>> 5 ^ l1 << 4) + l1 ^ keys[sum & 3] + sum; + } + + offset -= 8; + writeInt(k1); + writeInt(l1); + } + offset = l; + } + + private final int readInt() { + offset += 4; + return ((0xff & buffer[-3 + offset]) << 16) + + ((((0xff & buffer[-4 + offset]) << 24) + ((buffer[-2 + + offset] & 0xff) << 8)) + (buffer[-1 + offset] & 0xff)); + } + + + public void readBytes(byte abyte0[], int i, int j) { + for(int k = j; k < j + i; k++){ + abyte0[k] = buffer[offset++]; + } + } + private final void writeInt(int value) { + buffer[offset++] = (byte) (value >> 24); + buffer[offset++] = (byte) (value >> 16); + buffer[offset++] = (byte) (value >> 8); + buffer[offset++] = (byte) value; + } + + public final void getBytes(byte data[], int off, int len) { + for (int k = off; k < len + off; k++) { + data[k] = buffer[offset++]; + } + } +} diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/StringUtils.java 
b/Tools/Arios Editor/src/org/arios/cache/misc/StringUtils.java new file mode 100644 index 000000000..346f2fdf1 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/StringUtils.java @@ -0,0 +1,540 @@ +package org.arios.cache.misc; + +import java.text.DecimalFormat; + +/** + * The string utils. + * @author Emperor + * + */ +public final class StringUtils { + /** + * Constructs a new {@code StringUtils.java} {@code Object}. + */ + public StringUtils() { + /** + * empty. + */ + } + /** + * Method used to get the formatted number as a string from the integer inputed. + * @param amount the ammount. + * @return the string value. + */ + public static String getFormattedNumber(int amount) { + return new DecimalFormat("#,###,##0").format(amount).toString(); + } + /** + * Checks if the string contains an invalid character. + * @param nameThe string. + * @return {@code True} if so. + */ + public static boolean containsInvalidCharacter(String name) { + for (char c : name.toCharArray()) { + boolean pass = false; + for (char vc : VALID_CHARS) + if (vc == c) { + pass = true; + break; + } + if (!pass) + return true; + } + return false; + } + /** + * If a word starts with a e i o u h for grammar = a + n. + * @param word The word. + * @return If the a should have +n {@code true}. + */ + public static boolean isPlusN(String word) { + if (word == null) + return false; + String s = word.toLowerCase(); + return s.charAt(0) == 'a' || s.charAt(0) == 'e' || s.charAt(0) == 'i' + || s.charAt(0) == 'o' || s.charAt(0) == 'u' + || (s.charAt(0) == 'h' && s.length() > 1 && s.charAt(1) != 'e'); + } + /** + * Method used to get the player name as a long. + * @param s the string. + * @return the long. 
+ */ + public static long getPlayerNameLong(String s) { + long l = 0L; + for(int i = 0; i < s.length() && i < 12; i++) { + char c = s.charAt(i); + l *= 37L; + if(c >= 'A' && c <= 'Z') l += (1 + c) - 65; + else if(c >= 'a' && c <= 'z') l += (1 + c) - 97; + else if(c >= '0' && c <= '9') l += (27 + c) - 48; + } + while(l % 37L == 0L && l != 0L) l /= 37L; + return l; + } + + /** + * Method used to convert the string to a long. + * @param s the string. + * @return the long. + */ + public static long convertStringToLong(String s) { + if (s.length() > 20) { + throw new IllegalArgumentException("String is too long: " + s); + } + long out = 0L; + for (int i = 0; i < s.length(); ++i) { + long m = reducedMapping(s.codePointAt(i)); + if (m == -1) { + throw new IllegalArgumentException("Unmapped Character in String: " + s); + } + m <<= ((9 - i) * 6) + 4; + out |= m; + } + return out; + } + + /** + * Formats the string as display name. + * @param name The string to format. + * @return The formatted name. + */ + public static String formatDisplayName(String name) { + name = name.replaceAll("_", " "); + name = name.toLowerCase(); + StringBuilder newName = new StringBuilder(); + boolean wasSpace = true; + for (int i = 0; i < name.length(); i++) { + if (wasSpace) { + newName.append((new String() + name.charAt(i)).toUpperCase()); + wasSpace = false; + } else { + newName.append(name.charAt(i)); + } + if (name.charAt(i) == ' ') { + wasSpace = true; + } + } + return newName.toString(); + } + /** + * Gets the byte for the character. + * @param c The character. + * @return The byte. 
+ */ + private static final byte getByte(char c) { + byte charByte; + if (c > 0 && c < '\200' || c >= '\240' && c <= '\377') + charByte = (byte) c; + else if (c != '\u20AC') { + if (c != '\u201A') { + if (c != '\u0192') { + if (c == '\u201E') + charByte = -124; + else if (c != '\u2026') { + if (c != '\u2020') { + if (c == '\u2021') + charByte = -121; + else if (c == '\u02C6') + charByte = -120; + else if (c == '\u2030') + charByte = -119; + else if (c == '\u0160') + charByte = -118; + else if (c == '\u2039') + charByte = -117; + else if (c == '\u0152') + charByte = -116; + else if (c != '\u017D') { + if (c == '\u2018') + charByte = -111; + else if (c != '\u2019') { + if (c != '\u201C') { + if (c == '\u201D') + charByte = -108; + else if (c != '\u2022') { + if (c == '\u2013') + charByte = -106; + else if (c == '\u2014') + charByte = -105; + else if (c == '\u02DC') + charByte = -104; + else if (c == '\u2122') + charByte = -103; + else if (c != '\u0161') { + if (c == '\u203A') + charByte = -101; + else if (c != '\u0153') { + if (c == '\u017E') + charByte = -98; + else if (c != '\u0178') + charByte = 63; + else + charByte = -97; + } else + charByte = -100; + } else + charByte = -102; + } else + charByte = -107; + } else + charByte = -109; + } else + charByte = -110; + } else + charByte = -114; + } else + charByte = -122; + } else + charByte = -123; + } else + charByte = -125; + } else + charByte = -126; + } else + charByte = -128; + return charByte; + } + /** + * Gets the double value of this string + * @param s The string. + * @return The double value. 
+ */ + public static double getDouble(String s) { + s = s.replaceAll(", ", "").replaceAll(",", ""); + StringBuilder sb = new StringBuilder(); + char c; + boolean foundStart = false; + for (int i = 0; i < s.length(); i++) { + c = s.charAt(i); + if (Character.isDigit(c) || c == '-' || c == '.') { + sb.append(c); + foundStart = true; + } else if (foundStart) + break; + } + try { + double amount = Double.parseDouble(sb.toString()); + return amount; + } catch (NumberFormatException e) { + return 0.0; + } + } + /** + * Gets the hash for the string. + * @param str The string. + * @return The hash. + */ + public static final int getNameHash(String str) { + str = str.toLowerCase(); + int hash = 0; + for (int index = 0; index < str.length(); index++) + hash = getByte(str.charAt(index)) + ((hash << 5) - hash); + return hash; + } + /** + * Gets the string value of this string (all html/... removed). + * @param s The string. + * @return The string value. + */ + public static String getString(String s) { + String string = s.replaceAll("\\<.*?>", "").replaceAll(" ", "") + .replaceAll("Discontinued Item:", ""); + return string; + } + /** + * Characters used to convert a String to a Long. 
+ */ + public static char[] validChars = { + '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', + 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's', + 't', 'u', 'v', 'w', 'x', 'y', 'z', '0', '1', '2', + '3', '4', '5', '6', '7', '8', '9' + }; + public static int[] anIntArray233 = { + 0, 1024, 2048, 3072, 4096, 5120, + 6144, 8192, 9216, 12288, 10240, 11264, 16384, 18432, 17408, 20480, + 21504, 22528, 23552, 24576, 25600, 26624, 27648, 28672, 29696, + 30720, 31744, 32768, 33792, 34816, 35840, 36864, 536870912, + 16777216, 37888, 65536, 38912, 131072, 196608, 33554432, 524288, + 1048576, 1572864, 262144, 67108864, 4194304, 134217728, 327680, + 8388608, 2097152, 12582912, 13631488, 14680064, 15728640, + 100663296, 101187584, 101711872, 101974016, 102760448, 102236160, + 40960, 393216, 229376, 117440512, 104857600, 109051904, 201326592, + 205520896, 209715200, 213909504, 106954752, 218103808, 226492416, + 234881024, 222298112, 224395264, 268435456, 272629760, 276824064, + 285212672, 289406976, 223346688, 293601280, 301989888, 318767104, + 297795584, 298844160, 310378496, 102498304, 335544320, 299892736, + 300941312, 301006848, 300974080, 39936, 301465600, 49152, + 1073741824, 369098752, 402653184, 1342177280, 1610612736, + 469762048, 1476395008, -2147483648, -1879048192, 352321536, + 1543503872, -2013265920, -1610612736, -1342177280, -1073741824, + -1543503872, 356515840, -1476395008, -805306368, -536870912, + -268435456, 1577058304, -134217728, 360710144, -67108864, + 364904448, 51200, 57344, 52224, 301203456, 53248, 54272, 55296, + 56320, 301072384, 301073408, 301074432, 301075456, 301076480, + 301077504, 301078528, 301079552, 301080576, 301081600, 301082624, + 301083648, 301084672, 301085696, 301086720, 301087744, 301088768, + 301089792, 301090816, 301091840, 301092864, 301093888, 301094912, + 301095936, 301096960, 301097984, 301099008, 301100032, 301101056, + 301102080, 301103104, 301104128, 301105152, 301106176, 301107200, + 301108224, 301109248, 301110272, 301111296, 
301112320, 301113344, + 301114368, 301115392, 301116416, 301117440, 301118464, 301119488, + 301120512, 301121536, 301122560, 301123584, 301124608, 301125632, + 301126656, 301127680, 301128704, 301129728, 301130752, 301131776, + 301132800, 301133824, 301134848, 301135872, 301136896, 301137920, + 301138944, 301139968, 301140992, 301142016, 301143040, 301144064, + 301145088, 301146112, 301147136, 301148160, 301149184, 301150208, + 301151232, 301152256, 301153280, 301154304, 301155328, 301156352, + 301157376, 301158400, 301159424, 301160448, 301161472, 301162496, + 301163520, 301164544, 301165568, 301166592, 301167616, 301168640, + 301169664, 301170688, 301171712, 301172736, 301173760, 301174784, + 301175808, 301176832, 301177856, 301178880, 301179904, 301180928, + 301181952, 301182976, 301184000, 301185024, 301186048, 301187072, + 301188096, 301189120, 301190144, 301191168, 301193216, 301195264, + 301194240, 301197312, 301198336, 301199360, 301201408, 301202432 + }; + public static byte[] aByteArray235 = { + 22, 22, 22, 22, 22, 22, 21, 22, 22, + 20, 22, 22, 22, 21, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 3, 8, 22, 16, 22, 16, 17, 7, 13, 13, 13, + 16, 7, 10, 6, 16, 10, 11, 12, 12, 12, 12, 13, 13, 14, 14, 11, 14, + 19, 15, 17, 8, 11, 9, 10, 10, 10, 10, 11, 10, 9, 7, 12, 11, 10, 10, + 9, 10, 10, 12, 10, 9, 8, 12, 12, 9, 14, 8, 12, 17, 16, 17, 22, 13, + 21, 4, 7, 6, 5, 3, 6, 6, 5, 4, 10, 7, 5, 6, 4, 4, 6, 10, 5, 4, 4, + 5, 7, 6, 10, 6, 10, 22, 19, 22, 14, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, 22, + 22, 22, 22, 22, 22, 22, 22, 22, 22, 
22, 21, 22, 21, 22, 22, 22, 21, + 22, 22 + }; + /** + * Encrypt a string for the client. + */ + public static int encryptPlayerChat(byte[] is, int i_25_, int i_26_, int i_27_, byte[] is_28_) { + try { + i_27_ += i_25_; + int i_29_ = 0; + int i_30_ = i_26_ << -2116795453; + for (; i_27_ > i_25_; i_25_++) { + int i_31_ = 0xff & is_28_[i_25_]; + int i_32_ = anIntArray233[i_31_]; + int i_33_ = aByteArray235[i_31_]; + int i_34_ = i_30_ >> -1445887805; + int i_35_ = i_30_ & 0x7; + i_29_ &= (-i_35_ >> 473515839); + i_30_ += i_33_; + int i_36_ = ((-1 + (i_35_ - -i_33_)) >> -1430991229) + i_34_; + i_35_ += 24; + is[i_34_] = (byte) (i_29_ = (i_29_ | (i_32_ >>> i_35_))); + if ((i_36_ ^ 0xffffffff) < (i_34_ ^ 0xffffffff)) { + i_34_++; + i_35_ -= 8; + is[i_34_] = (byte) (i_29_ = i_32_ >>> i_35_); + if (i_36_ > i_34_) { + i_34_++; + i_35_ -= 8; + is[i_34_] = (byte) (i_29_ = i_32_ >>> i_35_); + if (i_36_ > i_34_) { + i_35_ -= 8; + i_34_++; + is[i_34_] = (byte) (i_29_ = i_32_ >>> i_35_); + if (i_34_ < i_36_) { + i_35_ -= 8; + i_34_++; + is[i_34_] = (byte) (i_29_ = i_32_ << -i_35_); + } + } + } + } + } + return -i_26_ + ((7 + i_30_) >> -662855293); + } catch (RuntimeException runtimeexception) { + } + return 0; + } + /** + * Gets an integer value from a string. + * @param s the string. + * @return The value; + */ + public static int getValue(String s) { + s = s.replaceAll(", ", "").replaceAll(",", ""); + StringBuilder sb = new StringBuilder(); + char c; + boolean foundStart = false; + for (int i = 0; i < s.length(); i++) { + c = s.charAt(i); + if (Character.isDigit(c) || c == '-') { + sb.append(c); + foundStart = true; + } else if (foundStart) + break; + } + try { + int amount = Integer.parseInt(sb.toString()); + return amount; + } catch (NumberFormatException e) { + return 0; + } + } + /** + * Checks if the account name is valid. + * @param name The account name. + * @return {@code True} if the account name is invalid. 
 */
	public static boolean invalidAccountName(String name) {
		// Length must be 2-12, underscores may not lead, trail or repeat,
		// and every character must come from the valid-name alphabet.
		return name.length() < 2 || name.length() > 12 || name.startsWith("_") || name.endsWith("_") || name.contains("__") || containsInvalidCharacter(name);
	}

	/**
	 * Decodes a base-37 encoded long back into its string form.
	 * Digits are extracted least-significant first and written right-to-left
	 * into positions 0-11 of the scratch buffer.
	 * @param l The base-37 encoded value (as produced by {@link #stringToLong}).
	 * @return The decoded string (empty for 0).
	 */
	public static String longToString(long l) {
		int i = 0;
		// NOTE(review): only indices 0-11 are ever written, so a 12-char
		// buffer would suffice; 32 is harmless but oversized.
		char ac[] = new char[32];
		while (l != 0L) {
			long l1 = l;
			l /= 37L;
			// l1 - l * 37 == l1 % 37, the next base-37 digit.
			ac[11 - i++] = VALID_CHARS[(int) (l1 - l * 37L)];
		}
		return new String(ac, 12 - i, i);
	}

	/**
	 * Packs a string using the client's modified UTF-8 style encoding
	 * (1 byte for ASCII, 2 bytes up to U+07FF, 3 bytes above).
	 * @param position The offset in {@code buffer} to start writing at.
	 * @param buffer The destination byte array.
	 * @param str The string to pack.
	 * @return The number of bytes written.
	 */
	public static final int packGJString2(int position, byte[] buffer,
			String str) {
		int length = str.length();
		int offset = position;
		for (int index = 0; length > index; index++) {
			int character = str.charAt(index);
			if (character > 127) {
				if (character > 2047) {
					// (c | 919275) >> 12 == 0xE0 | (c >> 12) for char-range c
					// (919275 == 0xE07EB): obfuscated lead byte of a 3-byte sequence.
					buffer[offset++] = (byte) ((character | 919275) >> 12);
					buffer[offset++] = (byte) (128 | ((character >> 6) & 63));
					buffer[offset++] = (byte) (128 | (character & 63));
				} else {
					// (c | 12309) >> 6 == 0xC0 | (c >> 6) (12309 == 0x3015):
					// obfuscated lead byte of a 2-byte sequence.
					buffer[offset++] = (byte) ((character | 12309) >> 6);
					buffer[offset++] = (byte) (128 | (character & 63));
				}
			} else
				buffer[offset++] = (byte) character;
		}
		return offset - position;
	}

	/**
	 * Maps an ASCII code point onto the client's reduced character table
	 * (used by the chat cipher).
	 * @param x The ASCII value.
	 * @return The reduced index, or -1 if the character has no mapping.
	 */
	public static long reducedMapping(int x) {
		long out = -1;
		if (x >= 97 && x <= 122)
			out = x - 96; // 'a'-'z' -> 1-26
		else if (x >= 65 && x <= 90)
			out = x - 37; // 'A'-'Z' -> 28-53
		else if (x >= 48 && x <= 57)
			out = x - +5; // '0'-'9' -> 43-52; NOTE(review): overlaps the upper-case range - confirm intended
		else if (x == 32)
			out = 63L; // space
		return out;
	}

	/**
	 * Converts a string to a long.
	 * @param s The string.
	 * @return The long.
+ */ + public static long stringToLong(String s) { + long l = 0L; + for (int i = 0; i < s.length() && i < 12; i++) { + char c = s.charAt(i); + l *= 37L; + if (c >= 'A' && c <= 'Z') + l += (1 + c) - 65; + else if (c >= 'a' && c <= 'z') + l += (1 + c) - 97; + else if (c >= '0' && c <= '9') + l += (27 + c) - 48; + } + while (l % 37L == 0L && l != 0L) + l /= 37L; + return l; + } + + + /** + * The valid characters to be used in names/messages/... + */ + public static final char[] VALID_CHARS = { + '_', 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', + 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', + 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9' + }; + /** + * Character mapping. + */ + public static char[] mapping = { + '\n', + 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', + 'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z', + 'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', + 'N', 'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', + '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', ' ' }; + /** + * The an int array241. 
+ */ + public static int[] anIntArray241 = { 215, 203, 83, 158, 104, 101, 93, 84, + 107, 103, 109, 95, 94, 98, 89, 86, 70, 41, 32, 27, 24, 23, -1, -2, + 26, -3, -4, 31, 30, -5, -6, -7, 37, 38, 36, -8, -9, -10, 40, -11, + -12, 55, 48, 46, 47, -13, -14, -15, 52, 51, -16, -17, 54, -18, -19, + 63, 60, 59, -20, -21, 62, -22, -23, 67, 66, -24, -25, 69, -26, -27, + 199, 132, 80, 77, 76, -28, -29, 79, -30, -31, 87, 85, -32, -33, + -34, -35, -36, 197, -37, 91, -38, 134, -39, -40, -41, 97, -42, -43, + 133, 106, -44, 117, -45, -46, 139, -47, -48, 110, -49, -50, 114, + 113, -51, -52, 116, -53, -54, 135, 138, 136, 129, 125, 124, -55, + -56, 130, 128, -57, -58, -59, 183, -60, -61, -62, -63, -64, 148, + -65, -66, 153, 149, 145, 144, -67, -68, 147, -69, -70, -71, 152, + 154, -72, -73, -74, 157, 171, -75, -76, 207, 184, 174, 167, 166, + 165, -77, -78, -79, 172, 170, -80, -81, -82, 178, -83, 177, 182, + -84, -85, 187, 181, -86, -87, -88, -89, 206, 221, -90, 189, -91, + 198, 254, 262, 195, 196, -92, -93, -94, -95, -96, 252, 255, 250, + -97, 211, 209, -98, -99, 212, -100, 213, -101, -102, -103, 224, + -104, 232, 227, 220, 226, -105, -106, 246, 236, -107, 243, -108, + -109, 231, 237, 235, -110, -111, 239, 238, -112, -113, -114, -115, + -116, 241, -117, 244, -118, -119, 248, -120, 249, -121, -122, -123, + 253, -124, -125, -126, -127, 259, 258, -128, -129, 261, -130, -131, + 390, 327, 296, 281, 274, 271, 270, -132, -133, 273, -134, -135, + 278, 277, -136, -137, 280, -138, -139, 289, 286, 285, -140, -141, + 288, -142, -143, 293, 292, -144, -145, 295, -146, -147, 312, 305, + 302, 301, -148, -149, 304, -150, -151, 309, 308, -152, -153, 311, + -154, -155, 320, 317, 316, -156, -157, 319, -158, -159, 324, 323, + -160, -161, 326, -162, -163, 359, 344, 337, 334, 333, -164, -165, + 336, -166, -167, 341, 340, -168, -169, 343, -170, -171, 352, 349, + 348, -172, -173, 351, -174, -175, 356, 355, -176, -177, 358, -178, + -179, 375, 368, 365, 364, -180, -181, 367, -182, -183, 372, 371, + -184, -185, 
374, -186, -187, 383, 380, 379, -188, -189, 382, -190, + -191, 387, 386, -192, -193, 389, -194, -195, 454, 423, 408, 401, + 398, 397, -196, -197, 400, -198, -199, 405, 404, -200, -201, 407, + -202, -203, 416, 413, 412, -204, -205, 415, -206, -207, 420, 419, + -208, -209, 422, -210, -211, 439, 432, 429, 428, -212, -213, 431, + -214, -215, 436, 435, -216, -217, 438, -218, -219, 447, 444, 443, + -220, -221, 446, -222, -223, 451, 450, -224, -225, 453, -226, -227, + 486, 471, 464, 461, 460, -228, -229, 463, -230, -231, 468, 467, + -232, -233, 470, -234, -235, 479, 476, 475, -236, -237, 478, -238, + -239, 483, 482, -240, -241, 485, -242, -243, 499, 495, 492, 491, + -244, -245, 494, -246, -247, 497, -248, 502, -249, 506, 503, -250, + -251, 505, -252, -253, 508, -254, 510, -255, -256, 0 }; +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/cache/misc/XTEACryption.java b/Tools/Arios Editor/src/org/arios/cache/misc/XTEACryption.java new file mode 100644 index 000000000..5c702f9be --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/cache/misc/XTEACryption.java @@ -0,0 +1,124 @@ +package org.arios.cache.misc; + +import java.nio.ByteBuffer; + +/** + * Holds XTEA cryption methods. + * @author ? + * @author Emperor + */ +public final class XTEACryption { + + /** + * The delta value + */ + private static final int DELTA = -1640531527; + + /** + * The sum. + */ + private static final int SUM = -957401312; + + /** + * The amount of "cryption cycles". + */ + private static final int NUM_ROUNDS = 32; + + /** + * Constructs a new {@code XTEACryption}. + */ + private XTEACryption() { + /* + * empty. + */ + } + + /** + * Decrypts the contents of the buffer. + * @param keys The cryption keys. + * @param buffer The buffer. + */ + public static ByteBuffer decrypt(int[] keys, ByteBuffer buffer) { + return decrypt(keys, buffer, buffer.position(), buffer.limit()); + } + + /** + * Decrypts the buffer data. + * @param keys The keys. 
	 * @param buffer The buffer to decrypt.
	 * @param offset The offset (position) of the first byte to decrypt.
	 * @param length The limit; everything in [offset, length) is processed
	 *               in 8-byte blocks (a trailing partial block is left as-is).
	 * @return The same buffer, decrypted in place.
	 */
	public static ByteBuffer decrypt(int[] keys, ByteBuffer buffer, int offset, int length) {
		int numBlocks = (length - offset) / 8;
		int[] block = new int[2];
		for (int i = 0; i < numBlocks; i++) {
			int index = i * 8 + offset;
			block[0] = buffer.getInt(index);
			block[1] = buffer.getInt(index + 4);
			decipher(keys, block);
			buffer.putInt(index, block[0]);
			buffer.putInt(index + 4, block[1]);
		}
		return buffer;
	}

	/**
	 * Deciphers a single 64-bit block in place (XTEA decryption: runs the
	 * rounds backwards, starting from SUM and subtracting DELTA per round).
	 * @param keys The 128-bit key as four ints.
	 * @param block The two 32-bit halves to decipher, modified in place.
	 */
	private static void decipher(int[] keys, int[] block) {
		long sum = SUM;
		for (int i = 0; i < NUM_ROUNDS; i++) {
			// (sum & 0x1933) >>> 11 keeps only bits 11-12 of sum, so it is
			// equivalent to the canonical XTEA key index (sum >>> 11) & 3.
			block[1] -= (keys[(int) ((sum & 0x1933) >>> 11)] + sum ^ block[0] + (block[0] << 4 ^ block[0] >>> 5));
			sum -= DELTA;
			block[0] -= ((block[1] << 4 ^ block[1] >>> 5) + block[1] ^ keys[(int) (sum & 0x3)] + sum);
		}
	}

	/**
	 * Encrypts the remaining contents of the buffer in place
	 * (from its position to its limit).
	 * @param keys The cryption keys.
	 * @param buffer The buffer to encrypt.
	 */
	public static void encrypt(int[] keys, ByteBuffer buffer) {
		encrypt(keys, buffer, buffer.position(), buffer.limit());
	}

	/**
	 * Encrypts the buffer data in place, 8 bytes at a time.
	 * @param keys The keys.
	 * @param buffer The buffer to encrypt.
	 * @param offset The offset of the data to encrypt.
	 * @param length The limit; a trailing partial block is left untouched.
	 */
	public static void encrypt(int[] keys, ByteBuffer buffer, int offset, int length) {
		int numBlocks = (length - offset) / 8;
		int[] block = new int[2];
		for (int i = 0; i < numBlocks; i++) {
			int index = i * 8 + offset;
			block[0] = buffer.getInt(index);
			block[1] = buffer.getInt(index + 4);
			encipher(keys, block);
			buffer.putInt(index, block[0]);
			buffer.putInt(index + 4, block[1]);
		}
	}

	/**
	 * Enciphers the values of the block.
+ * @param keys The cryption keys. + * @param block The block to encipher. + */ + private static void encipher(int[] keys, int[] block) { + long sum = 0; + for (int i = 0; i < NUM_ROUNDS; i++) { + block[0] += ((block[1] << 4 ^ block[1] >>> 5) + block[1] ^ keys[(int) (sum & 0x3)] + sum); + sum += DELTA; + block[1] += (keys[(int) ((sum & 0x1933) >>> 11)] + sum ^ block[0] + (block[0] << 4 ^ block[0] >>> 5)); + } + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/workspace/WorkFrame.java b/Tools/Arios Editor/src/org/arios/workspace/WorkFrame.java new file mode 100644 index 000000000..61503ee91 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/WorkFrame.java @@ -0,0 +1,294 @@ +package org.arios.workspace; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.ArrayList; +import java.util.List; + +import javax.swing.JButton; +import javax.swing.JFileChooser; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JScrollPane; +import javax.swing.JTabbedPane; +import javax.swing.JTextArea; +import javax.swing.JTextField; +import javax.swing.ScrollPaneConstants; +import javax.swing.UIManager; +import javax.swing.border.BevelBorder; +import javax.swing.border.TitledBorder; +import javax.swing.event.ChangeEvent; +import javax.swing.event.ChangeListener; + +import org.arios.workspace.editor.EditorTab; +import org.arios.workspace.editor.EditorType; + +/** + * The working frame. + * @author Vexia + * + */ +public class WorkFrame extends JFrame implements ActionListener { + + /** + * The serail UID. + */ + private static final long serialVersionUID = 1669005276685828247L; + + /** + * The cache path field. + */ + private JTextField cachePath; + + /** + * The store path field. + */ + private JTextField storePath; + + /** + * The logging console. + */ + private JTextArea console = new JTextArea(); + + /** + * The opened editor tabs. 
+ */ + private JTabbedPane editorTabs = new JTabbedPane(JTabbedPane.TOP); + + /** + * The list of opened editors. + */ + private List editors = new ArrayList<>(); + + /** + * The button to open the cache editor. + */ + private JButton btnCacheEditor = new JButton("Cache Editor"); + + /** + * The button to open the item editor. + */ + private JButton btnItemEditor = new JButton("Item Editor"); + + /** + * The button to open the npc editor. + */ + private JButton btnNpcEditor = new JButton("NPC Editor"); + + /** + * Constructs a new {@code MainFrame} {@code Object} + */ + public WorkFrame() { + super("Arios Editor - " + WorkSettings.VERSION); + setLocationRelativeTo(null); + setSize(WorkSettings.SIZE); + setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + getContentPane().setLayout(null); + + JPanel panel = new JPanel(); + panel.setBounds(10, 10, 1264, 143); + panel.setBorder(new TitledBorder(null, "Workspace Tools", TitledBorder.LEADING, TitledBorder.TOP, null, null)); + getContentPane().add(panel); + panel.setLayout(null); + + JLabel lblCachePath = new JLabel("Cache path:"); + lblCachePath.setBounds(10, 23, 95, 22); + panel.add(lblCachePath); + + cachePath = new JTextField(WorkSpace.getWorkSpace().getSettings().getCachePath()); + cachePath.setBounds(93, 20, 361, 28); + panel.add(cachePath); + cachePath.setColumns(10); + + JLabel lblStorePath = new JLabel("Store path:"); + lblStorePath.setBounds(10, 54, 95, 22); + panel.add(lblStorePath); + + storePath = new JTextField(WorkSpace.getWorkSpace().getSettings().getStorePath()); + storePath.setBounds(93, 51, 361, 28); + storePath.setColumns(10); + panel.add(storePath); + + JButton btnReplaceServerCache = new JButton("Replace Server Cache"); + btnReplaceServerCache.setBounds(6, 78, 168, 29); + panel.add(btnReplaceServerCache); + btnReplaceServerCache.addActionListener(this); + + JButton btnNewButton = new JButton("Save"); + btnNewButton.setBounds(170, 78, 164, 29); + panel.add(btnNewButton); + 
btnNewButton.addActionListener(this); + + JScrollPane scrollPane = new JScrollPane(); + scrollPane.setHorizontalScrollBarPolicy(ScrollPaneConstants.HORIZONTAL_SCROLLBAR_ALWAYS); + scrollPane.setVerticalScrollBarPolicy(ScrollPaneConstants.VERTICAL_SCROLLBAR_ALWAYS); + scrollPane.setBorder(new BevelBorder(BevelBorder.LOWERED, null, null, null, null)); + scrollPane.setBounds(458, 10, 800, 127); + panel.add(scrollPane); + console.setLineWrap(true); + + console.setBackground(UIManager.getColor("Button.background")); + console.setEditable(false); + console.setWrapStyleWord(true); + scrollPane.setViewportView(console); + console.setAutoscrolls(true); + + + btnNpcEditor.setBounds(10, 108, 164, 29); + panel.add(btnNpcEditor); + btnNpcEditor.addActionListener(this); + + btnItemEditor.setBounds(170, 108, 168, 29); + panel.add(btnItemEditor); + btnItemEditor.addActionListener(this); + + + btnCacheEditor.setBounds(332, 108, 117, 29); + panel.add(btnCacheEditor); + + JButton btnShopEditor = new JButton("Shop Editor"); + btnShopEditor.addActionListener(this); + btnShopEditor.setBounds(332, 78, 117, 29); + panel.add(btnShopEditor); + editorTabs.setBounds(0, 149, 1280, 521); + btnCacheEditor.addActionListener(this); + + editorTabs.setBorder(new TitledBorder(null, "Editors", TitledBorder.LEADING, TitledBorder.TOP, null, null)); + editorTabs.addChangeListener(new ChangeListener() { + + @Override + public void stateChanged(ChangeEvent e) { + if (editorTabs.getSelectedIndex() == -1) { + return; + } + EditorTab editor = (EditorTab) editorTabs.getSelectedComponent(); + WorkSpace.getWorkSpace().setEditor(editor); + } + + }); + getContentPane().add(editorTabs); + setResizable(false); + setVisible(true); + } + + @Override + public void actionPerformed(ActionEvent e) { + switch (e.getActionCommand()) { + case "NPC Editor": + case "Item Editor": + case "Cache Editor": + case "Shop Editor": + openEditor(EditorType.forName(e.getActionCommand())); + break; + case "Close NPC Editor": + case 
"Close Item Editor": + case "Close Cache Editor": + closeEditor(EditorType.forName(e.getActionCommand())); + break; + case "Replace Server Store": + case "Replace Server Cache": + JFileChooser chooser = new JFileChooser(); + chooser.setCurrentDirectory(new java.io.File(System.getProperty("user.home") + "/Dropbox/Arios/Source/data")); + chooser.setDialogTitle("Choose replace directory"); + chooser.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); + chooser.setAcceptAllFileFilterUsed(false); + if (chooser.showOpenDialog(null) == JFileChooser.APPROVE_OPTION) { + String from = e.getActionCommand().equals("Replace Server Store") ? WorkSpace.getWorkSpace().getSettings().getStorePath() : WorkSpace.getWorkSpace().getSettings().getCachePath(); + WorkSpace.getWorkSpace().replaceCache(from,chooser.getSelectedFile().getAbsolutePath()); + } else { + System.out.println("No directory choosen."); + } + break; + case "Save": + WorkSpace.getWorkSpace().save(); + break; + } + } + + /** + * Opens an editor tab. + * @param type the type. + * @return {@code True} if so. + */ + public boolean openEditor(EditorType type) { + if (editors.contains(type)) { + System.out.println("The " + type.getTab().getName() + " is already in view."); + return false; + } + JButton button = getButtonByEditor(type); + button.setText("Close " + type.getTab().getName()); + editors.add(type); + editorTabs.add(type.getTab()); + return type.getTab().init(); + } + + /** + * Closes an editor. + * @param type the type. + * @return {@code True} if so. + */ + public boolean closeEditor(EditorType type) { + if (!editors.contains(type)) { + System.out.println("The " + type.getTab().getName() + " isn't in view."); + return false; + } + JButton button = getButtonByEditor(type); + button.setText(type.getTab().getName()); + editorTabs.remove(type.getTab()); + return editors.remove(type); + } + + /** + * Gets the jbutton by the editor. + * @param type the type. + * @return the jbutton. 
+ */ + public JButton getButtonByEditor(EditorType type) { + JButton button = btnNpcEditor; + if (type == EditorType.ITEM) { + button = btnItemEditor; + } + return button; + } + + /** + * Logs a message on the console. + * @param message the message. + */ + public void log(String message) { + console.append(message + "\n"); + } + + /** + * Gets the editorTabs. + * @return the editorTabs. + */ + public JTabbedPane getEditorTabs() { + return editorTabs; + } + + /** + * Sets the editorTabs. + * @param editorTabs the editorTabs to set + */ + public void setEditorTabs(JTabbedPane editorTabs) { + this.editorTabs = editorTabs; + } + + /** + * Gets the editors. + * @return the editors. + */ + public List getEditors() { + return editors; + } + + /** + * Sets the editors. + * @param editors the editors to set + */ + public void setEditors(List editors) { + this.editors = editors; + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/WorkLogger.java b/Tools/Arios Editor/src/org/arios/workspace/WorkLogger.java new file mode 100644 index 000000000..4570c1947 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/WorkLogger.java @@ -0,0 +1,71 @@ +package org.arios.workspace; + +import java.io.PrintStream; +import java.util.Calendar; +import java.util.Date; + +/** + * Loggs the users work. + * @author Vexia + * + */ +public class WorkLogger extends PrintStream { + + /** + * The current time. + */ + private static final Date TIME = Calendar.getInstance().getTime(); + + /** + * Constructs a new {@code WorkLogger} {@code Object} + * @param stream the stream. + */ + public WorkLogger(PrintStream stream) { + super(stream); + } + + @Override + public void println(String message) { + log(message); + } + + @Override + public PrintStream printf(String message, Object... 
objects) {return null;} + + @Override + public void println(boolean message) { + log(String.valueOf(message)); + } + + @Override + public void println(int message) { + log(String.valueOf(message)); + } + + @Override + public void println(double message) { + log(String.valueOf(message)); + } + + @Override + public void println(char message) {} + + @Override + public void println(long message) {} + + /** + * Method used to log the message. + * @param message the message. + */ + public void log(final String message) { + WorkSpace.getWorkSpace().getFrame().log(getDisplay() + message); + } + + /** + * Gets the display details. + * @return the string. + */ + public String getDisplay() { + return "[" + TIME + "][" + "Arios" + "]: "; + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/WorkSettings.java b/Tools/Arios Editor/src/org/arios/workspace/WorkSettings.java new file mode 100644 index 000000000..3024a5489 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/WorkSettings.java @@ -0,0 +1,87 @@ +package org.arios.workspace; + +import java.awt.Dimension; +import java.io.File; + +/** + * The settings for the work space. + * @author Vexia + * + */ +public class WorkSettings { + + /** + * The size of the work station. + */ + public static final Dimension SIZE = new Dimension(1280, 800 - 100); + + /** + * The version of the program. + */ + public static final double VERSION = 1.0; + + /** + * The cache path. + */ + private String cachePath; + + /** + * The store path.s + */ + private String storePath; + + /** + * Constructs a new {@Code WorkSettings} {@Code Object} + * @param cachePath the cache path. + * @param storePath the store path. + */ + public WorkSettings(String cachePath, String storePath) { + this.cachePath = cachePath; + this.storePath = storePath; + } + + /** + * Creates a default work space setting. 
+ * @return the settings.s + */ + public static WorkSettings create() { + File file = new File(""); + String store = file.getAbsolutePath(); + System.err.println(store); + store = store.substring(0, store.lastIndexOf(File.separator) + 1) + "Source" + File.separator + "data" + File.separator + "store"; + return new WorkSettings("data" + File.separator + "cache", store);//System.getProperty("user.home") + "/Dropbox/Arios RSPS/Source/data/store"); + } + + /** + * Gets the cachePath. + * @return the cachePath. + */ + public String getCachePath() { + return cachePath; + } + + /** + * Sets the cachePath. + * @param cachePath the cachePath to set + */ + public void setCachePath(String cachePath) { + this.cachePath = cachePath; + } + + /** + * Gets the storePath. + * @return the storePath. + */ + public String getStorePath() { + return storePath; + } + + /** + * Sets the storePath. + * @param storePath the storePath to set + */ + public void setStorePath(String storePath) { + this.storePath = storePath; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/WorkSpace.java b/Tools/Arios Editor/src/org/arios/workspace/WorkSpace.java new file mode 100644 index 000000000..e436b022a --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/WorkSpace.java @@ -0,0 +1,227 @@ +package org.arios.workspace; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.nio.channels.FileChannel; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import javax.swing.JOptionPane; + +import org.arios.cache.Cache; +import org.arios.cache.ServerStore; +import org.arios.workspace.editor.EditorTab; +import org.arios.workspace.editor.EditorType; + + +/** + * The users work space. + * @author Vexia + * + */ +public final class WorkSpace { + + /** + * The workspace singleton. + */ + private static WorkSpace WORKSPACE = new WorkSpace(); + + /** + * The work settings. 
+ */ + private final WorkSettings settings; + + /** + * The working frame. + */ + private WorkFrame frame; + + /** + * The current editor. + */ + private EditorTab editor; + + /** + * Constructs a new {@code WorkSpace} {@code Object} + * @param settings th∂e settings. + */ + public WorkSpace(WorkSettings settings) { + this.settings = settings; + } + + /** + * Constructs a new {@Code WorkSpace} {@Code Object} + */ + public WorkSpace() { + this(WorkSettings.create()); + } + + /** + * Initializes this work space. + * @return the work space. + * @throws Throwable the throwable. + */ + public WorkSpace init() throws Throwable { + setFrame(new WorkFrame()); + Cache.init(); + ServerStore.init(); + EditorType.init(); + System.out.println("Initialized the Arios editor!"); + return this; + } + + /** + * saves the editor. + */ + public void save() { + save(false); + } + + /** + * Saves the editors. + */ + public void save(boolean force) { + if (!force && frame.getEditors().size() == 0) { + JOptionPane.showMessageDialog(null, "Your workspace is empty."); + return; + } + /*String[] types = new String[] {"bronze", "iron", "steel", "black", "blurite", "mithril", "adamant", "rune", "dragon"}; + + for (Node i : EditorType.ITEM.getTab().getNodes().values()) { + Item item = (Item) i; + for (String s : types) { + if (item.getName().toLowerCase().startsWith(s) && item.getName().contains("sword") && !item.getName().contains("2h")) { + System.err.println(item.getName()); + item.setConfig("attack_audios", new Short[] {2517, 2517, 2500, 2517}); + } + } + } + for (Node i : EditorType.NPC.getTab().getNodes().values()) { + NPC n = (NPC) i; + if (n.getId() == 6263 || n.getId() == 6261 || n.getId() == 6265) { + System.err.println(n); + NPCDrop[] t = n.getDrobTable(TableType.MAIN); + for (NPCDrop d : t) { + if (d.getItemId() >= 11710) { + d.setSetRate(5400); + } + } + } + }*/ + backup(); + for (EditorType type : frame.getEditors()) { + type.getTab().preSave(); + type.getTab().save(); + } + 
ServerStore.createStaticStore(settings.getStorePath()); + System.out.println("Saved the workspace!"); + } + + /** + * Backs up the cache. + */ + private void backup() { + /*File file = new File("./data/backup/"); + if (!file.exists()) { + file.mkdir(); + } + File cache = new File(settings.getStorePath() + "/static_cache.arios"); + copyFile(cache, new File("./data/backup/static_cache_" + (new SimpleDateFormat("yyyy-MM-dd hh-mm-ss").format(new Date()) + ".arios")));*/ + } + + /** + * Replaces a directory with another. + * @param from the path to copy. + * @param toDirectory the path to copy to. + */ + public void replaceCache(String from, String toDirectory) { + File fileFrom = new File(from); + List files = new ArrayList<>(); + if (fileFrom.isDirectory()) { + files.addAll(Arrays.asList(fileFrom.listFiles())); + } else { + files.add(fileFrom); + } + for (File file : files) { + copyFile(file, new File(toDirectory + "/" + file.getName())); + } + JOptionPane.showMessageDialog(null, "Replaced a cache from " + from + " to " + toDirectory + "!"); + } + + /** + * Copies a file. + * @param in The file to be copied. + * @param out The file to copy to. + */ + private static void copyFile(File in, File out) { + try (FileChannel channel = new FileInputStream(in).getChannel()) { + try (FileChannel output = new FileOutputStream(out).getChannel()) { + channel.transferTo(0, channel.size(), output); + channel.close(); + output.close(); + } + } catch (IOException e) { + e.printStackTrace(); + } + } + + /** + * Gets the settings. + * @return the settings. + */ + public WorkSettings getSettings() { + return settings; + } + + /** + * Gets the frame. + * @return the frame. + */ + public WorkFrame getFrame() { + return frame; + } + + /** + * Sets the frame. + * @param frame the frame to set + */ + public void setFrame(WorkFrame frame) { + this.frame = frame; + } + + /** + * Gets the work space. + * @return the work space. 
 */
	public static WorkSpace getWorkSpace() {
		return WORKSPACE;
	}

	/**
	 * Sets the work space singleton.
	 * @param workSpace the work space to set.
	 */
	public static void setWorkSpace(WorkSpace workSpace) {
		WORKSPACE = workSpace;
	}

	/**
	 * Gets the currently focused editor tab.
	 * @return the editor.
	 */
	public EditorTab getEditor() {
		return editor;
	}

	/**
	 * Sets the currently focused editor tab.
	 * @param editor the editor to set.
	 */
	public void setEditor(EditorTab editor) {
		this.editor = editor;
	}

}
diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/EditorTab.java b/Tools/Arios Editor/src/org/arios/workspace/editor/EditorTab.java
new file mode 100644
index 000000000..c6927db81
--- /dev/null
+++ b/Tools/Arios Editor/src/org/arios/workspace/editor/EditorTab.java
@@ -0,0 +1,150 @@
package org.arios.workspace.editor;

import java.awt.Dimension;
import java.util.HashMap;
import java.util.Map;

import javax.swing.JOptionPane;
import javax.swing.JPanel;

import org.arios.workspace.node.Node;

/**
 * Represents an editor tab.
 * @author Vexia
 *
 */
public abstract class EditorTab extends JPanel {

	/**
	 * The size of the tab panel.
	 */
	public static final Dimension SIZE = new Dimension(1280, 521);

	/**
	 * The maximum number of node editors that may be open at once.
	 */
	public static final int MAX_EDITORS = 10;

	/**
	 * The serial version UID.
	 */
	private static final long serialVersionUID = 8899748585906614907L;

	/**
	 * The preloaded nodes for this editor.
	 * NOTE(review): generic parameters appear stripped by extraction
	 * (presumably {@code Map<Integer, Node>}) - confirm against the original source.
	 */
	protected final Map> nodes = new HashMap<>();

	/**
	 * The panel used for searching nodes.
	 */
	protected final NodePanel nodePanel = new NodePanel(this);

	/**
	 * The opened editors, keyed by node id.
	 */
	private Map editors = new HashMap<>();

	/**
	 * The name of the tab.
	 */
	private final String name;

	/**
	 * Constructs a new {@code EditorTab} {@code Object}.
	 * @param name the name.
+ */ + public EditorTab(String name) { + super(); + this.name = name; + setLayout(null); + setSize(SIZE); + nodePanel.getNodeList().setLocation(22, -122); + nodePanel.getNodeList().setSize(256, 300); + nodePanel.setLocation(0, 6); + add(nodePanel); + } + + /** + * Parses the configs for this editor. + */ + public abstract void parse(); + + /** + * Saves this editor. + */ + public abstract boolean save(); + + /** + * Initializes this tab. + */ + public boolean init() { + return true; + } + + /** + * Used to clean up & save configs. + */ + public void preSave() { + for (NodeEditor editor : editors.values()) { + editor.save(); + } + } + + /** + * Edits a node. + * @param edit the edit. + * @return {@code True} if opened. + */ + public boolean edit(Node edit) { + if (editors.size() > MAX_EDITORS) { + JOptionPane.showMessageDialog(null, "You can't have any more editors open."); + return false; + } + if (editors.containsKey(edit.getId())) { + JOptionPane.showMessageDialog(null, "Node with id - " + edit.getId() + " is already opened."); + return false; + } + NodeEditor editor = getEditor(edit); + editors.put(edit.getId(), editor); + nodePanel.getTabbedPane().addTab(edit.toString(), editor); + return true; + } + + /** + * Gets the node editor. + * @param edit the edit. + * @return the editor. + */ + public NodeEditor getEditor(Node edit) { + return new NodeEditor(edit); + } + + /** + * Closes the node editor. + * @param editor the editor. + */ + public void closeEditor(NodeEditor editor) { + editors.remove(editor.getNode().getId()); + nodePanel.getTabbedPane().remove(editor); + } + + /** + * Gets the name. + * @return the name. + */ + public String getName() { + return name; + } + + /** + * Gets the nodes. + * @return the nodes. 
+ */ + public Map> getNodes() { + return nodes; + } + + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/EditorType.java b/Tools/Arios Editor/src/org/arios/workspace/editor/EditorType.java new file mode 100644 index 000000000..827be5386 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/editor/EditorType.java @@ -0,0 +1,61 @@ +package org.arios.workspace.editor; + +import org.arios.workspace.node.item.ItemEditor; +import org.arios.workspace.node.item.shop.ShopEditor; +import org.arios.workspace.node.npc.NPCEditor; + +/** + * An editor type. + * @author Vexia + * + */ +public enum EditorType { + NPC(new NPCEditor("NPC Editor")), + ITEM(new ItemEditor("Item Editor")), + SHOP(new ShopEditor("Shop Editor")); + + /** + * The tab. + */ + private final EditorTab tab; + + /** + * Constructs a new {@Code EditorType} {@Code Object} + * @param tab the tab. + */ + private EditorType(EditorTab tab) { + this.tab = tab; + } + + /** + * Initializes the editors. + */ + public static void init() { + for (EditorType type : values()) { + type.getTab().parse(); + } + } + + /** + * Gets an editor type by the name. + * @param actionCommand the command. + * @return the editor. + */ + public static EditorType forName(String actionCommand) { + for (EditorType type : values()) { + if (type.getTab().getName().equals(actionCommand) || ("Close " + type.getTab().getName()).equals(actionCommand)) { + return type; + } + } + return NPC; + } + + /** + * Gets the tab. + * @return the tab. 
+ */ + public EditorTab getTab() { + return tab; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/NodeEditor.java b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeEditor.java new file mode 100644 index 000000000..67a1b9f18 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeEditor.java @@ -0,0 +1,128 @@ +package org.arios.workspace.editor; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.Map.Entry; + +import javax.swing.JButton; +import javax.swing.JOptionPane; +import javax.swing.JPanel; + +import org.arios.workspace.WorkSpace; +import org.arios.workspace.node.Configuration; +import org.arios.workspace.node.Node; +import org.arios.workspace.node.npc.NPC; +import org.arios.workspace.node.npc.NPCDropPanel; + +/** + * Used for editing a node. + * @author Vexia + * + */ +public class NodeEditor extends JPanel { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 8520324823671510804L; + + /** + * The node being eddited. + */ + private final Node node; + + /** + * The node table. + */ + private NodeTable table; + + /** + * The npc drop panel. + */ + private NPCDropPanel dropPanel; + + /** + * Constructs a new {@Code NodeEditor} {@Code Object} + * @param node the node. + * @param o the o..? + */ + public NodeEditor(Node node, boolean o) { + super(); + this.node = node; + setLayout(null); + } + + /** + * Constructs a new {@Code NodeEditor} {@Code Object} + * @param node the node. 
+ */ + public NodeEditor(final Node node) { + super(); + this.node = node; + this.table = new NodeTable(node); + setLayout(null); + JButton btnClose = new JButton("Close"); + btnClose.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + WorkSpace.getWorkSpace().getEditor().closeEditor(NodeEditor.this); + } + }); + btnClose.setBounds(6, 6, 117, 29); + add(btnClose); + add(table); + + JButton btnCopyConfigurations = new JButton("Copy configurations"); + btnCopyConfigurations.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + int id = Integer.parseInt(JOptionPane.showInputDialog("Enter the id:")); + Node n = WorkSpace.getWorkSpace().getEditor().getNodes().get(id); + if (n == null) { + JOptionPane.showMessageDialog(null, "Invalid id!"); + return; + } + for (Entry> s : n.getConfigurations().entrySet()) { + Configuration c = s.getValue(); + node.getConfigurations().get(s.getKey()).setValue(c.getValue()); + } + remove(table); + table = new NodeTable(node); + add(table); + } + }); + btnCopyConfigurations.setBounds(126, 6, 163, 29); + add(btnCopyConfigurations); + if (node instanceof NPC) { + dropPanel = new NPCDropPanel((NPC) node); + dropPanel.setLocation(303, 17); + dropPanel.setSize(633, 394); + add(dropPanel); + } + } + + /** + * Saves the node editor. + */ + public void save() { + table.save(); + if (dropPanel != null) { + dropPanel.save(); + } + } + + /** + * Gets the node. + * @return the node. + */ + public Node getNode() { + return node; + } + + /** + * Gets the table. + * @return the table. 
+ */ + public NodeTable getTable() { + return table; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/NodeList.java b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeList.java new file mode 100644 index 000000000..e8d51c0db --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeList.java @@ -0,0 +1,101 @@ +package org.arios.workspace.editor; + +import java.awt.Component; + +import javax.swing.DefaultListCellRenderer; +import javax.swing.DefaultListModel; +import javax.swing.JLabel; +import javax.swing.JList; +import javax.swing.ListCellRenderer; +import javax.swing.event.ListSelectionEvent; +import javax.swing.event.ListSelectionListener; + +import org.arios.workspace.WorkSpace; +import org.arios.workspace.node.Node; + +/** + * A list of nodes to search. + * @author Vexia + * + */ +public class NodeList extends JList> { + + /** + * The serial version UID. + */ + private static final long serialVersionUID = 102017980938742769L; + + /** + * The node list model. + */ + private final DefaultListModel> model = new DefaultListModel>(); + + /** + * The node list renderer. + */ + private final NodeListRenderer renderer = new NodeListRenderer(); + + /** + * Constructs a new {@Code NodeList} {@Code Object} + */ + public NodeList() { + super(); + setModel(model); + setCellRenderer(renderer); + addListSelectionListener(new ListSelectionHandler()); + } + + /** + * Gets the default model. + * @return the model. + */ + public DefaultListModel> getDefaultModel() { + return model; + } + + /** + * Renderes nodes on a list. + * @author Vexia + * + */ + public class NodeListRenderer implements ListCellRenderer> { + + /** + * The default renderer. 
+ */ + protected DefaultListCellRenderer defaultRenderer = new DefaultListCellRenderer(); + + @Override + public Component getListCellRendererComponent(JList> list, Node value, int index, boolean isSelected, boolean cellHasFocus) { + JLabel renderer = (JLabel) defaultRenderer.getListCellRendererComponent(list, value, index, isSelected, cellHasFocus); + renderer.setText(value.toString()); + return renderer; + } + + } + + /** + * Handles the list selection. + * @author Vexia + * + */ + public class ListSelectionHandler implements ListSelectionListener { + + @Override + public void valueChanged(ListSelectionEvent e) { + boolean adjust = e.getValueIsAdjusting(); + if (!adjust) { + @SuppressWarnings("unchecked") + JList> list = (JList>) e.getSource(); + int selections[] = list.getSelectedIndices(); + @SuppressWarnings("deprecation") + Object selectionValues[] = list.getSelectedValues(); + for (int i = 0, n = selections.length; i < n; i++) { + if (!WorkSpace.getWorkSpace().getEditor().edit((Node) selectionValues[i])) { + break; + } + } + } + } + } + } diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/NodePanel.java b/Tools/Arios Editor/src/org/arios/workspace/editor/NodePanel.java new file mode 100644 index 000000000..d000c1df0 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/editor/NodePanel.java @@ -0,0 +1,275 @@ +package org.arios.workspace.editor; + +import java.awt.GridLayout; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.ArrayList; +import java.util.List; + +import javax.swing.JButton; +import javax.swing.JLabel; +import javax.swing.JOptionPane; +import javax.swing.JPanel; +import javax.swing.JScrollPane; +import javax.swing.JTabbedPane; +import javax.swing.JTextField; + +import org.arios.workspace.node.Node; +import org.arios.workspace.node.item.ItemWrapper; +import org.arios.workspace.node.item.shop.Shop; +import org.arios.workspace.node.item.shop.ShopEditor; +import 
org.arios.workspace.node.item.shop.ShopManager; + + +/** + * The panel for a node. + * @author Vexia + * + */ +public class NodePanel extends JPanel implements ActionListener { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 2776394653113735129L; + + /** + * The editor tab. + */ + private final EditorTab tab; + + /** + * The list for this panel. + */ + private final NodeList nodeList = new NodeList(); + + /** + * The id searched. + */ + private JTextField idSearch; + + /** + * The searched name. + */ + private JTextField nameSearch; + + /** + * The search name lavel. + */ + private JLabel lblSearchName; + + /** + * The tabbed pane. + */ + private JTabbedPane tabbedPane = new JTabbedPane(JTabbedPane.TOP); + + /** + * Constructs a new {@Code NodeListPanel} {@Code Object} + */ + public NodePanel(EditorTab tab) { + super(); + this.tab = tab; + setLayout(null); + JScrollPane nodeScroll = new JScrollPane(nodeList); + nodeScroll.setBounds(18, 74, 260, 364); + add(nodeScroll); + + nameSearch = new JTextField(); + nameSearch.setColumns(10); + nameSearch.setBounds(95, 39, 179, 21); + add(nameSearch); + nameSearch.addActionListener(this); + + if (!(tab instanceof ShopEditor)) { + JLabel lblSearchId = new JLabel("Search id:"); + lblSearchId.setBounds(6, 4, 98, 27); + add(lblSearchId); + + idSearch = new JTextField(); + idSearch.setBounds(95, 6, 179, 21); + add(idSearch); + idSearch.setColumns(10); + idSearch.addActionListener(this); + + } else { + JButton addShop = new JButton("Add Shop"); + // public Shop(String title, ItemWrapper[] items, boolean general, int currency, int[] npcs, boolean highAlch) { + addShop.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + JPanel panel = new JPanel(new GridLayout(0, 1)); + panel.add(new JLabel("Enter title:")); + JTextField idField = new JTextField(""); + panel.add(idField); + panel.add(new JLabel("Items:")); + JTextField items = new JTextField(""); + 
panel.add(items); + panel.add(new JLabel("General:")); + JTextField general = new JTextField("false"); + panel.add(general); + panel.add(new JLabel("Currency:")); + JTextField currency = new JTextField("995"); + panel.add(currency); + panel.add(new JLabel("NPCS:")); + JTextField npcs = new JTextField(""); + panel.add(npcs); + panel.add(new JLabel("High alch:")); + JTextField highAlch = new JTextField("false"); + panel.add(highAlch); + int result = JOptionPane.showConfirmDialog(null, panel, "Drop Creator", JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE); + if (result == JOptionPane.OK_OPTION) { + String[] t = npcs.getText().split(","); + int[] n = new int[t.length]; + for (int i = 0; i < n.length; i++) { + n[i] = Integer.parseInt(t[i]); + } + String[] tokens = items.getText().trim().split(","); + boolean amt = false; + int id = 0; + int amount = 0; + List wraps = new ArrayList<>(); + for (String tok : tokens) { + if (amt) { + amount = Integer.parseInt(tok.replace(")", "").trim()); + wraps.add(new ItemWrapper(id, amount)); + } else { + id = Integer.parseInt(tok.replace("(", "").trim()); + } + amt = !amt; + } + Shop shop = new Shop(idField.getText(), wraps.toArray(new ItemWrapper[] {}), Boolean.parseBoolean(general.getText()), Integer.parseInt(currency.getText()), n, Boolean.parseBoolean(highAlch.getText())); + ShopManager.getShops().add(shop); + EditorType.SHOP.getTab().getNodes().put(shop.getTitle().hashCode(), shop); + } + } + }); + addShop.setBounds(38, 4, 200, 27); + add(addShop); + } + lblSearchName = new JLabel("Search name:"); + lblSearchName.setBounds(6, 33, 98, 27); + add(lblSearchName); + + tabbedPane.setBounds(286, 0, 964, 447); + add(tabbedPane); + setSize(1268, 446); + } + + @Override + public void actionPerformed(ActionEvent e) { + if (e.getSource() == idSearch || e.getSource() == nameSearch) { + search(e.getSource() == idSearch? true : false); + return; + } + } + + /** + * Searches fora node & lists them. + * @param id the id. 
+ */ + private void search(boolean id) { + List> nodes = new ArrayList<>(); + String text = id ? idSearch.getText() : nameSearch.getText(); + if (text == null || text.length() == 0) { + return; + } + nodeList.getDefaultModel().clear(); + if (id) { + int realId = 0; + try { + realId = Integer.parseInt(text); + } catch (NumberFormatException e) { + + } + nodes.add(tab.getNodes().get(realId)); + } else { + for (Node node : tab.getNodes().values()) { + if (node.getName().toLowerCase().startsWith(text.toLowerCase())) { + nodes.add(node); + } + } + } + for (Node node : nodes) { + nodeList.getDefaultModel().addElement(node); + } + } + + /** + * Gets the nodeList. + * @return the nodeList. + */ + public NodeList getNodeList() { + return nodeList; + } + + /** + * Gets the idSearch. + * @return the idSearch. + */ + public JTextField getIdSearch() { + return idSearch; + } + + /** + * Sets the idSearch. + * @param idSearch the idSearch to set + */ + public void setIdSearch(JTextField idSearch) { + this.idSearch = idSearch; + } + + /** + * Gets the nameSearch. + * @return the nameSearch. + */ + public JTextField getNameSearch() { + return nameSearch; + } + + /** + * Sets the nameSearch. + * @param nameSearch the nameSearch to set + */ + public void setNameSearch(JTextField nameSearch) { + this.nameSearch = nameSearch; + } + + /** + * Gets the lblSearchName. + * @return the lblSearchName. + */ + public JLabel getLblSearchName() { + return lblSearchName; + } + + /** + * Sets the lblSearchName. + * @param lblSearchName the lblSearchName to set + */ + public void setLblSearchName(JLabel lblSearchName) { + this.lblSearchName = lblSearchName; + } + + /** + * Gets the tabbedPane. + * @return the tabbedPane. + */ + public JTabbedPane getTabbedPane() { + return tabbedPane; + } + + /** + * Sets the tabbedPane. + * @param tabbedPane the tabbedPane to set + */ + public void setTabbedPane(JTabbedPane tabbedPane) { + this.tabbedPane = tabbedPane; + } + + /** + * Gets the tab. 
+ * @return the tab. + */ + public EditorTab getTab() { + return tab; + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/editor/NodeTable.java b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeTable.java new file mode 100644 index 000000000..88a7bfeb6 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/editor/NodeTable.java @@ -0,0 +1,178 @@ +package org.arios.workspace.editor; + +import java.awt.Color; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import javax.swing.JScrollPane; +import javax.swing.JTable; +import javax.swing.border.MatteBorder; + +import org.arios.workspace.node.Configuration; +import org.arios.workspace.node.Node; +import org.arios.workspace.node.item.ItemWrapper; +import org.arios.workspace.node.item.shop.Shop; + +/** + * Represents a table used to represent a nodes configs. + * @author Vexia + * + */ +public final class NodeTable extends JScrollPane { + + /** + * The serail UID. + */ + private static final long serialVersionUID = -6345104732675498099L; + + /** + * The column names. + */ + private static final String[] COLUMN_NAMES = new String[] {"Config", "Value"}; + + /** + * The node we're editing. + */ + private final Node node; + + /** + * The table for the nodes configs. + */ + private final JTable table; + + /** + * Constructs a new {@Code NodeTable} {@Code Object} + * @param node the node. + */ + public NodeTable(Node node) { + super(); + setSize(285, 353); + this.table = createTable(node); + this.node = node; + setBorder(new MatteBorder(1, 1, 1, 1, (Color) new Color(0, 0, 0))); + setBounds(16, 47, node instanceof Shop ? 800 : 285, 341); + setViewportView(table); + setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS); + setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); + } + + /** + * Saves the table. 
+ */ + public void save() { + for (int i = 0; i < table.getRowCount(); i++) { + String key = (String) table.getValueAt(i, 0); + Object obj = table.getValueAt(i, 1); + if (!(obj instanceof String)) { + node.setConfig(key, obj); + continue; + } + String value = (String) table.getValueAt(i, 1); + Configuration config = node.getConfigurations().get(key); + if (config.getType() == Byte.class) { + node.setConfig(key, Byte.valueOf(value)); + } else if (config.getType() == Short.class) { + node.setConfig(key, Short.valueOf(value)); + } else if (config.getType() == Integer.class) { + node.setConfig(key, Integer.valueOf(value)); + } else if (config.getType() == Double.class) { + node.setConfig(key, Double.valueOf(value)); + } else if (config.getType() == Boolean.class) { + node.setConfig(key, Boolean.valueOf(value)); + } else if (config.getType() == String.class) { + node.setConfig(key, value); + } else if (config.getType() == ItemWrapper[].class) { + String[] tokens = value.replace("[", "").replace("]", "").trim().split(","); + boolean amt = false; + int id = 0; + int amount = 0; + List wraps = new ArrayList<>(); + for (String tok : tokens) { + if (amt) { + amount = Integer.parseInt(tok.replace(")", "").trim()); + wraps.add(new ItemWrapper(id, amount)); + } else { + id = Integer.parseInt(tok.replace("(", "").trim()); + } + amt = !amt; + } + node.setConfig(key, wraps.toArray(new ItemWrapper[] {})); + } else if (config.getType() == Map.class || config.getType() == HashMap.class) { + @SuppressWarnings("unchecked") + Map req = (Map) config.getValue(); + req.clear(); + String[] tokens = value.replace("{", "").replace("}", "").trim().split(","); + for (String token : tokens) { + token = token.trim(); + req.put(Integer.parseInt(token.split("=")[0]), Integer.parseInt(token.split("=")[1])); + } + node.setConfig(key, req); + } else if (config.getType() == Short[].class) { + String[] tokens = value.replace("[", "").replace("]", "").trim().split(","); + if (tokens.length == 0) { + 
System.err.println("Tokens=" + Arrays.toString(tokens)); + return; + } + for (int i1 = 0; i1 < tokens.length; i1++) { + String token = tokens[i1].trim(); + if (token == null || token.length() == 0) { + continue; + } + if (token.equals("null")) { + continue; + } + Short[] s = (Short[]) config.getValue(); + if (s == null) { + continue; + } + if (s.length == 0) { + s = new Short[tokens.length]; + } + s[i1] = Short.valueOf(token); + config.setDefaultValue(false); + node.setConfig(key, s); + } + } + if (node instanceof Shop) { + Shop s = (Shop) node; + s.setFromConfigs(); + } + } + } + + /** + * Creates a jtable. + * @param node the node. + * @return the table. + */ + public static JTable createTable(Node node) { + Object[][] objects = new Object[node.getDefinition().getConfigurations().size()][2]; + int index = 0; + for (Entry> config : node.getDefinition().getConfigurations().entrySet() ) { + objects[index][0] = config.getKey(); + objects[index][1] = config.getValue().getValue(); + if (config.getValue().getValue() instanceof Short[]) { + objects[index][1] = Arrays.toString((Short[]) config.getValue().getValue()); + } else if (config.getValue().getValue() instanceof Integer[]) { + objects[index][1] = Arrays.toString((Integer[]) config.getValue().getValue()); + } else if (config.getValue().getValue() instanceof ItemWrapper[]) { + objects[index][1] = Arrays.toString((ItemWrapper[]) config.getValue().getValue()); + } + index++; + } + return new JTable(objects, COLUMN_NAMES); + } + + /** + * Gets the node. + * @return the node. 
+ */ + public Node getNode() { + return node; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/Configuration.java b/Tools/Arios Editor/src/org/arios/workspace/node/Configuration.java new file mode 100644 index 000000000..ee0955a9c --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/Configuration.java @@ -0,0 +1,198 @@ +package org.arios.workspace.node; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.misc.ByteBufferUtils; + +/** + * Represents a configuration. + * @author Emperor + */ +public class Configuration { + + /** + * The parsing opcode. + */ + private final int opcode; + + /** + * The value of the configuration. + */ + protected T value; + + /** + * The class of the generic type. + */ + private final Class type; + + /** + * Checks if the value is default. + */ + private boolean defaultValue = true; + + /** + * Constructs a new {@code Configuration} {@code Object}. + * @param opcode The opcode. + * @param value The value. + */ + @SuppressWarnings("unchecked") + public Configuration(int opcode, T value) { + if (opcode < 1) { + throw new IllegalStateException("Opcode can't be smaller than 1!"); + } + this.opcode = opcode; + this.value = value; + this.type = (Class) value.getClass(); + } + + /** + * Saves to the byte buffer. 
+ */ + public void save(ByteBuffer buffer) { + if (type == Integer[].class) { + Integer[] array = (Integer[]) value; + buffer.put((byte) array.length); + for (int i = 0; i < array.length; i++) { + buffer.putInt(array[i]); + } + } else if (type == Short[].class) { + Short[] array = (Short[]) value; + buffer.put((byte) array.length); + for (int i = 0; i < array.length; i++) { + buffer.putShort(array[i]); + } + } else if (type == Byte.class) { + buffer.put((Byte) value); + } else if (type == Integer.class) { + buffer.putInt((Integer) value); + } else if (type == Short.class) { + buffer.putShort((Short) value); + } else if (type == String.class) { + ByteBufferUtils.putString((String) value, buffer); + } else if (type == Double.class) { + buffer.putDouble((Double) value); + } else if (type == HashMap.class) { + int size = buffer.get(); + Map requirements = new HashMap<>(); + for (int i = 0; i < size; i++) { + requirements.put(buffer.get() & 0xFF, buffer.get() & 0xFF); + } + setValue(requirements); + } else if (type == Boolean.class) { + + } else { + System.err.println("unknown type for " + type); + } + } + + /** + * Parses a config. + * @param buffer the buffer. 
+ */ + public void parse(ByteBuffer buffer) { + if (type == Integer[].class) { + int size = buffer.get(); + int[] array = new int[size]; + for (int i = 0; i < size; i++){ + array[i] = buffer.getInt(); + } + setValue(array); + } else if (type == Short[].class) { + int size = buffer.get(); + short[] array = new short[size]; + for (int i = 0; i < size; i++){ + array[i] = buffer.getShort(); + } + setValue(array); + } else if (type == HashMap.class) { + @SuppressWarnings("unchecked") + HashMap req = (HashMap) value; + buffer.put((byte) req.size()); + for (int skill : req.keySet()) { + buffer.put((byte) skill); + buffer.put((byte) (int) req.get(skill)); + } + } else if (type == Byte.class) { + setValue(buffer.get()); + } else if (type == Short.class) { + setValue(buffer.getShort()); + } else if (type == Integer.class) { + setValue(buffer.getInt()); + } else if (type == String.class) { + setValue(ByteBufferUtils.getString(buffer)); + } else if (type == Double.class) { + setValue(buffer.getDouble()); + } else if (type == Boolean.class) { + setValue(true); + } else { + System.err.println("unknown type for " + type); + } + } + + /** + * Checks if the config can be saved. + * @return {@code True} if so. + */ + public boolean canSave() { + if (type == Boolean.class) { + if ((Boolean) value == false) { + return false; + } + } + return !isDefaultValue(); + } + + /** + * Sets the value. + * @param value the value to set. + */ + @SuppressWarnings("unchecked") + public void setValue(Object value) { + if (value != this.value) { + this.defaultValue = false; + } + this.value = (T) value; + } + + /** + * Gets the value. + * @return the value. + */ + public T getValue() { + return value; + } + + /** + * Gets the opcode. + * @return the opcode. + */ + public int getOpcode() { + return opcode; + } + + /** + * Gets the defaultValue. + * @return the defaultValue. + */ + public boolean isDefaultValue() { + return defaultValue; + } + + /** + * Sets the defaultValue. 
+ * @param defaultValue the defaultValue to set. + */ + public void setDefaultValue(boolean defaultValue) { + this.defaultValue = defaultValue; + } + + /** + * Gets the type. + * @return the type. + */ + public Class getType() { + return type; + } +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/Node.java b/Tools/Arios Editor/src/org/arios/workspace/node/Node.java new file mode 100644 index 000000000..b603cf6b3 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/Node.java @@ -0,0 +1,195 @@ +package org.arios.workspace.node; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.arios.cache.def.Definition; + +/** + * Represents a node to configure. + * @author Vexia + */ +public abstract class Node { + + /** + * The definition. + */ + protected T definition; + + /** + * The id of the config. + */ + protected final int id; + + /** + * Constructs a new {@code node} {@code Object} + */ + @SuppressWarnings("unchecked") + public Node(int id) { + this.id = id; + this.definition = (T) forId(id); + setDefaultConfigs(); + } + + /** + * Sets the defaults configs. + */ + public abstract void setDefaultConfigs(); + + /** + * Gets the definition for the id. + * @param id the id. + * @return the definition. + */ + public abstract Definition forId(int id); + + /** + * Parses the config for the byte buffer. + * @param buffer + */ + public void parse(ByteBuffer buffer) { + int opcode; + Configuration config; + while ((opcode = buffer.get() & 0xFF) != 0) { + config = getByOpcode(opcode); + if (config != null) { + config.parse(buffer); + } + } + } + + /** + * Saves the configs to the byte buffer. + * @param buffer the buffer. 
+ */ + public void save(ByteBuffer buffer) { + for (Configuration config : getConfigurations().values()) { + if (config.canSave()) { + try { + config.save(buffer.put((byte) config.getOpcode())); + } catch (ClassCastException e) { + System.err.println("CONFIG OPCODE=" + config.getOpcode() + " VALUE=" + config.getValue() + " SET TYPE=" + config.getType()); + e.printStackTrace(); + } + } + } + buffer.put((byte) 0); + } + + /** + * Sets a config. + * @param name the name. + * @param value the value. + */ + public void setConfig(String name, Object value) { + Configuration config = definition.getConfiguration(name); + if (config == null) { + System.err.println("Error! No config set for name " + name + ", value=" + value); + return; + } + config.setValue(value); + } + + /** + * Gets the config by the opcode. + * @param opcode the opcode. + * @return the config. + */ + public Configuration getByOpcode(int opcode) { + for (Configuration c : definition.getConfigurations().values()) { + if (c.getOpcode() == opcode) { + return c; + } + } + return null; + } + + /** + * Gets the config value. + * @param name the name. + * @return the value. + */ + public Object getConfigValue(String name) { + if (!definition.getConfigurations().containsKey(name)) { + System.err.println("unfound config name - " + name); + return "null"; + } + return definition.getConfigurations().get(name).getValue(); + } + + /** + * Checks if this is a default config. + * @return {@code True} if so. + */ + public boolean isDefault() { + for (Configuration config : definition.getConfigurations().values()) { + if (!config.isDefaultValue()) { + return false; + } + } + return true; + } + + /** + * Gets the name. + * @return the name. + */ + public String getName() { + if (definition == null) { + return "Null"; + } + return definition.getName(); + } + + /** + * Gets the id. + * @return the id. + */ + public int getId() { + return id; + } + + /** + * Gets the configurations. + * @return the configurations. 
+ */ + public Map> getConfigurations() { + if (definition == null) { + return new HashMap<>(); + } + return definition.getConfigurations(); + } + + /** + * Gets the definition. + * @return the definition. + */ + public T getDefinition() { + return definition; + } + + /** + * Sets the definition. + * @param definition the definition to set. + */ + public void setDefinition(T definition) { + this.definition = definition; + } + + /** + * Prints the configs for a node. + */ + public void printConfigs() { + System.err.println("For type -" + this); + for (Entry> s : getConfigurations().entrySet()) { + System.err.println("Config name - " + s.getKey() + ", value=" + s.getValue().getValue() + "!"); + } + } + + @Override + public String toString() { + return getName() + " - " + id; + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/Item.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/Item.java new file mode 100644 index 000000000..9bedbb3ea --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/Item.java @@ -0,0 +1,229 @@ +package org.arios.workspace.node.item; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.def.Definition; +import org.arios.cache.def.impl.ItemDefinition; +import org.arios.cache.misc.ByteBufferUtils; +import org.arios.workspace.node.Configuration; +import org.arios.workspace.node.Node; + +/** + * An item. + * @author Vexia + * + */ +public final class Item extends Node { + + /** + * Constructs a new {@Code Item} {@Code Object} + * @param id the id. 
+ */ + public Item(int id) { + super(id); + } + + @Override + public void setDefaultConfigs() { + getConfigurations().put("tradeable", new Configuration(1, (boolean) false)); + getConfigurations().put("lendable", new Configuration(2, (boolean) false)); + getConfigurations().put("high_alch", new Configuration(3, (int) 0)); + getConfigurations().put("low_alch", new Configuration(4, (int) 0)); + getConfigurations().put("destroy", new Configuration(5, (boolean) false)); + getConfigurations().put("shop_price", new Configuration(6, (int) 0)); + getConfigurations().put("ge_price", new Configuration(7, (int) 0)); + getConfigurations().put("examine", new Configuration(8, "It's an item.") { + @Override + public void parse(ByteBuffer buffer) { + String s = ByteBufferUtils.getString(buffer); + while (s.length() > 0 && s.charAt(0) == ' ') { + s = s.substring(1, s.length()); + } + if (s.length() > 255) { + s = s.substring(0, 255); + } + setValue(s); + } + }); + getConfigurations().put("weight", new Configuration(9, (short) 0)); + getConfigurations().put("bonuses", new Configuration(10, new Short[15]) { + + @Override + public void parse(ByteBuffer buf) { + Short[] bonuses = new Short[15]; + for (int i = 0; i < bonuses.length; i++) { + bonuses[i] = buf.getShort(); + } + setValue(bonuses); + } + + @Override + public void save(ByteBuffer buf) { + Short[] bonuses = (Short[]) value; + for (int i = 0; i < bonuses.length; i++) { + buf.putShort((Short) bonuses[i]); + } + } + + @Override + public boolean canSave() { + if (Arrays.equals(new Short[15], (Short[]) value)) { + return false; + } + return super.canSave(); + } + }); + getConfigurations().put("absorb", new Configuration(11, new Short[3]) { + @Override + public void parse(ByteBuffer buf) { + Short[] absorb = new Short[3]; + for (int i = 0; i < absorb.length; i++) { + absorb[i] = buf.getShort(); + } + setValue(absorb); + } + + @Override + public void save(ByteBuffer buf) { + Short[] bonuses = (Short[]) value; + for (int i = 0; i < 
bonuses.length; i++) { + buf.putShort((short) bonuses[i]); + } + } + }); + getConfigurations().put("two_handed", new Configuration(12, false)); + getConfigurations().put("equip_slot", new Configuration(13, (byte) -1)); + getConfigurations().put("attack_speed", new Configuration(14, (byte) -1)); + getConfigurations().put("remove_head", new Configuration(15, false)); + getConfigurations().put("remove_beard", new Configuration(16, false)); + getConfigurations().put("remove_sleeves", new Configuration(17, false)); + getConfigurations().put("stand_anim", new Configuration(18, (int) -1)); + getConfigurations().put("stand_turn_anim", new Configuration(19, (int) -1)); + getConfigurations().put("walk_anim", new Configuration(20, (int) -1)); + getConfigurations().put("run_anim", new Configuration(21, (int) -1)); + getConfigurations().put("turn_180_anim", new Configuration(22, (int) -1)); + getConfigurations().put("turn_90_cw_anim", new Configuration(23, (int) -1)); + getConfigurations().put("turn_90_ccw_anim", new Configuration(24, (int) -1)); + getConfigurations().put("weapon_interface", new Configuration(25, (byte) 0)); + getConfigurations().put("has_special", new Configuration(26, (boolean) false)); + getConfigurations().put("attack_anims", new Configuration(27, new Short[] {}) { + @Override + public void parse(ByteBuffer buf) { + int size; + size = buf.get(); + Short[] animations = new Short[size]; + for (int i = 0; i < size; i++) { + animations[i] = buf.getShort(); + } + setValue(animations); + } + + @Override + public void save(ByteBuffer buf) { + Short[] anims = (Short[]) value; + buf.put((byte) anims.length); + for (Short anim : anims) { + buf.putShort(anim); + } + } + + @Override + public boolean canSave() { + Short[] val = (Short[]) value; + return val.length > 0 && super.canSave(); + } + }); + getConfigurations().put("destroy_message", new Configuration(28, "")); + getConfigurations().put("requirements", new Configuration>(29, new HashMap()) { + + @Override + 
public void parse(ByteBuffer buf) { + int size = buf.get() & 0xFF; + Map requirements = new HashMap<>(); + for (int i = 0; i < size; i++) { + int skill = buf.get() & 0xFF; + requirements.put(skill, buf.get() & 0xFF); + } + setValue(requirements); + } + + @Override + public void save(ByteBuffer buffer) { + Map req = (Map) value; + buffer.put((byte) req.size()); + for (int skill : req.keySet()) { + buffer.put((byte) skill); + buffer.put((byte) (int) req.get(skill)); + } + } + }); + getConfigurations().put("ge_limit", new Configuration(30, (int) 0)); + getConfigurations().put("defence_animation", new Configuration(31, (int) -1)); + getConfigurations().put("attack_audios", new Configuration(33, new Short[4]) { + @Override + public void parse(ByteBuffer buf) { + int size; + size = buf.get(); + Short[] audios = new Short[size]; + for (int i = 0; i < size; i++) { + audios[i] = buf.getShort(); + } + setValue(audios); + } + + @Override + public void save(ByteBuffer buf) { + Short[] anims = (Short[]) value; + buf.put((byte) anims.length); + //System.err.println(Arrays.toString(anims)); + for (Short anim : anims) { + if (anim == null) { + anim = 0; + } + buf.putShort(anim); + } + } + + @Override + public boolean canSave() { + Short[] anims = (Short[]) value; + if (anims == null) { + return false; + } + return super.canSave(); + } + }); + getConfigurations().put("pk_price", new Configuration(34, (int) 0)); + getConfigurations().put("spawnable", new Configuration(35, true) { + @Override + public void parse(ByteBuffer buf) { + setValue(false); + } + @Override + public boolean canSave() { + return super.getValue() == false; + } + }); + getConfigurations().put("trade-override", new Configuration(36, false)); + getConfigurations().put("bankable", new Configuration(37, true) { + @Override + public void parse(ByteBuffer buf) { + setValue(false); + } + @Override + public boolean canSave() { + return super.getValue() == false; + } + }); + getConfigurations().put("rare_item", new 
Configuration(38, false)); + } + + @Override + public Definition forId(int id) { + return ItemDefinition.forId(id); + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemEditor.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemEditor.java new file mode 100644 index 000000000..6a3c1032d --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemEditor.java @@ -0,0 +1,199 @@ +package org.arios.workspace.node.item; + +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.Map; + +import org.arios.cache.ServerStore; +import org.arios.cache.misc.ByteBufferUtils; +import org.arios.cache.misc.DefinitionSize; +import org.arios.workspace.editor.EditorTab; + +/** + * An item editor. + * @author Vexia + * + */ +public class ItemEditor extends EditorTab { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 9106740527672015864L; + + /** + * If we're converting the old format. + */ + private static boolean convert = false; + + /** + * Constructs a new {@Code Itemditor} {@Code Object} + * @param name the name. + */ + public ItemEditor(String name) { + super(name); + } + + @Override + public void parse() { + ByteBuffer buf = ServerStore.getArchive("item_config"); + Item item; + for (int id = 0; id < DefinitionSize.getItemDefinitionsSize(); id++) { + item = new Item(id); + if (convert) { + convertDump(item, buf); + } else { + item.parse(buf); + } + nodes.put(id, item); + } + } + + @Override + public boolean save() { + ByteBuffer buffer = ByteBuffer.allocate(3145680 << 1); + for (int id = 0; id < DefinitionSize.getItemDefinitionsSize(); id++) { + Item item = (Item) nodes.get(id); + item.save(buffer); + } + buffer.flip(); + ServerStore.setArchive("item_config", buffer, false); + return true; + } + + private void convertDump(Item item, ByteBuffer buf) { + int opcode; + int size; + while ((opcode = buf.get() & 0xFF) != 0) { + switch (opcode) { + case 1://Tradable. 
+ item.setConfig("tradeable", true); + break; + case 2://Lendable. + item.setConfig("lendable", true); + break; + case 3://High alch. + item.setConfig("high_alch", buf.getInt()); + break; + case 4://Low alch. + item.setConfig("low_alch", buf.getInt()); + break; + case 5://Destroy. + item.setConfig("destroy", true); + break; + case 6://Shop price. + item.setConfig("shop_price", buf.getInt()); + break; + case 7://GE price. + item.setConfig("ge_price", buf.getInt()); + break; + case 8://Examine. + String s = ByteBufferUtils.getString(buf); + while (s.length() > 0 && s.charAt(0) == ' ') { + s = s.substring(1, s.length()); + } + if (s.length() > 255) { + s = s.substring(0, 255); + } + item.setConfig("examine", s); + break; + case 9://Weight. + item.setConfig("weight", buf.getShort()); + break; + case 10://Bonuses + Short[] bonuses = new Short[15]; + for (int i = 0; i < bonuses.length; i++) { + bonuses[i] = buf.getShort(); + } + item.setConfig("bonuses", bonuses); + break; + case 11://Absorb. + Short[] absorb = new Short[3]; + for (int i = 0; i < absorb.length; i++) { + absorb[i] = buf.getShort(); + } + item.setConfig("absorb", absorb); + break; + case 12://Two handed. + item.setConfig("two_handed", true); + break; + case 13://Equipment slot. + item.setConfig("equip_slot", buf.get()); + break; + case 14://Attack speed. 
+ item.setConfig("attack_speed", buf.get()); + break; + case 15: + item.setConfig("remove_head", true); + break; + case 16: + item.setConfig("remove_beard", true); + break; + case 17: + item.setConfig("remove_sleeves", true); + break; + case 18: + int anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + + } + anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("stand_turn_anim", anim); + } + anim = buf.getShort() & 0xFFFF ; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("walk_anim", anim); + } + anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("run_anim", anim); + } + anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("turn_180_anim", anim); + } + anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("turn_90_cw_anim", anim); + } + anim = buf.getShort() & 0xFFFF; + if (anim < DefinitionSize.getAnimationDefinitionsSize()) { + item.setConfig("turn_90_ccw_anim", anim); + } + break; + case 19://Weapon interface. + item.setConfig("weapon_interface", buf.get()); + break; + case 20: //Has special attack bar. 
+ item.setConfig("has_special", true); + break; + case 21: + size = buf.get(); + Short[] animation = new Short[size]; + for (int i = 0; i < size; i++) { + animation[i] = buf.getShort(); + } + item.setConfig("attack_anims", animation); + break; + case 22: + item.setConfig("destroy_message", ByteBufferUtils.getString(buf)); + break; + case 23: + size = buf.get() & 0xFF; + Map requirements = new HashMap<>(); + for (int i = 0; i < size; i++) { + requirements.put(buf.get() & 0xFF, buf.get() & 0xFF); + } + item.setConfig("requirements", requirements); + break; + case 24: + item.setConfig("ge_limit", buf.getShort() & 0xFFFF); + break; + case 25: + item.setConfig("defence_animation", buf.getShort() & 0xFFFF); + break; + } + } + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemWrapper.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemWrapper.java new file mode 100644 index 000000000..e92538085 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/ItemWrapper.java @@ -0,0 +1,68 @@ +package org.arios.workspace.node.item; + + +/** + * An item wrapper. + * @author Vexia + * + */ +public class ItemWrapper { + + /** + * The id. + */ + private int id; + + /** + * The amount. + */ + private int amount; + + /** + * Constructs a new {@Code ItemWrapper} {@Code Object} + * @param id the id. + * @param amount the amount. + */ + public ItemWrapper(int id, int amount) { + this.id = id; + this.amount = amount; + } + + /** + * Gets the id. + * @return the id. + */ + public int getId() { + return id; + } + + @Override + public String toString() { + return "(" + id + ", " + amount + ")"; + } + + /** + * Sets the id. + * @param id the id to set + */ + public void setId(int id) { + this.id = id; + } + + /** + * Gets the amount. + * @return the amount. + */ + public int getAmount() { + return amount; + } + + /** + * Sets the amount. 
+ * @param amount the amount to set + */ + public void setAmount(int amount) { + this.amount = amount; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/Shop.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/Shop.java new file mode 100644 index 000000000..1012270ef --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/Shop.java @@ -0,0 +1,268 @@ +package org.arios.workspace.node.item.shop; + +import java.nio.ByteBuffer; +import java.util.Arrays; + +import org.arios.cache.def.Definition; +import org.arios.workspace.node.Configuration; +import org.arios.workspace.node.Node; +import org.arios.workspace.node.item.ItemWrapper; +import org.arios.workspace.node.item.shop.Shop.ShopDefinition; + +/** + * A shop. + * @author Vexia + * + */ +public class Shop extends Node { + + /** + * Represents the general store items. + */ + public final static ItemWrapper[] GENERAL_STORE = {new ItemWrapper(1931, 30), new ItemWrapper(1935, 30), new ItemWrapper(1735, 10), new ItemWrapper(1925, 10), new ItemWrapper(1923, 10), new ItemWrapper(1887, 10), new ItemWrapper(590, 10), new ItemWrapper(1755, 10), new ItemWrapper(2347, 10), new ItemWrapper(550, 10), new ItemWrapper(9003, 10)}; + + /** + * Represents the title of the shop. + */ + private String title; + + /** + * Represents the ItemWrappers in the store. + */ + private ItemWrapper[] items; + + /** + * Represents if it's a general store. + */ + private boolean general; + + /** + * Represents the currency the shop allows. + */ + private int currency; + + /** + * Represents the owners of the shop. + */ + private int[] npcs; + + /** + * If the shop buys for high alch. + */ + private boolean highAlch; + + /** + * Constructs a new {@Code Shop} {@Code Object} + * @param title the title. + * @param items the items. + * @param general the general. + * @param currency the currency. + * @param npcs the npcs. + * @param highAlch the high alch. 
+ */ + public Shop(String title, ItemWrapper[] items, boolean general, int currency, int[] npcs, boolean highAlch) { + super(title.hashCode()); + this.title = title; + this.items = items; + this.general = general; + this.currency = currency; + this.npcs = npcs; + this.highAlch = highAlch; + setDefaultConfigs(); + setConfigs(); + } + + /** + * Constructs a new {@Code Shop} {@Code Object} + * @param title the title. + * @param items the items. + * @param general the general. + * @param currency the currency. + * @param npcs the npcs. + * @param highAlch the high alch. + */ + public Shop(String title2, boolean general2, int currency2, int[] npcs, boolean highAlch2) { + this(title2, GENERAL_STORE, general2, currency2, npcs, highAlch2); + } + + /** + * Sets the configs. + */ + public void setConfigs() { + setConfig("title", title); + setConfig("items", items); + setConfig("general", general); + setConfig("currency", currency); + setConfig("npcs", npcs); + setConfig("highAlch", highAlch); + } + + /** + * Sets from the configs. + */ + public void setFromConfigs() { + title = (String) getConfigValue("title"); + items = (ItemWrapper[]) getConfigValue("items"); + general = (boolean) getConfigValue("general"); + currency = (int) getConfigValue("currency"); + highAlch = (boolean) getConfigValue("highAlch"); + } + + /** + * Gets the title. + * @return the title. + */ + public String getTitle() { + return title; + } + + /** + * Gets the items. + * @return the items. + */ + public ItemWrapper[] getItems() { + return items; + } + + /** + * Gets the general. + * @return the general. + */ + public boolean isGeneral() { + return general; + } + + /** + * Gets the currency. + * @return the currency. + */ + public int getCurrency() { + return currency; + } + + /** + * Gets the npcs. + * @return the npcs. + */ + public int[] getNpcs() { + return npcs; + } + + /** + * Gets the highAlch. + * @return the highAlch. 
+ */ + public boolean isHighAlch() { + return highAlch; + } + + /** + * Sets the title. + * @param title the title to set + */ + public void setTitle(String title) { + this.title = title; + } + + /** + * Sets the items. + * @param items the items to set + */ + public void setItems(ItemWrapper[] items) { + this.items = items; + } + + /** + * Sets the general. + * @param general the general to set + */ + public void setGeneral(boolean general) { + this.general = general; + } + + /** + * Sets the currency. + * @param currency the currency to set + */ + public void setCurrency(int currency) { + this.currency = currency; + } + + /** + * Sets the npcs. + * @param npcs the npcs to set + */ + public void setNpcs(int[] npcs) { + this.npcs = npcs; + } + + /** + * Sets the highAlch. + * @param highAlch the highAlch to set + */ + public void setHighAlch(boolean highAlch) { + this.highAlch = highAlch; + } + + @Override + public String toString() { + return getTitle() + ", " + Arrays.toString(npcs) + ""; + } + + @Override + public String getName() { + return getTitle(); + } + + @Override + public void save(ByteBuffer buffer) { + + } + + @Override + public void setDefaultConfigs() { + getConfigurations().put("title", new Configuration(1, "Shop")); + getConfigurations().put("items", new Configuration(2, new ItemWrapper[] {})); + getConfigurations().put("general", new Configuration(3, false)); + getConfigurations().put("currency", new Configuration(4, 995)); + getConfigurations().put("npcs", new Configuration(6, new Integer[] {})); + getConfigurations().put("highAlch", new Configuration(7, false)); + } + + @Override + public Definition forId(int id) { + return new ShopDefinition(this); + } + + /** + * The shop definitions. + * @author Vexia + * + */ + public static class ShopDefinition extends Definition { + + private Shop shop; + + public ShopDefinition(Shop shop) { + this.setShop(shop); + } + + /** + * Gets the shop. + * @return the shop. 
+ */ + public Shop getShop() { + return shop; + } + + /** + * Sets the shop. + * @param shop the shop to set + */ + public void setShop(Shop shop) { + this.shop = shop; + } + + + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopEditor.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopEditor.java new file mode 100644 index 000000000..8022d3b2b --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopEditor.java @@ -0,0 +1,43 @@ +package org.arios.workspace.node.item.shop; + +import org.arios.workspace.editor.EditorTab; +import org.arios.workspace.editor.NodeEditor; +import org.arios.workspace.node.Node; + +/** + * Handles the shop editor. + * @author Vexia + * + */ +public class ShopEditor extends EditorTab { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 868480191503316753L; + + /** + * Constructs a new {@Code ShopEditor} {@Code Object} + * @param name the name. + */ + public ShopEditor(String name) { + super(name); + } + + @Override + public NodeEditor getEditor(Node edit) { + return new ShopPanel(edit); + } + + @Override + public void parse() { + ShopManager.parse(); + } + + @Override + public boolean save() { + ShopManager.save(); + return true; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopManager.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopManager.java new file mode 100644 index 000000000..bbaa34b48 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopManager.java @@ -0,0 +1,128 @@ +package org.arios.workspace.node.item.shop; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.List; + +import org.arios.cache.ServerStore; +import org.arios.cache.misc.ByteBufferUtils; +import org.arios.workspace.WorkSpace; +import org.arios.workspace.editor.EditorType; +import org.arios.workspace.node.item.ItemWrapper; + +/** + * Manages the shops. 
+ * @author Vexia + * + */ +public class ShopManager { + + /** + * The list of shops. + */ + private static final List SHOPS = new ArrayList<>(); + + /** + * Constructs a new {@Code ShopManager} {@Code Object} + */ + public ShopManager() { + /** + * empty. + */ + } + + /** + * Parses the shops. + */ + public static void parse() { + ByteBuffer buf = ServerStore.getArchive("shop_data"); + @SuppressWarnings("unused") + int uid; + Shop shop; + String title; + boolean general; + ItemWrapper[] stock = null; + int size; + int[] npcs; + boolean highAlch; + int currency; + while ((uid = buf.getShort()) != 0) { + title = ByteBufferUtils.getString(buf); + general = buf.get() == 1; + boolean items = buf.get() == 1; + if (items) { + size = buf.get(); + stock = new ItemWrapper[size]; + for (int i = 0; i < size; i++) { + stock[i] = new ItemWrapper(buf.getShort(), buf.getInt()); + } + } + size = buf.get(); + npcs = new int[size]; + for (int i = 0; i < size; i++) { + npcs[i] = buf.getShort(); + } + highAlch = buf.get() == 1; + currency = buf.getShort(); + if (general && !items) { + shop = new Shop(title, general, currency, npcs, highAlch); + } else { + shop = new Shop(title, stock, general, currency, npcs, highAlch); + } + SHOPS.add(shop); + title = null; + stock = null; + npcs = null; + EditorType.SHOP.getTab().getNodes().put(shop.getName().hashCode(), shop); + } + } + + /** + * Saves the buffer. + */ + public static void save() { + ByteBuffer buffer = ByteBuffer.allocate(6666666); + for (int i = 0; i < SHOPS.size(); i++) { + Shop shop = SHOPS.get(i); + String title = shop.getTitle(); + buffer.putShort((short) (i + 1));//uid + ByteBufferUtils.putString(title, buffer);//title + boolean general = shop.isGeneral(); + buffer.put((byte) (general ? 1 : 0)); + boolean items = !shop.getItems().equals(Shop.GENERAL_STORE); + buffer.put((byte) (items ? 
1 : 0)); + if (items) { + buffer.put((byte) shop.getItems().length);//length of array + for (ItemWrapper item : shop.getItems()) { + buffer.putShort((short) item.getId());//itemId + buffer.putInt(item.getAmount());//amount + } + } + if (shop.getNpcs() == null || shop.getNpcs().length == 0) { + // System.err.println("No npcs for shop " + shop.getTitle()); + } + if (shop.getNpcs() == null) { + shop.setNpcs(new int [] {}); + } + buffer.put((byte) shop.getNpcs().length);//shop length + for (int npc : shop.getNpcs()) { + buffer.putShort((short) npc); + } + buffer.put((byte) (shop.isHighAlch() ? 1 : 0));//if high alch. + buffer.putShort((short) shop.getCurrency()); + } + buffer.putShort((short) 0); + buffer.flip(); + ServerStore.setArchive("shop_data", buffer, false); + ServerStore.createStaticStore(WorkSpace.getWorkSpace().getSettings().getStorePath()); + } + + /** + * Gets the shops. + * @return the shops. + */ + public static List getShops() { + return SHOPS; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopPanel.java b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopPanel.java new file mode 100644 index 000000000..53ad861a9 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/item/shop/ShopPanel.java @@ -0,0 +1,26 @@ +package org.arios.workspace.node.item.shop; + +import org.arios.workspace.editor.NodeEditor; +import org.arios.workspace.node.Node; + +/** + * The shop panel. + * @author aVexia + * + */ +public class ShopPanel extends NodeEditor { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 1121989616279279951L; + + /** + * Constructs a new {@Code ShopPanel} {@Code Object} + * @param node the node. 
+ */ + public ShopPanel(Node node) { + super(node); + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/DropFrequency.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/DropFrequency.java new file mode 100644 index 000000000..1c39e8a22 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/DropFrequency.java @@ -0,0 +1,50 @@ +package org.arios.workspace.node.npc; + +/** + * Represents the different types of drop frequency. + * @author Emperor + * + */ +public enum DropFrequency { + + /** + * This gets dropped all the time. + */ + ALWAYS, + + /** + * This gets commonly dropped. + */ + COMMON, + + /** + * This drop is uncommon. + */ + UNCOMMON, + + /** + * This gets rarely dropped. + */ + RARE, + + /** + * This gets very rarely dropped. + */ + VERY_RARE; + + + /** + * Gets the frequency by the name. + * @param name the name. + * @return the value. + */ + public static DropFrequency forName(String name) { + for (DropFrequency n : values()) { + if (n.name().equals(name)) { + return n; + } + } + return null; + } + +} \ No newline at end of file diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPC.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPC.java new file mode 100644 index 000000000..c856583da --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPC.java @@ -0,0 +1,148 @@ +package org.arios.workspace.node.npc; + +import java.nio.ByteBuffer; +import java.util.List; + +import org.arios.cache.def.Definition; +import org.arios.cache.def.impl.NPCDefinition; +import org.arios.workspace.node.Configuration; +import org.arios.workspace.node.Node; + +/** + * An npc. + * @author Vexia + * + */ +public final class NPC extends Node { + + /** + * Constructs a new {@Code NPC} {@Code Object} + * @param id the id. 
+ */ + public NPC(int id) { + super(id); + } + + @Override + public void setDefaultConfigs() { + getConfigurations().put("lifepoints", new Configuration(1, (short) 10)); + getConfigurations().put("attack_level", new Configuration(2, (short) 1)); + getConfigurations().put("strength_level", new Configuration(3, (short) 1)); + getConfigurations().put("defence_level", new Configuration(4, (short) 1)); + getConfigurations().put("range_level", new Configuration(5, (short) 1)); + getConfigurations().put("magic_level", new Configuration(6, (short) 1)); + getConfigurations().put("examine", new Configuration(7, "It's an NPC.")); + getConfigurations().put("poison_amount", new Configuration(8, (byte) 0)); + getConfigurations().put("poison_immune", new Configuration(9, false)); + getConfigurations().put("respawn", new Configuration(10, (byte) 17)); + getConfigurations().put("attack_speed", new Configuration(11, (byte) 4)); + getConfigurations().put("movement_radius", new Configuration(12, (byte) 10)); + getConfigurations().put("aggressive_radius", new Configuration(13, (byte) 0)); + getConfigurations().put("attack_animation", new Configuration(14, (short) -1)); + getConfigurations().put("defence_animation", new Configuration(15, (short) -1)); + getConfigurations().put("death_animation", new Configuration(16, (short) -1)); + getConfigurations().put("spawn_animation", new Configuration(17, (short) -1)); + getConfigurations().put("attack_graphic", new Configuration(18, (short) -1)); + getConfigurations().put("attack_projectile", new Configuration(19, (short) -1)); + getConfigurations().put("impact_graphic", new Configuration(20, (short) -1)); + getConfigurations().put("weakness", new Configuration(21, (byte) -1)); + getConfigurations().put("slayer_task", new Configuration(22, (byte) -1)); + getConfigurations().put("slayer_experience", new Configuration(23, 0.0)); + getConfigurations().put("combat_style", new Configuration(24, (byte) 0)); + getConfigurations().put("poisonous", new 
Configuration(40, false)); + getConfigurations().put("aggressive", new Configuration(41, false)); + getConfigurations().put("start_height", new Configuration(42, (byte) 0)); + getConfigurations().put("prj_height", new Configuration(45, (byte) 42)); + getConfigurations().put("end_height", new Configuration(46, (byte) 96)); + getConfigurations().put("magic_animation", new Configuration(43, (short) -1)); + getConfigurations().put("range_animation", new Configuration(44, (short) -1)); + getConfigurations().put("clue_level", new Configuration(47, (byte) 0)); + getConfigurations().put("spell_id", new Configuration(48, (short) -1)); + for (int i = 0; i < 15; i++) { + getConfigurations().put("bonus-" + i, new Configuration(25 + i, (short) 0)); + } + getConfigurations().put("combat_audios", new Configuration(49, new Short[3]) { + @Override + public void parse(ByteBuffer buf) { + int size; + size = buf.get(); + Short[] audios = new Short[size]; + for (int i = 0; i < size; i++) { + audios[i] = buf.getShort(); + } + setValue(audios); + } + + @Override + public void save(ByteBuffer buf) { + Short[] anims = (Short[]) value; + buf.put((byte) anims.length); + for (Short anim : anims) { + if (anim == null) { + anim = 0; + } + buf.putShort(anim); + } + } + + @Override + public boolean canSave() { + Short[] anims = (Short[]) value; + if (anims == null) { + return false; + } + return super.canSave(); + } + }); + } + + /** + * Displays the drops in the output. + */ + public void displayDrops() { + for (TableType type : TableType.values()) { + List drops = NPCDropManager.getDrops(getId(), type); + for (NPCDrop drop : drops) { + System.err.println(drop); + } + } + } + + /** + * Gets the npc drop table. + * @param type the type. + * @return the npc drops. + */ + public NPCDrop[] getDrobTable(TableType type) { + return NPCDropManager.getDropTable(id, type); + } + + /** + * Adds a drop. + * @param drop the drop. + * @param type the type. 
+ */ + public void addDrop(NPCDrop drop, TableType type) { + NPCDropManager.getDrops(id, type).add(drop); + } + + /** + * Removes the drop. + * @param drop the drop. + */ + public void removeDrop(NPCDrop drop, TableType type) { + NPCDropManager.getDrops(id, type).remove(drop); + } + + @Override + public Definition forId(int id) { + return NPCDefinition.forId(id); + } + + @Override + public String toString() { + return super.toString() + " (cb=" + definition.getCombatLevel() + ")"; + } + + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDrop.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDrop.java new file mode 100644 index 000000000..fd8e1d1ef --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDrop.java @@ -0,0 +1,208 @@ +package org.arios.workspace.node.npc; + +import java.nio.ByteBuffer; + +import org.arios.cache.def.impl.ItemDefinition; + +/** + * Represents an npc drop. + * @author Vexia + * + */ +public class NPCDrop { + + /** + * The item id. + */ + private int itemId; + + /** + * The minimum amount. + */ + private int minAmount; + + /** + * The maximum amount. + */ + private int maxAmount; + + /** + * The chance. + */ + private double chance; + + /** + * The drop frequency. + */ + private DropFrequency frequency; + + /** + * The set rate. + */ + private int setRate = -1; + + /** + * Constructs a new {@code TableRow} {@code Object} + * @param itemId the item id. + * @param minAmount the min amount. + * @param maxAmount the max amount. + * @param chance the chance. + * @param frequency the freqnecy. + */ + public NPCDrop(int itemId, int minAmount, int maxAmount, double chance, DropFrequency frequency, int setRate) { + this.itemId = itemId; + this.minAmount = minAmount; + this.maxAmount = maxAmount; + this.chance = chance; + this.frequency = frequency; + this.setSetRate(setRate); + } + + /** + * Saves to the byte buffer. + * @param buffer the buffer. 
+ */ + public void save(ByteBuffer buffer) { + buffer.putShort((short) itemId); + buffer.putInt(minAmount); + buffer.putInt(maxAmount); + buffer.putDouble(chance); + buffer.put((byte) frequency.ordinal()); + buffer.put((byte) (setRate == -1 ? 0 : 1)); + if (setRate != -1) { + buffer.putInt(setRate); + } + } + + /** + * Creates a table row. + * @param itemId the item id. + * @param buffer the buffer. + * @return the row. + */ + public static NPCDrop create(int itemId, ByteBuffer buffer) { + //buffer.get() == 0 ? -1 : buffer.getInt() + //http://puu.sh/bxHJ3/0acb4ea8d9.png + return new NPCDrop(itemId, buffer.getInt(), buffer.getInt(), buffer.getDouble(), DropFrequency.values()[buffer.get()], buffer.get() == 0 ? -1 : buffer.getInt()); + } + + /** + * Gets the data. + * @return the data. + */ + public Object[] getData() { + return new Object[] {String.valueOf(itemId), String.valueOf(minAmount), String.valueOf(maxAmount), String.valueOf(chance), frequency.name(), String.valueOf(setRate)}; + } + + /** + * Sets the minAmount. + * @param minAmount the minAmount to set. + */ + public void setMinAmount(int minAmount) { + this.minAmount = minAmount; + } + + /** + * Sets the maxAmount. + * @param maxAmount the maxAmount to set. + */ + public void setMaxAmount(int maxAmount) { + this.maxAmount = maxAmount; + } + + /** + * Sets the itemId. + * @param itemId the itemId to set. + */ + public void setItemId(int itemId) { + this.itemId = itemId; + } + + /** + * Sets the chance. + * @param chance the chance to set. + */ + public void setChance(double chance) { + this.chance = chance; + } + + /** + * Sets the frequency. + * @param frequency the frequency to set. + */ + public void setFrequency(DropFrequency frequency) { + this.frequency = frequency; + } + + /** + * Gets the max amount. + * @return the amount. + */ + public int getMaxAmount() { + return maxAmount; + } + + /** + * Gets the min amount. + * @return the min amount. 
+ */ + public int getMinAmount() { + return minAmount; + } + + /** + * Gets the name. + * @return the name. + */ + public String getName() { + return ItemDefinition.forId(itemId).getName(); + } + + /** + * Gets the itemId. + * @return the itemId. + */ + public int getItemId() { + return itemId; + } + + /** + * Gets the chance. + * @return the chance. + */ + public double getChance() { + return chance; + } + + /** + * Gets the frequency. + * @return the frequency. + */ + public DropFrequency getFrequency() { + return frequency; + } + + @Override + public String toString() { + return "NPCDrop [itemId=" + itemId + ", name=" + getName() + ", minAmount=" + minAmount + + ", maxAmount=" + maxAmount + ", chance=" + chance + + ", frequency=" + frequency + "]"; + } + + /** + * Gets the setRate. + * @return the setRate. + */ + public int getSetRate() { + return setRate; + } + + /** + * Sets the setRate. + * @param setRate the setRate to set + */ + public void setSetRate(int setRate) { + this.setRate = setRate; + } + +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropManager.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropManager.java new file mode 100644 index 000000000..3d4dbde6e --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropManager.java @@ -0,0 +1,213 @@ +package org.arios.workspace.node.npc; + +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; +import java.util.Map.Entry; + +import org.arios.cache.ServerStore; +import org.arios.cache.misc.DefinitionSize; + +/** + * Manages the drop tables of npcs. + * @author Vexia + * + */ +public class NPCDropManager { + + /** + * The mapping of npc drop tables. + */ + private static final Map DROP_TABLES = new HashMap<>(); + + /** + * Constructs a new {@Code NPCDropManager} {@Code Object} + */ + public NPCDropManager() { + /** + * empty. + */ + } + + /** + * Parses the npc drops. 
+ */ + public static void parse() { + ByteBuffer buffer = ServerStore.getArchive("npc_drops"); + int npcId = -1; + while ((npcId = buffer.getShort()) != -1) { + buffer.getShort(); + DropTable table = new DropTable(); + TableType type = null; + int tableOpcode = -1; + DROP_TABLES.put(npcId, table); + while ((tableOpcode = buffer.get()) != 0) { + switch (tableOpcode) { + case 1: + type = TableType.DEFAULT; + break; + case 2: + type = TableType.CHARM; + break; + case 3: + type = TableType.MAIN; + break; + } + int itemId = -1; + while ((itemId = buffer.getShort()) != -1) { + table.addDrop(type, NPCDrop.create(itemId, buffer)); + } + } + } + } + + /** + * Dumps the npc drops. + */ + public static void save() { + int capacity = 10000000; + ByteBuffer buffer = ByteBuffer.allocate(capacity); + for (int i = 0; i < DefinitionSize.getNPCDefinitionsSize(); i++) { + buffer.putShort((short) i); //Npc id. + buffer.putShort((short) -1); + for (Entry> entry : NPCDropManager.getTable(i).getDropTable().entrySet()) { + buffer.put((byte) entry.getKey().opcode()); + for (NPCDrop drop : entry.getValue()) { + drop.save(buffer); + } + buffer.putShort((short) -1); + } + buffer.put((byte) 0); + } + buffer.putShort((short) -1); + ServerStore.setArchive("npc_drops", (ByteBuffer) buffer.flip(), false); + } + + /** + * Gets an npc drop table. + * @param id the id. + * @param type the type. + * @return the drop. + */ + public static NPCDrop[] getDropTable(int id, TableType type) { + return getDrops(id, type).toArray(new NPCDrop[] {}); + } + + /** + * Gets the drop tables. + * @param id the id. + * @return the table. + */ + public Map> getDropTables(int id) { + DropTable table = getTable(id); + return table.getDropTable(); + } + + /** + * Creates a new table for an npc. + * @param id the id. 
+ */ + public static void addTable(int id) { + if (!DROP_TABLES.containsKey(id)) { + DROP_TABLES.put(id, new DropTable()); + } else { + System.out.println("Already had a drop table!"); + } + } + + /** + * Gets the list of npc drops. + * @param id the id. + * @param type the type. + * @return the list of drops. + */ + public static List getDrops(int id, TableType type) { + DropTable table = getTable(id); + return table.getDrops(type); + } + + /** + * Gets the drop table. + * @param id the id. + * @return the table + */ + public static DropTable getTable(int id) { + DropTable table = getDropTables().get(id); + if (table == null) { + addTable(id); + table = getDropTables().get(id); + } + return table; + } + + /** + * Gets the dropTables. + * @return the dropTables. + */ + public static Map getDropTables() { + return DROP_TABLES; + } + + /** + * A drop table. + * @author Vexia + * + */ + public static class DropTable { + + /** + * The drop table. + */ + private final Map> dropTable = new HashMap<>(); + + /** + * Constructs a new {@Code DropTable} {@Code Object} + */ + public DropTable() { + for (TableType type : TableType.values()) { + dropTable.put(type, new ArrayList()); + } + } + + /** + * Adds a drop. + * @param type the type. + * @param drop the drop. + */ + public void addDrop(TableType type, NPCDrop drop) { + List drops = dropTable.get(type); + drops.add(drop); + } + + /** + * Removes a drop. + * @param type the type. + * @param drop the drop. + */ + public void removeDrop(TableType type, NPCDrop drop) { + List drops = dropTable.get(type); + drops.remove(drop); + } + + /** + * Gets a list of drops for a type. + * @param type the type. + * @return the type. + */ + public List getDrops(TableType type) { + return dropTable.get(type); + } + + /** + * Gets the dropTable. + * @return the dropTable. 
+ */ + public Map> getDropTable() { + return dropTable; + } + + } + + } diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropPanel.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropPanel.java new file mode 100644 index 000000000..1438c679a --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCDropPanel.java @@ -0,0 +1,379 @@ +package org.arios.workspace.node.npc; + +import java.awt.Component; +import java.awt.GridLayout; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; + +import javax.swing.DefaultCellEditor; +import javax.swing.JButton; +import javax.swing.JComboBox; +import javax.swing.JComponent; +import javax.swing.JLabel; +import javax.swing.JOptionPane; +import javax.swing.JPanel; +import javax.swing.JScrollPane; +import javax.swing.JTabbedPane; +import javax.swing.JTable; +import javax.swing.JTextField; +import javax.swing.border.EtchedBorder; +import javax.swing.table.TableCellRenderer; +import javax.swing.table.TableColumn; + +import org.arios.cache.misc.StringUtils; + +/** + * The panel used for editing npc drops. + * @author Vexia + * + */ +public class NPCDropPanel extends JPanel { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 7628103873857408009L; + + /** + * The npc drop tabs. + */ + private final JTabbedPane tabs = new JTabbedPane(); + + /** + * The npc displaying. + */ + private final NPC npc; + + /** + * Constructs a new {@Code NPCDropPanel} {@Code Object} + * @param npc the npc. + */ + public NPCDropPanel(NPC npc) { + super(); + this.npc = npc; + setLayout(null); + tabs.setBounds(0, 0, 634, 383); + add(tabs); + for (TableType type : TableType.values()) { + addTab(type); + } + } + + /** + * Saves the panel. + */ + public void save() { + for (int i = 0; i < 3; i++) { + Component comp = tabs.getComponentAt(i); + DropTable table = (DropTable) comp; + table.save(); + } + } + + /** + * Adds a drop table tab. + * @param type the type. 
+ */ + public void addTab(TableType type) { + tabs.addTab(StringUtils.formatDisplayName(type.name().toLowerCase()), new DropTable(npc, type)); + } + + /** + * Gets the npc. + * @return the npc. + */ + public NPC getNpc() { + return npc; + } + + /** + * A drop table. + * @author Vexia + * + */ + public class DropTable extends JPanel { + + /** + * The serial UID. + */ + private static final long serialVersionUID = -2901744614661688478L; + + /** + * The table type. + */ + private TableType type; + + /** + * The npc. + */ + private final NPC npc; + + /** + * The table pane. + */ + private JScrollPane tablePane; + + /** + * The table. + */ + private JTable table; + + /** + * The add drop jbutton. + */ + private JButton add = new JButton("Add"); + + /** + * Constructs a new {@Code DropTable} {@Code Object} + * @param npc the npc. + * @param type the type. + */ + public DropTable(NPC npc, TableType type) { + super(); + setLayout(null); + this.npc = npc; + this.type = type; + createTable(); + add.setBounds(510, 10, 70, 20); + add.addActionListener(new ActionListener() { + @Override + public void actionPerformed(ActionEvent e) { + showAddDrop(); + } + }); + add(add); + } + + + /** + * Shows the add drop. 
+ */ + public void showAddDrop() { + JPanel panel = new JPanel(new GridLayout(0, 1)); + panel.add(new JLabel("Enter item id:")); + JTextField idField = new JTextField(""); + panel.add(idField); + panel.add(new JTextField("Min amount:")); + JTextField minAmount = new JTextField("1"); + panel.add(minAmount); + panel.add(new JTextField("Max amount:")); + JTextField maxAmount = new JTextField("1"); + panel.add(maxAmount); + panel.add(new JTextField("Chance rate:")); + JTextField chanceRate = new JTextField("0.0"); + panel.add(chanceRate); + panel.add(new JTextField("Drop frequency:")); + JComboBox combo = new JComboBox(new String[] {"ALWAYS", "COMMON", "UNCOMMON", "RARE", "VERY_RARE"}); + combo.setSelectedIndex(1); + panel.add(combo); + panel.add(new JLabel("Set rate:")); + JTextField rate = new JTextField("Enter rate:"); + panel.add(rate); + int result = JOptionPane.showConfirmDialog(null, panel, "Drop Creator", JOptionPane.OK_CANCEL_OPTION, JOptionPane.PLAIN_MESSAGE); + if (result == JOptionPane.OK_OPTION) { + int id = Integer.parseInt(idField.getText()); + int min = Integer.parseInt(minAmount.getText()); + int max = Integer.parseInt(maxAmount.getText()); + double chance = Double.parseDouble(chanceRate.getText()); + DropFrequency freq = DropFrequency.forName((String) combo.getSelectedItem()); + int setRate = rate.getText().equals("Enter rate:") ? -1 : Integer.parseInt(rate.getText()); + NPCDrop drop = new NPCDrop(id, min, max, chance, freq, setRate); + addDrop(drop); + reset(); + } + } + + /** + * Adds drops. + * @param drop the drop. + */ + private void addDrop(NPCDrop drop) { + npc.addDrop(drop, type); + } + + /** + * Saves the tab. + */ + public void save() { + for (int i = 0; i < table.getColumnCount(); i++) { + for (int k = 0; k < table.getRowCount(); k++) { + Object val = table.getValueAt(k, i); + NPCDrop drop = npc.getDrobTable(type)[k]; + saveDrop(drop, val, i); + } + } + } + + /** + * Saves the drop. + * @param drop the drop. + * @param val the value. 
+ * @param column the column. + */ + public void saveDrop(NPCDrop drop, Object val, int column) { + String v = (String) val; + switch (column) { + case 0: + drop.setItemId(Integer.parseInt(v)); + break; + case 1: + drop.setMinAmount(Integer.parseInt(v)); + break; + case 2: + drop.setMaxAmount(Integer.parseInt(v)); + break; + case 3: + drop.setChance(Double.parseDouble(v)); + break; + case 4: + drop.setFrequency(DropFrequency.forName(v)); + break; + case 5: + drop.setSetRate(Integer.parseInt(v)); + break; + } + } + + /** + * Creates a table. + */ + @SuppressWarnings("serial") + public void createTable() { + table = new JTable(asObjects(npc.getDrobTable(type)), new String[] {"Item", "Min", "Max", "Frequency", "Rate", "Set rate"}) { + @Override + public Component prepareRenderer(TableCellRenderer renderer,int row, int col) { + Component comp = super.prepareRenderer(renderer, row, col); + JComponent jcomp = (JComponent)comp; + if (comp == jcomp) { + if (col == 0) { + NPCDrop drop = npc.getDrobTable(type)[row]; + jcomp.setToolTipText("" + drop.getName()); + } + } + return comp; + } + }; + table.addMouseListener(new java.awt.event.MouseAdapter() { + @Override + public void mouseClicked(java.awt.event.MouseEvent evt) { + int row = table.rowAtPoint(evt.getPoint()); + int col = table.columnAtPoint(evt.getPoint()); + if (row >= 0 && col >= 0 && evt.getButton() == 3) { + int value = JOptionPane.showConfirmDialog(null, "Delete this row?"); + if (value == 0) { + removeDrop(row); + } + } + } + + }); + tablePane = new JScrollPane(table); + tablePane.setBounds(4, 0, 488, 338); + tablePane.setBorder(new EtchedBorder(EtchedBorder.LOWERED, null, null)); + tablePane.setVerticalScrollBarPolicy(JScrollPane.VERTICAL_SCROLLBAR_ALWAYS); + tablePane.setHorizontalScrollBarPolicy(JScrollPane.HORIZONTAL_SCROLLBAR_ALWAYS); + add(tablePane); + TableColumn sportColumn = table.getColumnModel().getColumn(4); + JComboBox comboBox = new JComboBox(); + comboBox.addItem("ALWAYS"); + 
comboBox.addItem("COMMON"); + comboBox.addItem("UNCOMMON"); + comboBox.addItem("RARE"); + comboBox.addItem("VERY_RARE"); + sportColumn.setCellEditor(new DefaultCellEditor(comboBox)); + } + + /** + * Removes the drop. + * @param row the row. + */ + private void removeDrop(int row) { + NPCDrop drop = npc.getDrobTable(type)[row]; + if (drop != null) { + npc.removeDrop(drop, type); + reset(); + } + } + + /** + * Resets the view. + */ + public void reset() { + remove(tablePane); + createTable(); + } + + /** + * Gets the tables as objects. + * @param rows the rows. + * @return the rows. + */ + public Object[][] asObjects(NPCDrop...rows) { + if (rows == null) { + return new Object[][] {{1, 1, 1, 0.0, DropFrequency.COMMON}}; + } + Object[][] data = new Object[rows.length][5]; + for (int i = 0; i < rows.length; i++) { + data[i] = rows[i].getData(); + } + return data; + } + + /** + * Gets the npc. + * @return the npc. + */ + public NPC getNpc() { + return npc; + } + + /** + * Gets the type. + * @return the type. + */ + public TableType getType() { + return type; + } + + /** + * Sets the type. + * @param type the type to set + */ + public void setType(TableType type) { + this.type = type; + } + + /** + * Gets the tablePane. + * @return the tablePane. + */ + public JScrollPane getTablePane() { + return tablePane; + } + + /** + * Sets the tablePane. + * @param tablePane the tablePane to set + */ + public void setTablePane(JScrollPane tablePane) { + this.tablePane = tablePane; + } + + /** + * Gets the table. + * @return the table. + */ + public JTable getTable() { + return table; + } + + /** + * Sets the table. 
+ * @param table the table to set + */ + public void setTable(JTable table) { + this.table = table; + } + + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCEditor.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCEditor.java new file mode 100644 index 000000000..5fae288ee --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/NPCEditor.java @@ -0,0 +1,71 @@ +package org.arios.workspace.node.npc; + +import java.nio.ByteBuffer; + +import org.arios.cache.ServerStore; +import org.arios.cache.misc.DefinitionSize; +import org.arios.workspace.editor.EditorTab; + +/** + * An npc editor. + * @author Vexia + * + */ +public class NPCEditor extends EditorTab { + + /** + * The serial UID. + */ + private static final long serialVersionUID = 9106740527672015864L; + + /** + * Constructs a new {@Code NPCEditor} {@Code Object} + * @param name the name. + */ + public NPCEditor(String name) { + super(name); + } + + @Override + public void parse() { + ByteBuffer buf = ServerStore.getArchive("npc_config"); + NPC npc; + for (int id = 0; id < DefinitionSize.getNPCDefinitionsSize(); id++) { + npc = new NPC(id); + npc.parse(buf); + nodes.put(id, npc); + } + NPCDropManager.parse(); + NPC imp = (NPC) nodes.get(6604); + for (int id = 0; id < DefinitionSize.getNPCDefinitionsSize(); id++) { + npc = (NPC) nodes.get(id); + if (npc.getName().startsWith("Revenant") && id != 6604) { + npc.setConfig("clue_level", (byte) (npc.getDefinition().getCombatLevel() >= 70 ? 
3 : 2)); + NPCDropManager.getDropTables().get(id).getDrops(TableType.MAIN).clear(); + NPCDropManager.getDropTables().get(id).getDrops(TableType.CHARM).clear(); + for (NPCDrop drop : imp.getDrobTable(TableType.MAIN)) { + NPCDropManager.getDropTables().get(id).addDrop(TableType.MAIN, drop); + } + for (NPCDrop drop : imp.getDrobTable(TableType.CHARM)) { + NPCDropManager.getDropTables().get(id).addDrop(TableType.CHARM, drop); + } + for (NPCDrop drop : imp.getDrobTable(TableType.DEFAULT)) { + NPCDropManager.getDropTables().get(id).addDrop(TableType.DEFAULT, drop); + } + } + } + } + + @Override + public boolean save() { + ByteBuffer buffer = ByteBuffer.allocate(3145680 << 1); + for (int id = 0; id < DefinitionSize.getNPCDefinitionsSize(); id++) { + NPC npc = (NPC) nodes.get(id); + npc.save(buffer); + } + buffer.flip(); + ServerStore.setArchive("npc_config", buffer, false); + NPCDropManager.save(); + return true; + } +} diff --git a/Tools/Arios Editor/src/org/arios/workspace/node/npc/TableType.java b/Tools/Arios Editor/src/org/arios/workspace/node/npc/TableType.java new file mode 100644 index 000000000..8498beb67 --- /dev/null +++ b/Tools/Arios Editor/src/org/arios/workspace/node/npc/TableType.java @@ -0,0 +1,19 @@ +package org.arios.workspace.node.npc; + +/** + * A table type. + * @author VExia + */ +public enum TableType { + DEFAULT, + CHARM, + MAIN; + + /** + * Its opcode. + * @return the opcode. 
+ */ + public int opcode() { + return ordinal() + 1; + } +} diff --git a/Tools/Cache Editor/.classpath b/Tools/Cache Editor/.classpath new file mode 100644 index 000000000..5fc6c5198 --- /dev/null +++ b/Tools/Cache Editor/.classpath @@ -0,0 +1,11 @@ + + + + + + + + + + + diff --git a/Tools/Cache Editor/.project b/Tools/Cache Editor/.project new file mode 100644 index 000000000..4e650fab6 --- /dev/null +++ b/Tools/Cache Editor/.project @@ -0,0 +1,17 @@ + + + DragonkkCacheReader + + + + + + org.eclipse.jdt.core.javabuilder + + + + + + org.eclipse.jdt.core.javanature + + diff --git a/Tools/Cache Editor/.settings/org.eclipse.jdt.core.prefs b/Tools/Cache Editor/.settings/org.eclipse.jdt.core.prefs new file mode 100644 index 000000000..8b9e3c71b --- /dev/null +++ b/Tools/Cache Editor/.settings/org.eclipse.jdt.core.prefs @@ -0,0 +1,10 @@ +eclipse.preferences.version=1 +org.eclipse.jdt.core.compiler.codegen.inlineJsrBytecode=enabled +org.eclipse.jdt.core.compiler.codegen.targetPlatform=13 +org.eclipse.jdt.core.compiler.compliance=13 +org.eclipse.jdt.core.compiler.problem.assertIdentifier=error +org.eclipse.jdt.core.compiler.problem.enablePreviewFeatures=disabled +org.eclipse.jdt.core.compiler.problem.enumIdentifier=error +org.eclipse.jdt.core.compiler.problem.reportPreviewFeatures=warning +org.eclipse.jdt.core.compiler.release=enabled +org.eclipse.jdt.core.compiler.source=13 diff --git a/Tools/Cache Editor/498main_file_cache.dat2 b/Tools/Cache Editor/498main_file_cache.dat2 new file mode 100644 index 000000000..e69de29bb diff --git a/Tools/Cache Editor/498main_file_cache.idx255 b/Tools/Cache Editor/498main_file_cache.idx255 new file mode 100644 index 000000000..e69de29bb diff --git a/Tools/Cache Editor/bin/alex/CacheLoader.class b/Tools/Cache Editor/bin/alex/CacheLoader.class new file mode 100644 index 000000000..14f55129d Binary files /dev/null and b/Tools/Cache Editor/bin/alex/CacheLoader.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/CacheFile.class 
b/Tools/Cache Editor/bin/alex/cache/CacheFile.class new file mode 100644 index 000000000..203eba1e2 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/CacheFile.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/CacheFileWorker.class b/Tools/Cache Editor/bin/alex/cache/CacheFileWorker.class new file mode 100644 index 000000000..03afc5fc0 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/CacheFileWorker.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/FileOnDisk.class b/Tools/Cache Editor/bin/alex/cache/FileOnDisk.class new file mode 100644 index 000000000..e3a53fc63 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/FileOnDisk.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/FileSystem.class b/Tools/Cache Editor/bin/alex/cache/FileSystem.class new file mode 100644 index 000000000..bc1048536 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/FileSystem.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/ReferenceTable.class b/Tools/Cache Editor/bin/alex/cache/ReferenceTable.class new file mode 100644 index 000000000..4ac1a05b5 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/ReferenceTable.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/SeekableFile.class b/Tools/Cache Editor/bin/alex/cache/SeekableFile.class new file mode 100644 index 000000000..d31278cfc Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/SeekableFile.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/loaders/ConfigFileDefinition.class b/Tools/Cache Editor/bin/alex/cache/loaders/ConfigFileDefinition.class new file mode 100644 index 000000000..99a37c19e Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/loaders/ConfigFileDefinition.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/loaders/EquipIds.class b/Tools/Cache Editor/bin/alex/cache/loaders/EquipIds.class new file mode 100644 index 000000000..a63753265 Binary files /dev/null and 
b/Tools/Cache Editor/bin/alex/cache/loaders/EquipIds.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/loaders/ItemDefinition.class b/Tools/Cache Editor/bin/alex/cache/loaders/ItemDefinition.class new file mode 100644 index 000000000..f234d273e Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/loaders/ItemDefinition.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/loaders/ObjectDefinitions.class b/Tools/Cache Editor/bin/alex/cache/loaders/ObjectDefinitions.class new file mode 100644 index 000000000..9fefa56e3 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/loaders/ObjectDefinitions.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/loaders/OverlayDefinition.class b/Tools/Cache Editor/bin/alex/cache/loaders/OverlayDefinition.class new file mode 100644 index 000000000..13a8adcf5 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/loaders/OverlayDefinition.class differ diff --git a/Tools/Cache Editor/bin/alex/cache/updateServer/UpdateServer.class b/Tools/Cache Editor/bin/alex/cache/updateServer/UpdateServer.class new file mode 100644 index 000000000..0ecd0062d Binary files /dev/null and b/Tools/Cache Editor/bin/alex/cache/updateServer/UpdateServer.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/BZip2Constants.class b/Tools/Cache Editor/bin/alex/compressors/BZip2Constants.class new file mode 100644 index 000000000..38772fa54 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/BZip2Constants.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream$Data.class b/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream$Data.class new file mode 100644 index 000000000..968acaaa7 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream$Data.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream.class b/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream.class new file mode 100644 
index 000000000..929e94ecf Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/BZip2OutputStream.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/CRC.class b/Tools/Cache Editor/bin/alex/compressors/CRC.class new file mode 100644 index 000000000..ecba5d70c Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/CRC.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/CompressorOutputStream.class b/Tools/Cache Editor/bin/alex/compressors/CompressorOutputStream.class new file mode 100644 index 000000000..ca388814e Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/CompressorOutputStream.class differ diff --git a/Tools/Cache Editor/bin/alex/compressors/Rand.class b/Tools/Cache Editor/bin/alex/compressors/Rand.class new file mode 100644 index 000000000..ac1e1215a Binary files /dev/null and b/Tools/Cache Editor/bin/alex/compressors/Rand.class differ diff --git a/Tools/Cache Editor/bin/alex/decompressors/BZip2BlockEntry.class b/Tools/Cache Editor/bin/alex/decompressors/BZip2BlockEntry.class new file mode 100644 index 000000000..8b24b8bbc Binary files /dev/null and b/Tools/Cache Editor/bin/alex/decompressors/BZip2BlockEntry.class differ diff --git a/Tools/Cache Editor/bin/alex/decompressors/BZip2Decompressor.class b/Tools/Cache Editor/bin/alex/decompressors/BZip2Decompressor.class new file mode 100644 index 000000000..839de2f53 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/decompressors/BZip2Decompressor.class differ diff --git a/Tools/Cache Editor/bin/alex/decompressors/GZipDecompressor.class b/Tools/Cache Editor/bin/alex/decompressors/GZipDecompressor.class new file mode 100644 index 000000000..92edf551d Binary files /dev/null and b/Tools/Cache Editor/bin/alex/decompressors/GZipDecompressor.class differ diff --git a/Tools/Cache Editor/bin/alex/io/Stream.class b/Tools/Cache Editor/bin/alex/io/Stream.class new file mode 100644 index 000000000..7ef78592f Binary files /dev/null and 
b/Tools/Cache Editor/bin/alex/io/Stream.class differ diff --git a/Tools/Cache Editor/bin/alex/util/LookupTable.class b/Tools/Cache Editor/bin/alex/util/LookupTable.class new file mode 100644 index 000000000..dd6156f25 Binary files /dev/null and b/Tools/Cache Editor/bin/alex/util/LookupTable.class differ diff --git a/Tools/Cache Editor/bin/alex/util/Methods.class b/Tools/Cache Editor/bin/alex/util/Methods.class new file mode 100644 index 000000000..11662fa5a Binary files /dev/null and b/Tools/Cache Editor/bin/alex/util/Methods.class differ diff --git a/Tools/Cache Editor/bin/com/alex/io/InputStream.class b/Tools/Cache Editor/bin/com/alex/io/InputStream.class new file mode 100644 index 000000000..c29d1fb06 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/io/InputStream.class differ diff --git a/Tools/Cache Editor/bin/com/alex/io/OutputStream.class b/Tools/Cache Editor/bin/com/alex/io/OutputStream.class new file mode 100644 index 000000000..b3e36ebaa Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/io/OutputStream.class differ diff --git a/Tools/Cache Editor/bin/com/alex/io/Stream.class b/Tools/Cache Editor/bin/com/alex/io/Stream.class new file mode 100644 index 000000000..2b92eab17 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/io/Stream.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/CS2Mapping.class b/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/CS2Mapping.class new file mode 100644 index 000000000..8e3a78617 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/CS2Mapping.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/ClientScript.class b/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/ClientScript.class new file mode 100644 index 000000000..e9954ea77 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/clientscripts/ClientScript.class differ diff --git a/Tools/Cache 
Editor/bin/com/alex/loaders/images/IndexedColorImageFile.class b/Tools/Cache Editor/bin/com/alex/loaders/images/IndexedColorImageFile.class new file mode 100644 index 000000000..2cb3f7ec2 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/images/IndexedColorImageFile.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/images/LoaderImageArchive.class b/Tools/Cache Editor/bin/com/alex/loaders/images/LoaderImageArchive.class new file mode 100644 index 000000000..4a52bdc10 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/images/LoaderImageArchive.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponent.class b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponent.class new file mode 100644 index 000000000..50876f11c Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponent.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponentSettings.class b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponentSettings.class new file mode 100644 index 000000000..98ea6b351 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/IComponentSettings.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/interfaces/Interface.class b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/Interface.class new file mode 100644 index 000000000..640588801 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/Interface.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/interfaces/InterfaceName.class b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/InterfaceName.class new file mode 100644 index 000000000..bf7cc4781 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/interfaces/InterfaceName.class differ diff --git a/Tools/Cache Editor/bin/com/alex/loaders/items/ItemDefinitions.class b/Tools/Cache 
Editor/bin/com/alex/loaders/items/ItemDefinitions.class new file mode 100644 index 000000000..87ad42c7f Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/loaders/items/ItemDefinitions.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/Archive.class b/Tools/Cache Editor/bin/com/alex/store/Archive.class new file mode 100644 index 000000000..ed28fc6ee Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/Archive.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/ArchiveReference.class b/Tools/Cache Editor/bin/com/alex/store/ArchiveReference.class new file mode 100644 index 000000000..a97a522be Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/ArchiveReference.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/FileReference.class b/Tools/Cache Editor/bin/com/alex/store/FileReference.class new file mode 100644 index 000000000..e50711d95 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/FileReference.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/Index.class b/Tools/Cache Editor/bin/com/alex/store/Index.class new file mode 100644 index 000000000..afa9f26b3 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/Index.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/MainFile.class b/Tools/Cache Editor/bin/com/alex/store/MainFile.class new file mode 100644 index 000000000..5dc735683 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/MainFile.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/ReferenceTable.class b/Tools/Cache Editor/bin/com/alex/store/ReferenceTable.class new file mode 100644 index 000000000..2a9cebfad Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/store/ReferenceTable.class differ diff --git a/Tools/Cache Editor/bin/com/alex/store/Store.class b/Tools/Cache Editor/bin/com/alex/store/Store.class new file mode 100644 index 000000000..8817d7fb0 Binary files /dev/null and 
b/Tools/Cache Editor/bin/com/alex/store/Store.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ArchiveValidation.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ArchiveValidation.class new file mode 100644 index 000000000..f4d06adbc Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ArchiveValidation.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditor.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditor.class new file mode 100644 index 000000000..4df402a07 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditormodels.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditormodels.class new file mode 100644 index 000000000..f9468bc1d Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CacheEditormodels.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CheckMap.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CheckMap.class new file mode 100644 index 000000000..cabc57b9d Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CheckMap.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CopyCache.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CopyCache.class new file mode 100644 index 000000000..833674674 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/CopyCache.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/OriginalXteas.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/OriginalXteas.class new file mode 100644 index 000000000..af3db16e1 Binary files /dev/null and b/Tools/Cache 
Editor/bin/com/alex/tools/clientCacheUpdater/OriginalXteas.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ProtectCache.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ProtectCache.class new file mode 100644 index 000000000..5e3cf0312 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/ProtectCache.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/RSXteas.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/RSXteas.class new file mode 100644 index 000000000..643621038 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/RSXteas.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/SpritesDumper.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/SpritesDumper.class new file mode 100644 index 000000000..ac78d8bb4 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/SpritesDumper.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/UpdateCache.class b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/UpdateCache.class new file mode 100644 index 000000000..3c63a1b2e Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/clientCacheUpdater/UpdateCache.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$1.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$1.class new file mode 100644 index 000000000..b054d72f6 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$1.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$2.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$2.class new file mode 100644 index 000000000..7ad57d468 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$2.class 
differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$3.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$3.class new file mode 100644 index 000000000..409c46bd6 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$3.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$4.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$4.class new file mode 100644 index 000000000..e3d929f32 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$4.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$5.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$5.class new file mode 100644 index 000000000..035fcde6d Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$5.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$6.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$6.class new file mode 100644 index 000000000..99f87533c Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$6.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$7.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$7.class new file mode 100644 index 000000000..14062fbca Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$7.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$8.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$8.class new file mode 100644 index 000000000..9770efbdc Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$8.class differ diff --git a/Tools/Cache 
Editor/bin/com/alex/tools/itemsDefsEditor/Application$9.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$9.class new file mode 100644 index 000000000..288a91acc Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application$9.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application.class new file mode 100644 index 000000000..af9e20b85 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/Application.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$1.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$1.class new file mode 100644 index 000000000..113be4af0 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$1.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$2.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$2.class new file mode 100644 index 000000000..cfe5463b7 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys$2.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys.class new file mode 100644 index 000000000..146267285 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/GeneratedUkeys.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$1.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$1.class new file mode 100644 index 000000000..df46d8035 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$1.class differ diff --git a/Tools/Cache 
Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$2.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$2.class new file mode 100644 index 000000000..8fb7a4473 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor$2.class differ diff --git a/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor.class b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor.class new file mode 100644 index 000000000..6de1154ef Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/tools/itemsDefsEditor/ItemDefsEditor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2BlockEntry.class b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2BlockEntry.class new file mode 100644 index 000000000..4f50e68ef Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2BlockEntry.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Compressor.class b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Compressor.class new file mode 100644 index 000000000..fad14d3f8 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Compressor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Decompressor.class b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Decompressor.class new file mode 100644 index 000000000..57a0a1b6f Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/bzip2/BZip2Decompressor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/crc32/CRC32HGenerator.class b/Tools/Cache Editor/bin/com/alex/util/crc32/CRC32HGenerator.class new file mode 100644 index 000000000..9efb70bc9 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/crc32/CRC32HGenerator.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/gzip/GZipCompressor.class b/Tools/Cache Editor/bin/com/alex/util/gzip/GZipCompressor.class new file mode 100644 index 000000000..0a5b6f9f1 
Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/gzip/GZipCompressor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/gzip/GZipDecompressor.class b/Tools/Cache Editor/bin/com/alex/util/gzip/GZipDecompressor.class new file mode 100644 index 000000000..31235a52e Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/gzip/GZipDecompressor.class differ diff --git a/Tools/Cache Editor/bin/com/alex/util/whirlpool/Whirlpool.class b/Tools/Cache Editor/bin/com/alex/util/whirlpool/Whirlpool.class new file mode 100644 index 000000000..f5fa28267 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/util/whirlpool/Whirlpool.class differ diff --git a/Tools/Cache Editor/bin/com/alex/utils/ByteBufferUtils.class b/Tools/Cache Editor/bin/com/alex/utils/ByteBufferUtils.class new file mode 100644 index 000000000..c2e162633 Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/utils/ByteBufferUtils.class differ diff --git a/Tools/Cache Editor/bin/com/alex/utils/Constants.class b/Tools/Cache Editor/bin/com/alex/utils/Constants.class new file mode 100644 index 000000000..d592375fb Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/utils/Constants.class differ diff --git a/Tools/Cache Editor/bin/com/alex/utils/Utils.class b/Tools/Cache Editor/bin/com/alex/utils/Utils.class new file mode 100644 index 000000000..488d4723d Binary files /dev/null and b/Tools/Cache Editor/bin/com/alex/utils/Utils.class differ diff --git a/Tools/Cache Editor/bin/emperor/DefDumper.class b/Tools/Cache Editor/bin/emperor/DefDumper.class new file mode 100644 index 000000000..d50825705 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/DefDumper.class differ diff --git a/Tools/Cache Editor/bin/emperor/DonatorIconPacker.class b/Tools/Cache Editor/bin/emperor/DonatorIconPacker.class new file mode 100644 index 000000000..2bbae7f36 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/DonatorIconPacker.class differ diff --git a/Tools/Cache 
Editor/bin/emperor/ItemPacker.class b/Tools/Cache Editor/bin/emperor/ItemPacker.class new file mode 100644 index 000000000..6ec7d381d Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ItemPacker.class differ diff --git a/Tools/Cache Editor/bin/emperor/LandMap.class b/Tools/Cache Editor/bin/emperor/LandMap.class new file mode 100644 index 000000000..3a3aecded Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/LandMap.class differ diff --git a/Tools/Cache Editor/bin/emperor/Landscape.class b/Tools/Cache Editor/bin/emperor/Landscape.class new file mode 100644 index 000000000..dc8e77cee Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/Landscape.class differ diff --git a/Tools/Cache Editor/bin/emperor/LandscapeCache.class b/Tools/Cache Editor/bin/emperor/LandscapeCache.class new file mode 100644 index 000000000..a9d4a3647 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/LandscapeCache.class differ diff --git a/Tools/Cache Editor/bin/emperor/LandscapeEditor.class b/Tools/Cache Editor/bin/emperor/LandscapeEditor.class new file mode 100644 index 000000000..7250a5806 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/LandscapeEditor.class differ diff --git a/Tools/Cache Editor/bin/emperor/MapEditor.class b/Tools/Cache Editor/bin/emperor/MapEditor.class new file mode 100644 index 000000000..3221ee00c Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/MapEditor.class differ diff --git a/Tools/Cache Editor/bin/emperor/ModelPacker.class b/Tools/Cache Editor/bin/emperor/ModelPacker.class new file mode 100644 index 000000000..0a36b8412 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ModelPacker.class differ diff --git a/Tools/Cache Editor/bin/emperor/MusicPropertiesPacker.class b/Tools/Cache Editor/bin/emperor/MusicPropertiesPacker.class new file mode 100644 index 000000000..ba9e6b655 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/MusicPropertiesPacker.class differ diff --git a/Tools/Cache 
Editor/bin/emperor/ObjectMap$GameObject.class b/Tools/Cache Editor/bin/emperor/ObjectMap$GameObject.class new file mode 100644 index 000000000..cf2df69c7 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ObjectMap$GameObject.class differ diff --git a/Tools/Cache Editor/bin/emperor/ObjectMap$Location.class b/Tools/Cache Editor/bin/emperor/ObjectMap$Location.class new file mode 100644 index 000000000..e3f68b115 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ObjectMap$Location.class differ diff --git a/Tools/Cache Editor/bin/emperor/ObjectMap$QueueEntry.class b/Tools/Cache Editor/bin/emperor/ObjectMap$QueueEntry.class new file mode 100644 index 000000000..78d889310 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ObjectMap$QueueEntry.class differ diff --git a/Tools/Cache Editor/bin/emperor/ObjectMap.class b/Tools/Cache Editor/bin/emperor/ObjectMap.class new file mode 100644 index 000000000..228d18eb4 Binary files /dev/null and b/Tools/Cache Editor/bin/emperor/ObjectMap.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockInputStream.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockInputStream.class new file mode 100644 index 000000000..e25224a43 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockInputStream.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockOutputStream.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockOutputStream.class new file mode 100644 index 000000000..0db9ccb59 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4BlockOutputStream.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Compressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Compressor.class new file mode 100644 index 000000000..05bada1d6 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Compressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Constants.class b/Tools/Cache 
Editor/bin/net/jpountz/lz4/LZ4Constants.class new file mode 100644 index 000000000..47ff9ed5a Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Constants.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Decompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Decompressor.class new file mode 100644 index 000000000..caa9cdd9d Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Decompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Exception.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Exception.class new file mode 100644 index 000000000..d1f7167a1 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Exception.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Factory.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Factory.class new file mode 100644 index 000000000..3ecc51eac Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Factory.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4FastDecompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4FastDecompressor.class new file mode 100644 index 000000000..0e839a0aa Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4FastDecompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4HCJNICompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4HCJNICompressor.class new file mode 100644 index 000000000..6ad626759 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4HCJNICompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNI.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNI.class new file mode 100644 index 000000000..e7d1b67de Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNI.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNICompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNICompressor.class new file 
mode 100644 index 000000000..94e714505 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNICompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNIFastDecompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNIFastDecompressor.class new file mode 100644 index 000000000..b6e085d6c Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNIFastDecompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNISafeDecompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNISafeDecompressor.class new file mode 100644 index 000000000..f5e77569e Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4JNISafeDecompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4SafeDecompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4SafeDecompressor.class new file mode 100644 index 000000000..939b3660f Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4SafeDecompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4UnknownSizeDecompressor.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4UnknownSizeDecompressor.class new file mode 100644 index 000000000..433132910 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4UnknownSizeDecompressor.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils$Match.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils$Match.class new file mode 100644 index 000000000..8e81fd7f0 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils$Match.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils.class b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils.class new file mode 100644 index 000000000..afa1a9877 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/lz4/LZ4Utils.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/lz4/package.html b/Tools/Cache 
Editor/bin/net/jpountz/lz4/package.html new file mode 100644 index 000000000..e53410674 --- /dev/null +++ b/Tools/Cache Editor/bin/net/jpountz/lz4/package.html @@ -0,0 +1,55 @@ + + + + + + + +

LZ4 compression. The entry point of the API is the +{@link net.jpountz.lz4.LZ4Factory} class, which gives access to +{@link net.jpountz.lz4.LZ4Compressor compressors} and +{@link net.jpountz.lz4.LZ4SafeDecompressor decompressors}.

+ + +

Sample usage:

+ +
+    LZ4Factory factory = LZ4Factory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+    final int decompressedLength = data.length;
+
+    // compress data
+    LZ4Compressor compressor = factory.fastCompressor();
+    int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
+    byte[] compressed = new byte[maxCompressedLength];
+    int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);
+
+    // decompress data
+    // - method 1: when the decompressed length is known
+    LZ4FastDecompressor decompressor = factory.fastDecompressor();
+    byte[] restored = new byte[decompressedLength];
+    int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
+    // compressedLength == compressedLength2
+
+    // - method 2: when the compressed length is known (a little slower)
+    // the destination buffer needs to be over-sized
+    LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
+    int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
+    // decompressedLength == decompressedLength2
+
+ + + diff --git a/Tools/Cache Editor/bin/net/jpountz/util/Native$OS.class b/Tools/Cache Editor/bin/net/jpountz/util/Native$OS.class new file mode 100644 index 000000000..131d67e0e Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/util/Native$OS.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/util/Native.class b/Tools/Cache Editor/bin/net/jpountz/util/Native.class new file mode 100644 index 000000000..1e66acd11 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/util/Native.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/util/Utils.class b/Tools/Cache Editor/bin/net/jpountz/util/Utils.class new file mode 100644 index 000000000..35fb6323b Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/util/Utils.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/util/package.html b/Tools/Cache Editor/bin/net/jpountz/util/package.html new file mode 100644 index 000000000..4b3ceb980 --- /dev/null +++ b/Tools/Cache Editor/bin/net/jpountz/util/package.html @@ -0,0 +1,22 @@ + + + + + + + +

Utility classes.

+ + \ No newline at end of file diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/AbstractStreamingXXHash32Java.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/AbstractStreamingXXHash32Java.class new file mode 100644 index 000000000..29f5c606b Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/AbstractStreamingXXHash32Java.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$1.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$1.class new file mode 100644 index 000000000..183e38abf Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$1.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$Factory.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$Factory.class new file mode 100644 index 000000000..737329c17 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32$Factory.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32.class new file mode 100644 index 000000000..25c89a0e9 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI$Factory.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI$Factory.class new file mode 100644 index 000000000..a548531b9 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI$Factory.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI.class new file mode 100644 index 000000000..f512d2ea7 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/StreamingXXHash32JNI.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32.class 
b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32.class new file mode 100644 index 000000000..57b107ef1 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32JNI.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32JNI.class new file mode 100644 index 000000000..97ff151ee Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHash32JNI.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashConstants.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashConstants.class new file mode 100644 index 000000000..1da599fe4 Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashConstants.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashFactory.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashFactory.class new file mode 100644 index 000000000..0b39be7aa Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashFactory.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashJNI.class b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashJNI.class new file mode 100644 index 000000000..3eee1c86d Binary files /dev/null and b/Tools/Cache Editor/bin/net/jpountz/xxhash/XXHashJNI.class differ diff --git a/Tools/Cache Editor/bin/net/jpountz/xxhash/package.html b/Tools/Cache Editor/bin/net/jpountz/xxhash/package.html new file mode 100644 index 000000000..f595d25a8 --- /dev/null +++ b/Tools/Cache Editor/bin/net/jpountz/xxhash/package.html @@ -0,0 +1,65 @@ + + + + + + + +

xxhash hashing. This package supports both block hashing via +{@link net.jpountz.xxhash.XXHash32} and streaming hashing via +{@link net.jpountz.xxhash.StreamingXXHash32}. Have a look at +{@link net.jpountz.xxhash.XXHashFactory} to know how to get instances of these +interfaces.

+ +

Streaming hashing is a little slower but doesn't require to load the whole +stream into memory.

+ +

Sample block usage:

+ +
+    XXHashFactory factory = XXHashFactory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+
+    XXHash32 hash32 = factory.hash32();
+    int seed = 0x9747b28c; // used to initialize the hash value, use whatever
+                           // value you want, but always the same
+    int hash = hash32.hash(data, 0, data.length, seed);
+
+ +

Sample streaming usage:

+ +
+    XXHashFactory factory = XXHashFactory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+    ByteArrayInputStream in = new ByteArrayInputStream(data);
+
+    int seed = 0x9747b28c; // used to initialize the hash value, use whatever
+                           // value you want, but always the same
+    StreamingXXHash32 hash32 = factory.newStreamingHash32(seed);
+    byte[] buf = new byte[8]; // for real-world usage, use a larger buffer, like 8192 bytes
+    for (;;) {
+      int read = in.read(buf);
+      if (read == -1) {
+        break;
+      }
+      hash32.update(buf, 0, read);
+    }
+    int hash = hash32.getValue();
+
+ + + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveEntry.class new file mode 100644 index 000000000..01617adff Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveException.class new file mode 100644 index 000000000..f02b8a6ec Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveInputStream.class new file mode 100644 index 000000000..59477f738 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveOutputStream.class new file mode 100644 index 000000000..75738f72d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveStreamFactory.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveStreamFactory.class new file mode 100644 index 000000000..680705fb6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ArchiveStreamFactory.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/Lister.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/Lister.class new file mode 100644 index 000000000..bbd836aac Binary files 
/dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/Lister.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveEntry.class new file mode 100644 index 000000000..3a2c05b27 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.class new file mode 100644 index 000000000..95f5d15f8 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.class new file mode 100644 index 000000000..44f3f4c1e Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/ArArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/package.html new file mode 100644 index 000000000..9c80f96e1 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/ar/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading and writing archives using + the AR format.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry$HostOs.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry$HostOs.class new file mode 100644 index 000000000..9d4498bc9 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry$HostOs.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.class new file mode 100644 index 000000000..b73fc94e6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.class new file mode 100644 index 000000000..febd154be Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/ArjArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$FileTypes.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$FileTypes.class new file mode 100644 index 000000000..6c1029ca2 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$FileTypes.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Flags.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Flags.class new file mode 100644 index 000000000..2a651c263 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Flags.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Methods.class 
b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Methods.class new file mode 100644 index 000000000..b2f0e83c5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader$Methods.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader.class new file mode 100644 index 000000000..86dac8d4c Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/LocalFileHeader.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader$Flags.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader$Flags.class new file mode 100644 index 000000000..f91121d2a Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader$Flags.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader.class new file mode 100644 index 000000000..aa195023d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/MainHeader.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/package.html new file mode 100644 index 000000000..de18f61d8 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/arj/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading archives using + the ARJ format.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.class new file mode 100644 index 000000000..6ec2b68f0 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.class new file mode 100644 index 000000000..cf50c9fb5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.class new file mode 100644 index 000000000..5bda0ddfd Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioConstants.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioConstants.class new file mode 100644 index 000000000..9359c4bad Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioConstants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioUtil.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioUtil.class new file mode 100644 index 000000000..c2d1c2100 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/CpioUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/package.html b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/cpio/package.html new file mode 100644 index 000000000..985828725 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/cpio/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading and writing archives using + the CPIO format.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/Dirent.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/Dirent.class new file mode 100644 index 000000000..e05a4cec2 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/Dirent.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$COMPRESSION_TYPE.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$COMPRESSION_TYPE.class new file mode 100644 index 000000000..5321e5ed2 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$COMPRESSION_TYPE.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$SEGMENT_TYPE.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$SEGMENT_TYPE.class new file mode 100644 index 000000000..071277234 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants$SEGMENT_TYPE.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.class new file mode 100644 index 000000000..190f05651 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$PERMISSION.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$PERMISSION.class new file mode 100644 index 000000000..3416664ec Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$PERMISSION.class differ diff --git a/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TYPE.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TYPE.class new file mode 100644 index 000000000..763fa1429 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TYPE.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TapeSegmentHeader.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TapeSegmentHeader.class new file mode 100644 index 000000000..3157647c6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry$TapeSegmentHeader.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.class new file mode 100644 index 000000000..6fa04a99c Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveException.class new file mode 100644 index 000000000..cde2fd820 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream$1.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream$1.class new file mode 100644 index 000000000..0d0aabc70 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream$1.class differ diff --git a/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.class new file mode 100644 index 000000000..221493063 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.class new file mode 100644 index 000000000..c2246679b Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.class new file mode 100644 index 000000000..534a285ce Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/InvalidFormatException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/InvalidFormatException.class new file mode 100644 index 000000000..98de546fc Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/InvalidFormatException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/ShortFileException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/ShortFileException.class new file mode 100644 index 000000000..71e329b1f Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/ShortFileException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/TapeInputStream.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/dump/TapeInputStream.class new file mode 100644 index 000000000..3cfd073aa Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/TapeInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.class new file mode 100644 index 000000000..152904ee4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.class new file mode 100644 index 000000000..263b4be59 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/package.html new file mode 100644 index 000000000..72f3c68c4 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/dump/package.html @@ -0,0 +1,56 @@ + + + +

This package provides stream classes for reading archives + using the Unix DUMP format. This format is similar to (and + contemporary with) TAR but reads the raw filesystem directly. + This means that writers are filesystem-specific even though the + created archives are filesystem-agnostic. +

+ +

Unlike other formats DUMP offers clean support for sparse files, + extended attributes, and other file metadata. In addition DUMP + supports incremental dump files can capture (most) file deletion. + It also provides a native form of compression and will soon support + native encryption as well. +

+ +

In practice TAR archives are used for both distribution + and backups. DUMP archives are used exclusively for backups. +

+ +

Like any 30+-year-old application there are a number of variants. + For pragmatic reasons we will only support archives with the + 'new' tape header and inode formats. Other restrictions: + +

    +
  • We only support ZLIB compression. The format + also permits LZO and BZLIB compression.
  • +
  • Sparse files will have the holes filled.
  • +
  • MacOS finder and resource streams are ignored.
  • +
  • Extended attributes are not currently provided.
  • +
  • SELinux labels are not currently provided.
  • +
+

+ +

As of Apache Commons Compress 1.3 support for the dump format is + read-only.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveEntry.class new file mode 100644 index 000000000..7c58b1cd1 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.class new file mode 100644 index 000000000..31db28af1 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.class new file mode 100644 index 000000000..4bbbfdb57 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/package.html new file mode 100644 index 000000000..09829ae6a --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/jar/package.html @@ -0,0 +1,25 @@ + + + +

Provides stream classes for reading and writing archives using + the ZIP format with some extensions for the special case of JAR + archives.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/package.html new file mode 100644 index 000000000..df1922b4a --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/package.html @@ -0,0 +1,24 @@ + + + +

Provides a unified API and factories for dealing with archives + in different formats.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveEntry.class new file mode 100644 index 000000000..b3955d248 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.class new file mode 100644 index 000000000..782ff50dc Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.class new file mode 100644 index 000000000..6d272a492 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.class new file mode 100644 index 000000000..3632fdd58 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarConstants.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarConstants.class new file mode 100644 index 000000000..96eb1638a Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarConstants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils$1.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils$1.class new file mode 100644 index 000000000..77925bbe8 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils$1.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils.class new file mode 100644 index 000000000..606d60af9 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/TarUtils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/package.html new file mode 100644 index 000000000..141f33b61 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/tar/package.html @@ -0,0 +1,30 @@ + + + +

Provides stream classes for reading and writing archives using + the TAR format.

+ +

There are many different format dialects that call themselves + TAR. The classes of this package can read and write archives in + the traditional pre-POSIX ustar format and support GNU + specific extensions for long filenames that GNU tar itself by + now refers to as oldgnu.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.class new file mode 100644 index 000000000..80ffea1b7 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AsiExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AsiExtraField.class new file mode 100644 index 000000000..c1894e5c4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/AsiExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BinaryTree.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BinaryTree.class new file mode 100644 index 000000000..07106c514 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BinaryTree.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BitStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BitStream.class new file mode 100644 index 000000000..d4a96dfd8 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/BitStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/CircularBuffer.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/CircularBuffer.class new file mode 100644 index 000000000..80fa38aff Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/CircularBuffer.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExplodingInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExplodingInputStream.class new file 
mode 100644 index 000000000..ebca2e5ae Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExplodingInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils$UnparseableExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils$UnparseableExtraField.class new file mode 100644 index 000000000..538929cfe Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils$UnparseableExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.class new file mode 100644 index 000000000..e2b82a184 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.class new file mode 100644 index 000000000..1d0d59658 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.class new file mode 100644 index 000000000..7cb44f5de Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/JarMarker.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/JarMarker.class new file mode 100644 index 000000000..110f287c4 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/JarMarker.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/NioZipEncoding.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/NioZipEncoding.class new file mode 100644 index 000000000..ec1938dce Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/NioZipEncoding.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding$Simple8BitChar.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding$Simple8BitChar.class new file mode 100644 index 000000000..ad421cb3e Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding$Simple8BitChar.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.class new file mode 100644 index 000000000..e867c3817 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Simple8BitZipEncoding.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodeCommentExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodeCommentExtraField.class new file mode 100644 index 000000000..63970fb8a Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodeCommentExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.class new file mode 100644 index 000000000..f69a24a07 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.class differ 
diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnixStat.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnixStat.class new file mode 100644 index 000000000..20e77c9b4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnixStat.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.class new file mode 100644 index 000000000..9410e3d67 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.class new file mode 100644 index 000000000..7994e005f Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.class new file mode 100644 index 000000000..b97e8ed7c Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException$Feature.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException$Feature.class new file mode 100644 index 000000000..8c2c01203 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException$Feature.class differ diff --git a/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.class new file mode 100644 index 000000000..4c959afea Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.class new file mode 100644 index 000000000..3186a1616 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X7875_NewUnix.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X7875_NewUnix.class new file mode 100644 index 000000000..4018a9146 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/X7875_NewUnix.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.class new file mode 100644 index 000000000..30986c299 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64Mode.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64Mode.class new file mode 100644 index 000000000..980dbbf66 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64Mode.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64RequiredException.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/Zip64RequiredException.class new file mode 100644 index 000000000..40f315caa Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/Zip64RequiredException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.class new file mode 100644 index 000000000..5a17ea335 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$BoundedInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$BoundedInputStream.class new file mode 100644 index 000000000..d8fe282a5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$BoundedInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$CurrentEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$CurrentEntry.class new file mode 100644 index 000000000..3fa5f7e9d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream$CurrentEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.class new file mode 100644 index 000000000..7dd7f64c9 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$CurrentEntry.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$CurrentEntry.class new file mode 100644 index 000000000..0416870c0 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$CurrentEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$UnicodeExtraFieldPolicy.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$UnicodeExtraFieldPolicy.class new file mode 100644 index 000000000..19cc84636 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream$UnicodeExtraFieldPolicy.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.class new file mode 100644 index 000000000..de8a8e986 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipConstants.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipConstants.class new file mode 100644 index 000000000..908a9f432 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipConstants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.class new file mode 100644 index 000000000..00b70af55 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncoding.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncoding.class new file mode 100644 index 000000000..ecae5ccb5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncoding.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper$SimpleEncodingHolder.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper$SimpleEncodingHolder.class new file mode 100644 index 000000000..f1feef6e6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper$SimpleEncodingHolder.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.class new file mode 100644 index 000000000..d43dec4f4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipExtraField.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipExtraField.class new file mode 100644 index 000000000..b8abde710 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipExtraField.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$1.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$1.class new file mode 100644 index 000000000..e582f4614 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$1.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$2.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$2.class new file mode 100644 index 000000000..a10989743 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$2.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$BoundedInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$BoundedInputStream.class new file mode 100644 index 000000000..f295de7cf Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$BoundedInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$Entry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$Entry.class new file mode 100644 index 000000000..505796910 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$Entry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$NameAndComment.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$NameAndComment.class new file mode 100644 index 000000000..766827e1b Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$NameAndComment.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$OffsetEntry.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$OffsetEntry.class new file mode 100644 index 000000000..26b238137 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile$OffsetEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile.class new file mode 100644 index 000000000..97bd9522e Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipFile.class differ diff --git a/Tools/Cache 
Editor/bin/org/apache/commons/compress/archivers/zip/ZipLong.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipLong.class new file mode 100644 index 000000000..f3ac89f31 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipLong.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipMethod.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipMethod.class new file mode 100644 index 000000000..07eb5c6c6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipMethod.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipShort.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipShort.class new file mode 100644 index 000000000..d438356de Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipShort.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipUtil.class b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipUtil.class new file mode 100644 index 000000000..18eb88811 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/ZipUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/package.html new file mode 100644 index 000000000..521687be6 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/archivers/zip/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading and writing archives using + the ZIP format.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/Change.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/Change.class new file mode 100644 index 000000000..1886722f2 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/Change.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSet.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSet.class new file mode 100644 index 000000000..fa107a6fd Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSet.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveEntryIterator.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveEntryIterator.class new file mode 100644 index 000000000..edd5f93e1 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveEntryIterator.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveInputStreamIterator.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveInputStreamIterator.class new file mode 100644 index 000000000..6ac6240f6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ArchiveInputStreamIterator.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ZipFileIterator.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ZipFileIterator.class new file mode 100644 index 000000000..41d7c18e5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer$ZipFileIterator.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer.class new file mode 100644 index 000000000..9fb6c5d80 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetPerformer.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetResults.class b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetResults.class new file mode 100644 index 000000000..050ed5faf Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/ChangeSetResults.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/changes/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/package.html new file mode 100644 index 000000000..4ba3e87d0 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/changes/package.html @@ -0,0 +1,27 @@ + + + +

EXPERIMENTAL support for changesets that are applied to + archives.

+ +

This API is considered unstable and may be modified or even + removed in future releases.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorException.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorException.class new file mode 100644 index 000000000..9d1ba1341 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorException.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorInputStream.class new file mode 100644 index 000000000..0df9c66c2 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorOutputStream.class new file mode 100644 index 000000000..33a8327c4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorStreamFactory.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorStreamFactory.class new file mode 100644 index 000000000..9c07e4d37 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/CompressorStreamFactory.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/FileNameUtil.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/FileNameUtil.class new file mode 100644 index 000000000..5110ff829 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/FileNameUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream$Data.class b/Tools/Cache 
Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream$Data.class new file mode 100644 index 000000000..742ac2a1f Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream$Data.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.class new file mode 100644 index 000000000..6b5a1ee84 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream$Data.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream$Data.class new file mode 100644 index 000000000..acbfa817d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream$Data.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.class new file mode 100644 index 000000000..17eab8537 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Constants.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Constants.class new file mode 100644 index 000000000..908bda51d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Constants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Utils.class 
b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Utils.class new file mode 100644 index 000000000..e81a541ac Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BZip2Utils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BlockSort.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BlockSort.class new file mode 100644 index 000000000..77670b33f Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/BlockSort.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/CRC.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/CRC.class new file mode 100644 index 000000000..9d8fbbe23 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/CRC.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/Rand.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/Rand.class new file mode 100644 index 000000000..4c0bbd32b Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/Rand.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/package.html new file mode 100644 index 000000000..fe27e6e66 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/bzip2/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for compressing and decompressing + streams using the BZip2 algorithm.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.class new file mode 100644 index 000000000..5e7a0a50c Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.class new file mode 100644 index 000000000..c47fb15e3 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipParameters.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipParameters.class new file mode 100644 index 000000000..4b9430145 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipParameters.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipUtils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipUtils.class new file mode 100644 index 000000000..9738be7a5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/GzipUtils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/package.html new file mode 100644 index 000000000..e18b50f2f --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/gzip/package.html @@ -0,0 +1,29 @@ + + + +

Provides stream classes for compressing and decompressing + streams using the GZip algorithm.

+ +

The classes in this package are wrappers around {@link + java.util.zip.GZIPInputStream java.util.zip.GZIPInputStream} and + {@link java.util.zip.GZIPOutputStream + java.util.zip.GZIPOutputStream}.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.class new file mode 100644 index 000000000..0887b29e5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream$1.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream$1.class new file mode 100644 index 000000000..42e7d1ee6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream$1.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.class new file mode 100644 index 000000000..437f67c11 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.class new file mode 100644 index 000000000..f213a2919 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$1.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$1.class new file mode 100644 index 000000000..b73f32b24 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$1.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$2.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$2.class new file mode 100644 index 000000000..288a5b463 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy$2.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy.class new file mode 100644 index 000000000..83293c096 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Strategy.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Utils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Utils.class new file mode 100644 index 000000000..3da434185 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/Pack200Utils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/StreamBridge.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/StreamBridge.class new file mode 100644 index 000000000..bf1f3434b Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/StreamBridge.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge$1.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge$1.class new file mode 100644 index 000000000..f50abcac3 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge$1.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.class new file mode 100644 index 000000000..d53376693 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/package.html new file mode 100644 index 000000000..dfbcb88e6 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/pack200/package.html @@ -0,0 +1,82 @@ + + + +

Provides stream classes for compressing and decompressing + streams using the Pack200 algorithm used to compress Java + archives.

+ +

The streams of this package only work on JAR archives, i.e. a + {@link + org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream + Pack200CompressorOutputStream} expects to be wrapped around a + stream that a valid JAR archive will be written to and a {@link + org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream + Pack200CompressorInputStream} provides a stream to read from a + JAR archive.

+ +

JAR archives compressed with Pack200 will in general be + different from the original archive when decompressed again. + For details see + the API + documentation of Pack200.

+ +

The streams of this package work on non-deflated streams, + i.e. archives like those created with the --no-gzip + option of the JDK's pack200 command line tool. If + you want to work on deflated streams you must use an additional + stream layer - for example by using Apache Commons Compress' + gzip package.

+ +

The Pack200 API provided by the Java class library doesn't lend + itself to real stream + processing. Pack200CompressorInputStream will + uncompress its input immediately and then provide + an InputStream to a cached result. + Likewise Pack200CompressorOutputStream will not + write anything to the given OutputStream + until finish or close is called - at + which point the cached output written so far gets + compressed.

+ +

Two different caching modes are available - "in memory", which + is the default, and "temporary file". By default data is cached + in memory but you should switch to the temporary file option if + your archives are really big.

+ +

Given there always is an intermediate result + the getBytesRead and getCount methods + of Pack200CompressorInputStream are meaningless + (read from the real stream or from the intermediate result?) + and always return 0.

+ +

During development of the initial version several attempts have + been made to use a real streaming API based for example + on Piped(In|Out)putStream or explicit stream + pumping like Commons Exec's InputStreamPumper but + they have all failed because they rely on the output end to be + consumed completely or else the (un)pack will block + forever. Especially for Pack200InputStream it is + very likely that it will be wrapped in + a ZipArchiveInputStream which will never read the + archive completely as it is not interested in the ZIP central + directory data at the end of the JAR archive.

+ + + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/package.html new file mode 100644 index 000000000..7b7d504b9 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/package.html @@ -0,0 +1,24 @@ + + + +

Provides a unified API and factories for dealing with + compressed streams.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.class new file mode 100644 index 000000000..b351661a0 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.class new file mode 100644 index 000000000..d4e116347 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.class new file mode 100644 index 000000000..bc4be88c8 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/package.html new file mode 100644 index 000000000..a0d61de05 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/snappy/package.html @@ -0,0 +1,38 @@ + + + +

Provides stream classes for decompressing streams using the + Snappy + algorithm.

+ +

The raw Snappy format which only contains the compressed data + is supported by the SnappyCompressorInputStream + class while the so called "framing format" is implemented + by FramedSnappyCompressorInputStream. Note there + have been different versions of the fraing format specification, + the implementation in Commons Compress is based on the + specification "Last revised: 2013-10-25".

+ +

Only the "framing format" can be auto-detected this means you + have to speficy the format explicitly if you want to read a + "raw" Snappy stream + via CompressorStreamFactory.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/ZCompressorInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/ZCompressorInputStream.class new file mode 100644 index 000000000..faf0e3ea6 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/ZCompressorInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.class new file mode 100644 index 000000000..fee71d91e Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/package.html new file mode 100644 index 000000000..b0f1525ec --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/_internal_/package.html @@ -0,0 +1,25 @@ + + + +

This package is not part of Commons Compress' published + API. It may change without warning. Contains classes + used by Commons Compress internally.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/package.html new file mode 100644 index 000000000..ca9924b78 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/compressors/z/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for decompressing + streams using the "compress" algorithm used to write .Z files.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/ArchiveUtils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/ArchiveUtils.class new file mode 100644 index 000000000..095945b40 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/ArchiveUtils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/BoundedInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/BoundedInputStream.class new file mode 100644 index 000000000..fca28e031 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/BoundedInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CRC32VerifyingInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CRC32VerifyingInputStream.class new file mode 100644 index 000000000..4be51baa1 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CRC32VerifyingInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CharsetNames.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CharsetNames.class new file mode 100644 index 000000000..9cd2ac43d Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CharsetNames.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/Charsets.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/Charsets.class new file mode 100644 index 000000000..0a14fb778 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/Charsets.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.class new file mode 100644 index 000000000..359ad83cb Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingInputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingInputStream.class new file mode 100644 index 000000000..9aa68500f Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingInputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingOutputStream.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingOutputStream.class new file mode 100644 index 000000000..b1d1f3a6c Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/CountingOutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/IOUtils.class b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/IOUtils.class new file mode 100644 index 000000000..9086133e0 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/IOUtils.class differ diff --git a/Tools/Cache Editor/bin/org/apache/commons/compress/utils/package.html b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/package.html new file mode 100644 index 000000000..0409d1267 --- /dev/null +++ b/Tools/Cache Editor/bin/org/apache/commons/compress/utils/package.html @@ -0,0 +1,23 @@ + + + +

Contains utilities used internally by the compress library.

+ + diff --git a/Tools/Cache Editor/bin/org/apache/tools/bzip2/BZip2Constants.class b/Tools/Cache Editor/bin/org/apache/tools/bzip2/BZip2Constants.class new file mode 100644 index 000000000..456839ce4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/tools/bzip2/BZip2Constants.class differ diff --git a/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream$StackElem.class b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream$StackElem.class new file mode 100644 index 000000000..def38cea5 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream$StackElem.class differ diff --git a/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream.class b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream.class new file mode 100644 index 000000000..db7fc9983 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CBZip2OutputStream.class differ diff --git a/Tools/Cache Editor/bin/org/apache/tools/bzip2/CRC.class b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CRC.class new file mode 100644 index 000000000..2d7a8a0c4 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apache/tools/bzip2/CRC.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/FileDescriptor.class b/Tools/Cache Editor/bin/org/apollo/fs/FileDescriptor.class new file mode 100644 index 000000000..1946a6240 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/FileDescriptor.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/FileSystemConstants.class b/Tools/Cache Editor/bin/org/apollo/fs/FileSystemConstants.class new file mode 100644 index 000000000..a1c16f517 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/FileSystemConstants.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/Index.class b/Tools/Cache Editor/bin/org/apollo/fs/Index.class new file mode 100644 index 000000000..4c287caf3 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apollo/fs/Index.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/IndexedFileSystem.class b/Tools/Cache Editor/bin/org/apollo/fs/IndexedFileSystem.class new file mode 100644 index 000000000..c30219229 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/IndexedFileSystem.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/archive/Archive.class b/Tools/Cache Editor/bin/org/apollo/fs/archive/Archive.class new file mode 100644 index 000000000..71aef11db Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/archive/Archive.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/archive/ArchiveEntry.class b/Tools/Cache Editor/bin/org/apollo/fs/archive/ArchiveEntry.class new file mode 100644 index 000000000..870243e66 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/archive/ArchiveEntry.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/archive/package-info.class b/Tools/Cache Editor/bin/org/apollo/fs/archive/package-info.class new file mode 100644 index 000000000..428bb6427 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/archive/package-info.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/package-info.class b/Tools/Cache Editor/bin/org/apollo/fs/package-info.class new file mode 100644 index 000000000..63e08b1db Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/package-info.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/parser/package-info.class b/Tools/Cache Editor/bin/org/apollo/fs/parser/package-info.class new file mode 100644 index 000000000..f93d69871 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/parser/package-info.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/util/ByteBufferUtil.class b/Tools/Cache Editor/bin/org/apollo/fs/util/ByteBufferUtil.class new file mode 100644 index 000000000..7c52e1052 Binary files /dev/null and b/Tools/Cache 
Editor/bin/org/apollo/fs/util/ByteBufferUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/util/CompressionUtil.class b/Tools/Cache Editor/bin/org/apollo/fs/util/CompressionUtil.class new file mode 100644 index 000000000..d10f9e55a Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/util/CompressionUtil.class differ diff --git a/Tools/Cache Editor/bin/org/apollo/fs/util/ZipUtils.class b/Tools/Cache Editor/bin/org/apollo/fs/util/ZipUtils.class new file mode 100644 index 000000000..bd9eb07c0 Binary files /dev/null and b/Tools/Cache Editor/bin/org/apollo/fs/util/ZipUtils.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/CachePacker.class b/Tools/Cache Editor/bin/valkyrion/CachePacker.class new file mode 100644 index 000000000..b462bf867 Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/CachePacker.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/MusicEncoder.class b/Tools/Cache Editor/bin/valkyrion/MusicEncoder.class new file mode 100644 index 000000000..792d37a46 Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/MusicEncoder.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/PackerGUI$1.class b/Tools/Cache Editor/bin/valkyrion/PackerGUI$1.class new file mode 100644 index 000000000..8f238e6ca Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/PackerGUI$1.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/PackerGUI$2.class b/Tools/Cache Editor/bin/valkyrion/PackerGUI$2.class new file mode 100644 index 000000000..6207c106b Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/PackerGUI$2.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/PackerGUI$3.class b/Tools/Cache Editor/bin/valkyrion/PackerGUI$3.class new file mode 100644 index 000000000..1b8d92d30 Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/PackerGUI$3.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/PackerGUI$4.class b/Tools/Cache Editor/bin/valkyrion/PackerGUI$4.class 
new file mode 100644 index 000000000..5025f188e Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/PackerGUI$4.class differ diff --git a/Tools/Cache Editor/bin/valkyrion/PackerGUI.class b/Tools/Cache Editor/bin/valkyrion/PackerGUI.class new file mode 100644 index 000000000..0db098370 Binary files /dev/null and b/Tools/Cache Editor/bin/valkyrion/PackerGUI.class differ diff --git a/Tools/Cache Editor/busy.png b/Tools/Cache Editor/busy.png new file mode 100644 index 000000000..aa0457572 Binary files /dev/null and b/Tools/Cache Editor/busy.png differ diff --git a/Tools/Cache Editor/icon_dump/dds_icon.png b/Tools/Cache Editor/icon_dump/dds_icon.png new file mode 100644 index 000000000..1edc1e1d2 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/dds_icon.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-0.png b/Tools/Cache Editor/icon_dump/icon-0.png new file mode 100644 index 000000000..69a492e64 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-0.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-1.png b/Tools/Cache Editor/icon_dump/icon-1.png new file mode 100644 index 000000000..34dcb11b8 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-1.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-10.png b/Tools/Cache Editor/icon_dump/icon-10.png new file mode 100644 index 000000000..64116eaba Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-10.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-11.png b/Tools/Cache Editor/icon_dump/icon-11.png new file mode 100644 index 000000000..26075e67c Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-11.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-12.png b/Tools/Cache Editor/icon_dump/icon-12.png new file mode 100644 index 000000000..376fdf565 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-12.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-13.png b/Tools/Cache Editor/icon_dump/icon-13.png new file 
mode 100644 index 000000000..11be38d1f Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-13.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-14.png b/Tools/Cache Editor/icon_dump/icon-14.png new file mode 100644 index 000000000..b58d17e2f Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-14.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-15.png b/Tools/Cache Editor/icon_dump/icon-15.png new file mode 100644 index 000000000..0ce3656d5 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-15.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-16.png b/Tools/Cache Editor/icon_dump/icon-16.png new file mode 100644 index 000000000..b253ee0ec Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-16.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-2.png b/Tools/Cache Editor/icon_dump/icon-2.png new file mode 100644 index 000000000..b89d336d4 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-2.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-3.png b/Tools/Cache Editor/icon_dump/icon-3.png new file mode 100644 index 000000000..ae01a4273 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-3.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-4.png b/Tools/Cache Editor/icon_dump/icon-4.png new file mode 100644 index 000000000..8e8d3a970 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-4.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-5.png b/Tools/Cache Editor/icon_dump/icon-5.png new file mode 100644 index 000000000..acf25140f Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-5.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-6.png b/Tools/Cache Editor/icon_dump/icon-6.png new file mode 100644 index 000000000..69cfa1844 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-6.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-7.png b/Tools/Cache Editor/icon_dump/icon-7.png new file mode 100644 index 
000000000..29858022d Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-7.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-8.png b/Tools/Cache Editor/icon_dump/icon-8.png new file mode 100644 index 000000000..edd3dfdda Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-8.png differ diff --git a/Tools/Cache Editor/icon_dump/icon-9.png b/Tools/Cache Editor/icon_dump/icon-9.png new file mode 100644 index 000000000..1bada9395 Binary files /dev/null and b/Tools/Cache Editor/icon_dump/icon-9.png differ diff --git a/Tools/Cache Editor/icons/ATTACK.png b/Tools/Cache Editor/icons/ATTACK.png new file mode 100644 index 000000000..4a113306d Binary files /dev/null and b/Tools/Cache Editor/icons/ATTACK.png differ diff --git a/Tools/Cache Editor/icons/blue.png b/Tools/Cache Editor/icons/blue.png new file mode 100644 index 000000000..082604936 Binary files /dev/null and b/Tools/Cache Editor/icons/blue.png differ diff --git a/Tools/Cache Editor/icons/brown.png b/Tools/Cache Editor/icons/brown.png new file mode 100644 index 000000000..d0ebf662e Binary files /dev/null and b/Tools/Cache Editor/icons/brown.png differ diff --git a/Tools/Cache Editor/icons/canada.png b/Tools/Cache Editor/icons/canada.png new file mode 100644 index 000000000..31ed78cfe Binary files /dev/null and b/Tools/Cache Editor/icons/canada.png differ diff --git a/Tools/Cache Editor/icons/dds_icon.png b/Tools/Cache Editor/icons/dds_icon.png new file mode 100644 index 000000000..184b56de6 Binary files /dev/null and b/Tools/Cache Editor/icons/dds_icon.png differ diff --git a/Tools/Cache Editor/icons/green.png b/Tools/Cache Editor/icons/green.png new file mode 100644 index 000000000..77eb2806e Binary files /dev/null and b/Tools/Cache Editor/icons/green.png differ diff --git a/Tools/Cache Editor/icons/medal.png b/Tools/Cache Editor/icons/medal.png new file mode 100644 index 000000000..6d19a792c Binary files /dev/null and b/Tools/Cache Editor/icons/medal.png differ diff --git a/Tools/Cache 
Editor/icons/orange.png b/Tools/Cache Editor/icons/orange.png new file mode 100644 index 000000000..af0841d78 Binary files /dev/null and b/Tools/Cache Editor/icons/orange.png differ diff --git a/Tools/Cache Editor/icons/pink.png b/Tools/Cache Editor/icons/pink.png new file mode 100644 index 000000000..ba5bb18df Binary files /dev/null and b/Tools/Cache Editor/icons/pink.png differ diff --git a/Tools/Cache Editor/icons/purple.png b/Tools/Cache Editor/icons/purple.png new file mode 100644 index 000000000..fa3a43c05 Binary files /dev/null and b/Tools/Cache Editor/icons/purple.png differ diff --git a/Tools/Cache Editor/icons/rainbow.png b/Tools/Cache Editor/icons/rainbow.png new file mode 100644 index 000000000..f2e9e858d Binary files /dev/null and b/Tools/Cache Editor/icons/rainbow.png differ diff --git a/Tools/Cache Editor/icons/red.png b/Tools/Cache Editor/icons/red.png new file mode 100644 index 000000000..0e1363267 Binary files /dev/null and b/Tools/Cache Editor/icons/red.png differ diff --git a/Tools/Cache Editor/icons/whip_icon.png b/Tools/Cache Editor/icons/whip_icon.png new file mode 100644 index 000000000..050144f76 Binary files /dev/null and b/Tools/Cache Editor/icons/whip_icon.png differ diff --git a/Tools/Cache Editor/icons/world_announce.png b/Tools/Cache Editor/icons/world_announce.png new file mode 100644 index 000000000..f93367794 Binary files /dev/null and b/Tools/Cache Editor/icons/world_announce.png differ diff --git a/Tools/Cache Editor/icons/yellow.png b/Tools/Cache Editor/icons/yellow.png new file mode 100644 index 000000000..354b5c849 Binary files /dev/null and b/Tools/Cache Editor/icons/yellow.png differ diff --git a/Tools/Cache Editor/logo.png b/Tools/Cache Editor/logo.png new file mode 100644 index 000000000..c457d315d Binary files /dev/null and b/Tools/Cache Editor/logo.png differ diff --git a/Tools/Cache Editor/models/43660.dat b/Tools/Cache Editor/models/43660.dat new file mode 100644 index 000000000..bfc505ee8 Binary files /dev/null and 
b/Tools/Cache Editor/models/43660.dat differ diff --git a/Tools/Cache Editor/models/44590.dat b/Tools/Cache Editor/models/44590.dat new file mode 100644 index 000000000..404f207f2 Binary files /dev/null and b/Tools/Cache Editor/models/44590.dat differ diff --git a/Tools/Cache Editor/nazi.png b/Tools/Cache Editor/nazi.png new file mode 100644 index 000000000..f5edafb22 Binary files /dev/null and b/Tools/Cache Editor/nazi.png differ diff --git a/Tools/Cache Editor/oldschool/LIVE/preferences.dat b/Tools/Cache Editor/oldschool/LIVE/preferences.dat new file mode 100644 index 000000000..0a653d802 Binary files /dev/null and b/Tools/Cache Editor/oldschool/LIVE/preferences.dat differ diff --git a/Tools/Cache Editor/src/alex/CacheLoader.java b/Tools/Cache Editor/src/alex/CacheLoader.java new file mode 100644 index 000000000..f96d2402b --- /dev/null +++ b/Tools/Cache Editor/src/alex/CacheLoader.java @@ -0,0 +1,165 @@ +package alex; + +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.util.Arrays; + +import alex.cache.CacheFile; +import alex.cache.FileOnDisk; +import alex.cache.FileSystem; +import alex.cache.SeekableFile; +import alex.cache.loaders.ItemDefinition; +import alex.cache.updateServer.UpdateServer; +import alex.io.Stream; +import alex.util.Methods; + +/* + * ----------\_/-------------- + * ----------/-\-------------- + * -------|-/@.@\-|---------- + * ---------\___/------------- + * ALL CREDITS TO ALEX(DRAGONKK) + * CREATED DATA 15/04/2011 + * @@alex_dkk@hotmail.com@@ + * ---------------------------- + * ---------------------------- + * ---------------------------- + */ +public class CacheLoader { + + private static final String cachePath = "data/cache/"; + public static SeekableFile dataFile; + private static final FileSystem[] fileSystems = new FileSystem[30]; + public static final SeekableFile[] indexFiles = new SeekableFile[getFileSystems().length]; + public static boolean OLD_CACHE; + private static CacheFile 
referenceCache; + private static SeekableFile referenceFile; + + + public static void main(String[] args) { + if (args.length < 2) { + System.out.println("Parameters: isOldCache[bool], preload[bool]"); + return; + } + OLD_CACHE = Boolean.parseBoolean(args[0]); + boolean preload = Boolean.parseBoolean(args[1]); + if (load(preload)) { + makeTests(); + } + } + + public static boolean putItemOnCache(ItemDefinition item) { + return fileSystems[Methods.ITEMDEF_IDX_ID].putFile(item.id >>> 8, 0xff & item.id, null, 2, item.packItemDefinition()); + } + + + public static void makeTests() { + + ItemDefinition dragonkkAgsDefinition = new ItemDefinition(11694); + System.out.println("DragonkkAgs: "+dragonkkAgsDefinition.getName()); + dragonkkAgsDefinition.setName("Dragonkk's AGS"); + dragonkkAgsDefinition.id = Methods.getAmountOfItems(); //a new item :o + dragonkkAgsDefinition.inventoryOptions[0] = "kill Noobs"; + dragonkkAgsDefinition.inventoryOptions[1] = "I love cakes"; + dragonkkAgsDefinition.inventoryOptions[2] = "unban flamable please <3"; + System.out.println("DragonkkAgs Id: "+dragonkkAgsDefinition.id); + System.out.println(putItemOnCache(dragonkkAgsDefinition)); + + + byte[] ukeys = generateUkeysFile(); + System.out.println("UKEYS: "+Arrays.toString(ukeys)); + /*byte[] whipData = fileSystems[19].getFile(4151 >>> 8, 0xff & 4151, null); + if(fileSystems[19].putFile(11694 >>> 8, 0xff & 11694, null, 2, whipData)) + System.out.println("Packed sucefully.");*/ + } + + public static byte[] generateUkeysFile() { + return UpdateServer.getReadyForSendFile(255, 255, 0, generateUkeysContainer()); + } + + public static byte[] generateUkeysContainer() { + Stream stream = new Stream(5+fileSystems.length * 8); + for(int index = 0; index < fileSystems.length; index++) { + if(fileSystems[index] == null) { + stream.putInt(0); + stream.putInt(0); + } + byte[] buffer = CacheLoader.getReferenceCache().readFile(index); + stream.putInt(Methods.getCrc(buffer, buffer.length)); + 
stream.putInt(fileSystems[index].referenceTable.revision); + } + byte[] ukeysFile = new byte[stream.offset]; + stream.offset = 0; + stream.getBytes(ukeysFile, 0, ukeysFile.length); + return ukeysFile; + } + + + private static void createFileSystems() { + for (int id = 0; id < getFileSystems().length; id++) { + if (indexFiles[id] == null) + continue; + boolean discardEntryBuffers = false; + if (id == 5 || id == 6 || id == 23 || id == 26 || id == 28) + discardEntryBuffers = true; + getFileSystems()[id] = new FileSystem(id, discardEntryBuffers, 1); + } + } + + public static FileSystem[] getFileSystems() { + return fileSystems; + } + + public static CacheFile getReferenceCache() { + return referenceCache; + } + + public static boolean load(boolean preload) { + File[] files = new File(cachePath).listFiles(); + for (File file : files) { + if (file.getName().startsWith("main_file_cache.idx")) { + if (file.length() == 0) + continue; + try { + try { + int id = Integer + .parseInt(file.getName().split(".idx")[1]); + if (id == 255) + referenceFile = new SeekableFile(new FileOnDisk(file), 6000, 0); + else if (id < fileSystems.length) + indexFiles[id] = new SeekableFile(new FileOnDisk(file), 6000, 0); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } catch (Exception e) { + e.printStackTrace(); + } + } else if (file.getName().equals("main_file_cache.dat2")) { + try { + dataFile = new SeekableFile(new FileOnDisk(file), 5200, 0); + } catch (FileNotFoundException e) { + e.printStackTrace(); + } catch (IOException e) { + e.printStackTrace(); + } + } + } + if (dataFile == null) + return false; + if (referenceFile == null) + return false; + referenceCache = new CacheFile(255, dataFile, referenceFile, 0x7a120); + createFileSystems(); + if(preload) { + for(int index = 0; index < fileSystems.length; index++) { + if(fileSystems[index] == null) + continue; + fileSystems[index].filesCompleted(); + } + } + return true; + 
} +} diff --git a/Tools/Cache Editor/src/alex/cache/CacheFile.java b/Tools/Cache Editor/src/alex/cache/CacheFile.java new file mode 100644 index 000000000..081345972 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/CacheFile.java @@ -0,0 +1,215 @@ +package alex.cache; + +import java.io.EOFException; +import java.io.IOException; + +import alex.io.Stream; +import alex.util.Methods; + +public class CacheFile { + + public static byte cacheFileBuffer[] = new byte[520]; + + private int cacheId; + private SeekableFile dataFile; + private SeekableFile indexFile; + private int maxLength; + + public CacheFile(int cacheId, SeekableFile dataFile, SeekableFile indexFile, int length) { + this.indexFile = indexFile; + this.maxLength = length; + this.dataFile = dataFile; + this.cacheId = cacheId; + } + + public final byte[] readFile(int file) { + synchronized (dataFile) { + try { + if (indexFile.getFileLength() < (6 * file + 6)) { + return null; + } + indexFile.seek(6 * file); + indexFile.read(CacheFile.cacheFileBuffer, 0, 6); + int fileSize = (CacheFile.cacheFileBuffer[2] & 0xff) + + (((0xff & CacheFile.cacheFileBuffer[0]) << 16) + (CacheFile.cacheFileBuffer[1] << 8 & 0xff00)); + int sector = ((CacheFile.cacheFileBuffer[3] & 0xff) << 16) + - (-(0xff00 & CacheFile.cacheFileBuffer[4] << 8) - (CacheFile.cacheFileBuffer[5] & 0xff)); + if (fileSize < 0 || fileSize > maxLength) { + return null; + } + if (sector <= 0 + || dataFile.getFileLength() / 520L < sector) { + return null; + } + byte buffer[] = new byte[fileSize]; + int dataRead = 0; + int part = 0; + while (fileSize > dataRead) { + if (sector == 0) { + return null; + } + dataFile.seek(520 * sector); + int dataToRead = fileSize - dataRead; + if (dataToRead > 512) { + dataToRead = 512; + } + dataFile.read(CacheFile.cacheFileBuffer, 0, 8 + dataToRead); + int currentFile = (0xff & CacheFile.cacheFileBuffer[1]) + + (0xff00 & CacheFile.cacheFileBuffer[0] << 8); + int currentPart = ((CacheFile.cacheFileBuffer[2] & 0xff) << 8) + + 
(0xff & CacheFile.cacheFileBuffer[3]); + int nextSector = (CacheFile.cacheFileBuffer[6] & 0xff) + + (0xff00 & CacheFile.cacheFileBuffer[5] << 8) + + ((0xff & CacheFile.cacheFileBuffer[4]) << 16); + int currentCache = CacheFile.cacheFileBuffer[7] & 0xff; + if (file != currentFile || currentPart != part + || cacheId != currentCache) { + return null; + } + if (nextSector < 0 + || (dataFile.getFileLength() / 520L) < nextSector) { + return null; + } + for (int l2 = 0; dataToRead > l2; l2++) { + buffer[dataRead++] = CacheFile.cacheFileBuffer[8 + l2]; + } + + part++; + sector = nextSector; + } + return buffer; + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } + } + + @Override + public final String toString() { + return "Cache:" + cacheId; + } + + + public boolean writeFile(int file, int compression, byte[] data, int version, int[] keys) { + byte[] readyFileData = Methods.packContainer(compression, data); + if (keys != null && (~keys[0] != -1 || keys[1] != 0 || keys[2] != 0 || ~keys[3] != -1)) { + Stream stream = new Stream(readyFileData); + stream.encodeXTEA(keys); + } + readyFileData[readyFileData.length - 2] = (byte) (version >>> 8); + readyFileData[readyFileData.length - 1] = (byte) version; + return writeFile(file, readyFileData, readyFileData.length); + } + + private final boolean writeFile(int file, byte buffer[], int fileSize) { + synchronized (dataFile) { + if (fileSize < 0 || maxLength < fileSize) { + throw new IllegalArgumentException(); + } + boolean succ = writeFile(file, buffer, fileSize, true); + if (!succ) { + succ = writeFile(file, buffer, fileSize, false); + } + return succ; + } + } + + private final boolean writeFile(int file, byte buffer[], int fileSize, + boolean exists) { + synchronized (dataFile) { + try { + int sector; + if (!exists) { + sector = (int) ((dataFile.getFileLength() + 519L) / 520L); + if (sector == 0) { + sector = 1; + } + } else { + if ((6 * file + 6) > indexFile.getFileLength()) { + return false; + } + 
indexFile.seek(file * 6); + indexFile.read(CacheFile.cacheFileBuffer, 0, 6); + sector = (CacheFile.cacheFileBuffer[5] & 0xff) + + (((CacheFile.cacheFileBuffer[4] & 0xff) << 8) + (CacheFile.cacheFileBuffer[3] << 16 & 0xff0000)); + if (sector <= 0 + || sector > dataFile.getFileLength() / 520L) { + return false; + } + } + CacheFile.cacheFileBuffer[1] = (byte) (fileSize >> 8); + CacheFile.cacheFileBuffer[3] = (byte) (sector >> 16); + CacheFile.cacheFileBuffer[2] = (byte) fileSize; + CacheFile.cacheFileBuffer[0] = (byte) (fileSize >> 16); + CacheFile.cacheFileBuffer[4] = (byte) (sector >> 8); + CacheFile.cacheFileBuffer[5] = (byte) sector; + indexFile.seek(file * 6); + indexFile.write(CacheFile.cacheFileBuffer, 0, 6); + int dataWritten = 0; + for (int part = 0; dataWritten < fileSize; part++) { + int nextSector = 0; + if (exists) { + dataFile.seek(sector * 520); + try { + dataFile.read(CacheFile.cacheFileBuffer, 0, 8); + } catch (EOFException e) { + e.printStackTrace(); + break; + } + int currentFile = (0xff & CacheFile.cacheFileBuffer[1]) + + (0xff00 & CacheFile.cacheFileBuffer[0] << 8); + int currentPart = (0xff & CacheFile.cacheFileBuffer[3]) + + (0xff00 & CacheFile.cacheFileBuffer[2] << 8); + nextSector = ((0xff & CacheFile.cacheFileBuffer[4]) << 16) + + (((0xff & CacheFile.cacheFileBuffer[5]) << 8) + (0xff & CacheFile.cacheFileBuffer[6])); + int currentCache = CacheFile.cacheFileBuffer[7] & 0xff; + if (currentFile != file || part != currentPart + || cacheId != currentCache) { + return false; + } + if (nextSector < 0 + || dataFile.getFileLength() / 520L < nextSector) { + return false; + } + } + if (nextSector == 0) { + exists = false; + nextSector = (int) ((dataFile.getFileLength() + 519L) / 520L); + if (nextSector == 0) { + nextSector++; + } + if (nextSector == sector) { + nextSector++; + } + } + CacheFile.cacheFileBuffer[3] = (byte) part; + if (fileSize - dataWritten <= 512) { + nextSector = 0; + } + CacheFile.cacheFileBuffer[0] = (byte) (file >> 8); + 
CacheFile.cacheFileBuffer[1] = (byte) file; + CacheFile.cacheFileBuffer[2] = (byte) (part >> 8); + CacheFile.cacheFileBuffer[7] = (byte) cacheId; + CacheFile.cacheFileBuffer[4] = (byte) (nextSector >> 16); + CacheFile.cacheFileBuffer[5] = (byte) (nextSector >> 8); + CacheFile.cacheFileBuffer[6] = (byte) nextSector; + dataFile.seek(sector * 520); + dataFile.write(CacheFile.cacheFileBuffer, 0, 8); + int dataToWrite = fileSize - dataWritten; + if (dataToWrite > 512) { + dataToWrite = 512; + } + dataFile.write(buffer, dataWritten, dataToWrite); + dataWritten += dataToWrite; + sector = nextSector; + } + return true; + } catch (IOException e) { + e.printStackTrace(); + } + return false; + } + } + +} diff --git a/Tools/Cache Editor/src/alex/cache/CacheFileWorker.java b/Tools/Cache Editor/src/alex/cache/CacheFileWorker.java new file mode 100644 index 000000000..04668a914 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/CacheFileWorker.java @@ -0,0 +1,44 @@ +package alex.cache; + +import alex.CacheLoader; +import alex.util.Methods; + +public class CacheFileWorker { + + private CacheFile cache; + private int id; + private ReferenceTable referenceTable; + private int tableVersion; + public CacheFileWorker(int id) { + cache = new CacheFile(id, CacheLoader.dataFile, CacheLoader.indexFiles[id], 0xf4240); + this.id = id; + byte[] buffer = CacheLoader.getReferenceCache().readFile(id); + tableVersion = (buffer[buffer.length - 2] << 8 & 0xff00) + (buffer[-1 + buffer.length] & 0xff); + referenceTable = new ReferenceTable(buffer); + } + + public byte[] getFileBuffer(int file) { + return cache.readFile(file); + } + + public ReferenceTable getReferenceTable() { + return referenceTable; + } + + public int generateTableFileVersion() { + tableVersion++; + return tableVersion; + } + + public int getTableFileVersion() { + return tableVersion; + } + + public boolean putFile(int fileId, int compression, byte[] data, int version) { + return putFile(fileId, compression, data, version, 
null); + } + + public boolean putFile(int fileId, int compression, byte[] data, int version, int[] keys) { + return cache.writeFile(fileId, compression, data, version, keys); + } +} diff --git a/Tools/Cache Editor/src/alex/cache/FileOnDisk.java b/Tools/Cache Editor/src/alex/cache/FileOnDisk.java new file mode 100644 index 000000000..b7599f6a9 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/FileOnDisk.java @@ -0,0 +1,70 @@ +package alex.cache; + +import java.io.EOFException; +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; + +public class FileOnDisk { + + private RandomAccessFile file; + private long length; + private long position; + private File wrappedFile; + + public FileOnDisk(File wrappedFile) throws IOException { + this.wrappedFile = wrappedFile; + file = new RandomAccessFile(wrappedFile, "rw"); + length = getFileLength(); + } + + public final void close() throws IOException { + if (file != null) { + file.close(); + file = null; + } + } + + @Override + protected final void finalize() throws Throwable { + if (file != null) { + System.out + .println("Warning! fileondisk " + + wrappedFile + + " not closed correctly using close(). Auto-closing instead. 
"); + close(); + } + } + + public final long getFileLength() throws IOException { + return file.length(); + } + + public final File getWrappedFile() { + return wrappedFile; + } + + public final int read(byte buffer[], int off, int len) throws IOException { + int k = file.read(buffer, off, len); + if (k > 0) { + position += k; + } + return k; + } + + public final void seek(long l) throws IOException { + file.seek(l); + position = l; + } + + public final void write(byte buffer[], int off, int len) throws IOException { + //we gonna write so size wil get bigger + /*if (length < len + position) { + file.seek(length); + file.write(1); + throw new EOFException(); + }*/ + file.write(buffer, off, len); + position += len; + } +} diff --git a/Tools/Cache Editor/src/alex/cache/FileSystem.java b/Tools/Cache Editor/src/alex/cache/FileSystem.java new file mode 100644 index 000000000..b34e5c56c --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/FileSystem.java @@ -0,0 +1,576 @@ +package alex.cache; + +import alex.CacheLoader; +import alex.io.Stream; +import alex.util.Methods; + +public class FileSystem { + + private Object childBuffers[][]; + private boolean discardEntryBuffers; + private int discardUnpacked; + private Object entryBuffers[]; + private int id; + public ReferenceTable referenceTable; + public CacheFileWorker worker; + + public FileSystem(int id, boolean discardEntryBuffers, int discardUnpacked) { + if (discardUnpacked < 0 || discardUnpacked > 2) + throw new IllegalArgumentException("js5: Invalid value " + + discardUnpacked + " supplied for discardunpacked"); + this.id = id; + this.discardEntryBuffers = discardEntryBuffers; + this.discardUnpacked = discardUnpacked; + worker = new CacheFileWorker(id); + referenceTable = worker.getReferenceTable(); + entryBuffers = new Object[referenceTable.entryIndexCount]; + childBuffers = new Object[referenceTable.entryIndexCount][]; + } + + public void clearChildBuffer(int file) { + if (childBuffers != null) { + 
childBuffers[file] = null; + } + } + + public void clearChildBuffers() { + if (childBuffers != null) { + for (int i = 0; childBuffers.length > i; i++) { + childBuffers[i] = null; + } + } + } + + public void clearEntryBuffers() { + if (entryBuffers != null) { + for (int i = 0; i < entryBuffers.length; i++) { + entryBuffers[i] = null; + } + } + } + + public void clearIdentifiers(boolean children, boolean entries) { + if (children) { + referenceTable.childIdentTables = null; + referenceTable.childIdentifiers = null; + } + if (entries) { + referenceTable.entryIdentifiers = null; + referenceTable.entryIdentTable = null; + } + } + + public boolean filesCompleted() { + boolean complete = true; + for (int index = 0; referenceTable.entryIndices.length > index; index++) { + int file = referenceTable.entryIndices[index]; + if (entryBuffers[file] == null) { + loadBuffer(file); + if (entryBuffers[file] == null) + complete = false; + } + } + return complete; + } + + public int getChildCount() { + return referenceTable.childIndexCounts.length; + } + + public int getChildIndexCount(int file) { + if (!validEntryIndex(file)) { + return 0; + } + return referenceTable.childIndexCounts[file]; + } + + final int[] getChildIndices(int file) { + int childIndices[] = referenceTable.childIndices[file]; + if (childIndices == null) { + childIndices = new int[referenceTable.entryChildCounts[file]]; + for (int index = 0; childIndices.length > index; index++) + childIndices[index] = index; + } + return childIndices; + } + + public byte[] getFile(int file) { + if (referenceTable.childIndexCounts.length == 1) { + return getFile(0, file); + } + if (!validEntryIndex(file)) { + return null; + } + if (referenceTable.childIndexCounts[file] == 1) { + return getFile(file, 0); + } else { + throw new RuntimeException(); + } + } + + + + public byte[] getFile(int file, int child) { + return getFile(file, child, null); + } + + + /* + * packs all container, havnt finished this just use the other putfile which 
is fully done + */ + public boolean putFile(int fileId, int compression, byte[] unpackedContainer) { + int version = referenceTable.entryVersions[fileId]+1; + if(worker.putFile(fileId, compression, unpackedContainer, version)) { + referenceTable.entryVersions[fileId] = version; + byte[] packedBuffer = worker.getFileBuffer(fileId); + Methods.CRC32.reset(); + Methods.CRC32.update(packedBuffer, 0, packedBuffer.length-2); + int crc = (int) Methods.CRC32.getValue(); + referenceTable.entryCrcs[fileId] = crc; + byte[] packedTable = referenceTable.packTable(); + return CacheLoader.getReferenceCache().writeFile(id, 2, packedTable, worker.generateTableFileVersion(), null); + } + return false; + } + public byte[] getFile(int file, int child, int keys[]) { + if (!validIndices(file, child)) { + return null; + } + if (childBuffers[file] == null || childBuffers[file][child] == null) { + boolean prepared = prepareChildBuffers(file, child, keys); + if (!prepared) { + loadBuffer(file); + boolean prepared1 = prepareChildBuffers(file, child, keys); + if (!prepared1) { + return null; + } + } + } + byte unwrapped[] = (byte[]) childBuffers[file][child]; + if (discardUnpacked != 1) { + if (discardUnpacked == 2) { + childBuffers[file] = null; + } + } else { + childBuffers[file][child] = null; + if (referenceTable.childIndexCounts[file] == 1) { + childBuffers[file] = null; + } + } + return unwrapped; + } + + public boolean putFile(int fileId, int child, int[] keys, int compression, byte[] data) { + return putFile(fileId, child, keys, compression, data, null, null); + } + public boolean putFile(int fileId, int childId, int[] keys, int compression, byte[] data, String fileName, String childName) { + if(!validEntryIndex(fileId)) + referenceTable.expandTable(fileId+1); + int oldChildCount = referenceTable.entryChildCounts[fileId]; + if (!validIndices(fileId, childId)) + referenceTable.expandTableChilds(fileId, childId+1); //gonna create thid now + int childCount = 
referenceTable.entryChildCounts[fileId]; + if (!validIndices(fileId, childId)) { + return false; + } + byte[] unpackedContainer; + if (childCount > 1) { + byte childBufferData[][] = null; + if(oldChildCount > 0) { + byte[] unpackedData = Methods.unpackContainer(worker.getFileBuffer(fileId)); + int length = unpackedData.length; + int amtOfLoops = 0xff & unpackedData[--length]; + length -= amtOfLoops * (oldChildCount * 4); + Stream stream = new Stream(unpackedData); + int childBufferLength[] = new int[oldChildCount]; + stream.offset = length; + for (int l2 = 0; l2 < amtOfLoops; l2++) { + int offset = 0; + for (int childIndex = 0; oldChildCount > childIndex; childIndex++) { + offset += stream.getInt(); + childBufferLength[childIndex] += offset; + } + } + childBufferData = new byte[oldChildCount][]; + for (int childIndex = 0; childIndex < oldChildCount; childIndex++) { + childBufferData[childIndex] = new byte[childBufferLength[childIndex]]; + childBufferLength[childIndex] = 0; + } + stream.offset = length; + int unpackedOff = 0; + for (int loop = 0; amtOfLoops > loop; loop++) { + int dataRead = 0; + for (int childIndex = 0; oldChildCount > childIndex; childIndex++) { + dataRead += stream.getInt(); + System.arraycopy(unpackedData, unpackedOff, childBufferData[childIndex], childBufferLength[childIndex],dataRead); + unpackedOff += dataRead; + childBufferLength[childIndex] += dataRead; + } + } + } + //we setted new data + Stream outStream = new Stream(250000); + int amtOfLoops = 1; //dont change this + byte[][] childsData = new byte[childCount][]; + if(childBufferData != null) + for(int index = 0; index < oldChildCount; index++) + childsData[index] = childBufferData[index]; + childsData[childId] = data; + //added files data + for(int index = 0; index < childCount; index++) { + if(childsData[index] != null) { + for(int i = 0; i < childsData[index].length; i++) { + outStream.putByte(childsData[index][i]); + } + } + } + //added files lengths + int lastLength = 0; + for(int 
index = 0; index < childCount; index++) { + outStream.putInt((childsData[index] == null ? 0 : childsData[index].length)-lastLength); + lastLength = childsData[index] == null ? 0 : childsData[index].length; + } + outStream.putByte(amtOfLoops); + unpackedContainer = new byte[outStream.offset]; + outStream.offset = 0; + outStream.getBytes(unpackedContainer, 0, unpackedContainer.length); + }else + unpackedContainer = data; + int version = referenceTable.entryVersions[fileId]+1; + if(worker.putFile(fileId, compression, unpackedContainer, version, keys)) { + referenceTable.entryVersions[fileId] = version; + byte[] packedBuffer = worker.getFileBuffer(fileId); + Methods.CRC32.reset(); + Methods.CRC32.update(packedBuffer, 0, packedBuffer.length-2); + referenceTable.entryCrcs[fileId] = (int) Methods.CRC32.getValue(); + if(referenceTable.identifierFlag != 0) { + referenceTable.entryIdentifiers[fileId] = fileName == null ? -1 : Methods.hashFile(fileName); + referenceTable.childIdentifiers[fileId][childId] = childName == null ? 
-1 : Methods.hashFile(childName); + } + CacheLoader.getReferenceCache().writeFile(id, 2, referenceTable.packTable(), worker.generateTableFileVersion(), null); + } + + return true; + } + + private final boolean prepareChildBuffers(int file, int child, int keys[]) { + if (!validEntryIndex(file)) { + return false; + } + if (entryBuffers[file] == null) { + return false; + } + int childCount = referenceTable.entryChildCounts[file]; + int childIndices[] = referenceTable.childIndices[file]; + if (childBuffers[file] == null) { + childBuffers[file] = new Object[referenceTable.childIndexCounts[file]]; + } + Object buffers[] = childBuffers[file]; + boolean prepared = true; + for (int childIndex = 0; childCount > childIndex; childIndex++) { + int childIndice; + if (childIndices == null) { + childIndice = childIndex; + } else { + childIndice = childIndices[childIndex]; + } + if (buffers[childIndice] != null) { + continue; + } + prepared = false; + break; + } + + if (prepared) { + return true; + } + byte unwrapped[]; + if (keys != null && (~keys[0] != -1 || keys[1] != 0 || keys[2] != 0 || ~keys[3] != -1)) { + unwrapped = Methods.copyBuffer((byte[]) entryBuffers[file]);// Methods.unwrapBuffer(entryBuffers[file], + // true); + Stream stream = new Stream(unwrapped); + stream.decodeXTEA(keys, 5, stream.payload.length); + } else { + unwrapped = (byte[]) entryBuffers[file];// Methods.unwrapBuffer(entryBuffers[file], + // false); + } + byte unpackedData[]; + try { + unpackedData = Methods.unpackContainer(unwrapped); + } catch (RuntimeException runtimeexception) { + throw runtimeexception; + } + if (discardEntryBuffers) { + entryBuffers[file] = null; + } + if (childCount > 1) { + if (discardUnpacked != 2) { + int length = unpackedData.length; + int amtOfLoops = 0xff & unpackedData[--length]; + length -= amtOfLoops * (childCount * 4); + Stream stream = new Stream(unpackedData); + int childBufferOffset[] = new int[childCount]; + stream.offset = length; + for (int l2 = 0; l2 < amtOfLoops; 
l2++) { + int childLength = 0; + for (int childIndex = 0; childCount > childIndex; childIndex++) { + childLength += stream.getInt(); + // System.out.println(childLength); + childBufferOffset[childIndex] += childLength; + // System.out.println(offset); + } + + } + + byte childBufferData[][] = new byte[childCount][]; + for (int childIndex = 0; childIndex < childCount; childIndex++) { + childBufferData[childIndex] = new byte[childBufferOffset[childIndex]]; + childBufferOffset[childIndex] = 0; + } + stream.offset = length; + int unpackedOff = 0; + for (int loop = 0; amtOfLoops > loop; loop++) { + int dataRead = 0; + for (int childIndex = 0; childCount > childIndex; childIndex++) { + dataRead += stream.getInt(); + System.arraycopy(unpackedData, unpackedOff, childBufferData[childIndex], childBufferOffset[childIndex],dataRead); + unpackedOff += dataRead; + childBufferOffset[childIndex] += dataRead; + } + } + + for (int index = 0; childCount > index; index++) { + int childIndice; + if (childIndices != null) { + childIndice = childIndices[index]; + } else { + childIndice = index; + } + if (discardUnpacked != 0) { + buffers[childIndice] = childBufferData[index]; + } else { + buffers[childIndice] = childBufferData[index];// Methods.wrapBuffer(childBufs[j6], + // false); + } + } + + //after here useless + } else { + int unpackedLength = unpackedData.length; + int lastUnpackedByte = unpackedData[--unpackedLength] & 0xff; + unpackedLength -= lastUnpackedByte * childCount * 4; + Stream stream_2 = new Stream(unpackedData); + int childOffset = 0; + stream_2.offset = unpackedLength; + int childIndice = 0; + for (int k3 = 0; k3 < lastUnpackedByte; k3++) { + int dataLength = 0; + for (int childIndex = 0; childCount > childIndex; childIndex++) { + dataLength += stream_2.getInt(); + int thisChildIndice; + if (childIndices != null) { + thisChildIndice = childIndices[childIndex]; + } else { + thisChildIndice = childIndex; + } + if (child == thisChildIndice) { + childIndice = 
thisChildIndice; + childOffset += dataLength; + } + } + + } + + if (childOffset == 0) { + return true; + } + byte childBufferData[] = new byte[childOffset]; + stream_2.offset = unpackedLength; + childOffset = 0; + int unpackedOffset = 0; + for (int l5 = 0; l5 < lastUnpackedByte; l5++) { + int dataLength = 0; + for (int childIndex = 0; childIndex < childCount; childIndex++) { + dataLength += stream_2.getInt(); + int thisChildIndice; + if (childIndices == null) { + thisChildIndice = childIndex; + } else { + thisChildIndice = childIndices[childIndex]; + } + if (thisChildIndice == child) { + System.arraycopy(unpackedData, unpackedOffset, childBufferData, childOffset, dataLength); + childOffset += dataLength; + } + unpackedOffset += dataLength; + } + + } + + buffers[childIndice] = childBufferData; + } + } else { + int l1; + if (childIndices != null) { + l1 = childIndices[0]; + } else { + l1 = 0; + } + if (discardUnpacked == 0) { + buffers[l1] = unpackedData;// Methods.wrapBuffer(unpacked, false); + } else { + buffers[l1] = unpackedData; + } + } + return true; + } + + public byte[] getFile(String fileName, String childName) { + fileName = fileName.toLowerCase(); + childName = childName.toLowerCase(); + int file = referenceTable.entryIdentTable.lookupIdentifier(Methods.hashFile(fileName)); + if (!validEntryIndex(file)) { + return null; + } else { + int child = referenceTable.childIdentTables[file].lookupIdentifier(Methods.hashFile(childName)); + return getFile(file, child); + } + } + + private final int getFileCompletion(int file) { + if (entryBuffers[file] != null) { + return 100; + } else { + return 0; + } + } + + public int getFileCompletion(String name) { + name = name.toLowerCase(); + int file = referenceTable.entryIdentTable.lookupIdentifier(Methods + .hashFile(name)); + return getFileCompletion(file); + } + + public int getFileIndex(int ident) { + int file = referenceTable.entryIdentTable.lookupIdentifier(ident); + if (!validEntryIndex(file)) { + return -1; + } + 
return file; + } + + public int getFileIndex(String name) { + name = name.toLowerCase(); + int index = referenceTable.entryIdentTable.lookupIdentifier(Methods + .hashFile(name)); + if (!validEntryIndex(index)) + return -1; + else + return index; + } + + public int getReferenceCrc() { + return referenceTable.crc; + } + + public int getTotalCompletion() { + int total = 0; + int completed = 0; + for (int k = 0; k < entryBuffers.length; k++) { + if (referenceTable.entryChildCounts[k] > 0) { + total += 100; + completed += getFileCompletion(k); + } + } + + if (total == 0) { + return 100; + } else { + return (completed * 100) / total; + } + } + + public boolean hasEntryBuffer(int file) { + if (referenceTable.childIndexCounts.length == 1) { + return hasEntryBuffer(0, file); + } + if (!validEntryIndex(file)) { + return false; + } + if (referenceTable.childIndexCounts[file] == 1) { + return hasEntryBuffer(file, 0); + } else { + throw new RuntimeException(); + } + } + + public boolean hasEntryBuffer(int file, int child) { + if (!validIndices(file, child)) { + return false; + } + if (childBuffers[file] != null && childBuffers[file][child] != null) { + return true; + } + if (entryBuffers[file] != null) { + return true; + } + loadBuffer(file); + return entryBuffers[file] != null; + } + + final boolean hasEntryBuffer(String fileName, String childName) { + fileName = fileName.toLowerCase(); + childName = childName.toLowerCase(); + int file = referenceTable.entryIdentTable.lookupIdentifier(Methods + .hashFile(fileName)); + if (!validEntryIndex(file)) { + return false; + } + int child = referenceTable.childIdentTables[file] + .lookupIdentifier(Methods.hashFile(childName)); + return hasEntryBuffer(file, child); + } + + final boolean hasFile(String name) { + name = name.toLowerCase(); + int file = referenceTable.entryIdentTable.lookupIdentifier(Methods + .hashFile(name)); + return file >= 0; + } + + private boolean hasFileBuffer(int file) { + if (!validEntryIndex(file)) + return 
false; + if (entryBuffers[file] != null) + return true; + loadBuffer(file); + return entryBuffers[file] != null; + } + + private boolean hasFileBuffer(String name) { + name = name.toLowerCase(); + int file = referenceTable.entryIdentTable.lookupIdentifier(Methods + .hashFile(name)); + return hasFileBuffer(file); + } + + public void loadBuffer(int file) { + entryBuffers[file] = worker.getFileBuffer(file); + } + + private final boolean validEntryIndex(int file) { + if (file < 0 || referenceTable.childIndexCounts.length <= file + || referenceTable.childIndexCounts[file] == 0) + return false; + return true; + } + + private boolean validIndices(int file, int child) { + if (file < 0 || child < 0 + || referenceTable.childIndexCounts.length <= file + || child >= referenceTable.childIndexCounts[file]) + return false; + return true; + } +} diff --git a/Tools/Cache Editor/src/alex/cache/ReferenceTable.java b/Tools/Cache Editor/src/alex/cache/ReferenceTable.java new file mode 100644 index 000000000..3127bde59 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/ReferenceTable.java @@ -0,0 +1,333 @@ +package alex.cache; + +import alex.CacheLoader; +import alex.io.Stream; +import alex.util.LookupTable; +import alex.util.Methods; + +public class ReferenceTable { + + int childIdentifiers[][]; + LookupTable childIdentTables[]; + int childIndexCounts[]; + int childIndices[][]; + int crc; + int entryChildCounts[]; + private int entryCount; + int entryCrcs[]; + int entryIdentifiers[]; + LookupTable entryIdentTable; + int entryIndexCount; + int entryIndices[]; + public int entryVersions[]; + public int revision; + private int protocol; + int identifierFlag; + private boolean needRevisionUpdate; + + public ReferenceTable(byte buffer[]) { + crc = Methods.getCrc(buffer, buffer.length); + unpackTable(buffer); + } + /* + * notice if we make it smaller than actualy is we will loss alot of files information + * + */ + + + public byte[] packTable() { + Stream stream = new Stream(2500000); + 
if (CacheLoader.OLD_CACHE) { + stream.putByte(protocol); + if (protocol >= 6) { + if(needRevisionUpdate) + revision++; + stream.putInt(revision); + } + stream.putByte(identifierFlag); + stream.putShort(entryCount); + int lastEntryOffset = 0; + for (int i = 0; entryCount > i; i++) { + stream.putShort(entryIndices[i] - lastEntryOffset); + lastEntryOffset = entryIndices[i]; + } + if (identifierFlag != 0) { + for (int index = 0; entryCount > index; index++) { + stream.putInt(entryIdentifiers[entryIndices[index]]); + } + } + for (int index = 0; index < entryCount; index++) { + stream.putInt(entryCrcs[entryIndices[index]]); + } + for (int index = 0; index < entryCount; index++) { + stream.putInt(entryVersions[entryIndices[index]]); + } + for (int index = 0; index < entryCount; index++) { + stream.putShort(entryChildCounts[entryIndices[index]]); + } + for (int index = 0; entryCount > index; index++) { + int indice = entryIndices[index]; + int lastEntryChildOffset = 0; + for (int childIndex = 0; entryChildCounts[indice] > childIndex; childIndex++) { + int nextChildIndice = childIndices[indice] != null ? 
childIndices[indice][childIndex] : childIndex; + stream.putShort(nextChildIndice - lastEntryChildOffset); + lastEntryChildOffset = nextChildIndice; + } + } + if (identifierFlag != 0) { + for (int index = 0; index < entryCount; index++) { + int indice = entryIndices[index]; + int entryChildCount = entryChildCounts[indice]; + for (int childIndex = 0; childIndex < entryChildCount; childIndex++) { + int childIndice; + if (childIndices[indice] != null) { + childIndice = childIndices[indice][childIndex]; + } else { + childIndice = childIndex; + } + stream.putInt(childIdentifiers[indice][childIndice]); + } + } + } + } + byte[] buffer = new byte[stream.offset]; + stream.offset = 0; + stream.getBytes(buffer, 0, buffer.length); + needRevisionUpdate = false; + return buffer; + } + + public void expandTable(int newEntryCount) { + int[] newEntryIndices = new int[newEntryCount]; + int count = entryIndexCount - 1; //the old count + //copys the indices and creates new indices + System.arraycopy(entryIndices, 0, newEntryIndices, 0, entryIndices.length); + for(int index = entryIndices.length; index < newEntryIndices.length; index++) { + newEntryIndices[index] = index == 0 ? 
1 : newEntryIndices[index-1]+1; + if (newEntryIndices[index] > count) + count = newEntryIndices[index]; + } + + //creates new stuff with new size + int newEntryIndexCount = count + 1; + int[] newChildIndexCounts = new int[newEntryIndexCount]; + int[][] newChildIndices = new int[newEntryIndexCount][]; + int[] newEntryVersions = new int[newEntryIndexCount]; + int[] newEntryCrcs = new int[newEntryIndexCount]; + int[] newEntryChildCounts = new int[newEntryIndexCount]; + LookupTable newEntryIdentTable = null; + int[] newEntryIdentifiers = null; + + if (identifierFlag != 0) { + newEntryIdentifiers = new int[newEntryIndexCount]; + //sets default identifiers + for (int l1 = 0; l1 < newEntryIndexCount; l1++) { + newEntryIdentifiers[l1] = -1; + } + //copys the old entry identifiers + System.arraycopy(entryIdentifiers, 0, newEntryIdentifiers, 0, entryIdentifiers.length); + newEntryIdentTable = new LookupTable(newEntryIdentifiers); + } + + //copys the old entrycrcs + System.arraycopy(entryCrcs, 0, newEntryCrcs, 0, entryCrcs.length); + //copys the old entryVersions + System.arraycopy(entryVersions, 0, newEntryVersions, 0, entryVersions.length); + //copys the old entryChildCounts + System.arraycopy(entryChildCounts, 0, newEntryChildCounts, 0, entryChildCounts.length); + + for (int index = 0; newEntryCount > index; index++) { + int indice = newEntryIndices[index]; + if(childIndices.length > indice) { + int entryChildCount = newEntryChildCounts[indice]; + for (int childIndex = 0; entryChildCount > childIndex; childIndex++) + newChildIndices[indice] = childIndices[indice]; + newChildIndexCounts[index] = childIndexCounts[indice]; + }else{ + int entryChildCount = newEntryChildCounts[indice]; + newChildIndices[indice] = new int[entryChildCount]; + newChildIndexCounts[index] = 1; + } + } + + LookupTable[] newChildIdentTables = null; + int[][] newChildIdentifiers = null; + if (identifierFlag != 0) { + newChildIdentifiers = new int[1 + count][]; + newChildIdentTables = new LookupTable[1 
+ count]; + for (int index = 0; index < newEntryCount; index++) { + int indice = newEntryIndices[index]; + int entryChildCount = newEntryChildCounts[indice]; + newChildIdentifiers[indice] = new int[newChildIndexCounts[indice]]; + for (int childIndex = 0; childIndex < newChildIndexCounts[indice]; childIndex++) { + newChildIdentifiers[indice][childIndex] = -1; + } + for (int childIndex = 0; childIndex < entryChildCount; childIndex++) { + int childIndice; + if (newChildIndices[indice] != null) { + childIndice = newChildIndices[indice][childIndex]; + } else { + childIndice = childIndex; + } + if(newChildIdentifiers.length > indice) + newChildIdentifiers[indice][childIndice] = childIdentifiers[indice][childIndice]; + + } + newChildIdentTables[indice] = new LookupTable(newChildIdentifiers[indice]); + } + } + + //sets the new entrys that were expanded + entryCount = newEntryCount; + entryIndices = newEntryIndices; + entryIndexCount = newEntryIndexCount; + childIndexCounts = newChildIndexCounts; + childIndices = newChildIndices; + entryVersions = newEntryVersions; + entryCrcs = newEntryCrcs; + entryChildCounts = newEntryChildCounts; + entryIdentTable = newEntryIdentTable; + entryIdentifiers = newEntryIdentifiers; + childIdentTables = newChildIdentTables; + childIdentifiers = newChildIdentifiers; + //on end + + needRevisionUpdate = true; + } + + public void expandTableChilds(int indice, int entryChildCount) { + int[] newChildIndices = new int[entryChildCount]; + int count = childIndexCounts[indice] - 1; + if(childIndices[indice] != null) + System.arraycopy(childIndices[indice], 0, newChildIndices, 0, childIndices[indice].length); + for(int index = childIndices[indice] == null ? 0 : childIndices[indice].length; index < newChildIndices.length; index++) { + newChildIndices[index] = index == 0 ? 
1 : newChildIndices[index-1]+1; + if (newChildIndices[index] > count) + count = newChildIndices[index]; + + } + int newChildIndexCounts = count+1; + int[] newChildIdentifiers = null; + LookupTable newChildIdentTable = null; + if (identifierFlag != 0) { + newChildIdentifiers = new int[newChildIndexCounts]; + //sets default identifiers + for (int l1 = 0; l1 < newChildIndexCounts; l1++) { + newChildIdentifiers[l1] = -1; + } + //copys the old entry identifiers + if(childIdentifiers[indice] != null) + System.arraycopy(childIdentifiers[indice], 0, newChildIdentifiers, 0, childIdentifiers[indice].length); + newChildIdentTable = new LookupTable(newChildIdentifiers); + childIdentTables[indice] = newChildIdentTable; + childIdentifiers[indice] = newChildIdentifiers; + } + childIndices[indice] = newChildIndices; + childIndexCounts[indice] = newChildIndexCounts; + entryChildCounts[indice] = entryChildCount; + } + + private void unpackTable(byte buffer[]) { + if (CacheLoader.OLD_CACHE) { + Stream stream = new Stream(Methods.unpackContainer(buffer)); + protocol = stream.getUByte(); + if (protocol != 5 && protocol != 6) { + throw new RuntimeException(); + } + if (protocol < 6) { + revision = 0; + } else { + revision = stream.getInt(); + } + identifierFlag = stream.getUByte(); + entryCount = stream.getUShort(); + int offset = 0; + entryIndices = new int[entryCount]; + int count = -1; + for (int index = 0; entryCount > index; index++) { + entryIndices[index] = offset += stream.getUShort(); + if (entryIndices[index] > count) { + count = entryIndices[index]; + } + } + + entryIndexCount = count + 1; + childIndexCounts = new int[entryIndexCount]; + childIndices = new int[entryIndexCount][]; + entryVersions = new int[entryIndexCount]; + entryCrcs = new int[entryIndexCount]; + entryChildCounts = new int[entryIndexCount]; + if (identifierFlag != 0) { + entryIdentifiers = new int[entryIndexCount]; + for (int l1 = 0; l1 < entryIndexCount; l1++) { + entryIdentifiers[l1] = -1; + } + + for (int 
index = 0; entryCount > index; index++) { + entryIdentifiers[entryIndices[index]] = stream.getInt(); + } + + entryIdentTable = new LookupTable(entryIdentifiers); + } + for (int index = 0; index < entryCount; index++) { + entryCrcs[entryIndices[index]] = stream.getInt(); + } + + for (int index = 0; index < entryCount; index++) { + entryVersions[entryIndices[index]] = stream.getInt(); + } + + for (int index = 0; index < entryCount; index++) { + entryChildCounts[entryIndices[index]] = stream.getUShort(); + } + + for (int index = 0; entryCount > index; index++) { + int indice = entryIndices[index]; + int childOffset = 0; + int entryChildCount = entryChildCounts[indice]; + childIndices[indice] = new int[entryChildCount]; + int childCount = -1; + for (int childIndex = 0; entryChildCount > childIndex; childIndex++) { + int childIndice = childIndices[indice][childIndex] = childOffset += stream.getUShort(); + if (childIndice > childCount) { + childCount = childIndice; + } + } + + childIndexCounts[indice] = childCount + 1; + if ((childCount + 1) == entryChildCount) { + childIndices[indice] = null; + } + } + + if (identifierFlag != 0) { + childIdentifiers = new int[1 + count][]; + childIdentTables = new LookupTable[1 + count]; + for (int index = 0; index < entryCount; index++) { + int indice = entryIndices[index]; + int entryChildCount = entryChildCounts[indice]; + childIdentifiers[indice] = new int[childIndexCounts[indice]]; + for (int childIndex = 0; childIndex < childIndexCounts[indice]; childIndex++) { + childIdentifiers[indice][childIndex] = -1; + } + + for (int childIndex = 0; childIndex < entryChildCount; childIndex++) { + int childIndice; + if (childIndices[indice] != null) { + childIndice = childIndices[indice][childIndex]; + } else { + childIndice = childIndex; + } + childIdentifiers[indice][childIndice] = stream.getInt(); + } + + childIdentTables[indice] = new LookupTable(childIdentifiers[indice]); + } + + } + } else { + // TODO + } + } + +} diff --git 
a/Tools/Cache Editor/src/alex/cache/SeekableFile.java b/Tools/Cache Editor/src/alex/cache/SeekableFile.java new file mode 100644 index 000000000..f79e7c34a --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/SeekableFile.java @@ -0,0 +1,294 @@ +package alex.cache; + +import java.io.EOFException; +import java.io.File; +import java.io.IOException; + +public class SeekableFile { + + private byte aByteArray2745[]; + private byte aByteArray2748[]; + private long aLong2739; + private long aLong2741; + private long aLong2743; + private long aLong2750; + private int anInt2737; + private int anInt2749; + private FileOnDisk file; + private long fileLength; + private long position; + + public SeekableFile(FileOnDisk fileOnDisk, int maxFileLength, + int unknownLength) throws IOException { + anInt2737 = 0; + aLong2741 = -1L; + aLong2743 = -1L; + file = fileOnDisk; + fileLength = aLong2750 = fileOnDisk.getFileLength(); + aByteArray2748 = new byte[maxFileLength]; + aByteArray2745 = new byte[unknownLength]; + position = 0L; + } + + final long getFileLength() { + return fileLength; + } + + private final File getWrappedFile() { + return file.getWrappedFile(); + } + + private final void method2264() throws IOException { + anInt2749 = 0; + if (~position != ~aLong2739) { + file.seek(position); + aLong2739 = position; + } + aLong2741 = position; + while (~anInt2749 > ~aByteArray2748.length) { + int i = aByteArray2748.length - anInt2749; + if (i > 0xbebc200) { + i = 0xbebc200; + } + int j = file.read(aByteArray2748, anInt2749, i); + if (j == -1) { + break; + } + anInt2749 += j; + aLong2739 += j; + } + } + + final void read(byte buffer[]) throws IOException { + read(buffer, 0, buffer.length); + } + + final void read(byte buffer[], int off, int len) throws IOException { + try { + if (~buffer.length > ~(off + len)) { + throw new ArrayIndexOutOfBoundsException(-buffer.length + len + + off); + } + if (~aLong2743 != 0L + && position >= aLong2743 + && ~(position + len) >= ~(aLong2743 + 
anInt2737)) { + System.arraycopy(aByteArray2745, (int) (-aLong2743 + position), + buffer, off, len); + position += len; + return; + } + long l = position; + int k = off; + int i1 = len; + if (position >= aLong2741 + && position < aLong2741 + anInt2749) { + int j1 = (int) (anInt2749 + aLong2741 - position); + if (j1 > len) { + j1 = len; + } + System.arraycopy(aByteArray2748, (int) (-aLong2741 + position), + buffer, off, j1); + off += j1; + position += j1; + len -= j1; + } + if (aByteArray2748.length >= len) { + if (len > 0) { + method2264(); + int k1 = len; + if (k1 > anInt2749) { + k1 = anInt2749; + } + System.arraycopy(aByteArray2748, 0, buffer, off, k1); + off += k1; + position += k1; + len -= k1; + } + } else { + file.seek(position); + aLong2739 = position; + while (len > 0) { + int l1 = file.read(buffer, off, len); + if (l1 == -1) { + break; + } + position += l1; + len -= l1; + aLong2739 += l1; + off += l1; + } + } + if (~aLong2743 != 0L) { + if (aLong2743 > position && len > 0) { + int i2 = (int) (-position + aLong2743) + off; + if (len + off < i2) { + i2 = off + len; + } + while (i2 > off) { + len--; + buffer[off++] = 0; + position++; + } + } + long l2 = -1L; + long l3 = -1L; + if (~l < ~aLong2743 || aLong2743 >= l + i1) { + if (~aLong2743 >= ~l && l < aLong2743 + anInt2737) { + l2 = l; + } + } else { + l2 = aLong2743; + } + if (aLong2743 + anInt2737 <= l + || anInt2737 + aLong2743 > l + i1) { + if (i1 + l > aLong2743 + && ~(i1 + l) >= ~(aLong2743 + anInt2737)) { + l3 = l + i1; + } + } else { + l3 = aLong2743 + anInt2737; + } + if (l2 > -1L && ~l2 > ~l3) { + int j2 = (int) (-l2 + l3); + System.arraycopy(aByteArray2745, (int) (-aLong2743 + l2), + buffer, k + (int) (-l + l2), j2); + if (~position > ~l3) { + len = (int) (len - (l3 - position)); + position = l3; + } + } + } + } catch (IOException ioexception) { + aLong2739 = -1L; + throw ioexception; + } + if (len > 0) { + throw new EOFException(); + } else { + return; + } + } + + private final void refresh() 
throws IOException { + if (~aLong2743 != 0L) { + if (aLong2739 != aLong2743) { + file.seek(aLong2743); + aLong2739 = aLong2743; + } + file.write(aByteArray2745, 0, anInt2737); + aLong2739 += anInt2737; + if (aLong2750 < aLong2739) { + aLong2750 = aLong2739; + } + long l = -1L; + long l1 = -1L; + if (aLong2743 < aLong2741 + || aLong2743 >= anInt2749 + aLong2741) { + if (~aLong2741 <= ~aLong2743 + && anInt2737 + aLong2743 > aLong2741) { + l = aLong2741; + } + } else { + l = aLong2743; + } + if (aLong2741 < anInt2737 + aLong2743 + && aLong2741 + anInt2749 >= anInt2737 + + aLong2743) { + l1 = aLong2743 + anInt2737; + } else if (~(anInt2749 + aLong2741) < ~aLong2743 + && ~(anInt2749 + aLong2741) >= ~(aLong2743 + anInt2737)) { + l1 = anInt2749 + aLong2741; + } + if (l > -1L && l1 > l) { + int i = (int) (l1 - l); + System.arraycopy(aByteArray2745, (int) (l - aLong2743), + aByteArray2748, (int) (l - aLong2741), i); + } + aLong2743 = -1L; + anInt2737 = 0; + } + } + + final void seek(long l) throws IOException { + if (l < 0L) { + throw new IOException("Invalid seek to " + l + " in file " + + getWrappedFile()); + } + position = l; + } + + final void write(byte buffer[], int off, int len) throws IOException { + try { + if (~(len + position) < ~fileLength) { + fileLength = len + position; + } + if (~aLong2743 != 0L + && (position < aLong2743 || ~(anInt2737 + aLong2743) > ~position)) { + refresh(); + } + if (~aLong2743 != 0L + && ~(aLong2743 + aByteArray2745.length) > ~(position + len)) { + int l = (int) (aByteArray2745.length - (position - aLong2743)); + System.arraycopy(buffer, off, aByteArray2745, + (int) (position - aLong2743), l); + off += l; + len -= l; + position += l; + anInt2737 = aByteArray2745.length; + refresh(); + } + if (~len < ~aByteArray2745.length) { + if (position != aLong2739) { + file.seek(position); + aLong2739 = position; + } + file.write(buffer, off, len); + aLong2739 += len; + if (aLong2739 > aLong2750) { + aLong2750 = aLong2739; + } + long l1 = -1L; + 
long l2 = -1L; + if (~position <= ~aLong2741 + && aLong2741 + anInt2749 > position) { + l1 = position; + } else if (position <= aLong2741 + && ~(position + len) < ~aLong2741) { + l1 = aLong2741; + } + if (~(len + position) < ~aLong2741 + && position + len <= aLong2741 + + anInt2749) { + l2 = len + position; + } else if (~position > ~(anInt2749 + aLong2741) + && position + len >= anInt2749 + + aLong2741) { + l2 = anInt2749 + aLong2741; + } + if (l1 > -1L && ~l2 < ~l1) { + int i1 = (int) (-l1 + l2); + System.arraycopy(buffer, + (int) (-position + off + l1), + aByteArray2748, (int) (-aLong2741 + l1), i1); + } + position += len; + return; + } + if (len > 0) { + if (aLong2743 == -1L) { + aLong2743 = position; + } + System.arraycopy(buffer, off, aByteArray2745, + (int) (-aLong2743 + position), len); + position += len; + if (~(long) anInt2737 > ~(position - aLong2743)) { + anInt2737 = (int) (position - aLong2743); + } + return; + } + } catch (IOException ioexception) { + aLong2739 = -1L; + throw ioexception; + } + } +} diff --git a/Tools/Cache Editor/src/alex/cache/loaders/ConfigFileDefinition.java b/Tools/Cache Editor/src/alex/cache/loaders/ConfigFileDefinition.java new file mode 100644 index 000000000..692cb43b8 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/loaders/ConfigFileDefinition.java @@ -0,0 +1,130 @@ +package alex.cache.loaders; + +import java.util.HashMap; +import java.util.Map; + +import com.alex.io.InputStream; +import com.alex.store.Store; + +/** + * Handles config definition reading. + * @author Emperor + * + */ +public final class ConfigFileDefinition { + + /** + * The config definitions mapping. + */ + private static final Map MAPPING = new HashMap<>(); + + /** + * The bit size flags. + */ + private static final int[] BITS = new int[32]; + + /** + * The file id. + */ + private final int id; + + /** + * The config id. + */ + private int configId; + + /** + * The bit shift amount. + */ + private int bitShift; + + /** + * The bit amount. 
+ */ + private int bitSize; + + /** + * Constructs a new {@code ConfigFileDefinition} {@code Object}. + * @param id The file id. + */ + public ConfigFileDefinition(int id) { + this.id = id; + } + + /** + * Initializes the bit flags. + */ + static { + int flag = 2; + for (int i = 0; i < 32; i++) { + BITS[i] = flag - 1; + flag += flag; + } + } + + /** + * Gets the config file definitions for the given file id. + * @param id The file id. + * @return The definition. + */ + public static ConfigFileDefinition forId(int id, Store store) { + ConfigFileDefinition def = MAPPING.get(id); + if (def != null) { + return def; + } + byte[] bs = store.getIndexes()[22].getFile(id >>> 1416501898, id & 0x3ff); + if (bs == null) { + return null; + } + def = new ConfigFileDefinition(id); + InputStream buffer = new InputStream(bs); + int opcode = 0; + while ((opcode = buffer.readByte()) != 0) { + if (opcode == 1) { + def.configId = buffer.readShort(); + def.bitShift = buffer.readByte(); + def.bitSize = buffer.readByte(); + } + } + return def; + } + + /** + * Gets the mapping. + * @return The mapping. + */ + public static Map getMapping() { + return MAPPING; + } + + /** + * Gets the id. + * @return The id. + */ + public int getId() { + return id; + } + /** + * Gets the configId. + * @return The configId. + */ + public int getConfigId() { + return configId; + } + + /** + * Gets the bitShift. + * @return The bitShift. + */ + public int getBitShift() { + return bitShift; + } + + /** + * Gets the bitSize. + * @return The bitSize. 
+ */ + public int getBitSize() { + return bitSize; + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/cache/loaders/EquipIds.java b/Tools/Cache Editor/src/alex/cache/loaders/EquipIds.java new file mode 100644 index 000000000..29e428e02 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/loaders/EquipIds.java @@ -0,0 +1,38 @@ +package alex.cache.loaders; + +import java.util.HashMap; + +import alex.util.Methods; + +public class EquipIds { + + private static HashMap equipIds = new HashMap(); + + public static int getEquipId(int itemId) { + return getEquipIds(itemId, true, true); + } + public static int getEquipIds(int itemId, boolean putEquipIdsOnMemory, boolean putItemsOnMemory) { + if(!equipIds.isEmpty()) { + if(!equipIds.containsKey(itemId)) + return -1; + return equipIds.get(itemId); + } + int equipId = 0; + for(int itemIds = 0; itemIds < Methods.getAmountOfItems(); itemIds++) { + ItemDefinition itemDef = putItemsOnMemory ? ItemDefinition.getItemDefinition(itemIds) : new ItemDefinition(itemIds); + if(itemDef.getMaleWornModelId1()>= 0 || itemDef.getMaleWornModelId2()>= 0) { + if(putEquipIdsOnMemory) + equipIds.put(itemId, equipId); + else { + if(itemIds == itemId) + return equipId; + } + equipId++; + + } + } + if(!equipIds.containsKey(itemId)) + return -1; + return equipIds.get(itemId); + } +} diff --git a/Tools/Cache Editor/src/alex/cache/loaders/ItemDefinition.java b/Tools/Cache Editor/src/alex/cache/loaders/ItemDefinition.java new file mode 100644 index 000000000..bfc6452e1 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/loaders/ItemDefinition.java @@ -0,0 +1,600 @@ +package alex.cache.loaders; + +import java.util.HashMap; + +import alex.CacheLoader; +import alex.io.Stream; +import alex.util.Methods; + +public class ItemDefinition { + + private static HashMap itemsDefs = new HashMap(); + + + public int id; + private boolean loaded; + + private int interfaceModelId; + private String name; + + //model size information + private int 
modelZoom; + private int modelRotation1; + private int modelRotation2; + private int modelOffset1; + private int modelOffset2; + + //extra information + private int stackable; + private int value; + private boolean membersOnly; + + //wearing model information + private int maleWornModelId1; + private int femaleWornModelId1; + private int maleWornModelId2; + private int femaleWornModelId2; + + //options + private String[] groundOptions; + public String[] inventoryOptions; + + //model information + private short[] originalModelColors; + private short[] modifiedModelColors; + private short[] textureColour1; + private short[] textureColour2; + private byte[] unknownArray1; + private int[] unknownArray2; + //extra information, not used for newer items + private boolean unnoted; + + private int colourEquip1; + private int colourEquip2; + private int unknownInt1; + private int unknownInt2; + private int unknownInt3; + private int unknownInt4; + private int unknownInt5; + private int unknownInt6; + private int certId; + private int certTemplateId; + private int[] stackIds; + private int[] stackAmounts; + private int unknownInt7; + private int unknownInt8; + private int unknownInt9; + private int unknownInt10; + private int unknownInt11; + private int teamId; + private int lendId; + private int lendTemplateId; + private int unknownInt12; + private int unknownInt13; + private int unknownInt14; + private int unknownInt15; + private int unknownInt16; + private int unknownInt17; + private int unknownInt18; + private int unknownInt19; + private int unknownInt20; + private int unknownInt21; + private int unknownInt22; + private int unknownInt23; + private static HashMap clientScriptData; + + public static ItemDefinition getItemDefinition(int itemId) { + return getItemDefinition(itemId, true); + } + + public static ItemDefinition getItemDefinition(int itemId, boolean load) { + if (itemsDefs.containsKey(itemId)) + return itemsDefs.get(itemId); + ItemDefinition def = new 
ItemDefinition(itemId, load); + itemsDefs.put(itemId, def); + return def; + } + + public ItemDefinition(int id) { + this(id, true); + } + + public ItemDefinition(int id, boolean load) { + this.id = id; + setDefaultsVariableValules(); + setDefaultOptions(); + if (load) { + loadItemDefinition(); + } + } + + public boolean isLoaded() { + return loaded; + } + + public void loadItemDefinition() { + byte[] data = CacheLoader.getFileSystems()[Methods.ITEMDEF_IDX_ID].getFile(id >>> 8, 0xff & id); + if (data == null) { + System.out.println("FAILED LOADING ITEM " + id); + return; + } + readOpcodeValues(new Stream(data)); + printClientScriptData(); + loaded = true; + } + + public boolean hasSpecialBar() { + if(clientScriptData == null) + return false; + Object specialBar = clientScriptData.get(686); + if(specialBar != null && specialBar instanceof Integer) + return (Integer) specialBar == 1; + return false; + } + public int getRenderAnimId() { + if(clientScriptData == null) + return 1426; + Object animId = clientScriptData.get(644); + if(animId != null && animId instanceof Integer) + return (Integer) animId; + return 1426; + } + + public int getQuestId() { + if(clientScriptData == null) + return -1; + Object questId = clientScriptData.get(861); + if(questId != null && questId instanceof Integer) + return (Integer) questId; + return -1; + } + + public HashMap getWearingSkillRequiriments() { + if(clientScriptData == null) + return null; + HashMap skills = new HashMap(); + int nextLevel = -1; + int nextSkill = -1; + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + if(value instanceof String) + continue; + if(key == 23) { + skills.put(Methods.RANGE, (Integer) value); + skills.put(Methods.FIREMAKING, 61); + }else if (key >= 749 && key < 797) { + if(key % 2 == 0) + nextLevel = (Integer) value; + else + nextSkill = (Integer) value; + if(nextLevel != -1 && nextSkill != -1) { + skills.put(nextSkill, nextLevel); + nextLevel = -1; + nextSkill = 
-1; + } + } + + } + return skills; + } + + //test :P + public void printClientScriptData() { + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + System.out.println("KEY: "+key+", VALUE: "+value); + } + HashMap requiriments = getWearingSkillRequiriments(); + if(requiriments == null) { + System.out.println("null."); + return; + } + System.out.println(requiriments.keySet().size()); + for(int key : requiriments.keySet()) { + Object value = requiriments.get(key); + System.out.println("SKILL: "+key+", LEVEL: "+value); + } + } + + + private void setDefaultOptions() { + groundOptions = new String[] { null, null, "take", null, null }; + inventoryOptions = new String[] { null, null, null, null, "drop" }; + } + + private void setDefaultsVariableValules() { + name = "null"; + maleWornModelId1 = -1; + maleWornModelId2 = -1; + femaleWornModelId1 = -1; + femaleWornModelId2 = -1; + modelZoom = 2000; + lendId = -1; + lendTemplateId = -1; + certId = -1; + certTemplateId = -1; + unknownInt9 = 128; + value = 1; + colourEquip1 = -1; + colourEquip2 = -1; + } + + public byte[] packItemDefinition() { + Stream stream = new Stream(10000); + + stream.putByte(1); + stream.putShort(interfaceModelId); + + if(!name.equals("null")) { + stream.putByte(2); + stream.putString(name); + } + + if(modelZoom != 2000) { + stream.putByte(4); + stream.putShort(modelZoom); + } + + if(modelRotation1 != 0) { + stream.putByte(5); + stream.putShort(modelRotation1); + } + + if(modelRotation2 != 0) { + stream.putByte(6); + stream.putShort(modelRotation2); + } + + if(modelOffset1 != 0) { + stream.putByte(7); + int value = modelOffset1 >>= 0; + if (value < 0) + value += 65536; + stream.putShort(value); + } + + if(modelOffset2 != 0) { + stream.putByte(8); + int value = modelOffset2 >>= 0; + if (value < 0) + value += 65536; + stream.putShort(value); + } + + if(stackable >= 1) { + stream.putByte(11); + } + + if(value != 1) { + stream.putByte(12); + stream.putInt(value); + } + + 
if(membersOnly) { + stream.putByte(16); + } + + if(maleWornModelId1 != -1) { + stream.putByte(23); + stream.putShort(maleWornModelId1); + } + + if(maleWornModelId2 != -1) { + stream.putByte(24); + stream.putShort(maleWornModelId2); + } + + if(femaleWornModelId1 != -1) { + stream.putByte(25); + stream.putShort(femaleWornModelId1); + } + + if(femaleWornModelId2 != -1) { + stream.putByte(26); + stream.putShort(femaleWornModelId2); + } + + for(int index = 0; index < groundOptions.length; index++) { + if(groundOptions[index] == null || (index == 2 && groundOptions[index].equals("take"))) + continue; + stream.putByte(30+index); + stream.putString(groundOptions[index]); + } + + for(int index = 0; index < inventoryOptions.length; index++) { + if(inventoryOptions[index] == null || (index == 4 && inventoryOptions[index].equals("drop"))) + continue; + stream.putByte(35+index); + stream.putString(inventoryOptions[index]); + } + + if(originalModelColors != null && modifiedModelColors != null) { + stream.putByte(40); + stream.putByte(originalModelColors.length); + for(int index = 0; index < originalModelColors.length; index++) { + stream.putShort(originalModelColors[index]); + stream.putShort(modifiedModelColors[index]); + } + } + + if(textureColour1 != null && textureColour2 != null) { + stream.putByte(41); + stream.putByte(textureColour1.length); + for(int index = 0; index < textureColour1.length; index++) { + stream.putShort(textureColour1[index]); + stream.putShort(textureColour2[index]); + } + } + + if(unknownArray1 != null) { + stream.putByte(42); + stream.putByte(unknownArray1.length); + for(int index = 0; index < unknownArray1.length; index++) + stream.putByte(unknownArray1[index]); + } + if(unnoted) { + stream.putByte(65); + } + + if(colourEquip1 != -1) { + stream.putByte(78); + stream.putShort(colourEquip1); + } + + if(colourEquip2 != -1) { + stream.putByte(79); + stream.putShort(colourEquip2); + } + + //TODO FEW OPCODES HERE + + if(certId != -1) { + 
stream.putByte(97); + stream.putShort(certId); + } + + if(certTemplateId != -1) { + stream.putByte(98); + stream.putShort(certTemplateId); + } + + if(stackIds != null && stackAmounts != null) { + for(int index = 0; index < stackIds.length; index++) { + if(stackIds[index] == 0 && stackAmounts[index] == 0) + continue; + stream.putByte(100+index); + stream.putShort(stackIds[index]); + stream.putShort(stackAmounts[index]); + } + } + + //TODO FEW OPCODES HERE + + if(teamId != 0) { + stream.putByte(115); + stream.putByte(teamId); + } + + if(lendId != -1) { + stream.putByte(121); + stream.putShort(lendId); + } + + if(lendTemplateId != -1) { + stream.putByte(122); + stream.putShort(lendTemplateId); + } + + + //TODO FEW OPCODES HERE + + if(unknownArray2 != null) { + stream.putByte(132); + stream.putByte(unknownArray2.length); + for(int index = 0; index < unknownArray2.length; index++) + stream.putShort(unknownArray2[index]); + } + + if(clientScriptData != null) { + stream.putByte(249); + stream.putByte(clientScriptData.size()); + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + stream.putByte(value instanceof String ? 
1 : 0); + stream.putMediumInt(key); + if(value instanceof String) { + stream.putString((String) value); + }else{ + stream.putInt((Integer) value); + } + } + } + + //end + stream.putByte(0); + + byte[] data = new byte[stream.offset]; + stream.offset = 0; + stream.getBytes(data, 0, data.length); + return data; + + } + + private void readValues(Stream stream, int opcode) { + if(opcode == 1) + interfaceModelId = stream.getUShort(); + else if (opcode == 2) + setName(stream.getString()); + else if (opcode == 4) + modelZoom = stream.getUShort(); + else if (opcode == 5) + modelRotation1 = stream.getUShort(); + else if (opcode == 6) + modelRotation2 = stream.getUShort(); + else if (opcode == 7) { + modelOffset1 = stream.getUShort(); + if (modelOffset1 > 32767) + modelOffset1 -= 65536; + modelOffset1 <<= 0; + }else if (opcode == 8) { + modelOffset2 = stream.getUShort(); + if (modelOffset2 > 32767) + modelOffset2 -= 65536; + modelOffset2 <<= 0; + }else if (opcode == 11) + stackable = 1; + else if (opcode == 12) + value = stream.getInt(); + else if (opcode == 16) + membersOnly = true; + else if (opcode == 23) + setMaleWornModelId1(stream.getUShort()); + else if (opcode == 24) + femaleWornModelId1 = stream.getUShort(); + else if (opcode == 25) + setMaleWornModelId2(stream.getUShort()); + else if (opcode == 26) + femaleWornModelId2 = stream.getUShort(); + else if (opcode >= 30 && opcode < 35) + groundOptions[opcode-30] = stream.getString(); + else if (opcode >= 35 && opcode < 40) + inventoryOptions[opcode-35] = stream.getString(); + else if (opcode == 40) { + int length = stream.getUByte(); + originalModelColors = new short[length]; + modifiedModelColors = new short[length]; + for(int index = 0; index < length; index++) { + originalModelColors[index] = (short) stream.getUShort(); + modifiedModelColors[index] = (short) stream.getUShort(); + } + }else if (opcode == 41) { + int length = stream.getUByte(); + textureColour1 = new short[length]; + textureColour2 = new short[length]; + 
for(int index = 0; index < length; index++) { + textureColour1[index] = (short) stream.getUShort(); + textureColour2[index] = (short) stream.getUShort(); + } + }else if (opcode == 42) { + int length = stream.getUByte(); + unknownArray1 = new byte[length]; + for(int index = 0; index < length; index++) + unknownArray1[index] = stream.getByte(); + }else if (opcode == 65) + unnoted = true; + else if (opcode == 78) + colourEquip1 = stream.getUShort(); + else if (opcode == 79) + colourEquip2 = stream.getUShort(); + else if (opcode == 90) + unknownInt1 = stream.getUShort(); + else if (opcode == 91) + unknownInt2 = stream.getUShort(); + else if (opcode == 92) + unknownInt3 = stream.getUShort(); + else if (opcode == 93) + unknownInt4 = stream.getUShort(); + else if (opcode == 95) + unknownInt5 = stream.getUShort(); + else if (opcode == 96) + unknownInt6 = stream.getUShort(); + else if (opcode == 97) + certId = stream.getUShort(); + else if (opcode == 98) + certTemplateId = stream.getUShort(); + else if (opcode >= 100 && opcode < 110) { + if (stackIds == null) { + stackIds = new int[10]; + stackAmounts = new int[10]; + } + stackIds[opcode-100] = stream.getUShort(); + stackAmounts[opcode-100] = stream.getUShort(); + }else if (opcode == 110) + unknownInt7 = stream.getUShort(); + else if (opcode == 111) + unknownInt8 = stream.getUShort(); + else if (opcode == 112) + unknownInt9 = stream.getUShort(); + else if (opcode == 113) + unknownInt10 = stream.getByte(); + else if (opcode == 114) + unknownInt11 = stream.getByte() * 5; + else if (opcode == 115) + teamId = stream.getUByte(); + else if (opcode == 121) + lendId = stream.getUShort(); + else if (opcode == 122) + lendTemplateId = stream.getUShort(); + else if (opcode == 125) { + unknownInt12 = stream.getByte() << 0; + unknownInt13 = stream.getByte() << 0; + unknownInt14 = stream.getByte() << 0; + }else if (opcode == 126) { + unknownInt15 = stream.getByte() << 0; + unknownInt16 = stream.getByte() << 0; + unknownInt17 = 
stream.getByte() << 0; + }else if (opcode == 127) { + unknownInt18 = stream.getUByte(); + unknownInt19 = stream.getUShort(); + }else if (opcode == 128) { + unknownInt20 = stream.getUByte(); + unknownInt21 = stream.getUShort(); + }else if (opcode == 129) { + unknownInt20 = stream.getUByte(); + unknownInt21 = stream.getUShort(); + }else if (opcode == 130) { + unknownInt22 = stream.getUByte(); + unknownInt23 = stream.getUShort(); + }else if (opcode == 132) { + int length = stream.getUByte(); + unknownArray2 = new int[length]; + for(int index = 0; index < length; index++) + unknownArray2[index] = stream.getUShort(); + }else if (opcode == 249) { + int length = stream.getUByte(); + if(clientScriptData == null) + clientScriptData = new HashMap(Methods.getTableSize(length)); + for (int index = 0; index < length; index++) { + boolean stringInstance = stream.getUByte() == 1; + int key = stream.getMediumInt(); + Object value = stringInstance ? stream.getString() : stream.getInt(); + clientScriptData.put(key, value); + } + } + else + throw new IllegalArgumentException("MISSING OPCODE "+opcode+" FOR ITEM "+id); + } + + private void readOpcodeValues(Stream stream) { + while (true) { + int opcode = stream.getUByte(); + if (opcode == 0) + break; + readValues(stream, opcode); + } + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void setMaleWornModelId1(int maleWornModelId1) { + this.maleWornModelId1 = maleWornModelId1; + } + + public int getMaleWornModelId1() { + return maleWornModelId1; + } + + public void setMaleWornModelId2(int maleWornModelId2) { + this.maleWornModelId2 = maleWornModelId2; + } + + public int getMaleWornModelId2() { + return maleWornModelId2; + } +} diff --git a/Tools/Cache Editor/src/alex/cache/loaders/ObjectDefinitions.java b/Tools/Cache Editor/src/alex/cache/loaders/ObjectDefinitions.java new file mode 100644 index 000000000..a8f1ec9de --- /dev/null +++ b/Tools/Cache 
Editor/src/alex/cache/loaders/ObjectDefinitions.java @@ -0,0 +1,914 @@ +package alex.cache.loaders; + +import java.util.HashMap; +import java.util.Map; +import java.util.TreeMap; + +import com.alex.io.InputStream; +import com.alex.store.Store; + + +import emperor.ObjectMap.GameObject; + +/** + * The {@link Definitions} of {@link GameObject}s + * @author SonicForce41 + */ +public class ObjectDefinitions { + + /** + * The {@link Map} of {@link ObjectDefinitions} + */ + private static Map DEFINITIONS = new HashMap(); + + /** + * anInt3832 + */ + static int anInt3832; + + /** + * anInt3836 + */ + static int anInt3836; + + /** + * anInt3842 + */ + static int anInt3842; + + /** + * anInt3843 + */ + static int anInt3843; + + /** + * anInt3846 + */ + static int anInt3846; + + /** + * Gets the {@link ObjectDefinitions} for 'objectId' + */ + public static ObjectDefinitions forId(final int id) { + return DEFINITIONS.get(id); + } + + /** + * Gets the {@link Map} of {@link] ObjectDefinitions} + * @return + */ + public static Map getObjectDefinitions() { + return DEFINITIONS; + } + + /** + * Gets the archive Id + * @return + */ + private static int getArchiveId(final int objectID2) { + return objectID2 >>> -1135990488; + } + + /** + * aBoolean3853 + */ + public boolean aBoolean3853; + + /** + * aBoolean3891 + */ + public boolean aBoolean3891; + + /** + * actionCount + */ + public int actionCount; + + /** + * clippingFlag + */ + public boolean clippingFlag; + + /** + * configFileId + */ + public int configFileId; + + /** + * id + */ + public int id; + + /** + * name + */ + public String name; + + /** + * secondBool + */ + public boolean secondBool; + + /** + * secondInt + */ + public int secondInt; + + /** + * sizeX + */ + public int sizeX; + + /** + * sizeY + */ + public int sizeY; + + /** + * thirdInt + */ + public int thirdInt; + + /** + * aByte3912 + */ + private byte aByte3912; + + /** + * aByteArray3858 + */ + private byte[] aByteArray3858; + + /** + * anInt3881 + */ + 
private int anInt3881; + + /** + * anIntArray3869 + */ + private int[] anIntArray3869; + + /** + * anIntArrayArray3916 + */ + @SuppressWarnings("unused") + private int[][] anIntArrayArray3916; + + /** + * aShortArray3919 + */ + private short[] aShortArray3919; + + public String[] options; + /** + * aShortArray3920 + */ + private short[] aShortArray3920; + + /** + * modelConfiguration + */ + private byte[] modelConfiguration; + + /** + * modifiedColors + */ + private short[] modifiedColors; + + /** + * The object Id + */ + private int objectId; + + /** + * The original colors array + */ + private short[] originalColors; + + /** + * solid + */ + private boolean solid; + + /** + * walkBitFlag + */ + private int walkBitFlag; + + /** + * aBoolean3839 + */ + boolean aBoolean3839; + + /** + * aBoolean3845 + */ + boolean aBoolean3845; + + /** + * aBoolean3866 + */ + boolean aBoolean3866; + + /** + * aBoolean3867 + */ + boolean aBoolean3867; + + /** + * aBoolean3870 + */ + boolean aBoolean3870; + + /** + * aBoolean3872 + */ + boolean aBoolean3872; + + /** + * aBoolean3873 + */ + boolean aBoolean3873; + + /** + * aBoolean3894 + */ + boolean aBoolean3894; + + /** + * aBoolean3895 + */ + boolean aBoolean3895; + + /** + * aBoolean3906 + */ + boolean aBoolean3906; + + /** + * aBoolean3923 + */ + boolean aBoolean3923; + + /** + * aBoolean3924 + */ + boolean aBoolean3924; + + /** + * anInt3835 + */ + int anInt3835; + + /** + * anInt3838 + */ + int anInt3838 = -1; + + /** + * anInt3844 + */ + int anInt3844; + + /** + * anInt3850 + */ + int anInt3850; + + /** + * anInt3851 + */ + int anInt3851; + + /** + * anInt3855 + */ + int anInt3855; + + /** + * anInt3857 + */ + int anInt3857; + + /** + * anInt3860 + */ + int anInt3860; + + /** + * anInt3865 + */ + int anInt3865; + + /** + * anInt3876 + */ + public int animationId; + + /** + * anInt3892 + */ + int anInt3892; + + /** + * anInt3896 + */ + int anInt3896; + + /** + * anInt3900 + */ + int anInt3900; + + /** + * anInt3904 + */ + int 
anInt3904; + + /** + * anInt3905 + */ + int anInt3905; + + /** + * anInt3913 + */ + int anInt3913; + + /** + * anInt3921 + */ + int anInt3921; + + /** + * anIntArray3833 + */ + int[] anIntArray3833 = null; + + /** + * anIntArray3859 + */ + int[] anIntArray3859; + + /** + * anIntArray3908 + */ + int[] anIntArray3908; + + /** + * The childrens id + */ + int[] childrenIds; + + /** + * configId + */ + int configId; + + public int[] models; + + /** + * Constructs a new {@code ObjectDefinitions.java} {@code Object}. + * @param objectId the object Id + */ + public ObjectDefinitions(final int objectId) { + this.objectId = objectId; + anInt3835 = -1; + anInt3860 = -1; + configFileId = -1; + aBoolean3866 = false; + anInt3851 = -1; + anInt3865 = 255; + aBoolean3845 = false; + aBoolean3867 = false; + anInt3850 = 0; + anInt3844 = -1; + setAnInt3881(0); + anInt3857 = -1; + aBoolean3872 = true; + options = new String[5]; + aBoolean3839 = false; + anIntArray3869 = null; + sizeX = 1; + thirdInt = -1; + solid = true; + aBoolean3895 = true; + aBoolean3870 = false; + aBoolean3853 = true; + secondBool = false; + actionCount = 2; + anInt3855 = -1; + anInt3904 = 0; + sizeY = 1; + animationId = -1; + clippingFlag = false; + aBoolean3891 = false; + anInt3905 = 0; + name = "null"; + anInt3913 = -1; + aBoolean3906 = false; + aBoolean3873 = false; + anInt3900 = 0; + secondInt = -1; + aBoolean3894 = false; + setaByte3912((byte) 0); + anInt3921 = 0; + configId = -1; + setWalkBitFlag(0); + anInt3892 = 64; + aBoolean3923 = false; + aBoolean3924 = false; + } + + /** + * Method returns the value of aByte3912 + * @return the aByte3912 + */ + public byte getaByte3912() { + return aByte3912; + } + + /** + * Method returns the value of anInt3881 + * @return the anInt3881 + */ + public int getAnInt3881() { + return anInt3881; + } + + /** + * Gets the object Id + * @return the objectId + */ + public int getObjectId() { + return objectId; + } + + /** + * Method returns the value of walkBitFlag + * @return 
the walkBitFlag + */ + public int getWalkBitFlag() { + return walkBitFlag; + } + + public static ObjectDefinitions initialize(int objectId, Store store) { + byte[] is = store.getIndexes()[16].getFile(getArchiveId(objectId), objectId & 0xff); + if (is == null) { + return null; + } + ObjectDefinitions def = new ObjectDefinitions(objectId); + def.readValueLoop(new InputStream(is)); + def.configureObject(); + if (def.clippingFlag) { + def.solid = false; + def.actionCount = 0; + } + if (def.name.contains("booth")) { + def.clippingFlag = false; + def.solid = true; + def.actionCount = 2; + } + DEFINITIONS.put(objectId, def); + return def; + } + + /** + * Checks if the object is clipped + * @return + */ + public boolean isClippingFlag() { + return clippingFlag; + } + + /** + * Checks if the object is solid + * @return + */ + public boolean isSolid() { + return solid; + } + + /** + * Gets the size X + * @return + */ + public int getSizeX() { + return sizeX; + } + + /** + * Gets the size Y + * @return + */ + public int getSizeY() { + return sizeY; + } + + /** + * Gets the acount count + * @return + */ + public int getActionCount() { + return actionCount; + } + + /** + * Gets the name + */ + public String getName() { + return name; + } + + /** + * Gets the walk bit + * @return + */ + public int getWalkBit() { + return walkBitFlag; + } + + /** + * Method sets the value for aByte3912 + * @param aByte3912 the aByte3912 to set + */ + public void setaByte3912(final byte aByte3912) { + this.aByte3912 = aByte3912; + } + + /** + * Method sets the value for anInt3881 + * @param anInt3881 the anInt3881 to set + */ + public void setAnInt3881(final int anInt3881) { + this.anInt3881 = anInt3881; + } + + /** + * Method sets the value for walkBitFlag + * @param walkBitFlag the walkBitFlag to set + */ + public void setWalkBitFlag(final int walkBitFlag) { + this.walkBitFlag = walkBitFlag; + } + + /** + * Reads the values in a loop + * @param builder + */ + private void readValueLoop(final 
InputStream builder) { + for (;;) { + int opcode = builder.readUnsignedByte(); + if (opcode == 0) + break; + readValues(builder, opcode); + } + } + + /** + * Reads the values + * @param builder the PacketBuilder + * @param opcode the opcode + */ + private void readValues(final InputStream builder, final int opcode) { +// System.out.println("Reading opcode " + opcode); + if (opcode != 1 && opcode != 5) { + if (opcode != 2) { + if (opcode != 14) { + if (opcode != 15) { + if (opcode == 17) { + solid = false; + actionCount = 0; + } else if (opcode != 18) { + if (opcode == 19) + secondInt = builder.readUnsignedByte(); + else if (opcode == 21) + setaByte3912((byte) 1); + else if (opcode != 22) { + if (opcode != 23) { + if (opcode != 24) { + if (opcode == 27) + actionCount = 1; + else if (opcode == 28) + anInt3892 = (builder.readUnsignedByte() << 2); + else if (opcode != 29) { + if (opcode != 39) { + if (opcode < 30 || opcode >= 35) { + if (opcode == 40) { + int i_53_ = (builder.readUnsignedByte()); + originalColors = new short[i_53_]; + modifiedColors = new short[i_53_]; + for (int i_54_ = 0; i_53_ > i_54_; i_54_++) { + originalColors[i_54_] = (short) (builder.readUnsignedShort()); + modifiedColors[i_54_] = (short) (builder.readUnsignedShort()); + } + } else if (opcode != 41) { + if (opcode != 42) { + if (opcode != 62) { + if (opcode != 64) { + if (opcode == 65) + builder.readUnsignedShort(); + else if (opcode != 66) { + if (opcode != 67) { + if (opcode == 69) + setWalkBitFlag(builder.readUnsignedByte()); + else if (opcode != 70) { + if (opcode == 71) + builder.readShort(); + else if (opcode != 72) { + if (opcode == 73) + secondBool = true; + else if (opcode == 74) + clippingFlag = true; + else if (opcode != 75) { + if (opcode != 77 && opcode != 92) { + if (opcode == 78) { + anInt3860 = builder.readUnsignedShort(); + anInt3904 = builder.readUnsignedByte(); + } else if (opcode != 79) { + if (opcode == 81) { + setaByte3912((byte) 2); + builder.readUnsignedByte(); + } else 
if (opcode != 82) { + if (opcode == 88) + aBoolean3853 = false; + else if (opcode != 89) { + if (opcode == 90) + aBoolean3870 = true; + else if (opcode != 91) { + if (opcode != 93) { + if (opcode == 94) + setaByte3912((byte) 4); + else if (opcode != 95) { + if (opcode != 96) { + if (opcode == 97) + aBoolean3866 = true; + else if (opcode == 98) + aBoolean3923 = true; + else if (opcode == 99) { + anInt3857 = builder.readUnsignedByte(); + anInt3835 = builder.readUnsignedShort(); + } else if (opcode == 100) { + anInt3844 = builder.readUnsignedByte(); + anInt3913 = builder.readUnsignedShort(); + } else if (opcode != 101) { + if (opcode == 102) + anInt3838 = builder.readUnsignedShort(); + else if (opcode == 103) + thirdInt = 0; + else if (opcode != 104) { + if (opcode == 105) + aBoolean3906 = true; + else if (opcode == 106) { + int i_55_ = builder.readUnsignedByte(); + anIntArray3869 = new int[i_55_]; + anIntArray3833 = new int[i_55_]; + for (int i_56_ = 0; i_56_ < i_55_; i_56_++) { + anIntArray3833[i_56_] = builder.readUnsignedShort(); + int i_57_ = builder.readUnsignedByte(); + anIntArray3869[i_56_] = i_57_; + setAnInt3881(getAnInt3881() + + i_57_); + } + } else if (opcode == 107) + anInt3851 = builder.readUnsignedShort(); + else if (opcode >= 150 && opcode < 155) { + options[opcode - 150] = builder.readString(); + } else if (opcode != 160) { + if (opcode == 162) { + setaByte3912((byte) 3); + builder.readInt(); + } else if (opcode == 163) { + builder.readByte(); + builder.readByte(); + builder.readByte(); + builder.readByte(); + } else if (opcode != 164) { + if (opcode != 165) { + if (opcode != 166) { + if (opcode == 167) + anInt3921 = builder.readUnsignedShort(); + else if (opcode != 168) { + if (opcode == 169) { + aBoolean3845 = true; + } else if (opcode == 170) { + builder.readUnsignedSmart(); + } else if (opcode == 171) { + builder.readUnsignedSmart(); + } else if (opcode == 173) { + builder.readUnsignedShort(); + builder.readUnsignedShort(); + } else if (opcode == 
177) { + // something + // = + // true + } else if (opcode == 178) { + builder.readUnsignedByte(); + } else if (opcode == 249) { + int i_58_ = builder.readUnsignedByte(); + for (int i_60_ = 0; i_60_ < i_58_; i_60_++) { + boolean bool = builder.readUnsignedByte() == 1; + builder.readByte(); + builder.readShort(); + if (!bool) + builder.readInt(); + else + builder.readString(); + } + } + } else + aBoolean3894 = true; + } else + builder.readShort(); + } else + builder.readShort(); + } else + builder.readShort(); + } else { + int i_62_ = builder.readUnsignedByte(); + anIntArray3908 = new int[i_62_]; + for (int i_63_ = 0; i_62_ > i_63_; i_63_++) + anIntArray3908[i_63_] = builder.readUnsignedShort(); + } + } else + anInt3865 = builder.readUnsignedByte(); + } else + anInt3850 = builder.readUnsignedByte(); + } else + aBoolean3924 = true; + } else { + setaByte3912((byte) 5); + builder.readShort(); + } + } else { + setaByte3912((byte) 3); + builder.readUnsignedShort(); + } + } else + aBoolean3873 = true; + } else + aBoolean3895 = false; + } else + aBoolean3891 = true; + } else { + anInt3900 = builder.readUnsignedShort(); + anInt3905 = builder.readUnsignedShort(); + anInt3904 = builder.readUnsignedByte(); + int i_64_ = builder.readUnsignedByte(); + anIntArray3859 = new int[i_64_]; + for (int i_65_ = 0; i_65_ < i_64_; i_65_++) + anIntArray3859[i_65_] = builder.readUnsignedShort(); + } + } else { + configFileId = builder.readUnsignedShort(); + if (configFileId == 65535) + configFileId = -1; + configId = builder.readUnsignedShort(); + if (configId == 65535) + configId = -1; + int i_66_ = -1; + if (opcode == 92) { + i_66_ = builder.readUnsignedShort(); + if (i_66_ == 65535) + i_66_ = -1; + } + int i_67_ = builder.readUnsignedByte(); + childrenIds = new int[i_67_ + 2]; + for (int i_68_ = 0; i_67_ >= i_68_; i_68_++) { + childrenIds[i_68_] = builder.readUnsignedShort(); + if (childrenIds[i_68_] == 65535) + childrenIds[i_68_] = -1; + } + childrenIds[i_67_ + 1] = i_66_; + } + } else + 
anInt3855 = builder.readUnsignedByte(); + } else + builder.readShort(); + } else + builder.readShort(); + } else + builder.readUnsignedShort(); + } else + builder.readUnsignedShort(); + } else + aBoolean3872 = false; + } else + aBoolean3839 = true; + } else { + int i_69_ = builder.readUnsignedByte(); + aByteArray3858 = new byte[i_69_]; + for (int i_70_ = 0; i_70_ < i_69_; i_70_++) + aByteArray3858[i_70_] = (byte) builder.readUnsignedByte(); + } + } else { + int i_71_ = builder.readUnsignedByte(); + aShortArray3920 = new short[i_71_]; + aShortArray3919 = new short[i_71_]; + for (int i_72_ = 0; i_71_ > i_72_; i_72_++) { + aShortArray3920[i_72_] = (short) builder.readUnsignedShort(); + aShortArray3919[i_72_] = (short) builder.readUnsignedShort(); + } + } + } else + options[opcode - 30] = builder.readString(); + } else + builder.readByte(); + } else + builder.readByte(); + } else { + animationId = builder.readUnsignedShort(); + if (animationId == 65535) + animationId = -1; + } + } else + thirdInt = 1; + } else + aBoolean3867 = true; + } else + solid = false; + } else + sizeX = builder.readUnsignedByte(); + } else + sizeY = builder.readUnsignedByte(); + } else + name = builder.readString(); + } else { + int length = builder.readUnsignedByte() & 0xff; + if (opcode == 1) { + modelConfiguration = new byte[length]; + } + models = new int[length]; + for (int i = 0; i < length; i++) { + models[i] = builder.readShort() & 0xFFFF; + int config = -1; + if (opcode == 1) { + config = modelConfiguration[i] = (byte) (builder.readUnsignedByte() & 0xFF); + } +// System.out.println("Model id: " + model + ", " + config); + } +// boolean aBoolean1162 = false; +// if (opcode == 5 && aBoolean1162) +// skipBytes(builder); +// int length = builder.readUnsignedByte(); +// anIntArrayArray3916 = new int[length][]; +// modelConfiguration = new byte[length]; +// for (int i = 0; i < length; i++) { +// modelConfiguration[i] = (byte) builder.readByte(); +// int i_75_ = builder.readUnsignedByte(); +// 
anIntArrayArray3916[i] = new int[i_75_]; +// for (int i_76_ = 0; i_75_ > i_76_; i_76_++) { +// anIntArrayArray3916[i][i_76_] = builder.readUnsignedShort(); +// if (opcode == 1) +// System.out.println("Model id " + anIntArrayArray3916[i][i_76_]); +// } +// } +// if (opcode == 5 && !aBoolean1162) +// skipBytes(builder); + } + } + + /** + * Skips few bytes + * @param builder + */ + private void skipBytes(final InputStream builder) { + int length = builder.readUnsignedByte(); + for (int index = 0; index < length; index++) { + builder.skip(1); + builder.skip(builder.readUnsignedByte() * 2); + } + } + + /** + * Checks object variables + */ + void configureObject() { + if (id == 4039) { + name = "Trapdoor"; + options[0] = "Open"; + } + if (secondInt == -1) { + secondInt = 0; + if (modelConfiguration != null && modelConfiguration.length == 1 && modelConfiguration[0] == 10) + secondInt = 1; + for (int i = 0; i < 5; i++) { + if (options[i] != null) { + secondInt = 1; + break; + } + } + } + if (anInt3855 == -1) + anInt3855 = actionCount != 0 ? 
1 : 0; + } + + /** + * Clears the definition + */ + public static void clear() { + DEFINITIONS = new TreeMap(); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/cache/loaders/OverlayDefinition.java b/Tools/Cache Editor/src/alex/cache/loaders/OverlayDefinition.java new file mode 100644 index 000000000..0c0230d76 --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/loaders/OverlayDefinition.java @@ -0,0 +1,147 @@ +package alex.cache.loaders; + +import java.util.HashMap; +import java.util.Map; + +import com.alex.io.InputStream; +import com.alex.store.Store; + + +public class OverlayDefinition { + + private static final Map DEFINITIONS = new HashMap<>(); + private int rgb = -1; + private int textureId; + private boolean bool; + private int id; + + public OverlayDefinition(int id) { + this.id = id; + } + + public static OverlayDefinition forId(Store store, int id) { + OverlayDefinition def = DEFINITIONS.get(id); + if (def != null) { + return def; + } + byte[] data = store.getIndexes()[2].getFile(4, id); + if (data == null) { + return null; + } + def = new OverlayDefinition(id); + def.readValues(new InputStream(data), id); + DEFINITIONS.put(id, def); + return def; + } + + public void readValues(InputStream buffer, int id) { + for (;;) { + int opcode = buffer.readByte(); + if (opcode == 0) { + break; + } + parseOpcode(buffer, opcode, id); + } + } + + private final void parseOpcode(InputStream buffer, int opcode, int id) { + switch (opcode) { + case 1: + rgb = ((buffer.readByte() & 0xff) << 16) + ((buffer.readByte() & 0xff) << 8) + (buffer.readByte() & 0xff); + break; + case 2: + textureId = buffer.readByte(); + break; + case 3: + textureId = buffer.readShort() & 0xFFFF; + if (textureId == 65535) { + textureId = -1; + } + break; + case 5: + bool = false; + break; + case 7: + buffer.readByte(); + buffer.readShort(); //Class68.method1252(false, buffer.getTriByte(124)); + break; +// case 8: Class17.anInt305 = id; + case 9: + buffer.readShort(); + 
break; + case 11: + buffer.readByte(); + break; + case 13: + buffer.readByte(); + buffer.readShort(); + break; + case 14: + buffer.readByte(); + break; + } + } + + /** + * @return the textureId + */ + public int getTextureId() { + return textureId; + } + + /** + * @param textureId the textureId to set + */ + public void setTextureId(int textureId) { + this.textureId = textureId; + } + + /** + * @return the bool + */ + public boolean isBool() { + return bool; + } + + /** + * @param bool the bool to set + */ + public void setBool(boolean bool) { + this.bool = bool; + } + + /** + * @return the id + */ + public int getId() { + return id; + } + + /** + * @param id the id to set + */ + public void setId(int id) { + this.id = id; + } + + /** + * @return the definitions + */ + public static Map getDefinitions() { + return DEFINITIONS; + } + + /** + * @return the rgb + */ + public int getRgb() { + return rgb; + } + + /** + * @param rgb the rgb to set + */ + public void setRgb(int rgb) { + this.rgb = rgb; + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/cache/updateServer/UpdateServer.java b/Tools/Cache Editor/src/alex/cache/updateServer/UpdateServer.java new file mode 100644 index 000000000..68af6c6fb --- /dev/null +++ b/Tools/Cache Editor/src/alex/cache/updateServer/UpdateServer.java @@ -0,0 +1,20 @@ +package alex.cache.updateServer; + +import alex.io.Stream; +import alex.util.Methods; + +public class UpdateServer { + + public static byte[] getReadyForSendFile(int idxid, int fileid, int compression, byte[] data) { + Stream stream = new Stream(data.length+100); + stream.putByte(idxid); + stream.putShort(fileid); + byte[] compressedData = Methods.packContainer(compression, data); + for(int index = 0; index < compressedData.length; index++) + stream.putByte(compressedData[index]); + byte[] file = new byte[stream.offset]; + stream.offset = 0; + stream.getBytes(file, 0, file.length); + return file; + } +} diff --git a/Tools/Cache 
Editor/src/alex/compressors/BZip2Constants.java b/Tools/Cache Editor/src/alex/compressors/BZip2Constants.java new file mode 100644 index 000000000..ba1b229b2 --- /dev/null +++ b/Tools/Cache Editor/src/alex/compressors/BZip2Constants.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package alex.compressors; + +/** + * Constants for both the compress and decompress BZip2 classes. + */ +interface BZip2Constants { + + int BASEBLOCKSIZE = 100000; + int MAX_ALPHA_SIZE = 258; + int MAX_CODE_LEN = 23; + int RUNA = 0; + int RUNB = 1; + int N_GROUPS = 6; + int G_SIZE = 50; + int N_ITERS = 4; + int MAX_SELECTORS = (2 + (900000 / G_SIZE)); + int NUM_OVERSHOOT_BYTES = 20; + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/compressors/BZip2OutputStream.java b/Tools/Cache Editor/src/alex/compressors/BZip2OutputStream.java new file mode 100644 index 000000000..0bef70b92 --- /dev/null +++ b/Tools/Cache Editor/src/alex/compressors/BZip2OutputStream.java @@ -0,0 +1,1879 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package alex.compressors; + +import java.io.IOException; +import java.io.OutputStream; + +/** + * An output stream that compresses into the BZip2 format into another stream. + * + *

+ * The compression requires large amounts of memory. Thus you should call the + * {@link #close() close()} method as soon as possible, to force + * BZip2CompressorOutputStream to release the allocated memory. + *

+ * + *

You can shrink the amount of allocated memory and maybe raise + * the compression speed by choosing a lower blocksize, which in turn + * may cause a lower compression ratio. You can avoid unnecessary + * memory allocation by avoiding using a blocksize which is bigger + * than the size of the input.

+ * + *

You can compute the memory usage for compressing by the + * following formula:

+ * + *
+ * <code>400k + (9 * blocksize)</code>.
+ * 
+ * + *

To get the memory required for decompression by {@link + * BZip2CompressorInputStream} use

+ * + *
+ * <code>65k + (5 * blocksize)</code>.
+ * 
+ * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
Memory usage by blocksize
Blocksize Compression
+ * memory usage
Decompression
+ * memory usage
100k1300k565k
200k2200k1065k
300k3100k1565k
400k4000k2065k
500k4900k2565k
600k5800k3065k
700k6700k3565k
800k7600k4065k
900k8500k4565k
+ * + *

+ * For decompression BZip2CompressorInputStream allocates less memory if the + * bzipped input is smaller than one block. + *

+ * + *

+ * Instances of this class are not threadsafe. + *

+ * + *

+ * TODO: Update to BZip2 1.0.1 + *

+ * @NotThreadSafe + */ +public class BZip2OutputStream extends CompressorOutputStream + implements BZip2Constants { + + /** + * The minimum supported blocksize == 1. + */ + public static final int MIN_BLOCKSIZE = 1; + + /** + * The maximum supported blocksize == 9. + */ + public static final int MAX_BLOCKSIZE = 9; + + private static final int SETMASK = (1 << 21); + private static final int CLEARMASK = (~SETMASK); + private static final int GREATER_ICOST = 15; + private static final int LESSER_ICOST = 0; + private static final int SMALL_THRESH = 20; + private static final int DEPTH_THRESH = 10; + private static final int WORK_FACTOR = 30; + + /* + *

If you are ever unlucky/improbable enough to get a stack + * overflow whilst sorting, increase the following constant and + * try again. In practice I have never seen the stack go above 27 + * elems, so the following limit seems very generous.

+ */ + private static final int QSORT_STACK_SIZE = 1000; + + /** + * Knuth's increments seem to work better than Incerpi-Sedgewick here. + * Possibly because the number of elems to sort is usually small, typically + * <= 20. + */ + private static final int[] INCS = { 1, 4, 13, 40, 121, 364, 1093, 3280, + 9841, 29524, 88573, 265720, 797161, + 2391484 }; + + private static void hbMakeCodeLengths(final byte[] len, final int[] freq, + final Data dat, final int alphaSize, + final int maxLen) { + /* + * Nodes and heap entries run from 1. Entry 0 for both the heap and + * nodes is a sentinel. + */ + final int[] heap = dat.heap; + final int[] weight = dat.weight; + final int[] parent = dat.parent; + + for (int i = alphaSize; --i >= 0;) { + weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8; + } + + for (boolean tooLong = true; tooLong;) { + tooLong = false; + + int nNodes = alphaSize; + int nHeap = 0; + heap[0] = 0; + weight[0] = 0; + parent[0] = -2; + + for (int i = 1; i <= alphaSize; i++) { + parent[i] = -1; + nHeap++; + heap[nHeap] = i; + + int zz = nHeap; + int tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + + while (nHeap > 1) { + int n1 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + + int yy = 0; + int zz = 1; + int tmp = heap[1]; + + while (true) { + yy = zz << 1; + + if (yy > nHeap) { + break; + } + + if ((yy < nHeap) + && (weight[heap[yy + 1]] < weight[heap[yy]])) { + yy++; + } + + if (weight[tmp] < weight[heap[yy]]) { + break; + } + + heap[zz] = heap[yy]; + zz = yy; + } + + heap[zz] = tmp; + + int n2 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + + yy = 0; + zz = 1; + tmp = heap[1]; + + while (true) { + yy = zz << 1; + + if (yy > nHeap) { + break; + } + + if ((yy < nHeap) + && (weight[heap[yy + 1]] < weight[heap[yy]])) { + yy++; + } + + if (weight[tmp] < weight[heap[yy]]) { + break; + } + + heap[zz] = heap[yy]; + zz = yy; + } + + heap[zz] = tmp; + nNodes++; + parent[n1] = 
parent[n2] = nNodes; + + final int weight_n1 = weight[n1]; + final int weight_n2 = weight[n2]; + weight[nNodes] = ((weight_n1 & 0xffffff00) + + (weight_n2 & 0xffffff00)) + | (1 + (((weight_n1 & 0x000000ff) + > (weight_n2 & 0x000000ff)) + ? (weight_n1 & 0x000000ff) + : (weight_n2 & 0x000000ff))); + + parent[nNodes] = -1; + nHeap++; + heap[nHeap] = nNodes; + + tmp = 0; + zz = nHeap; + tmp = heap[zz]; + final int weight_tmp = weight[tmp]; + while (weight_tmp < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + + } + + for (int i = 1; i <= alphaSize; i++) { + int j = 0; + int k = i; + + for (int parent_k; (parent_k = parent[k]) >= 0;) { + k = parent_k; + j++; + } + + len[i - 1] = (byte) j; + if (j > maxLen) { + tooLong = true; + } + } + + if (tooLong) { + for (int i = 1; i < alphaSize; i++) { + int j = weight[i] >> 8; + j = 1 + (j >> 1); + weight[i] = j << 8; + } + } + } + } + + /** + * Index of the last char in the block, so the block size == last + 1. + */ + private int last; + + /** + * Index in fmap[] of original string after sorting. + */ + private int origPtr; + + /** + * Always: in the range 0 .. 9. The current block size is 100000 * this + * number. + */ + private final int blockSize100k; + + private boolean blockRandomised; + + private int bsBuff; + private int bsLive; + private final CRC crc = new CRC(); + + private int nInUse; + + private int nMTF; + + /* + * Used when sorting. If too many long comparisons happen, we stop sorting, + * randomise the block slightly, and try again. + */ + private int workDone; + private int workLimit; + private boolean firstAttempt; + + private int currentChar = -1; + private int runLength = 0; + + private int blockCRC; + private int combinedCRC; + private int allowableBlockSize; + + /** + * All memory intensive stuff. + */ + private Data data; + + private OutputStream out; + + /** + * Chooses a blocksize based on the given length of the data to compress. 
+ * + * @return The blocksize, between {@link #MIN_BLOCKSIZE} and + * {@link #MAX_BLOCKSIZE} both inclusive. For a negative + * inputLength this method returns MAX_BLOCKSIZE + * always. + * + * @param inputLength + * The length of the data which will be compressed by + * CBZip2OutputStream. + */ + public static int chooseBlockSize(long inputLength) { + return (inputLength > 0) ? (int) Math + .min((inputLength / 132000) + 1, 9) : MAX_BLOCKSIZE; + } + + /** + * Constructs a new CBZip2OutputStream with a blocksize of 900k. + * + * @param out + * the destination stream. + * + * @throws IOException + * if an I/O error occurs in the specified stream. + * @throws NullPointerException + * if out == null. + */ + public BZip2OutputStream(final OutputStream out) + throws IOException { + this(out, MAX_BLOCKSIZE); + } + + /** + * Constructs a new CBZip2OutputStream with specified blocksize. + * + * @param out + * the destination stream. + * @param blockSize + * the blockSize as 100k units. + * + * @throws IOException + * if an I/O error occurs in the specified stream. + * @throws IllegalArgumentException + * if (blockSize < 1) || (blockSize > 9). + * @throws NullPointerException + * if out == null. 
+ * + * @see #MIN_BLOCKSIZE + * @see #MAX_BLOCKSIZE + */ + public BZip2OutputStream(final OutputStream out, + final int blockSize) + throws IOException { + super(); + + if (blockSize < 1) { + throw new IllegalArgumentException("blockSize(" + blockSize + + ") < 1"); + } + if (blockSize > 9) { + throw new IllegalArgumentException("blockSize(" + blockSize + + ") > 9"); + } + + this.blockSize100k = blockSize; + this.out = out; + init(); + } + + /** {@inheritDoc} */ + public void write(final int b) throws IOException { + if (this.out != null) { + write0(b); + } else { + throw new IOException("closed"); + } + } + + private void writeRun() throws IOException { + final int lastShadow = this.last; + + if (lastShadow < this.allowableBlockSize) { + final int currentCharShadow = this.currentChar; + final Data dataShadow = this.data; + dataShadow.inUse[currentCharShadow] = true; + final byte ch = (byte) currentCharShadow; + + int runLengthShadow = this.runLength; + this.crc.updateCRC(currentCharShadow, runLengthShadow); + + switch (runLengthShadow) { + case 1: + dataShadow.block[lastShadow + 2] = ch; + this.last = lastShadow + 1; + break; + + case 2: + dataShadow.block[lastShadow + 2] = ch; + dataShadow.block[lastShadow + 3] = ch; + this.last = lastShadow + 2; + break; + + case 3: { + final byte[] block = dataShadow.block; + block[lastShadow + 2] = ch; + block[lastShadow + 3] = ch; + block[lastShadow + 4] = ch; + this.last = lastShadow + 3; + } + break; + + default: { + runLengthShadow -= 4; + dataShadow.inUse[runLengthShadow] = true; + final byte[] block = dataShadow.block; + block[lastShadow + 2] = ch; + block[lastShadow + 3] = ch; + block[lastShadow + 4] = ch; + block[lastShadow + 5] = ch; + block[lastShadow + 6] = (byte) runLengthShadow; + this.last = lastShadow + 5; + } + break; + + } + } else { + endBlock(); + initBlock(); + writeRun(); + } + } + + /** + * Overriden to close the stream. 
+ */ + protected void finalize() throws Throwable { + finish(); + super.finalize(); + } + + + public void finish() throws IOException { + if (out != null) { + try { + if (this.runLength > 0) { + writeRun(); + } + this.currentChar = -1; + endBlock(); + endCompression(); + } finally { + this.out = null; + this.data = null; + } + } + } + + public void close() throws IOException { + if (out != null) { + OutputStream outShadow = this.out; + finish(); + outShadow.close(); + } + } + + public void flush() throws IOException { + OutputStream outShadow = this.out; + if (outShadow != null) { + outShadow.flush(); + } + } + + /** + * Writes magic bytes like BZ on the first position of the stream + * and bytes indiciating the file-format, which is + * huffmanised, followed by a digit indicating blockSize100k. + * @throws IOException if the magic bytes could not been written + */ + private void init() throws IOException { + bsPutUByte('B'); + bsPutUByte('Z'); + + this.data = new Data(this.blockSize100k); + + // huffmanised magic bytes + bsPutUByte('h'); + bsPutUByte('0' + this.blockSize100k); + + this.combinedCRC = 0; + initBlock(); + } + + private void initBlock() { + // blockNo++; + this.crc.initialiseCRC(); + this.last = -1; + // ch = 0; + + boolean[] inUse = this.data.inUse; + for (int i = 256; --i >= 0;) { + inUse[i] = false; + } + + /* 20 is just a paranoia constant */ + this.allowableBlockSize = (this.blockSize100k * BZip2Constants.BASEBLOCKSIZE) - 20; + } + + private void endBlock() throws IOException { + this.blockCRC = this.crc.getFinalCRC(); + this.combinedCRC = (this.combinedCRC << 1) | (this.combinedCRC >>> 31); + this.combinedCRC ^= this.blockCRC; + + // empty block at end of file + if (this.last == -1) { + return; + } + + /* sort the block and establish posn of original string */ + blockSort(); + + /* + * A 6-byte block header, the value chosen arbitrarily as 0x314159265359 + * :-). 
A 32 bit value does not really give a strong enough guarantee + * that the value will not appear by chance in the compressed + * datastream. Worst-case probability of this event, for a 900k block, + * is about 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48 + * bits. For a compressed file of size 100Gb -- about 100000 blocks -- + * only a 48-bit marker will do. NB: normal compression/ decompression + * donot rely on these statistical properties. They are only important + * when trying to recover blocks from damaged files. + */ + bsPutUByte(0x31); + bsPutUByte(0x41); + bsPutUByte(0x59); + bsPutUByte(0x26); + bsPutUByte(0x53); + bsPutUByte(0x59); + + /* Now the block's CRC, so it is in a known place. */ + bsPutInt(this.blockCRC); + + /* Now a single bit indicating randomisation. */ + if (this.blockRandomised) { + bsW(1, 1); + } else { + bsW(1, 0); + } + + /* Finally, block's contents proper. */ + moveToFrontCodeAndSend(); + } + + private void endCompression() throws IOException { + /* + * Now another magic 48-bit number, 0x177245385090, to indicate the end + * of the last block. (sqrt(pi), if you want to know. I did want to use + * e, but it contains too much repetition -- 27 18 28 18 28 46 -- for me + * to feel statistically comfortable. Call me paranoid.) + */ + bsPutUByte(0x17); + bsPutUByte(0x72); + bsPutUByte(0x45); + bsPutUByte(0x38); + bsPutUByte(0x50); + bsPutUByte(0x90); + + bsPutInt(this.combinedCRC); + bsFinishedWithStream(); + } + + /** + * Returns the blocksize parameter specified at construction time. 
+ */ + public final int getBlockSize() { + return this.blockSize100k; + } + + public void write(final byte[] buf, int offs, final int len) + throws IOException { + if (offs < 0) { + throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); + } + if (len < 0) { + throw new IndexOutOfBoundsException("len(" + len + ") < 0."); + } + if (offs + len > buf.length) { + throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + + len + ") > buf.length(" + + buf.length + ")."); + } + if (this.out == null) { + throw new IOException("stream closed"); + } + + for (int hi = offs + len; offs < hi;) { + write0(buf[offs++]); + } + } + + private void write0(int b) throws IOException { + if (this.currentChar != -1) { + b &= 0xff; + if (this.currentChar == b) { + if (++this.runLength > 254) { + writeRun(); + this.currentChar = -1; + this.runLength = 0; + } + // else nothing to do + } else { + writeRun(); + this.runLength = 1; + this.currentChar = b; + } + } else { + this.currentChar = b & 0xff; + this.runLength++; + } + } + + private static void hbAssignCodes(final int[] code, final byte[] length, + final int minLen, final int maxLen, + final int alphaSize) { + int vec = 0; + for (int n = minLen; n <= maxLen; n++) { + for (int i = 0; i < alphaSize; i++) { + if ((length[i] & 0xff) == n) { + code[i] = vec; + vec++; + } + } + vec <<= 1; + } + } + + private void bsFinishedWithStream() throws IOException { + while (this.bsLive > 0) { + int ch = this.bsBuff >> 24; + this.out.write(ch); // write 8-bit + this.bsBuff <<= 8; + this.bsLive -= 8; + } + } + + private void bsW(final int n, final int v) throws IOException { + final OutputStream outShadow = this.out; + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + + this.bsBuff = bsBuffShadow | (v << (32 - bsLiveShadow - n)); + this.bsLive = bsLiveShadow + n; + } + + private void 
bsPutUByte(final int c) throws IOException { + bsW(8, c); + } + + private void bsPutInt(final int u) throws IOException { + bsW(8, (u >> 24) & 0xff); + bsW(8, (u >> 16) & 0xff); + bsW(8, (u >> 8) & 0xff); + bsW(8, u & 0xff); + } + + private void sendMTFValues() throws IOException { + final byte[][] len = this.data.sendMTFValues_len; + final int alphaSize = this.nInUse + 2; + + for (int t = N_GROUPS; --t >= 0;) { + byte[] len_t = len[t]; + for (int v = alphaSize; --v >= 0;) { + len_t[v] = GREATER_ICOST; + } + } + + /* Decide how many coding tables to use */ + // assert (this.nMTF > 0) : this.nMTF; + final int nGroups = (this.nMTF < 200) ? 2 : (this.nMTF < 600) ? 3 + : (this.nMTF < 1200) ? 4 : (this.nMTF < 2400) ? 5 : 6; + + /* Generate an initial set of coding tables */ + sendMTFValues0(nGroups, alphaSize); + + /* + * Iterate up to N_ITERS times to improve the tables. + */ + final int nSelectors = sendMTFValues1(nGroups, alphaSize); + + /* Compute MTF values for the selectors. */ + sendMTFValues2(nGroups, nSelectors); + + /* Assign actual codes for the tables. */ + sendMTFValues3(nGroups, alphaSize); + + /* Transmit the mapping table. */ + sendMTFValues4(); + + /* Now the selectors. */ + sendMTFValues5(nGroups, nSelectors); + + /* Now the coding tables. 
*/ + sendMTFValues6(nGroups, alphaSize); + + /* And finally, the block data proper */ + sendMTFValues7(nSelectors); + } + + private void sendMTFValues0(final int nGroups, final int alphaSize) { + final byte[][] len = this.data.sendMTFValues_len; + final int[] mtfFreq = this.data.mtfFreq; + + int remF = this.nMTF; + int gs = 0; + + for (int nPart = nGroups; nPart > 0; nPart--) { + final int tFreq = remF / nPart; + int ge = gs - 1; + int aFreq = 0; + + for (final int a = alphaSize - 1; (aFreq < tFreq) && (ge < a);) { + aFreq += mtfFreq[++ge]; + } + + if ((ge > gs) && (nPart != nGroups) && (nPart != 1) + && (((nGroups - nPart) & 1) != 0)) { + aFreq -= mtfFreq[ge--]; + } + + final byte[] len_np = len[nPart - 1]; + for (int v = alphaSize; --v >= 0;) { + if ((v >= gs) && (v <= ge)) { + len_np[v] = LESSER_ICOST; + } else { + len_np[v] = GREATER_ICOST; + } + } + + gs = ge + 1; + remF -= aFreq; + } + } + + private int sendMTFValues1(final int nGroups, final int alphaSize) { + final Data dataShadow = this.data; + final int[][] rfreq = dataShadow.sendMTFValues_rfreq; + final int[] fave = dataShadow.sendMTFValues_fave; + final short[] cost = dataShadow.sendMTFValues_cost; + final char[] sfmap = dataShadow.sfmap; + final byte[] selector = dataShadow.selector; + final byte[][] len = dataShadow.sendMTFValues_len; + final byte[] len_0 = len[0]; + final byte[] len_1 = len[1]; + final byte[] len_2 = len[2]; + final byte[] len_3 = len[3]; + final byte[] len_4 = len[4]; + final byte[] len_5 = len[5]; + final int nMTFShadow = this.nMTF; + + int nSelectors = 0; + + for (int iter = 0; iter < N_ITERS; iter++) { + for (int t = nGroups; --t >= 0;) { + fave[t] = 0; + int[] rfreqt = rfreq[t]; + for (int i = alphaSize; --i >= 0;) { + rfreqt[i] = 0; + } + } + + nSelectors = 0; + + for (int gs = 0; gs < this.nMTF;) { + /* Set group start & end marks. */ + + /* + * Calculate the cost of this group as coded by each of the + * coding tables. 
+ */ + + final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1); + + if (nGroups == N_GROUPS) { + // unrolled version of the else-block + + short cost0 = 0; + short cost1 = 0; + short cost2 = 0; + short cost3 = 0; + short cost4 = 0; + short cost5 = 0; + + for (int i = gs; i <= ge; i++) { + final int icv = sfmap[i]; + cost0 += len_0[icv] & 0xff; + cost1 += len_1[icv] & 0xff; + cost2 += len_2[icv] & 0xff; + cost3 += len_3[icv] & 0xff; + cost4 += len_4[icv] & 0xff; + cost5 += len_5[icv] & 0xff; + } + + cost[0] = cost0; + cost[1] = cost1; + cost[2] = cost2; + cost[3] = cost3; + cost[4] = cost4; + cost[5] = cost5; + + } else { + for (int t = nGroups; --t >= 0;) { + cost[t] = 0; + } + + for (int i = gs; i <= ge; i++) { + final int icv = sfmap[i]; + for (int t = nGroups; --t >= 0;) { + cost[t] += len[t][icv] & 0xff; + } + } + } + + /* + * Find the coding table which is best for this group, and + * record its identity in the selector table. + */ + int bt = -1; + for (int t = nGroups, bc = 999999999; --t >= 0;) { + final int cost_t = cost[t]; + if (cost_t < bc) { + bc = cost_t; + bt = t; + } + } + + fave[bt]++; + selector[nSelectors] = (byte) bt; + nSelectors++; + + /* + * Increment the symbol frequencies for the selected table. + */ + final int[] rfreq_bt = rfreq[bt]; + for (int i = gs; i <= ge; i++) { + rfreq_bt[sfmap[i]]++; + } + + gs = ge + 1; + } + + /* + * Recompute the tables based on the accumulated frequencies. 
+ */ + for (int t = 0; t < nGroups; t++) { + hbMakeCodeLengths(len[t], rfreq[t], this.data, alphaSize, 20); + } + } + + return nSelectors; + } + + private void sendMTFValues2(final int nGroups, final int nSelectors) { + // assert (nGroups < 8) : nGroups; + + final Data dataShadow = this.data; + byte[] pos = dataShadow.sendMTFValues2_pos; + + for (int i = nGroups; --i >= 0;) { + pos[i] = (byte) i; + } + + for (int i = 0; i < nSelectors; i++) { + final byte ll_i = dataShadow.selector[i]; + byte tmp = pos[0]; + int j = 0; + + while (ll_i != tmp) { + j++; + byte tmp2 = tmp; + tmp = pos[j]; + pos[j] = tmp2; + } + + pos[0] = tmp; + dataShadow.selectorMtf[i] = (byte) j; + } + } + + private void sendMTFValues3(final int nGroups, final int alphaSize) { + int[][] code = this.data.sendMTFValues_code; + byte[][] len = this.data.sendMTFValues_len; + + for (int t = 0; t < nGroups; t++) { + int minLen = 32; + int maxLen = 0; + final byte[] len_t = len[t]; + for (int i = alphaSize; --i >= 0;) { + final int l = len_t[i] & 0xff; + if (l > maxLen) { + maxLen = l; + } + if (l < minLen) { + minLen = l; + } + } + + // assert (maxLen <= 20) : maxLen; + // assert (minLen >= 1) : minLen; + + hbAssignCodes(code[t], len[t], minLen, maxLen, alphaSize); + } + } + + private void sendMTFValues4() throws IOException { + final boolean[] inUse = this.data.inUse; + final boolean[] inUse16 = this.data.sentMTFValues4_inUse16; + + for (int i = 16; --i >= 0;) { + inUse16[i] = false; + final int i16 = i * 16; + for (int j = 16; --j >= 0;) { + if (inUse[i16 + j]) { + inUse16[i] = true; + } + } + } + + for (int i = 0; i < 16; i++) { + bsW(1, inUse16[i] ? 1 : 0); + } + + final OutputStream outShadow = this.out; + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int i = 0; i < 16; i++) { + if (inUse16[i]) { + final int i16 = i * 16; + for (int j = 0; j < 16; j++) { + // inlined: bsW(1, inUse[i16 + j] ? 
1 : 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + if (inUse[i16 + j]) { + bsBuffShadow |= 1 << (32 - bsLiveShadow - 1); + } + bsLiveShadow++; + } + } + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues5(final int nGroups, final int nSelectors) + throws IOException { + bsW(3, nGroups); + bsW(15, nSelectors); + + final OutputStream outShadow = this.out; + final byte[] selectorMtf = this.data.selectorMtf; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int i = 0; i < nSelectors; i++) { + for (int j = 0, hj = selectorMtf[i] & 0xff; j < hj; j++) { + // inlined: bsW(1, 1); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 1 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + + // inlined: bsW(1, 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues6(final int nGroups, final int alphaSize) + throws IOException { + final byte[][] len = this.data.sendMTFValues_len; + final OutputStream outShadow = this.out; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int t = 0; t < nGroups; t++) { + byte[] len_t = len[t]; + int curr = len_t[0] & 0xff; + + // inlined: bsW(5, curr); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= curr << (32 - bsLiveShadow - 5); + bsLiveShadow += 5; + + for (int i = 0; i < alphaSize; i++) { + int lti = len_t[i] & 0xff; + while (curr < lti) { + // inlined: bsW(2, 2); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 
8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 2 << (32 - bsLiveShadow - 2); + bsLiveShadow += 2; + + curr++; /* 10 */ + } + + while (curr > lti) { + // inlined: bsW(2, 3); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 3 << (32 - bsLiveShadow - 2); + bsLiveShadow += 2; + + curr--; /* 11 */ + } + + // inlined: bsW(1, 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues7(final int nSelectors) throws IOException { + final Data dataShadow = this.data; + final byte[][] len = dataShadow.sendMTFValues_len; + final int[][] code = dataShadow.sendMTFValues_code; + final OutputStream outShadow = this.out; + final byte[] selector = dataShadow.selector; + final char[] sfmap = dataShadow.sfmap; + final int nMTFShadow = this.nMTF; + + int selCtr = 0; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int gs = 0; gs < nMTFShadow;) { + final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1); + final int selector_selCtr = selector[selCtr] & 0xff; + final int[] code_selCtr = code[selector_selCtr]; + final byte[] len_selCtr = len[selector_selCtr]; + + while (gs <= ge) { + final int sfmap_i = sfmap[gs]; + + // + // inlined: bsW(len_selCtr[sfmap_i] & 0xff, + // code_selCtr[sfmap_i]); + // + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + final int n = len_selCtr[sfmap_i] & 0xFF; + bsBuffShadow |= code_selCtr[sfmap_i] << (32 - bsLiveShadow - n); + bsLiveShadow += n; + + gs++; + } + + gs = ge + 1; + selCtr++; + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void 
moveToFrontCodeAndSend() throws IOException { + bsW(24, this.origPtr); + generateMTFValues(); + sendMTFValues(); + } + + /** + * This is the most hammered method of this class. + * + *

+ * This is the version using unrolled loops. Normally I never use such ones + * in Java code. The unrolling has shown a noticable performance improvement + * on JRE 1.4.2 (Linux i586 / HotSpot Client). Of course it depends on the + * JIT compiler of the vm. + *

+ */ + private boolean mainSimpleSort(final Data dataShadow, final int lo, + final int hi, final int d) { + final int bigN = hi - lo + 1; + if (bigN < 2) { + return this.firstAttempt && (this.workDone > this.workLimit); + } + + int hp = 0; + while (INCS[hp] < bigN) { + hp++; + } + + final int[] fmap = dataShadow.fmap; + final char[] quadrant = dataShadow.quadrant; + final byte[] block = dataShadow.block; + final int lastShadow = this.last; + final int lastPlus1 = lastShadow + 1; + final boolean firstAttemptShadow = this.firstAttempt; + final int workLimitShadow = this.workLimit; + int workDoneShadow = this.workDone; + + // Following block contains unrolled code which could be shortened by + // coding it in additional loops. + + HP: while (--hp >= 0) { + final int h = INCS[hp]; + final int mj = lo + h - 1; + + for (int i = lo + h; i <= hi;) { + // copy + for (int k = 3; (i <= hi) && (--k >= 0); i++) { + final int v = fmap[i]; + final int vd = v + d; + int j = i; + + // for (int a; + // (j > mj) && mainGtU((a = fmap[j - h]) + d, vd, + // block, quadrant, lastShadow); + // j -= h) { + // fmap[j] = a; + // } + // + // unrolled version: + + // start inline mainGTU + boolean onceRunned = false; + int a = 0; + + HAMMER: while (true) { + if (onceRunned) { + fmap[j] = a; + if ((j -= h) <= mj) { + break HAMMER; + } + } else { + onceRunned = true; + } + + a = fmap[j - h]; + int i1 = a + d; + int i2 = vd; + + // following could be done in a loop, but + // unrolled it for performance: + if (block[i1 + 1] == block[i2 + 1]) { + if (block[i1 + 2] == block[i2 + 2]) { + if (block[i1 + 3] == block[i2 + 3]) { + if (block[i1 + 4] == block[i2 + 4]) { + if (block[i1 + 5] == block[i2 + 5]) { + if (block[(i1 += 6)] == block[(i2 += 6)]) { + int x = lastShadow; + X: while (x > 0) { + x -= 4; + + if (block[i1 + 1] == block[i2 + 1]) { + if (quadrant[i1] == quadrant[i2]) { + if (block[i1 + 2] == block[i2 + 2]) { + if (quadrant[i1 + 1] == quadrant[i2 + 1]) { + if (block[i1 + 3] == block[i2 + 3]) 
{ + if (quadrant[i1 + 2] == quadrant[i2 + 2]) { + if (block[i1 + 4] == block[i2 + 4]) { + if (quadrant[i1 + 3] == quadrant[i2 + 3]) { + if ((i1 += 4) >= lastPlus1) { + i1 -= lastPlus1; + } + if ((i2 += 4) >= lastPlus1) { + i2 -= lastPlus1; + } + workDoneShadow++; + continue X; + } else if ((quadrant[i1 + 3] > quadrant[i2 + 3])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1 + 2] > quadrant[i2 + 2])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1 + 1] > quadrant[i2 + 1])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1] > quadrant[i2])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + + } + break HAMMER; + } // while x > 0 + else { + if ((block[i1] & 0xff) > (block[i2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } + } else if ((block[i1 + 5] & 0xff) > (block[i2 + 5] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + + } // HAMMER + // end inline mainGTU + + fmap[j] = v; + } + + if (firstAttemptShadow && (i <= hi) + && (workDoneShadow > 
workLimitShadow)) { + break HP; + } + } + } + + this.workDone = workDoneShadow; + return firstAttemptShadow && (workDoneShadow > workLimitShadow); + } + + private static void vswap(int[] fmap, int p1, int p2, int n) { + n += p1; + while (p1 < n) { + int t = fmap[p1]; + fmap[p1++] = fmap[p2]; + fmap[p2++] = t; + } + } + + private static byte med3(byte a, byte b, byte c) { + return (a < b) ? (b < c ? b : a < c ? c : a) : (b > c ? b : a > c ? c + : a); + } + + private void blockSort() { + this.workLimit = WORK_FACTOR * this.last; + this.workDone = 0; + this.blockRandomised = false; + this.firstAttempt = true; + mainSort(); + + if (this.firstAttempt && (this.workDone > this.workLimit)) { + randomiseBlock(); + this.workLimit = this.workDone = 0; + this.firstAttempt = false; + mainSort(); + } + + int[] fmap = this.data.fmap; + this.origPtr = -1; + for (int i = 0, lastShadow = this.last; i <= lastShadow; i++) { + if (fmap[i] == 0) { + this.origPtr = i; + break; + } + } + + // assert (this.origPtr != -1) : this.origPtr; + } + + /** + * Method "mainQSort3", file "blocksort.c", BZip2 1.0.2 + */ + private void mainQSort3(final Data dataShadow, final int loSt, + final int hiSt, final int dSt) { + final int[] stack_ll = dataShadow.stack_ll; + final int[] stack_hh = dataShadow.stack_hh; + final int[] stack_dd = dataShadow.stack_dd; + final int[] fmap = dataShadow.fmap; + final byte[] block = dataShadow.block; + + stack_ll[0] = loSt; + stack_hh[0] = hiSt; + stack_dd[0] = dSt; + + for (int sp = 1; --sp >= 0;) { + final int lo = stack_ll[sp]; + final int hi = stack_hh[sp]; + final int d = stack_dd[sp]; + + if ((hi - lo < SMALL_THRESH) || (d > DEPTH_THRESH)) { + if (mainSimpleSort(dataShadow, lo, hi, d)) { + return; + } + } else { + final int d1 = d + 1; + final int med = med3(block[fmap[lo] + d1], + block[fmap[hi] + d1], block[fmap[(lo + hi) >>> 1] + d1]) & 0xff; + + int unLo = lo; + int unHi = hi; + int ltLo = lo; + int gtHi = hi; + + while (true) { + while (unLo <= unHi) { + 
final int n = (block[fmap[unLo] + d1] & 0xff) + - med; + if (n == 0) { + final int temp = fmap[unLo]; + fmap[unLo++] = fmap[ltLo]; + fmap[ltLo++] = temp; + } else if (n < 0) { + unLo++; + } else { + break; + } + } + + while (unLo <= unHi) { + final int n = (block[fmap[unHi] + d1] & 0xff) + - med; + if (n == 0) { + final int temp = fmap[unHi]; + fmap[unHi--] = fmap[gtHi]; + fmap[gtHi--] = temp; + } else if (n > 0) { + unHi--; + } else { + break; + } + } + + if (unLo <= unHi) { + final int temp = fmap[unLo]; + fmap[unLo++] = fmap[unHi]; + fmap[unHi--] = temp; + } else { + break; + } + } + + if (gtHi < ltLo) { + stack_ll[sp] = lo; + stack_hh[sp] = hi; + stack_dd[sp] = d1; + sp++; + } else { + int n = ((ltLo - lo) < (unLo - ltLo)) ? (ltLo - lo) + : (unLo - ltLo); + vswap(fmap, lo, unLo - n, n); + int m = ((hi - gtHi) < (gtHi - unHi)) ? (hi - gtHi) + : (gtHi - unHi); + vswap(fmap, unLo, hi - m + 1, m); + + n = lo + unLo - ltLo - 1; + m = hi - (gtHi - unHi) + 1; + + stack_ll[sp] = lo; + stack_hh[sp] = n; + stack_dd[sp] = d; + sp++; + + stack_ll[sp] = n + 1; + stack_hh[sp] = m - 1; + stack_dd[sp] = d1; + sp++; + + stack_ll[sp] = m; + stack_hh[sp] = hi; + stack_dd[sp] = d; + sp++; + } + } + } + } + + private void mainSort() { + final Data dataShadow = this.data; + final int[] runningOrder = dataShadow.mainSort_runningOrder; + final int[] copy = dataShadow.mainSort_copy; + final boolean[] bigDone = dataShadow.mainSort_bigDone; + final int[] ftab = dataShadow.ftab; + final byte[] block = dataShadow.block; + final int[] fmap = dataShadow.fmap; + final char[] quadrant = dataShadow.quadrant; + final int lastShadow = this.last; + final int workLimitShadow = this.workLimit; + final boolean firstAttemptShadow = this.firstAttempt; + + // Set up the 2-byte frequency table + for (int i = 65537; --i >= 0;) { + ftab[i] = 0; + } + + /* + * In the various block-sized structures, live data runs from 0 to + * last+NUM_OVERSHOOT_BYTES inclusive. 
First, set up the overshoot area + * for block. + */ + for (int i = 0; i < NUM_OVERSHOOT_BYTES; i++) { + block[lastShadow + i + 2] = block[(i % (lastShadow + 1)) + 1]; + } + for (int i = lastShadow + NUM_OVERSHOOT_BYTES +1; --i >= 0;) { + quadrant[i] = 0; + } + block[0] = block[lastShadow + 1]; + + // Complete the initial radix sort: + + int c1 = block[0] & 0xff; + for (int i = 0; i <= lastShadow; i++) { + final int c2 = block[i + 1] & 0xff; + ftab[(c1 << 8) + c2]++; + c1 = c2; + } + + for (int i = 1; i <= 65536; i++) + ftab[i] += ftab[i - 1]; + + c1 = block[1] & 0xff; + for (int i = 0; i < lastShadow; i++) { + final int c2 = block[i + 2] & 0xff; + fmap[--ftab[(c1 << 8) + c2]] = i; + c1 = c2; + } + + fmap[--ftab[((block[lastShadow + 1] & 0xff) << 8) + (block[1] & 0xff)]] = lastShadow; + + /* + * Now ftab contains the first loc of every small bucket. Calculate the + * running order, from smallest to largest big bucket. + */ + for (int i = 256; --i >= 0;) { + bigDone[i] = false; + runningOrder[i] = i; + } + + for (int h = 364; h != 1;) { + h /= 3; + for (int i = h; i <= 255; i++) { + final int vv = runningOrder[i]; + final int a = ftab[(vv + 1) << 8] - ftab[vv << 8]; + final int b = h - 1; + int j = i; + for (int ro = runningOrder[j - h]; (ftab[(ro + 1) << 8] - ftab[ro << 8]) > a; ro = runningOrder[j + - h]) { + runningOrder[j] = ro; + j -= h; + if (j <= b) { + break; + } + } + runningOrder[j] = vv; + } + } + + /* + * The main sorting loop. + */ + for (int i = 0; i <= 255; i++) { + /* + * Process big buckets, starting with the least full. + */ + final int ss = runningOrder[i]; + + // Step 1: + /* + * Complete the big bucket [ss] by quicksorting any unsorted small + * buckets [ss, j]. Hopefully previous pointer-scanning phases have + * already completed many of the small buckets [ss, j], so we don't + * have to sort them at all. 
+ */ + for (int j = 0; j <= 255; j++) { + final int sb = (ss << 8) + j; + final int ftab_sb = ftab[sb]; + if ((ftab_sb & SETMASK) != SETMASK) { + final int lo = ftab_sb & CLEARMASK; + final int hi = (ftab[sb + 1] & CLEARMASK) - 1; + if (hi > lo) { + mainQSort3(dataShadow, lo, hi, 2); + if (firstAttemptShadow + && (this.workDone > workLimitShadow)) { + return; + } + } + ftab[sb] = ftab_sb | SETMASK; + } + } + + // Step 2: + // Now scan this big bucket so as to synthesise the + // sorted order for small buckets [t, ss] for all t != ss. + + for (int j = 0; j <= 255; j++) { + copy[j] = ftab[(j << 8) + ss] & CLEARMASK; + } + + for (int j = ftab[ss << 8] & CLEARMASK, hj = (ftab[(ss + 1) << 8] & CLEARMASK); j < hj; j++) { + final int fmap_j = fmap[j]; + c1 = block[fmap_j] & 0xff; + if (!bigDone[c1]) { + fmap[copy[c1]] = (fmap_j == 0) ? lastShadow : (fmap_j - 1); + copy[c1]++; + } + } + + for (int j = 256; --j >= 0;) + ftab[(j << 8) + ss] |= SETMASK; + + // Step 3: + /* + * The ss big bucket is now done. Record this fact, and update the + * quadrant descriptors. Remember to update quadrants in the + * overshoot area too, if necessary. The "if (i < 255)" test merely + * skips this updating for the last bucket processed, since updating + * for the last bucket is pointless. 
+ */ + bigDone[ss] = true; + + if (i < 255) { + final int bbStart = ftab[ss << 8] & CLEARMASK; + final int bbSize = (ftab[(ss + 1) << 8] & CLEARMASK) - bbStart; + int shifts = 0; + + while ((bbSize >> shifts) > 65534) { + shifts++; + } + + for (int j = 0; j < bbSize; j++) { + final int a2update = fmap[bbStart + j]; + final char qVal = (char) (j >> shifts); + quadrant[a2update] = qVal; + if (a2update < NUM_OVERSHOOT_BYTES) { + quadrant[a2update + lastShadow + 1] = qVal; + } + } + } + + } + } + + private void randomiseBlock() { + final boolean[] inUse = this.data.inUse; + final byte[] block = this.data.block; + final int lastShadow = this.last; + + for (int i = 256; --i >= 0;) + inUse[i] = false; + + int rNToGo = 0; + int rTPos = 0; + for (int i = 0, j = 1; i <= lastShadow; i = j, j++) { + if (rNToGo == 0) { + rNToGo = (char) Rand.rNums(rTPos); + if (++rTPos == 512) { + rTPos = 0; + } + } + + rNToGo--; + block[j] ^= ((rNToGo == 1) ? 1 : 0); + + // handle 16 bit signed numbers + inUse[block[j] & 0xff] = true; + } + + this.blockRandomised = true; + } + + private void generateMTFValues() { + final int lastShadow = this.last; + final Data dataShadow = this.data; + final boolean[] inUse = dataShadow.inUse; + final byte[] block = dataShadow.block; + final int[] fmap = dataShadow.fmap; + final char[] sfmap = dataShadow.sfmap; + final int[] mtfFreq = dataShadow.mtfFreq; + final byte[] unseqToSeq = dataShadow.unseqToSeq; + final byte[] yy = dataShadow.generateMTFValues_yy; + + // make maps + int nInUseShadow = 0; + for (int i = 0; i < 256; i++) { + if (inUse[i]) { + unseqToSeq[i] = (byte) nInUseShadow; + nInUseShadow++; + } + } + this.nInUse = nInUseShadow; + + final int eob = nInUseShadow + 1; + + for (int i = eob; i >= 0; i--) { + mtfFreq[i] = 0; + } + + for (int i = nInUseShadow; --i >= 0;) { + yy[i] = (byte) i; + } + + int wr = 0; + int zPend = 0; + + for (int i = 0; i <= lastShadow; i++) { + final byte ll_i = unseqToSeq[block[fmap[i]] & 0xff]; + byte tmp = yy[0]; + int j 
= 0; + + while (ll_i != tmp) { + j++; + byte tmp2 = tmp; + tmp = yy[j]; + yy[j] = tmp2; + } + yy[0] = tmp; + + if (j == 0) { + zPend++; + } else { + if (zPend > 0) { + zPend--; + while (true) { + if ((zPend & 1) == 0) { + sfmap[wr] = RUNA; + wr++; + mtfFreq[RUNA]++; + } else { + sfmap[wr] = RUNB; + wr++; + mtfFreq[RUNB]++; + } + + if (zPend >= 2) { + zPend = (zPend - 2) >> 1; + } else { + break; + } + } + zPend = 0; + } + sfmap[wr] = (char) (j + 1); + wr++; + mtfFreq[j + 1]++; + } + } + + if (zPend > 0) { + zPend--; + while (true) { + if ((zPend & 1) == 0) { + sfmap[wr] = RUNA; + wr++; + mtfFreq[RUNA]++; + } else { + sfmap[wr] = RUNB; + wr++; + mtfFreq[RUNB]++; + } + + if (zPend >= 2) { + zPend = (zPend - 2) >> 1; + } else { + break; + } + } + } + + sfmap[wr] = (char) eob; + mtfFreq[eob]++; + this.nMTF = wr + 1; + } + + private static final class Data extends Object { + + // with blockSize 900k + final boolean[] inUse = new boolean[256]; // 256 byte + final byte[] unseqToSeq = new byte[256]; // 256 byte + final int[] mtfFreq = new int[MAX_ALPHA_SIZE]; // 1032 byte + final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte + final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte + + final byte[] generateMTFValues_yy = new byte[256]; // 256 byte + final byte[][] sendMTFValues_len = new byte[N_GROUPS][MAX_ALPHA_SIZE]; // 1548 + // byte + final int[][] sendMTFValues_rfreq = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 + // byte + final int[] sendMTFValues_fave = new int[N_GROUPS]; // 24 byte + final short[] sendMTFValues_cost = new short[N_GROUPS]; // 12 byte + final int[][] sendMTFValues_code = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 + // byte + final byte[] sendMTFValues2_pos = new byte[N_GROUPS]; // 6 byte + final boolean[] sentMTFValues4_inUse16 = new boolean[16]; // 16 byte + + final int[] stack_ll = new int[QSORT_STACK_SIZE]; // 4000 byte + final int[] stack_hh = new int[QSORT_STACK_SIZE]; // 4000 byte + final int[] stack_dd = new 
int[QSORT_STACK_SIZE]; // 4000 byte + + final int[] mainSort_runningOrder = new int[256]; // 1024 byte + final int[] mainSort_copy = new int[256]; // 1024 byte + final boolean[] mainSort_bigDone = new boolean[256]; // 256 byte + + final int[] heap = new int[MAX_ALPHA_SIZE + 2]; // 1040 byte + final int[] weight = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte + final int[] parent = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte + + final int[] ftab = new int[65537]; // 262148 byte + // ------------ + // 333408 byte + + final byte[] block; // 900021 byte + final int[] fmap; // 3600000 byte + final char[] sfmap; // 3600000 byte + // ------------ + // 8433529 byte + // ============ + + /** + * Array instance identical to sfmap, both are used only + * temporarily and indepently, so we do not need to allocate + * additional memory. + */ + final char[] quadrant; + + Data(int blockSize100k) { + super(); + + final int n = blockSize100k * BZip2Constants.BASEBLOCKSIZE; + this.block = new byte[(n + 1 + NUM_OVERSHOOT_BYTES)]; + this.fmap = new int[n]; + this.sfmap = new char[2 * n]; + this.quadrant = this.sfmap; + } + + } + +} diff --git a/Tools/Cache Editor/src/alex/compressors/CRC.java b/Tools/Cache Editor/src/alex/compressors/CRC.java new file mode 100644 index 000000000..bd64bfa5e --- /dev/null +++ b/Tools/Cache Editor/src/alex/compressors/CRC.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package alex.compressors; + +/** + * A simple class the hold and calculate the CRC for sanity checking of the + * data. + * @NotThreadSafe + */ +class CRC { + private static final int crc32Table[] = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 
0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + }; + + CRC() { + initialiseCRC(); + } + + void initialiseCRC() { + globalCrc = 0xffffffff; + } + + int getFinalCRC() { + return ~globalCrc; + } + + int 
getGlobalCRC() { + return globalCrc; + } + + void setGlobalCRC(int newCrc) { + globalCrc = newCrc; + } + + void updateCRC(int inCh) { + int temp = (globalCrc >> 24) ^ inCh; + if (temp < 0) { + temp = 256 + temp; + } + globalCrc = (globalCrc << 8) ^ CRC.crc32Table[temp]; + } + + void updateCRC(int inCh, int repeat) { + int globalCrcShadow = this.globalCrc; + while (repeat-- > 0) { + int temp = (globalCrcShadow >> 24) ^ inCh; + globalCrcShadow = (globalCrcShadow << 8) ^ crc32Table[(temp >= 0) + ? temp + : (temp + 256)]; + } + this.globalCrc = globalCrcShadow; + } + + private int globalCrc; +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/compressors/CompressorOutputStream.java b/Tools/Cache Editor/src/alex/compressors/CompressorOutputStream.java new file mode 100644 index 000000000..12bd94804 --- /dev/null +++ b/Tools/Cache Editor/src/alex/compressors/CompressorOutputStream.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package alex.compressors; + +import java.io.OutputStream; + +public abstract class CompressorOutputStream extends OutputStream { + // TODO +} diff --git a/Tools/Cache Editor/src/alex/compressors/Rand.java b/Tools/Cache Editor/src/alex/compressors/Rand.java new file mode 100644 index 000000000..6c3d70a30 --- /dev/null +++ b/Tools/Cache Editor/src/alex/compressors/Rand.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package alex.compressors; + +/** + * Random numbers for both the compress and decompress BZip2 classes. 
+ */ +final class Rand { + + private static final int[] RNUMS = { + 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, + 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, + 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, + 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, + 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, + 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, + 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, + 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, + 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, + 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, + 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, + 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, + 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, + 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, + 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, + 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, + 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, + 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, + 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, + 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, + 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, + 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, + 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, + 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, + 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, + 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, + 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, + 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, + 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, + 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, + 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, + 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, + 946, 670, 656, 610, 738, 392, 760, 799, 887, 653, + 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, + 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, + 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, + 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, + 134, 108, 571, 364, 631, 
212, 174, 643, 304, 329, + 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, + 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, + 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, + 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, + 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, + 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, + 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, + 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, + 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, + 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, + 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, + 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, + 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, + 936, 638 + }; + + /** + * Return the random number at a specific index. + * + * @param i the index + * @return the random number + */ + static int rNums(int i){ + return RNUMS[i]; + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/alex/decompressors/BZip2BlockEntry.java b/Tools/Cache Editor/src/alex/decompressors/BZip2BlockEntry.java new file mode 100644 index 000000000..d18b560c8 --- /dev/null +++ b/Tools/Cache Editor/src/alex/decompressors/BZip2BlockEntry.java @@ -0,0 +1,56 @@ +package alex.decompressors; + +public class BZip2BlockEntry { + + boolean aBooleanArray2205[]; + boolean aBooleanArray2213[]; + byte aByte2201; + byte aByteArray2204[]; + byte aByteArray2211[]; + byte aByteArray2212[]; + byte aByteArray2214[]; + byte aByteArray2219[]; + byte aByteArray2224[]; + byte aByteArrayArray2229[][]; + int anInt2202; + int anInt2203; + int anInt2206; + int anInt2207; + int anInt2208; + int anInt2209; + int anInt2215; + int anInt2216; + int anInt2217; + int anInt2221; + int anInt2222; + int anInt2223; + int anInt2225; + int anInt2227; + int anInt2232; + int anIntArray2200[]; + int anIntArray2220[]; + int anIntArray2226[]; + int anIntArray2228[]; + int anIntArrayArray2210[][]; + int anIntArrayArray2218[][]; + int anIntArrayArray2230[][]; + + BZip2BlockEntry() { + 
anIntArray2200 = new int[6]; + anInt2203 = 0; + aByteArray2204 = new byte[4096]; + aByteArray2211 = new byte[256]; + aByteArray2214 = new byte[18002]; + aByteArray2219 = new byte[18002]; + anIntArray2220 = new int[257]; + anIntArrayArray2218 = new int[6][258]; + aBooleanArray2205 = new boolean[16]; + aBooleanArray2213 = new boolean[256]; + anInt2209 = 0; + anIntArray2226 = new int[16]; + anIntArrayArray2210 = new int[6][258]; + aByteArrayArray2229 = new byte[6][258]; + anIntArrayArray2230 = new int[6][258]; + anIntArray2228 = new int[256]; + } +} diff --git a/Tools/Cache Editor/src/alex/decompressors/BZip2Decompressor.java b/Tools/Cache Editor/src/alex/decompressors/BZip2Decompressor.java new file mode 100644 index 000000000..96c7862b8 --- /dev/null +++ b/Tools/Cache Editor/src/alex/decompressors/BZip2Decompressor.java @@ -0,0 +1,550 @@ +package alex.decompressors; + +public class BZip2Decompressor { + + private static int anIntArray257[]; + + private static BZip2BlockEntry entry = new BZip2BlockEntry(); + + public static final int decompress(byte abyte0[], int i, byte abyte1[], + int j, int k) { + synchronized (entry) { + entry.aByteArray2224 = abyte1; + entry.anInt2209 = k; + entry.aByteArray2212 = abyte0; + entry.anInt2203 = 0; + entry.anInt2206 = i; + entry.anInt2232 = 0; + entry.anInt2207 = 0; + entry.anInt2217 = 0; + entry.anInt2216 = 0; + method1793(entry); + i -= entry.anInt2206; + entry.aByteArray2224 = null; + entry.aByteArray2212 = null; + int l = i; + return l; + } + } + + private static final void method1785(BZip2BlockEntry entry) { + entry.anInt2215 = 0; + for (int i = 0; i < 256; i++) { + if (entry.aBooleanArray2213[i]) { + entry.aByteArray2211[entry.anInt2215] = (byte) i; + entry.anInt2215++; + } + } + + } + + private static final void method1786(int ai[], int ai1[], int ai2[], + byte abyte0[], int i, int j, int k) { + int l = 0; + for (int i1 = i; i1 <= j; i1++) { + for (int l2 = 0; l2 < k; l2++) { + if (abyte0[l2] == i1) { + ai2[l] = l2; + l++; + 
} + } + + } + + for (int j1 = 0; j1 < 23; j1++) { + ai1[j1] = 0; + } + + for (int k1 = 0; k1 < k; k1++) { + ai1[abyte0[k1] + 1]++; + } + + for (int l1 = 1; l1 < 23; l1++) { + ai1[l1] += ai1[l1 - 1]; + } + + for (int i2 = 0; i2 < 23; i2++) { + ai[i2] = 0; + } + + int i3 = 0; + for (int j2 = i; j2 <= j; j2++) { + i3 += ai1[j2 + 1] - ai1[j2]; + ai[j2] = i3 - 1; + i3 <<= 1; + } + + for (int k2 = i + 1; k2 <= j; k2++) { + ai1[k2] = (ai[k2 - 1] + 1 << 1) - ai1[k2]; + } + + } + + private static final void method1787(BZip2BlockEntry entry) { + byte byte4 = entry.aByte2201; + int i = entry.anInt2222; + int j = entry.anInt2227; + int k = entry.anInt2221; + int ai[] = anIntArray257; + int l = entry.anInt2208; + byte abyte0[] = entry.aByteArray2212; + int i1 = entry.anInt2203; + int j1 = entry.anInt2206; + int k1 = j1; + int l1 = entry.anInt2225 + 1; + label0: do { + if (i > 0) { + do { + if (j1 == 0) { + break label0; + } + if (i == 1) { + break; + } + abyte0[i1] = byte4; + i--; + i1++; + j1--; + } while (true); + if (j1 == 0) { + i = 1; + break; + } + abyte0[i1] = byte4; + i1++; + j1--; + } + boolean flag = true; + while (flag) { + flag = false; + if (j == l1) { + i = 0; + break label0; + } + byte4 = (byte) k; + l = ai[l]; + byte byte0 = (byte) (l & 0xff); + l >>= 8; + j++; + if (byte0 != k) { + k = byte0; + if (j1 == 0) { + i = 1; + } else { + abyte0[i1] = byte4; + i1++; + j1--; + flag = true; + continue; + } + break label0; + } + if (j != l1) { + continue; + } + if (j1 == 0) { + i = 1; + break label0; + } + abyte0[i1] = byte4; + i1++; + j1--; + flag = true; + } + i = 2; + l = ai[l]; + byte byte1 = (byte) (l & 0xff); + l >>= 8; + if (++j != l1) { + if (byte1 != k) { + k = byte1; + } else { + i = 3; + l = ai[l]; + byte byte2 = (byte) (l & 0xff); + l >>= 8; + if (++j != l1) { + if (byte2 != k) { + k = byte2; + } else { + l = ai[l]; + byte byte3 = (byte) (l & 0xff); + l >>= 8; + j++; + i = (byte3 & 0xff) + 4; + l = ai[l]; + k = (byte) (l & 0xff); + l >>= 8; + j++; + } + } + } 
+ } + } while (true); + int i2 = entry.anInt2216; + entry.anInt2216 += k1 - j1; + entry.aByte2201 = byte4; + entry.anInt2222 = i; + entry.anInt2227 = j; + entry.anInt2221 = k; + anIntArray257 = ai; + entry.anInt2208 = l; + entry.aByteArray2212 = abyte0; + entry.anInt2203 = i1; + entry.anInt2206 = j1; + } + + private static final byte method1788(BZip2BlockEntry entry) { + return (byte) method1790(1, entry); + } + + private static final byte method1789(BZip2BlockEntry entry) { + return (byte) method1790(8, entry); + } + + private static final int method1790(int i, BZip2BlockEntry entry) { + int j; + do { + if (entry.anInt2232 >= i) { + int k = entry.anInt2207 >> entry.anInt2232 - i & (1 << i) - 1; + entry.anInt2232 -= i; + j = k; + break; + } + entry.anInt2207 = entry.anInt2207 << 8 + | entry.aByteArray2224[entry.anInt2209] & 0xff; + entry.anInt2232 += 8; + entry.anInt2209++; + entry.anInt2217++; + } while (true); + return j; + } + + public static void method1791() { + entry = null; + } + + private static final void method1793(BZip2BlockEntry entry) { + // unused + /* + * boolean flag = false; boolean flag1 = false; boolean flag2 = false; + * boolean flag3 = false; boolean flag4 = false; boolean flag5 = false; + * boolean flag6 = false; boolean flag7 = false; boolean flag8 = false; + * boolean flag9 = false; boolean flag10 = false; boolean flag11 = + * false; boolean flag12 = false; boolean flag13 = false; boolean flag14 + * = false; boolean flag15 = false; boolean flag16 = false; boolean + * flag17 = false; + */ + int j8 = 0; + int ai[] = null; + int ai1[] = null; + int ai2[] = null; + entry.anInt2202 = 1; + if (anIntArray257 == null) { + anIntArray257 = new int[entry.anInt2202 * 0x186a0]; + } + boolean flag18 = true; + while (flag18) { + byte byte0 = method1789(entry); + if (byte0 == 23) { + return; + } + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = 
method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1788(entry); + entry.anInt2223 = 0; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + for (int j = 0; j < 16; j++) { + byte byte1 = method1788(entry); + if (byte1 == 1) { + entry.aBooleanArray2205[j] = true; + } else { + entry.aBooleanArray2205[j] = false; + } + } + + for (int k = 0; k < 256; k++) { + entry.aBooleanArray2213[k] = false; + } + + for (int l = 0; l < 16; l++) { + if (entry.aBooleanArray2205[l]) { + for (int i3 = 0; i3 < 16; i3++) { + byte byte2 = method1788(entry); + if (byte2 == 1) { + entry.aBooleanArray2213[l * 16 + i3] = true; + } + } + + } + } + + method1785(entry); + int i4 = entry.anInt2215 + 2; + int j4 = method1790(3, entry); + int k4 = method1790(15, entry); + for (int i1 = 0; i1 < k4; i1++) { + int j3 = 0; + do { + byte byte3 = method1788(entry); + if (byte3 == 0) { + break; + } + j3++; + } while (true); + entry.aByteArray2214[i1] = (byte) j3; + } + + byte abyte0[] = new byte[6]; + for (byte byte16 = 0; byte16 < j4; byte16++) { + abyte0[byte16] = byte16; + } + + for (int j1 = 0; j1 < k4; j1++) { + byte byte17 = entry.aByteArray2214[j1]; + byte byte15 = abyte0[byte17]; + for (; byte17 > 0; byte17--) { + abyte0[byte17] = abyte0[byte17 - 1]; + } + + abyte0[0] = byte15; + entry.aByteArray2219[j1] = byte15; + } + + for (int k3 = 0; k3 < j4; k3++) { + int k6 = method1790(5, entry); + for (int k1 = 0; k1 < i4; k1++) { + do { + byte byte4 = method1788(entry); + if (byte4 == 0) { + break; + } + byte4 = method1788(entry); + if (byte4 == 0) { + k6++; + } else { + k6--; + } + } while (true); + entry.aByteArrayArray2229[k3][k1] = (byte) k6; + } + + } + + for (int l3 = 0; l3 < j4; l3++) { + byte byte8 = 32; + int i = 0; + for 
(int l1 = 0; l1 < i4; l1++) { + if (entry.aByteArrayArray2229[l3][l1] > i) { + i = entry.aByteArrayArray2229[l3][l1]; + } + if (entry.aByteArrayArray2229[l3][l1] < byte8) { + byte8 = entry.aByteArrayArray2229[l3][l1]; + } + } + + method1786(entry.anIntArrayArray2230[l3], + entry.anIntArrayArray2218[l3], + entry.anIntArrayArray2210[l3], + entry.aByteArrayArray2229[l3], byte8, i, i4); + entry.anIntArray2200[l3] = byte8; + } + + int l4 = entry.anInt2215 + 1; + int i5 = -1; + int j5 = 0; + for (int i2 = 0; i2 <= 255; i2++) { + entry.anIntArray2228[i2] = 0; + } + + int i9 = 4095; + for (int k8 = 15; k8 >= 0; k8--) { + for (int l8 = 15; l8 >= 0; l8--) { + entry.aByteArray2204[i9] = (byte) (k8 * 16 + l8); + i9--; + } + + entry.anIntArray2226[k8] = i9 + 1; + } + + int l5 = 0; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte12 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte12]; + ai = entry.anIntArrayArray2230[byte12]; + ai2 = entry.anIntArrayArray2210[byte12]; + ai1 = entry.anIntArrayArray2218[byte12]; + } + j5--; + int l6 = j8; + int k7; + byte byte9; + for (k7 = method1790(l6, entry); k7 > ai[l6]; k7 = k7 << 1 | byte9) { + l6++; + byte9 = method1788(entry); + } + + for (int k5 = ai2[k7 - ai1[l6]]; k5 != l4;) { + if (k5 == 0 || k5 == 1) { + int i6 = -1; + int j6 = 1; + do { + if (k5 == 0) { + i6 += j6; + } else if (k5 == 1) { + i6 += 2 * j6; + } + j6 *= 2; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte13 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte13]; + ai = entry.anIntArrayArray2230[byte13]; + ai2 = entry.anIntArrayArray2210[byte13]; + ai1 = entry.anIntArrayArray2218[byte13]; + } + j5--; + int i7 = j8; + int l7; + byte byte10; + for (l7 = method1790(i7, entry); l7 > ai[i7]; l7 = l7 << 1 + | byte10) { + i7++; + byte10 = method1788(entry); + } + + k5 = ai2[l7 - ai1[i7]]; + } while (k5 == 0 || k5 == 1); + i6++; + byte byte5 = entry.aByteArray2211[entry.aByteArray2204[entry.anIntArray2226[0]] & 0xff]; + entry.anIntArray2228[byte5 & 0xff] += i6; 
+ for (; i6 > 0; i6--) { + anIntArray257[l5] = byte5 & 0xff; + l5++; + } + + } else { + int i11 = k5 - 1; + byte byte6; + if (i11 < 16) { + int i10 = entry.anIntArray2226[0]; + byte6 = entry.aByteArray2204[i10 + i11]; + for (; i11 > 3; i11 -= 4) { + int j11 = i10 + i11; + entry.aByteArray2204[j11] = entry.aByteArray2204[j11 - 1]; + entry.aByteArray2204[j11 - 1] = entry.aByteArray2204[j11 - 2]; + entry.aByteArray2204[j11 - 2] = entry.aByteArray2204[j11 - 3]; + entry.aByteArray2204[j11 - 3] = entry.aByteArray2204[j11 - 4]; + } + + for (; i11 > 0; i11--) { + entry.aByteArray2204[i10 + i11] = entry.aByteArray2204[(i10 + i11) - 1]; + } + + entry.aByteArray2204[i10] = byte6; + } else { + int k10 = i11 / 16; + int l10 = i11 % 16; + int j10 = entry.anIntArray2226[k10] + l10; + byte6 = entry.aByteArray2204[j10]; + for (; j10 > entry.anIntArray2226[k10]; j10--) { + entry.aByteArray2204[j10] = entry.aByteArray2204[j10 - 1]; + } + + entry.anIntArray2226[k10]++; + for (; k10 > 0; k10--) { + entry.anIntArray2226[k10]--; + entry.aByteArray2204[entry.anIntArray2226[k10]] = entry.aByteArray2204[(entry.anIntArray2226[k10 - 1] + 16) - 1]; + } + + entry.anIntArray2226[0]--; + entry.aByteArray2204[entry.anIntArray2226[0]] = byte6; + if (entry.anIntArray2226[0] == 0) { + int l9 = 4095; + for (int j9 = 15; j9 >= 0; j9--) { + for (int k9 = 15; k9 >= 0; k9--) { + entry.aByteArray2204[l9] = entry.aByteArray2204[entry.anIntArray2226[j9] + + k9]; + l9--; + } + + entry.anIntArray2226[j9] = l9 + 1; + } + + } + } + entry.anIntArray2228[entry.aByteArray2211[byte6 & 0xff] & 0xff]++; + anIntArray257[l5] = entry.aByteArray2211[byte6 & 0xff] & 0xff; + l5++; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte14 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte14]; + ai = entry.anIntArrayArray2230[byte14]; + ai2 = entry.anIntArrayArray2210[byte14]; + ai1 = entry.anIntArrayArray2218[byte14]; + } + j5--; + int j7 = j8; + int i8; + byte byte11; + for (i8 = method1790(j7, entry); i8 > ai[j7]; i8 = 
package alex.decompressors;

import java.util.zip.Inflater;

import alex.io.Stream;

/**
 * Inflates the GZIP-wrapped body of a cache container into a caller-supplied
 * buffer. The caller must size {@code output} to the decompressed length
 * (read from the container header before calling).
 */
public class GZipDecompressor {

	/**
	 * Shared raw-DEFLATE inflater ({@code nowrap = true}, because the GZIP
	 * header/trailer are skipped manually below). {@link Inflater} is not
	 * thread-safe, hence {@code decompress} is synchronized.
	 */
	private static final Inflater inflater = new Inflater(true);

	/**
	 * Decompresses the GZIP stream starting at {@code stream.offset} into
	 * {@code output}.
	 *
	 * @param stream container data; payload must hold a full GZIP member at offset
	 * @param output destination buffer, sized to the decompressed length
	 * @throws RuntimeException if the GZIP magic is missing or inflation fails
	 */
	public static final synchronized void decompress(Stream stream, byte output[]) {
		// GZIP magic number is 0x1f 0x8b (the original expressed this as
		// "~b != -32" and "b != -117", which are the same checks obfuscated).
		if ((stream.payload[stream.offset] & 0xff) != 0x1f
				|| (stream.payload[stream.offset + 1] & 0xff) != 0x8b) {
			throw new RuntimeException("Invalid GZIP header!");
		}
		try {
			// Skip the fixed 10-byte GZIP header and exclude the 8-byte
			// trailer (CRC32 + ISIZE): 10 + 8 = 18 bytes of framing.
			inflater.setInput(stream.payload, stream.offset + 10,
					stream.payload.length - stream.offset - 18);
			inflater.inflate(output);
		} catch (Exception e) {
			inflater.reset();
			// Preserve the underlying cause instead of discarding it.
			throw new RuntimeException("Invalid GZIP compressed data!", e);
		}
		inflater.reset();
	}
}
package alex.io;

import java.math.BigInteger;

import alex.util.Methods;

/**
 * A cursor-based byte buffer used for reading and writing cache/protocol data.
 * {@code offset} is the current read/write position into {@code payload}.
 * Reader methods advance the offset; writer methods ({@code put*}) do too.
 * The many get/put variants implement the client protocol's value encodings
 * (little-endian, middle-endian, +128 "type A", negated "type C/S", smarts).
 * Not thread-safe: a single cursor is shared by all operations.
 */
public class Stream {
	// Current read/write position within payload.
	public int offset;
	// Backing byte array.
	public byte payload[];

	/** Wraps an existing byte array; cursor starts at 0. */
	public Stream(byte abyte0[]) {
		offset = 0;
		payload = abyte0;
	}

	/** Copies an int array into a new byte payload, truncating each int to a byte. */
	public Stream(int abyte0[]) {
		offset = 0;
		payload = new byte[abyte0.length];
		for (int i = 0; i < payload.length; i++)
			payload[i] = (byte) abyte0[i];
	}

	/** Allocates an empty payload of the given size; cursor starts at 0. */
	public Stream(int size) {
		payload = new byte[size];
		offset = 0;
	}

	/**
	 * Verifies the trailing 4-byte CRC: rewinds 4 bytes, recomputes the CRC of
	 * everything before it and compares with the stored value. Leaves the
	 * cursor positioned after the CRC it just read.
	 */
	final boolean compareCrcs() {
		offset -= 4;
		int i = Methods.getCrc(payload, 0, offset);
		int j = getInt();
		return j == i;
	}

	/**
	 * XTEA-decrypts payload[start, end) in place, 8 bytes (two ints) per block.
	 * Any trailing partial block is left untouched. The cursor is restored on exit.
	 *
	 * @param keys 4-int XTEA key; (sum & 0x1c84) >>> 11 selects key index 0-3
	 *             (equivalent to (sum >>> 11) & 3 — the extra mask bits vanish).
	 */
	public final void decodeXTEA(int keys[], int start, int end) {
		int l = offset;
		offset = start;
		int i1 = (end - start) / 8;
		for (int j1 = 0; j1 < i1; j1++) {
			int k1 = getInt();
			int l1 = getInt();
			int sum = 0xc6ef3720; // delta * 32: decryption runs the schedule backwards
			int delta = 0x9e3779b9;
			for (int k2 = 32; k2-- > 0;) {
				l1 -= keys[(sum & 0x1c84) >>> 11] + sum ^ (k1 >>> 5 ^ k1 << 4)
						+ k1;
				sum -= delta;
				k1 -= (l1 >>> 5 ^ l1 << 4) + l1 ^ keys[sum & 3] + sum;
			}

			// Overwrite the ciphertext block just read with the plaintext.
			offset -= 8;
			putInt(k1);
			putInt(l1);
		}

		offset = l;
	}

	/**
	 * XTEA-encrypts the first (offset / 8) * 8 bytes of the payload in place.
	 * NOTE(review): unlike decodeXTEA this resets the cursor to 0 and leaves it
	 * at the end of the encrypted region — callers appear to rely on that.
	 */
	public final void encodeXTEA(int keys[]) {
		int j = offset / 8;
		offset = 0;
		for (int k = 0; k < j; k++) {
			int l = getInt();
			int i1 = getInt();
			int sum = 0;
			int delta = 0x9e3779b9;
			for (int l1 = 32; l1-- > 0;) {
				l += sum + keys[3 & sum] ^ i1 + (i1 >>> 5 ^ i1 << 4);
				sum += delta;
				// (0x1eec & sum) >>> 11 is (sum >>> 11) & 3 with noise bits.
				i1 += l + (l >>> 5 ^ l << 4) ^ keys[(0x1eec & sum) >>> 11]
						+ sum;
			}

			offset -= 8;
			putInt(l);
			putInt(i1);
		}

	}

	/** Reads one signed byte. */
	public final byte getByte() {
		return payload[offset++];
	}

	/** Reads a "type A" byte: stored value minus 128. */
	final byte getByteA() {
		return (byte) (payload[offset++] - 128);
	}

	/** Reads {@code len} bytes into buffer[off..off+len). */
	public final void getBytes(byte buffer[], int off, int len) {
		for (int k = off; k < len + off; k++) {
			buffer[k] = payload[offset++];
		}

	}

	/** Reads a "type S" byte: 128 minus stored value. */
	final byte getByteS() {
		return (byte) (-payload[offset++] + 128);
	}

	/** Reads len "type A" bytes into buffer in reverse order (last index first). */
	final void getBytesAReverse(byte buffer[], int off, int len) {
		int l = -1 + len + off;
		for (; off <= l; l--) {
			buffer[l] = (byte) (payload[offset++] - 128);
		}

	}

	/** Reads len bytes into buffer in reverse order (last index first). */
	final void getBytesReverse(byte buffer[], int off, int len) {
		for (int l = -1 + (off + len); l >= off; l--) {
			buffer[l] = payload[offset++];
		}

	}

	/** Reads a string where a leading NUL byte means "no string" (returns null). */
	final String getCheckedString() {
		if (payload[offset] == 0) {
			offset++;
			return null;
		}
		return getString();
	}

	/** Reads a big-endian 4-byte int. */
	public final int getInt() {
		offset += 4;
		return ((0xff & payload[-3 + offset]) << 16)
				+ ((((0xff & payload[-4 + offset]) << 24) + ((payload[-2
						+ offset] & 0xff) << 8)) + (payload[-1 + offset] & 0xff));
	}

	/** Reads a versioned NUL-terminated string; the leading byte must be 0. */
	final String getJStr() {
		byte byte0 = payload[offset++];
		if (byte0 != 0) {
			throw new IllegalStateException("Bad version number in gjstr2");
		}
		int j = offset;
		while (payload[offset++] != 0)
			;
		int k = -1 + offset - j;
		if (k == 0) {
			return "";
		} else {
			return Methods.getStringFromBytes(payload, j, k);
		}
	}

	/** Reads a little-endian 4-byte int. */
	final int getLEInt() {
		offset += 4;
		return ((0xff & payload[offset - 1]) << 24)
				+ ((0xff0000 & payload[-2 + offset] << 16)
						+ ((0xff & payload[offset - 3]) << 8) + (0xff & payload[offset - 4]));
	}

	/** Reads a little-endian signed 2-byte short. */
	final int getLEShort() {
		offset += 2;
		int i = (0xff & payload[-2 + offset])
				+ (0xff00 & payload[offset - 1] << 8);
		if (i > 32767) {
			i -= 0x10000; // sign-extend 16-bit value
		}
		return i;
	}

	/** Reads a little-endian signed short whose low byte is "type A" (-128). */
	final int getLEShortA() {
		offset += 2;
		int i = (-128 + payload[-2 + offset] & 0xff)
				+ ((0xff & payload[offset - 1]) << 8);
		if (i > 32767) {
			i -= 0x10000;
		}
		return i;
	}

	/** Reads a little-endian unsigned 2-byte short. */
	final int getLEUShort() {
		offset += 2;
		return (payload[-2 + offset] & 0xff)
				+ ((0xff & payload[-1 + offset]) << 8);
	}

	/** Reads a little-endian unsigned short whose low byte is "type A". */
	final int getLEUShortA() {
		offset += 2;
		return ((payload[offset - 1] & 0xff) << 8)
				+ (-128 + payload[-2 + offset] & 0xff);
	}

	/** Reads a big-endian 8-byte long as two unsigned ints. */
	final long getLong() {
		long l = 0xffffffffL & getInt();
		long l1 = 0xffffffffL & getInt();
		return l1 + (l << 32);
	}

	/** Reads a big-endian unsigned 3-byte int. */
	public final int getMediumInt() {
		offset += 3;
		return (0xff & payload[offset - 1])
				+ ((payload[offset - 3] << 16 & 0xff0000) + (0xff00 & payload[offset - 2] << 8));
	}

	/** Reads a "middle-endian type 1" int (byte order 3,4,?,2 shuffled). */
	final int getMEInt1() {
		offset += 4;
		return ((payload[offset - 4] & 0xff) << 16)
				+ (((0xff000000 & payload[offset - 3] << 24) + ((0xff & payload[offset - 1]) << 8)) + (0xff & payload[offset - 2]));
	}

	/** Reads a "middle-endian type 2" int (alternate byte shuffle). */
	final int getMEInt2() {
		offset += 4;
		return (payload[offset - 2] << 24 & 0xff000000)
				+ (((payload[-1 + offset] & 0xff) << 16) + (payload[-4 + offset] << 8 & 0xff00))
				+ (0xff & payload[-3 + offset]);
	}

	/** Reads a negated ("type C") signed byte. */
	final byte getNegByte() {
		return (byte) (-payload[offset++]);
	}

	/** Reads a negated byte as an unsigned value. */
	final int getNegUByte() {
		return 0xff & -payload[offset++];
	}

	/**
	 * Reads an i-byte big-endian unsigned long (1 <= i <= 8).
	 *
	 * @throws IllegalArgumentException if i is out of range
	 */
	final long getShiftedLong(int i) {
		if (--i < 0 || i > 7) {
			throw new IllegalArgumentException();
		}
		int j = i * 8;
		long l = 0L;
		for (; j >= 0; j -= 8) {
			l |= (payload[offset++] & 255L) << j;
		}

		return l;
	}

	/** Reads a big-endian signed 2-byte short. */
	public final int getShort() {
		offset += 2;
		int i = ((payload[offset - 2] & 0xff) << 8)
				+ (0xff & payload[offset - 1]);
		if (i > 32767) {
			i -= 0x10000;
		}
		return i;
	}

	/** Reads a big-endian signed short whose low byte is "type A". */
	final int getShortA() {
		offset += 2;
		int j = (payload[-1 + offset] - 128 & 0xff)
				+ (0xff00 & payload[offset - 2] << 8);
		if (j > 32767) {
			j -= 0x10000;
		}
		return j;
	}

	/**
	 * Reads a signed "smart": 1 byte biased by -64 (range -64..63) when the
	 * first byte is < 128, else 2 bytes biased by -49152 (range -16384..16383).
	 */
	final int getSmallSmart() {
		int i = 0xff & payload[offset];
		if (i >= 128) {
			return -49152 + getUShort();
		} else {
			return -64 + getUByte();
		}
	}

	/**
	 * Reads an unsigned "smart": 1 byte (0..127) when the first byte is < 128,
	 * else 2 bytes minus 32768 (128..32767).
	 */
	final int getSmart() {
		int i = payload[offset] & 0xff;
		if (i >= 128) {
			return getUShort() - 32768;
		} else {
			return getUByte();
		}
	}

	/** Reads a sequence of smarts summed until one is below the 32767 sentinel. */
	final int getSmarts() {
		int i = 0;
		int j;
		for (j = getSmart(); j == 32767;) {
			j = getSmart();
			i += 32767;
		}

		i += j;
		return i;
	}

	/** Reads a NUL-terminated string decoded via the cp1252-style table in Methods. */
	public final String getString() {
		int j = offset;
		while (payload[offset++] != 0)
			;
		int k = -1 + (offset - j);
		if (k == 0) {
			return "";
		} else {
			return Methods.getStringFromBytes(payload, j, k);
		}
	}

	/** Reads an unsigned byte. */
	public final int getUByte() {
		return payload[offset++] & 0xff;
	}

	/** Reads an unsigned "type A" byte (-128). */
	final int getUByteA() {
		return -128 + payload[offset++] & 0xff;
	}

	/** Reads an unsigned "type S" byte (128 - value). */
	final int getUByteS() {
		return 0xff & -payload[offset++] + 128;
	}

	/** Reads a big-endian unsigned 2-byte short. */
	public final int getUShort() {
		offset += 2;
		return (payload[offset - 2] << 8 & 0xff00)
				+ (payload[offset - 1] & 0xff);
	}

	/** Reads a big-endian unsigned short whose low byte is "type A". */
	final int getUShortA() {
		offset += 2;
		return (0xff & payload[offset - 1] - 128)
				+ ((0xff & payload[offset - 2]) << 8);
	}

	/**
	 * Reads a variable-length int: 7 data bits per byte, the sign bit of each
	 * byte acting as the continuation flag (negative byte = more to come).
	 */
	final int method124() {
		byte byte1 = payload[offset++];
		int i = 0;
		for (; byte1 < 0; byte1 = payload[offset++]) {
			i = (0x7f & byte1 | i) << 7;
		}

		return i | byte1;
	}

	/** Writes one byte. */
	public final void putByte(int i) {
		payload[offset++] = (byte) i;
	}

	/**
	 * Writes a NUL-terminated string.
	 * NOTE(review): uses String.getBytes() with the platform default charset,
	 * unlike getString() which decodes via Methods' cp1252 table — confirm
	 * callers only pass ASCII.
	 */
	public void putString(String s) {
		System.arraycopy(s.getBytes(), 0, payload, offset, s.length());
		offset = offset + s.length();
		putByte(0);
	}

	/** Writes a "type A" byte (+128). */
	final void putByteA(int i) {
		payload[offset++] = (byte) (i + 128);
	}

	/** Writes buffer[off..off+len) into the payload. */
	final void putBytes(byte buffer[], int off, int len) {
		for (int k = off; off + len > k; k++) {
			payload[offset++] = buffer[k];
		}

	}

	/** Writes a "type S" byte (128 - value). */
	final void putByteS(int i) {
		payload[offset++] = (byte) (128 - i);
	}

	/** Appends the CRC of payload[off, offset) and returns it. */
	final int putCrc(int off) {
		int k = Methods.getCrc(payload, off, offset);
		putInt(k);
		return k;
	}

	/**
	 * Writes i as a variable-length quantity, 7 bits per byte, high bit set on
	 * all but the last byte. The OR-ed constants look like noise but each
	 * contributes exactly one bit above the shifted window: e.g.
	 * (0x403d | i) >>> 7 sets 0x80 via the 0x4000 bit — i.e. the continuation
	 * flag — while the low bits are shifted away.
	 */
	final void putFlags(int i) {
		if (~(0xffffff80 & i) != -1) {
			if (~(i & 0xffffc000) != -1) {
				if ((0xffe00000 & i) != 0) {
					if ((i & 0xf0000000) != 0) {
						putByte(i >>> 28 | 0x80);
					}
					putByte((0x10039c30 | i) >>> 21);
				}
				putByte((i | 0x203a0e) >>> 14);
			}
			putByte((0x403d | i) >>> 7);
		}
		putByte(i & 0x7f);
	}

	/** Writes a big-endian 4-byte int. */
	public final void putInt(int i) {
		payload[offset++] = (byte) (i >> 24);
		payload[offset++] = (byte) (i >> 16);
		payload[offset++] = (byte) (i >> 8);
		payload[offset++] = (byte) i;
	}

	/**
	 * Writes a NUL-terminated string via Methods' cp1252 encoding.
	 *
	 * @throws IllegalArgumentException if the string itself contains NUL
	 */
	final void putJStr(String s) {
		int j = s.indexOf('\0');
		if (j >= 0) {
			throw new IllegalArgumentException("NUL character at " + j
					+ " - cannot pjstr");
		}
		offset += Methods.getStringBytes(s, 0, s.length(), payload, offset);
		payload[offset++] = 0;
	}

	/** Writes a little-endian 4-byte int. */
	final void putLEInt(int i) {
		payload[offset++] = (byte) i;
		payload[offset++] = (byte) (i >> 8);
		payload[offset++] = (byte) (i >> 16);
		payload[offset++] = (byte) (i >> 24);
	}

	/** Writes a little-endian 2-byte short. */
	final void putLEShort(int i) {
		payload[offset++] = (byte) i;
		payload[offset++] = (byte) (i >> 8);
	}

	/** Writes a little-endian short whose low byte is "type A" (+128). */
	final void putLEShortA(int i) {
		payload[offset++] = (byte) (i + 128);
		payload[offset++] = (byte) (i >> 8);
	}

	/** Writes a big-endian 8-byte long. */
	final void putLong(long l) {
		payload[offset++] = (byte) (int) (l >> 56);
		payload[offset++] = (byte) (int) (l >> 48);
		payload[offset++] = (byte) (int) (l >> 40);
		payload[offset++] = (byte) (int) (l >> 32);
		payload[offset++] = (byte) (int) (l >> 24);
		payload[offset++] = (byte) (int) (l >> 16);
		payload[offset++] = (byte) (int) (l >> 8);
		payload[offset++] = (byte) (int) l;
	}

	/** Writes a big-endian 3-byte int. */
	public final void putMediumInt(int j) {
		payload[offset++] = (byte) (j >> 16);
		payload[offset++] = (byte) (j >> 8);
		payload[offset++] = (byte) j;
	}

	/** Writes a "middle-endian type 1" int (inverse of getMEInt1). */
	final void putMEInt1(int j) {
		payload[offset++] = (byte) (j >> 16);
		payload[offset++] = (byte) (j >> 24);
		payload[offset++] = (byte) j;
		payload[offset++] = (byte) (j >> 8);
	}

	/** Writes a "middle-endian type 2" int (inverse of getMEInt2). */
	final void putMEInt2(int i) {
		payload[offset++] = (byte) (i >> 8);
		payload[offset++] = (byte) i;
		payload[offset++] = (byte) (i >> 24);
		payload[offset++] = (byte) (i >> 16);
	}

	/** Writes a negated ("type C") byte. */
	final void putNegByte(int i) {
		payload[offset++] = (byte) (-i);
	}

	/**
	 * Writes the low j bytes of l in big-endian order (1 <= j <= 8).
	 *
	 * @throws IllegalArgumentException if j is out of range
	 */
	final void putShiftedLong(int j, long l) {
		if (--j < 0 || j > 7) {
			throw new IllegalArgumentException();
		}
		for (int k = j * 8; k >= 0; k -= 8) {
			payload[offset++] = (byte) (int) (l >> k);
		}
	}

	/** Writes a big-endian 2-byte short. */
	public final void putShort(int i) {
		payload[offset++] = (byte) (i >> 8);
		payload[offset++] = (byte) i;
	}

	/** Writes a big-endian short whose low byte is "type A" (+128). */
	final void putShortA(int i) {
		payload[offset++] = (byte) (i >> 8);
		payload[offset++] = (byte) (128 + i);
	}

	/** Back-patches a 1-byte length field written i bytes before the cursor. */
	final void putSizeByte(int i) {
		payload[-1 - i + offset] = (byte) i;
	}

	/** Back-patches a 4-byte length field written i bytes before the cursor. */
	final void putSizeInt(int i) {
		payload[offset - (i + 4)] = (byte) (i >> 24);
		payload[-3 + (-i + offset)] = (byte) (i >> 16);
		payload[-2 + (offset - i)] = (byte) (i >> 8);
		payload[-i + (offset - 1)] = (byte) i;
	}

	/** Back-patches a 2-byte length field written j bytes before the cursor. */
	final void putSizeShort(int j) {
		payload[-2 + (offset - j)] = (byte) (j >> 8);
		payload[-1 + offset - j] = (byte) j;
	}

	/**
	 * Writes an unsigned smart: 1 byte for 0..127, 2 bytes (+32768) for
	 * 128..32767.
	 *
	 * @throws IllegalArgumentException if i is negative or >= 32768
	 */
	final void putSmart(int i) {
		if (i >= 0 && i < 128) {
			putByte(i);
			return;
		}
		if (i >= 0 && i < 32768) {
			putShort(i + 32768);
		} else {
			throw new IllegalArgumentException();
		}
	}

	/**
	 * RSA-encodes everything written so far: treats payload[0, offset) as a
	 * big-endian BigInteger, raises it to {@code exponent} mod {@code modulus},
	 * then rewrites the payload as a length byte followed by the result.
	 */
	final void rsaEncode(BigInteger exponent, BigInteger modulus) {
		int j = offset;
		offset = 0;
		byte abyte0[] = new byte[j];
		getBytes(abyte0, 0, j);
		BigInteger biginteger2 = new BigInteger(abyte0);
		BigInteger biginteger3 = biginteger2.modPow(exponent, modulus);
		byte abyte1[] = biginteger3.toByteArray();
		offset = 0;
		putByte(abyte1.length);
		putBytes(abyte1, 0, abyte1.length);
	}
}
package alex.util;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.util.zip.CRC32;
import java.util.zip.GZIPOutputStream;

import alex.CacheLoader;
import alex.decompressors.BZip2Decompressor;
import alex.decompressors.GZipDecompressor;
import alex.io.Stream;

/**
 * Static helpers for the cache editor: CRC computation, the client's
 * cp1252-style string encoding/decoding, file-name hashing, and packing /
 * unpacking of cache containers (compression byte + compressed length
 * [+ decompressed length] + body).
 */
public class Methods {

	public static final CRC32 CRC32 = new CRC32();

	// Skill-id constants (client ordering).
	public static final int ATTACK = 0, DEFENCE = 1, STRENGTH = 2, HITPOINTS = 3, RANGE = 4, PRAYER = 5,
			MAGIC = 6, COOKING = 7, WOODCUTTING = 8, FLETCHING = 9, FISHING = 10, FIREMAKING = 11,
			CRAFTING = 12, SMITHING = 13, MINING = 14, HERBLORE = 15, AGILITY = 16, THIEVING = 17, SLAYER = 18,
			FARMING = 19, RUNECRAFTING = 20, CONSTRUCTION = 21, HUNTER = 22, SUMMONING = 23;

	// Decode table for bytes 0x80-0x9f: the Windows-1252 characters that
	// differ from ISO-8859-1 ('\0' entries are undefined code points).
	public static char aCharArray5916[] = { '\u20AC', '\0', '\u201A', '\u0192',
			'\u201E', '\u2026', '\u2020', '\u2021', '\u02C6', '\u2030',
			'\u0160', '\u2039', '\u0152', '\0', '\u017D', '\0', '\0', '\u2018',
			'\u2019', '\u201C', '\u201D', '\u2022', '\u2013', '\u2014',
			'\u02DC', '\u2122', '\u0161', '\u203A', '\u0153', '\0', '\u017E',
			'\u0178' };

	// Cache index ids.
	public final static byte ANIM_IDX_ID = 20;
	public final static byte ANIMFRAMES_IDX_ID = 0;
	// Sanity cap for container sizes; 0 disables the check (see unpackContainer).
	static int minLength = 0;
	// Lazily-built (in the static initializer) CRC-32 lookup table.
	static int crcTable[];
	public final static short CRCTABLE_IDX_ID = 255;
	public final static byte GFX_IDX_ID = 21;
	public final static byte HUFFMAN_IDX_ID = 10;
	public final static byte INTERFACEDEF_IDX_ID = 3;
	public final static byte INTERFACESCRIPT_IDX_ID = 12;
	public final static byte ITEMDEF_IDX_ID = 19;
	public final static byte LANDSCAPEDEF_IDX_ID = 5;
	public final static byte MODELS_IDX_ID = 7;

	public final static byte MUSIC_IDX_ID = 6;

	public final static byte NPCDEF_IDX_ID = 18;

	public final static byte OBJECTDEF_IDX_ID = 16;

	public final static byte SPRITES_IDX_ID = 8;

	static {
		// Standard CRC-32 table (reflected polynomial 0xedb88320).
		crcTable = new int[256];
		for (int j = 0; j < 256; j++) {
			int i = j;
			for (int k = 0; k < 8; k++) {
				if ((1 & i) != 1) {
					i >>>= 1;
				} else {
					i = 0xedb88320 ^ i >>> 1;
				}
			}

			crcTable[j] = i;
		}

	}

	/**
	 * Total item count: item containers hold up to 256 definitions each, so it
	 * is 256 * (last container id) plus the size of the (possibly partial)
	 * last container.
	 */
	public static final int getAmountOfItems() {
		int lastContainerId = CacheLoader.getFileSystems()[ITEMDEF_IDX_ID].getChildCount() - 1;
		return (256 * lastContainerId) + CacheLoader.getFileSystems()[ITEMDEF_IDX_ID].getChildIndexCount(lastContainerId);
	}

	/**
	 * Rounds {@code length} up to the next power of two (classic bit-smearing).
	 * The original used obfuscated shift counts (-1810941663, 2010624802, ...);
	 * Java masks a shift distance to its low 5 bits (JLS 15.19), so those
	 * constants are exactly 1, 2, 4, 8 and 16 — written plainly here.
	 */
	public static final int getTableSize(int length) {
		length--;
		length |= length >>> 1;
		length |= length >>> 2;
		length |= length >>> 4;
		length |= length >>> 8;
		length |= length >>> 16;
		return 1 + length;
	}

	/** Returns a defensive copy of {@code buffer}. */
	public static final byte[] copyBuffer(byte buffer[]) {
		int len = buffer.length;
		byte copy[] = new byte[len];
		System.arraycopy(buffer, 0, copy, 0, len);
		return copy;
	}

	/** CRC-32 of buffer[0, len). */
	public static final int getCrc(byte buffer[], int len) {
		return getCrc(buffer, 0, len);
	}

	/** CRC-32 of buffer[off, len) using the precomputed table. */
	public static final int getCrc(byte buffer[], int off, int len) {
		int l = -1;
		for (int i1 = off; len > i1; i1++) {
			l = crcTable[(buffer[i1] ^ l) & 0xff] ^ l >>> 8;
		}
		l = ~l;
		return l;
	}

	/**
	 * Encodes s[strOff, strLen) into buffer at bufOff using the client's
	 * cp1252-style encoding and returns the number of bytes written.
	 * Delegates per character to {@link #method1258(char)}, which performs the
	 * identical mapping the original duplicated inline here.
	 */
	public static final int getStringBytes(String s, int strOff, int strLen,
			byte buffer[], int bufOff) {
		int l = strLen - strOff;
		for (int i1 = 0; i1 < l; i1++) {
			buffer[bufOff + i1] = method1258(s.charAt(strOff + i1));
		}

		return l;
	}

	/**
	 * Decodes buffer[off, off+len) into a String: NUL bytes are dropped,
	 * 0x80-0x9f are mapped through {@link #aCharArray5916} (undefined entries
	 * become '?'), everything else is taken as Latin-1.
	 */
	public static final String getStringFromBytes(byte buffer[], int off,
			int len) {
		char ac[] = new char[len];
		int l = 0;
		for (int i1 = 0; len > i1; i1++) {
			int j1 = 0xff & buffer[off + i1];
			if (j1 != 0) {
				if (j1 >= 128 && j1 < 160) {
					char c = aCharArray5916[-128 + j1];
					if (c == 0) {
						c = '?';
					}
					j1 = c;
				}
				ac[l++] = (char) j1;
			}
		}

		return new String(ac, 0, l);
	}

	/**
	 * Client file-name hash: h = h * 31 + encodedByte(c) over the encoded
	 * bytes of the name ((k << 5) - k == k * 31).
	 */
	public static final int hashFile(String name) {
		int j = name.length();
		int k = 0;
		for (int l = 0; l < j; l++) {
			k = method1258(name.charAt(l)) + ((k << 5) - k);
		}

		return k;
	}

	/**
	 * Maps one char to its client (Windows-1252-style) byte. ASCII (excluding
	 * NUL) and 0xA0-0xFF map to themselves; the cp1252-specific characters map
	 * into 0x80-0x9f; everything else becomes '?' (63). The original expressed
	 * this as a deeply nested if/else chain — rewritten as an equivalent
	 * switch with the same case/value pairs.
	 */
	static final byte method1258(char c) {
		if (c > 0 && c < '\200' || c >= '\240' && c <= '\377') {
			return (byte) c;
		}
		switch (c) {
		case '\u20AC':
			return -128;
		case '\u201A':
			return -126;
		case '\u0192':
			return -125;
		case '\u201E':
			return -124;
		case '\u2026':
			return -123;
		case '\u2020':
			return -122;
		case '\u2021':
			return -121;
		case '\u02C6':
			return -120;
		case '\u2030':
			return -119;
		case '\u0160':
			return -118;
		case '\u2039':
			return -117;
		case '\u0152':
			return -116;
		case '\u017D':
			return -114;
		case '\u2018':
			return -111;
		case '\u2019':
			return -110;
		case '\u201C':
			return -109;
		case '\u201D':
			return -108;
		case '\u2022':
			return -107;
		case '\u2013':
			return -106;
		case '\u2014':
			return -105;
		case '\u02DC':
			return -104;
		case '\u2122':
			return -103;
		case '\u0161':
			return -102;
		case '\u203A':
			return -101;
		case '\u0153':
			return -100;
		case '\u017E':
			return -98;
		case '\u0178':
			return -97;
		default:
			return 63; // '?'
		}
	}

	/** XOR of the two arguments (kept for compatibility with existing callers). */
	static int method664(int i, int j) {
		return i ^ j;
	}

	/**
	 * Packs raw data into a cache container: [compression byte][compressed
	 * length][decompressed length if compressed][body].
	 *
	 * BZip2 packing (compression == 1) is not supported, so it is remapped to
	 * GZIP (2) up front — which also made the original's BZip2 branch
	 * unreachable; it has been removed.
	 *
	 * @throws RuntimeException if GZIP compression fails (the original printed
	 *                          the stack trace and then crashed with an NPE on
	 *                          {@code compressedData.length})
	 */
	public static final byte[] packContainer(int compression, byte[] data) {
		Stream stream = new Stream(data.length + 100); // headroom for header + worst-case expansion
		if (compression == 1) // we don't have compression 1 (BZip2) working
			compression = 2;
		stream.putByte(compression);
		byte[] compressedData;
		if (compression == 0) {
			compressedData = data;
		} else { // compression >= 2: GZIP
			ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream();
			try {
				GZIPOutputStream out = new GZIPOutputStream(compressedBytes);
				out.write(data);
				out.finish();
				out.close();
			} catch (IOException e) {
				throw new RuntimeException("Failed to GZIP-compress container data", e);
			}
			compressedData = compressedBytes.toByteArray();
		}
		stream.putInt(compressedData.length);
		if (compression >= 1) // compressed containers also record the raw size
			stream.putInt(data.length);
		for (int index = 0; index < compressedData.length; index++)
			stream.putByte(compressedData[index]);
		byte[] readyFileData = new byte[stream.offset];
		stream.offset = 0;
		stream.getBytes(readyFileData, 0, readyFileData.length);
		return readyFileData;
	}

	/**
	 * Unpacks a cache container produced by {@link #packContainer}: reads the
	 * compression byte and lengths, then returns the raw (decompressed) data.
	 * Any compression value other than 0 or 1 is treated as GZIP.
	 * The {@code minLength} guards are disabled while minLength == 0.
	 *
	 * @throws RuntimeException if a stored length is negative or exceeds minLength
	 */
	public static final byte[] unpackContainer(byte buffer[]) {
		Stream stream = new Stream(buffer);
		int compression = stream.getUByte();
		int fileSize = stream.getInt();
		if (fileSize < 0 || minLength != 0 && minLength < fileSize) {
			throw new RuntimeException();
		}
		if (compression == 0) {
			byte unpacked[] = new byte[fileSize];
			stream.getBytes(unpacked, 0, fileSize);
			return unpacked;
		}
		int decompressedSize = stream.getInt();
		if (decompressedSize < 0 || minLength != 0 && minLength < decompressedSize) {
			throw new RuntimeException();
		}
		byte decompressed[] = new byte[decompressedSize];
		if (compression != 1) {
			GZipDecompressor.decompress(stream, decompressed);
		} else {
			BZip2Decompressor.decompress(decompressed, decompressedSize, buffer, fileSize, 9);
		}
		return decompressed;
	}

}
this.length = length; + } + + public void setOffset(int offset) { + this.offset = offset; + } + + public int getRemaining() { + return offset < length ? length - offset : 0; + } + + public void addBytes(byte[] b, int offset, int length) { + checkCapacity(length - offset); + System.arraycopy(b, offset, buffer, this.offset, length); + this.length += length - offset; + } + + public int readPacket() { + return readUnsignedByte(); + } + + public int readByte() { + return getRemaining() > 0 ? buffer[offset++] : 0; + } + + public void readBytes(byte buffer[], int off, int len) { + for (int k = off; k < len + off; k++) { + buffer[k] = (byte) readByte(); + } + } + + public void readBytes(byte buffer[]) { + readBytes(buffer, 0, buffer.length); + } + + public int readSmart2() { + int i = 0; + int i_33_ = readUnsignedSmart(); + while ((i_33_ ^ 0xffffffff) == -32768) { + i_33_ = readUnsignedSmart(); + i += 32767; + } + i += i_33_; + return i; + } + + public int readUnsignedByte() { + return readByte() & 0xff; + } + + public int readByte128() { + return (byte) (readByte() - 128); + } + + public int readByteC() { + return (byte) -readByte(); + } + + public int read128Byte() { + return (byte) (128 - readByte()); + } + + public int readUnsignedByte128() { + return readUnsignedByte() - 128 & 0xff; + } + + public int readUnsignedByteC() { + return -readUnsignedByte() & 0xff; + } + + public int readUnsigned128Byte() { + return 128 - readUnsignedByte() & 0xff; + } + + public int readShortLE() { + int i = readUnsignedByte() + (readUnsignedByte() << 8); + if (i > 32767) { + i -= 0x10000; + } + return i; + } + + public int readShort128() { + int i = (readUnsignedByte() << 8) + (readByte() - 128 & 0xff); + if (i > 32767) { + i -= 0x10000; + } + return i; + } + + public int readShortLE128() { + int i = (readByte() - 128 & 0xff) + (readUnsignedByte() << 8); + if (i > 32767) { + i -= 0x10000; + } + return i; + } + + public int read128ShortLE() { + int i = (128 - readByte() & 0xff) + 
(readUnsignedByte() << 8); + if (i > 32767) { + i -= 0x10000; + } + return i; + } + + public int readShort() { + int i = (readUnsignedByte() << 8) + readUnsignedByte(); + if (i > 32767) { + i -= 0x10000; + } + return i; + } + + public int readUnsignedShortLE() { + return readUnsignedByte() + (readUnsignedByte() << 8); + } + + public int readUnsignedShort() { + return (readUnsignedByte() << 8) + readUnsignedByte(); + } + + public int readUnsignedShort128() { + return (readUnsignedByte() << 8) + (readByte() - 128 & 0xff); + } + + public int readUnsignedShortLE128() { + return (readByte() - 128 & 0xff) + (readUnsignedByte() << 8); + } + + public int readInt() { + return (readUnsignedByte() << 24) + (readUnsignedByte() << 16) + + (readUnsignedByte() << 8) + readUnsignedByte(); + } + + + public int read24BitInt() { + return (readUnsignedByte() << 16) + (readUnsignedByte() << 8) + + (readUnsignedByte()); + } + + public int readIntV1() { + return (readUnsignedByte() << 8) + readUnsignedByte() + + (readUnsignedByte() << 24) + (readUnsignedByte() << 16); + } + + public int readIntV2() { + return (readUnsignedByte() << 16) + (readUnsignedByte() << 24) + + readUnsignedByte() + (readUnsignedByte() << 8); + } + + public int readIntLE() { + return readUnsignedByte() + (readUnsignedByte() << 8) + + (readUnsignedByte() << 16) + (readUnsignedByte() << 24); + } + + public long readLong() { + long l = readInt() & 0xffffffffL; + long l1 = readInt() & 0xffffffffL; + return (l << 32) + l1; + } + + public String readString() { + String s = ""; + int b; + while ((b = readByte()) != 0) { + s += (char) b; + } + return s; + } + + public String readJagString() { + readByte(); + String s = ""; + int b; + while ((b = readByte()) != 0) { + s += (char) b; + } + return s; + } + + @SuppressWarnings("unused") + public int readBigSmart() { + if(Constants.CLIENT_BUILD < 670) + return readUnsignedShort(); + if ((buffer[offset] ^ 0xffffffff) <= -1) { + int value = readUnsignedShort(); + if (value == 
32767) { + return -1; + } + return value; + } + return readInt() & 0x7fffffff; + } + + public int readUnsignedSmart() { + int i = 0xff & buffer[offset]; + if (i >= 128) + return -32768 + readUnsignedShort(); + return readUnsignedByte(); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/com/alex/io/OutputStream.java b/Tools/Cache Editor/src/com/alex/io/OutputStream.java new file mode 100644 index 000000000..e3402edc3 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/io/OutputStream.java @@ -0,0 +1,335 @@ +package com.alex.io; + +import java.math.BigInteger; + +import com.alex.utils.Constants; + + + +public final class OutputStream extends Stream { + + private static final int[] BIT_MASK = new int[32]; + private int opcodeStart = 0; + + static { + for (int i = 0; i < 32; i++) + BIT_MASK[i] = (1 << i) - 1; + } + + public OutputStream(int capacity) { + setBuffer(new byte[capacity]); + } + + public OutputStream() { + setBuffer(new byte[16]); + } + + public OutputStream(byte[] buffer) { + this.setBuffer(buffer); + this.offset = buffer.length; + length = buffer.length; + } + + + public OutputStream(int[] buffer) { + setBuffer(new byte[buffer.length]); + for(int value : buffer) + writeByte(value); + } + + public void checkCapacityPosition(int position) { + if (position >= getBuffer().length) { + byte[] newBuffer = new byte[position + 16]; + System.arraycopy(getBuffer(), 0, newBuffer, 0, getBuffer().length); + setBuffer(newBuffer); + } + } + + public void skip(int length) { + setOffset(getOffset() + length); + } + + public void setOffset(int offset) { + this.offset = offset; + } + + + public void writeBytes(byte[] b, int offset, int length) { + checkCapacityPosition(this.getOffset() + length - offset); + System.arraycopy(b, offset, getBuffer(), this.getOffset(), length); + this.setOffset(this.getOffset() + (length - offset)); + } + + public void writeBytes(byte[] b) { + int offset = 0; + int length = b.length; + 
package com.alex.io;

import java.math.BigInteger;

import com.alex.utils.Constants;

/**
 * Growable byte sink providing the RS-protocol write primitives: raw bytes,
 * transformed bytes (+128 / negated / reversed), shorts/ints/longs in several
 * byte orders, "smart" variable-width values, strings, packet framing with
 * variable-size headers, bit access and RSA encoding.
 */
public final class OutputStream extends Stream {

	// BIT_MASK[i] has the low i bits set; used by writeBits.
	private static final int[] BIT_MASK = new int[32];
	// Offset of the current packet's size placeholder (see writePacketVar*).
	private int opcodeStart = 0;

	static {
		for (int i = 0; i < 32; i++)
			BIT_MASK[i] = (1 << i) - 1;
	}

	public OutputStream(int capacity) {
		setBuffer(new byte[capacity]);
	}

	public OutputStream() {
		setBuffer(new byte[16]);
	}

	/** Wraps an existing buffer; offset and length start at the end of it. */
	public OutputStream(byte[] buffer) {
		this.setBuffer(buffer);
		this.offset = buffer.length;
		length = buffer.length;
	}

	/** Writes each int's low byte into a fresh buffer. */
	public OutputStream(int[] buffer) {
		setBuffer(new byte[buffer.length]);
		for(int value : buffer)
			writeByte(value);
	}

	/** Grows the buffer (by position + 16) when writing at or past its end. */
	public void checkCapacityPosition(int position) {
		if (position >= getBuffer().length) {
			byte[] newBuffer = new byte[position + 16];
			System.arraycopy(getBuffer(), 0, newBuffer, 0, getBuffer().length);
			setBuffer(newBuffer);
		}
	}

	public void skip(int length) {
		setOffset(getOffset() + length);
	}

	public void setOffset(int offset) {
		this.offset = offset;
	}

	// NOTE(review): arraycopy copies `length` bytes but the offset advances by
	// `length - offset` — inconsistent when offset != 0; confirm callers only
	// pass offset == 0 before relying on this with a nonzero offset.
	public void writeBytes(byte[] b, int offset, int length) {
		checkCapacityPosition(this.getOffset() + length - offset);
		System.arraycopy(b, offset, getBuffer(), this.getOffset(), length);
		this.setOffset(this.getOffset() + (length - offset));
	}

	/** Appends the whole array. */
	public void writeBytes(byte[] b) {
		int offset = 0;
		int length = b.length;
		checkCapacityPosition(this.getOffset() + length - offset);
		System.arraycopy(b, offset, getBuffer(), this.getOffset(), length);
		this.setOffset(this.getOffset() + (length - offset));
	}

	// NOTE(review): the loop runs k in [offset, len) — `len` is treated as an
	// END index, not a count; with offset > 0 and len == data.length this is
	// fine, but a (data, off, count) caller would drop bytes. Confirm call sites.
	public void addBytes128(byte[] data, int offset, int len) {
		for (int k = offset; k < len; k++)
			writeByte((byte) (data[k] + 128));
	}

	// NOTE(review): same end-index vs count ambiguity as addBytes128.
	public void addBytesS(byte[] data, int offset, int len) {
		for (int k = offset; k < len; k++)
			writeByte((byte) (-128 + data[k]));
	}

	// NOTE(review): `offset` is ignored — always writes data[len-1 .. 0].
	public void addBytes_Reverse(byte[] data, int offset, int len) {
		for (int i = len - 1; i >= 0; i--) {
			writeByte((byte) (data[i]));
		}
	}

	// NOTE(review): `offset` is ignored, as in addBytes_Reverse.
	public void addBytes_Reverse128(byte[] data, int offset, int len) {
		for (int i = len - 1; i >= 0; i--) {
			writeByte((byte) (data[i] + 128));
		}
	}

	public void writeByte(int i) {
		writeByte(i, offset++);
	}

	public void writeNegativeByte(int i) {
		writeByte(-i, offset++);
	}

	/** Writes a byte at an absolute position without moving the offset. */
	public void writeByte(int i, int position) {
		checkCapacityPosition(position);
		getBuffer()[position] = (byte) i;
	}

	public void writeByte128(int i) {
		writeByte(i + 128);
	}

	public void writeByteC(int i) {
		writeByte(-i);
	}

	/** Big-endian 24-bit value. */
	public void write3Byte(int i) {
		writeByte(i >> 16);
		writeByte(i >> 8);
		writeByte(i);
	}

	public void write128Byte(int i) {
		writeByte(128 - i);
	}

	public void writeShortLE128(int i) {
		writeByte(i + 128);
		writeByte(i >> 8);
	}

	public void writeShort128(int i) {
		writeByte(i >> 8);
		writeByte(i + 128);
	}

	/**
	 * Writes a "big smart": plain short pre-670; otherwise an int (with the
	 * top bit forced on via the Integer.MAX_VALUE bias) for values at or above
	 * Short.MAX_VALUE, a short for smaller non-negative values, and the
	 * sentinel 32767 for negative values.
	 */
	@SuppressWarnings("unused")
	public void writeBigSmart(int i) {
		if(Constants.CLIENT_BUILD < 670) {
			writeShort(i);
			return;
		}
		if(i >= Short.MAX_VALUE && i >= 0)
			writeInt(i-Integer.MAX_VALUE-1);
		else {
			writeShort(i >= 0 ? i : 32767);
		}
	}

	/** Writes i as a run of smarts, emitting 32767 blocks until the remainder fits. */
	public void writeSmart2(int i) {
		while (i >= 0) {
			if (i < 32767) {
				writeSmart(i);
				return;
			}
			writeSmart(32767);
			i -= 32767;
		}
	}

	/** One byte for values < 128, otherwise a short biased by 32768. */
	public void writeSmart(int i) {
		if (i >= 128) {
			writeShort(i + 32768);
		} else {
			writeByte(i);
		}
	}

	public void writeShort(int i) {
		writeByte(i >> 8);
		writeByte(i);
	}

	public void writeShortLE(int i) {
		writeByte(i);
		writeByte(i >> 8);
	}

	public void write24BitInt(int i) {
		writeByte(i >> 16);
		writeByte(i >> 8);
		writeByte(i);
	}

	@Override
	public void writeInt(int i) {
		writeByte(i >> 24);
		writeByte(i >> 16);
		writeByte(i >> 8);
		writeByte(i);
	}

	/** Int in "V1" shuffled byte order (mirrors InputStream.readIntV1). */
	public void writeIntV1(int i) {
		writeByte(i >> 8);
		writeByte(i);
		writeByte(i >> 24);
		writeByte(i >> 16);
	}

	/** Int in "V2" shuffled byte order (mirrors InputStream.readIntV2). */
	public void writeIntV2(int i) {
		writeByte(i >> 16);
		writeByte(i >> 24);
		writeByte(i);
		writeByte(i >> 8);
	}

	public void writeIntLE(int i) {
		writeByte(i);
		writeByte(i >> 8);
		writeByte(i >> 16);
		writeByte(i >> 24);
	}

	public void writeLong(long l) {
		writeByte((int) (l >> 56));
		writeByte((int) (l >> 48));
		writeByte((int) (l >> 40));
		writeByte((int) (l >> 32));
		writeByte((int) (l >> 24));
		writeByte((int) (l >> 16));
		writeByte((int) (l >> 8));
		writeByte((int) l);
	}

	/** Packed smart: byte for < 128, biased short for < 32768, else logs an error. */
	public void writePSmarts(int i) {
		if (i < 128) {
			writeByte(i);
			return;
		}
		if (i < 32768) {
			writeShort(32768 + i);
			return;
		} else {
			System.out.println("Error psmarts out of range:");
			return;
		}
	}

	/** NUL-terminated string; bytes come from String.getBytes() (platform charset). */
	public void writeString(String s) {
		checkCapacityPosition(getOffset() + s.length() + 1);
		System.arraycopy(s.getBytes(), 0, getBuffer(), getOffset(), s.length());
		setOffset(getOffset() + s.length());
		writeByte(0);
	}

	/** Leading-0 "GJ" string (version byte then NUL-terminated body). */
	public void writeGJString(String s) {
		writeByte(0);
		writeString(s);
	}

	public void putGJString3(String s) {
		writeByte(0);
		writeString(s);
		writeByte(0);
	}

	public void writePacket(int id) {
		writeByte(id);
	}

	/** Opens a packet with a 1-byte size placeholder; close with endPacketVarByte. */
	public void writePacketVarByte(int id) {
		writePacket(id);
		writeByte(0);
		opcodeStart = getOffset() - 1;
	}

	/** Opens a packet with a 2-byte size placeholder; close with endPacketVarShort. */
	public void writePacketVarShort(int id) {
		writePacket(id);
		writeShort(0);
		opcodeStart = getOffset() - 2;
	}

	/*
	 * public void writePacketShort(int id) { writeByte(id); writeShort(0);
	 * opcodeStart = getOffset() - 2; }
	 */

	/** Back-patches the 1-byte size recorded by writePacketVarByte. */
	public void endPacketVarByte() {
		writeByte(getOffset() - (opcodeStart + 2) + 1, opcodeStart);
	}

	/** Back-patches the 2-byte size recorded by writePacketVarShort. */
	public void endPacketVarShort() {
		int size = getOffset() - (opcodeStart + 2);
		writeByte(size >> 8, opcodeStart++);
		writeByte(size, opcodeStart);
	}

	public void initBitAccess() {
		bitPosition = getOffset() * 8;
	}

	/** Leaves bit mode, rounding the offset up to the next whole byte. */
	public void finishBitAccess() {
		setOffset((bitPosition + 7) / 8);
	}

	public int getBitPos(int i) {
		return 8 * i - bitPosition;
	}

	/** Writes the low numBits of value at the current bit position (MSB first). */
	public void writeBits(int numBits, int value) {
		int bytePos = bitPosition >> 3;
		int bitOffset = 8 - (bitPosition & 7);
		bitPosition += numBits;
		// Fill whole bytes while more bits remain than fit in the current byte.
		for (; numBits > bitOffset; bitOffset = 8) {
			checkCapacityPosition(bytePos);
			getBuffer()[bytePos] &= ~BIT_MASK[bitOffset];
			getBuffer()[bytePos++] |= value >> numBits - bitOffset
					& BIT_MASK[bitOffset];
			numBits -= bitOffset;
		}
		checkCapacityPosition(bytePos);
		if (numBits == bitOffset) {
			getBuffer()[bytePos] &= ~BIT_MASK[bitOffset];
			getBuffer()[bytePos] |= value & BIT_MASK[bitOffset];
		} else {
			getBuffer()[bytePos] &= ~(BIT_MASK[numBits] << bitOffset - numBits);
			getBuffer()[bytePos] |= (value & BIT_MASK[numBits]) << bitOffset
					- numBits;
		}
	}

	public void setBuffer(byte[] buffer) {
		this.buffer = buffer;
	}

	/**
	 * Replaces the buffer contents [0, offset) with their RSA encoding
	 * (interpreted as a signed big-endian BigInteger, raised modPow(key, modulus)).
	 */
	public final void rsaEncode(BigInteger key, BigInteger modulus) {
		int length = offset;
		offset = 0;
		byte data[] = new byte[length];
		getBytes(data, 0, length);
		BigInteger biginteger2 = new BigInteger(data);
		BigInteger biginteger3 = biginteger2.modPow(key, modulus);
		byte out[] = biginteger3.toByteArray();
		offset = 0;
		writeBytes(out, 0, out.length);
	}

}
package com.alex.io;

/**
 * Base class for cache I/O streams: holds the backing buffer, the current
 * byte offset, the logical length and the bit cursor, and provides in-place
 * XTEA transcoding of a buffer region.
 */
public abstract class Stream {

	protected int offset;       // current read/write position
	protected int length;       // logical payload length
	protected byte[] buffer;    // backing storage
	protected int bitPosition;  // cursor for bit-level access

	public int getLength() {
		return length;
	}

	public byte[] getBuffer() {
		return buffer;
	}

	public int getOffset() {
		return offset;
	}

	/** Decrypts from byte 5 (past the container header) to the end of the payload. */
	public void decodeXTEA(int keys[]) {
		decodeXTEA(keys, 5, length);
	}

	/**
	 * In-place XTEA decryption of [start, end) in 8-byte blocks; a trailing
	 * partial block is left untouched. The key-schedule indices use masks
	 * 0x1c84 / (sum & 3) — NOTE(review): this matches the obfuscated client's
	 * variant rather than textbook XTEA's (sum >> 11) & 3; do not "fix" it.
	 */
	public void decodeXTEA(int keys[], int start, int end) {
		int l = offset; // preserve caller's position
		offset = start;
		int i1 = (end - start) / 8;
		for (int j1 = 0; j1 < i1; j1++) {
			int k1 = readInt();
			int l1 = readInt();
			int sum = 0xc6ef3720; // delta * 32, unwound backwards
			int delta = 0x9e3779b9;
			for (int k2 = 32; k2-- > 0;) {
				l1 -= keys[(sum & 0x1c84) >>> 11] + sum ^ (k1 >>> 5 ^ k1 << 4)
						+ k1;
				sum -= delta;
				k1 -= (l1 >>> 5 ^ l1 << 4) + l1 ^ keys[sum & 3] + sum;
			}
			offset -= 8; // rewind and overwrite the block just read
			writeInt(k1);
			writeInt(l1);
		}
		offset = l;
	}

	/** In-place XTEA encryption of [start, end); inverse of decodeXTEA. */
	public final void encodeXTEA(int keys[], int start, int end) {
		int o = offset;
		int j = (end - start) / 8;
		offset = start;
		for (int k = 0; k < j; k++) {
			int l = readInt();
			int i1 = readInt();
			int sum = 0;
			int delta = 0x9e3779b9;
			for (int l1 = 32; l1-- > 0;) {
				l += sum + keys[3 & sum] ^ i1 + (i1 >>> 5 ^ i1 << 4);
				sum += delta;
				i1 += l + (l >>> 5 ^ l << 4) ^ keys[(0x1eec & sum) >>> 11]
						+ sum;
			}

			offset -= 8;
			writeInt(l);
			writeInt(i1);
		}
		offset = o;
	}

	/** Big-endian int read used only by the XTEA routines above. */
	private final int readInt() {
		offset += 4;
		return ((0xff & buffer[-3 + offset]) << 16)
				+ ((((0xff & buffer[-4 + offset]) << 24) + ((buffer[-2
						+ offset] & 0xff) << 8)) + (buffer[-1 + offset] & 0xff));
	}

	/** Big-endian int write at the current offset. */
	public void writeInt(int value) {
		buffer[offset++] = (byte) (value >> 24);
		buffer[offset++] = (byte) (value >> 16);
		buffer[offset++] = (byte) (value >> 8);
		buffer[offset++] = (byte) value;
	}

	/** Copies len bytes from the current offset into data[off..off+len). */
	public final void getBytes(byte data[], int off, int len) {
		for (int k = off; k < len + off; k++) {
			data[k] = buffer[offset++];
		}
	}

}
package com.alex.loaders.clientscripts;

import java.io.BufferedWriter;
import java.io.FileWriter;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;

import com.alex.store.Store;
import com.alex.utils.ByteBufferUtils;

/**
 * The CS2 mapping (an enum-style key/value table stored in cache index 17).
 * Values are either Strings or Integers depending on the record's opcode.
 * @author Emperor
 */
public final class CS2Mapping {

	/**
	 * Cache of already-loaded mappings, keyed by script id.
	 * (Parameterized: the raw types here forced unchecked casts at every use.)
	 */
	private static final Map<Integer, CS2Mapping> maps = new HashMap<>();

	/**
	 * The script id.
	 */
	private final int scriptId;

	/**
	 * Unknown value (opcode 1; presumably the key type char — TODO confirm).
	 */
	private int unknown;

	/**
	 * Second unknown value (opcode 2; presumably the value type char — TODO confirm).
	 */
	private int unknown1;

	/**
	 * The default string value (opcode 3).
	 */
	private String defaultString;

	/**
	 * The default integer value (opcode 4).
	 */
	private int defaultInt;

	/**
	 * The mapping; values are String (odd opcodes) or Integer (even opcodes).
	 */
	private HashMap<Integer, Object> map;

	/**
	 * Constructs a new {@code CS2Mapping} {@code Object}.
	 * @param scriptId The script id.
	 */
	public CS2Mapping(int scriptId) {
		this.scriptId = scriptId;
	}

	/**
	 * Dumps the music track name -> id table to ./music_indexes.txt.
	 * @param args The arguments cast on runtime.
	 * @throws Throwable When an exception occurs.
	 */
	public static void main(String...args) throws Throwable {
		Store store = new Store("./666/");
		// try-with-resources: the original leaked the writer when an exception
		// occurred between open and close.
		try (BufferedWriter bw = new BufferedWriter(new FileWriter("./music_indexes.txt"))) {
			CS2Mapping musicList = forId(1347, store);
			CS2Mapping idList = forId(1351, store);
			// forId returns null when the script file is missing from the cache.
			if (musicList == null || musicList.map == null || idList == null || idList.map == null) {
				return;
			}
			for (int index : musicList.map.keySet()) {
				String name = (String) musicList.map.get(index);
				int id = (Integer) idList.map.get(index);
				bw.append(name + ": " + id);
				bw.newLine();
			}
			bw.flush();
		}
	}

	/**
	 * Gets the mapping for the given script id, loading and caching it on first use.
	 * @param scriptId The script id.
	 * @param store The cache store to read from.
	 * @return The mapping, or {@code null} if the script file is absent.
	 */
	public static CS2Mapping forId(int scriptId, Store store) {
		CS2Mapping mapping = maps.get(scriptId);
		if (mapping != null) {
			return mapping;
		}
		mapping = new CS2Mapping(scriptId);
		// Index 17 files are addressed as (scriptId >>> 8, scriptId & 0xFF).
		byte[] bs = store.getIndexes()[17].getFile(scriptId >>> 8, scriptId & 0xFF);
		if (bs == null) {
			return null;
		}
		mapping.load(ByteBuffer.wrap(bs));
		maps.put(scriptId, mapping);
		return mapping;
	}

	/**
	 * Loads the mapping data.
	 * @param buffer The buffer to read the data from.
	 */
	private void load(ByteBuffer buffer) {
		int opcode;
		while ((opcode = buffer.get() & 0xFF) != 0) {
			switch (opcode) {
			case 1:
				unknown = buffer.get() & 0xFF;
				break;
			case 2:
				unknown1 = buffer.get() & 0xFF;
				break;
			case 3:
				defaultString = ByteBufferUtils.getString(buffer);
				break;
			case 4:
				defaultInt = buffer.getInt();
				break;
			case 5: // int key, String value
			case 6: // int key, int value
			case 7: // short key, String value
			case 8: // short key, int value
				int size = buffer.getShort() & 0xFFFF;
				map = new HashMap<>(size);
				// Opcodes 7/8 carry a separate loop count and use short keys.
				int loop = opcode > 6 ? buffer.getShort() & 0xFFFF : size;
				for (int i = 0; i < loop; i++) {
					int key = opcode > 6 ? buffer.getShort() & 0xFFFF : buffer.getInt();
					if (opcode % 2 != 0) {
						map.put(key, ByteBufferUtils.getString(buffer));
					} else {
						map.put(key, buffer.getInt());
					}
				}
				break;
			}
		}
	}

	/**
	 * Gets the scriptId.
	 * @return The scriptId.
	 */
	public int getScriptId() {
		return scriptId;
	}

	/**
	 * Gets the unknown.
	 * @return The unknown.
	 */
	public int getUnknown() {
		return unknown;
	}

	/**
	 * Sets the unknown.
	 * @param unknown The unknown to set.
	 */
	public void setUnknown(int unknown) {
		this.unknown = unknown;
	}

	/**
	 * Gets the unknown1.
	 * @return The unknown1.
	 */
	public int getUnknown1() {
		return unknown1;
	}

	/**
	 * Sets the unknown1.
	 * @param unknown1 The unknown1 to set.
	 */
	public void setUnknown1(int unknown1) {
		this.unknown1 = unknown1;
	}

	/**
	 * Gets the defaultString.
	 * @return The defaultString.
	 */
	public String getDefaultString() {
		return defaultString;
	}

	/**
	 * Sets the defaultString.
	 * @param defaultString The defaultString to set.
	 */
	public void setDefaultString(String defaultString) {
		this.defaultString = defaultString;
	}

	/**
	 * Gets the defaultInt.
	 * @return The defaultInt.
	 */
	public int getDefaultInt() {
		return defaultInt;
	}

	/**
	 * Sets the defaultInt.
	 * @param defaultInt The defaultInt to set.
	 */
	public void setDefaultInt(int defaultInt) {
		this.defaultInt = defaultInt;
	}

	/**
	 * Gets the map.
	 * @return The map (String or Integer values, keyed by record key).
	 */
	public HashMap<Integer, Object> getMap() {
		return map;
	}

	/**
	 * Sets the map.
	 * @param map The map to set.
	 */
	public void setMap(HashMap<Integer, Object> map) {
		this.map = map;
	}
}
package com.alex.loaders.images;

import java.awt.image.BufferedImage;
import java.util.Arrays;

import com.alex.io.InputStream;
import com.alex.io.OutputStream;
import com.alex.store.Store;
import com.alex.utils.Constants;

/**
 * Codec for the cache's palette-indexed sprite archives: decodes one archive
 * file into BufferedImages and re-encodes a set of images back into the
 * on-disk format (up-to-256-colour palette, per-pixel palette indexes, and
 * optional per-pixel alpha when {@code oldRevision} is false).
 */
public final class IndexedColorImageFile {

	private BufferedImage[] images;          // decoded frames

	public static boolean oldRevision = true; // old format has no alpha channel
	private int pallete[];                   // RGB palette; index 0 is reserved (transparent/black)
	private int pixelsIndexes[][];           // per-frame palette indexes
	private byte alpha[][];                  // per-frame alpha bytes (nullable)
	private boolean[] usesAlpha;             // per-frame alpha flag
	private int biggestWidth;                // archive-level max frame width
	private int biggestHeight;               // archive-level max frame height
	private int[] minX;                      // per-frame x draw offset
	private int[] minY;                      // per-frame y draw offset

	public IndexedColorImageFile(BufferedImage... images) {
		this.images = images;
	}

	public IndexedColorImageFile(Store cache, int archiveId, int fileId) {
		this(cache, Constants.SPRITES_INDEX, archiveId, fileId);
	}

	/*
	 *
	 */
	public IndexedColorImageFile(Store cache, int idx, int archiveId, int fileId) {
		decodeArchive(cache, idx, archiveId, fileId);
	}

	/**
	 * Decodes one sprite file. The trailer (frame count, sizes, offsets and
	 * palette) sits at the END of the data; pixel data is read from the front.
	 */
	public void decodeArchive(Store cache, int idx, int archiveId, int fileId) {
		byte[] data = cache.getIndexes()[idx].getFile(archiveId, fileId);
		if(data == null)
			return;
		InputStream stream = new InputStream(data);
		stream.setOffset(data.length - 2); // frame count is the last short
		int count = stream.readUnsignedShort();
		images = new BufferedImage[count];
		pixelsIndexes = new int[images.length][];
		alpha = new byte[images.length][];
		usesAlpha = new boolean[images.length];
		minX = new int[images.length];
		minY = new int[images.length];
		int[] imagesWidth = new int[images.length];
		int[] imagesHeight = new int[images.length];
		// Trailer layout: maxW, maxH, paletteSize-1, then 4 per-frame short tables.
		stream.setOffset(data.length - 7 - images.length * 8);
		setBiggestWidth(stream.readShort()); //biggestWidth
		setBiggestHeight(stream.readShort()); //biggestHeight
		int palleteLength = (stream.readUnsignedByte() & 0xff) + 1;
		for (int index = 0; index < images.length; index++) {
			minX[index] = stream.readUnsignedShort();
			if (minX[index] != 0) {
				// System.out.println("Hai x " + minX[index] + ", index " + index + ", length " + images.length);
			}
		}
		for (int index = 0; index < images.length; index++) {
			minY[index] = stream.readUnsignedShort();
			if (minY[index] != 0) {
				//System.out.println("Hai y " + minY[index] + ", index " + index + ", length " + images.length);
			}
		}
		for (int index = 0; index < images.length; index++) {
			imagesWidth[index] = stream.readUnsignedShort();
		}
		for (int index = 0; index < images.length; index++) {
			imagesHeight[index] = stream.readUnsignedShort();
		}
		// Palette entries (3 bytes each) precede the trailer; index 0 is implicit.
		stream.setOffset(data.length - 7 - images.length * 8 - (palleteLength - 1) * 3);
		pallete = new int[palleteLength];
		for (int index = 1; index < palleteLength; index++) {
			pallete[index] = stream.read24BitInt();
			if (pallete[index] == 0)
				pallete[index] = 1; // 0 is reserved for transparency
		}
		stream.setOffset(0);
		for (int i_20_ = 0; i_20_ < images.length; i_20_++) {
			int pixelsIndexesLength = imagesWidth[i_20_] * imagesHeight[i_20_];
			pixelsIndexes[i_20_] = new int[pixelsIndexesLength];
			alpha[i_20_] = new byte[pixelsIndexesLength];
			// maskData bit 0: column-major pixel order; bit 1: alpha channel present.
			int maskData = stream.readUnsignedByte();
			if ((maskData & 0x2) == 0) {
				if ((maskData & 0x1) == 0) {
					for (int index = 0; index < pixelsIndexesLength; index++) {
						pixelsIndexes[i_20_][index] = (byte) stream.readByte();
					}
				} else {
					for (int i_24_ = 0; i_24_ < imagesWidth[i_20_]; i_24_++) {
						for (int i_25_ = 0; i_25_ < imagesHeight[i_20_]; i_25_++) {
							pixelsIndexes[i_20_][i_24_ + i_25_ * imagesWidth[i_20_]] = (byte) stream.readByte();
						}
					}
				}
			} else {
				usesAlpha[i_20_] = true;
				boolean bool = false; // set when any alpha byte differs from fully opaque (-1)
				if ((maskData & 0x1) == 0) {
					for (int index = 0; index < pixelsIndexesLength; index++) {
						pixelsIndexes[i_20_][index] = (byte) stream.readByte();
					}
					for (int i_27_ = 0; i_27_ < pixelsIndexesLength; i_27_++) {
						byte i_28_ = (alpha[i_20_][i_27_] = (byte) stream.readByte());
						bool = bool | i_28_ != -1;
					}
				} else {
					for (int i_29_ = 0; i_29_ < imagesWidth[i_20_]; i_29_++) {
						for (int i_30_ = 0; i_30_ < imagesHeight[i_20_]; i_30_++) {
							pixelsIndexes[i_20_][i_29_ + i_30_ * imagesWidth[i_20_]] = stream.readByte();
						}
					}
					for (int i_31_ = 0; i_31_ < imagesWidth[i_20_]; i_31_++) {
						for (int i_32_ = 0; i_32_ < imagesHeight[i_20_]; i_32_++) {
							byte i_33_ = (alpha[i_20_][i_31_ + i_32_
									* imagesWidth[i_20_]] = (byte) stream.readByte());
							bool = bool | i_33_ != -1;
						}
					}
				}
				if (!bool)
					alpha[i_20_] = null; // fully opaque; drop the channel
			}
			images[i_20_] = getBufferedImage(imagesWidth[i_20_], imagesHeight[i_20_], pixelsIndexes[i_20_], alpha[i_20_], usesAlpha[i_20_]);
		}
	}

	/**
	 * Builds an ABGR image from palette indexes; without an alpha channel,
	 * palette index 0 becomes fully transparent and everything else opaque.
	 */
	public BufferedImage getBufferedImage(int width, int height, int[] pixelsIndexes, byte[] extraPixels, boolean useExtraPixels) {
		if(width <= 0 || height <= 0)
			return null;
		BufferedImage image = new BufferedImage(width, height, BufferedImage.TYPE_4BYTE_ABGR);
		int[] rgbArray = new int[width * height];
		int i = 0;
		int i_43_ = 0;
		if(useExtraPixels && extraPixels != null) {
			for (int i_44_ = 0; i_44_ < height; i_44_++) {
				for (int i_45_ = 0; i_45_ < width; i_45_++) {
					rgbArray[i_43_++] = (extraPixels[i] << 24 | (pallete[pixelsIndexes[i] & 0xff]));
					i++;
				}
			}
		}else{
			for (int i_46_ = 0; i_46_ < height; i_46_++) {
				for (int i_47_ = 0; i_47_ < width; i_47_++) {
					int i_48_ = pallete[pixelsIndexes[i++] & 0xff];
					rgbArray[i_43_++] = i_48_ != 0 ? ~0xffffff | i_48_ : 0;
				}
			}
		}
		image.setRGB(0, 0, width, height, rgbArray, 0, width);
		image.flush();
		return image;
	}

	/**
	 * Re-encodes the current images into the on-disk sprite format
	 * (inverse of decodeArchive; pixel data first, trailer last).
	 */
	public byte[] encodeFile() {
		if(pallete == null) //if not generated yet
			generatePallete();
		OutputStream stream = new OutputStream();
		//sets pallete indexes and int size bytes
		for(int imageId = 0; imageId < images.length; imageId++) {
			int pixelsMask = 0;
			if(usesAlpha[imageId] && !oldRevision)
				pixelsMask |= 0x2;
			//pixelsMask |= 0x1; //sets read all rgbarray indexes 1by1
			stream.writeByte(pixelsMask);
			for (int index = 0; index < pixelsIndexes[imageId].length; index++)
				stream.writeByte(pixelsIndexes[imageId][index]);
			if(usesAlpha[imageId] && !oldRevision)
				for (int index = 0; index < alpha[imageId].length; index++)
					stream.writeByte(alpha[imageId][index]);
		}

		//sets up to 256colors pallete, index0 is black
		for(int index = 0; index < pallete.length; index++)
			stream.write24BitInt(pallete[index]);

		//extra inform
		if(biggestWidth == 0 && biggestHeight == 0) {
			for(BufferedImage image : images) {
				if(image.getWidth() > biggestWidth)
					biggestWidth = image.getWidth();
				if(image.getHeight() > biggestHeight)
					biggestHeight = image.getHeight();
			}
		}
		stream.writeShort(biggestWidth); //probably used for textures
		stream.writeShort(biggestHeight);//probably used for textures
		stream.writeByte(pallete.length-1); //sets pallete size, -1 cuz of black index
		for(int imageId = 0; imageId < images.length; imageId++)
			stream.writeShort(minX[imageId]);
		for(int imageId = 0; imageId < images.length; imageId++)
			stream.writeShort(minY[imageId]);
		for(int imageId = 0; imageId < images.length; imageId++)
			stream.writeShort(images[imageId].getWidth());
		for(int imageId = 0; imageId < images.length; imageId++)
			stream.writeShort(images[imageId].getHeight());
		stream.writeShort(images.length); //amt of images
		//generates fixed byte data array
		byte[] container = new byte[stream.getOffset()];
		stream.setOffset(0);
		stream.getBytes(container, 0, container.length);
		return container;
	}

	/**
	 * Returns the palette slot holding rgb, growing the palette on demand;
	 * falls back to slot 0 (with a console warning) once 256 entries exist.
	 */
	public int getPalleteIndex(int rgb) {
		if(pallete == null) {
			pallete = new int[] {0};
		}
		for(int index = 0; index < pallete.length; index++) {
			if(pallete[index] == rgb)
				return index;
		}
		if(pallete.length == 256) {
			System.out.println("Pallete to big, please reduce images quality.");
			return 0;
		}
		//throw new RuntimeException("Pallete to big, please reduce images quality.");
		int[] newpallete = new int[pallete.length+1];
		System.arraycopy(pallete, 0, newpallete, 0, pallete.length);
		newpallete[pallete.length] = rgb;
		pallete = newpallete;
		return pallete.length-1;
	}

	// NOTE(review): `index` is never used — this always removes the LAST image
	// and zeroes the second-to-last offsets; confirm whether per-index removal
	// was intended before relying on this.
	public void delete(int index) {
		System.out.println(images.length);
		BufferedImage[] newImages = Arrays.copyOf(images, images.length-1);
		images = newImages;
		int[] offsetX = Arrays.copyOf(this.minX, this.minX.length - 1);
		offsetX[this.minX.length-2] = 0;
		this.minX = offsetX;
		int[] offsetY = Arrays.copyOf(this.minY, this.minY.length - 1);
		offsetY[this.minY.length-2] = 0;
		this.minY = offsetY;
		// Invalidate derived state; regenerated lazily by encodeFile().
		pallete = null;
		pixelsIndexes = null;
		alpha = null;
		usesAlpha = null;
	}

	public int addImage(BufferedImage image) {
		return addImage(image, 0, 0);
	}

	/**
	 * Appends an image with the given draw offsets and invalidates the cached
	 * palette/index/alpha state.
	 * @return The new image's index.
	 */
	public int addImage(BufferedImage image, int minX, int minY) {
		BufferedImage[] newImages = Arrays.copyOf(images, images.length+1);
		newImages[images.length] = image;
		images = newImages;
		int[] offsetX = Arrays.copyOf(this.minX, this.minX.length + 1);
		offsetX[this.minX.length] = minX;
		this.minX = offsetX;
		int[] offsetY = Arrays.copyOf(this.minY, this.minY.length + 1);
		offsetY[this.minY.length] = minY;
		this.minY = offsetY;
		pallete = null;
		pixelsIndexes = null;
		alpha = null;
		usesAlpha = null;
		return images.length - 1;
	}

	/** Replaces one frame and invalidates the cached palette/index/alpha state. */
	public void replaceImage(BufferedImage image, int index) {
		images[index] = image;
		pallete = null;
		pixelsIndexes = null;
		alpha = null;
		usesAlpha = null;
	}

	/**
	 * Rebuilds the palette, per-pixel palette indexes and alpha channel from
	 * the current BufferedImages.
	 */
	public void generatePallete() {
		pixelsIndexes = new int[images.length][];
		alpha = new byte[images.length][];
		usesAlpha = new boolean[images.length];
		for(int index = 0; index < images.length; index++) {
			BufferedImage image = images[index];
			int[] rgbArray = new int[image.getWidth()*image.getHeight()];
			image.getRGB(0, 0, image.getWidth(), image.getHeight(), rgbArray, 0, image.getWidth());
			pixelsIndexes[index] = new int[image.getWidth()*image.getHeight()];
			alpha[index] = new byte[image.getWidth()*image.getHeight()];
			for(int pixel = 0; pixel < pixelsIndexes[index].length; pixel++) {
				int rgb = rgbArray[pixel];
				int medintrgb = convertToMediumInt(rgb); // strip alpha to 24-bit RGB
				int i = getPalleteIndex(medintrgb);
				pixelsIndexes[index][pixel] = i;
				if(rgb >> 24 != 0) {
					alpha[index][pixel] = (byte) (rgb >> 24);
					usesAlpha[index] = !oldRevision; // old format cannot carry alpha
				}
			}
		}
	}

	// NOTE(review): roundabout way of computing rgb & 0xFFFFFF by writing an
	// int and re-reading its low 3 bytes through the stream classes.
	public int convertToMediumInt(int rgb) {

		OutputStream out = new OutputStream(4);
		out.writeInt(rgb);
		InputStream stream = new InputStream(out.getBuffer());
		stream.setOffset(1);
		rgb = stream.read24BitInt();
		return rgb;
	}

	public BufferedImage[] getImages() {
		return images;
	}

	public int getBiggestWidth() {
		return biggestWidth;
	}

	public void setBiggestWidth(int biggestWidth) {
		this.biggestWidth = biggestWidth;
	}

	public int getBiggestHeight() {
		return biggestHeight;
	}

	public void setBiggestHeight(int biggestHeight) {
		this.biggestHeight = biggestHeight;
	}

}
package com.alex.loaders.images;

import java.awt.Image;
import java.awt.Toolkit;

import com.alex.store.Store;
import com.alex.utils.Constants;

/**
 * Thin wrapper around one loader-image archive entry: holds the raw encoded
 * image bytes, either supplied directly or fetched from the cache store, and
 * exposes them as an AWT {@link Image}.
 */
public class LoaderImageArchive {

	/** Raw encoded image bytes; remains null if the cache lookup found nothing. */
	private byte[] data;

	public LoaderImageArchive(byte[] data) {
		this.data = data;
	}

	public LoaderImageArchive(Store cache, int archiveId) {
		this(cache, Constants.LOADER_IMAGES_INDEX, archiveId, 0);
	}

	private LoaderImageArchive(Store cache, int idx, int archiveId, int fileId) {
		decodeArchive(cache, idx, archiveId, fileId);
	}

	/** Pulls the file from the store; leaves {@code data} untouched on a miss. */
	private void decodeArchive(Store cache, int idx, int archiveId, int fileId) {
		byte[] fetched = cache.getIndexes()[idx].getFile(archiveId, fileId);
		if (fetched != null) {
			this.data = fetched;
		}
	}

	/** Decodes the stored bytes via the default AWT toolkit. */
	public Image getImage() {
		return Toolkit.getDefaultToolkit().createImage(data);
	}

	/** Returns the raw encoded bytes as stored. */
	public byte[] getImageData() {
		return data;
	}

}
+ + +public class IComponent { + + public Object[] anObjectArray2296; + public int anInt2297; + public int otherAnimationId; + public int[] anIntArray2299; + public int anInt2300; + public int anInt2301; + public Object[] anObjectArray2302; + public int anInt2303; + public int anInt2305; + public boolean aBoolean2306; + public int anInt2308 = 0; + public int[] anIntArray2310; + public byte aByte2311; + public int anInt2312; + public Object[] anObjectArray2313; + public int anInt2314; + public int[] anIntArray2315; + public Object[] anObjectArray2316; + public byte[] aByteArray2317; + public Object[] anObjectArray2318; + public int anInt2319; + public int anInt2321; + public int height; + public int[] anIntArray2323; + public int anInt2324; + public int anInt2325; + public IComponent[] aClass173Array2326; + public int[][] childDataBuffers; + public Object[] anObjectArray2328; + public String optionName; + public String aString2330; + public Object[] anObjectArray2331; + public int anInt2332; + public int anInt2333; + public String aString2334; + public int anInt2335; + public Object[] anObjectArray2336; + public int[] anIntArray2337; + public int anInt2338; + public int anInt2340; + public byte aByte2341; + public boolean aBoolean2342; + public int anInt2343; + public Object[] anObjectArray2344; + public IComponent aClass173_2345; + public static int anInt2346; + public int anInt2347; + public Object[] anObjectArray2348; + public int anInt2349; + public int anInt2350; + public Object[] anObjectArray2351; + public Object[] anObjectArray2352; + public boolean aBoolean2353; + public boolean useScripts; + public byte aByte2356; + public String aString2357; + public int modelId; + public int[] anIntArray2360; + public int anInt2361; + public Object[] anObjectArray2362; + public String[] aStringArray2363; + public int anInt2364; + public int anInt2365; + public boolean aBoolean2366; + public boolean aBoolean2367; + public boolean aBoolean2368; + public int anInt2369; + 
public Object[] anObjectArray2371; + public String aString2373; + public int anInt2374; + public int anInt2375; + public int imageId; + public int[] anIntArray2379; + public boolean aBoolean2380; + public int anInt2381; + public int anInt2382; + public short aShort2383; + public int[] anIntArray2384; + public String[] aStringArray2385; + public int anInt2386; + public int[] anIntArray2388; + public int anInt2389; + public int anInt2390; + public String textToolTip; + public boolean aBoolean2393; + public int anInt2394; + public Object[] anObjectArray2395; + public int anInt2396; + public int anInt2397; + public IComponentSettings settings; + public Object[] anObjectArray2399; + public int[] itemIds; + public boolean aBoolean2401; + public Object[] anObjectArray2402; + public int anInt2403; + public boolean hidden; + public Object[] anObjectArray2405; + public int[] anIntArray2407; + public Object[] anObjectArray2408; + public int anInt2409; + public Object[] anObjectArray2410; + public int anInt2411; + public int anInt2412; + public boolean aBoolean2413; + public int anInt2414; + public int anInt2415; + public int modelType; + public byte[] aByteArray2417; + public int[] anIntArray2418; + public boolean aBoolean2419; + public short aShort2420; + public int anInt2421; + public boolean aBoolean2422; + public int anInt2423; + public int anInt2424; + public Object[] defaultScript; + public int anInt2427; + public boolean aBoolean2429; + public int[] anIntArray2431; + public int y; + public int borderThickness; + public boolean aBoolean2434; + public int anInt2435; + public boolean aBoolean2436; + public int anInt2437; + public int anInt2438; + public Object[] anObjectArray2439; + public int width; + public int anInt2441; + public int anInt2442; + public int animationId; + public int anInt2444; + public int x; + public Object[] anObjectArray2446; + public Object[] anObjectArray2447; + public int anInt2448; + public int[] anIntArray2449; + public int anInt2450; + public 
int anInt2451; + public int[] anIntArray2452; + public int anInt2453; + public Object[] anObjectArray2454; + public int hash; + public int parentId; + public int anInt2457; + public int anInt2458; + public int anInt2459; + public int anInt2461; + public Object[] anObjectArray2462; + public String aString2463; + public Object[] anObjectArray2464; + public Object[] anObjectArray2465; + public int anInt2467; + public byte aByte2469; + public int type; + public int anInt2471; + public int[] anIntArray2472; + public String aString2473; + public int anInt2474; + public Object[] anObjectArray2475; + public boolean aBoolean2476; + public int anInt2477; + public int[] anIntArray2478; + public int anInt2479; + public int anInt2480; + public int anInt2481; + public int anInt2482; + public Object[] anObjectArray2483; + public int anInt2484; + @SuppressWarnings("unused") + private boolean aBoolean4782; + int[] configs; + int[] configShifts; + + public void debug() throws IllegalArgumentException, IllegalAccessException { + for (Field f : getClass().getDeclaredFields()) { + if (!Modifier.isStatic(f.getModifiers())) { + if (f.getType().isArray()) { + Object object = f.get(this); + if (object != null) { + int length = Array.getLength(object); + System.out.print(f.getName() + ", ["); + for (int i = 0; i < length; i++) { + System.out.print(Array.get(object, i) + (i < (length - 1) ? 
", " : "")); + } + System.out.println("]"); + continue; + } + } + System.out.println(f.getName() + ", " + f.get(this)); + } + } + } + + public void decodeScriptsFormat(InputStream stream) { + useScripts = true; + int newInt = stream.readUnsignedByte(); + if (newInt == 255) { + newInt = -1; + } + type = stream.readUnsignedByte(); + if ((type & 0x80 ^ 0xffffffff) != -1) { + type &= 0x7f; + aString2473 = stream.readString(); + } + anInt2441 = stream.readUnsignedShort(); + x = stream.readShort(); + y = stream.readShort(); + width = stream.readUnsignedShort(); + height = stream.readUnsignedShort(); + aByte2356 = (byte) stream.readByte(); + aByte2341 = (byte) stream.readByte(); + aByte2469 = (byte) stream.readByte(); + aByte2311 = (byte) stream.readByte(); + parentId = stream.readUnsignedShort(); + if ((parentId ^ 0xffffffff) != -65536) + parentId = (hash & ~0xffff) + parentId; + else + parentId = -1; + int i_17_ = stream.readUnsignedByte(); + hidden = (0x1 & i_17_ ^ 0xffffffff) != -1; + if (newInt >= 0) { + aBoolean2429 = (i_17_ & 0x2 ^ 0xffffffff) != -1; + } + if ((type ^ 0xffffffff) == -1) { + anInt2444 = stream.readUnsignedShort(); + anInt2479 = stream.readUnsignedShort(); + if ((newInt ^ 0xffffffff) > -1) + aBoolean2429 = stream.readUnsignedByte() == 1; + } + if ((type ^ 0xffffffff) == -6) { + imageId = stream.readInt(); + anInt2381 = stream.readUnsignedShort(); + int i = stream.readUnsignedByte(); + aBoolean2422 = (0x2 & i ^ 0xffffffff) != -1; + aBoolean2434 = (i & 0x1 ^ 0xffffffff) != -1; + anInt2369 = stream.readUnsignedByte(); + borderThickness = stream.readUnsignedByte(); + anInt2325 = stream.readInt(); + aBoolean2419 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + aBoolean2342 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + anInt2467 = stream.readInt(); + if ((newInt ^ 0xffffffff) <= -4) + aBoolean4782 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + } + if ((type ^ 0xffffffff) == -7) { + modelType = 1; + modelId = stream.readBigSmart(); + anInt2480 
= stream.readShort(); + anInt2459 = stream.readShort(); + anInt2461 = stream.readUnsignedShort(); + anInt2482 = stream.readUnsignedShort(); + anInt2308 = stream.readUnsignedShort(); + anInt2403 = stream.readUnsignedShort(); + animationId = stream.readUnsignedShort(); + if (animationId == 65535) + animationId = -1; + aBoolean2476 = stream.readUnsignedByte() == 1; + aShort2383 = (short) stream.readUnsignedShort(); + aShort2420 = (short) stream.readUnsignedShort(); + aBoolean2368 = stream.readUnsignedByte() == 1; + if ((aByte2356 ^ 0xffffffff) != -1) + anInt2423 = stream.readUnsignedShort(); + if (aByte2341 != 0) + anInt2397 = stream.readUnsignedShort(); + } + if (type == 4) { + anInt2375 = stream.readBigSmart(); + if ((anInt2375 ^ 0xffffffff) == -65536) + anInt2375 = -1; + aString2357 = stream.readString(); + if(aString2357.toLowerCase().contains("ship")) + System.out.println(this.hash >> 16); + anInt2364 = stream.readUnsignedByte(); + anInt2312 = stream.readUnsignedByte(); + anInt2297 = stream.readUnsignedByte(); + aBoolean2366 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + anInt2467 = stream.readInt(); + } + if (type == 3) { + anInt2467 = stream.readInt(); + aBoolean2367 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + anInt2369 = stream.readUnsignedByte(); + } + if ((type ^ 0xffffffff) == -10) { + anInt2471 = stream.readUnsignedByte(); + anInt2467 = stream.readInt(); + aBoolean2306 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + } + int settingsHash = stream.read24BitInt(); + // int i_28_ = stream.readUnsignedByte(); + // if (i_28_ != 0) { + // anIntArray2449 = new int[11]; + // aByteArray2417 = new byte[11]; + // aByteArray2317 = new byte[11]; + // for (/**/; (i_28_ ^ 0xffffffff) != -1; + // i_28_ = stream.readUnsignedByte()) { + // int i_29_ = -1 + (i_28_ >> 360744868); + // i_28_ = i_28_ << -456693784 | stream.readUnsignedByte(); + // i_28_ &= 0xfff; + // if ((i_28_ ^ 0xffffffff) != -4096) + // anIntArray2449[i_29_] = i_28_; + // else + // 
anIntArray2449[i_29_] = -1; + // aByteArray2317[i_29_] = (byte) stream.readByte(); + // if ((aByteArray2317[i_29_] ^ 0xffffffff) != -1) + // aBoolean2401 = true; + // aByteArray2417[i_29_] = (byte) stream.readByte(); + // } + // } + textToolTip = stream.readString(); + int i_30_ = stream.readUnsignedByte(); + int i_31_ = i_30_ & 0xf; + if ((i_31_ ^ 0xffffffff) < -1) { + aStringArray2385 = new String[i_31_]; + for (int i_32_ = 0; i_31_ > i_32_; i_32_++) + aStringArray2385[i_32_] = stream.readString(); + } + int i_33_ = i_30_ >> -686838332; + if ((i_33_ ^ 0xffffffff) < -1) { + int i_34_ = stream.readUnsignedByte(); + anIntArray2315 = new int[1 + i_34_]; + for (int i_35_ = 0; i_35_ < anIntArray2315.length; i_35_++) + anIntArray2315[i_35_] = -1; + anIntArray2315[i_34_] = stream.readUnsignedShort(); + } + if ((i_33_ ^ 0xffffffff) < -2) { + int i_36_ = stream.readUnsignedByte(); + anIntArray2315[i_36_] = stream.readUnsignedShort(); + } + aString2330 = stream.readString(); + if (aString2330.equals("")) + aString2330 = null; + anInt2335 = stream.readUnsignedByte(); + anInt2319 = stream.readUnsignedByte(); + aBoolean2436 = (stream.readUnsignedByte() ^ 0xffffffff) == -2; + aString2463 = stream.readString(); + int defaultHash = -1; + if ((method2412(settingsHash) ^ 0xffffffff) != -1) { + defaultHash = stream.readUnsignedShort(); + if ((defaultHash ^ 0xffffffff) == -65536) + defaultHash = -1; + anInt2303 = stream.readUnsignedShort(); + if (anInt2303 == 65535) + anInt2303 = -1; + anInt2374 = stream.readUnsignedShort(); + if (anInt2374 == 65535) + anInt2374 = -1; + } + settings = new IComponentSettings(settingsHash, defaultHash); + defaultScript = decodeScript(stream); + anObjectArray2462 = decodeScript(stream); + anObjectArray2402 = decodeScript(stream); + anObjectArray2371 = decodeScript(stream); + anObjectArray2408 = decodeScript(stream); + anObjectArray2439 = decodeScript(stream); + anObjectArray2454 = decodeScript(stream); + anObjectArray2410 = decodeScript(stream); + 
anObjectArray2316 = decodeScript(stream); + anObjectArray2465 = decodeScript(stream); + anObjectArray2446 = decodeScript(stream); + anObjectArray2313 = decodeScript(stream); + anObjectArray2318 = decodeScript(stream); + anObjectArray2328 = decodeScript(stream); + anObjectArray2395 = decodeScript(stream); + anObjectArray2331 = decodeScript(stream); + anObjectArray2405 = decodeScript(stream); + anObjectArray2351 = decodeScript(stream); + anObjectArray2302 = decodeScript(stream); + anObjectArray2296 = decodeScript(stream); + anIntArray2452 = method2465(stream); + anIntArray2472 = method2465(stream); + anIntArray2360 = method2465(stream); + anIntArray2388 = method2465(stream); + anIntArray2299 = method2465(stream); + } + + public Object[] decodeScript(InputStream stream) { + int size = stream.readUnsignedByte(); + Object[] objects = new Object[size]; + for (int index = 0; index < size; index++) { + int type = stream.readUnsignedByte(); + if (type == 0) { + objects[index] = new Integer(stream.readInt()); + } + else if (type == 1) { + objects[index] = stream.readString(); + } + } + aBoolean2353 = true; + return objects; + } + + public int[] method2465(InputStream stream) { + int size = stream.readUnsignedByte(); + if (size == 0) + return null; + int[] array = new int[size]; + for (int index = 0; size > index; index++) + array[index] = stream.readInt(); + return array; + } + + public int setConfigs(List configs, int childIndex, Store store) { + if (childDataBuffers == null || childIndex >= childDataBuffers.length) { + return -2; + } + try { + int[] buffer = childDataBuffers[childIndex]; + int index = 0; + for (;;) { + int opcode = buffer[index++]; + if (opcode == 0) { + this.configs = new int[configs.size()]; + this.configShifts = new int[configs.size()]; + for (int i = 0; i < configs.size(); i++) { + int configId = configs.get(i); + int id = configId & 0xFFFF; + int shift = configId >> 16 & 0xFF; + this.configs[i] = id; + this.configShifts[i] = shift; + } + return 0; + } 
+ if (opcode == 1) { + index++; + } + if (opcode == 2) { + index++; + } + if (opcode == 3) { + index++; + } + if (opcode == 4) { + index += 3; + } + if (opcode == 5) { + int configId = buffer[index++]; + if (!configs.contains(configId)) { + configs.add(configId); + } + } + if (opcode == 6) { + index++; + } + if (opcode == 7) { + int configId = buffer[index++]; + if (!configs.contains(configId)) { + configs.add(configId); + } + } + if (opcode == 10) { + index += 3; + } + if (opcode == 13) { + int configId = buffer[index++]; + int shift = buffer[index++]; + int id = configId | shift << 16; + if (!configs.contains(id)) { + configs.add(id); + } + } + if (opcode == 14) { + int configFileId = buffer[index++]; + ConfigFileDefinition def = ConfigFileDefinition.forId(configFileId, store); + int id = def.getConfigId() | (def.getBitShift() << 16); + if (!configs.contains(id)) { + configs.add(id); + } + } + if (opcode == 20) { + index++; + } + } + } catch (Exception exception) { + return -1; + } + } + + + public void decodeNoscriptsFormat(InputStream stream) { + useScripts = false; + type = stream.readUnsignedByte(); + anInt2324 = stream.readUnsignedByte(); + anInt2441 = stream.readUnsignedShort(); + x = stream.readShort(); + y = stream.readShort(); + width = stream.readUnsignedShort(); + height = stream.readUnsignedShort(); + aByte2341 = (byte) 0; + aByte2356 = (byte) 0; + aByte2311 = (byte) 0; + aByte2469 = (byte) 0; + anInt2369 = stream.readUnsignedByte(); + parentId = stream.readUnsignedShort(); + if ((parentId ^ 0xffffffff) == -65536) + parentId = -1; + else + parentId = parentId + (hash & ~0xffff); + anInt2448 = stream.readUnsignedShort(); + if ((anInt2448 ^ 0xffffffff) == -65536) + anInt2448 = -1; + int i = stream.readUnsignedByte(); + if ((i ^ 0xffffffff) < -1) { + anIntArray2407 = new int[i]; + anIntArray2384 = new int[i]; + for (int i_0_ = 0; i > i_0_; i_0_++) { + anIntArray2384[i_0_] = stream.readUnsignedByte(); + anIntArray2407[i_0_] = stream.readUnsignedShort(); + 
} + } + int i_1_ = stream.readUnsignedByte(); + if ((i_1_ ^ 0xffffffff) < -1) { + childDataBuffers = new int[i_1_][]; + for (int i_2_ = 0; + (i_1_ ^ 0xffffffff) < (i_2_ ^ 0xffffffff); i_2_++) { + int i_3_ = stream.readUnsignedShort(); + childDataBuffers[i_2_] = new int[i_3_]; + for (int i_4_ = 0; (i_3_ ^ 0xffffffff) < (i_4_ ^ 0xffffffff); i_4_++) { + childDataBuffers[i_2_][i_4_] = stream.readUnsignedShort(); + if ((childDataBuffers[i_2_][i_4_] ^ 0xffffffff) == -65536) + childDataBuffers[i_2_][i_4_] = -1; + } + } + } + if ((type ^ 0xffffffff) == -1) { + anInt2479 = stream.readUnsignedShort(); + hidden = stream.readUnsignedByte() == 1; + } + if (type == 1) { + stream.readUnsignedShort(); + stream.readUnsignedByte(); + } + int i_5_ = 0; + if ((type ^ 0xffffffff) == -3) { + itemIds = new int[height * width]; + aByte2341 = (byte) 3; + anIntArray2418 = new int[height * width]; + aByte2356 = (byte) 3; + int i_6_ = stream.readUnsignedByte(); + if (i_6_ == 1) + i_5_ |= 0x10000000; + int i_7_ = stream.readUnsignedByte(); + if (i_7_ == 1) + i_5_ |= 0x40000000; + int i_8_ = stream.readUnsignedByte(); + stream.readUnsignedByte(); + if ((i_8_ ^ 0xffffffff) == -2) + i_5_ |= ~0x7fffffff; + anInt2332 = stream.readUnsignedByte(); + anInt2414 = stream.readUnsignedByte(); + anIntArray2337 = new int[20]; + anIntArray2323 = new int[20]; + anIntArray2431 = new int[20]; + for (int i_9_ = 0; i_9_ < 20; i_9_++) { + int i_10_ = stream.readUnsignedByte(); + if ((i_10_ ^ 0xffffffff) != -2) + anIntArray2431[i_9_] = -1; + else { + anIntArray2323[i_9_] = stream.readShort(); + anIntArray2337[i_9_] = stream.readShort(); + anIntArray2431[i_9_] = stream.readInt(); + } + } + aStringArray2363 = new String[5]; + for (int i_11_ = 0; i_11_ < 5; i_11_++) { + String string = stream.readString(); + if ((string.length() ^ 0xffffffff) < -1) { + aStringArray2363[i_11_] = string; + i_5_ |= 1 << 23 + i_11_; + } + } + } + if ((type ^ 0xffffffff) == -4) + aBoolean2367 = (stream.readUnsignedByte() ^ 0xffffffff) == 
-2; + if ((type ^ 0xffffffff) == -5 || type == 1) { + anInt2312 = stream.readUnsignedByte(); + anInt2297 = stream.readUnsignedByte(); + anInt2364 = stream.readUnsignedByte(); + anInt2375 = stream.readUnsignedShort(); + if ((anInt2375 ^ 0xffffffff) == -65536) + anInt2375 = -1; + aBoolean2366 = stream.readUnsignedByte() == 1; + } + if ((type ^ 0xffffffff) == -5) { + aString2357 = stream.readString(); + aString2334 = stream.readString(); + } + if (type == 1 || (type ^ 0xffffffff) == -4 + || type == 4) + anInt2467 = stream.readInt(); + if (type == 3 || type == 4) { + anInt2424 = stream.readInt(); + anInt2451 = stream.readInt(); + anInt2477 = stream.readInt(); + } + if ((type ^ 0xffffffff) == -6) { + imageId = stream.readInt(); + anInt2349 = stream.readInt(); + } + if ((type ^ 0xffffffff) == -7) { + modelType = 1; + modelId = stream.readUnsignedShort(); + anInt2301 = 1; + if (modelId == 65535) + modelId = -1; + anInt2386 = stream.readUnsignedShort(); //Model id + if ((anInt2386 ^ 0xffffffff) == -65536) + anInt2386 = -1; + animationId = stream.readUnsignedShort(); + if (animationId == 65535) + animationId = -1; + otherAnimationId = stream.readUnsignedShort(); + if (otherAnimationId == 65535) + otherAnimationId = -1; + anInt2403 = stream.readUnsignedShort(); + anInt2461 = stream.readUnsignedShort(); + anInt2482 = stream.readUnsignedShort(); + } + if ((type ^ 0xffffffff) == -8) { + aByte2341 = (byte) 3; + anIntArray2418 = new int[width * height]; + aByte2356 = (byte) 3; + itemIds = new int[width * height]; + anInt2312 = stream.readUnsignedByte(); + anInt2375 = stream.readUnsignedShort(); + if (anInt2375 == 65535) + anInt2375 = -1; + aBoolean2366 = stream.readUnsignedByte() == 1; + anInt2467 = stream.readInt(); + anInt2332 = stream.readShort(); + anInt2414 = stream.readShort(); + int i_12_ = stream.readUnsignedByte(); + if ((i_12_ ^ 0xffffffff) == -2) + i_5_ |= 0x40000000; + aStringArray2363 = new String[5]; + for (int i_13_ = 0; i_13_ < 5; i_13_++) { + String string = 
stream.readString(); + if (string.length() > 0) { + aStringArray2363[i_13_] = string; + i_5_ |= 1 << i_13_ + 23; + } + } + } + if ((type ^ 0xffffffff) == -9) + aString2357 = stream.readString(); + if (anInt2324 == 2 || (type ^ 0xffffffff) == -3) { + aString2463 = stream.readString(); + aString2373 = stream.readString(); + int i_14_ = 0x3f & stream.readUnsignedShort(); + i_5_ |= i_14_ << -116905845; + } + if ((anInt2324 ^ 0xffffffff) == -2 + || (anInt2324 ^ 0xffffffff) == -5 || anInt2324 == 5 + || anInt2324 == 6) { + optionName = stream.readString(); + if ((optionName.length() ^ 0xffffffff) == -1) { + if ((anInt2324 ^ 0xffffffff) == -2) + optionName = "Ok"; + if ((anInt2324 ^ 0xffffffff) == -5) + optionName = "Select"; + if ((anInt2324 ^ 0xffffffff) == -6) + optionName = "Select"; + if ((anInt2324 ^ 0xffffffff) == -7) + optionName = "Continue"; + } + } + if (anInt2324 == 1 || anInt2324 == 4 + || (anInt2324 ^ 0xffffffff) == -6) + i_5_ |= 0x400000; + if ((anInt2324 ^ 0xffffffff) == -7) + i_5_ |= 0x1; + settings = new IComponentSettings(i_5_, -1); + } + + + + public static int method2412(int arg0) { + return 0x7f & arg0 >> -809958741; + } + + public IComponent() { + anInt2301 = 1; + otherAnimationId = -1; + aByte2311 = (byte) 0; + optionName = "Ok"; + anInt2347 = 0; + anInt2319 = 0; + anInt2349 = -1; + aBoolean2366 = false; + aString2357 = ""; + anInt2321 = -1; + imageId = -1; + aBoolean2380 = false; + anInt2350 = -1; + aBoolean2306 = false; + anInt2364 = 0; + anInt2374 = -1; + anInt2324 = 0; + anInt2375 = -1; + anInt2343 = 0; + anInt2396 = 0; + anInt2369 = 0; + anInt2394 = 1; + aBoolean2401 = false; + height = 0; + anInt2303 = -1; + anInt2390 = 0; + aBoolean2393 = false; + anInt2333 = 0; + textToolTip = ""; + aBoolean2367 = false; + anInt2415 = 0; + anInt2332 = 0; + anInt2312 = 0; + anInt2386 = -1; + anInt2381 = 0; + anInt2423 = 0; + anInt2305 = 0; + aBoolean2436 = false; + aShort2383 = (short) 0; + anInt2389 = 0; + anInt2335 = 0; + aClass173_2345 = null; + 
aString2334 = ""; + aBoolean2422 = false; + hidden = false; + anInt2448 = -1; + aByte2356 = (byte) 0; + anInt2325 = 0; + anInt2442 = 0; + modelType = 1; + anInt2438 = 1; + anInt2441 = 0; + width = 0; + anInt2437 = 0; + anInt2414 = 0; + hash = -1; + aString2373 = ""; + aBoolean2368 = false; + anInt2457 = -1; + anInt2365 = -1; + anInt2435 = 0; + anInt2467 = 0; + anInt2397 = 0; + aBoolean2434 = false; + anInt2361 = -1; + anInt2424 = 0; + useScripts = false; + x = 0; + anInt2427 = 0; + anInt2412 = 0; + y = 0; + aBoolean2413 = false; + animationId = -1; + anInt2444 = 0; + borderThickness = 0; + aBoolean2476 = false; + anInt2471 = 1; + anInt2459 = 0; + anInt2403 = 100; + aByte2469 = (byte) 0; + anInt2477 = 0; + aBoolean2353 = false; + anInt2461 = 0; + aByte2341 = (byte) 0; + anInt2479 = 0; + anInt2297 = 0; + anInt2411 = 0; + aBoolean2429 = false; + anInt2481 = 1; + aShort2420 = (short) 3000; + anInt2338 = 0; + anInt2451 = 0; + anInt2450 = 0; + aString2463 = ""; + anInt2480 = 0; + anInt2453 = -1; + anInt2484 = 0; + anInt2474 = 2; + parentId = -1; + anInt2482 = 0; + anInt2421 = -1; + } + + +} diff --git a/Tools/Cache Editor/src/com/alex/loaders/interfaces/IComponentSettings.java b/Tools/Cache Editor/src/com/alex/loaders/interfaces/IComponentSettings.java new file mode 100644 index 000000000..a12f6e49d --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/loaders/interfaces/IComponentSettings.java @@ -0,0 +1,14 @@ +package com.alex.loaders.interfaces; + +public final class IComponentSettings { + + @SuppressWarnings("unused") + private int settingsHash; + @SuppressWarnings("unused") + private int defaultHash; + + public IComponentSettings(int settingsHash, int defaultHash) { //not using atm but can be used for easy find which options unlock, easy as fk + this.settingsHash = settingsHash; + this.defaultHash = defaultHash; + } +} diff --git a/Tools/Cache Editor/src/com/alex/loaders/interfaces/Interface.java b/Tools/Cache Editor/src/com/alex/loaders/interfaces/Interface.java new 
file mode 100644 index 000000000..75d8ee0e6 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/loaders/interfaces/Interface.java @@ -0,0 +1,135 @@ +package com.alex.loaders.interfaces; + +import java.awt.Component; +import java.awt.Image; +import java.awt.image.FilteredImageSource; +import java.awt.image.ImageFilter; +import java.awt.image.ImageProducer; +import java.awt.image.ReplicateScaleFilter; +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.ArrayList; +import java.util.HashMap; +import java.util.List; +import java.util.Map; + +import javax.swing.JComponent; + +import com.alex.io.InputStream; +import com.alex.store.Store; +import com.alex.utils.Utils; + +public class Interface { + + public int id; + public Store cache; + public IComponent[] components; + public JComponent[] jcomponents; + + + public static void main(String[] args) throws IOException, Throwable { + Store rscache = new Store("./498/"); + if (true) { + Interface inter = new Interface(25, rscache); + for (int i = 0; i < inter.components.length; i++) { + if (inter.components[i] != null) { + inter.components[i].debug(); + System.out.println("----------------------------------------"); + } + } + return; + } + @SuppressWarnings("unused") + BufferedWriter bw = new BufferedWriter(new FileWriter("498_interface_configs.txt")); + for (int i = 0; i < 750; i++) { + try { + Interface inter = new Interface(i, rscache); + if (inter.components == null) { + continue; + } + int child = 0; + Map> childConfigs = new HashMap<>(); + for (IComponent c : inter.components) { + if (c == null) { + continue; + } + List configs = new ArrayList<>(); + childConfigs.put(child, configs); + if (c.childDataBuffers != null) { + if (c.childDataBuffers[0][0] == 5) { + int id = c.childDataBuffers[0][1]; + if (!configs.contains(id)) { + configs.add(id); + } + } + for (int j = 0; j < c.childDataBuffers.length; j++) { + c.setConfigs(configs, j, rscache); + } + } + child++; + } + 
for (int c : childConfigs.keySet()) { + List configs = childConfigs.get(c); + if (configs.isEmpty()) { + continue; + } + String data = "Interface " + i + " child " + c + " config: "; + for (int j = 0; j < configs.size(); j++) { + if (j != 0) { + data += ", "; + } + int id = configs.get(j); + data += "[" + (id & 0xFFFF) + ", " + (id >> 16) + "]"; + } + bw.append(data); + bw.newLine(); + } + } catch(Throwable e) { + e.printStackTrace(); + } + } + bw.flush(); + bw.close(); + } + + public Interface(int id, Store cache) { + this(id,cache,true); + } + public Interface(int id, Store cache, boolean load) { + this.id = id; + this.cache = cache; + if(load) + getComponents(); + } + + public void draw(JComponent parent) { + + } + + public Image resizeImage(Image image, int width, int height, Component c) { + ImageFilter replicate = new ReplicateScaleFilter(width, height); + ImageProducer prod = new FilteredImageSource(image.getSource(),replicate); + return c.createImage(prod); + } + + + public void getComponents() { + if (Utils.getInterfaceDefinitionsSize(cache) <= id) { +// throw new RuntimeException("Invalid interface id."); + return; + } + components = new IComponent[Utils.getInterfaceDefinitionsComponentsSize(cache, id)]; + for(int componentId = 0; componentId < components.length; componentId++) { + components[componentId] = new IComponent(); + components[componentId].hash = id << 16 | componentId; + byte[] data = cache.getIndexes()[3].getFile(id, componentId); + if (data == null) + throw new RuntimeException("Interface "+id+", component "+componentId+" data is null."); + if (data[0] != -1) + components[componentId].decodeNoscriptsFormat(new InputStream(data)); + else + components[componentId].decodeScriptsFormat(new InputStream(data)); + } + } +} diff --git a/Tools/Cache Editor/src/com/alex/loaders/interfaces/InterfaceName.java b/Tools/Cache Editor/src/com/alex/loaders/interfaces/InterfaceName.java new file mode 100644 index 000000000..aea9a0848 --- /dev/null +++ 
// ---- (patch header: new file InterfaceName.java) ----
package com.alex.loaders.interfaces;

import java.io.IOException;

import com.alex.store.Store;
import com.alex.utils.Constants;
import com.alex.utils.Utils;

/**
 * Scratch tool for recovering interface archive names: prints name hashes and
 * archive ids from the cache so candidate names can be checked by hand.
 */
public class InterfaceName {

    /** Candidate alphabet for the (disabled) brute-force name search. */
    public static final char[] VALID_CHARS = {
            'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm',
            'n', 'o', 'p', 'q', 'r', 's', 't', 'u', 'v', 'w', 'x', 'y', 'z' };

    /** Placeholder; never implemented in the original. */
    public static void printAllCombinations4Letters() {
    }

    /**
     * Prints whether the interface index is named, the archive id of "chat",
     * and the name hash of "price checker" from the 697 cache.
     *
     * @throws IOException if the cache cannot be opened
     */
    public static void main(String[] args) throws IOException {
        Store rscache = new Store("cache697/", false);
        System.out.println(rscache.getIndexes()[Constants.INTERFACE_DEFINITIONS_INDEX].getTable().isNamed());
        System.out.println(rscache.getIndexes()[Constants.INTERFACE_DEFINITIONS_INDEX].getArchiveId("chat"));
        System.out.println(Utils.getNameHash("price checker"));
        // A brute-force search over VALID_CHARS (up to six letters) against a
        // known archive name hash was prototyped here and left commented out
        // in the original; dropped for clarity.
    }

}

// ---- patch continues: new file Tools/Cache Editor/src/com/alex/loaders/items/ItemDefinitions.java ----
package com.alex.loaders.items;

import
java.util.Arrays; +import java.util.HashMap; + +import com.alex.io.InputStream; +import com.alex.io.OutputStream; +import com.alex.store.Store; +import com.alex.utils.Constants; + +@SuppressWarnings("unused") +public class ItemDefinitions implements Cloneable { + + public int id; + private boolean loaded; + + public int invModelId; + private String name; + + //model size information + private int invModelZoom; + private int modelRotation1; + private int modelRotation2; + private int modelOffset1; + private int modelOffset2; + + //extra information + private int stackable; + private int value; + public boolean membersOnly; + + //wearing model information + public int maleEquipModelId1; + public int femaleEquipModelId1; + public int maleEquipModelId2; + public int femaleEquipModelId2; + + public int maleEquipModelId3; + public int femaleEquipModelId3; + //options + private String[] groundOptions; + public String[] inventoryOptions; + + //model information + public int[] originalModelColors; + public int[] modifiedModelColors; + public int[] originalTextureColors; + public int[] modifiedTextureColors; + private byte[] unknownArray1; + private int[] unknownArray2; + //extra information, not used for newer items + private boolean unnoted; + private int unknownInt1; + private int unknownInt2; + private int unknownInt3; + private int unknownInt4; + private int unknownInt5; + private int unknownInt6; + public int switchNoteItemId; + public int notedItemId; + private int[] stackIds; + private int[] stackAmounts; + private int unknownInt7; + private int unknownInt8; + private int unknownInt9; + private int unknownInt10; + private int unknownInt11; + public int teamId; + public int switchLendItemId; + public int lendedItemId; + private int unknownInt12; + private int unknownInt13; + private int unknownInt14; + private int unknownInt15; + private int unknownInt16; + private int unknownInt17; + private int unknownInt18; + private int unknownInt19; + private int unknownInt20; + 
private int unknownInt21; + private int unknownInt22; + private int unknownInt23; + private int equipSlot; + private HashMap clientScriptData; + + public static ItemDefinitions getItemDefinition(Store cache, int itemId) { + return getItemDefinition(cache, itemId, true); + } + + public static ItemDefinitions getItemDefinition(Store cache, int itemId, boolean load) { + return new ItemDefinitions(cache, itemId, load); + } + + public ItemDefinitions(Store cache, int id) { + this(cache, id, true); + } + + public ItemDefinitions(Store cache, int id, boolean load) { + this.id = id; + setDefaultsVariableValules(); + setDefaultOptions(); + if (load) + loadItemDefinition(cache); + } + + public boolean isLoaded() { + return loaded; + } + + public void write(Store store) { + store.getIndexes()[Constants.ITEM_DEFINITIONS_INDEX].putFile(getArchiveId(), getFileId(), encode()); + } + + private void loadItemDefinition(Store cache) { + byte[] data = cache.getIndexes()[Constants.ITEM_DEFINITIONS_INDEX].getFile(getArchiveId(), getFileId()); + if (data == null) { + System.out.println("FAILED LOADING ITEM " + id); + return; + } + try { + readOpcodeValues(new InputStream(data)); + }catch(RuntimeException e) { + e.printStackTrace(); + } + if(notedItemId != -1) + toNote(cache); + if(lendedItemId != -1) + toLend(cache); + loaded = true; + } + + private void toNote(Store store) { + //ItemDefinitions noteItem; //certTemplateId + ItemDefinitions realItem = getItemDefinition(store, switchNoteItemId); + membersOnly = realItem.membersOnly; + value = realItem.value; + name = realItem.name; + stackable = 1; + } + + private void toLend(Store store) { + //ItemDefinitions lendItem; //lendTemplateId + ItemDefinitions realItem = getItemDefinition(store, switchLendItemId); + originalModelColors = realItem.originalModelColors; + modifiedModelColors = realItem.modifiedModelColors; + teamId = realItem.teamId; + value = 0; + membersOnly = realItem.membersOnly; + name = realItem.name; + inventoryOptions = new 
String[5]; + groundOptions = realItem.groundOptions; + if (realItem.inventoryOptions != null) + for (int optionIndex = 0; optionIndex < 4; optionIndex++) + inventoryOptions[optionIndex] = realItem.inventoryOptions[optionIndex]; + inventoryOptions[4] = "Discard"; + maleEquipModelId1 = realItem.maleEquipModelId1; + maleEquipModelId2 = realItem.maleEquipModelId2; + femaleEquipModelId1 = realItem.femaleEquipModelId1; + femaleEquipModelId2 = realItem.femaleEquipModelId2; + maleEquipModelId3 = realItem.maleEquipModelId3; + femaleEquipModelId3 = realItem.femaleEquipModelId3; + equipSlot = realItem.equipSlot; + } + public int getArchiveId() { + return id >>> 8; + } + + public int getFileId() { + return 0xff & id; + } + + public boolean hasSpecialBar() { + if(clientScriptData == null) + return false; + Object specialBar = clientScriptData.get(686); + if(specialBar != null && specialBar instanceof Integer) + return (Integer) specialBar == 1; + return false; + } + public int getRenderAnimId() { + if(clientScriptData == null) + return 1426; + Object animId = clientScriptData.get(644); + if(animId != null && animId instanceof Integer) + return (Integer) animId; + return 1426; + } + + public void setRenderAnimId(int animId) { + if(clientScriptData == null) + clientScriptData = new HashMap(); + clientScriptData.put(644, animId); + } + + public int getQuestId() { + if(clientScriptData == null) + return -1; + Object questId = clientScriptData.get(861); + if(questId != null && questId instanceof Integer) + return (Integer) questId; + return -1; + } + + public HashMap getWearingSkillRequiriments() { + if(clientScriptData == null) + return null; + HashMap skills = new HashMap(); + int nextLevel = -1; + int nextSkill = -1; + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + if(value instanceof String) + continue; + if(key == 23) { + skills.put(4, (Integer) value); + skills.put(11, 61); + }else if (key >= 749 && key < 797) { + if(key % 2 == 0) + 
nextLevel = (Integer) value; + else + nextSkill = (Integer) value; + if(nextLevel != -1 && nextSkill != -1) { + skills.put(nextSkill, nextLevel); + nextLevel = -1; + nextSkill = -1; + } + } + + } + return skills; + } + + //test :P + public void printClientScriptData() { + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + System.out.println("KEY: "+key+", VALUE: "+value); + } + HashMap requiriments = getWearingSkillRequiriments(); + if(requiriments == null) { + System.out.println("null."); + return; + } + System.out.println(requiriments.keySet().size()); + for(int key : requiriments.keySet()) { + Object value = requiriments.get(key); + System.out.println("SKILL: "+key+", LEVEL: "+value); + } + } + + + private void setDefaultOptions() { + groundOptions = new String[] { null, null, "take", null, null }; + inventoryOptions = new String[] { null, null, null, null, "drop" }; + } + + private void setDefaultsVariableValules() { + name = "null"; + maleEquipModelId1 = -1; + maleEquipModelId2 = -1; + femaleEquipModelId1 = -1; + femaleEquipModelId2 = -1; + invModelZoom = 2000; + switchLendItemId = -1; + lendedItemId = -1; + switchNoteItemId = -1; + notedItemId = -1; + unknownInt9 = 128; + value = 1; + maleEquipModelId3 = -1; + femaleEquipModelId3 = -1; + equipSlot = -1; + } + + public byte[] encode() { + OutputStream stream = new OutputStream(); + + stream.writeByte(1); + stream.writeBigSmart(invModelId); + + if(!name.equals("null") && notedItemId == -1) { + stream.writeByte(2); + stream.writeString(name); + } + + if(invModelZoom != 2000) { + stream.writeByte(4); + stream.writeShort(invModelZoom); + } + + if(modelRotation1 != 0) { + stream.writeByte(5); + stream.writeShort(modelRotation1); + } + + if(modelRotation2 != 0) { + stream.writeByte(6); + stream.writeShort(modelRotation2); + } + + if(modelOffset1 != 0) { + stream.writeByte(7); + int value = modelOffset1 >>= 0; + if (value < 0) + value += 65536; + stream.writeShort(value); + } + + 
if(modelOffset2 != 0) { + stream.writeByte(8); + int value = modelOffset2 >>= 0; + if (value < 0) + value += 65536; + stream.writeShort(value); + } + + if(stackable >= 1 && notedItemId == -1) { + stream.writeByte(11); + } + + if(value != 1 && lendedItemId == -1) { + stream.writeByte(12); + stream.writeInt(value); + } + + if(equipSlot != -1) { + stream.writeByte(13); + stream.writeByte(equipSlot); + } + + if(membersOnly && notedItemId == -1) { + stream.writeByte(16); + } + + if(maleEquipModelId1 != -1) { + stream.writeByte(23); + stream.writeBigSmart(maleEquipModelId1); + } + + if(maleEquipModelId2 != -1) { + stream.writeByte(24); + stream.writeBigSmart(maleEquipModelId2); + } + + if(femaleEquipModelId1 != -1) { + stream.writeByte(25); + stream.writeBigSmart(femaleEquipModelId1); + } + + if(femaleEquipModelId2 != -1) { + stream.writeByte(26); + stream.writeBigSmart(femaleEquipModelId2); + } + + for(int index = 0; index < groundOptions.length; index++) { + if(groundOptions[index] == null || (index == 2 && groundOptions[index].equals("take"))) + continue; + stream.writeByte(30+index); + stream.writeString(groundOptions[index]); + } + + for(int index = 0; index < inventoryOptions.length; index++) { + if(inventoryOptions[index] == null || (index == 4 && inventoryOptions[index].equals("drop"))) + continue; + stream.writeByte(35+index); + stream.writeString(inventoryOptions[index]); + } + + if(originalModelColors != null && modifiedModelColors != null) { + stream.writeByte(40); + stream.writeByte(originalModelColors.length); + for(int index = 0; index < originalModelColors.length; index++) { + stream.writeShort(originalModelColors[index]); + stream.writeShort(modifiedModelColors[index]); + } + } + + if(originalTextureColors != null && modifiedTextureColors != null) { + stream.writeByte(41); + stream.writeByte(originalTextureColors.length); + for(int index = 0; index < originalTextureColors.length; index++) { + stream.writeShort(originalTextureColors[index]); + 
stream.writeShort(modifiedTextureColors[index]); + } + } + + if(unknownArray1 != null) { + stream.writeByte(42); + stream.writeByte(unknownArray1.length); + for(int index = 0; index < unknownArray1.length; index++) + stream.writeByte(unknownArray1[index]); + } + if(unnoted) { + stream.writeByte(65); + } + + if(maleEquipModelId3 != -1) { + stream.writeByte(78); + stream.writeBigSmart(maleEquipModelId3); + } + + if(femaleEquipModelId3 != -1) { + stream.writeByte(79); + stream.writeBigSmart(femaleEquipModelId3); + } + + //TODO FEW OPCODES HERE + + if(switchNoteItemId != -1) { + stream.writeByte(97); + stream.writeShort(switchNoteItemId); + } + + if(notedItemId != -1) { + stream.writeByte(98); + stream.writeShort(notedItemId); + } + + if(stackIds != null && stackAmounts != null) { + for(int index = 0; index < stackIds.length; index++) { + if(stackIds[index] == 0 && stackAmounts[index] == 0) + continue; + stream.writeByte(100+index); + stream.writeShort(stackIds[index]); + stream.writeShort(stackAmounts[index]); + } + } + + //TODO FEW OPCODES HERE + + if(teamId != 0) { + stream.writeByte(115); + stream.writeByte(teamId); + } + + if(switchLendItemId != -1) { + stream.writeByte(121); + stream.writeShort(switchLendItemId); + } + + if(lendedItemId != -1) { + stream.writeByte(122); + stream.writeShort(lendedItemId); + } + + //TODO FEW OPCODES HERE + + if(unknownArray2 != null) { + stream.writeByte(132); + stream.writeByte(unknownArray2.length); + for(int index = 0; index < unknownArray2.length; index++) + stream.writeShort(unknownArray2[index]); + } + + if(clientScriptData != null) { + stream.writeByte(249); + stream.writeByte(clientScriptData.size()); + for(int key : clientScriptData.keySet()) { + Object value = clientScriptData.get(key); + stream.writeByte(value instanceof String ? 
1 : 0); + stream.write24BitInt(key); + if(value instanceof String) { + stream.writeString((String) value); + }else{ + stream.writeInt((Integer) value); + } + } + } + //end + stream.writeByte(0); + + byte[] data = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(data, 0, data.length); + return data; + } + + public int getInvModelId() { + return invModelId; + } + + public void setInvModelId(int modelId) { + this.invModelId = modelId; + } + + public int getInvModelZoom() { + return invModelZoom; + } + + public void setInvModelZoom(int modelZoom) { + this.invModelZoom = modelZoom; + } + + private void readValues(InputStream stream, int opcode) { + if(opcode == 1) + invModelId = stream.readBigSmart(); + else if (opcode == 2) + name = stream.readString(); + else if (opcode == 4) + invModelZoom = stream.readUnsignedShort(); + else if (opcode == 5) + modelRotation1 = stream.readUnsignedShort(); + else if (opcode == 6) + modelRotation2 = stream.readUnsignedShort(); + else if (opcode == 7) { + modelOffset1 = stream.readUnsignedShort(); + if (modelOffset1 > 32767) + modelOffset1 -= 65536; + modelOffset1 <<= 0; + }else if (opcode == 8) { + modelOffset2 = stream.readUnsignedShort(); + if (modelOffset2 > 32767) + modelOffset2 -= 65536; + modelOffset2 <<= 0; + }else if (opcode == 11) + stackable = 1; + else if (opcode == 12) + value = stream.readInt(); + else if (opcode == 13) + equipSlot = stream.readUnsignedByte(); + else if (opcode == 14) + stream.readUnsignedByte(); + else if (opcode == 16) + membersOnly = true; + else if (opcode == 23) + maleEquipModelId1 = stream.readBigSmart(); + else if (opcode == 24) + maleEquipModelId2 = stream.readBigSmart(); + else if (opcode == 25) + femaleEquipModelId1 = stream.readBigSmart(); + else if (opcode == 26) + femaleEquipModelId2 = stream.readBigSmart(); + else if (opcode >= 30 && opcode < 35) + groundOptions[opcode-30] = stream.readString(); + else if (opcode >= 35 && opcode < 40) + inventoryOptions[opcode-35] = 
stream.readString(); + else if (opcode == 40) { + int length = stream.readUnsignedByte(); + originalModelColors = new int[length]; + modifiedModelColors = new int[length]; + for(int index = 0; index < length; index++) { + originalModelColors[index] = stream.readUnsignedShort(); + modifiedModelColors[index] = stream.readUnsignedShort(); + } + }else if (opcode == 41) { + int length = stream.readUnsignedByte(); + originalTextureColors = new int[length]; + modifiedTextureColors = new int[length]; + for(int index = 0; index < length; index++) { + originalTextureColors[index] = stream.readUnsignedShort(); + modifiedTextureColors[index] = stream.readUnsignedShort(); + } + }else if (opcode == 42) { + int length = stream.readUnsignedByte(); + unknownArray1 = new byte[length]; + for(int index = 0; index < length; index++) + unknownArray1[index] = (byte) stream.readByte(); + }else if (opcode == 65) + unnoted = true; + else if (opcode == 78) + maleEquipModelId3 = stream.readBigSmart(); + else if (opcode == 79) + femaleEquipModelId3 = stream.readBigSmart(); + else if (opcode == 90) + unknownInt1 = stream.readBigSmart(); + else if (opcode == 91) + unknownInt2 = stream.readBigSmart(); + else if (opcode == 92) + unknownInt3 = stream.readBigSmart(); + else if (opcode == 93) + unknownInt4 = stream.readBigSmart(); + else if (opcode == 95) + unknownInt5 = stream.readUnsignedShort(); + else if (opcode == 96) + unknownInt6 = stream.readUnsignedByte(); + else if (opcode == 97) + switchNoteItemId = stream.readUnsignedShort(); + else if (opcode == 98) + notedItemId = stream.readUnsignedShort(); + else if (opcode >= 100 && opcode < 110) { + if (stackIds == null) { + stackIds = new int[10]; + stackAmounts = new int[10]; + } + stackIds[opcode-100] = stream.readUnsignedShort(); + stackAmounts[opcode-100] = stream.readUnsignedShort(); + }else if (opcode == 110) + unknownInt7 = stream.readUnsignedShort(); + else if (opcode == 111) + unknownInt8 = stream.readUnsignedShort(); + else if (opcode == 
112) + unknownInt9 = stream.readUnsignedShort(); + else if (opcode == 113) + unknownInt10 = stream.readByte(); + else if (opcode == 114) + unknownInt11 = stream.readByte() * 5; + else if (opcode == 115) + teamId = stream.readUnsignedByte(); + else if (opcode == 121) + switchLendItemId = stream.readUnsignedShort(); + else if (opcode == 122) + lendedItemId = stream.readUnsignedShort(); + else if (opcode == 125) { + unknownInt12 = stream.readByte() << 0; + unknownInt13 = stream.readByte() << 0; + unknownInt14 = stream.readByte() << 0; + }else if (opcode == 126) { + unknownInt15 = stream.readByte() << 0; + unknownInt16 = stream.readByte() << 0; + unknownInt17 = stream.readByte() << 0; + }else if (opcode == 127) { + unknownInt18 = stream.readUnsignedByte(); + unknownInt19 = stream.readUnsignedShort(); + }else if (opcode == 128) { + unknownInt20 = stream.readUnsignedByte(); + unknownInt21 = stream.readUnsignedShort(); + }else if (opcode == 129) { + unknownInt20 = stream.readUnsignedByte(); + unknownInt21 = stream.readUnsignedShort(); + }else if (opcode == 130) { + unknownInt22 = stream.readUnsignedByte(); + unknownInt23 = stream.readUnsignedShort(); + }else if (opcode == 132) { + int length = stream.readUnsignedByte(); + unknownArray2 = new int[length]; + for(int index = 0; index < length; index++) + unknownArray2[index] = stream.readUnsignedShort(); + } else if (opcode == 134) { + int unknownValue = stream.readUnsignedByte(); + }else if (opcode == 139) { + int unknownValue = stream.readUnsignedShort(); + }else if (opcode == 140) { + int unknownValue = stream.readUnsignedShort(); + }else if (opcode == 249) { + int length = stream.readUnsignedByte(); + if(clientScriptData == null) + clientScriptData = new HashMap(length); + for (int index = 0; index < length; index++) { + boolean stringInstance = stream.readUnsignedByte() == 1; + int key = stream.read24BitInt(); + Object value = stringInstance ? 
stream.readString() : stream.readInt(); + clientScriptData.put(key, value); + } + } + else + throw new RuntimeException("MISSING OPCODE "+opcode+" FOR ITEM "+id); + } + + private void readOpcodeValues(InputStream stream) { + while (true) { + int opcode = stream.readUnsignedByte(); + if (opcode == 0) + break; + readValues(stream, opcode); + } + } + + public void setName(String name) { + this.name = name; + } + + public String getName() { + return name; + } + + public void resetTextureColors() { + originalTextureColors = null; + modifiedTextureColors = null; + } + + public void changeTextureColor(int originalModelColor, int modifiedModelColor) { + if(originalTextureColors != null) { + for(int i = 0; i < originalTextureColors.length; i++) { + if(originalTextureColors[i] == originalModelColor) { + modifiedTextureColors[i] = modifiedModelColor; + return; + } + } + int[] newOriginalModelColors = Arrays.copyOf(originalTextureColors, originalTextureColors.length+1); + int[] newModifiedModelColors = Arrays.copyOf(modifiedTextureColors, modifiedTextureColors.length+1); + newOriginalModelColors[newOriginalModelColors.length-1] = originalModelColor; + newModifiedModelColors[newModifiedModelColors.length-1] = modifiedModelColor; + originalTextureColors = newOriginalModelColors; + modifiedTextureColors = newModifiedModelColors; + }else{ + originalTextureColors = new int[] { originalModelColor}; + modifiedTextureColors = new int[] { modifiedModelColor}; + } + } + + public void resetModelColors() { + originalModelColors = null; + modifiedModelColors = null; + } + + public void changeModelColor(int originalModelColor, int modifiedModelColor) { + if(originalModelColors != null) { + for(int i = 0; i < originalModelColors.length; i++) { + if(originalModelColors[i] == originalModelColor) { + modifiedModelColors[i] = modifiedModelColor; + return; + } + } + int[] newOriginalModelColors = Arrays.copyOf(originalModelColors, originalModelColors.length+1); + int[] newModifiedModelColors = 
Arrays.copyOf(modifiedModelColors, modifiedModelColors.length+1); + newOriginalModelColors[newOriginalModelColors.length-1] = originalModelColor; + newModifiedModelColors[newModifiedModelColors.length-1] = modifiedModelColor; + originalModelColors = newOriginalModelColors; + modifiedModelColors = newModifiedModelColors; + }else{ + originalModelColors = new int[] { originalModelColor}; + modifiedModelColors = new int[] { modifiedModelColor}; + } + } + + public String[] getGroundOptions() { + return groundOptions; + } + + public String[] getInventoryOptions() { + return inventoryOptions; + } + + @Override + public Object clone() { + try { + return super.clone(); + } catch (CloneNotSupportedException e) { + e.printStackTrace(); + } + return null; + } + + @Override + public String toString() { + return id+" - "+name; + } +} diff --git a/Tools/Cache Editor/src/com/alex/store/Archive.java b/Tools/Cache Editor/src/com/alex/store/Archive.java new file mode 100644 index 000000000..f60b27064 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/Archive.java @@ -0,0 +1,159 @@ +package com.alex.store; + +import com.alex.io.InputStream; +import com.alex.io.OutputStream; +import com.alex.util.bzip2.BZip2Compressor; +import com.alex.util.bzip2.BZip2Decompressor; +import com.alex.util.crc32.CRC32HGenerator; +import com.alex.util.gzip.GZipCompressor; +import com.alex.util.gzip.GZipDecompressor; +import com.alex.util.whirlpool.Whirlpool; +import com.alex.utils.Constants; + +public class Archive { + + private int id; + private int revision; + private int compression; + private byte[] data; + private int[] keys; + + protected Archive(int id, byte[] archive, int[] keys) { + this.id = id; + this.keys = keys; + decompress(archive); + + } + + public Archive(int id, int compression, int revision, byte[] data) { + this.id = id; + this.compression = compression; + this.revision = revision; + this.data = data; + } + + public byte[] compress() { + OutputStream stream = new OutputStream(); 
+ stream.writeByte(compression);
+ byte[] compressedData;
+ switch(compression) {
+ case Constants.NO_COMPRESSION: //no compression: raw payload, single decompressed-length header
+ compressedData = data;
+ stream.writeInt(data.length);
+ break;
+ case Constants.BZIP2_COMPRESSION: //bzip2: compressed length then decompressed length
+ compressedData = BZip2Compressor.compress(data);
+ stream.writeInt(compressedData.length);
+ stream.writeInt(data.length);
+ //BUGFIX: missing break let this fall through into the gzip branch below,
+ //re-compressing the payload with gzip and writing duplicate length headers.
+ break;
+ default: //gzip
+ compressedData = GZipCompressor.compress(data);
+ stream.writeInt(compressedData.length);
+ stream.writeInt(data.length);
+ break;
+ }
+ stream.writeBytes(compressedData);
+ if(keys != null && keys.length == 4)
+ stream.encodeXTEA(keys, 5, stream.getOffset()); //encrypt everything after the 5-byte header
+ if(revision != -1)
+ stream.writeShort(revision); //trailing 2-byte revision sits outside the compressed payload
+ byte[] compressed = new byte[stream.getOffset()];
+ stream.setOffset(0);
+ stream.getBytes(compressed, 0, compressed.length);
+ return compressed;
+ }
+
+ /**
+ * Decodes a raw container read from the cache: 1 byte compression type,
+ * 4 byte compressed length, optional 4 byte decompressed length, payload,
+ * optional trailing 2 byte revision (detected by checkRevision).
+ */
+ private void decompress(byte[] archive) {
+ InputStream stream = new InputStream(archive);
+ if(keys != null && keys.length == 4)
+ stream.decodeXTEA(keys);
+ compression = stream.readUnsignedByte();
+ int compressedLength = stream.readInt();
+ if(compressedLength < 0 || compressedLength > Constants.MAX_VALID_ARCHIVE_LENGTH)
+ throw new RuntimeException("INVALID ARCHIVE HEADER");
+ switch(compression) {
+ case Constants.NO_COMPRESSION: //no compression
+ data = new byte[compressedLength];
+ checkRevision(compressedLength, archive, stream.getOffset());
+ stream.readBytes(data, 0, compressedLength);
+ break;
+ case Constants.BZIP2_COMPRESSION: //bzip2
+ int length = stream.readInt();
+ if(length <= 0) {
+ data = null;
+ break;
+ }
+ data = new byte[length];
+ checkRevision(compressedLength, archive, stream.getOffset());
+ BZip2Decompressor.decompress(data, archive, compressedLength, 9);
+ break;
+ default: //gzip
+ length = stream.readInt();
+ if(length <= 0 || length > 1000000000) { //sanity cap on decompressed size
+ data = null;
+ break;
+ }
+ data = new byte[length];
+
checkRevision(compressedLength, archive, stream.getOffset()); + if(!GZipDecompressor.decompress(stream, data)) + data = null; + break; + } + } + + private void checkRevision(int compressedLength, byte[] archive, int o) { + InputStream stream = new InputStream(archive); + int offset = stream.getOffset(); + if(stream.getLength()- (compressedLength+o) >= 2) { + stream.setOffset(stream.getLength()-2); + revision = stream.readUnsignedShort(); + stream.setOffset(offset); + }else + revision = -1; + + } + + public Object[] editNoRevision(byte[] data, MainFile mainFile) { + this.data = data; + if(compression == Constants.BZIP2_COMPRESSION) + compression = Constants.GZIP_COMPRESSION; + byte[] compressed = compress(); + if(!mainFile.putArchiveData(id, compressed)) + return null; + return new Object[] {CRC32HGenerator.getHash(compressed), Whirlpool.getHash(compressed, 0, compressed.length)}; + } + + public int getId() { + return id; + } + + public byte[] getData() { + return data; + } + + public int getDecompressedLength() { + return data.length; + } + + public int getRevision() { + return revision; + } + + public void setRevision(int revision) { + this.revision = revision; + } + + public int getCompression() { + return compression; + } + + public int[] getKeys() { + return keys; + } + + public void setKeys(int[] keys) { + this.keys = keys; + } + +} diff --git a/Tools/Cache Editor/src/com/alex/store/ArchiveReference.java b/Tools/Cache Editor/src/com/alex/store/ArchiveReference.java new file mode 100644 index 000000000..4394035a7 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/ArchiveReference.java @@ -0,0 +1,133 @@ +package com.alex.store; + +import java.util.Arrays; + + + +public class ArchiveReference { + + private int nameHash; + private byte[] whirpool; + private int crc; + private int revision; + private FileReference[] files; + private int[] validFileIds; + private boolean needsFilesSort; + private boolean updatedRevision; + + public void updateRevision() { + 
if(updatedRevision)
+ return; //bump the revision at most once per edit session
+ revision++;
+ updatedRevision = true;
+ }
+
+ public int getNameHash() {
+ return nameHash;
+ }
+
+ public void setNameHash(int nameHash) {
+ this.nameHash = nameHash;
+ }
+
+ public byte[] getWhirpool() {
+ return whirpool;
+ }
+
+ public void setWhirpool(byte[] whirpool) {
+ this.whirpool = whirpool;
+ }
+
+ public int getCRC() {
+ return crc;
+ }
+
+ public void setCrc(int crc) {
+ this.crc = crc;
+ }
+
+ public int getRevision() {
+ return revision;
+ }
+
+ public FileReference[] getFiles() {
+ return files;
+ }
+
+ public void setFiles(FileReference[] files) {
+ this.files = files;
+ }
+
+ public void setRevision(int revision) {
+ this.revision = revision;
+ }
+
+ public int[] getValidFileIds() {
+ return validFileIds;
+ }
+
+ public void setValidFileIds(int[] validFileIds) {
+ this.validFileIds = validFileIds;
+ }
+
+ public boolean isNeedsFilesSort() {
+ return needsFilesSort;
+ }
+
+ public void setNeedsFilesSort(boolean needsFilesSort) {
+ this.needsFilesSort = needsFilesSort;
+ }
+
+ //Drops fileId from the valid-id list and nulls its reference slot.
+ //NOTE(review): assumes fileId is currently present; if it is not, the
+ //shrunk array overflows on the last copy - confirm callers check first.
+ public void removeFileReference(int fileId) {
+ int[] newValidFileIds = new int[validFileIds.length-1];
+ int count = 0;
+ for(int id : validFileIds) {
+ if(id == fileId)
+ continue;
+ newValidFileIds[count++] = id;
+ }
+ validFileIds = newValidFileIds;
+ files[fileId] = null;
+ }
+
+ //Registers a fresh, empty file slot; appends the id unsorted and flags
+ //the archive so sortFiles() is run before the table is serialized.
+ public void addEmptyFileReference(int fileId) {
+ needsFilesSort = true;
+ int[] newValidFileIds = Arrays.copyOf(validFileIds, validFileIds.length+1);
+ newValidFileIds[newValidFileIds.length-1] = fileId;
+ validFileIds = newValidFileIds;
+ if(files.length <= fileId) {
+ //grow the reference array so fileId is addressable
+ FileReference[] newFiles = Arrays.copyOf(files, fileId+1);
+ newFiles[fileId] = new FileReference();
+ files = newFiles;
+ }else
+ files[fileId] = new FileReference();
+ }
+
+ public void sortFiles() {
+ Arrays.sort(validFileIds);
+ needsFilesSort = false;
+ }
+
+ //Wipes this reference back to an empty archive (revision 0, no files);
+ //marks the revision as already updated so updateRevision() won't bump it.
+ public void reset() {
+ whirpool = null;
+ updatedRevision = true;
+ revision = 0;
+ nameHash = 0;
+ crc = 0;
+ files = new
FileReference[0]; + validFileIds = new int[0]; + needsFilesSort = false; + } + + + public void copyHeader(ArchiveReference fromReference) { + setCrc(fromReference.getCRC()); + setNameHash(fromReference.getNameHash()); + setWhirpool(fromReference.getWhirpool()); + int[] validFiles = fromReference.getValidFileIds(); + setValidFileIds(Arrays.copyOf(validFiles, validFiles.length)); + FileReference[] files = fromReference.getFiles(); + setFiles(Arrays.copyOf(files, files.length)); + } + +} diff --git a/Tools/Cache Editor/src/com/alex/store/FileReference.java b/Tools/Cache Editor/src/com/alex/store/FileReference.java new file mode 100644 index 000000000..28bd2abd8 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/FileReference.java @@ -0,0 +1,15 @@ +package com.alex.store; + +public class FileReference { + + private int nameHash; + + public int getNameHash() { + return nameHash; + } + + public void setNameHash(int nameHash) { + this.nameHash = nameHash; + } + +} diff --git a/Tools/Cache Editor/src/com/alex/store/Index.java b/Tools/Cache Editor/src/com/alex/store/Index.java new file mode 100644 index 000000000..76df68623 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/Index.java @@ -0,0 +1,446 @@ +package com.alex.store; + +import com.alex.io.InputStream; +import com.alex.io.OutputStream; +import com.alex.util.crc32.CRC32HGenerator; +import com.alex.util.whirlpool.Whirlpool; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public final class Index { + + private MainFile mainFile; + private MainFile index255; + private ReferenceTable table; + private byte[][][] cachedFiles; + private int crc; + private byte[] whirlpool; + + protected Index(MainFile index255, MainFile mainFile, int[] keys) { + this.mainFile = mainFile; + this.index255 = index255; + byte[] archiveData = index255.getArchiveData(getId()); + if (archiveData == null) + return; + crc = CRC32HGenerator.getHash(archiveData); + whirlpool = Whirlpool.getHash(archiveData, 0, 
archiveData.length); + Archive archive = new Archive(getId(), archiveData, keys); + table = new ReferenceTable(archive); + resetCachedFiles(); + } + + public void resetCachedFiles() { + cachedFiles = new byte[getLastArchiveId() + 1][][]; + } + + public int getLastFileId(int archiveId) { + if (!archiveExists(archiveId)) + return -1; + return table.getArchives()[archiveId].getFiles().length - 1; + } + + public int getLastArchiveId() { + return table.getArchives().length - 1; + } + + public int getValidArchivesCount() { + return table.getValidArchiveIds().length; + } + + public int getValidFilesCount(int archiveId) { + if (!archiveExists(archiveId)) + return -1; + return table.getArchives()[archiveId].getValidFileIds().length; + } + + public boolean archiveExists(int archiveId) { + if(archiveId < 0) + return false; + ArchiveReference[] archives = table.getArchives(); + return archives.length > archiveId && archives[archiveId] != null; + } + + public boolean fileExists(int archiveId, int fileId) { + if (!archiveExists(archiveId)) + return false; + FileReference[] files = table.getArchives()[archiveId].getFiles(); + return files.length > fileId && files[fileId] != null; + } + + public int getArchiveId(String name) { + int nameHash = Utils.getNameHash(name); + ArchiveReference[] archives = table.getArchives(); + int[] validArchiveIds = table.getValidArchiveIds(); + for (int archiveId : validArchiveIds) { + if (archives[archiveId].getNameHash() == nameHash) + return archiveId; + } + return -1; + } + + public int getFileId(int archiveId, String name) { + if (!archiveExists(archiveId)) + return -1; + int nameHash = Utils.getNameHash(name); + FileReference[] files = table.getArchives()[archiveId].getFiles(); + int[] validFileIds = table.getArchives()[archiveId].getValidFileIds(); + for (int index = 0; index < validFileIds.length; index++) { + int fileId = validFileIds[index]; + if (files[fileId].getNameHash() == nameHash) + return fileId; + } + return -1; + } + + public 
byte[] getFile(int archiveId) { + if (!archiveExists(archiveId)) + return null; + return getFile(archiveId, + table.getArchives()[archiveId].getValidFileIds()[0]); + } + + public byte[] getFile(int archiveId, int fileId) { + return getFile(archiveId, fileId, null); + } + + public byte[] getFile(int archiveId, int fileId, int[] keys) { + try { + if (!fileExists(archiveId, fileId)) { + return null; + } + if (cachedFiles[archiveId] == null || cachedFiles[archiveId][fileId] == null) cacheArchiveFiles(archiveId, keys); + byte[] file = cachedFiles[archiveId][fileId]; + cachedFiles[archiveId][fileId] = null; + return file; + } catch (Throwable e) { + e.printStackTrace(); + return null; + } + } + + public boolean packIndex(Store originalStore) { + return packIndex(originalStore, false); + } + + public boolean packIndex(Store originalStore, boolean checkCRC) { + try { + return packIndex(getId(), originalStore, checkCRC); + }catch (Exception e) { + + } + return packIndex(getId(), originalStore, checkCRC); + } + + public boolean packIndex(int id, Store originalStore, boolean checkCRC) { + try { + Index originalIndex = originalStore.getIndexes()[id]; + for (int archiveId : originalIndex.table.getValidArchiveIds()) { + if (checkCRC + && archiveExists(archiveId) + && originalIndex.table.getArchives()[archiveId] + .getCRC() == table.getArchives()[archiveId] + .getCRC()) + continue; + if (!putArchive(id, archiveId, originalStore, false, false)) + return false; + } + if (!rewriteTable()) + return false; + resetCachedFiles(); + return true; + } catch (Exception e) { + + } + return true; + } + + public boolean putArchive(int archiveId, Store originalStore) { + return putArchive(getId(), archiveId, originalStore, true, true); + } + public boolean putArchive(int archiveId, Store originalStore, + boolean rewriteTable, boolean resetCache) { + return putArchive(getId(), archiveId, originalStore, rewriteTable, resetCache); + } + + + public boolean putArchive(int id, int archiveId, Store 
originalStore, + boolean rewriteTable, boolean resetCache) { + try { + Index originalIndex = originalStore.getIndexes()[id]; + byte[] data = originalIndex.getMainFile().getArchiveData(archiveId); + if (data == null) + return false; + if (!archiveExists(archiveId)) + table.addEmptyArchiveReference(archiveId); + ArchiveReference reference = table.getArchives()[archiveId]; + reference.updateRevision(); + ArchiveReference originalReference = originalIndex.table.getArchives()[archiveId]; + reference.copyHeader(originalReference); + int revision = reference.getRevision(); + data[data.length - 2] = (byte) (revision >> 8); + data[data.length - 1] = (byte) revision; + if (!mainFile.putArchiveData(archiveId, data)) + return false; + if (rewriteTable && !rewriteTable()) + return false; + if (resetCache) + resetCachedFiles(); + return true; + } catch (Exception e) { + e.printStackTrace(); + } + return false; + } + + + + + public boolean putFile(int archiveId, int fileId, byte[] data) { + return putFile(archiveId, fileId, Constants.GZIP_COMPRESSION, data, + null, true, true, -1, -1); + } + + public boolean removeFile(int archiveId, int fileId) { + return removeFile(archiveId, fileId, Constants.GZIP_COMPRESSION, null); + } + + public boolean removeFile(int archiveId, int fileId, int compression, + int[] keys) { + if (!fileExists(archiveId, fileId)) + return false; + cacheArchiveFiles(archiveId, keys); + ArchiveReference reference = table.getArchives()[archiveId]; + reference.removeFileReference(fileId); + int filesCount = getValidFilesCount(archiveId); + byte[] archiveData; + if (filesCount == 1) + archiveData = getFile(archiveId, reference.getValidFileIds()[0], + keys); + else { + int[] filesSize = new int[filesCount]; + OutputStream stream = new OutputStream(); + for (int index = 0; index < filesCount; index++) { + int id = reference.getValidFileIds()[index]; + byte[] fileData = getFile(archiveId, id, keys); + filesSize[index] = fileData.length; + stream.writeBytes(fileData); 
+ } + for (int index = 0; index < filesSize.length; index++) { + int offset = filesSize[index]; + if (index != 0) + offset -= filesSize[index - 1]; + stream.writeInt(offset); + } + stream.writeByte(1); // 1loop + archiveData = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archiveData, 0, archiveData.length); + } + reference.updateRevision(); + Archive archive = new Archive(archiveId, compression, + reference.getRevision(), archiveData); + byte[] closedArchive = archive.compress(); + reference.setCrc(CRC32HGenerator.getHash(closedArchive, 0, + closedArchive.length - 2)); + reference.setWhirpool(Whirlpool.getHash(closedArchive, 0, + closedArchive.length - 2)); + if (!mainFile.putArchiveData(archiveId, closedArchive)) + return false; + if (!rewriteTable()) + return false; + resetCachedFiles(); + return true; + } + + public boolean putFile(int archiveId, int fileId, int compression, + byte[] data, int[] keys, boolean rewriteTable, boolean resetCache, + int archiveName, int fileName) { + if (!archiveExists(archiveId)) { + table.addEmptyArchiveReference(archiveId); + resetCachedFiles(); + cachedFiles[archiveId] = new byte[1][]; + } else { + cacheArchiveFiles(archiveId, keys); + } + ArchiveReference reference = table.getArchives()[archiveId]; + if (!fileExists(archiveId, fileId)) + reference.addEmptyFileReference(fileId); + reference.sortFiles(); + int filesCount = getValidFilesCount(archiveId); + byte[] archiveData; + if (filesCount == 1) + archiveData = data; + else { + int[] filesSize = new int[filesCount]; + OutputStream stream = new OutputStream(); + for (int index = 0; index < filesCount; index++) { + int id = reference.getValidFileIds()[index]; + byte[] fileData; + if (id == fileId) + fileData = data; + else + fileData = getFile(archiveId, id, keys); + filesSize[index] = fileData.length; + stream.writeBytes(fileData); + } + for (int index = 0; index < filesCount; index++) { + int offset = filesSize[index]; + if (index != 0) + offset -= 
filesSize[index - 1]; + stream.writeInt(offset); + } + stream.writeByte(1); // 1loop + archiveData = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archiveData, 0, archiveData.length); + } + reference.updateRevision(); + Archive archive = new Archive(archiveId, compression, + reference.getRevision(), archiveData); + + //Fixed packing maps on 498 + archive.setKeys(keys); + + byte[] closedArchive = archive.compress(); + reference.setCrc(CRC32HGenerator.getHash(closedArchive, 0, + closedArchive.length - 2)); + reference.setWhirpool(Whirlpool.getHash(closedArchive, 0, + closedArchive.length - 2)); + if (archiveName != -1) + reference.setNameHash(archiveName); + if (fileName != -1) + reference.getFiles()[fileId].setNameHash(fileName); + if (!mainFile.putArchiveData(archiveId, closedArchive)) + return false; + if (rewriteTable && !rewriteTable()) + return false; + if (resetCache) + resetCachedFiles(); + return true; + } + + public boolean encryptArchive(int archiveId, int[] keys) { + return encryptArchive(archiveId, null, keys, true, true); + } + + public boolean encryptArchive(int archiveId, int[] oldKeys, int[] keys, boolean rewriteTable, boolean resetCache) { + if (!archiveExists(archiveId)) + return false; + Archive archive = mainFile.getArchive(archiveId, oldKeys); + if (archive == null) + return false; + ArchiveReference reference = table.getArchives()[archiveId]; + if(reference.getRevision() != archive.getRevision()) + throw new RuntimeException("ERROR REVISION"); + reference.updateRevision(); + archive.setRevision(reference.getRevision()); + archive.setKeys(keys); + byte[] closedArchive = archive.compress(); + reference.setCrc(CRC32HGenerator.getHash(closedArchive, 0, + closedArchive.length - 2)); + reference.setWhirpool(Whirlpool.getHash(closedArchive, 0, + closedArchive.length - 2)); + if (!mainFile.putArchiveData(archiveId, closedArchive)) + return false; + if (rewriteTable && !rewriteTable()) + return false; + if (resetCache) + 
resetCachedFiles(); + return true; + + } + + public boolean rewriteTable() { + table.updateRevision(); + table.sortTable(); + Object[] hashes = table.encodeHeader(index255); + if (hashes == null) + return false; + //crc = (int) hashes[0]; + whirlpool = (byte[]) hashes[1]; + return true; + } + + public void setKeys(int[] keys) { + table.setKeys(keys); + } + + public int[] getKeys() { + return table.getKeys(); + } + + private void cacheArchiveFiles(int archiveId, int[] keys) { + Archive archive = getArchive(archiveId, keys); + int lastFileId = getLastFileId(archiveId); + cachedFiles[archiveId] = new byte[lastFileId + 1][]; + if (archive == null) + return; + byte[] data = archive.getData(); + if (data == null) + return; + int filesCount = getValidFilesCount(archiveId); + if (filesCount == 1) + cachedFiles[archiveId][lastFileId] = data; + else { + int readPosition = data.length; + int amtOfLoops = data[--readPosition] & 0xff; + readPosition -= amtOfLoops * (filesCount * 4); + InputStream stream = new InputStream(data); + stream.setOffset(readPosition); + int filesSize[] = new int[filesCount]; + for (int loop = 0; loop < amtOfLoops; loop++) { + int offset = 0; + for (int i = 0; i < filesCount; i++) + filesSize[i] += offset += stream.readInt(); + } + byte[][] filesData = new byte[filesCount][]; + for (int i = 0; i < filesCount; i++) { + filesData[i] = new byte[filesSize[i]]; + filesSize[i] = 0; + } + stream.setOffset(readPosition); + int sourceOffset = 0; + for (int loop = 0; loop < amtOfLoops; loop++) { + int dataRead = 0; + for (int i = 0; i < filesCount; i++) { + dataRead += stream.readInt(); + System.arraycopy(data, sourceOffset, filesData[i], + filesSize[i], dataRead); + sourceOffset += dataRead; + filesSize[i] += dataRead; + } + } + int count = 0; + for (int fileId : table.getArchives()[archiveId].getValidFileIds()) + cachedFiles[archiveId][fileId] = filesData[count++]; + } + } + + public int getId() { + return mainFile.getId(); + } + + public ReferenceTable 
getTable() { + return table; + } + + public MainFile getMainFile() { + return mainFile; + } + + public Archive getArchive(int id) { + return mainFile.getArchive(id, null); + } + + public Archive getArchive(int id, int[] keys) { + return mainFile.getArchive(id, keys); + } + + public int getCRC() { + return crc; + } + + public byte[] getWhirlpool() { + return whirlpool; + } +} diff --git a/Tools/Cache Editor/src/com/alex/store/MainFile.java b/Tools/Cache Editor/src/com/alex/store/MainFile.java new file mode 100644 index 000000000..69f30b9ed --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/MainFile.java @@ -0,0 +1,249 @@ +package com.alex.store; + +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; + +import com.alex.utils.Constants; + +/* + * Created by Alex(Dragonkk) + * 23/10/11 + */ +public final class MainFile { + + private static final int IDX_BLOCK_LEN = 6; + private static final int HEADER_LEN = 8; + private static final int EXPANDED_HEADER_LEN = 10; + private static final int BLOCK_LEN = 512; + private static final int EXPANDED_BLOCK_LEN = 510; + private static final int TOTAL_BLOCK_LEN = HEADER_LEN + BLOCK_LEN; + private static final ByteBuffer tempBuffer = ByteBuffer.allocateDirect(TOTAL_BLOCK_LEN); + + private int id; + private FileChannel index; + private FileChannel data; + private boolean newProtocol; + + protected MainFile(int id, RandomAccessFile data, RandomAccessFile index, boolean newProtocol) throws IOException { + this.id = id; + this.data = data.getChannel(); + this.index = index.getChannel(); + this.newProtocol = newProtocol; + } + + public Archive getArchive(int id) { + return getArchive(id, null); + } + + public Archive getArchive(int id, int[] keys) { + byte[] data = getArchiveData(id); + if(data == null) + return null; + return new Archive(id, data, keys); + } + + public byte[] getArchiveData(int archiveId) { + synchronized(data) { + try { + 
tempBuffer.position(0).limit(IDX_BLOCK_LEN); + index.read(tempBuffer, archiveId * IDX_BLOCK_LEN); + tempBuffer.flip(); + int size = getMediumInt(tempBuffer); + int block = getMediumInt(tempBuffer); + if (size < 0) + return null; + if (block <= 0 || block > data.size() / TOTAL_BLOCK_LEN) { + return null; + } + ByteBuffer fileBuffer = ByteBuffer.allocate(size); + int remaining = size; + int chunk = 0; + int blockLen = !newProtocol || archiveId <= 0xffff ? BLOCK_LEN : EXPANDED_BLOCK_LEN; + int headerLen = !newProtocol || archiveId <= 0xffff ? HEADER_LEN : EXPANDED_HEADER_LEN; + while (remaining > 0) { + if (block == 0) { + System.out.println(archiveId+", "+newProtocol); + return null; + } + int blockSize = remaining > blockLen ? blockLen : remaining; + tempBuffer.position(0).limit(blockSize + headerLen); + data.read(tempBuffer, block * TOTAL_BLOCK_LEN); + tempBuffer.flip(); + + int currentFile, currentChunk, nextBlock, currentIndex; + + if (!newProtocol || archiveId <= 65535) { + currentFile = tempBuffer.getShort() & 0xffff; + currentChunk = tempBuffer.getShort() & 0xffff; + nextBlock = getMediumInt(tempBuffer); + currentIndex = tempBuffer.get() & 0xff; + } else { + currentFile = tempBuffer.getInt(); + currentChunk = tempBuffer.getShort() & 0xffff; + nextBlock = getMediumInt(tempBuffer); + currentIndex = tempBuffer.get() & 0xff; + } + + if ((archiveId != currentFile && archiveId <= 65535) || chunk != currentChunk || id != currentIndex) { + return null; + } + if (nextBlock < 0 || nextBlock > data.size() / TOTAL_BLOCK_LEN) { + return null; + } + + fileBuffer.put(tempBuffer); + remaining -= blockSize; + block = nextBlock; + chunk++; + } + return (byte[]) fileBuffer.flip().array(); + } catch (Exception ex) { + return null; + } + } + } + + + private static int getMediumInt(ByteBuffer buffer) { + return ((buffer.get() & 0xff) << 16) | ((buffer.get() & 0xff) << 8) | + (buffer.get() & 0xff); + } + + private static void putMediumInt(ByteBuffer buffer, int val) { + 
buffer.put((byte) (val >> 16)); + buffer.put((byte) (val >> 8)); + buffer.put((byte) val); + } + + public boolean putArchive(Archive archive) { + return putArchiveData(archive.getId(), archive.getData()); + } + + public boolean putArchiveData(int id, byte[] archive) { + ByteBuffer buffer = ByteBuffer.wrap(archive); + boolean done = putArchiveData(id, buffer, archive.length, true); + if(!done) + done = putArchiveData(id, buffer, archive.length, false); + return done; + } + + public boolean putArchiveData(int archiveId, ByteBuffer archive, int size, boolean exists) { + synchronized(data) { + try { + int block; + if (exists) { + if (archiveId * IDX_BLOCK_LEN + IDX_BLOCK_LEN > index.size()) { + return false; + } + + tempBuffer.position(0).limit(IDX_BLOCK_LEN); + index.read(tempBuffer, archiveId * IDX_BLOCK_LEN); + tempBuffer.flip().position(3); + block = getMediumInt(tempBuffer); + + if (block <= 0 || block > data.size() / TOTAL_BLOCK_LEN) { + return false; + } + } else { + block = (int) (data.size() + TOTAL_BLOCK_LEN - 1) / TOTAL_BLOCK_LEN; + if (block == 0) { + block = 1; + } + } + + tempBuffer.position(0); + putMediumInt(tempBuffer, size); + putMediumInt(tempBuffer, block); + tempBuffer.flip(); + index.write(tempBuffer, archiveId * IDX_BLOCK_LEN); + + int remaining = size; + int chunk = 0; + int blockLen = !newProtocol || archiveId <= 0xffff ? BLOCK_LEN : EXPANDED_BLOCK_LEN; + int headerLen = !newProtocol || archiveId <= 0xffff ? 
HEADER_LEN : EXPANDED_HEADER_LEN; + while (remaining > 0) { + int nextBlock = 0; + if (exists) { + tempBuffer.position(0).limit(headerLen); + data.read(tempBuffer, block * TOTAL_BLOCK_LEN); + tempBuffer.flip(); + + int currentFile, currentChunk, currentIndex; + if (!newProtocol || archiveId <= 0xffff) { + currentFile = tempBuffer.getShort() & 0xffff; + currentChunk = tempBuffer.getShort() & 0xffff; + nextBlock = getMediumInt(tempBuffer); + currentIndex = tempBuffer.get() & 0xff; + } else { + currentFile = tempBuffer.getInt(); + currentChunk = tempBuffer.getShort() & 0xffff; + nextBlock = getMediumInt(tempBuffer); + currentIndex = tempBuffer.get() & 0xff; + } + + if ((archiveId != currentFile && archiveId <= 65535)|| chunk != currentChunk || id != currentIndex) { + return false; + } + if (nextBlock < 0 || nextBlock > data.size() / TOTAL_BLOCK_LEN) { + return false; + } + } + + if (nextBlock == 0) { + exists = false; + nextBlock = (int) ((data.size() + TOTAL_BLOCK_LEN - 1) / TOTAL_BLOCK_LEN); + if (nextBlock == 0) { + nextBlock = 1; + } + if (nextBlock == block) { + nextBlock++; + } + } + + if (remaining <= blockLen) { + nextBlock = 0; + } + tempBuffer.position(0).limit(TOTAL_BLOCK_LEN); + if (!newProtocol || archiveId <= 0xffff) { + tempBuffer.putShort((short) archiveId); + tempBuffer.putShort((short) chunk); + putMediumInt(tempBuffer, nextBlock); + tempBuffer.put((byte) id); + } else { + tempBuffer.putInt(archiveId); + tempBuffer.putShort((short) chunk); + putMediumInt(tempBuffer, nextBlock); + tempBuffer.put((byte) id); + } + + int blockSize = remaining > blockLen ? 
blockLen : remaining; + archive.limit(archive.position() + blockSize); + tempBuffer.put(archive); + tempBuffer.flip(); + + data.write(tempBuffer, block * TOTAL_BLOCK_LEN); + remaining -= blockSize; + block = nextBlock; + chunk++; + } + + return true; + } catch (Exception ex) { + return false; + } + } + } + + + public int getId() { + return id; + } + + public int getArchivesCount() throws IOException { + synchronized(index) { + return (int) (index.size()/6); + } + } +} diff --git a/Tools/Cache Editor/src/com/alex/store/ReferenceTable.java b/Tools/Cache Editor/src/com/alex/store/ReferenceTable.java new file mode 100644 index 000000000..e1e82f7b4 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/ReferenceTable.java @@ -0,0 +1,241 @@ +package com.alex.store; + +import java.util.Arrays; + +import com.alex.io.InputStream; +import com.alex.io.OutputStream; + + + + +public final class ReferenceTable { + + private Archive archive; + private int revision; + private boolean named; + private boolean usesWhirpool; + private ArchiveReference[] archives; + private int[] validArchiveIds; + + //editing + private boolean updatedRevision; + private boolean needsArchivesSort; + + protected ReferenceTable(Archive archive) { + this.archive = archive; + decodeHeader(); + } + + public void setKeys(int[] keys) { + archive.setKeys(keys); + } + + public int[] getKeys() { + return archive.getKeys(); + } + + public void sortArchives() { + Arrays.sort(validArchiveIds); + needsArchivesSort = false; + } + + public void addEmptyArchiveReference(int archiveId) { + needsArchivesSort = true; + int[] newValidArchiveIds = Arrays.copyOf(validArchiveIds, validArchiveIds.length+1); + newValidArchiveIds[newValidArchiveIds.length-1] = archiveId; + validArchiveIds = newValidArchiveIds; + ArchiveReference reference; + if(archives.length <= archiveId) { + ArchiveReference[] newArchives = Arrays.copyOf(archives, archiveId+1); + reference = newArchives[archiveId] = new ArchiveReference(); + archives = 
newArchives; + }else + reference = archives[archiveId] = new ArchiveReference(); + reference.reset(); + } + + public void sortTable() { + if(needsArchivesSort) + sortArchives(); + for(int index = 0; index < validArchiveIds.length; index++) { + ArchiveReference archive = archives[validArchiveIds[index]]; + if(archive.isNeedsFilesSort()) + archive.sortFiles(); + } + } + + public Object[] encodeHeader(MainFile mainFile) { + OutputStream stream = new OutputStream(); + int protocol = getProtocol(); + stream.writeByte(protocol); + if(protocol >= 6) + stream.writeInt(revision); + stream.writeByte((named ? 0x1 : 0) | (usesWhirpool ? 0x2 : 0)); + if(protocol >= 7) + stream.writeBigSmart(validArchiveIds.length); + else + stream.writeShort(validArchiveIds.length); + for(int index = 0; index < validArchiveIds.length; index++) { + int offset = validArchiveIds[index]; + if(index != 0) + offset -= validArchiveIds[index-1]; + if(protocol >= 7) + stream.writeBigSmart(offset); + else + stream.writeShort(offset); + } + if(named) + for(int index = 0; index < validArchiveIds.length; index++) + stream.writeInt(archives[validArchiveIds[index]].getNameHash()); + if(usesWhirpool) + for(int index = 0; index < validArchiveIds.length; index++) + stream.writeBytes(archives[validArchiveIds[index]].getWhirpool()); + for(int index = 0; index < validArchiveIds.length; index++) + stream.writeInt(archives[validArchiveIds[index]].getCRC()); + for(int index = 0; index < validArchiveIds.length; index++) + stream.writeInt(archives[validArchiveIds[index]].getRevision()); + for(int index = 0; index < validArchiveIds.length; index++) { + int value = archives[validArchiveIds[index]].getValidFileIds().length; + if(protocol >= 7) + stream.writeBigSmart(value); + else + stream.writeShort(value); + } + for(int index = 0; index < validArchiveIds.length; index++) { + ArchiveReference archive = archives[validArchiveIds[index]]; + for(int index2 = 0; index2 < archive.getValidFileIds().length; index2++) { + int 
offset = archive.getValidFileIds()[index2]; + if(index2 != 0) + offset -= archive.getValidFileIds()[index2-1]; + if(protocol >= 7) + stream.writeBigSmart(offset); + else + stream.writeShort(offset); + } + } + if(named) { + for(int index = 0; index < validArchiveIds.length; index++) { + ArchiveReference archive = archives[validArchiveIds[index]]; + for(int index2 = 0; index2 < archive.getValidFileIds().length; index2++) + stream.writeInt(archive.getFiles()[archive.getValidFileIds()[index2]].getNameHash()); + } + } + byte[] data = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(data, 0, data.length); + return archive.editNoRevision(data, mainFile); + } + + public int getProtocol() { + if(archives.length > 65535) + return 7; + for(int index = 0; index < validArchiveIds.length; index++) { + if(index > 0) + if(validArchiveIds[index] - validArchiveIds[index-1] > 65535) + return 7; + if(archives[validArchiveIds[index]].getValidFileIds().length > 65535) + return 7; + } + return revision == 0 ? 5 : 6; + } + + public void setRevision(int revision) { + updatedRevision = true; + this.revision = revision; + } + + public void updateRevision() { + if(updatedRevision) + return; + revision++; + updatedRevision = true; + } + + private void decodeHeader() { + InputStream stream = new InputStream(archive.getData()); + int protocol = stream.readUnsignedByte(); + if (protocol < 5 || protocol > 7) + throw new RuntimeException("INVALID PROTOCOL"); + if(protocol >= 6) + revision = stream.readInt(); + int hash = stream.readUnsignedByte(); + named = (0x1 & hash) != 0; + usesWhirpool = (0x2 & hash) != 0; + int validArchivesCount = protocol >= 7 ? stream.readBigSmart() : stream.readUnsignedShort(); + validArchiveIds = new int[validArchivesCount]; + int lastArchiveId = 0; + int biggestArchiveId = 0; + for(int index = 0; index < validArchivesCount; index++) { + int archiveId = lastArchiveId += protocol >= 7 ? 
stream.readBigSmart() : stream.readUnsignedShort(); + if(archiveId > biggestArchiveId) + biggestArchiveId = archiveId; + validArchiveIds[index] = archiveId; + } + archives = new ArchiveReference[biggestArchiveId+1]; + for(int index = 0; index < validArchivesCount; index++) + archives[validArchiveIds[index]] = new ArchiveReference(); + if(named) + for(int index = 0; index < validArchivesCount; index++) + archives[validArchiveIds[index]].setNameHash(stream.readInt()); + if(usesWhirpool) { + for(int index = 0; index < validArchivesCount; index++) { + byte[] whirpool = new byte[64]; + stream.getBytes(whirpool, 0, 64); + archives[validArchiveIds[index]].setWhirpool(whirpool); + } + } + for(int index = 0; index < validArchivesCount; index++) + archives[validArchiveIds[index]].setCrc(stream.readInt()); + for(int index = 0; index < validArchivesCount; index++) + archives[validArchiveIds[index]].setRevision(stream.readInt()); + for(int index = 0; index < validArchivesCount; index++) + archives[validArchiveIds[index]].setValidFileIds(new int[protocol >= 7 ? stream.readBigSmart() : stream.readUnsignedShort()]); + for(int index = 0; index < validArchivesCount; index++) { + int lastFileId = 0; + int biggestFileId = 0; + ArchiveReference archive = archives[validArchiveIds[index]]; + for(int index2 = 0; index2 < archive.getValidFileIds().length; index2++) { + int fileId = lastFileId += protocol >= 7 ? 
stream.readBigSmart() : stream.readUnsignedShort(); + if(fileId > biggestFileId) + biggestFileId = fileId; + archive.getValidFileIds()[index2] = fileId; + } + archive.setFiles(new FileReference[biggestFileId+1]); + for(int index2 = 0; index2 < archive.getValidFileIds().length; index2++) + archive.getFiles()[archive.getValidFileIds()[index2]] = new FileReference(); + } + if(named) { + for(int index = 0; index < validArchivesCount; index++) { + ArchiveReference archive = archives[validArchiveIds[index]]; + for(int index2 = 0; index2 < archive.getValidFileIds().length; index2++) + archive.getFiles()[archive.getValidFileIds()[index2]].setNameHash(stream.readInt()); + } + } + } + + public int getRevision() { + return revision; + } + + public ArchiveReference[] getArchives() { + return archives; + } + + public int[] getValidArchiveIds() { + return validArchiveIds; + } + + public boolean isNamed() { + return named; + } + + + public boolean usesWhirpool() { + return usesWhirpool; + } + + public int getCompression() { + return archive.getCompression(); + } + +} diff --git a/Tools/Cache Editor/src/com/alex/store/Store.java b/Tools/Cache Editor/src/com/alex/store/Store.java new file mode 100644 index 000000000..09f562023 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/store/Store.java @@ -0,0 +1,149 @@ +package com.alex.store; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.math.BigInteger; +import java.util.Arrays; + +import com.alex.io.OutputStream; +import com.alex.util.whirlpool.Whirlpool; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public final class Store { + + private Index[] indexes; + private MainFile index255; + private String path; + private RandomAccessFile data; + private boolean newProtocol; + + public Store(String path) throws IOException { + this(path, Constants.CLIENT_BUILD >= 704); + } + + public Store(String path, boolean newProtocol) throws IOException { + 
this(path, newProtocol, null); + } + + public Store(String path, boolean newProtocol, int[][] keys) throws IOException { + this.path = path; + this.newProtocol = newProtocol; + data = new RandomAccessFile(path + "main_file_cache.dat2", "rw"); + index255 = new MainFile(255, data, new RandomAccessFile(path + "main_file_cache.idx255", "rw"), newProtocol); + int idxsCount = index255.getArchivesCount(); + indexes = new Index[idxsCount]; + for (int id = 0; id < idxsCount; id++) { + Index index = new Index(index255, new MainFile(id, data, new RandomAccessFile(path + "main_file_cache.idx" + id, "rw"), newProtocol), keys == null ? null : keys[id]); + if (index.getTable() == null) + continue; + indexes[id] = index; + } + } + + public final byte[] generateIndex255Archive255Current(BigInteger grab_server_private_exponent, BigInteger grab_server_modulus) { + OutputStream stream = new OutputStream(); + stream.writeByte(getIndexes().length); + for (int index = 0; index < getIndexes().length; index++) { + if (getIndexes()[index] == null) { + stream.writeInt(0); + stream.writeInt(0); + stream.writeBytes(new byte[64]); + continue; + } + stream.writeInt(getIndexes()[index].getCRC()); + stream.writeInt(getIndexes()[index].getTable().getRevision()); + stream.writeBytes(getIndexes()[index].getWhirlpool()); + if (Constants.ENCRYPTED_CACHE) { + // custom protection, encryption of tables addition, by me + // dragonkk ofc + if (getIndexes()[index].getKeys() != null) + for (int key : getIndexes()[index].getKeys()) + stream.writeInt(key); + else + for (int i = 0; i < 4; i++) + stream.writeInt(0); + } + } + byte[] archive = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archive, 0, archive.length); + + OutputStream hashStream = new OutputStream(65); + hashStream.writeByte(0); + hashStream.writeBytes(Whirlpool.getHash(archive, 0, archive.length)); + byte[] hash = new byte[hashStream.getOffset()]; + hashStream.setOffset(0); + hashStream.getBytes(hash, 0, hash.length); + 
if (grab_server_private_exponent != null && grab_server_modulus != null) + hash = Utils.cryptRSA(hash, grab_server_private_exponent, grab_server_modulus); + stream.writeBytes(hash); + archive = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archive, 0, archive.length); + return archive; + } + + @SuppressWarnings("unused") + public byte[] generateIndex255Archive255() { + return Constants.CLIENT_BUILD < 614 ? generateIndex255Archive255Outdated() : generateIndex255Archive255Current(null, null); + } + + /* + * old code + */ + public byte[] generateIndex255Archive255Outdated() { + OutputStream stream = new OutputStream(indexes.length * 8); + for (int index = 0; index < indexes.length; index++) { + if (indexes[index] == null) { + stream.writeInt(0); + stream.writeInt(0); + continue; + } + stream.writeInt(indexes[index].getCRC()); + stream.writeInt(indexes[index].getTable().getRevision()); + } + byte[] archive = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archive, 0, archive.length); + return archive; + } + + public Index[] getIndexes() { + return indexes; + } + + public MainFile getIndex255() { + return index255; + } + + /* + * returns index + */ + public int addIndex(boolean named, boolean usesWhirpool, int tableCompression) throws IOException { + int id = indexes.length; + Index[] newIndexes = Arrays.copyOf(indexes, indexes.length + 1); + resetIndex(id, newIndexes, named, usesWhirpool, tableCompression); + indexes = newIndexes; + return id; + } + + public void resetIndex(int id, boolean named, boolean usesWhirpool, int tableCompression) throws FileNotFoundException, IOException { + resetIndex(id, indexes, named, usesWhirpool, tableCompression); + } + + public void resetIndex(int id, Index[] indexes, boolean named, boolean usesWhirpool, int tableCompression) throws FileNotFoundException, IOException { + OutputStream stream = new OutputStream(4); + stream.writeByte(5); + stream.writeByte((named ? 
0x1 : 0) | (usesWhirpool ? 0x2 : 0)); + stream.writeShort(0); + byte[] archiveData = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(archiveData, 0, archiveData.length); + Archive archive = new Archive(id, tableCompression, -1, archiveData); + index255.putArchiveData(id, archive.compress()); + indexes[id] = new Index(index255, new MainFile(id, data, new RandomAccessFile(path + "main_file_cache.idx" + id, "rw"), newProtocol), null); + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ArchiveValidation.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ArchiveValidation.java new file mode 100644 index 000000000..14419efc9 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ArchiveValidation.java @@ -0,0 +1,45 @@ +package com.alex.tools.clientCacheUpdater; + +import java.io.IOException; +import java.util.Random; + +import com.alex.store.Archive; +import com.alex.store.ArchiveReference; +import com.alex.store.Index; +import com.alex.store.Store; + +public class ArchiveValidation { + + /** + * @param args + * @throws IOException + */ + public static void main(String[] args) throws IOException { + Store rscache = new Store("498/"); + for(int i = 0; i < rscache.getIndexes().length; i++) { + if(i == 5) + continue; + Index index = rscache.getIndexes()[i]; + System.out.println("checking index: "+i); + for(int archiveId : index.getTable().getValidArchiveIds()) { + Archive archive = index.getArchive(archiveId); + if(archive == null) { + System.out.println("Missing:: "+i+", "+archiveId); + continue; + } + ArchiveReference reference = index.getTable().getArchives()[archiveId]; + if(archive.getRevision() != reference.getRevision() ) { + System.out.println("corrupted: "+i+", "+archiveId); + } + } + } + } + + public static int[] generateKeys() { + int[] keys = new int[4]; + for (int index = 0; index < keys.length; index++) + keys[index] = new Random().nextInt(); + return keys; + } + +} diff 
--git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditor.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditor.java new file mode 100644 index 000000000..0c56fa13d --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditor.java @@ -0,0 +1,272 @@ +package com.alex.tools.clientCacheUpdater; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import javax.imageio.ImageIO; +import javax.imageio.stream.ImageOutputStream; + +import com.alex.loaders.items.ItemDefinitions; +import com.alex.store.Index; +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class CacheEditor { + + public static byte[] getBytesFromFile(File file) throws IOException { + InputStream is = new FileInputStream(file); + + // Get the size of the file + long length = file.length(); + + // You cannot create an array using a long type. + // It needs to be an int type. + // Before converting to an int type, check + // to ensure that file is not larger than Integer.MAX_VALUE. 
+ if (length > Integer.MAX_VALUE) { + // File is too large + } + + // Create the byte array to hold the data + byte[] bytes = new byte[(int)length]; + + // Read in the bytes + int offset = 0; + int numRead = 0; + while (offset < bytes.length + && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + offset += numRead; + } + + // Ensure all the bytes have been read in + if (offset < bytes.length) { + throw new IOException("Could not completely read file "+file.getName()); + } + + // Close the input stream and return bytes + is.close(); + return bytes; + } + + public static int packCustomModel(Store cache, byte[] data) { + //recommended id 80000+ since rs uses all ids till 66000 + int archiveId = cache.getIndexes()[19].getLastArchiveId()+1; + if(cache.getIndexes()[19].putFile(archiveId, 0, data)) + return archiveId; + System.out.println("Failing packing model "+archiveId); + return -1; + } + + public static void packCustomItems(Store cache) throws IOException { + int modelID = packCustomModel(cache, getBytesFromFile(new File("44590.dat"))); + if (modelID == -1) { + System.err.println("Error! 
Model id =-1!"); + return; + } + ItemDefinitions donatorCape = ItemDefinitions.getItemDefinition(cache, 9747); + donatorCape.setName("Dragon Claws"); + donatorCape.femaleEquipModelId1 = modelID; + donatorCape.maleEquipModelId1 = modelID; + donatorCape.invModelId = modelID; + donatorCape.resetModelColors(); + packCustomItem(cache, 29999, donatorCape); + } + + public static void packCustomItem(Store cache, int id, ItemDefinitions def) { + cache.getIndexes()[19].putFile(id >>> 8, 0xff & id, def.encode()); + + } + + + + /* + * divides bg + */ + public static void divideBackgrounds() throws IOException { + BufferedImage background = ImageIO.read(new File("718/sprites/bg.jpg")); + int id = 4139; + int sx = background.getWidth() / 4; + int sy = background.getHeight() / 2; + for(int y = 0; y < 2; y++) { + for(int x = 0; x < 4; x++) { + BufferedImage part = background.getSubimage(x * sx, y * sy, sx, sy); + ImageIO.write(part, "gif", new File("718/sprites/bg/"+(id++)+".gif")); + } + } + BufferedImage load = ImageIO.read(new File("718/sprites/load.png")); + id = 3769; + sx = load.getWidth() / 2; + sy = load.getHeight() / 2; + for(int y = 0; y < 2; y++) { + for(int x = 0; x < 2; x++) { + BufferedImage part = load.getSubimage(x * sx, y * sy, sx, sy); + ImageIO.write(part, "png", new File("718/sprites/load/"+id+".png")); + ImageIO.write(part, "gif", new File("718/sprites/load/"+(id++)+".gif")); + } + } + } + + public static byte[] getImage(File file) throws IOException { + ImageOutputStream stream = ImageIO.createImageOutputStream(file); + byte[] data = new byte[(int) stream.length()]; + stream.read(data); + return data; + } + + public static void main(String[] args) throws IOException { + packCustomItems(new Store("./498/")); + /*boolean beta = false; + boolean addNewItemDefinitions = false; //only needed once + boolean divideBackgrounds = false; //only needed once + if(divideBackgrounds) + divideBackgrounds(); + Store rscache = new Store(beta ? 
"718/rsCacheBeta/" : "718/rscache/"); + Store cache = new Store(beta ? "718/cacheBeta/" : "718/cache/"); + boolean result; + cache.resetIndex(7, false, false, Constants.GZIP_COMPRESSION); + for(int i = 0; i < cache.getIndexes().length; i++) { + if(i != 3 //interfaces + && i != 5 //maps + && i != 12) //client scripts + { + result = cache.getIndexes()[i].packIndex(rscache, true); + System.out.println("Packed index archives: "+i+", "+result); + } + } + if(addNewItemDefinitions) { + System.out.println("Packing old item definitions..."); + Store cache667 = new Store("cache667/", false); + int currentSize = 30000;//Utils.getItemDefinitionsSize(cache); + int oldSize = Utils.getItemDefinitionsSize(cache667); + for(int i = currentSize ; i < currentSize+oldSize; i++) { + int newItemId = i; + int oldItemId = i - currentSize; + cache.getIndexes()[19].putFile(newItemId >>> 8, 0xff & newItemId, Constants.GZIP_COMPRESSION, cache667.getIndexes()[19].getFile(oldItemId >>> 8, 0xff & oldItemId), null, false, false, -1, -1); + } + result = cache.getIndexes()[19].rewriteTable(); + System.out.println("Packed old item definitions: "+result); + } + + /*System.out.println("Packing custom items..."); + packCustomItems(cache); + + System.out.println("Adding new interfaces..."); + for(int i = cache.getIndexes()[3].getLastArchiveId()+1; i <= rscache.getIndexes()[3].getLastArchiveId(); i++) { + if(i == 548 || i == 746) + continue; + if(rscache.getIndexes()[3].archiveExists(i)) + cache.getIndexes()[3].putArchive(i, rscache, false, false); + } + result = cache.getIndexes()[3].rewriteTable(); + System.out.println("Packed new interfaces: "+result);*/ + + //System.out.println("Adding custom sprites..."); + + //adds icons + //IndexedColorImageFile iconsFile = new IndexedColorImageFile(cache, 1455, 0); + //BufferedImage icon = ImageIO.read(new File("1455.png")); + //System.out.println("Added icon: "+iconsFile.addImage(icon)+"."); + //BufferedImage icon2 = ImageIO.read(new File("1455f.png")); + 
//System.out.println("Added icon2: "+iconsFile.addImage(icon2)+"."); + //BufferedImage icon3 = ImageIO.read(new File("crown_green.gif")); + //System.out.println("Added icon3: "+iconsFile.addImage(icon3)+"."); + //BufferedImage icon4 = ImageIO.read(new File("1455_11.png")); + //System.out.println("Added icon4: "+iconsFile.addImage(icon4)+"."); + //result = cache.getIndexes()[8].putFile(1455, 0, Constants.GZIP_COMPRESSION, iconsFile.encodeFile(), null, false, false, -1, -1); + //System.out.println("Added icons: "+result); + + //result = cache.getIndexes()[8].putFile(2173, 0, Constants.GZIP_COMPRESSION, + //new IndexedColorImageFile(ImageIO.read(new File("2173.png"))).encodeFile() + //, null, false, false, -1, -1); + //System.out.println("Added matrix flag: "+result); + + //result = cache.getIndexes()[8].putFile(2498, 0, Constants.GZIP_COMPRESSION, + //new IndexedColorImageFile(ImageIO.read(new File("718/sprites/logo.png"))).encodeFile() + //, null, false, false, -1, -1); + //System.out.println("Added matrix logo: "+result); + + //Login Background + /* + for(int i = 4139; i <= 4146; i++) { + result = cache.getIndexes()[8].putFile(i, 0, Constants.GZIP_COMPRESSION, + new IndexedColorImageFile(ImageIO.read(new File("718/sprites/bg/"+i+".png"))).encodeFile() + , null, false, false, -1, -1); + } + System.out.println("Added noregret background: "+result); + *s + //Loading Background + for(int i = 0; i < 4; i++) { + int realid = 3769 + i; + byte[] sprite = new IndexedColorImageFile(ImageIO.read(new File("718/sprites/load/"+realid+".gif"))).encodeFile(); + byte[] image = getImage(new File("718/sprites/load/"+realid+".png")); + + int[] ids = new int[] {3769 + i + , 3779 + i + , 3783 + (i >= 2 ? (i-2) : i + 2) + , 8494 + (i >= 2 ? (i-2) : i + 2) + , 8498 + (i >= 2 ? 
(i-2) : i + 2)}; + for(int id : ids) { + result = cache.getIndexes()[8].putFile(id, 0, Constants.GZIP_COMPRESSION, sprite, null, false, false, -1, -1); + result = cache.getIndexes()[32].putFile(id, 0, Constants.GZIP_COMPRESSION, image, null, false, false, -1, -1); + result = cache.getIndexes()[34].putFile(id, 0, Constants.GZIP_COMPRESSION, image, null, false, false, -1, -1); + } + } + //System.out.println("Added Loading background: "+result); + + result = cache.getIndexes()[8].rewriteTable(); + result = cache.getIndexes()[32].rewriteTable(); + result = cache.getIndexes()[34].rewriteTable(); + System.out.println("Added custom sprites: "+result); + + /*RSXteas.loadUnpackedXteas(); + System.out.println("Updating Maps."); + for(int regionId = 0; regionId < 30000; regionId++) { + int regionX = (regionId >> 8) * 64; + int regionY = (regionId & 0xff) * 64; + String name = "m"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + byte[] data = rscache.getIndexes()[5].getFile(rscache.getIndexes()[5].getArchiveId(name)); + if(data != null) { + result = addMapFile(cache.getIndexes()[5], name, data); + System.out.println(name+", "+result); + } + name = "um"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rscache.getIndexes()[5].getFile(rscache.getIndexes()[5].getArchiveId(name)); + if(data != null) { + result = addMapFile(cache.getIndexes()[5], name, data); + System.out.println(name+", "+result); + } + int[] xteas = RSXteas.getXteas(regionId); + name = "l"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rscache.getIndexes()[5].getFile(rscache.getIndexes()[5].getArchiveId(name), 0, xteas); + if(data != null) { + result = addMapFile(cache.getIndexes()[5], name, data); + System.out.println(name+", "+result); + } + name = "ul"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rscache.getIndexes()[5].getFile(rscache.getIndexes()[5].getArchiveId(name), 0, xteas); + if(data != null) { + result = addMapFile(cache.getIndexes()[5], name, data); + 
System.out.println(name+", "+result); + } + name = "n"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rscache.getIndexes()[5].getFile(rscache.getIndexes()[5].getArchiveId(name), 0); + if(data != null) { + result = addMapFile(cache.getIndexes()[5], name, data); + System.out.println(name+", "+result); + } + } + result = cache.getIndexes()[5].rewriteTable(); + System.out.println("Updated maps: "+result);*/ + } + + public static boolean addMapFile(Index index, String name, byte[] data) { + int archiveId = index.getArchiveId(name); + if(archiveId == -1) + archiveId = index.getTable().getValidArchiveIds().length; + return index.putFile(archiveId, 0, Constants.GZIP_COMPRESSION, data, null, false, false, Utils.getNameHash(name), -1); + } +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditormodels.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditormodels.java new file mode 100644 index 000000000..09ccec131 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CacheEditormodels.java @@ -0,0 +1,162 @@ +package com.alex.tools.clientCacheUpdater; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import javax.imageio.ImageIO; +import javax.imageio.stream.ImageOutputStream; + +import com.alex.loaders.images.IndexedColorImageFile; +import com.alex.loaders.items.ItemDefinitions; +import com.alex.store.Index; +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class CacheEditormodels { + + public static byte[] getBytesFromFile(File file) throws IOException { + InputStream is = new FileInputStream(file); + + // Get the size of the file + long length = file.length(); + + // You cannot create an array using a long type. + // It needs to be an int type. 
+ // Before converting to an int type, check + // to ensure that file is not larger than Integer.MAX_VALUE. + if (length > Integer.MAX_VALUE) { + // File is too large + } + + // Create the byte array to hold the data + byte[] bytes = new byte[(int)length]; + + // Read in the bytes + int offset = 0; + int numRead = 0; + while (offset < bytes.length + && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + offset += numRead; + } + + // Ensure all the bytes have been read in + if (offset < bytes.length) { + throw new IOException("Could not completely read file "+file.getName()); + } + + // Close the input stream and return bytes + is.close(); + return bytes; + } + + public static int packCustomModel(Store cache, byte[] data) { + //recommended id 80000+ since rs uses all ids till 66000 + int archiveId = cache.getIndexes()[7].getLastArchiveId()+1; + if(cache.getIndexes()[7].putFile(archiveId, 0, data)) + return archiveId; + System.out.println("Failing packing model "+archiveId); + return -1; + } + + public static void packCustomItems(Store cache) throws IOException { + int modelID = packCustomModel(cache, getBytesFromFile(new File("pkcapefinalb.dat"))); + ItemDefinitions pkCape = ItemDefinitions.getItemDefinition(cache, 9747); + pkCape.setName("PK Cape"); + //donatorCape.getInventoryOptions()[2] = "Customise"; + pkCape.femaleEquipModelId1 = modelID; + pkCape.maleEquipModelId1 = modelID; + pkCape.invModelId = modelID; + pkCape.resetModelColors(); + packCustomItem(cache, 30000, pkCape); + + /*int wearModelID = packCustomModel(cache, getBytesFromFile(new File("718/lightSaber/wear.dat"))); + int invModelID = packCustomModel(cache, getBytesFromFile(new File("718/lightSaber/inv.dat"))); + ItemDefinitions lightSaber = ItemDefinitions.getItemDefinition(cache, 2402); + lightSaber.setName("Light Saber"); + lightSaber.getInventoryOptions()[2] = "Customise"; + lightSaber.femaleEquipModelId1 = wearModelID; + lightSaber.maleEquipModelId1 = wearModelID; + 
lightSaber.invModelId = invModelID; + lightSaber.resetModelColors(); + packCustomItem(cache, 29998, lightSaber);*/ + } + + public static void packCustomItem(Store cache, int id, ItemDefinitions def) { + cache.getIndexes()[19].putFile(id >>> 8, 0xff & id, def.encode()); + + } + + + + /* + * divides bg + */ + public static void divideBackgrounds() throws IOException { + BufferedImage background = ImageIO.read(new File("718/sprites/bg.jpg")); + int id = 4139; + int sx = background.getWidth() / 4; + int sy = background.getHeight() / 2; + for(int y = 0; y < 2; y++) { + for(int x = 0; x < 4; x++) { + BufferedImage part = background.getSubimage(x * sx, y * sy, sx, sy); + ImageIO.write(part, "gif", new File("718/sprites/bg/"+(id++)+".gif")); + } + } + BufferedImage load = ImageIO.read(new File("718/sprites/load.png")); + id = 3769; + sx = load.getWidth() / 2; + sy = load.getHeight() / 2; + for(int y = 0; y < 2; y++) { + for(int x = 0; x < 2; x++) { + BufferedImage part = load.getSubimage(x * sx, y * sy, sx, sy); + ImageIO.write(part, "png", new File("718/sprites/load/"+id+".png")); + ImageIO.write(part, "gif", new File("718/sprites/load/"+(id++)+".gif")); + } + } + } + + public static byte[] getImage(File file) throws IOException { + ImageOutputStream stream = ImageIO.createImageOutputStream(file); + byte[] data = new byte[(int) stream.length()]; + stream.read(data); + return data; + } + + public static void main(String[] args) throws IOException { + boolean beta = false; + boolean addNewItemDefinitions = false; //only needed once + boolean divideBackgrounds = false; //only needed once + if(divideBackgrounds) + divideBackgrounds(); + Store rscache = new Store(beta ? "718/rsCacheBeta/" : "718/rscache/"); + Store cache = new Store(beta ? 
"718/cacheBeta/" : "718/cache/"); + boolean result; + cache.resetIndex(7, false, false, Constants.GZIP_COMPRESSION); + for(int i = 0; i < cache.getIndexes().length; i++) { + if(i != 3 //interfaces + && i != 5 //maps + && i != 12) //client scripts + { + result = cache.getIndexes()[i].packIndex(rscache, true); + System.out.println("Packed index archives: "+i+", "+result); + } + } + if(addNewItemDefinitions) { + System.out.println("Packing old item definitions..."); + Store cache667 = new Store("cache667/", false); + int currentSize = 30000;//Utils.getItemDefinitionsSize(cache); + int oldSize = Utils.getItemDefinitionsSize(cache667); + for(int i = currentSize ; i < currentSize+oldSize; i++) { + int newItemId = i; + int oldItemId = i - currentSize; + cache.getIndexes()[19].putFile(newItemId >>> 8, 0xff & newItemId, Constants.GZIP_COMPRESSION, cache667.getIndexes()[19].getFile(oldItemId >>> 8, 0xff & oldItemId), null, false, false, -1, -1); + } + result = cache.getIndexes()[19].rewriteTable(); + System.out.println("Packed old item definitions: "+result); + } + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CheckMap.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CheckMap.java new file mode 100644 index 000000000..535974e99 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CheckMap.java @@ -0,0 +1,114 @@ +package com.alex.tools.clientCacheUpdater; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Arrays; +import java.util.Random; + +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class CheckMap { + + /** + * @param args + * @throws IOException + */ + public static void main(String[] args) throws IOException { + /* OriginalXteas.init(); + int count = 1; + Store cache = new Store("cache667_2/", false, CACHE_TABLE_KEYS); + Store mapsFrom = new Store("newCache/", 
false); + for(int regionId = 0; regionId < 30000; regionId++) { + int regionX = (regionId >> 8) * 64; + int regionY = (regionId & 0xff) * 64; + String name = "l" + + ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + int archiveId = cache.getIndexes()[5].getArchiveId(name); + if(archiveId != -1) + continue; + int archiveId2 = mapsFrom.getIndexes()[5].getArchiveId(name); + if(archiveId2 == -1) + continue; + boolean pass = passArchive(regionId, mapsFrom, cache, name, 5, null, OriginalXteas.getXteas(regionId)); + if(pass) { + System.out.println("count: "+(count++)+", region: "+regionId); + } + //else + + } + cache.getIndexes()[5].rewriteTable(); + cache.getIndexes()[5].resetCachedFiles();*/ + + Store cache = new Store("cache667_2/", false, null); + double land = 0; + double map = 0; + for(int regionId = 0; regionId < 30000; regionId++) { + int regionX = (regionId >> 8) * 64; + int regionY = (regionId & 0xff) * 64; + String name1 = "l" + + ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + String name2 = "m" + + ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + if(cache.getIndexes()[5].getArchiveId(name1) != -1) + land ++; + if(cache.getIndexes()[5].getArchiveId(name2) != -1) + map ++; + } + System.out.println("land: "+land+", newMaps: "+map); + double perc = land * 100 / map ; + System.out.println( perc + "% complete!"); + } + + + public static boolean passArchive(int regionId, Store store1, Store store2, String nameHash, int i, int[] keys1, int[] keys2) { + if(keys2 != null) + System.out.println(keys2); + int archiveId = store1.getIndexes()[i].getArchiveId(nameHash); + if(archiveId == -1) + return false; + int oldArchiveId = store2.getIndexes()[i].getArchiveId(nameHash); + if(oldArchiveId == -1) + oldArchiveId = store2.getIndexes()[i].getLastArchiveId()+1; + byte[] data = store1.getIndexes()[i].getFile(archiveId, 0, keys1); + if(data == null) + return false; + try { + boolean pass = store2.getIndexes()[i].putFile(oldArchiveId, 0, Constants.GZIP_COMPRESSION, 
data, keys2, false, false, Utils.getNameHash(nameHash), -1); + if(!pass) + return false; + int[] keys = writeKeys(regionId); + return store2.getIndexes()[i].encryptArchive(oldArchiveId, keys2, keys, false, false); + }catch(Error e) { + return false; + }catch(Exception e) { + e.printStackTrace(); + return false; + } + } + + + public static int[] generateKeys() { + int[] keys = new int[4]; + for (int index = 0; index < keys.length; index++) + keys[index] = new Random().nextInt(); + return keys; + + } + + public static int[] writeKeys(int regionId) throws IOException { + BufferedWriter writer = new BufferedWriter(new FileWriter("cache667_protected/keys/"+regionId+".txt")); + int[] keys = generateKeys(); + for (int index = 0; index < keys.length; index++) { + writer.write("" + keys[index]); + writer.newLine(); + writer.flush(); + } + System.out.println("Region: "+regionId+", "+Arrays.toString(keys)); + return keys; + + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CopyCache.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CopyCache.java new file mode 100644 index 000000000..ed964295a --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/CopyCache.java @@ -0,0 +1,27 @@ +package com.alex.tools.clientCacheUpdater; + +import java.io.IOException; + +import com.alex.store.Index; +import com.alex.store.Store; +import com.alex.utils.Constants; + +public class CopyCache { + + /** + * @param args + * @throws IOException + */ + public static void main(String[] args) throws IOException { + Store cache = new Store("./498/"); + Store newCache = new Store("./498_out/"); + for(int i = 0; i < cache.getIndexes().length; i++) { + Index index = cache.getIndexes()[i]; + newCache.addIndex(index.getTable().isNamed(), index.getTable().usesWhirpool(), Constants.GZIP_COMPRESSION); + newCache.getIndexes()[i].packIndex(cache); + newCache.getIndexes()[i].getTable().setRevision(cache.getIndexes()[i].getTable().getRevision()); + 
newCache.getIndexes()[i].rewriteTable(); + } + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/OriginalXteas.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/OriginalXteas.java new file mode 100644 index 000000000..855aa2bea --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/OriginalXteas.java @@ -0,0 +1,58 @@ +package com.alex.tools.clientCacheUpdater; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.io.IOException; +import java.util.HashMap; + +public final class OriginalXteas { + + public final static HashMap mapContainersXteas = new HashMap(); + + + public static final int[] getXteas(int regionId) { + return mapContainersXteas.get(regionId); + } + public static void init() { + loadUnpackedXteas(); + } + + + public static final void delete() { + + } + + public static final void loadUnpackedXteas() { + try { + File unpacked = new File("cache667_protected/keys"); + File[] xteasFiles = unpacked.listFiles(); + for (File region : xteasFiles) { + String name = region.getName(); + if (!name.contains(".txt")) { + region.delete(); + continue; + } + int regionId = Short.parseShort(name.replace(".txt", "")); + if(regionId <= 0) { + region.delete(); + continue; + } + BufferedReader in = new BufferedReader(new FileReader(region)); + final int[] xteas = new int[4]; + for (int index = 0; index < 4; index++) { + xteas[index] = Integer.parseInt(in.readLine()); + } + mapContainersXteas.put(regionId, xteas); + in.close(); + } + } catch (IOException e) { + e.printStackTrace(); + } + } + + private OriginalXteas() { + + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ProtectCache.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ProtectCache.java new file mode 100644 index 000000000..11aad3d35 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/ProtectCache.java @@ -0,0 +1,92 @@ +package 
com.alex.tools.clientCacheUpdater; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import java.io.IOException; +import java.util.Arrays; +import java.util.Random; + +import com.alex.store.Index; +import com.alex.store.Store; + +public class ProtectCache { + + + + public static void main(String[] args) throws IOException { + boolean encryptMaps = true; + boolean encryptTables = false; + Store cache = new Store("718/cacheEncrypted/"); + + + if(encryptMaps) { + Store rscache = new Store("718/rscache/"); + Index index = cache.getIndexes()[5]; + Index rsIndex = rscache.getIndexes()[5]; + for(int regionId = 0; regionId < 25000; regionId++) { + int regionX = (regionId >> 8) * 64; + int regionY = (regionId & 0xff) * 64; + + String name; + int[] keys = null; + name = "l"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + if(rsIndex.getFile(rsIndex.getArchiveId(name), 0) == null) {//not backgground file + int archiveId = index.getArchiveId(name); + if(archiveId != -1) { + keys = writeKeys(regionId); + if(!index.encryptArchive(archiveId, null, keys, false, false)) + throw new RuntimeException("FAIL"); + } + } + name = "ul"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + if(rsIndex.getFile(rsIndex.getArchiveId(name), 0) == null) {//not backgground file + int archiveId = index.getArchiveId(name); + if(archiveId != -1) { + if(keys == null) + keys = writeKeys(regionId); + if(!index.encryptArchive(archiveId, null, keys, false, false)) + throw new RuntimeException("FAIL"); + } + } + } + index.rewriteTable(); + } + + if(encryptTables) { + int[][] keys = new int[cache.getIndexes().length][]; + for(int i = 0; i < keys.length; i++) { + keys[i] = generateKeys(); + if(cache.getIndexes()[i] == null) + continue; + System.out.println("encrypting idx table: "+i); + cache.getIndexes()[i].setKeys(keys[i]); + cache.getIndexes()[i].rewriteTable(); + } + for(int i = 0; i < keys.length; i++) + System.out.println(Arrays.toString(keys[i])); + } + + } + + + public static int[] 
generateKeys() { + int[] keys = new int[4]; + for (int index = 0; index < keys.length; index++) + keys[index] = new Random().nextInt(); + return keys; + } + + public static int[] writeKeys(int regionId) throws IOException { + BufferedWriter writer = new BufferedWriter(new FileWriter("718/maps/unpacked/"+regionId+".txt")); + int[] keys = generateKeys(); + for (int index = 0; index < keys.length; index++) { + writer.write("" + keys[index]); + writer.newLine(); + writer.flush(); + } + System.out.println("Region: "+regionId+", "+Arrays.toString(keys)); + return keys; + + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/RSXteas.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/RSXteas.java new file mode 100644 index 000000000..cfbf17080 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/RSXteas.java @@ -0,0 +1,67 @@ +package com.alex.tools.clientCacheUpdater; + +import java.io.BufferedReader; +import java.io.File; +import java.io.FileReader; +import java.util.HashMap; + +public final class RSXteas { + + public final static HashMap mapContainersXteas = new HashMap(); + + + public static final int[] getXteas(int regionId) { + return mapContainersXteas.get(regionId); + } + public static void init() { + loadUnpackedXteas(468); + } + + + + public static final void loadUnpackedXteas(int revision) { + try { + File unpacked = new File("xteas" + revision + "/"); + File[] xteasFiles = unpacked.listFiles(); + for (File region : xteasFiles) { + String name = region.getName(); + if (!name.contains(".txt")) { + region.delete(); + continue; + } + int regionId = -1; + try { + regionId = Short.parseShort(name.replace(".txt", "")); + } catch (Throwable t) { + continue; + } + if (regionId <= 0) { + region.delete(); + continue; + } + BufferedReader in = new BufferedReader(new FileReader(region)); + final int[] xteas = new int[4]; + boolean delete = true; + for (int index = 0; index < 4; index++) { + xteas[index] = 
Integer.parseInt(in.readLine()); + if (xteas[index] != 0) { + delete = false; + } + } + in.close(); + if (delete) { + region.delete(); + continue; + } + mapContainersXteas.put(regionId, xteas); + } + } catch (Exception e) { + e.printStackTrace(); + } + } + + private RSXteas() { + + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/SpritesDumper.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/SpritesDumper.java new file mode 100644 index 000000000..3104b467e --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/SpritesDumper.java @@ -0,0 +1,249 @@ +package com.alex.tools.clientCacheUpdater; + +import java.awt.Graphics; +import java.awt.GraphicsConfiguration; +import java.awt.GraphicsDevice; +import java.awt.GraphicsEnvironment; +import java.awt.HeadlessException; +import java.awt.Image; +import java.awt.Toolkit; +import java.awt.Transparency; +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; + +import javax.imageio.ImageIO; +import javax.imageio.stream.ImageOutputStream; +import javax.swing.ImageIcon; + +import com.alex.store.Index; +import com.alex.store.Store; + +public class SpritesDumper { + + + + /*public static void main(String[] args) throws IOException { + Store cache = new Store("cache667_2/", false); + ImagesFile file = new ImagesFile(cache, 498, 0); + file.replaceImage(ImageIO.read(new File("498_0_0.png")), 0); + cache.getIndexes()[8].putFile(2498, 0, file.encodeFile()); + file = new ImagesFile(cache, 2498, 0); + for(int count = 0; count < file.getImages().length; count++) { + String name = ""+498+"_2_"+0+"_"+count; + BufferedImage image = file.getImages()[count]; + if(image == null) { + System.out.println("NULL: "+name); + continue; + } + ImageIO.write(image, "png", new File(name+".png")); + System.out.println(name); + } + }*/ + + /*private static BufferedImage internalResize(BufferedImage source, int destWidth, int destHeight) { + int sourceWidth = 
source.getWidth(); + int sourceHeight = source.getHeight(); + double xScale = ((double) destWidth) / (double) sourceWidth; + double yScale = ((double) destHeight) / (double) sourceHeight; + Graphics2D g2d = null; + + BufferedImage resizedImage = new BufferedImage(destWidth, destHeight, BufferedImage.TRANSLUCENT); + + try { + + g2d = resizedImage.createGraphics(); + + g2d.setRenderingHint(RenderingHints.KEY_ALPHA_INTERPOLATION, RenderingHints.VALUE_ALPHA_INTERPOLATION_QUALITY); + g2d.setRenderingHint(RenderingHints.KEY_ANTIALIASING, RenderingHints.VALUE_ANTIALIAS_ON); + g2d.setRenderingHint(RenderingHints.KEY_COLOR_RENDERING, RenderingHints.VALUE_COLOR_RENDER_QUALITY); + g2d.setRenderingHint(RenderingHints.KEY_DITHERING, RenderingHints.VALUE_DITHER_ENABLE); + g2d.setRenderingHint(RenderingHints.KEY_INTERPOLATION, RenderingHints.VALUE_INTERPOLATION_BICUBIC); + g2d.setRenderingHint(RenderingHints.KEY_RENDERING, RenderingHints.VALUE_RENDER_QUALITY); + + AffineTransform at = AffineTransform.getScaleInstance(xScale, yScale); + + g2d.drawRenderedImage(source, at); + + } finally { + if (g2d != null) + g2d.dispose(); + } + + //doesn't keep the transparency + if (source.getType() == BufferedImage.TYPE_BYTE_INDEXED) { + + BufferedImage indexedImage = new BufferedImage(destWidth, destHeight, BufferedImage.TYPE_BYTE_INDEXED); + + try { + Graphics g = indexedImage.createGraphics(); + g.drawImage(resizedImage, 0, 0, null); + } finally { + if (g != null) + g.dispose(); + } + + return indexedImage; + } + + return resizedImage; + + }*/ + + + /* + * divides backgorund + */ + public static void main2(String[] args) throws IOException { + BufferedImage background = ImageIO.read(new File("bg/matrix.jpg")); + int id = 3769; + + int sx = background.getWidth() / 2; + int sy = background.getHeight() / 2; + + for(int y = 0; y < 2; y++) { + for(int x = 0; x < 2; x++) { + System.out.println("id "+id); + BufferedImage part = background.getSubimage(x * sx, y * sy, sx, sy); + ImageIO.write(part, 
"gif", new File("bg/"+(id++)+".gif")); + + } + } + + } + + public static void main3(String[] args) throws IOException { + Store cache = new Store("cache667_2/", false); + UpdateCache.packLogo(cache); + System.out.println("Adding donator icon..."); + UpdateCache.packDonatorIcon(cache); + System.out.println("Adding Matrix icon..."); + UpdateCache.packMatrixIcon(cache); + /*for(int i = 0; i < 4; i++) { + int realid = 3769 + i; + int id = 3769 + i; + cache.getIndexes()[8].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3779 + i; + cache.getIndexes()[8].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3783 + (i >= 2 ? (i-2) : i + 2); + cache.getIndexes()[8].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3769 + i; + cache.getIndexes()[34].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3779 + i; + cache.getIndexes()[34].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3783 + (i >= 2 ? (i-2) : i + 2); + cache.getIndexes()[34].putFile(id, 0, new ImagesFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3769 + i; + cache.getIndexes()[32].putFile(id, 0, getImage(new File("bg/"+realid+".png"))); + id = 3779 + i; + cache.getIndexes()[32].putFile(id, 0, getImage(new File("bg/"+realid+".png"))); + id = 3783 + (i >= 2 ? 
(i-2) : i + 2); + cache.getIndexes()[32].putFile(id, 0, getImage(new File("bg/"+realid+".png")));; + + + System.out.println("added file: "+i); + }*/ + } + + + public static byte[] getImage(File file) throws IOException { + ImageOutputStream stream = ImageIO.createImageOutputStream(file); + byte[] data = new byte[(int) stream.length()]; + stream.read(data); + return data; + } + public static void main(String[] args) throws IOException { + Store cache = new Store("718/rscache/"); + Index sprites = cache.getIndexes()[32]; + for(int archiveId : sprites.getTable().getValidArchiveIds()) { + for(int fileId : sprites.getTable().getArchives()[archiveId].getValidFileIds()) { + byte[] data = sprites.getFile(archiveId, fileId); + Image image = Toolkit.getDefaultToolkit().createImage(data); + String name = "sprites32/"+archiveId+"_"+fileId; + BufferedImage bi = toBufferedImage(image); + if(bi == null) { + System.out.println("failed "+name); + continue; + } + ImageIO.write(bi, "png", new File(name+".png")); + } + } + } + + // This method returns a buffered image with the contents of an image + public static BufferedImage toBufferedImage(Image image) { + if (image instanceof BufferedImage) { + return (BufferedImage)image; + } + + // This code ensures that all the pixels in the image are loaded + image = new ImageIcon(image).getImage(); + + // Determine if the image has transparent pixels; for this method's + // implementation, see Determining If an Image Has Transparent Pixels + boolean hasAlpha = true;//hasAlpha(image); + + // Create a buffered image with a format that's compatible with the screen + BufferedImage bimage = null; + GraphicsEnvironment ge = GraphicsEnvironment.getLocalGraphicsEnvironment(); + try { + // Determine the type of transparency of the new buffered image + int transparency = Transparency.OPAQUE; + if (hasAlpha) { + transparency = Transparency.BITMASK; + } + + // Create the buffered image + GraphicsDevice gs = ge.getDefaultScreenDevice(); + 
GraphicsConfiguration gc = gs.getDefaultConfiguration(); + if(image.getWidth(null) < 0 || image.getHeight(null) < 0) + return null; + bimage = gc.createCompatibleImage( + image.getWidth(null), image.getHeight(null), transparency); + } catch (HeadlessException e) { + // The system does not have a screen + } + + if (bimage == null) { + // Create a buffered image using the default color model + int type = BufferedImage.TYPE_INT_RGB; + if (hasAlpha) { + type = BufferedImage.TYPE_INT_ARGB; + } + bimage = new BufferedImage(image.getWidth(null), image.getHeight(null), type); + } + + // Copy image to buffered image + Graphics g = bimage.createGraphics(); + + // Paint the image onto the buffered image + g.drawImage(image, 0, 0, null); + g.dispose(); + + return bimage; + } + + + /*public static void main(String[] args) throws IOException { + Store cache = new Store("cache667_2/", false); + Index sprites = cache.getIndexes()[34]; + for(int archiveId : sprites.getTable().getValidArchiveIds()) { + for(int fileId : sprites.getTable().getArchives()[archiveId].getValidFileIds()) { + ImagesFile file = new ImagesFile(cache, 34, archiveId, fileId); + /*if(file.getImages() == null) + continue;*/ + /* for(int count = 0; count < file.getImages().length; count++) { + String name = "sprites34/"+archiveId+"_"+fileId+"_"+count; + BufferedImage image = file.getImages()[count]; + if(image == null) { + System.out.println("NULL: "+name); + continue; + } + ImageIO.write(image, "png", new File(name+".png")); + System.out.println(name); + } + } + } + }*/ + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/UpdateCache.java b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/UpdateCache.java new file mode 100644 index 000000000..6419dbf40 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/clientCacheUpdater/UpdateCache.java @@ -0,0 +1,390 @@ +package com.alex.tools.clientCacheUpdater; + +import java.awt.image.BufferedImage; +import java.io.File; +import 
java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import javax.imageio.ImageIO; + +import com.alex.loaders.images.IndexedColorImageFile; +import com.alex.loaders.items.ItemDefinitions; +import com.alex.store.Index; +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class UpdateCache { + + +/* public static void main(String[] args) throws IOException { + Store rscache = new Store("cache697/"); + Store cache = new Store("cache667_2/", false); + // System.out.println(rscache.getIndexes()[36].getTable().getValidArchiveIds().length); + for(int i = 0; i < rscache.getIndexes()[3].getLastArchiveId(); i++) { + if(i == 548 || i == 746) + continue; + cache.getIndexes()[3].putArchive(i, rscache, false, false); + } + cache.getIndexes()[3].rewriteTable(); + //Interface inter = new Interface(746, rscache); + }*/ + + // Returns the contents of the file in a byte array. + public static byte[] getBytesFromFile(File file) throws IOException { + InputStream is = new FileInputStream(file); + + // Get the size of the file + long length = file.length(); + + // You cannot create an array using a long type. + // It needs to be an int type. + // Before converting to an int type, check + // to ensure that file is not larger than Integer.MAX_VALUE. 
+ if (length > Integer.MAX_VALUE) { + // File is too large + } + + // Create the byte array to hold the data + byte[] bytes = new byte[(int)length]; + + // Read in the bytes + int offset = 0; + int numRead = 0; + while (offset < bytes.length + && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + offset += numRead; + } + + // Ensure all the bytes have been read in + if (offset < bytes.length) { + throw new IOException("Could not completely read file "+file.getName()); + } + + // Close the input stream and return bytes + is.close(); + return bytes; + } + + + public static void main6(String[] args) throws IOException { + Store cache = new Store("cache667_2/", false); + cache.getIndexes()[6].putFile(0, 0, getBytesFromFile(new File("0"))); + } + + + public static void main5(String[] args) throws IOException { + Store rscache = new Store("cache697/"); + Store cache = new Store("cache667_2/", false); + boolean result = false; + //settings + /*cache.getIndexes()[3].putArchive(261, rscache); + System.out.println("Packed skill interface: 261, "+result);*/ + //skills + result = cache.getIndexes()[3].putArchive(320, rscache, false, false); + System.out.println("Packed skill interface: 320, "+result); + /* //equipment + result = cache.getIndexes()[3].putArchive(387, rscache, false, false); + System.out.println("Packed skill interface: 387, "+result);*/ + //inventory + result = cache.getIndexes()[3].putArchive(679, rscache, false, false); + System.out.println("Packed skill interface: 679, "+result); + //attack style bar + // result = cache.getIndexes()[3].putArchive(884, rscache); + // System.out.println("Packed skill interface: 884, "+result); + cache.getIndexes()[3].rewriteTable(); + } + +/* public static void main(String[] args) throws IOException { + Store rscache = new Store("cache697/"); + Store cache = new Store("cache667_2/", false); + cache.getIndexes()[17].packIndex(rscache); + }*/ + + + public static void main555(String[] args) throws IOException { + 
Store cache = new Store("cache667_2/", false); + Store originalCache = new Store("rscache/", false); + cache.addIndex(false, false, Constants.GZIP_COMPRESSION); + for(int i : originalCache.getIndexes()[19].getTable().getValidArchiveIds()) { + System.out.println(i); + for(int i2 : originalCache.getIndexes()[19].getTable().getArchives()[i].getValidFileIds()) { + try { + cache.getIndexes()[37].putFile(i, i2, Constants.GZIP_COMPRESSION, originalCache.getIndexes()[19].getFile(i, i2), null, false, false, -1, -1); + }catch(Throwable e) { + e.printStackTrace(); + } + } + } + cache.getIndexes()[37].rewriteTable(); + //cache.getIndexes()[37].packIndex(19, originalCache, false); + /* System.out.println(ItemDefinitions.getItemDefinition(cache, 4708).maleEquipModelId1); + System.out.println(ItemDefinitions.getItemDefinition(cache, 4708).femaleEquipModelId1); + System.out.println(ItemDefinitions.getItemDefinition(cache, 4708).invModelId);*/ + // Store originalCache = new Store("cache667/", false); + // cache.addIndex(cache.getIndexes()[7].getTable().isNamed(), cache.getIndexes()[7].getTable().usesWhirpool(), Constants.GZIP_COMPRESSION); + + } + + public static void main77(String[] args) throws IOException { + //Store mapcache = new Store("cache667_1/", false); + Store originalCache = new Store("cache667/", false); + Store cache = new Store("cache667_2/", false); + for(int i = 1610; i < 1616; i++) + cache.getIndexes()[17].putFile(i >>> 8, i & 0xff, originalCache.getIndexes()[17].getFile(i >>> 8, i & 0xff)); + + /* cache.getIndexes()[3].putArchive(320, rscache, false, false); + cache.getIndexes()[3].putArchive(667, rscache, false, false); + cache.getIndexes()[3].putArchive(751, rscache, false, false); + cache.getIndexes()[3].rewriteTable();*/ + + /*cache.resetIndex(5, true, mapcache.getIndexes()[5].getTable().usesWhirpool(), Constants.GZIP_COMPRESSION); + + boolean result = cache.getIndexes()[5].packIndex(mapcache, false);*/ + // cache.getIndexes()[8].packIndex(originalCache); + 
// System.out.println("Packed index archives: "+5+", "+result); + } + + public static void packLogo(Store cache) throws IOException { + int id = 2498; + IndexedColorImageFile f = null; + try { + f = new IndexedColorImageFile(ImageIO.read(new File("bg/logo.png"))); + } catch (IOException e) { + e.printStackTrace(); + } + byte[] data = f.encodeFile(); + cache.getIndexes()[8].putFile(id, 0, data); + + //back background + for(int i = 4139; i <= 4146; i++) { + try { + cache.getIndexes()[8].putFile(i, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+i+".gif"))).encodeFile()); + } catch (IOException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + } + + for(int i = 0; i < 4; i++) { + int realid = 3769 + i; + id = 3769 + i; + cache.getIndexes()[8].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3779 + i; + cache.getIndexes()[8].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3783 + (i >= 2 ? (i-2) : i + 2); + cache.getIndexes()[8].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3769 + i; + cache.getIndexes()[34].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3779 + i; + cache.getIndexes()[34].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3783 + (i >= 2 ? (i-2) : i + 2); + cache.getIndexes()[34].putFile(id, 0, new IndexedColorImageFile(ImageIO.read(new File("bg/"+realid+".gif"))).encodeFile()); + id = 3769 + i; + cache.getIndexes()[32].putFile(id, 0, SpritesDumper.getImage(new File("bg/"+realid+".png"))); + id = 3779 + i; + cache.getIndexes()[32].putFile(id, 0, SpritesDumper.getImage(new File("bg/"+realid+".png"))); + id = 3783 + (i >= 2 ? 
(i-2) : i + 2); + cache.getIndexes()[32].putFile(id, 0, SpritesDumper.getImage(new File("bg/"+realid+".png")));; + + + System.out.println("added file: "+i); + } + } + + public static void packDonatorIcon(Store cache) { + int id = 1455; + IndexedColorImageFile f = null; + try { + f = new IndexedColorImageFile(cache, id, 0); + BufferedImage icon = ImageIO.read(new File("1455.png")); + System.out.println("Added icon: "+f.addImage(icon)+"."); + BufferedImage icon2 = ImageIO.read(new File("1455f.png")); + System.out.println("Added icon2: "+f.addImage(icon2)+"."); + BufferedImage icon3 = ImageIO.read(new File("crown_green.gif")); + System.out.println("Added icon3: "+f.addImage(icon3)+"."); + BufferedImage icon4 = ImageIO.read(new File("1455_11.png")); + System.out.println("Added icon4: "+f.addImage(icon4)+"."); + + } catch (IOException e) { + e.printStackTrace(); + } + cache.getIndexes()[8].putFile(id, 0, f.encodeFile()); + } + + public static void packMatrixIcon(Store cache) { + int id = 2173; + IndexedColorImageFile f = null; + try { + f = new IndexedColorImageFile(ImageIO.read(new File("2173.png"))); + } catch (IOException e) { + e.printStackTrace(); + } + byte[] data = f.encodeFile(); + cache.getIndexes()[8].putFile(id, 0, data); + } + + + public static int packCustomModel(Store cache, byte[] data) { + //recommended id 80000+ since rs uses all ids till 66000 + int archiveId = cache.getIndexes()[7].getLastArchiveId()+1; + if(cache.getIndexes()[7].putFile(archiveId, 0, data)) + return archiveId; + System.out.println("Failing packing model "+archiveId); + return -1; + } + + public static void packCustomItems(Store cache) throws IOException { + int modelID = packCustomModel(cache, getBytesFromFile(new File("donatorCape.dat"))); + System.out.println("model id "+modelID); + ItemDefinitions donatorCape = ItemDefinitions.getItemDefinition(cache, 9747); + donatorCape.setName("Donator cape"); + //donatorCape.getInventoryOptions()[2] = "Customise"; + 
donatorCape.femaleEquipModelId1 = modelID; + donatorCape.maleEquipModelId1 = modelID; + donatorCape.invModelId = modelID; + donatorCape.resetModelColors(); + // donatorCape.changeModelColor(); + int newId = 29999; + System.out.println(cache.getIndexes()[19].putFile(newId >>> 8, 0xff & newId, donatorCape.encode())); + } + + public static void main(String[] args) throws IOException { + boolean updateJustMaps = false; + boolean addOldItems = true; + Store rscache = new Store("cache697/"); + Store cache = new Store("cache667_2/", false); + Store originalCache = new Store("cache667/", false); + if(addOldItems) + cache.resetIndex(19, false, false, Constants.GZIP_COMPRESSION); + + cache.resetIndex(7, false, false, Constants.GZIP_COMPRESSION); + cache.getIndexes()[7].packIndex(originalCache); + + if(!updateJustMaps) { + for(int i = 0; i < cache.getIndexes().length; i++) { + if(i != 3 //interfaces + && i != 5 //maps + && i != 12 //client scripts + && i != 33 + && i != 30) //native libs + { + boolean result = cache.getIndexes()[i].packIndex(rscache, true); + System.out.println("Packed index archives: "+i+", "+result); + } + } + System.out.println("Adding logo..."); + packLogo(cache); + System.out.println("Adding donator icon..."); + packDonatorIcon(cache); + System.out.println("Adding Matrix icon..."); + packMatrixIcon(cache); + System.out.println("Adding Custom items..."); + packCustomItems(cache); + if(addOldItems) { + System.out.println("Adding back old item definitions..."); + int currentSize = 30000;//Utils.getItemDefinitionsSize(cache); + System.out.println(currentSize); + int oldSize = Utils.getItemDefinitionsSize(originalCache); + for(int i = currentSize ; i < currentSize+oldSize; i++) { + int newItemId = i; + int oldItemId = i - currentSize; + cache.getIndexes()[19].putFile(newItemId >>> 8, 0xff & newItemId, Constants.GZIP_COMPRESSION, originalCache.getIndexes()[19].getFile(oldItemId >>> 8, 0xff & oldItemId), null, false, false, -1, -1); + } + 
cache.getIndexes()[19].rewriteTable(); + } + System.out.println("Recovering Client Script Maps..."); + for(int i : originalCache.getIndexes()[17].getTable().getValidArchiveIds()) { + for(int i2 : originalCache.getIndexes()[17].getTable().getArchives()[i].getValidFileIds()) { + if(!cache.getIndexes()[17].fileExists(i, i2) || cache.getIndexes()[17].getFile(i, i2).length == 1) { + cache.getIndexes()[17].putFile(i, i2, originalCache.getIndexes()[17].getFile(i, i2)); + } + } + } + System.out.println("Recovering Bank Client Script Maps..."); + for(int i = 1610; i < 1616; i++) + cache.getIndexes()[17].putFile(i >>> 8, i & 0xff, originalCache.getIndexes()[17].getFile(i >>> 8, i & 0xff)); + + System.out.println("Adding new interfaces..."); + + //adds new interfaces + for(int i = cache.getIndexes()[3].getLastArchiveId()+1; i <= rscache.getIndexes()[3].getLastArchiveId(); i++) { + if(rscache.getIndexes()[3].archiveExists(i)) + cache.getIndexes()[3].putArchive(i, rscache, false, false); + } + cache.getIndexes()[3].putArchive(320, rscache, false, false); + cache.getIndexes()[3].putArchive(751, rscache, false, false); + cache.getIndexes()[3].putArchive(1092, rscache, false, false); + + boolean result = cache.getIndexes()[3].rewriteTable(); + cache.getIndexes()[8].rewriteTable(); + System.out.println("Packed new interfaces: "+result); + } + boolean result; + // int oldRevision = cache.getIndexes()[5].getTable().getRevision(); + // cache.resetIndex(5, true, cache.getIndexes()[5].getTable().usesWhirpool(), Constants.GZIP_COMPRESSION); + Index index = cache.getIndexes()[5]; + // index.getTable().setRevision(oldRevision+1); + Index rsIndex = rscache.getIndexes()[5]; + + Index originalIndex = originalCache.getIndexes()[5]; + RSXteas.loadUnpackedXteas(679); + //OriginalXteas.loadUnpackedXteas(); + + System.out.println("Updating Maps."); + for(int regionId = 0; regionId < 30000; regionId++) { + int regionX = (regionId >> 8) * 64; + int regionY = (regionId & 0xff) * 64; + String name = 
"m"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + byte[] data = rsIndex.getFile(rsIndex.getArchiveId(name)); + if(data == null) + data = originalIndex.getFile(originalIndex.getArchiveId(name)); + if(data != null) { + result = addMapFile(index, name, data); + System.out.println(name+", "+result); + } + name = "um"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rsIndex.getFile(rsIndex.getArchiveId(name)); + if(data == null) + data = originalIndex.getFile(originalIndex.getArchiveId(name)); + if(data != null) { + result = addMapFile(index, name, data); + System.out.println(name+", "+result); + } + int[] xteas = RSXteas.getXteas(regionId); + name = "l"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rsIndex.getFile(rsIndex.getArchiveId(name), 0, xteas); + /*if(data == null) + data = originalIndex.getFile(originalIndex.getArchiveId(name), 0, OriginalXteas.getXteas(regionId)); + */if(data != null) { + result = addMapFile(index, name, data); + System.out.println(name+", "+result); + } + name = "ul"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rsIndex.getFile(rsIndex.getArchiveId(name), 0, xteas); + /*if(data == null) + data = originalIndex.getFile(originalIndex.getArchiveId(name), 0, OriginalXteas.getXteas(regionId)); + */if(data != null) { + result = addMapFile(index, name, data); + System.out.println(name+", "+result); + } + name = "n"+ ((regionX >> 3) / 8) + "_" + ((regionY >> 3) / 8); + data = rsIndex.getFile(rsIndex.getArchiveId(name), 0); + if(data == null) + data = originalIndex.getFile(originalIndex.getArchiveId(name), 0); + if(data != null) { + result = addMapFile(index, name, data); + System.out.println(name+", "+result); + } + } + index.rewriteTable(); + } + + public static boolean addMapFile(Index index, String name, byte[] data) { + int archiveId = index.getArchiveId(name); + if(archiveId == -1) + archiveId = index.getTable().getValidArchiveIds().length; + return index.putFile(archiveId, 0, 
Constants.GZIP_COMPRESSION, data, null, false, false, Utils.getNameHash(name), -1); + } +} diff --git a/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/Application.java b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/Application.java new file mode 100644 index 000000000..25b2d02b6 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/Application.java @@ -0,0 +1,220 @@ +package com.alex.tools.itemsDefsEditor; + +import java.awt.BorderLayout; +import java.awt.EventQueue; +import java.awt.Font; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.io.IOException; + +import javax.swing.DefaultListModel; +import javax.swing.JButton; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JList; +import javax.swing.JPanel; +import javax.swing.JScrollPane; +import javax.swing.JTabbedPane; +import javax.swing.ListSelectionModel; +import javax.swing.UIManager; +import javax.swing.UIManager.LookAndFeelInfo; + +import com.alex.loaders.items.ItemDefinitions; +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class Application { + + public static Store STORE; + private JFrame frmCacheEditorV; + + /** + * Launch the application. + * @throws IOException + */ + public static void main(String[] args) throws IOException { + STORE = new Store("cache/", false); + EventQueue.invokeLater(new Runnable() { + public void run() { + try { + Application window = new Application(); + window.frmCacheEditorV.setVisible(true); + } catch (Exception e) { + e.printStackTrace(); + } + } + }); + } + + /** + * Create the application. 
+ */ + public Application() { + initialize(); + } + + private void setLook() { + boolean found = false; + for (LookAndFeelInfo info : UIManager.getInstalledLookAndFeels()) { + if(info.getName().equals("Nimbus")) + try { + UIManager.setLookAndFeel(info.getClassName()); + found = true; + }catch(Exception e) { + e.printStackTrace(); + } + } + if(!found) + try { + UIManager.setLookAndFeel(UIManager.getSystemLookAndFeelClassName()); + }catch(Exception e) { + e.printStackTrace(); + } + } + + private JList itemsList; + private DefaultListModel itemsListmodel; + + /** + * Initialize the contents of the frame. + */ + private void initialize() { + setLook(); + frmCacheEditorV = new JFrame(); + frmCacheEditorV.setTitle("Cache Editor V0.1"); + frmCacheEditorV.setBounds(100, 100, 352, 435); + frmCacheEditorV.setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + + JTabbedPane tabbedPane = new JTabbedPane(JTabbedPane.TOP); + frmCacheEditorV.getContentPane().add(tabbedPane, BorderLayout.CENTER); + + JPanel panel = new JPanel(); + tabbedPane.addTab("Main", null, panel, null); + panel.setLayout(null); + + JButton btnGenerateUkeys = new JButton("Generate Ukeys (614- Client Builts)"); + btnGenerateUkeys.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + byte[] ukeys = Utils.getArchivePacketData(255, 255, STORE.generateIndex255Archive255Outdated()); + new GeneratedUkeys(getFrame(), ukeys); + } + }); + btnGenerateUkeys.setBounds(33, 64, 257, 28); + panel.add(btnGenerateUkeys); + + JLabel lblCreatedByAlexalso = new JLabel("Created By Alex(Also named Dragonkk)"); + lblCreatedByAlexalso.setFont(new Font("Tekton Pro Ext", Font.PLAIN, 15)); + lblCreatedByAlexalso.setBounds(6, 290, 322, 46); + panel.add(lblCreatedByAlexalso); + + JPanel panel_1 = new JPanel(); + tabbedPane.addTab("Items", null, panel_1, null); + panel_1.setLayout(null); + itemsListmodel = new DefaultListModel(); + itemsList = new JList(itemsListmodel); + 
itemsList.setSelectionMode(ListSelectionModel.SINGLE_INTERVAL_SELECTION); + itemsList.setLayoutOrientation(JList.VERTICAL); + itemsList.setVisibleRowCount(-1); + JScrollPane itemListscrollPane = new JScrollPane(itemsList); + itemListscrollPane.setBounds(34, 49, 155, 254); + panel_1.add(itemListscrollPane); + + JButton btnEdit = new JButton("Edit"); + final Application app = this; + btnEdit.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + ItemDefinitions defs = itemsList.getSelectedValue(); + if(defs == null) + return; + new ItemDefsEditor(app, defs); + } + }); + btnEdit.setBounds(201, 48, 90, 28); + panel_1.add(btnEdit); + + JButton btnAdd = new JButton("Add"); + btnAdd.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + + new ItemDefsEditor(app, new ItemDefinitions(STORE, Utils.getItemDefinitionsSize(STORE) , false)); + } + }); + btnAdd.setBounds(201, 88, 90, 28); + panel_1.add(btnAdd); + + JButton btnRemove = new JButton("Remove"); + btnRemove.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + ItemDefinitions defs = itemsList.getSelectedValue(); + if(defs == null) + return; + STORE.getIndexes()[Constants.ITEM_DEFINITIONS_INDEX].removeFile(defs.getArchiveId(), defs.getFileId()); + removeItemDefs(defs); + } + }); + btnRemove.setBounds(201, 128, 90, 28); + panel_1.add(btnRemove); + + JLabel label = new JLabel("Cached Items:"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 18)); + label.setBounds(34, 18, 155, 21); + panel_1.add(label); + + JButton btnDuplicate = new JButton("Clone"); + btnDuplicate.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + ItemDefinitions defs = itemsList.getSelectedValue(); + if(defs == null) + return; + defs = (ItemDefinitions) defs.clone(); + if(defs == null) + return; + defs.id = Utils.getItemDefinitionsSize(STORE); + new ItemDefsEditor(app, defs); + } + }); + 
btnDuplicate.setBounds(201, 168, 90, 28); + panel_1.add(btnDuplicate); + addAllItems(); + } + + public void addAllItems() { + for(int id = 0; id < Utils.getItemDefinitionsSize(STORE) - 22314; id++) { + addItemDefs(ItemDefinitions.getItemDefinition(STORE, id)); + } + } + + public void addItemDefs(final ItemDefinitions defs) { + EventQueue.invokeLater(new Runnable() { + public void run() { + itemsListmodel.addElement(defs); + } + }); + } + + public void updateItemDefs(final ItemDefinitions defs) { + EventQueue.invokeLater(new Runnable() { + public void run() { + int index = itemsListmodel.indexOf(defs); + if(index == -1) + itemsListmodel.addElement(defs); + else + itemsListmodel.setElementAt(defs, index); + } + }); + } + + public void removeItemDefs(final ItemDefinitions defs) { + EventQueue.invokeLater(new Runnable() { + public void run() { + itemsListmodel.removeElement(defs); + } + }); + } + + public JFrame getFrame() { + return frmCacheEditorV; + } + +} diff --git a/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/GeneratedUkeys.java b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/GeneratedUkeys.java new file mode 100644 index 000000000..da61674c8 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/GeneratedUkeys.java @@ -0,0 +1,48 @@ +package com.alex.tools.itemsDefsEditor; + +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; +import java.util.Arrays; + +import javax.swing.JButton; +import javax.swing.JDialog; +import javax.swing.JEditorPane; +import javax.swing.JFrame; + +@SuppressWarnings("serial") +public class GeneratedUkeys extends JDialog { + + + public GeneratedUkeys(JFrame frame, byte[] ukeys) { + super(frame, "Ukeys", true); + setBounds(100, 100, 450, 300); + getContentPane().setLayout(null); + + final JEditorPane editorPane = new JEditorPane(); + editorPane.setText(Arrays.toString(ukeys)); + editorPane.setBounds(6, 6, 420, 213); + getContentPane().add(editorPane); + + JButton btnClose = new 
JButton("Close"); + btnClose.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + dispose(); + } + }); + btnClose.setBounds(101, 221, 90, 28); + getContentPane().add(btnClose); + + JButton btnCopy = new JButton("Copy"); + btnCopy.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + ActionEvent nev = new ActionEvent(editorPane, ActionEvent.ACTION_PERFORMED, "copy"); + editorPane.selectAll(); + editorPane.getActionMap().get(nev.getActionCommand()).actionPerformed(nev); + } + }); + btnCopy.setBounds(6, 221, 90, 28); + getContentPane().add(btnCopy); + setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE); + setVisible(true); + } +} diff --git a/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/ItemDefsEditor.java b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/ItemDefsEditor.java new file mode 100644 index 000000000..9004820a4 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/tools/itemsDefsEditor/ItemDefsEditor.java @@ -0,0 +1,404 @@ +package com.alex.tools.itemsDefsEditor; + +import java.awt.BorderLayout; +import java.awt.FlowLayout; +import java.awt.Font; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; + +import javax.swing.JButton; +import javax.swing.JCheckBox; +import javax.swing.JDialog; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JTextField; +import javax.swing.border.EmptyBorder; + +import com.alex.loaders.items.ItemDefinitions; + +@SuppressWarnings("serial") +public class ItemDefsEditor extends JDialog { + + private final JPanel contentPanel = new JPanel(); + private ItemDefinitions defs; + private Application application; + private JTextField modelIDField; + private JTextField nameField; + private JTextField modelZoomField; + private JTextField groundOptionsField; + private JTextField inventoryOptionsField; + private JTextField femaleModelId2Field; + private JTextField maleModelId1Field; + private JTextField 
maleModelId2Field; + private JTextField maleModelId3Field; + private JTextField femaleModelId1Field; + private JTextField femaleModelId3Field; + private JTextField teamIdField; + private JTextField notedItemIdField; + private JTextField switchNotedItemField; + private JTextField lendedItemIdField; + private JTextField switchLendedItemField; + private JTextField changedModelColorsField; + private JTextField changedTextureColorsField; + private JCheckBox membersOnlyCheck; + + public void save() { + + //inv + defs.setInvModelId(Integer.valueOf(modelIDField.getText())); + defs.setName(nameField.getText()); + defs.setInvModelZoom(Integer.valueOf(modelZoomField.getText())); + String[] groundOptions = groundOptionsField.getText().split(";"); + for(int i = 0; i < defs.getGroundOptions().length; i++) + defs.getGroundOptions()[i] = groundOptions[i].equals("null") ? null : groundOptions[i]; + String[] invOptions = inventoryOptionsField.getText().split(";"); + for(int i = 0; i < defs.getInventoryOptions().length; i++) + defs.getInventoryOptions()[i] = invOptions[i].equals("null") ? 
null : invOptions[i]; + + //wearing + + defs.maleEquipModelId1 = Integer.valueOf(maleModelId1Field.getText()); + defs.maleEquipModelId2 = Integer.valueOf(maleModelId2Field.getText()); + defs.maleEquipModelId3 = Integer.valueOf(maleModelId3Field.getText()); + + defs.femaleEquipModelId1 = Integer.valueOf(femaleModelId1Field.getText()); + defs.femaleEquipModelId2 = Integer.valueOf(femaleModelId2Field.getText()); + defs.femaleEquipModelId3 = Integer.valueOf(femaleModelId3Field.getText()); + defs.teamId = Integer.valueOf(teamIdField.getText()); + + //others + defs.notedItemId = Integer.valueOf(notedItemIdField.getText()); + defs.switchNoteItemId = Integer.valueOf(switchNotedItemField.getText()); + defs.lendedItemId = Integer.valueOf(lendedItemIdField.getText()); + defs.switchLendItemId = Integer.valueOf(switchLendedItemField.getText()); + defs.resetModelColors(); + if(!changedModelColorsField.getText().equals("")) { + String[] splitedModelColorsTexts = changedModelColorsField.getText().split(";"); + for(String t : splitedModelColorsTexts) { + String[] editedColor = t.split("="); + defs.changeModelColor(Integer.valueOf(editedColor[0]), Integer.valueOf(editedColor[1])); + } + } + defs.resetTextureColors(); + if(!changedTextureColorsField.getText().equals("")) { + String[] splitedTextureColorsTexts = changedTextureColorsField.getText().split(";"); + for(String t : splitedTextureColorsTexts) { + String[] editedColor = t.split("="); + defs.changeTextureColor(Integer.valueOf(editedColor[0]), Integer.valueOf(editedColor[1])); + } + } + defs.membersOnly = membersOnlyCheck.isSelected(); + defs.write(Application.STORE); + application.updateItemDefs(defs); + } + + /** + * Create the dialog. 
+ */ + public ItemDefsEditor(Application application, ItemDefinitions defs) { + super(application.getFrame(), "Item Definitions Editor", true); + this.defs = defs; + this.application = application; + setBounds(100, 100, 912, 354); + getContentPane().setLayout(new BorderLayout()); + contentPanel.setBorder(new EmptyBorder(5, 5, 5, 5)); + getContentPane().add(contentPanel, BorderLayout.CENTER); + contentPanel.setLayout(null); + + JLabel lblNewLabel = new JLabel("Model ID:"); + lblNewLabel.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + lblNewLabel.setBounds(6, 43, 81, 21); + contentPanel.add(lblNewLabel); + { + modelIDField = new JTextField(); + modelIDField.setBounds(139, 40, 122, 28); + contentPanel.add(modelIDField); + modelIDField.setColumns(10); + modelIDField.setText(""+defs.getInvModelId()); + } + { + JLabel label = new JLabel("Name:"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label.setBounds(6, 76, 81, 21); + contentPanel.add(label); + } + { + nameField = new JTextField(); + nameField.setBounds(139, 73, 122, 28); + contentPanel.add(nameField); + nameField.setColumns(10); + nameField.setText(defs.getName()); + } + { + JLabel label = new JLabel("Model Zoom:"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label.setBounds(6, 109, 95, 21); + contentPanel.add(label); + } + { + modelZoomField = new JTextField(); + modelZoomField.setBounds(139, 106, 122, 28); + contentPanel.add(modelZoomField); + modelZoomField.setColumns(10); + modelZoomField.setText(""+defs.getInvModelZoom()); + } + { + JLabel label = new JLabel("Ground Options:"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label.setBounds(6, 142, 108, 21); + contentPanel.add(label); + } + { + groundOptionsField = new JTextField(); + groundOptionsField.setBounds(139, 139, 122, 28); + contentPanel.add(groundOptionsField); + groundOptionsField.setColumns(10); + String text = ""; + for(String option : defs.getGroundOptions()) + text += (option == null ? 
"null" : option)+";"; + groundOptionsField.setText(text); + } + { + JLabel label = new JLabel("Inventory Options:"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label.setBounds(6, 175, 139, 21); + contentPanel.add(label); + } + { + inventoryOptionsField = new JTextField(); + inventoryOptionsField.setBounds(139, 172, 122, 28); + contentPanel.add(inventoryOptionsField); + inventoryOptionsField.setColumns(10); + String text = ""; + for(String option : defs.getInventoryOptions()) + text += (option == null ? "null" : option)+";"; + inventoryOptionsField.setText(text); + } + { + JButton saveButton = new JButton("Save"); + saveButton.setBounds(6, 265, 55, 28); + contentPanel.add(saveButton); + saveButton.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + save(); + dispose(); + } + }); + getRootPane().setDefaultButton(saveButton); + } + { + JButton cancelButton = new JButton("Cancel"); + cancelButton.setBounds(73, 265, 67, 28); + contentPanel.add(cancelButton); + cancelButton.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + dispose(); + } + }); + cancelButton.setActionCommand("Cancel"); + } + + JLabel label = new JLabel("Interface / Droped"); + label.setFont(new Font("Comic Sans MS", Font.PLAIN, 18)); + label.setBounds(6, 6, 205, 21); + contentPanel.add(label); + + JLabel label_1 = new JLabel("Wearing"); + label_1.setFont(new Font("Comic Sans MS", Font.PLAIN, 18)); + label_1.setBounds(273, 6, 205, 21); + contentPanel.add(label_1); + + JLabel label_2 = new JLabel("Male Model ID 1:"); + label_2.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_2.setBounds(273, 43, 131, 21); + contentPanel.add(label_2); + + JLabel label_3 = new JLabel("Male Model ID 2:"); + label_3.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_3.setBounds(273, 76, 131, 21); + contentPanel.add(label_3); + + JLabel label_4 = new JLabel("Male Model ID 3:"); + label_4.setFont(new Font("Comic 
Sans MS", Font.PLAIN, 14)); + label_4.setBounds(273, 112, 131, 21); + contentPanel.add(label_4); + + JLabel label_5 = new JLabel("Female Model ID 1:"); + label_5.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_5.setBounds(273, 145, 131, 21); + contentPanel.add(label_5); + + JLabel label_6 = new JLabel("Female Model ID 2:"); + label_6.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_6.setBounds(273, 175, 131, 21); + contentPanel.add(label_6); + + JLabel label_7 = new JLabel("Female Model ID 3:"); + label_7.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_7.setBounds(273, 208, 131, 21); + contentPanel.add(label_7); + + femaleModelId2Field = new JTextField(); + femaleModelId2Field.setBounds(411, 172, 122, 28); + contentPanel.add(femaleModelId2Field); + femaleModelId2Field.setColumns(10); + femaleModelId2Field.setText(""+defs.femaleEquipModelId2); + + maleModelId1Field = new JTextField(); + maleModelId1Field.setBounds(411, 40, 122, 28); + contentPanel.add(maleModelId1Field); + maleModelId1Field.setColumns(10); + maleModelId1Field.setText(""+defs.maleEquipModelId1); + { + maleModelId2Field = new JTextField(); + maleModelId2Field.setBounds(411, 73, 122, 28); + contentPanel.add(maleModelId2Field); + maleModelId2Field.setColumns(10); + maleModelId2Field.setText(""+defs.maleEquipModelId2); + } + { + maleModelId3Field = new JTextField(); + maleModelId3Field.setBounds(411, 106, 122, 28); + contentPanel.add(maleModelId3Field); + maleModelId3Field.setColumns(10); + maleModelId3Field.setText(""+defs.maleEquipModelId3); + } + { + femaleModelId1Field = new JTextField(); + femaleModelId1Field.setBounds(411, 139, 122, 28); + contentPanel.add(femaleModelId1Field); + femaleModelId1Field.setColumns(10); + femaleModelId1Field.setText(""+defs.femaleEquipModelId1); + } + { + femaleModelId3Field = new JTextField(); + femaleModelId3Field.setBounds(411, 205, 122, 28); + contentPanel.add(femaleModelId3Field); + femaleModelId3Field.setColumns(10); + 
femaleModelId3Field.setText(""+defs.femaleEquipModelId3); + } + { + JLabel label_8 = new JLabel("Team ID:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(273, 241, 131, 21); + contentPanel.add(label_8); + } + { + teamIdField = new JTextField(); + teamIdField.setBounds(411, 238, 122, 28); + contentPanel.add(teamIdField); + teamIdField.setColumns(10); + teamIdField.setText(""+defs.teamId); + } + { + JLabel label_8 = new JLabel("Others"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 18)); + label_8.setBounds(539, 6, 205, 21); + contentPanel.add(label_8); + } + { + JLabel label_8 = new JLabel("Noted Item ID:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 43, 131, 21); + contentPanel.add(label_8); + } + { + JLabel label_8 = new JLabel("Switch Noted Item Id:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 76, 160, 21); + contentPanel.add(label_8); + } + { + JLabel label_8 = new JLabel("Lended Item ID:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 109, 160, 21); + contentPanel.add(label_8); + } + { + JLabel label_8 = new JLabel("Switch Lended Item Id:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 145, 160, 21); + contentPanel.add(label_8); + } + { + notedItemIdField = new JTextField(); + notedItemIdField.setBounds(707, 39, 122, 28); + contentPanel.add(notedItemIdField); + notedItemIdField.setColumns(10); + notedItemIdField.setText(""+defs.notedItemId); + } + { + switchNotedItemField = new JTextField(); + switchNotedItemField.setBounds(707, 73, 122, 28); + contentPanel.add(switchNotedItemField); + switchNotedItemField.setColumns(10); + switchNotedItemField.setText(""+defs.switchNoteItemId); + } + { + lendedItemIdField = new JTextField(); + lendedItemIdField.setBounds(707, 106, 122, 28); + contentPanel.add(lendedItemIdField); + 
lendedItemIdField.setColumns(10); + lendedItemIdField.setText(""+defs.lendedItemId); + } + { + switchLendedItemField = new JTextField(); + switchLendedItemField.setBounds(707, 139, 122, 28); + contentPanel.add(switchLendedItemField); + switchLendedItemField.setColumns(10); + switchLendedItemField.setText(""+defs.switchLendItemId); + } + { + JLabel label_8 = new JLabel("Changed Model Colors:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 175, 160, 21); + contentPanel.add(label_8); + } + { + changedModelColorsField = new JTextField(); + changedModelColorsField.setBounds(707, 172, 122, 28); + contentPanel.add(changedModelColorsField); + changedModelColorsField.setColumns(10); + String text = ""; + if(defs.originalModelColors != null) { + for(int i = 0; i < defs.originalModelColors.length; i++) { + text += defs.originalModelColors[i]+"="+defs.modifiedModelColors[i]+";"; + } + } + changedModelColorsField.setText(text); + } + { + JLabel label_8 = new JLabel("Changed Texture Colors:"); + label_8.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + label_8.setBounds(545, 205, 160, 21); + contentPanel.add(label_8); + } + { + changedTextureColorsField = new JTextField(); + changedTextureColorsField.setBounds(707, 205, 122, 28); + contentPanel.add(changedTextureColorsField); + changedTextureColorsField.setColumns(10); + String text = ""; + if(defs.originalTextureColors != null) { + for(int i = 0; i < defs.originalTextureColors.length; i++) { + text += defs.originalTextureColors[i]+"="+defs.modifiedTextureColors[i]+";"; + } + } + changedTextureColorsField.setText(text); + } + + membersOnlyCheck = new JCheckBox("Members Only"); + membersOnlyCheck.setFont(new Font("Comic Sans MS", Font.PLAIN, 14)); + membersOnlyCheck.setBounds(545, 243, 131, 18); + membersOnlyCheck.setSelected(defs.membersOnly); + contentPanel.add(membersOnlyCheck); + { + JPanel buttonPane = new JPanel(); + buttonPane.setLayout(new FlowLayout(FlowLayout.RIGHT)); + 
getContentPane().add(buttonPane, BorderLayout.SOUTH); + } + + setDefaultCloseOperation(JDialog.DISPOSE_ON_CLOSE); + setVisible(true); + } +} diff --git a/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2BlockEntry.java b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2BlockEntry.java new file mode 100644 index 000000000..4f941a098 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2BlockEntry.java @@ -0,0 +1,56 @@ +package com.alex.util.bzip2; + +public class BZip2BlockEntry { + + boolean aBooleanArray2205[]; + boolean aBooleanArray2213[]; + byte aByte2201; + byte aByteArray2204[]; + byte aByteArray2211[]; + byte aByteArray2212[]; + byte aByteArray2214[]; + byte aByteArray2219[]; + byte aByteArray2224[]; + byte aByteArrayArray2229[][]; + int anInt2202; + int anInt2203; + int anInt2206; + int anInt2207; + int anInt2208; + int anInt2209; + int anInt2215; + int anInt2216; + int anInt2217; + int anInt2221; + int anInt2222; + int anInt2223; + int anInt2225; + int anInt2227; + int anInt2232; + int anIntArray2200[]; + int anIntArray2220[]; + int anIntArray2226[]; + int anIntArray2228[]; + int anIntArrayArray2210[][]; + int anIntArrayArray2218[][]; + int anIntArrayArray2230[][]; + + public BZip2BlockEntry() { + anIntArray2200 = new int[6]; + anInt2203 = 0; + aByteArray2204 = new byte[4096]; + aByteArray2211 = new byte[256]; + aByteArray2214 = new byte[18002]; + aByteArray2219 = new byte[18002]; + anIntArray2220 = new int[257]; + anIntArrayArray2218 = new int[6][258]; + aBooleanArray2205 = new boolean[16]; + aBooleanArray2213 = new boolean[256]; + anInt2209 = 0; + anIntArray2226 = new int[16]; + anIntArrayArray2210 = new int[6][258]; + aByteArrayArray2229 = new byte[6][258]; + anIntArrayArray2230 = new int[6][258]; + anIntArray2228 = new int[256]; + } +} diff --git a/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Compressor.java b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Compressor.java new file mode 100644 index 000000000..2d51b995d --- /dev/null +++ 
b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Compressor.java @@ -0,0 +1,22 @@ +package com.alex.util.bzip2; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; + +import org.apache.tools.bzip2.CBZip2OutputStream; + +public class BZip2Compressor { + + public static final byte[] compress(byte[] data) { + ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream(); + try { + CBZip2OutputStream out = new CBZip2OutputStream(compressedBytes); + out.write(data); + out.close(); + return compressedBytes.toByteArray(); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } +} diff --git a/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Decompressor.java b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Decompressor.java new file mode 100644 index 000000000..8ca0affd0 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/bzip2/BZip2Decompressor.java @@ -0,0 +1,546 @@ +package com.alex.util.bzip2; + + + +public class BZip2Decompressor { + + private static int anIntArray257[]; + private static BZip2BlockEntry entryInstance = new BZip2BlockEntry(); + + public static final void decompress(byte decompressedData[], byte packedData[], int containerSize, int blockSize) { + synchronized (entryInstance) { + entryInstance.aByteArray2224 = packedData; + entryInstance.anInt2209 = blockSize; + entryInstance.aByteArray2212 = decompressedData; + entryInstance.anInt2203 = 0; + entryInstance.anInt2206 = decompressedData.length; + entryInstance.anInt2232 = 0; + entryInstance.anInt2207 = 0; + entryInstance.anInt2217 = 0; + entryInstance.anInt2216 = 0; + method1793(entryInstance); + entryInstance.aByteArray2224 = null; + entryInstance.aByteArray2212 = null; + } + } + + private static final void method1785(BZip2BlockEntry entry) { + entry.anInt2215 = 0; + for (int i = 0; i < 256; i++) { + if (entry.aBooleanArray2213[i]) { + entry.aByteArray2211[entry.anInt2215] = (byte) i; + entry.anInt2215++; + } + } + + } + + private static final void 
method1786(int ai[], int ai1[], int ai2[], + byte abyte0[], int i, int j, int k) { + int l = 0; + for (int i1 = i; i1 <= j; i1++) { + for (int l2 = 0; l2 < k; l2++) { + if (abyte0[l2] == i1) { + ai2[l] = l2; + l++; + } + } + + } + + for (int j1 = 0; j1 < 23; j1++) { + ai1[j1] = 0; + } + + for (int k1 = 0; k1 < k; k1++) { + ai1[abyte0[k1] + 1]++; + } + + for (int l1 = 1; l1 < 23; l1++) { + ai1[l1] += ai1[l1 - 1]; + } + + for (int i2 = 0; i2 < 23; i2++) { + ai[i2] = 0; + } + + int i3 = 0; + for (int j2 = i; j2 <= j; j2++) { + i3 += ai1[j2 + 1] - ai1[j2]; + ai[j2] = i3 - 1; + i3 <<= 1; + } + + for (int k2 = i + 1; k2 <= j; k2++) { + ai1[k2] = (ai[k2 - 1] + 1 << 1) - ai1[k2]; + } + + } + + private static final void method1787(BZip2BlockEntry entry) { + byte byte4 = entry.aByte2201; + int i = entry.anInt2222; + int j = entry.anInt2227; + int k = entry.anInt2221; + int ai[] = anIntArray257; + int l = entry.anInt2208; + byte abyte0[] = entry.aByteArray2212; + int i1 = entry.anInt2203; + int j1 = entry.anInt2206; + int k1 = j1; + int l1 = entry.anInt2225 + 1; + label0: do { + if (i > 0) { + do { + if (j1 == 0) { + break label0; + } + if (i == 1) { + break; + } + abyte0[i1] = byte4; + i--; + i1++; + j1--; + } while (true); + if (j1 == 0) { + i = 1; + break; + } + abyte0[i1] = byte4; + i1++; + j1--; + } + boolean flag = true; + while (flag) { + flag = false; + if (j == l1) { + i = 0; + break label0; + } + byte4 = (byte) k; + l = ai[l]; + byte byte0 = (byte) (l & 0xff); + l >>= 8; + j++; + if (byte0 != k) { + k = byte0; + if (j1 == 0) { + i = 1; + } else { + abyte0[i1] = byte4; + i1++; + j1--; + flag = true; + continue; + } + break label0; + } + if (j != l1) { + continue; + } + if (j1 == 0) { + i = 1; + break label0; + } + abyte0[i1] = byte4; + i1++; + j1--; + flag = true; + } + i = 2; + l = ai[l]; + byte byte1 = (byte) (l & 0xff); + l >>= 8; + if (++j != l1) { + if (byte1 != k) { + k = byte1; + } else { + i = 3; + l = ai[l]; + byte byte2 = (byte) (l & 0xff); + l >>= 8; + if 
(++j != l1) { + if (byte2 != k) { + k = byte2; + } else { + l = ai[l]; + byte byte3 = (byte) (l & 0xff); + l >>= 8; + j++; + i = (byte3 & 0xff) + 4; + l = ai[l]; + k = (byte) (l & 0xff); + l >>= 8; + j++; + } + } + } + } + } while (true); + entry.anInt2216 += k1 - j1; + entry.aByte2201 = byte4; + entry.anInt2222 = i; + entry.anInt2227 = j; + entry.anInt2221 = k; + anIntArray257 = ai; + entry.anInt2208 = l; + entry.aByteArray2212 = abyte0; + entry.anInt2203 = i1; + entry.anInt2206 = j1; + } + + private static final byte method1788(BZip2BlockEntry entry) { + return (byte) method1790(1, entry); + } + + private static final byte method1789(BZip2BlockEntry entry) { + return (byte) method1790(8, entry); + } + + private static final int method1790(int i, BZip2BlockEntry entry) { + int j; + do { + if (entry.anInt2232 >= i) { + int k = entry.anInt2207 >> entry.anInt2232 - i & (1 << i) - 1; + entry.anInt2232 -= i; + j = k; + break; + } + entry.anInt2207 = entry.anInt2207 << 8 + | entry.aByteArray2224[entry.anInt2209] & 0xff; + entry.anInt2232 += 8; + entry.anInt2209++; + entry.anInt2217++; + } while (true); + return j; + } + + public static void clearBlockEntryInstance() { + entryInstance = null; + } + + private static final void method1793(BZip2BlockEntry entry) { + // unused + /* + * boolean flag = false; boolean flag1 = false; boolean flag2 = false; + * boolean flag3 = false; boolean flag4 = false; boolean flag5 = false; + * boolean flag6 = false; boolean flag7 = false; boolean flag8 = false; + * boolean flag9 = false; boolean flag10 = false; boolean flag11 = + * false; boolean flag12 = false; boolean flag13 = false; boolean flag14 + * = false; boolean flag15 = false; boolean flag16 = false; boolean + * flag17 = false; + */ + int j8 = 0; + int ai[] = null; + int ai1[] = null; + int ai2[] = null; + entry.anInt2202 = 1; + if (anIntArray257 == null) { + anIntArray257 = new int[entry.anInt2202 * 0x186a0]; + } + boolean flag18 = true; + while (flag18) { + byte byte0 = 
method1789(entry); + if (byte0 == 23) { + return; + } + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1789(entry); + byte0 = method1788(entry); + entry.anInt2223 = 0; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + byte0 = method1789(entry); + entry.anInt2223 = entry.anInt2223 << 8 | byte0 & 0xff; + for (int j = 0; j < 16; j++) { + byte byte1 = method1788(entry); + if (byte1 == 1) { + entry.aBooleanArray2205[j] = true; + } else { + entry.aBooleanArray2205[j] = false; + } + } + + for (int k = 0; k < 256; k++) { + entry.aBooleanArray2213[k] = false; + } + + for (int l = 0; l < 16; l++) { + if (entry.aBooleanArray2205[l]) { + for (int i3 = 0; i3 < 16; i3++) { + byte byte2 = method1788(entry); + if (byte2 == 1) { + entry.aBooleanArray2213[l * 16 + i3] = true; + } + } + + } + } + + method1785(entry); + int i4 = entry.anInt2215 + 2; + int j4 = method1790(3, entry); + int k4 = method1790(15, entry); + for (int i1 = 0; i1 < k4; i1++) { + int j3 = 0; + do { + byte byte3 = method1788(entry); + if (byte3 == 0) { + break; + } + j3++; + } while (true); + entry.aByteArray2214[i1] = (byte) j3; + } + + byte abyte0[] = new byte[6]; + for (byte byte16 = 0; byte16 < j4; byte16++) { + abyte0[byte16] = byte16; + } + + for (int j1 = 0; j1 < k4; j1++) { + byte byte17 = entry.aByteArray2214[j1]; + byte byte15 = abyte0[byte17]; + for (; byte17 > 0; byte17--) { + abyte0[byte17] = abyte0[byte17 - 1]; + } + + abyte0[0] = byte15; + entry.aByteArray2219[j1] = byte15; + } + + for (int k3 = 0; k3 < j4; k3++) { + int k6 = method1790(5, entry); + for (int k1 = 0; k1 < i4; k1++) { + do { + byte byte4 = method1788(entry); + if (byte4 == 0) { + break; + } + byte4 = 
method1788(entry); + if (byte4 == 0) { + k6++; + } else { + k6--; + } + } while (true); + entry.aByteArrayArray2229[k3][k1] = (byte) k6; + } + + } + + for (int l3 = 0; l3 < j4; l3++) { + byte byte8 = 32; + int i = 0; + for (int l1 = 0; l1 < i4; l1++) { + if (entry.aByteArrayArray2229[l3][l1] > i) { + i = entry.aByteArrayArray2229[l3][l1]; + } + if (entry.aByteArrayArray2229[l3][l1] < byte8) { + byte8 = entry.aByteArrayArray2229[l3][l1]; + } + } + + method1786(entry.anIntArrayArray2230[l3], + entry.anIntArrayArray2218[l3], + entry.anIntArrayArray2210[l3], + entry.aByteArrayArray2229[l3], byte8, i, i4); + entry.anIntArray2200[l3] = byte8; + } + + int l4 = entry.anInt2215 + 1; + int i5 = -1; + int j5 = 0; + for (int i2 = 0; i2 <= 255; i2++) { + entry.anIntArray2228[i2] = 0; + } + + int i9 = 4095; + for (int k8 = 15; k8 >= 0; k8--) { + for (int l8 = 15; l8 >= 0; l8--) { + entry.aByteArray2204[i9] = (byte) (k8 * 16 + l8); + i9--; + } + + entry.anIntArray2226[k8] = i9 + 1; + } + + int l5 = 0; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte12 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte12]; + ai = entry.anIntArrayArray2230[byte12]; + ai2 = entry.anIntArrayArray2210[byte12]; + ai1 = entry.anIntArrayArray2218[byte12]; + } + j5--; + int l6 = j8; + int k7; + byte byte9; + for (k7 = method1790(l6, entry); k7 > ai[l6]; k7 = k7 << 1 | byte9) { + l6++; + byte9 = method1788(entry); + } + + for (int k5 = ai2[k7 - ai1[l6]]; k5 != l4;) { + if (k5 == 0 || k5 == 1) { + int i6 = -1; + int j6 = 1; + do { + if (k5 == 0) { + i6 += j6; + } else if (k5 == 1) { + i6 += 2 * j6; + } + j6 *= 2; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte13 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte13]; + ai = entry.anIntArrayArray2230[byte13]; + ai2 = entry.anIntArrayArray2210[byte13]; + ai1 = entry.anIntArrayArray2218[byte13]; + } + j5--; + int i7 = j8; + int l7; + byte byte10; + for (l7 = method1790(i7, entry); l7 > ai[i7]; l7 = l7 << 1 + | byte10) { + i7++; + byte10 = 
method1788(entry); + } + + k5 = ai2[l7 - ai1[i7]]; + } while (k5 == 0 || k5 == 1); + i6++; + byte byte5 = entry.aByteArray2211[entry.aByteArray2204[entry.anIntArray2226[0]] & 0xff]; + entry.anIntArray2228[byte5 & 0xff] += i6; + for (; i6 > 0; i6--) { + anIntArray257[l5] = byte5 & 0xff; + l5++; + } + + } else { + int i11 = k5 - 1; + byte byte6; + if (i11 < 16) { + int i10 = entry.anIntArray2226[0]; + byte6 = entry.aByteArray2204[i10 + i11]; + for (; i11 > 3; i11 -= 4) { + int j11 = i10 + i11; + entry.aByteArray2204[j11] = entry.aByteArray2204[j11 - 1]; + entry.aByteArray2204[j11 - 1] = entry.aByteArray2204[j11 - 2]; + entry.aByteArray2204[j11 - 2] = entry.aByteArray2204[j11 - 3]; + entry.aByteArray2204[j11 - 3] = entry.aByteArray2204[j11 - 4]; + } + + for (; i11 > 0; i11--) { + entry.aByteArray2204[i10 + i11] = entry.aByteArray2204[(i10 + i11) - 1]; + } + + entry.aByteArray2204[i10] = byte6; + } else { + int k10 = i11 / 16; + int l10 = i11 % 16; + int j10 = entry.anIntArray2226[k10] + l10; + byte6 = entry.aByteArray2204[j10]; + for (; j10 > entry.anIntArray2226[k10]; j10--) { + entry.aByteArray2204[j10] = entry.aByteArray2204[j10 - 1]; + } + + entry.anIntArray2226[k10]++; + for (; k10 > 0; k10--) { + entry.anIntArray2226[k10]--; + entry.aByteArray2204[entry.anIntArray2226[k10]] = entry.aByteArray2204[(entry.anIntArray2226[k10 - 1] + 16) - 1]; + } + + entry.anIntArray2226[0]--; + entry.aByteArray2204[entry.anIntArray2226[0]] = byte6; + if (entry.anIntArray2226[0] == 0) { + int l9 = 4095; + for (int j9 = 15; j9 >= 0; j9--) { + for (int k9 = 15; k9 >= 0; k9--) { + entry.aByteArray2204[l9] = entry.aByteArray2204[entry.anIntArray2226[j9] + + k9]; + l9--; + } + + entry.anIntArray2226[j9] = l9 + 1; + } + + } + } + entry.anIntArray2228[entry.aByteArray2211[byte6 & 0xff] & 0xff]++; + anIntArray257[l5] = entry.aByteArray2211[byte6 & 0xff] & 0xff; + l5++; + if (j5 == 0) { + i5++; + j5 = 50; + byte byte14 = entry.aByteArray2219[i5]; + j8 = entry.anIntArray2200[byte14]; + ai = 
entry.anIntArrayArray2230[byte14]; + ai2 = entry.anIntArrayArray2210[byte14]; + ai1 = entry.anIntArrayArray2218[byte14]; + } + j5--; + int j7 = j8; + int i8; + byte byte11; + for (i8 = method1790(j7, entry); i8 > ai[j7]; i8 = i8 << 1 + | byte11) { + j7++; + byte11 = method1788(entry); + } + + k5 = ai2[i8 - ai1[j7]]; + } + } + + entry.anInt2222 = 0; + entry.aByte2201 = 0; + entry.anIntArray2220[0] = 0; + for (int j2 = 1; j2 <= 256; j2++) { + entry.anIntArray2220[j2] = entry.anIntArray2228[j2 - 1]; + } + + for (int k2 = 1; k2 <= 256; k2++) { + entry.anIntArray2220[k2] += entry.anIntArray2220[k2 - 1]; + } + + for (int l2 = 0; l2 < l5; l2++) { + byte byte7 = (byte) (anIntArray257[l2] & 0xff); + anIntArray257[entry.anIntArray2220[byte7 & 0xff]] |= l2 << 8; + entry.anIntArray2220[byte7 & 0xff]++; + } + + entry.anInt2208 = anIntArray257[entry.anInt2223] >> 8; + entry.anInt2227 = 0; + entry.anInt2208 = anIntArray257[entry.anInt2208]; + entry.anInt2221 = (byte) (entry.anInt2208 & 0xff); + entry.anInt2208 >>= 8; + entry.anInt2227++; + entry.anInt2225 = l5; + method1787(entry); + if (entry.anInt2227 == entry.anInt2225 + 1 && entry.anInt2222 == 0) { + flag18 = true; + } else { + flag18 = false; + } + } + } + +} diff --git a/Tools/Cache Editor/src/com/alex/util/crc32/CRC32HGenerator.java b/Tools/Cache Editor/src/com/alex/util/crc32/CRC32HGenerator.java new file mode 100644 index 000000000..e38e49ea9 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/crc32/CRC32HGenerator.java @@ -0,0 +1,26 @@ +package com.alex.util.crc32; + +import java.util.zip.CRC32; + +public final class CRC32HGenerator { + + public static final CRC32 CRC32Instance = new CRC32(); + + public static int getHash(byte[] data) { + return getHash(data, 0, data.length); + } + + public static int getHash(byte[] data, int offset, int length) { + synchronized(CRC32Instance) { + CRC32Instance.update(data, offset, length); + int hash = (int) CRC32Instance.getValue(); + CRC32Instance.reset(); + return hash; + } + } 
+ + + private CRC32HGenerator() { + + } +} diff --git a/Tools/Cache Editor/src/com/alex/util/gzip/GZipCompressor.java b/Tools/Cache Editor/src/com/alex/util/gzip/GZipCompressor.java new file mode 100644 index 000000000..68bad73c7 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/gzip/GZipCompressor.java @@ -0,0 +1,22 @@ +package com.alex.util.gzip; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.util.zip.GZIPOutputStream; + +public class GZipCompressor { + + public static final byte[] compress(byte[] data) { + ByteArrayOutputStream compressedBytes = new ByteArrayOutputStream(); + try { + GZIPOutputStream out = new GZIPOutputStream(compressedBytes); + out.write(data); + out.finish(); + out.close(); + return compressedBytes.toByteArray(); + } catch (IOException e) { + e.printStackTrace(); + } + return null; + } +} diff --git a/Tools/Cache Editor/src/com/alex/util/gzip/GZipDecompressor.java b/Tools/Cache Editor/src/com/alex/util/gzip/GZipDecompressor.java new file mode 100644 index 000000000..96260f490 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/gzip/GZipDecompressor.java @@ -0,0 +1,48 @@ +package com.alex.util.gzip; + +import java.util.zip.Inflater; + +import com.alex.io.Stream; + +public class GZipDecompressor { + + private static final Inflater inflaterInstance = new Inflater(true); + + public static final boolean decompress(Stream stream, byte data[]) { + synchronized(inflaterInstance) { + if (stream.getBuffer()[stream.getOffset()] != 31 || stream.getBuffer()[stream.getOffset() + 1] != -117) + return false; + //throw new RuntimeException("Invalid GZIP header!"); + try { + inflaterInstance.setInput(stream.getBuffer(), stream.getOffset() + 10, -stream.getOffset() - 18 + stream.getBuffer().length); + inflaterInstance.inflate(data); + } catch (Exception e) { + inflaterInstance.reset(); + return false; + //throw new RuntimeException("Invalid GZIP compressed data!"); + } + inflaterInstance.reset(); + return true; + 
} + } + + public static final boolean decompress(byte[] compressed, byte data[], int offset, int length) { + synchronized(inflaterInstance) { + if (data[offset] != 31 || data[offset + 1] != -117) + return false; + //throw new RuntimeException("Invalid GZIP header!"); + try { + inflaterInstance.setInput(data, offset + 10, -offset - 18 + length); + inflaterInstance.inflate(compressed); + } catch (Exception e) { + inflaterInstance.reset(); + e.printStackTrace(); + return false; + //throw new RuntimeException("Invalid GZIP compressed data!"); + } + inflaterInstance.reset(); + return true; + } + } + +} diff --git a/Tools/Cache Editor/src/com/alex/util/whirlpool/Whirlpool.java b/Tools/Cache Editor/src/com/alex/util/whirlpool/Whirlpool.java new file mode 100644 index 000000000..86cce2df0 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/util/whirlpool/Whirlpool.java @@ -0,0 +1,415 @@ +package com.alex.util.whirlpool; + +import java.util.Arrays; + +/** + * The Whirlpool hashing function. + * + *

+ * References + * + *

+ * The Whirlpool algorithm was developed by + * Paulo S. L. M. Barreto and + * Vincent Rijmen. + * + * See + * P.S.L.M. Barreto, V. Rijmen, + * ``The Whirlpool hashing function,'' + * First NESSIE workshop, 2000 (tweaked version, 2003), + * + * + * @author Paulo S.L.M. Barreto + * @author Vincent Rijmen. + * + * @version 3.0 (2003.03.12) + * + * ============================================================================= + * + * Differences from version 2.1: + * + * - Suboptimal diffusion matrix replaced by cir(1, 1, 4, 1, 8, 5, 2, 9). + * + * ============================================================================= + * + * Differences from version 2.0: + * + * - Generation of ISO/IEC 10118-3 test vectors. + * - Bug fix: nonzero carry was ignored when tallying the data length + * (this bug apparently only manifested itself when feeding data + * in pieces rather than in a single chunk at once). + * + * Differences from version 1.0: + * + * - Original S-box replaced by the tweaked, hardware-efficient version. + * + * ============================================================================= + * + * THIS SOFTWARE IS PROVIDED BY THE AUTHORS ''AS IS'' AND ANY EXPRESS + * OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED + * WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE + * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS OR CONTRIBUTORS BE + * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR + * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF + * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR + * BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, + * WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE + * OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, + * EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
+ * + */ +public class Whirlpool { + + /** + * The message digest size (in bits) + */ + public static final int DIGESTBITS = 512; + + /** + * The message digest size (in bytes) + */ + public static final int DIGESTBYTES = DIGESTBITS >>> 3; + + /** + * The number of rounds of the internal dedicated block cipher. + */ + protected static final int R = 10; + + /** + * The substitution box. + */ + private static final String sbox = + "\u1823\uc6E8\u87B8\u014F\u36A6\ud2F5\u796F\u9152" + + "\u60Bc\u9B8E\uA30c\u7B35\u1dE0\ud7c2\u2E4B\uFE57" + + "\u1577\u37E5\u9FF0\u4AdA\u58c9\u290A\uB1A0\u6B85" + + "\uBd5d\u10F4\ucB3E\u0567\uE427\u418B\uA77d\u95d8" + + "\uFBEE\u7c66\udd17\u479E\ucA2d\uBF07\uAd5A\u8333" + + "\u6302\uAA71\uc819\u49d9\uF2E3\u5B88\u9A26\u32B0" + + "\uE90F\ud580\uBEcd\u3448\uFF7A\u905F\u2068\u1AAE" + + "\uB454\u9322\u64F1\u7312\u4008\uc3Ec\udBA1\u8d3d" + + "\u9700\ucF2B\u7682\ud61B\uB5AF\u6A50\u45F3\u30EF" + + "\u3F55\uA2EA\u65BA\u2Fc0\udE1c\uFd4d\u9275\u068A" + + "\uB2E6\u0E1F\u62d4\uA896\uF9c5\u2559\u8472\u394c" + + "\u5E78\u388c\ud1A5\uE261\uB321\u9c1E\u43c7\uFc04" + + "\u5199\u6d0d\uFAdF\u7E24\u3BAB\ucE11\u8F4E\uB7EB" + + "\u3c81\u94F7\uB913\u2cd3\uE76E\uc403\u5644\u7FA9" + + "\u2ABB\uc153\udc0B\u9d6c\u3174\uF646\uAc89\u14E1" + + "\u163A\u6909\u70B6\ud0Ed\ucc42\u98A4\u285c\uF886"; + + private static long[][] C = new long[8][256]; + private static long[] rc = new long[R + 1]; + + static { + for (int x = 0; x < 256; x++) { + char c = sbox.charAt(x/2); + long v1 = ((x & 1) == 0) ? 
c >>> 8 : c & 0xff; + long v2 = v1 << 1; + if (v2 >= 0x100L) { + v2 ^= 0x11dL; + } + long v4 = v2 << 1; + if (v4 >= 0x100L) { + v4 ^= 0x11dL; + } + long v5 = v4 ^ v1; + long v8 = v4 << 1; + if (v8 >= 0x100L) { + v8 ^= 0x11dL; + } + long v9 = v8 ^ v1; + /* + * build the circulant table C[0][x] = S[x].[1, 1, 4, 1, 8, 5, 2, 9]: + */ + C[0][x] = + (v1 << 56) | (v1 << 48) | (v4 << 40) | (v1 << 32) | + (v8 << 24) | (v5 << 16) | (v2 << 8) | (v9 ); + /* + * build the remaining circulant tables C[t][x] = C[0][x] rotr t + */ + for (int t = 1; t < 8; t++) { + C[t][x] = (C[t - 1][x] >>> 8) | ((C[t - 1][x] << 56)); + } + } + + /* + * build the round constants: + */ + rc[0] = 0L; /* not used (assigment kept only to properly initialize all variables) */ + for (int r = 1; r <= R; r++) { + int i = 8*(r - 1); + rc[r] = + (C[0][i ] & 0xff00000000000000L) ^ + (C[1][i + 1] & 0x00ff000000000000L) ^ + (C[2][i + 2] & 0x0000ff0000000000L) ^ + (C[3][i + 3] & 0x000000ff00000000L) ^ + (C[4][i + 4] & 0x00000000ff000000L) ^ + (C[5][i + 5] & 0x0000000000ff0000L) ^ + (C[6][i + 6] & 0x000000000000ff00L) ^ + (C[7][i + 7] & 0x00000000000000ffL); + } + } + + public static byte[] getHash(byte[] data, int off, int len) { + byte source[]; + if(off <= 0) { + source = data; + } else { + source = new byte[len]; + for(int i = 0; i < len; i++) + source[i] = data[off + i]; + } + Whirlpool whirlpool = new Whirlpool(); + whirlpool.NESSIEinit(); + whirlpool.NESSIEadd(source, len * 8); + byte digest[] = new byte[64]; + whirlpool.NESSIEfinalize(digest); + return digest; + } + + /** + * Global number of hashed bits (256-bit counter). + */ + protected byte[] bitLength = new byte[32]; + + /** + * Buffer of data to hash. + */ + protected byte[] buffer = new byte[64]; + + /** + * Current number of bits on the buffer. + */ + protected int bufferBits = 0; + + /** + * Current (possibly incomplete) byte slot on the buffer. + */ + protected int bufferPos = 0; + + /** + * The hashing state. 
+ */ + protected long[] hash = new long[8]; + protected long[] K = new long[8]; // the round key + protected long[] L = new long[8]; + protected long[] block = new long[8]; // mu(buffer) + protected long[] state = new long[8]; // the cipher state + + public Whirlpool() { + } + + /** + * The core Whirlpool transform. + */ + protected void processBuffer() { + /* + * map the buffer to a block: + */ + for (int i = 0, j = 0; i < 8; i++, j += 8) { + block[i] = + (((long)buffer[j ] ) << 56) ^ + (((long)buffer[j + 1] & 0xffL) << 48) ^ + (((long)buffer[j + 2] & 0xffL) << 40) ^ + (((long)buffer[j + 3] & 0xffL) << 32) ^ + (((long)buffer[j + 4] & 0xffL) << 24) ^ + (((long)buffer[j + 5] & 0xffL) << 16) ^ + (((long)buffer[j + 6] & 0xffL) << 8) ^ + (((long)buffer[j + 7] & 0xffL) ); + } + /* + * compute and apply K^0 to the cipher state: + */ + for (int i = 0; i < 8; i++) { + state[i] = block[i] ^ (K[i] = hash[i]); + } + /* + * iterate over all rounds: + */ + for (int r = 1; r <= R; r++) { + /* + * compute K^r from K^{r-1}: + */ + for (int i = 0; i < 8; i++) { + L[i] = 0L; + for (int t = 0, s = 56; t < 8; t++, s -= 8) { + L[i] ^= C[t][(int)(K[(i - t) & 7] >>> s) & 0xff]; + } + } + for (int i = 0; i < 8; i++) { + K[i] = L[i]; + } + K[0] ^= rc[r]; + /* + * apply the r-th round transformation: + */ + for (int i = 0; i < 8; i++) { + L[i] = K[i]; + for (int t = 0, s = 56; t < 8; t++, s -= 8) { + L[i] ^= C[t][(int)(state[(i - t) & 7] >>> s) & 0xff]; + } + } + for (int i = 0; i < 8; i++) { + state[i] = L[i]; + } + } + /* + * apply the Miyaguchi-Preneel compression function: + */ + for (int i = 0; i < 8; i++) { + hash[i] ^= state[i] ^ block[i]; + } + } + + /** + * Initialize the hashing state. + */ + public void NESSIEinit() { + Arrays.fill(bitLength, (byte)0); + bufferBits = bufferPos = 0; + buffer[0] = 0; // it's only necessary to cleanup buffer[bufferPos]. + Arrays.fill(hash, 0L); // initial value + } + + /** + * Delivers input data to the hashing algorithm. 
+ * + * @param source plaintext data to hash. + * @param sourceBits how many bits of plaintext to process. + * + * This method maintains the invariant: bufferBits < 512 + */ + public void NESSIEadd(byte[] source, long sourceBits) { + /* + sourcePos + | + +-------+-------+------- + ||||||||||||||||||||| source + +-------+-------+------- + +-------+-------+-------+-------+-------+------- + |||||||||||||||||||||| buffer + +-------+-------+-------+-------+-------+------- + | + bufferPos + */ + int sourcePos = 0; // index of leftmost source byte containing data (1 to 8 bits). + int sourceGap = (8 - ((int)sourceBits & 7)) & 7; // space on source[sourcePos]. + int bufferRem = bufferBits & 7; // occupied bits on buffer[bufferPos]. + int b; + // tally the length of the added data: + long value = sourceBits; + for (int i = 31, carry = 0; i >= 0; i--) { + carry += (bitLength[i] & 0xff) + ((int)value & 0xff); + bitLength[i] = (byte)carry; + carry >>>= 8; + value >>>= 8; + } + // process data in chunks of 8 bits: + while (sourceBits > 8) { // at least source[sourcePos] and source[sourcePos+1] contain data. + // take a byte from the source: + b = ((source[sourcePos] << sourceGap) & 0xff) | + ((source[sourcePos + 1] & 0xff) >>> (8 - sourceGap)); + if (b < 0 || b >= 256) { + throw new RuntimeException("LOGIC ERROR"); + } + // process this byte: + buffer[bufferPos++] |= b >>> bufferRem; + bufferBits += 8 - bufferRem; // bufferBits = 8*bufferPos; + if (bufferBits == 512) { + // process data block: + processBuffer(); + // reset buffer: + bufferBits = bufferPos = 0; + } + buffer[bufferPos] = (byte)((b << (8 - bufferRem)) & 0xff); + bufferBits += bufferRem; + // proceed to remaining data: + sourceBits -= 8; + sourcePos++; + } + // now 0 <= sourceBits <= 8; + // furthermore, all data (if any is left) is in source[sourcePos]. + if (sourceBits > 0) { + b = (source[sourcePos] << sourceGap) & 0xff; // bits are left-justified on b. 
+ // process the remaining bits: + buffer[bufferPos] |= b >>> bufferRem; + } else { + b = 0; + } + if (bufferRem + sourceBits < 8) { + // all remaining data fits on buffer[bufferPos], and there still remains some space. + bufferBits += sourceBits; + } else { + // buffer[bufferPos] is full: + bufferPos++; + bufferBits += 8 - bufferRem; // bufferBits = 8*bufferPos; + sourceBits -= 8 - bufferRem; + // now 0 <= sourceBits < 8; furthermore, all data is in source[sourcePos]. + if (bufferBits == 512) { + // process data block: + processBuffer(); + // reset buffer: + bufferBits = bufferPos = 0; + } + buffer[bufferPos] = (byte)((b << (8 - bufferRem)) & 0xff); + bufferBits += (int)sourceBits; + } + } + + /** + * Get the hash value from the hashing state. + * + * This method uses the invariant: bufferBits < 512 + */ + public void NESSIEfinalize(byte[] digest) { + // append a '1'-bit: + buffer[bufferPos] |= 0x80 >>> (bufferBits & 7); + bufferPos++; // all remaining bits on the current byte are set to zero. + // pad with zero bits to complete 512N + 256 bits: + if (bufferPos > 32) { + while (bufferPos < 64) { + buffer[bufferPos++] = 0; + } + // process data block: + processBuffer(); + // reset buffer: + bufferPos = 0; + } + while (bufferPos < 32) { + buffer[bufferPos++] = 0; + } + // append bit length of hashed data: + System.arraycopy(bitLength, 0, buffer, 32, 32); + // process data block: + processBuffer(); + // return the completed message digest: + for (int i = 0, j = 0; i < 8; i++, j += 8) { + long h = hash[i]; + digest[j ] = (byte)(h >>> 56); + digest[j + 1] = (byte)(h >>> 48); + digest[j + 2] = (byte)(h >>> 40); + digest[j + 3] = (byte)(h >>> 32); + digest[j + 4] = (byte)(h >>> 24); + digest[j + 5] = (byte)(h >>> 16); + digest[j + 6] = (byte)(h >>> 8); + digest[j + 7] = (byte)(h ); + } + } + + /** + * Delivers string input data to the hashing algorithm. + * + * @param source plaintext data to hash (ASCII text string). 
+ * + * This method maintains the invariant: bufferBits < 512 + */ + public void NESSIEadd(String source) { + if (source.length() > 0) { + byte[] data = new byte[source.length()]; + for (int i = 0; i < source.length(); i++) { + data[i] = (byte)source.charAt(i); + } + NESSIEadd(data, 8 * data.length); + } + } + +} diff --git a/Tools/Cache Editor/src/com/alex/utils/ByteBufferUtils.java b/Tools/Cache Editor/src/com/alex/utils/ByteBufferUtils.java new file mode 100644 index 000000000..e04aa5797 --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/utils/ByteBufferUtils.java @@ -0,0 +1,153 @@ +package com.alex.utils; + +import java.nio.ByteBuffer; + + +/** + * Holds utility methods for reading/writing a byte buffer. + * @author Emperor + * + */ +public final class ByteBufferUtils { + + /** + * Gets a string from the byte buffer. + * @param buffer The byte buffer. + * @return The string. + */ + public static String getString(ByteBuffer buffer) { + StringBuilder sb = new StringBuilder(); + byte b; + while ((b = buffer.get()) != 0) { + sb.append((char) b); + } + return sb.toString(); + } + + /** + * Puts a string on the byte buffer. + * @param s The string to put. + * @param buffer The byte buffer. + */ + public static void putString(String s, ByteBuffer buffer) { + buffer.put(s.getBytes()).put((byte) 0); + } + + /** + * Gets a string from the byte buffer. + * @param s The string. + * @param buffer The byte buffer. + * @return The string. + */ + public static ByteBuffer putGJ2String(String s, ByteBuffer buffer) { + byte[] packed = new byte[256]; + int length = packGJString2(0, packed, s); + return buffer.put((byte) 0).put(packed, 0, length).put((byte) 0); + } + + /** + * Decodes the XTEA encryption. + * @param keys The keys. + * @param start The start index. + * @param end The end index. + * @param buffer The byte buffer. 
+ */ + public static void decodeXTEA(int[] keys, int start, int end, ByteBuffer buffer) { + int l = buffer.position(); + buffer.position(start); + int length = (end - start) / 8; + for (int i = 0; i < length; i++) { + int firstInt = buffer.getInt(); + int secondInt = buffer.getInt(); + int sum = 0xc6ef3720; + int delta = 0x9e3779b9; + for (int j = 32; j-- > 0;) { + secondInt -= keys[(sum & 0x1c84) >>> 11] + sum ^ (firstInt >>> 5 ^ firstInt << 4) + firstInt; + sum -= delta; + firstInt -= (secondInt >>> 5 ^ secondInt << 4) + secondInt ^ keys[sum & 3] + sum; + } + buffer.position(buffer.position() - 8); + buffer.putInt(firstInt); + buffer.putInt(secondInt); + } + buffer.position(l); + } + + /** + * Converts a String to an Integer? + * + * @param position + * The position. + * @param buffer + * The buffer used. + * @param string + * The String to convert. + * @return The Integer. + */ + public static int packGJString2(int position, byte[] buffer, String string) { + int length = string.length(); + int offset = position; + for (int i = 0; length > i; i++) { + int character = string.charAt(i); + if (character > 127) { + if (character > 2047) { + buffer[offset++] = (byte) ((character | 919275) >> 12); + buffer[offset++] = (byte) (128 | ((character >> 6) & 63)); + buffer[offset++] = (byte) (128 | (character & 63)); + } else { + buffer[offset++] = (byte) ((character | 12309) >> 6); + buffer[offset++] = (byte) (128 | (character & 63)); + } + } else + buffer[offset++] = (byte) character; + } + return offset - position; + } + + /** + * Gets a tri-byte from the buffer. + * @param buffer The buffer. + * @return The value. + */ + public static int getTriByte(ByteBuffer buffer) { + return ((buffer.get() & 0xFF) << 16) + ((buffer.get() & 0xFF) << 8) + (buffer.get() & 0xFF); + } + + /** + * Gets a smart from the buffer. + * @param buffer The buffer. + * @return The value. 
+ */ + public static int getSmart(ByteBuffer buffer) { + int peek = buffer.get() & 0xFF; + if (peek <= Byte.MAX_VALUE) { + return peek; + } + return ((peek << 8) | (buffer.get() & 0xFF)) - 32768; + } + + /** + * Gets a smart from the buffer. + * @param buffer The buffer. + * @return The value. + */ + public static int getBigSmart(ByteBuffer buffer) { + int value = 0; + int current = getSmart(buffer); + while (current == 32767) { + current = getSmart(buffer); + value += 32767; + } + value += current; + return value; + } + + /** + * Constructs a new {@code ByteBufferUtils} {@code Object}. + */ + private ByteBufferUtils() { + /* + * empty. + */ + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/com/alex/utils/Constants.java b/Tools/Cache Editor/src/com/alex/utils/Constants.java new file mode 100644 index 000000000..2c5558e2a --- /dev/null +++ b/Tools/Cache Editor/src/com/alex/utils/Constants.java @@ -0,0 +1,26 @@ +package com.alex.utils; + +public final class Constants { + + public static final int NO_COMPRESSION = 0; + public static final int BZIP2_COMPRESSION = 1; + public static final int GZIP_COMPRESSION = 2; + + public static final int MAX_VALID_ARCHIVE_LENGTH = 1000000; + + public static final int INTERFACE_DEFINITIONS_INDEX = 3; + public static final int MAPS_INDEX = 5; + public static final int MODELS_INDEX = 7; + public static final int SPRITES_INDEX = 8; + public static final int INDEXED_IMAGES_INDEX = 8; + public static final int OBJECTS_DEFINITIONS_INDEX = 18; + public static final int ITEM_DEFINITIONS_INDEX = 19; + public static final int LOADER_IMAGES_INDEX = 32; + public static final int LOADER_INDEXED_IMAGES_INDEX = 34; + public static final int CLIENT_BUILD = 718; + public static final boolean ENCRYPTED_CACHE = true; + + private Constants() { + + } +} diff --git a/Tools/Cache Editor/src/com/alex/utils/Utils.java b/Tools/Cache Editor/src/com/alex/utils/Utils.java new file mode 100644 index 000000000..afa5cf9ad --- /dev/null +++ 
b/Tools/Cache Editor/src/com/alex/utils/Utils.java @@ -0,0 +1,62 @@ +package com.alex.utils; + +import java.math.BigInteger; + +import com.alex.io.OutputStream; +import com.alex.store.Store; + +public final class Utils { + + public static byte[] cryptRSA(byte[] data, BigInteger exponent, BigInteger modulus) { + return new BigInteger(data).modPow(exponent, modulus).toByteArray(); + } + + public static byte[] getArchivePacketData(int indexId, int archiveId, + byte[] archive) { + OutputStream stream = new OutputStream(archive.length + 4); + stream.writeByte(indexId); + stream.writeShort(archiveId); + stream.writeByte(0); // priority, no compression + stream.writeInt(archive.length); + int offset = 8; + for (int index = 0; index < archive.length; index++) { + if (offset == 512) { + stream.writeByte(-1); + offset = 1; + } + stream.writeByte(archive[index]); + offset++; + } + byte[] packet = new byte[stream.getOffset()]; + stream.setOffset(0); + stream.getBytes(packet, 0, packet.length); + return packet; + } + + public static int getNameHash(String name) { + return name.toLowerCase().hashCode(); + } + + public static final int getInterfaceDefinitionsSize(Store store) { + return store.getIndexes()[3].getLastArchiveId(); + } + + public static final int getInterfaceDefinitionsComponentsSize(Store store, + int interfaceId) { + return store.getIndexes()[3].getLastFileId(interfaceId); + } + + public static final int getItemDefinitionsSize(Store store) { + int lastArchiveId = store.getIndexes()[19].getLastArchiveId(); + return lastArchiveId * 256 + + store.getIndexes()[19].getValidFilesCount(lastArchiveId); + } + + + + + private Utils() { + + } + +} diff --git a/Tools/Cache Editor/src/emperor/DefDumper.java b/Tools/Cache Editor/src/emperor/DefDumper.java new file mode 100644 index 000000000..158865588 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/DefDumper.java @@ -0,0 +1,27 @@ +package emperor; + +import java.io.BufferedWriter; +import java.io.FileWriter; +import 
java.util.Arrays; + +import alex.cache.loaders.ObjectDefinitions; + +import com.alex.store.Store; + +public class DefDumper { + + public static void main(String...args) throws Throwable { + Store store = new Store("./508/"); + BufferedWriter bw = new BufferedWriter(new FileWriter("./508_object_list.txt")); + for (int i = 0; i < 100_000; i++) { + ObjectDefinitions def = ObjectDefinitions.initialize(i, store); + if (def == null) { + continue; + } + bw.append("definition [id=" + i + ", options=" + Arrays.toString(def.options) + "]"); + bw.newLine(); + } + bw.flush(); + bw.close(); + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/DonatorIconPacker.java b/Tools/Cache Editor/src/emperor/DonatorIconPacker.java new file mode 100644 index 000000000..bf9744778 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/DonatorIconPacker.java @@ -0,0 +1,159 @@ +package emperor; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; + +import javax.imageio.ImageIO; + +import com.alex.loaders.images.IndexedColorImageFile; +import com.alex.store.Store; + +/** + * Handles the donator icon packing. + * @author Vexia + * + */ +public final class DonatorIconPacker { + + /** + * The icons to pack. + */ + private static String[] ICONS = new String[] {"green", "red", "yellow", "blue", "orange", "pink", "purple", "brown", "world_announce", "rainbow", "whip_icon"}; + + /** + * The path. + */ + private static final String PATH = "./icons"; + + /** + * The icon dump. + */ + private static final String DUMP_PATH = "./icon_dump"; + + /** + * The archive id. + */ + private static final int ACRHIVE_ID = 815; + + /** + * The starting index. + */ + private static final int START_INDEX = 2; + + /** + * The index color image file. + */ + private static IndexedColorImageFile colorFile; + + /** + * The store to work with. + */ + private static Store store; + + /** + * Runs the donator icon packer. + * @param args the arguments. 
+ * @throws IOException the exception. + */ + public static void main(String...args) throws IOException { + setStore(new Store("./498/")); + colorFile = new IndexedColorImageFile(store, ACRHIVE_ID, 0); + //colorFile.replaceImage(ImageIO.read(new File("logo.png")), 0); + colorFile.addImage(ImageIO.read(new File("nazi.png"))); + //colorFile.delete(1); + //packAll(); + dump(); + save(); + } + + /** + * Packs all the icons. + * @throws IOException the exception. + */ + public static void packAll() throws IOException { + for (int i = 0; i < ICONS.length; i++) { + pack(i, getImage(ICONS[i])); + } + } + + /** + * Packs an image to the cache. + * @param index the index. + * @param image the image. + */ + public static void pack(int index, BufferedImage image) { + if (image == null) { + System.out.println("Image null at " + index + "!"); + return; + } + String name = ICONS[index]; + int realIndex = START_INDEX + index; + int indexPacked = 0; + boolean replace = false; + if (realIndex < colorFile.getImages().length) { + colorFile.replaceImage(image, realIndex); + replace = true; + } else { + indexPacked = colorFile.addImage(image); + } + save(); + System.out.println("Packing icon with name - " + name + ", chat index=" + realIndex + ", indexPacked=" + indexPacked + ", replace=" + replace + "!"); + } + + /** + * Dumps the icon + * @throws IOException the exception. + */ + public static void dump() throws IOException { + dumpIcons(DUMP_PATH); + } + + /** + * Dumps the icons to a path. + * @param path the path. + * @throws IOException the exception. + */ + public static void dumpIcons(String path) throws IOException { + int index = 0; + System.out.println("Size=" + colorFile.getImages().length); + for (BufferedImage image : colorFile.getImages()) { + String name = path + "/icon-" + index++ + ".png"; + ImageIO.write(image, "PNG", new File(name)); + System.out.println("Dumping icon - " + name); + } + } + + /** + * Saves the index. 
+ */ + public static void save() { + store.getIndexes()[8].putFile(ACRHIVE_ID, 0, colorFile.encodeFile()); + } + + /** + * Gets a buffered image. + * @param name the name. + * @return the image. + * @throws IOException the exception. + */ + public static BufferedImage getImage(String name) throws IOException { + return ImageIO.read(new File(PATH + "/" + name + ".png")); + } + + /** + * Gets the store. + * @return the store + */ + public static Store getStore() { + return store; + } + + /** + * Sets the store. + * @param store the store to set + */ + public static void setStore(Store store) { + DonatorIconPacker.store = store; + } +} diff --git a/Tools/Cache Editor/src/emperor/ItemPacker.java b/Tools/Cache Editor/src/emperor/ItemPacker.java new file mode 100644 index 000000000..bebe23ac7 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/ItemPacker.java @@ -0,0 +1,139 @@ +package emperor; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import com.alex.loaders.items.ItemDefinitions; +import com.alex.store.Index; +import com.alex.store.Store; + +/** + * Packs items. + * @author Vexia + * + */ +public class ItemPacker { + + /** + * The store to pack to. + */ + private static Store store; + + /** + * The main method. + * @param args the arguments. + * @throws IOException the exception. + */ + public static void main(String...args) throws IOException { + store = new Store("./498/"); + String modelName = "models/44590.dat"; + packItem(modelName, "Dragon claws"); + } + + /** + * Gets the size. + * @return the size. + */ + public static int getSize() { + Index index = store.getIndexes()[19]; + int lastId = index.getLastArchiveId(); + int fileSize = index.getFile(lastId).length; + System.err.println(fileSize); + System.err.println(index.getValidFilesCount(lastId)); + int size = lastId * 256 + fileSize; + return size;//13247, 51, 191 + + } + + /** + * Packs an item. + * @param modelName the model name. 
+ * @param itemName the name. + * @throws IOException the exception. + */ + public static void packItem(String modelName, String itemName) throws IOException { + ItemDefinitions def = buildItem(modelName, itemName); + System.out.println("Attempting to pack the model - " + modelName + ", for item name - " + itemName); + packCustomItem(def); + System.out.println("Item packed."); + } + + /** + * Packs a custom model. + * @param data the data. + * @return the model. + */ + public static int packCustomModel(byte[] data) { + int archiveId = store.getIndexes()[19].getLastArchiveId()+1; + if(store.getIndexes()[19].putFile(archiveId, 0, data)) { + return archiveId; + } + return -1; + } + + /** + * Builds an item. + * @param modelName the name. + * @param itemName the item name. + * @return the def. + * @throws IOException + */ + public static ItemDefinitions buildItem(String modelName, String itemName) throws IOException { + int modelId = packCustomModel(getBytesFromFile(new File(modelName))); + ItemDefinitions definition = ItemDefinitions.getItemDefinition(store, 3101); + definition.setName(itemName); + definition.femaleEquipModelId1 = modelId; + definition.maleEquipModelId1 = modelId; + definition.invModelId = modelId; + return definition; + } + + /** + * Packs the custom item. + * @param cache the cache. + * @param id the id. + * @param def the def. + */ + public static void packCustomItem(ItemDefinitions def) { + int id = 13248; + store.getIndexes()[19].putFile(id >>> 8, 0xff & id, def.encode()); + } + + /** + * Gets all the bytes from the file. + * @param file the file. + * @return the bytes. + * @throws IOException the exception. + */ + @SuppressWarnings("resource") + public static byte[] getBytesFromFile(File file) throws IOException { + InputStream is = new FileInputStream(file); + // Get the size of the file + long length = file.length(); + // You cannot create an array using a long type. + // It needs to be an int type. 
+ // Before converting to an int type, check + // to ensure that file is not larger than Integer.MAX_VALUE. + if (length > Integer.MAX_VALUE) { + // File is too large + } + // Create the byte array to hold the data + byte[] bytes = new byte[(int)length]; + // Read in the bytes + int offset = 0; + int numRead = 0; + while (offset < bytes.length + && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + offset += numRead; + } + // Ensure all the bytes have been read in + if (offset < bytes.length) { + throw new IOException("Could not completely read file "+file.getName()); + } + // Close the input stream and return bytes + is.close(); + return bytes; + } +} diff --git a/Tools/Cache Editor/src/emperor/LandMap.java b/Tools/Cache Editor/src/emperor/LandMap.java new file mode 100644 index 000000000..9bfaa3c0c --- /dev/null +++ b/Tools/Cache Editor/src/emperor/LandMap.java @@ -0,0 +1,83 @@ +package emperor; + +import java.nio.ByteBuffer; + +public class LandMap { + + ByteBuffer buffer; + Byte[][][] overlayOpcodes = new Byte[4][64][64]; + Byte[][][] overlays = new Byte[4][64][64]; + Byte[][][] underlays = new Byte[4][64][64]; + Byte[][][] defaultOpcodes = new Byte[4][64][64]; + Byte[][][] height = new Byte[4][64][64]; + + public void addOverlay(int z, int x, int y, int overlay) { + overlays[z][x][y] = (byte) overlay; + } + public void addUnderlay(int z, int x, int y, int underlay) { + underlays[z][x][y] = (byte) underlay; + } + + public byte[] generate() { + ByteBuffer buffer = ByteBuffer.allocate(1 << 20); + for (int z = 0; z < 4; z++) { + for (int x = 0; x < 64; x++) { + for (int y = 0; y < 64; y++) { + Byte b = null; + if ((b = defaultOpcodes[z][x][y]) != null) { + buffer.put(b); + } + if ((b = underlays[z][x][y]) != null) { + buffer.put(b); + } + if ((b = overlayOpcodes[z][x][y]) != null) { + buffer.put(b); + buffer.put(overlays[z][x][y]); + } + if ((b = height[z][x][y]) != null) { + buffer.put((byte) 1); + buffer.put(b); + } else { + buffer.put((byte) 0); + 
} + } + } + } + while (this.buffer.hasRemaining()) { + buffer.put(this.buffer.get()); + } + buffer.flip(); + byte[] bs = new byte[buffer.remaining()]; + buffer.get(bs); + return bs; + } + + public void map(ByteBuffer buffer) { + this.buffer = buffer; + for (int z = 0; z < 4; z++) { + for (int x = 0; x < 64; x++) { + for (int y = 0; y < 64; y++) { + while (true) { + int opcode = buffer.get() & 0xFF; + if (opcode == 0) { + break; + } + if (opcode == 1) { + height[z][x][y] = buffer.get(); + break; + } + if (opcode <= 49) { + overlayOpcodes[z][x][y] = (byte) opcode; + overlays[z][x][y] = buffer.get(); + } else if (opcode <= 81) { + underlays[z][x][y] = (byte) opcode; + } else { + defaultOpcodes[z][x][y] = (byte) opcode; + } + } + } + } + } + System.out.println("Read landscape (remaining=" + buffer.remaining() + ")."); + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/Landscape.java b/Tools/Cache Editor/src/emperor/Landscape.java new file mode 100644 index 000000000..68e86bece --- /dev/null +++ b/Tools/Cache Editor/src/emperor/Landscape.java @@ -0,0 +1,15 @@ +package emperor; + +public class Landscape { + + byte[][][] flags = new byte[4][64][64]; + byte[][][] overlays = new byte[4][64][64]; + byte[][][] underlays = new byte[4][64][64]; + + public void addOverlay(int z, int x, int y, int overlay) { + overlays[z][x][y] = (byte) overlay; + } + public void addUnderlay(int z, int x, int y, int underlay) { + underlays[z][x][y] = (byte) underlay; + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/LandscapeCache.java b/Tools/Cache Editor/src/emperor/LandscapeCache.java new file mode 100644 index 000000000..1d8ebafd8 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/LandscapeCache.java @@ -0,0 +1,243 @@ +package emperor; + +import java.io.File; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.MappedByteBuffer; +import java.nio.channels.FileChannel; +import 
java.nio.channels.FileChannel.MapMode; +import java.util.HashMap; +import java.util.Map; + +import com.alex.store.Store; +import com.alex.util.gzip.GZipCompressor; +import com.alex.util.gzip.GZipDecompressor; + +/** + * Holds the map cache. + * + * @author Emperor + * + */ +public final class LandscapeCache { + + /** + * The map indices buffer. + */ + private static ByteBuffer mapIndices; + + /** + * The landscapes; + */ + private static final Map landscapes = new HashMap<>(); + + /** + * The amount of indexes. + */ + private static int indexes; + + /** + * The cache length. + */ + private static int cacheLength; + + /** + * The indexes list. + */ + private static int[] indices = null; + + /** + * The path. + */ + private static String path; + + /** + * The file store. + */ + private static Store store; + + /** + * Initializes the landscape cache stuff. + * + * @param path + * The cache path. + * @throws Throwable + * When an exception occurs. + */ + public static void init(String path, Store store) throws Throwable { + LandscapeCache.path = path; + LandscapeCache.store = store; + try { + RandomAccessFile raf = new RandomAccessFile(path + "/idx_reference.dat", "r"); + FileChannel channel = raf.getChannel(); + mapIndices = channel.map(MapMode.READ_ONLY, 0, channel.size()); + raf.close(); + channel.close(); + } catch (Throwable t) { + t.printStackTrace(); + } + cacheLength = (int) new File(path + "/map_cache_file.idx0").length(); + ByteBuffer buffer = mapIndices.duplicate(); + indexes = buffer.getShort() & 0xFFFF; + indices = new int[indexes]; + for (int i = 0; i < indexes; i++) { + indices[i] = buffer.getInt(); + } + int count = 0; + for (int i = 0; i < indexes; i++) { + byte[] b = forId(i); + if (b != null && b.length > 0) { + landscapes.put(i, b); + count++; + } + } + System.out.println("Succesfully loaded " + count + "/" + indexes + " regions!"); + } + + /** + * Gets the landscape byte buffer. + * + * @param regionId + * The region id. 
+ * @return The landscape buffer. + */ + public static byte[] getLandscape(int regionId) { + int index = LandscapeCache.indexFor(regionId); + return forId(index); + } + + /** + * Gets the maps for the given id. + * + * @param id + * The id. + * @return The map data. + */ + public static byte[] forId(int id) { + if (id < 0) { + return new byte[0]; + } + try { + RandomAccessFile raf = new RandomAccessFile(path + "/map_cache_file.idx0", "r"); + FileChannel channel = raf.getChannel(); + int size = (int) ((id >= indexes - 1 ? channel.size() : indices[id + 1]) - indices[id]); + if (size < 3) { + raf.close(); + channel.close(); + // System.out.println("Index " + id + " has invalid size!"); + channel.close(); + return new byte[0]; + } + //System.out.println("Size: " + size + "/" + channel.size() + ", index: " + indices[id]); + MappedByteBuffer buffer = channel.map(MapMode.READ_ONLY, indices[id], size); + raf.close(); + channel.close(); + int length = size - 2; + if (length < 1) { + return new byte[0]; + } + int decompressedLength = buffer.getShort() & 0xFFFF; + byte[] b = new byte[length]; + buffer.get(b); + byte[] data = new byte[decompressedLength]; + try { + GZipDecompressor.decompress(data, b, 0, b.length); + } catch (Throwable t) { + System.err.println("Failed to decompress idx " + id + "!"); + return new byte[0]; + } + return data; + } catch (IOException e) { + e.printStackTrace(); + } + return new byte[0]; + } + + public static void dump(String path) throws Throwable { + indices = new int[indexes]; + int offset = 0; + ByteBuffer mapCache = ByteBuffer.allocate(10_000_000); + for (int i = 0; i < indexes; i++) { + indices[i] = offset; + byte[] bs = landscapes.get(i); + if (bs != null && bs.length > 1) { + mapCache.putShort((short) bs.length); + byte[] b = GZipCompressor.compress(bs); + mapCache.put(b); + offset += 2 + b.length; + } + } + mapCache.flip(); + File f = new File(path + "/map_cache_file.idx0"); + if (f.exists()) { + if (!f.delete()) { + 
System.err.println("Could not delete #1!"); + } + } + RandomAccessFile raf = new RandomAccessFile(f, "rw"); + FileChannel channel = raf.getChannel(); + channel.write(mapCache); + raf.close(); + channel.close(); + ByteBuffer buffer = ByteBuffer.allocate(100_000); + buffer.putShort((short) indexes); + for (int i = 0; i < indexes; i++) { + buffer.putInt(indices[i]); + } + buffer.flip(); + f = new File(path + "/idx_reference.dat"); + if (f.exists()) { + if (!f.delete()) { + System.err.println("Could not delete #2!"); + f = new File(path + "/conflict-idx_reference.dat"); + } + } + raf = new RandomAccessFile(f, "rw"); + channel = raf.getChannel(); + channel.write(buffer); + raf.close(); + channel.close(); + } + + /** + * Gets the index for the region id. + * + * @param regionId + * The region id. + * @return The index. + */ + public static int indexFor(int regionId) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + return store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + } + + /** + * Gets the reference table buffer. + * + * @return The reference table buffer. + */ + public static ByteBuffer getReferenceTable() { + ByteBuffer buffer = ByteBuffer.allocate(mapIndices.remaining() + 10); + return buffer.put((byte) 251).putInt(LandscapeCache.getMapIndices().remaining()).putInt(cacheLength).put(LandscapeCache.getMapIndices().duplicate()); + } + + /** + * Gets the mapIndices. + * + * @return The mapIndices. + */ + public static ByteBuffer getMapIndices() { + return mapIndices; + } + + /** + * Gets the landscapes mapping. + * @return The mapping. 
+ */ + public static Map getLandscapes() { + return landscapes; + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/LandscapeEditor.java b/Tools/Cache Editor/src/emperor/LandscapeEditor.java new file mode 100644 index 000000000..7491f61ea --- /dev/null +++ b/Tools/Cache Editor/src/emperor/LandscapeEditor.java @@ -0,0 +1,465 @@ +package emperor; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.nio.channels.FileChannel; +import java.nio.channels.FileChannel.MapMode; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.List; + +import org.apollo.fs.IndexedFileSystem; +import org.apollo.fs.util.ZipUtils; + +import alex.cache.loaders.OverlayDefinition; + +import com.alex.io.InputStream; +import com.alex.store.Index; +import com.alex.store.Store; +import com.alex.tools.clientCacheUpdater.RSXteas; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +import emperor.ObjectMap.GameObject; + +/** + * @author Emperor + */ +public class LandscapeEditor { + + public static final boolean COPY_OUT = true; + + public static final void main(String...args) throws Throwable { + if (COPY_OUT) { + for (File f : new File("./mapcache_out/").listFiles()) { + copyFile(f, new File("./mapcache/" + f.getName())); + } + } +// Store store = new Store("./498/"); +// packMaps(store); +// checkNonOceanic(store, new int[] {8240, 8241, 8242, 8243, 8249, 8250, 8251, 8254, 8255, 8256, 8505, 8506, 8507, 8510, 8511, 8512, 8761, 8762, 8764, 8765, 8766, 8767, 8768, 9018, 9019, 9020, 9021, 9022, 9023, 9024, 9262, 9274, 9277, 9278, 9279, 9280, 9363, 9518, 9529, 9530, 9533, 9534, 9535, 9536, 9539, 9618, 9784, 9785, 9786, 9787, 9788, 9789, 9790, 9791, 9792, 10023, 10024, 10025, 10026, 10027, 10041, 10045, 10046, 10047, 10048, 10279, 10280, 10281, 10282, 10283, 10298, 10299, 10302, 10303, 10304, 10535, 
10538, 10539, 10541, 10543, 10555, 10556, 10557, 10560, 10568, 10570, 10791, 10792, 10796, 10797, 10798, 10799, 10800, 10813, 10815, 10816, 10824, 10825, 10826, 11047, 11048, 11049, 11052, 11069, 11070, 11071, 11072, 11080, 11082, 11303, 11305, 11307, 11308, 11326, 11327, 11328, 11559, 11560, 11561, 11563, 11564, 11582, 11583, 11584, 11815, 11816, 11817, 11818, 11819, 11820, 11838, 11839, 11840, 12071, 12072, 12073, 12074, 12075, 12076, 12077, 12094, 12095, 12096, 12333, 12334, 12350, 12351, 12352, 12606, 12607, 12608, 12862, 12863, 12864, 13118, 13119, 13120, 13374, 13375, 13376, 13466, 13610, 13628, 13629, 13630, 13631, 13632, 13866, 13867, 13868, 14128, 14136, 14379, 14380, 14381, 14382, 14383, 14384, 14392, 14635, 14636, 14640, 14891, 14892, 14896, 14903, 14904, 15147, 15152, 15158, 15160, 15403, 15404, 15405, 15407, 15408, 15414, 15415, 15416});//new int[] {6731, 6985, 8022, 8240, 8241, 8242, 8243, 8249, 8250, 8251, 8254, 8255, 8256, 8280, 8505, 8506, 8507, 8510, 8511, 8512, 8513, 8515, 8761, 8762, 8764, 8765, 8766, 8767, 8768, 9018, 9019, 9020, 9021, 9022, 9023, 9024, 9262, 9274, 9277, 9278, 9279, 9280, 9363, 9518, 9529, 9530, 9533, 9534, 9535, 9536, 9539, 9618, 9784, 9785, 9786, 9787, 9788, 9789, 9790, 9791, 9792, 10023, 10024, 10025, 10026, 10027, 10041, 10045, 10046, 10047, 10048, 10129, 10279, 10280, 10281, 10282, 10283, 10298, 10299, 10302, 10303, 10304, 10308, 10535, 10538, 10539, 10541, 10543, 10555, 10556, 10557, 10560, 10568, 10570, 10583, 10791, 10792, 10796, 10797, 10798, 10799, 10800, 10813, 10815, 10816, 10824, 10825, 10826, 11047, 11048, 11049, 11052, 11069, 11070, 11071, 11072, 11080, 11082, 11303, 11304, 11305, 11307, 11308, 11326, 11327, 11328, 11559, 11560, 11561, 11563, 11564, 11582, 11583, 11584, 11815, 11816, 11817, 11818, 11819, 11820, 11838, 11839, 11840, 12071, 12072, 12073, 12074, 12075, 12076, 12077, 12094, 12095, 12096, 12333, 12334, 12350, 12351, 12352, 12606, 12607, 12608, 12627, 12862, 12863, 12864, 12889, 12890, 13118, 13119, 
13120, 13144, 13145, 13146, 13354, 13374, 13375, 13376, 13400, 13401, 13402, 13466, 13610, 13625, 13626, 13628, 13629, 13630, 13631, 13632, 13866, 13867, 13868, 14128, 14136, 14379, 14380, 14381, 14382, 14383, 14384, 14392, 14635, 14636, 14640, 14648, 14891, 14892, 14896, 14903, 14904, 15147, 15152, 15158, 15160, 15403, 15404, 15405, 15407, 15408, 15414, 15415, 15416}); +// generateCache(store); +// override(store, 788, 12187); +// packOSRSMaps(store); +// packLandscape(store); +// pack377Maps(store); +// addMissingMaps(store); +// createMap(store); +// changeMap(store); + } + + static void packMaps(Store store) throws Throwable { + LandscapeCache.init("./mapcache/", store); + int[] keys = new int[] { 14881828, -6662814, 58238456, 146761213 }; + int count = 0; + int failed = 0; + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + String name = "l" + regionX + "_" + regionY; + int index = store.getIndexes()[5].getArchiveId(name); + if (index < 0) { + continue; + } + byte[] b = LandscapeCache.forId(index); + if (b == null || b.length < 2 || !validRegion(new InputStream(b))) { + failed++; + continue; + } + if (store.getIndexes()[5].putFile(index, 0, Constants.GZIP_COMPRESSION, b, keys, true, true, Utils.getNameHash(name), -1)) { + count++; + } else { + failed++; + } + } +// store.getIndexes()[5].rewriteTable(); +// store.getIndexes()[5].resetCachedFiles(); + System.out.println("Packed " + count + " maps (failed " + failed + " maps)!"); +// store2.getIndexes()[i].putFile(oldArchiveId, 0, Constants.GZIP_COMPRESSION, data, keys2, false, false, Utils.getNameHash(nameHash), -1); + } + + static void createMap(Store store) throws Throwable { + ObjectMap map = new ObjectMap(); + for (int x = 0; x < 64; x++) { + for (int y = 0; y < 64; y++) { + if (x == 32 || y == 32) { + continue; + } + map.add(1276, x, y, 0, 10, 0); + } + } + byte[] bs = map.generate(); + int regionId = 11110; + int x = regionId >> 
8 & 0xFF; + int y = regionId & 0xFF; + LandscapeCache.init("./mapcache/", store); + int archive = store.getIndexes()[5].getArchiveId("l" + x + "_" + y); + if (archive > -1) { + System.out.println("Already contained region " + regionId + " (archive=" + archive + ", len=" + bs.length + " - " +LandscapeCache.forId(archive).length + ")!"); + return; + } + for (int ar = 0; ar < 50000; ar++) { + if (!store.getIndexes()[5].archiveExists(ar)) { + if (LandscapeCache.forId(ar).length < 1) { + archive = ar; + System.out.println("Archive available: " + ar); + break; + } + } + } + store.getIndexes()[5].putFile(archive, 0, Constants.GZIP_COMPRESSION, bs, null, true, true, + Utils.getNameHash("l" + x + "_" + y), -1); + LandscapeCache.getLandscapes().put(archive, bs); + LandscapeCache.dump("./mapcache_out/"); + System.out.println("Done!"); + } + + static void changeMap(Store store) throws Throwable { + int regionId = 12439; + GameObject[] remove = new GameObject[] { + new GameObject(32099, 42, 27, 0, 10, 3) + }; + GameObject[] replace = new GameObject[] { + new GameObject(29139, 42, 27, 0, 10, 3) + }; + + LandscapeCache.init("./mapcache/", store); + ObjectMap map = new ObjectMap(); + map.map(new InputStream(LandscapeCache.getLandscape(regionId))); + for (int i = 0; i < remove.length; i++) { + GameObject r = remove[i]; + GameObject object = map.get(r.id, r.loc.x, r.loc.y, r.loc.z, r.type, r.rotation); + if (object == null) { + System.err.println("Could not find object!"); + return; + } + map.getObjects().remove(object); + if (replace[i] != null) { + map.getObjects().add(replace[i]); + } + } + byte[] bs = map.generate(); + LandscapeCache.getLandscapes().put(LandscapeCache.indexFor(regionId), bs); + LandscapeCache.dump("./mapcache_out/"); + } + + /** + * Overrides the regions. + * @param store The file store. + * @param revision The revision to get the regions from. + * @param regionIds The region ids to override. + * @throws Throwable When an exception occurs. 
+ */ + public static void override(Store store, int revision, int...regionIds) throws Throwable { + LandscapeCache.init("./mapcache/", store); + int count = 0; + if (revision == 377) { + Store s = new Store("./468/"); + IndexedFileSystem fs = new IndexedFileSystem(new File("./377/"), true); + for (int regionId : regionIds) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + byte[] bs = null; + try { + ByteBuffer buffer = fs.getFile(4, index); + bs = ZipUtils.unzip(buffer).array(); + } catch (Throwable t) { + continue; + } + if (bs != null && validRegion(new InputStream(bs))) { + System.out.println("Added region " + regionId + "!"); + count++; + LandscapeCache.getLandscapes().put(index, bs); + store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + } + } + fs.close(); + } else { + RSXteas.loadUnpackedXteas(revision); + Store s = new Store("./" + revision + "/"); + boolean newFormat = revision > 750; + for (int regionId : regionIds) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + int[] xteas = RSXteas.getXteas(regionId); + byte[] b = newFormat ? 
s.getIndexes()[5].getFile(regionX | regionY << 7, 0) + : s.getIndexes()[5].getFile(index, 0, xteas); + if (b != null && b.length > 1 && validRegion(new InputStream(b))) { + System.out.println("Added region " + regionId + "!"); + LandscapeCache.getLandscapes().put(index, b); + count++; + if (!newFormat) { + store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + } + } + } + } + LandscapeCache.dump("./mapcache_out/"); + System.out.println("Packed " + count + "/" + regionIds.length + " regions."); + } + + /** + * Fully generates a map cache (from scratch). + * @param store The file store. + * @throws Throwable + */ + public static void generateCache(Store store) throws Throwable { + LandscapeCache.init("./mapcache/", store); + Store s = new Store("./508/"); + List missingRegions = new ArrayList<>(); + System.out.println("Packing 508 maps..."); + RSXteas.loadUnpackedXteas(508); + int count = 0; + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + if (index < 0) { + continue; + } + int[] xteas = RSXteas.getXteas(regionId); + byte[] b = s.getIndexes()[5].getFile(index, 0, xteas); + if (b == null || b.length < 2 || !validRegion(new InputStream(b))) { + RandomAccessFile raf = new RandomAccessFile(new File("./508_Maps/" + index), "r"); + ByteBuffer buffer = raf.getChannel().map(MapMode.READ_ONLY, 0, raf.length()); + b = new byte[(int) raf.length()]; + buffer.get(b); + raf.close(); + if (!validRegion(new InputStream(b))) { + missingRegions.add(regionId); + continue; + } + System.out.println("Used 508 map data file for index " + index + "!"); + } + int archiveId = s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()); + if 
(archiveId > -1) { + store.getIndexes()[5].putArchive(archiveId, s); + } + LandscapeCache.getLandscapes().put(index, b); + count++; + } + System.out.println("Added " + count + " 508 regions!"); + System.out.println("Packing 468 maps..."); + RSXteas.loadUnpackedXteas(468); + s = new Store("./468/"); + int subCount = 0; + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = s.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + if (!missingRegions.contains(regionId)) { + continue; + } + int[] xteas = RSXteas.getXteas(regionId); + byte[] b = s.getIndexes()[5].getFile(index, 0, xteas); + if (b != null && b.length > 1 && validRegion(new InputStream(b))) { + System.out.println("Added missing region " + regionId + "!"); + count++; + subCount++; + missingRegions.remove((Object) regionId); + LandscapeCache.getLandscapes().put(index, b); + store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + } + } + System.out.println("Added " + subCount + " 468 regions!"); + System.out.println("Packing 377 maps..."); + subCount = 0; + IndexedFileSystem fs = new IndexedFileSystem(new File("./377/"), true); + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + if (!missingRegions.contains(regionId)) { + continue; + } + byte[] bs = null; + try { + ByteBuffer buffer = fs.getFile(4, index); + bs = ZipUtils.unzip(buffer).array(); + } catch (Throwable t) { + continue; + } + if (bs != null && validRegion(new InputStream(bs))) { + System.out.println("Added missing region " + regionId + "!"); + count++; + subCount++; + missingRegions.remove((Object) regionId); 
+ LandscapeCache.getLandscapes().put(index, bs); + store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + } + } + System.out.println("Added " + subCount + " 377 regions!"); + fs.close(); + System.out.println("Packing 666 maps..."); + RSXteas.loadUnpackedXteas(666); + s = new Store("./666/"); + subCount = 0; + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + if (index < 0) { + continue; + } + if (!missingRegions.contains(regionId)) { + continue; + } + int[] xteas = RSXteas.getXteas(regionId); + byte[] b = s.getIndexes()[5].getFile(index, 0, xteas); + if (b != null && b.length > 1 && validRegion(new InputStream(b))) { + System.out.println("Added missing region " + regionId + "!"); + count++; + subCount++; + missingRegions.remove((Object) regionId); + LandscapeCache.getLandscapes().put(index, b); + store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + } + } + System.out.println("Added " + subCount + " 666 regions!"); + System.out.println("Packing 788 maps..."); + s = new Store("./788/"); + subCount = 0; + for (int regionId = 0; regionId < 50_000; regionId++) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + if (!missingRegions.contains(regionId) && regionId != 6234) { + continue; + } + int index = regionX | regionY << 7; + byte[] b = s.getIndexes()[5].getFile(index, 0); + if (b != null && b.length > 1 && validRegion(new InputStream(b))) { + index = store.getIndexes()[5].getArchiveId(new StringBuilder("l").append(regionX).append("_").append(regionY).toString()); + System.out.println("Added missing region " + regionId + "!"); + count++; + subCount++; + 
missingRegions.remove((Object) regionId); + LandscapeCache.getLandscapes().put(index, b); + } + } + System.out.println("Added " + subCount + " 788 regions!"); + LandscapeCache.dump("./mapcache_out/"); + System.out.println("Added a total of " + count + " map regions, missing " + missingRegions.size() + " regions."); + System.out.println("Missing: " + Arrays.toString(missingRegions.toArray())); + System.exit(0); + } + + /** + * Checks for non-oceanic regions (regions that don't exist purely of sea). + * @param store The store. + * @param regions The regions array. + */ + public static void checkNonOceanic(Store store, int[] regions) { + List missing = new ArrayList<>(); + for (int regionId : regions) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + int mapscapeId = store.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()); + if (mapscapeId < 0) { + System.err.println("Invalid mapscape index for region " + regionId + "!"); + continue; + } + boolean abort = false; + ByteBuffer buffer = ByteBuffer.wrap(store.getIndexes()[5].getFile(mapscapeId, 0)); + byte[][][] mapscape = new byte[4][64][64]; + main: for (int z = 0; z < 4; z++) { + for (int x = 0; x < 64; x++) { + for (int y = 0; y < 64; y++) { + while (true) { + int value = buffer.get() & 0xFF; + if (value == 0) { + break; + } + if (value == 1) { + buffer.get(); + break; + } + if (value <= 49) { + int overlay = buffer.get() & 0xFF; + OverlayDefinition def = OverlayDefinition.forId(store, overlay); + if (def != null && def.getTextureId() != 25) { + abort = true; + break main; + } + } else if (value <= 81) { + mapscape[z][x][y] = (byte) (value - 49); + } + } + } + } + } + if (abort) { + missing.add(regionId); + } + } + System.out.println("Missing region count: " + missing.size() + ".."); + System.out.println(Arrays.toString(missing.toArray())); + } + + public static void packLandscape(Store store) throws Throwable { + Store s = new 
Store("./508/"); + int[] ids = new int[] {13722}; + for (int regionId : ids) { + int regionX = regionId >> 8 & 0xFF; + int regionY = regionId & 0xFF; + boolean b = store.getIndexes()[5].putArchive(s.getIndexes()[5].getArchiveId(new StringBuilder("m").append(regionX).append("_").append(regionY).toString()), s); + System.out.println("Packed landscape (" + regionId + "): " + b); + } + } + + public static boolean addMapFile(Index index, String name, byte[] data) { + int archiveId = index.getArchiveId(name); + if(archiveId == -1) + archiveId = index.getTable().getValidArchiveIds().length; + return index.putFile(archiveId, 0, Constants.GZIP_COMPRESSION, data, null, false, false, Utils.getNameHash(name), -1); + } + + public static boolean validRegion(InputStream stream) { + int count = 0; + for (;;) { + int offset = stream.readSmart2(); + if (offset == 0) { + break; + } + int location = 0; + for (;;) { + offset = stream.readUnsignedSmart(); + if (offset == 0) { + break; + } + location += offset - 1; + int y = location & 0x3f; + int x = location >> 6 & 0x3f; + stream.readUnsignedByte(); + if (x >= 0 && y >= 0 && x < 64 && y < 64) { + if (++count > 10) { + return true; + } + } + } + } + return false; + } + + /** + * Copies a file. + * @param in The file to be copied. + * @param out The file to copy to. 
+ */ + private static void copyFile(File in, File out) { + try (FileChannel channel = new FileInputStream(in).getChannel()) { + try (FileChannel output = new FileOutputStream(out).getChannel()) { + channel.transferTo(0, channel.size(), output); + channel.close(); + output.close(); + } + } catch (IOException e) { + e.printStackTrace(); + } + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/MapEditor.java b/Tools/Cache Editor/src/emperor/MapEditor.java new file mode 100644 index 000000000..45669634d --- /dev/null +++ b/Tools/Cache Editor/src/emperor/MapEditor.java @@ -0,0 +1,393 @@ +package emperor; + +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Iterator; +import java.util.Map; + +import alex.cache.loaders.OverlayDefinition; + +import com.alex.io.InputStream; +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +import emperor.ObjectMap.GameObject; + +/** + * Used for editting maps. + * @author Emperor + * + */ +public final class MapEditor { + + /** + * The valid revisions. + */ + private static final int[] VALID_REVISIONS = { + 377, 468, 474, 498, 503, 508, 538, 546, 562, 569, 666, 788 + }; + + /** + * The xtea keys used to encrypt the maps. + */ + private static final int[] XTEA_KEYS = { + 14881828, -6662814, 58238456, 146761213 + }; + + /** + * The mapscape type (floor). + */ + private static final String MAP_TYPE = "m"; + + /** + * The landscape type (objects). + */ + private static final String LAND_TYPE = "l"; + + /** + * The map cache index. + */ + private static final int MAP_INDEX = 5; + + /** + * The cache file store to change. + */ + private static Store store; + + /** + * Used the update the maps. + * @throws Throwable When an exception occurs. 
+ */ + private static void update() throws Throwable { +// replaceObjects(new GameObject[][] { +// { new GameObject(5281, 3666, 3521, 1, 10, 0), new GameObject(5281, 3666, 3520, 1, 10, 0) } +// }); +// copy(13099, 13099, new Store("./508/"), new int[] { 273193181, -1465876115, -151667950, 40605898 }); + replaceMapPart(13099, new Store("./468/"), new int[] {-636687345, -1379232722, -1661855973, 666075756}, 18, 36, 31, 49, 0); +// int regionId = 13099; +// System.out.println("Revisions for region " + regionId + ": " + Arrays.toString(getValidRevisions(regionId)) + "."); + } + + /** + * Replaces a part of the map. + * @param regionId The region id. + * @param from The store to copy from. + * @param xteaKeys The XTEA keys used to decrypt the region from the store to copy from. + * @param southWestX The south west x (on region) coordinate of the part to replace. + * @param southWestY The south west y (on region) coordinate of the part to replace. + * @param northEastX The north east x (on region) coordinate of the part to replace. + * @param northEastY The north east y (on region) coordinate of the part to replace. 
+ */ + static void replaceMapPart(int regionId, Store from, int[] xteaKeys, int southWestX, int southWestY, int northEastX, int northEastY, int...planes) { + ObjectMap map = new ObjectMap(); + map.map(new InputStream(getLandscape(regionId, store, XTEA_KEYS))); + for (Iterator it = map.getObjects().iterator(); it.hasNext();) { + GameObject object = it.next(); + if (object.loc.x >= southWestX && object.loc.x <= northEastX && object.loc.y >= southWestY && object.loc.y <= northEastY) { + it.remove(); + } + } + ObjectMap m = new ObjectMap(); + m.map(new InputStream(getLandscape(regionId, from, xteaKeys))); + for (GameObject object : m.getObjects()) { + for (int z : planes) { + if (object.loc.z == z && object.loc.x >= southWestX && object.loc.x <= northEastX && object.loc.y >= southWestY && object.loc.y <= northEastY) { + map.getObjects().add(object); + break; + } + } + } + packLandscape(regionId, map.generate(), store, XTEA_KEYS); + LandMap l = new LandMap(); + l.map(ByteBuffer.wrap(store.getIndexes()[5].getFile(getArchiveIndex(MAP_TYPE, regionId, store)))); + LandMap lm = new LandMap(); + lm.map(ByteBuffer.wrap(from.getIndexes()[5].getFile(getArchiveIndex(MAP_TYPE, regionId, from)))); + for (int z : planes) { + for (int x = southWestX; x <= northEastX; x++) { + for (int y = southWestY; y <= northEastY; y++) { + l.defaultOpcodes[z][x][y] = lm.defaultOpcodes[z][x][y]; + l.height[z][x][y] = lm.height[z][x][y]; + l.overlayOpcodes[z][x][y] = lm.overlayOpcodes[z][x][y]; + l.overlays[z][x][y] = lm.overlays[z][x][y]; + l.underlays[z][x][y] = lm.underlays[z][x][y]; + } + } + } + packMapscape(regionId, l.generate(), store); + } + + /** + * Gets the revisions of the caches having this region. + * @param regionId The region id. + * @return The cache revisions. + */ + public static int[] getValidRevisions(int regionId) { + int[] revisions = new int[VALID_REVISIONS.length]; + int count = 0; + for (int revision : VALID_REVISIONS) { + String rev = revision == 498 ? 
"clean_498" : Integer.toString(revision); + try { + Store store = new Store("./" + rev + "/"); + System.out.println("./" + rev + "/"); + if (getArchiveIndex(LAND_TYPE, regionId, store) > -1) { + revisions[count++] = revision; + } + } catch (Throwable t) { + t.printStackTrace(); + } + } + return Arrays.copyOf(revisions, count); + } + + /** + * Copies a region. + * @param fromId The region id to copy. + * @param toId The region id to paste on. + * @param from The store to get the data from. + * @param xtea The XTEA keys to decrypt the map. + */ + static void copy(int fromId, int toId, Store from, int[] xtea) { + copy(LAND_TYPE, fromId, toId, from, xtea); + copy(MAP_TYPE, fromId, toId, from, null); + } + + /** + * Copies the landscape from a region. + * @param fromId The region id to copy the landscape from. + * @param toId The region id to paste the landscape on. + * @param from the store to get the data from. + * @param xtea The XTEA keys to decrypt the landscape. + */ + private static void copy(String type, int fromId, int toId, Store from, int[] xtea) { + int index = getArchiveIndex(type, fromId, from); + if (index < 0) { + throw new IllegalArgumentException("Region " + fromId + " does not exist!"); + } + byte[] bs = from.getIndexes()[MAP_INDEX].getFile(index, 0, xtea); + if (bs == null || bs.length < 1) { + throw new IllegalArgumentException("Region " + fromId + " is invalid!"); + } + index = getArchiveIndex(type, toId, store); + if (index < 0) { + index = findEmptyArchive(store, 0); + System.out.println("Creating new region - id=" + index + "!"); + } + store.getIndexes()[MAP_INDEX].putFile(index, 0, Constants.GZIP_COMPRESSION, bs, type == LAND_TYPE ? XTEA_KEYS : null, true, true, getNameHash(type, toId), -1); + } + + /** + * Replaces objects. + * @param changes The array of object changes. + * @throws Throwable when an exception occurs. 
+ */ + static void replaceObjects(GameObject[][] changes) throws Throwable { + Map> objects = new HashMap<>(); + for (int i = 0; i < changes.length; i++) { + GameObject old = changes[i][0]; + int regionId = (old.loc.x >> 6) << 8 | (old.loc.y >> 6); + old = old.getLocal(); + Map map = objects.get(regionId); + if (map == null) { + objects.put(regionId, map = new HashMap<>()); + } + GameObject replace = changes[i][1]; + if (replace != null) { + replace = replace.getLocal(); + } + map.put(old, replace); + } + int count = 0; + for (int regionId : objects.keySet()) { + Map replacements = objects.get(regionId); + ObjectMap map = new ObjectMap(); + map.map(new InputStream(getLandscape(regionId, store, XTEA_KEYS))); + for (GameObject object : replacements.keySet()) { + GameObject current = map.get(object); + if (current == null) { + throw new IllegalArgumentException("Could not find object " + object + "!"); + } + map.getObjects().remove(current); + current = replacements.get(object); + if (current != null) { + map.getObjects().add(current); + } + count++; + } + packLandscape(regionId, map.generate(), store, XTEA_KEYS); + } + System.out.println("Changed " + count + " objects in " + objects.size() + " regions!"); + } + + /** + * Packs the landscape. + * @param regionId The region id to pack on. + * @param data The landscape data to pack. + * @param store The store used. + * @param xtea The XTEA keys. + */ + private static void packMapscape(int regionId, byte[] data, Store store) { + int index = getArchiveIndex(MAP_TYPE, regionId, store); + store.getIndexes()[MAP_INDEX].putFile(index, 0, Constants.GZIP_COMPRESSION, data, null, true, true, getNameHash(MAP_TYPE, regionId), -1); + } + + /** + * Packs the landscape. + * @param regionId The region id to pack on. + * @param data The landscape data to pack. + * @param store The store used. + * @param xtea The XTEA keys. 
+ */ + private static void packLandscape(int regionId, byte[] data, Store store, int[] xtea) { + int index = getArchiveIndex(LAND_TYPE, regionId, store); + store.getIndexes()[MAP_INDEX].putFile(index, 0, Constants.GZIP_COMPRESSION, data, xtea, true, true, getNameHash("l", regionId), -1); + } + + /** + * Gets the landscape data. + * @param regionId The region id. + * @param store The store to get the landscape data from. + * @param xtea The XTEA keys used to decrypt the landscape. + * @return The landscape data. + */ + private static byte[] getLandscape(int regionId, Store store, int[] xtea) { + int index = getArchiveIndex(LAND_TYPE, regionId, store); + if (index < 0) { + throw new IllegalArgumentException("Region " + regionId + " does not exist!"); + } + byte[] bs = store.getIndexes()[MAP_INDEX].getFile(index, 0, xtea); + if (bs == null) { + throw new IllegalArgumentException("Region " + regionId + " has no valid landscape!"); + } + return bs; + } + + /** + * Finds an empty archive id. + * @param store The store to check. + * @param offset The archive offset to start checking from. + * @return The new archive index. + */ + private static int findEmptyArchive(Store store, int offset) { + for (int index = offset; index < 50000; index++) { + if (!store.getIndexes()[MAP_INDEX].archiveExists(index)) { + return index; + } + } + return -1; + } + + /** + * Gets the name hash for the given region id. + * @param type The archive type "m"=mapscape, "l"=landscape. + * @param regionId The region id. + * @return The name hash. + */ + private static int getNameHash(String type, int regionId) { + int x = regionId >> 8 & 0xFF; + int y = regionId & 0xFF; + return Utils.getNameHash(type + x + "_" + y); + } + + /** + * Gets the archive index. + * @param type The archive type "m"=mapscape, "l"=landscape. + * @param regionId The region id. + * @param store The store. + * @return The archive index. 
+ */ + private static int getArchiveIndex(String type, int regionId, Store store) { + int x = regionId >> 8 & 0xFF; + int y = regionId & 0xFF; + return store.getIndexes()[MAP_INDEX].getArchiveId(type + x + "_" + y); + } + + /** + * The main method. + * @param args The arguments cast on runtime. + * @throws Throwable When an exception occurs. + */ + public static void main(String...args) throws Throwable { + String revision = "498"; + if (args.length > 0) { + revision = args[0]; + } + System.out.println("Updating revision " + revision + "..."); + long start = System.currentTimeMillis(); + store = new Store("./" + revision + "/"); + update(); + System.out.println("Finished after " + (System.currentTimeMillis() - start) + " milliseconds."); + } + + /** + * Checks if the region is valid. + * @param regionId The region id. + * @param store The store. + * @return {@code True} if so. + */ + public static boolean isValid(int regionId, int[] xtea, Store store) { + int index = getArchiveIndex("l", regionId, store); + if (index > -1) { + byte[] bs = store.getIndexes()[MAP_INDEX].getFile(index, 0, xtea); + if (bs == null) { + if (regionId == 11082) { //Elf city is an empty region + return true; + } + ByteBuffer buffer = ByteBuffer.wrap(store.getIndexes()[5].getFile(getArchiveIndex("m", regionId, store), 0)); + byte[][][] mapscape = new byte[4][64][64]; + boolean ocean = true; + main: for (int z = 0; z < 4; z++) { + for (int i = 0; i < 64; i++) { + for (int j = 0; j < 64; j++) { + while (true) { + int value = buffer.get() & 0xFF; + if (value == 0) { + break; + } + if (value == 1) { + buffer.get(); + break; + } + if (value <= 49) { + int overlay = buffer.get() & 0xFF; + OverlayDefinition def = OverlayDefinition.forId(store, overlay); + if (def != null && def.getTextureId() != 25) { + ocean = false; + break main; + } + } else if (value <= 81) { + mapscape[z][i][j] = (byte) (value - 49); + } + } + } + } + } + if (!ocean) { + return false; + } + } + return true; + } + return false; 
+ } + + /** + * Debugs the world map. + */ + static void debugWorldMap() { + int regions = 0; + int missing = 0; + for (int x = 0; x < 255; x++) { + for (int y = 0; y < 255; y++) { + int regionId = x << 8 | y; + if (!isValid(regionId, XTEA_KEYS, store)) { + missing++; + System.out.println("Missing region " + regionId + "!"); + } + } + } + System.out.println("World map is missing " + missing + "/" + regions + " regions!"); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/ModelPacker.java b/Tools/Cache Editor/src/emperor/ModelPacker.java new file mode 100644 index 000000000..eb55bd2c5 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/ModelPacker.java @@ -0,0 +1,195 @@ +package emperor; + +import java.awt.image.BufferedImage; +import java.io.File; +import java.io.IOException; +import java.nio.ByteBuffer; +import java.util.Arrays; + +import javax.imageio.ImageIO; + +import alex.cache.loaders.OverlayDefinition; + +import com.alex.loaders.images.IndexedColorImageFile; +import com.alex.store.Store; + +/** + * Packs the models. 
+ * @author Emperor + * + */ +public final class ModelPacker { + + public static void main(String...args) throws Throwable { + Store to = new Store("./498/"); + packDonatorIcons(to); + // packObjectDefinitions(from, to); + // packAnimations(from, to); + // List anims = new ArrayList<>(); + // for (int i = 0; i < 50_000; i++) { + // byte[] data = from.getIndexes()[16].getFile(i >>> 1998118472, i & 0xff); + // if (data == null) { + // continue; + // } + // ObjectDefinitions def = new ObjectDefinitions(i); + // def.initialize(from); + // if (def.animationId > -1) { + // if (!anims.contains(def.animationId)) { + // anims.add(def.animationId); + // } + //// System.out.println(def.getName() + " anim: " + def.animationId + ", " + Arrays.toString(def.models)); + // } + // } + // System.out.println(Arrays.toString(anims.toArray())); + // packAnimations(from, to); + } + + static void packObjectDefinitions(Store from, Store to) { + int[] defs = new int[] { 5461 };//5099, 5100, 5094, 5096, 5098, 5097, 5110, 5111};//5088, 5089, 5090 }; + for (int id : defs) { + int archive = id >>> 1998118472; + int file = id & 0xFF; + byte[] bs = from.getIndexes()[16].getFile(archive, file); + to.getIndexes()[16].putFile(archive, file, bs); + } + } + + static void editObjectDefinitions(int itemId, Store store, int opcode, Object value) { + int archive = itemId >>> 1998118472; + int file = itemId & 0xFF; + byte[] bs = store.getIndexes()[16].getFile(archive, file); + ByteBuffer buffer = ByteBuffer.allocate(bs.length + 128); + for (int i = 0; i < bs.length - 1; i++) { + buffer.put(bs[i]); + } + buffer.put((byte) opcode); + if (value instanceof Byte) { + buffer.put((Byte) value); + } + else if (value instanceof Short) { + buffer.putShort((Short) value); + } + else if (value instanceof Integer) { + buffer.putInt((Integer) value); + } + else if (value instanceof Long) { + buffer.putLong((Long) value); + } + else if (value instanceof String) { + buffer.put(((String) value).getBytes()).put((byte) 0); 
+ } + else if (value instanceof Boolean) { + buffer.put((byte) ((Boolean) value ? 1 : 0)); + } + bs = new byte[buffer.remaining()]; + buffer.get(bs); + store.getIndexes()[16].putFile(archive, file, bs); + } + + static void packSprite(Store to) { + int id = 423; + IndexedColorImageFile f = null; + try { + f = new IndexedColorImageFile(to, id, 0); + BufferedImage icon = ImageIO.read(new File("green.png")); + f.replaceImage(icon, 3); + //System.out.println("Added icon: "+f.addImage(icon, 0, 1)+"."); + } catch (IOException e) { + e.printStackTrace(); + } + to.getIndexes()[8].putFile(id, 0, f.encodeFile()); + } + + static void packDonatorIcons(Store to) { + int id = 423; + File[] files = new File("donator_icons").listFiles(); + IndexedColorImageFile f = null; + int index = 0; + for (File file : files) { + try { + f = new IndexedColorImageFile(to, id, 0); + BufferedImage icon = ImageIO.read(file); + if (index == 0) { + f.replaceImage(icon, 3); + System.out.println("Replaced icon - " + 3); + } else { + System.out.println("Added icon: "+f.addImage(icon, 0, 1)+"."); + } + } catch (IOException e) { + e.printStackTrace(); + } + to.getIndexes()[8].putFile(id, 0, f.encodeFile()); + index++; + } + } + + static void packAnimations(Store from, Store to) { + int [] anims = new int[] {4856};//3206, 498, 499, 500, 501, 481, 467, 526, 527, 907, 505, 524, 449, 523, 2709, 1726, 480, 488, 479, 469, 475, 476, 473, 1071, 493, 494, 504, 471, 468, 470, 332, 333, 492, 1731, 472, 491, 503, 522, 456, 464, 2714, 9101, 502, 525, 6023, 6561, 477, 478, 1223, 446, 6913, 912, 917, 474, 1051, 1049, 4860, 1052, 1073, 9123, 3106, 1072, 1096, 1098, 1097, 1103, 1104, 1108, 1112, 1127, 1138, 1211, 1212, 1216, 1233, 1234, 1235, 1231, 1260, 1261, 1293, 1294, 1295, 1296, 1297, 1298, 1299, 1300, 1301, 1302, 1303, 1304, 8618, 1334, 1433, 1347, 1349, 1348, 1362, 1355, 1416, 1398, 1411, 1430, 1431, 1532, 1533, 1600, 1631, 1629, 1632, 1630, 1636, 1657, 1641, 1642, 1643, 1729, 1733, 1727, 1017, 1734, 1730, 1732, 
1747, 1812, 1845, 1846, 1847, 1869, 1868, 1875, 1881, 1908, 1909, 1923, 1915, 1943, 1940, 1936, 1937, 1938, 1939, 1944, 1998, 1999, 2056, 2054, 2091, 2131, 2133, 2136, 2135, 2137, 2178, 2260, 2174, 2173, 2199, 2198, 2196, 2201, 2203, 2204, 2209, 2210, 2212, 5855, 2313, 2331, 2346, 2359, 2360, 2349, 2350, 2348, 2379, 2440, 2439, 2437, 2451, 2564, 4291, 3743, 2598, 4123, 2600, 2641, 2657, 2699, 2708, 2734, 2746, 2747, 2742, 2743, 2744, 2768, 447, 2807, 2870, 2871, 2878, 2883, 2897, 2899, 2901, 2898, 2900, 2905, 2997, 3022, 3029, 3028, 3030, 3038, 3070, 3099, 3100, 3095, 3105, 3107, 3101, 3097, 3104, 3113, 3174, 3173, 3180, 4354, 3217, 3218, 3219, 3172, 3230, 3231, 3237, 3246, 3247, 3264, 6094, 3304, 3305, 3306, 3343, 3347, 3511, 7263, 3349, 3351, 3352, 3408, 3405, 3406, 3407, 3438, 3439, 3440, 3445, 3472, 3528, 3534, 3529, 3530, 3531, 3532, 3478, 3542, 3558, 4477, 4564, 3586, 3573, 3577, 3117, 3118, 5483, 166, 286, 145, 3707, 6218, 6496, 1338, 9241, 3587, 3582, 3647, 3720, 3644, 3646, 3648, 3578, 3579, 3580, 3581, 3615, 3616, 3742, 3835, 3843, 3927, 3932, 3939, 3940, 3943, 3944, 3976, 3998, 4005, 4006, 4015, 4013, 4014, 4022, 3699, 3698, 3700, 4133, 4132, 6477, 4126, 4157, 4161, 4163, 4220, 4217, 4218, 4239, 4260, 4241, 4240, 4242, 4274, 4284, 4308, 4309, 4323, 4324, 4325, 4338, 4339, 4336, 4335, 459, 4357, 4355, 4356, 4358, 4393, 4392, 4408, 4394, 4395, 4396, 4397, 4398, 4377, 4359, 4361, 4360, 4363, 4364, 4399, 4431, 4535, 4565, 4566, 4567, 4568, 4569, 4577, 4557, 4559, 4560, 4563, 4561, 4562, 4572, 4571, 4595, 4599, 4621, 4622, 4627, 4628, 4746, 4747, 4778, 4744, 4745, 4743, 4781, 4798, 4783, 4894, 4879, 4880, 4881, 4895, 4896, 4899, 4883, 4897, 4898, 4900, 4901, 5012, 5044, 5073, 5058, 5141, 5109, 5170, 5169, 5173, 5174, 5175, 5179, 5180, 5176, 5177, 5178, 5197, 5193, 5195, 5196, 5194, 5203, 5220, 5219, 5222, 5221, 5235, 5237, 5239, 5260, 5261, 5267, 5269, 5268, 5271, 5270, 5278, 5308, 5295, 5296, 5297, 5350, 5351, 5360, 5068, 5430, 5415, 5422, 5423, 5429, 5431, 
5432, 5603, 5599, 5601, 5600, 5598, 5605, 5631, 5604, 5564, 5740, 5742, 5737, 5745, 5743, 5744, 5738, 5728, 5730, 5729, 5739, 5741, 5734, 5771, 5772, 5768, 5797, 5798, 5828, 5829, 5830, 5844, 5824, 5825, 5847, 5900, 5901, 5906, 5874, 5857, 5909, 5975, 5976, 5977, 5983, 5984, 5985, 5974, 6015, 6069, 6037, 6038, 6036, 6035, 6034, 6029, 6031, 6032, 6027, 6028, 6024, 6025, 6026, 6039, 6123, 6211, 6161, 6162, 6163, 6164, 6165, 6166, 6167, 6168, 6170, 6196, 6269, 6274, 6481, 4130, 4128, 4131, 4129, 6491, 4127, 4125, 6495, 6493, 6494, 6466, 6467, 6492, 4124, 6426, 6461, 6453, 6439, 6522, 6497, 6499, 6500, 6509, 6506, 6523, 6623, 6624, 6625, 6626, 6627, 6597, 6598, 6635, 6637, 6636, 6638, 6639, 6645, 6646, 6652, 6653, 6656, 6737, 6732, 6731, 6733, 6734, 6735, 6736, 6854, 6853, 6873, 6874, 6875, 6912, 6914, 6915, 4782, 6925, 6900, 6901, 6902, 6903, 6917, 6898, 6890, 6891, 6892, 6893, 6894, 6895, 6931, 6932, 6982, 6995, 6996, 7007, 7066, 7067, 7087, 7097, 7118, 7115, 7117, 7120, 7138, 7144, 7146, 7152, 7225, 7226, 7245, 7252, 7231, 7291, 7286, 7283, 7284, 7285, 7352, 7354, 7353, 7361, 7346, 7357, 7356, 7358, 7360, 7375, 7373, 7378, 7379, 7380, 7381, 7544, 7546, 7552, 7577, 7603, 7601, 7580, 7602, 7600, 8526, 8510, 8663, 8664, 8666, 8665, 8653, 8624, 8646, 8654, 8647, 8667, 2418, 8708, 8714, 8735, 7158, 808, 8881, 8845, 8857, 8894, 8892, 8897, 8967, 8968, 8969, 8970, 8972, 8971, 9005, 9011, 9007, 9010, 9008, 9090, 9088, 9089, 9085, 9083, 9084, 9033, 9035, 9036, 9041, 9135, 9122, 4290, 4295, 4296, 9137, 9143, 9144, 9150, 9146, 4297, 9154, 9199, 9303, 9348, 9329, 9330, 9347}; + for (int i : anims) { + byte[] a = from.getIndexes()[20].getFile(i >>> 7, i & 0x7F); + if (a == null) { + continue; + } + // i = 10222;//from.getIndexes()[20].getLastArchiveId() + 1; + System.out.println("Packed animation " + i + " - " + to.getIndexes()[20].putFile(i >>> 7, i & 0x7F, a)); + } + } + + static void packTextures(Store from, Store to) { + for (int i = 0; i < 
from.getIndexes()[9].getValidFilesCount(0); i++) { + byte[] bs = from.getIndexes()[9].getFile(0, i); + if (bs == null || bs.length < 1) { + System.out.println("Missing texture id " + i); + continue; + } + System.out.println("Packing texture id " + i + ": " + to.getIndexes()[9].putFile(0, i, bs));//+ (i < 200 ? Arrays.toString(bs) : null));//to.getIndexes()[6].putFile(0, i, bs)); + } + } + + /** + * Packs the overlays. + * @param from The cache to get the data from. + * @param to The cache to store the data. + */ + static void packOverlays(Store from, Store to) { + System.out.println("Start"); + // int changeOverlay = 135; + // int newOverlay = 135; + // System.out.println("Success = " + to.getIndexes()[2].putFile(4, changeOverlay, from.getIndexes()[2].getFile(4, newOverlay))); + for (int id = 0; id < to.getIndexes()[2].getValidFilesCount(4); id++) { + byte[] bs = to.getIndexes()[2].getFile(4, id); + if (bs == null || bs.length < 1) { + continue; + } + OverlayDefinition def = OverlayDefinition.forId(to, id); + if (def.getTextureId() > 0) { + System.out.println("Packed overlay definition " + id + " - texture=" + def.getTextureId() + "!"); + // boolean success = to.getIndexes()[2].putFile(4, id, from.getIndexes()[2].getFile(4, id)); + // System.out.println("Packed overlay definition " + id + " - success=" + success); + } + } + } + + static void packModels(Store from, Store to) { + int[] models = new int[] {16400}; + for (int model : models) { + byte[] a = from.getIndexes()[7].getFile(model); + if (a == null) { + continue; + } + System.out.println(Arrays.toString(a) + ""); + to.getIndexes()[7].putFile(1046, 0, a); + break; + } + } + + static void packMusic(Store from, Store to) throws Throwable { + for (int i = 0; i < to.getIndexes()[6].getValidArchivesCount(); i++) { + byte[] bs = to.getIndexes()[6].getFile(i); + if (bs == null || bs.length < 1) { + continue; + } + System.out.println("Packing music id " + i + ": ");// + to.getIndexes()[6].putArchive(i, 
from));//.putArchive(2, , from)); + } + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/emperor/MusicPropertiesPacker.java b/Tools/Cache Editor/src/emperor/MusicPropertiesPacker.java new file mode 100644 index 000000000..cf9de3fcb --- /dev/null +++ b/Tools/Cache Editor/src/emperor/MusicPropertiesPacker.java @@ -0,0 +1,406 @@ +package emperor; + +import com.alex.loaders.clientscripts.CS2Mapping; +import com.alex.store.Store; + + +public final class MusicPropertiesPacker { + + /** + * The music zones. + */ + + /** + * Configures the music data. + */ + private static void configureMusic() { + //Don't remove anything from this! + //Also make sure zones don't overlap! + add(40, "brew hoo hoo!", 338, (14747)); + add(53, "chef surprise", 399, (7507)); + add(76, "davy jones' locker", 394, (11924)); + add(102, "etceteria", 227, (10300)); + add(156, "hells bells", 254, (11066)); + add(175, "jolly-r", 65, (11058)); + add(189, "land of the dwarves", 310, (11423)); + add(205, "mad eadgar", 213, (11677)); + add(254, "pharaoh's tomb", 355, (13356), (12105)); + add(259, "pirates of peril", 262, (12093)); + add(304, "spirits of elid", 331, (13461)); + add(318, "subterranea", 362, (10142)); + add(322, "tale of keldagrim", 309, (11678)); + add(323, "talking forest", 119, (10550)); + add(328, "cellar dwellers", 342, (10135)); + add(329, "chosen", 324, (9805)); + add(330, "desert", 120, (12591)); + add(331, "desolate isle", 330, (10042)); + add(333, "far side", 314, (12111)); + add(335, "genie", 332, (13457)); + add(336, "the golem", 295, (13616), (13872)); + add(338, "the lost melody", 315, (13206)); + add(341, "the mad mole", 393, (6992)); + add(342, "monsters below", 329, (9886)); + add(343, "the navigator", 255, (10652)); + add(345, "other side", 278, (14646)); + add(347, "quiz master", 318, (7754)); + add(348, "rogues' den", 313, (11853), (12109)); + add(349, "shadow", 121, (11314)); + add(350, "the slayer", 269, (11164)); + add(351, "terrible tower", 267, 
(13623)); + add(352, "tower", 122, (10292), (10136)); + add(360, "tomorrow", 163, (12081)); + add(361, "too many cooks...", 398, (11930)); + add(413, "zogre dance", 306, (9775)); + add(2, "adventure", 0, (12854)); + add(5, "alone", 2, (12086), (10134)); + add(6, "ambient jungle", 3, (11310)); + add(7, "anywhere", 240, (10795)); + add(11, "arabique", 7, (11417)); + add(12, "army of darkness", 8, (12088)); + add(13, "arrival", 9, (11572)); + add(14, "artistry", 200, (8010)); + add(15, "attack 1", 10, (10034)); + add(16, "attack 2", 11, (11414)); + add(17, "attack 3", 12, (12192)); + add(18, "attack 4", 13, (10289), (10389)); + add(19, "attack 5", 14, (9033)); + add(20, "attack 6", 15, (10387)); + add(21, "attention", 16, (11825)); + add(22, "autumn voyage", 17, (12851)); + add(23, "aye car rum ba", 351, (8527)); + add(24, "aztec", 201, (11157)); + add(25, "background", 18, (11060), (7758)); + add(26, "ballad of enchantment", 19, (10290)); + add(27, "bandit camp", 214, (12590)); + add(28, "barbarianism", 257, (12341), (12441)); + add(29, "barking mad", 274, (14234)); + add(30, "baroque", 20, (10547)); + add(31, "beyond", 21, (11418), (11419)); + add(32, "big chords", 22, (10032)); + add(33, "blistering barnacles", 352, (8528)); + add(34, "body parts", 270, (13979)); + add(35, "bone dance", 183, (13619)); + add(36, "bone dry", 216, (12946)); + add(37, "book of spells", 23, (12593)); + add(38, "borderland", 233, (10809)); + add(39, "breeze", 194, (9010)); + add(42, "bubble and squeak", 347, (7753)); + add(43, "camelot", 24, (11062)); + add(44, "castlewars", 247, (9520)); + add(45, "catch me if you can", 344, (10646)); + add(46, "cave background", 25, (12184), (11929)); + add(47, "cave of beasts", 280, (11165)); + add(48, "cave of the goblins", 304, (12693)); + add(49, "cavern", 26, (12193), (10388)); + add(50, "cellar song", 173, (12697)); + add(51, "chain of command", 27, (10648), (10905)); + add(52, "chamber", 225, (10821), (11078)); + add(54, "chickened out", 395, 
(9796)); + add(55, "chompy hunt", 178, (10542), (10642)); + add(56, "city of the dead", 300, (12843), (13099)); + add(57, "claustrophobia", 291, (9293)); + add(58, "close quarters", 175, (12602)); + add(59, "competition", 217, (8781)); + add(60, "complication", 258, (9035)); + add(61, "contest", 208, (11576)); + add(62, "corporal punishment", 323, (12619)); + add(64, "courage", 260, (11673)); + add(65, "crystal castle", 210, (9011)); + add(66, "crystal cave", 28, (9797)); + add(67, "crystal sword", 29, (12855), (10647)); + add(68, "cursed", 186, (9623)); + add(69, "dagannoth dawn", 365, (7236), (7748)); + add(71, "dance of the undead", 298, (14131)); + add(72, "dangerous road", 263, (11413)); + add(73, "dangerous way", 299, (14231)); + add(74, "dangerous", 30, (12343), (13115)); + add(75, "dark", 31, (13113)); + add(77, "dead can dance", 341, (12601)); + add(78, "dead quiet", 181, (13621), (9294)); + add(79, "deadlands", 230, (14134)); + add(80, "deep down", 224, (10823), (10822)); + add(81, "deep wildy", 32, (11835)); + add(82, "desert heat", 333, (13614)); + add(83, "desert voyage", 33, (13102), (13359)); + add(84, "diango's little helpers", 371, (8005)); + add(86, "distant land", 353, (13873)); + add(89, "doorways", 34, (12598)); + add(90, "down below", 284, (12438)); + add(91, "down to earth", 259, (10571)); + add(92, "dragontooth island", 281, (15159)); + add(93, "dream", 35, (12594)); + add(95, "dunjun", 36, (11672)); + add(96, "dynasty", 275, (13358)); + add(98, "elven mist", 202, (9266)); + add(99, "emotion", 38, (10033), (10309), (10133)); + add(100, "emperor", 39, (11570), (11670)); + add(101, "escape", 176, (10903)); + add(103, "everlasting fire", 417, (13373)); + add(104, "everywhere", 219, (8499)); + add(105, "evil bob's island", 316, (10058)); + add(106, "expanse", 40, (12605), (12852), (12952)); + add(107, "expecting", 41, (9778), (9878)); + add(108, "expedition", 42, (11676)); + add(109, "exposed", 220, (8752)); + add(110, "faerie", 43, (9540)); + 
add(111, "faithless", 265, (12856)); + add(112, "fanfare", 44, (11828)); + add(113, "fanfare 2", 162, (11823)); + add(114, "fanfare 3", 45, (10545)); + add(116, "far away", 292, (9265)); + add(118, "fenkenstrain's refrain", 271, (13879)); + add(119, "fight or flight", 293, (7752)); + add(120, "find my way", 246, (10894)); + add(121, "fire and brimstone", 334, (9552)); + add(122, "fishing", 46, (11317)); + add(123, "flute salad", 47, (12595)); + add(125, "forbidden", 185, (13111)); + add(126, "forest", 203, (9009)); + add(127, "forever", 48, (12342), (12442)); + add(130, "frogland", 336, (9802)); + add(131, "frostbite", 236, (11323)); + add(132, "fruits de mer", 273, (11059)); + add(133, "funny bunnies", 406, (9810)); + add(134, "gaol", 49, (12090), (10031), (10131)); + add(135, "garden", 50, (12853)); + add(136, "gnome king", 51, (9782)); + add(138, "gnome village", 53, (9781)); + add(139, "gnome village 2", 54, (9269)); + add(141, "gnomeball", 56, (9270)); + add(142, "goblin game", 252, (10393)); + add(144, "greatness", 57, (12596)); + add(146, "grotto", 198, (13720)); + add(148, "grumpy", 177, (10286)); + add(151, "harmony 2", 167, (12950)); + add(152, "haunted mine", 222, (11077)); + add(153, "have a blast", 325, (7757)); + add(155, "heart and mind", 174, (10059)); + add(157, "hermit", 191, (9034)); + add(158, "high seas", 59, (11057)); + add(159, "horizon", 60, (11573)); + add(161, "iban", 61, (8519)); + add(162, "ice melody", 165, (11318)); + add(163, "in between", 290, (10061)); + add(164, "in the brine", 370, (14638)); + add(165, "in the clink", 360, (8261)); + add(166, "in the manor", 62, (10287)); + add(167, "in the pits", 335, (9808)); + add(169, "insect queen", 212, (13972)); + add(170, "inspiration", 63, (12087)); + add(171, "into the abyss", 317, (12107)); + add(172, "intrepid", 64, (9369)); + add(173, "island life", 242, (10794)); + add(176, "jungle island", 66, (11313), (11309)); + add(177, "jungle troubles", 343, (11568)); + add(178, "jungly 1", 67, 
(11054), (11154)); + add(179, "jungly 2", 68, (10802)); + add(180, "jungly 3", 69, (11055)); + add(182, "kingdom", 190, (11319)); + add(183, "knightly", 70, (10291)); + add(184, "la mort", 192, (8779)); + add(185, "lair", 229, (13975)); + add(187, "lament", 381, (12433)); + add(190, "landlubber", 169, (10801)); + add(192, "lasting", 71, (10549)); + add(193, "legend", 235, (10808)); + add(194, "legion", 72, (12089), (10039)); + add(196, "lighthouse", 251, (10040)); + add(197, "lightness", 73, (12599)); + add(198, "lightwalk", 74, (11061)); + add(200, "lonesome", 149, (13203)); + add(201, "long ago", 75, (10544)); + add(202, "long way home", 76, (11826)); + add(203, "lost soul", 204, (9008)); + add(204, "lullaby", 77, (13365), (10551)); + add(206, "mage arena", 78, (12349), (10057)); + add(207, "magic dance", 79, (10288)); + add(208, "magical journey", 80, (10805)); + add(209, "making waves", 378, (9273), (9272)); + add(211, "march", 81, (10036)); + add(212, "marooned", 241, (11562), (12117)); + add(213, "marzipan", 211, (11166), (11421)); + add(214, "masquerade", 268, (10908)); + add(216, "mausoleum", 184, (13722)); + add(218, "medieval", 82, (13109)); + add(219, "mellow", 83, (10293)); + add(220, "melodrama", 248, (9776)); + add(221, "meridian", 205, (8497)); + add(223, "miles away", 84, (11571), (10569)); + add(225, "miracle dance", 85, (11083)); + add(226, "mirage", 303, (13199)); + add(227, "miscellania", 226, (10044)); + add(228, "monarch waltz", 86, (10807)); + add(229, "monkey madness", 239, (11051)); + add(230, "monster melee", 272, (12694)); + add(231, "moody", 87, (12600), (9523)); + add(232, "morytania", 180, (13622)); + add(233, "mudskipper melody", 361, (11824)); + add(234, "narnode's theme", 513, (9882)); + add(235, "natural", 197, (13620), (9038)); + add(236, "neverland", 88, (9780)); + add(239, "nightfall", 90, (12861), (11827)); + add(241, "no way out", 403, (13209), (12369), (12113)); + add(242, "nomad", 171, (11056)); + add(243, "null and void", 
400, (10537)); + add(245, "oriental", 91, (11666)); + add(246, "out of the deep", 253, (10140)); + add(247, "over to nardah", 328, (13613)); + add(248, "overpass", 207, (9267)); + add(249, "overture", 92, (10806)); + add(250, "parade", 93, (13110)); + add(251, "path of peril", 307, (10575)); + add(253, "pest control", 401, (10536)); + add(255, "phasmatys", 277, (14746)); + add(256, "pheasant peasant", 321, (10314)); + add(258, "principality", 188, (11575)); + add(260, "quest", 94, (10315)); + add(261, "rat a tat tat", 345, (11599)); + add(262, "rat hunt", 349, (11343)); + add(263, "ready for battle", 249, (9620)); + add(264, "regal", 95, (13117)); + add(265, "reggae", 96, (11565)); + add(266, "reggae 2", 97, (11567)); + add(267, "rellekka", 231, (10297)); + add(269, "righteousness", 223, (9803)); + add(270, "riverside", 98, (10803), (8496)); + add(272, "romancing the crone", 264, (11068)); + add(273, "romper chomper", 312, (9263)); + add(274, "royale", 99, (11671)); + add(275, "rune essence", 100, (11595)); + add(276, "sad meadow", 101, (10035), (11081)); + add(277, "saga", 232, (10296)); + add(278, "sarcophagus", 283, (12945)); + add(279, "sarim's vermin", 348, (11926)); + add(280, "scape cave", 102, (12698), (12437)); + add(283, "scape sad", 104, (13116)); + add(286, "scape soft", 159, (11829)); + add(287, "scape wild", 105, (12857), (12604)); + add(288, "scarab", 282, (12589)); + add(290, "sea shanty", 106, (11569)); + add(289, "sea shanty 2", 107, (12082)); + add(291, "serenade", 108, (9521)); + add(292, "serene", 109, (11837), (11936), (11339)); + add(293, "settlement", 279, (11065)); + add(294, "shadowland", 228, (13618), (13875), (8526)); + add(296, "shining", 160, (12858)); + add(297, "shipwrecked", 276, (14391)); + add(298, "showdown", 245, (10895)); + add(300, "sojourn", 209, (11321)); + add(301, "soundscape", 111, (9774)); + add(302, "sphinx", 302, (13100)); + add(303, "spirit", 112, (12597)); + add(305, "splendour", 113, (11574)); + add(306, "spooky 
jungle", 115, (11053), (11668)); + add(307, "spooky", 114, (12340)); + add(308, "spooky 2", 218, (13718)); + add(309, "stagnant", 193, (13876), (8782)); + add(310, "starlight", 116, (11925), (12949)); + add(311, "start", 117, (12339)); + add(312, "still night", 118, (13108)); + add(313, "stillness", 250, (13977)); + add(314, "stranded", 234, (11322)); + add(316, "stratosphere", 195, (8523)); + add(319, "sunburn", 215, (12846), (13357)); + add(320, "superstition", 261, (11153)); + add(324, "tears of guthix", 311, (12948)); + add(325, "technology", 238, (10310)); + add(326, "temple of light", 294, (7496)); + add(327, "temple", 243, (11151)); + add(353, "theme", 123, (10294), (10138)); + add(355, "time out", 196, (11591)); + add(356, "time to mine", 289, (11422)); + add(357, "tiptoe", 266, (12440)); +// add(358, "title fight", 367, (12696)); + add(362, "trawler minor", 125, (7755)); + add(363, "trawler", 124, (7499)); + add(364, "tree spirits", 126, (9268)); + add(365, "tremble", 189, (11320)); + add(367, "tribal background", 127, (11312), (11412)); + add(368, "tribal", 128, (11311)); + add(366, "tribal 2", 129, (11566)); + add(369, "trinity", 130, (10804), (10904)); + add(371, "troubled", 131, (11833)); + add(372, "twilight", 179, (10906)); + add(373, "tzhaar!", 339, (9551)); + add(374, "undercurrent", 170, (12345)); + add(376, "underground pass", 134, (9621)); + add(375, "underground", 132, (13368), (11416)); + add(377, "understanding", 187, (9547)); + add(378, "unknown land", 133, (12338)); + add(379, "upcoming", 135, (10546)); + add(380, "venture", 136, (13364)); + add(381, "venture 2", 168, (13464)); + add(382, "victory is mine", 368, (12696)); + add(383, "village", 182, (13878)); + add(384, "vision", 137, (12337), (12436)); + add(385, "voodoo cult", 138, (9545), (11665)); + add(386, "voyage", 139, (10038)); + add(388, "wander", 140, (12083)); + add(389, "warrior", 237, (10653)); + add(391, "waterfall", 141, (10037), (10137)); + add(392, "waterlogged", 199, 
(13877), (8014)); + add(394, "wayward", 308, (9875)); + add(396, "well of voyage", 221, (9366)); + add(397, "wild side", 340, (12092)); + add(398, "wilderness", 142, (11832), (12346)); + add(399, "wilderness 2", 143, (12091)); + add(400, "wilderness 3", 144, (11834)); + add(401, "wildwood", 256, (12344)); + add(402, "witching", 145, (13114)); + add(403, "woe of the wyvern", 369, (12181)); + add(405, "wonder", 146, (11831)); + add(406, "wonderous", 147, (10548)); + add(407, "woodland", 206, (8498)); + add(408, "workshop", 148, (12084)); + add(410, "xenophobe", 366, (7492), (11589)); + add(411, "yesteryear", 161, (12849)); + add(412, "zealot", 172, (10827)); + + //Al kharid/desert + add(3, "al kharid", 1, (13105), (13361)); + add(8, "arabian2", 5, (13107)); + add(9, "arabian3", 6, (12848)); + add(10, "arabian", 4, (13106), (13617)); + add(94, "duel arena", 164, (13362)); + add(295, "shine", 110, (13363)); + add(97, "egypt", 37, (13104)); + //Brimhaven + add(1, "7th realm", 285, (10645), (10644)); + add(181, "karamja jam", 286, (10900), (10899)); + add(252, "pathways", 287, (10901)); + //Tutorial island +// add(237, "newbie melody", 89, new ZoneBorders(3052, 3055, 3155, 3135)); + //Lumbridge + add(150, "harmony", 58, (12850)); + } + + static CS2Mapping indexes; + static CS2Mapping ids; + /** + * Adds a new music entry. + * @param musicId The music id. + * @param name The song name. + * @param index The list index. + * @param borders The zone borders. + */ + private static void add(int musicId, String name, int index, int... regions) { + String n = (String) indexes.getMap().get(index); + System.out.print("add(" + ids.getMap().get(index) + ", \"" + n + "\", " + index); + for (int id : regions) { + System.out.print(", forRegion(" + id + ")"); + } + System.out.println(");"); + } + + /** + * The main method. + * @param args The arguments cast on runtime. + * @throws Throwable When an exception occurs. 
+ */ + public static void main(String[] args) throws Throwable { + Store store = new Store("./666/"); + indexes = CS2Mapping.forId(1345, store); + ids = CS2Mapping.forId(1351, store); + configureMusic(); + } +} diff --git a/Tools/Cache Editor/src/emperor/ObjectMap.java b/Tools/Cache Editor/src/emperor/ObjectMap.java new file mode 100644 index 000000000..d7ad5c455 --- /dev/null +++ b/Tools/Cache Editor/src/emperor/ObjectMap.java @@ -0,0 +1,207 @@ +package emperor; + +import java.util.ArrayList; +import java.util.List; +import java.util.PriorityQueue; +import java.util.Queue; + +import com.alex.io.InputStream; +import com.alex.io.OutputStream; + +/** + * Represents an object map. + * @author Emperor + * + */ +public final class ObjectMap { + + private List objects = new ArrayList<>(); + + public void add(int id, int x, int y, int z, int type, int rotation) { + objects.add(new GameObject(id, x, y, z, type, rotation)); + } + + public GameObject get(GameObject object) { + return get(object.id, object.loc.x, object.loc.y, object.loc.z, object.type, object.rotation); + } + + public GameObject get(int id, int x, int y, int z, int type, int rotation) { + for (GameObject object : objects) { + Location loc = object.loc; + if (object.id == id && loc.x == x && loc.y == y && loc.z == z && object.type == type && object.rotation == rotation) { + return object; + } + } + return null; + } + + public List getObjects() { + return objects; + } + + public static void compare(ObjectMap map, ObjectMap m) { + if (map.objects.size() != m.objects.size()) { + System.err.println("Mismatch [s1=" + map.objects.size() + ", s2=" + m.objects.size() + "]!"); + return; + } + Queue queue1 = new PriorityQueue<>(map.objects); + Queue queue2 = new PriorityQueue<>(m.objects); + while (!queue1.isEmpty()) { + int id = queue1.peek().id; + int id1 = queue2.peek().id; + if (id != id1) { + System.err.println("Object id mismatch [o1=" + id + ", o2=" + id1 + "]!"); + return; + } + Queue entry = new 
PriorityQueue<>(); + Queue entry1 = new PriorityQueue<>(); + while (!queue1.isEmpty() && (queue1.peek().id == id)) { + entry.add(new QueueEntry(queue1.poll())); + } + while (!queue2.isEmpty() && (queue2.peek().id == id)) { + entry1.add(new QueueEntry(queue2.poll())); + } + if (entry.size() != entry1.size()) { + System.err.println("Entry mismatch [s1=" + entry.size() + ", s2=" + entry1.size() + "]!"); + return; + } + while (!entry.isEmpty()) { + GameObject object = entry.poll().object; + GameObject object1 = entry1.poll().object; + if (object.loc.getHash() != object1.loc.getHash()) { + System.err.println("Location mismatch " + id + "!"); + return; + } + if (object.rotation != object1.rotation) { + System.err.println("Rotation mismatch " + id + "!"); + return; + } + if (object.type != object1.type) { + System.err.println("Type mismatch " + id + "!"); + return; + } + } + } + System.out.println("Matching object maps [s1=" + map.objects.size() + ", s2=" + m.objects.size() + "]!"); + } + + public void map(InputStream stream) { + int objectId = -1; + for (;;) { + int offset = stream.readSmart2(); + if (offset == 0) { + break; + } + objectId += offset; + int location = 0; + for (;;) { + offset = stream.readUnsignedSmart(); + if (offset == 0) { + break; + } + location += offset - 1; + int y = location & 0x3f; + int x = location >> 6 & 0x3f; + int configuration = stream.readUnsignedByte(); + int rotation = configuration & 0x3; + int type = configuration >> 2; + int z = location >> 12; + if (x >= 0 && y >= 0 && x < 64 && y < 64) { + add(objectId, x, y, z, type, rotation); + } else { + System.out.println("Object out of bounds: " + objectId + " - " + x + ", " + y + ", " + z); + } + } + } + } + + public byte[] generate() { + OutputStream stream = new OutputStream(); + PriorityQueue queue = new PriorityQueue<>(objects); + int offset = -1; + while (!queue.isEmpty()) { + int id = queue.peek().id; + Queue entry = new PriorityQueue<>(); + while (!queue.isEmpty() && (queue.peek().id 
== id)) { + entry.add(new QueueEntry(queue.poll())); + } + stream.writeSmart2(id - offset); + int location = 0; + while (!entry.isEmpty()) { + GameObject object = entry.poll().object; + stream.writeSmart(1 + (object.loc.getHash() - location)); + stream.writeByte(object.rotation | object.type << 2); + location = object.loc.getHash(); + } + stream.writeSmart(0); + offset = id; + } + stream.writeSmart2(0); + byte[] bs = new byte[stream.getOffset()]; + for (int i = 0; i < stream.getOffset(); i++) { + bs[i] = stream.getBuffer()[i]; + } + return bs; + } + + public static class GameObject implements Comparable { + int id; + Location loc; + int type; + int rotation; + + public GameObject(int id, int x, int y, int z, int type, int rotation) { + this.id = id; + this.loc = new Location(x, y, z); + this.type = type; + this.rotation = rotation; + } + + public GameObject getLocal() { + return new GameObject(id, loc.getRegionX(), loc.getRegionY(), loc.z, type, rotation); + } + + @Override + public int compareTo(GameObject o) { + return id - o.id; + } + + @Override + public String toString() { + return id + ", " + type + ", " + rotation; + } + } + + public static class QueueEntry implements Comparable { + GameObject object; + public QueueEntry(GameObject object) { + this.object = object; + } + + @Override + public int compareTo(QueueEntry o) { + return object.loc.getHash() - o.object.loc.getHash(); + } + } + public static class Location { + int x; + int y; + int z; + public Location(int x, int y, int z) { + this.x = x; + this.y = y; + this.z = z; + } + + public int getRegionX() { + return x - ((x >> 6) << 6); + } + + public int getRegionY() { + return y - ((y >> 6) << 6); + } + public int getHash() { + return z << 12 | x << 6 | y; + } + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockInputStream.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockInputStream.java new file mode 100644 index 000000000..09b266d87 --- /dev/null +++ 
b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockInputStream.java @@ -0,0 +1,247 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.lz4.LZ4BlockOutputStream.COMPRESSION_LEVEL_BASE; +import static net.jpountz.lz4.LZ4BlockOutputStream.COMPRESSION_METHOD_LZ4; +import static net.jpountz.lz4.LZ4BlockOutputStream.COMPRESSION_METHOD_RAW; +import static net.jpountz.lz4.LZ4BlockOutputStream.DEFAULT_SEED; +import static net.jpountz.lz4.LZ4BlockOutputStream.HEADER_LENGTH; +import static net.jpountz.lz4.LZ4BlockOutputStream.MAGIC; +import static net.jpountz.lz4.LZ4BlockOutputStream.MAGIC_LENGTH; + +import java.io.EOFException; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.zip.Checksum; + +import net.jpountz.util.Utils; +import net.jpountz.xxhash.StreamingXXHash32; +import net.jpountz.xxhash.XXHash32; +import net.jpountz.xxhash.XXHashFactory; + +/** + * {@link InputStream} implementation to decode data written with + * {@link LZ4BlockOutputStream}. This class is not thread-safe and does not + * support {@link #mark(int)}/{@link #reset()}. 
+ * @see LZ4BlockOutputStream + */ +public final class LZ4BlockInputStream extends FilterInputStream { + + private final LZ4FastDecompressor decompressor; + private final Checksum checksum; + private byte[] buffer; + private byte[] compressedBuffer; + private int originalLen; + private int o; + private boolean finished; + + /** + * Create a new {@link InputStream}. + * + * @param in the {@link InputStream} to poll + * @param decompressor the {@link LZ4FastDecompressor decompressor} instance to + * use + * @param checksum the {@link Checksum} instance to use, must be + * equivalent to the instance which has been used to + * write the stream + */ + public LZ4BlockInputStream(InputStream in, LZ4FastDecompressor decompressor, Checksum checksum) { + super(in); + this.decompressor = decompressor; + this.checksum = checksum; + this.buffer = new byte[0]; + this.compressedBuffer = new byte[HEADER_LENGTH]; + o = originalLen = 0; + finished = false; + } + + /** + * Create a new instance using {@link XXHash32} for checksuming. + * @see #LZ4BlockInputStream(InputStream, LZ4FastDecompressor, Checksum) + * @see StreamingXXHash32#asChecksum() + */ + public LZ4BlockInputStream(InputStream in, LZ4FastDecompressor decompressor) { + this(in, decompressor, XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum()); + } + + /** + * Create a new instance which uses the fastest {@link LZ4FastDecompressor} available. 
+ * @see LZ4Factory#fastestInstance() + * @see #LZ4BlockInputStream(InputStream, LZ4FastDecompressor) + */ + public LZ4BlockInputStream(InputStream in) { + this(in, LZ4Factory.fastestInstance().fastDecompressor()); + } + + @Override + public int available() throws IOException { + return originalLen - o; + } + + @Override + public int read() throws IOException { + if (finished) { + return -1; + } + if (o == originalLen) { + refill(); + } + if (finished) { + return -1; + } + return buffer[o++] & 0xFF; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + Utils.checkRange(b, off, len); + if (finished) { + return -1; + } + if (o == originalLen) { + refill(); + } + if (finished) { + return -1; + } + len = Math.min(len, originalLen - o); + System.arraycopy(buffer, o, b, off, len); + o += len; + return len; + } + + @Override + public int read(byte[] b) throws IOException { + return read(b, 0, b.length); + } + + @Override + public long skip(long n) throws IOException { + if (finished) { + return -1; + } + if (o == originalLen) { + refill(); + } + if (finished) { + return -1; + } + final int skipped = (int) Math.min(n, originalLen - o); + o += skipped; + return skipped; + } + + private void refill() throws IOException { + readFully(compressedBuffer, HEADER_LENGTH); + for (int i = 0; i < MAGIC_LENGTH; ++i) { + if (compressedBuffer[i] != MAGIC[i]) { + throw new IOException("Stream is corrupted"); + } + } + final int token = compressedBuffer[MAGIC_LENGTH] & 0xFF; + final int compressionMethod = token & 0xF0; + final int compressionLevel = COMPRESSION_LEVEL_BASE + (token & 0x0F); + if (compressionMethod != COMPRESSION_METHOD_RAW && compressionMethod != COMPRESSION_METHOD_LZ4) { + throw new IOException("Stream is corrupted"); + } + final int compressedLen = Utils.readIntLE(compressedBuffer, MAGIC_LENGTH + 1); + originalLen = Utils.readIntLE(compressedBuffer, MAGIC_LENGTH + 5); + final int check = Utils.readIntLE(compressedBuffer, MAGIC_LENGTH + 
9); + assert HEADER_LENGTH == MAGIC_LENGTH + 13; + if (originalLen > 1 << compressionLevel + || originalLen < 0 + || compressedLen < 0 + || (originalLen == 0 && compressedLen != 0) + || (originalLen != 0 && compressedLen == 0) + || (compressionMethod == COMPRESSION_METHOD_RAW && originalLen != compressedLen)) { + throw new IOException("Stream is corrupted"); + } + if (originalLen == 0 && compressedLen == 0) { + if (check != 0) { + throw new IOException("Stream is corrupted"); + } + finished = true; + return; + } + if (buffer.length < originalLen) { + buffer = new byte[Math.max(originalLen, buffer.length * 3 / 2)]; + } + switch (compressionMethod) { + case COMPRESSION_METHOD_RAW: + readFully(buffer, originalLen); + break; + case COMPRESSION_METHOD_LZ4: + if (compressedBuffer.length < originalLen) { + compressedBuffer = new byte[Math.max(compressedLen, compressedBuffer.length * 3 / 2)]; + } + readFully(compressedBuffer, compressedLen); + try { + final int compressedLen2 = decompressor.decompress(compressedBuffer, 0, buffer, 0, originalLen); + if (compressedLen != compressedLen2) { + throw new IOException("Stream is corrupted"); + } + } catch (LZ4Exception e) { + throw new IOException("Stream is corrupted", e); + } + break; + default: + throw new AssertionError(); + } + checksum.reset(); + checksum.update(buffer, 0, originalLen); + if ((int) checksum.getValue() != check) { + throw new IOException("Stream is corrupted"); + } + o = 0; + } + + private void readFully(byte[] b, int len) throws IOException { + int read = 0; + while (read < len) { + final int r = in.read(b, read, len - read); + if (r < 0) { + throw new EOFException("Stream ended prematurely"); + } + read += r; + } + assert len == read; + } + + @Override + public boolean markSupported() { + return false; + } + + @SuppressWarnings("sync-override") + @Override + public void mark(int readlimit) { + // unsupported + } + + @SuppressWarnings("sync-override") + @Override + public void reset() throws IOException { + 
throw new IOException("mark/reset not supported"); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(in=" + in + + ", decompressor=" + decompressor + ", checksum=" + checksum + ")"; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockOutputStream.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockOutputStream.java new file mode 100644 index 000000000..c0f0800dc --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4BlockOutputStream.java @@ -0,0 +1,257 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.zip.Checksum; + +import net.jpountz.util.Utils; +import net.jpountz.xxhash.StreamingXXHash32; +import net.jpountz.xxhash.XXHashFactory; + +/** + * Streaming LZ4. + *

+ * This class compresses data into fixed-size blocks of compressed data. + * @see LZ4BlockInputStream + */ +public final class LZ4BlockOutputStream extends FilterOutputStream { + + static final byte[] MAGIC = new byte[] { 'L', 'Z', '4', 'B', 'l', 'o', 'c', 'k' }; + static final int MAGIC_LENGTH = MAGIC.length; + + static final int HEADER_LENGTH = + MAGIC_LENGTH // magic bytes + + 1 // token + + 4 // compressed length + + 4 // decompressed length + + 4; // checksum + + static final int COMPRESSION_LEVEL_BASE = 10; + static final int MIN_BLOCK_SIZE = 64; + static final int MAX_BLOCK_SIZE = 1 << (COMPRESSION_LEVEL_BASE + 0x0F); + + static final int COMPRESSION_METHOD_RAW = 0x10; + static final int COMPRESSION_METHOD_LZ4 = 0x20; + + static final int DEFAULT_SEED = 0x9747b28c; + + private static int compressionLevel(int blockSize) { + if (blockSize < MIN_BLOCK_SIZE) { + throw new IllegalArgumentException("blockSize must be >= " + MIN_BLOCK_SIZE + ", got " + blockSize); + } else if (blockSize > MAX_BLOCK_SIZE) { + throw new IllegalArgumentException("blockSize must be <= " + MAX_BLOCK_SIZE + ", got " + blockSize); + } + int compressionLevel = 32 - Integer.numberOfLeadingZeros(blockSize - 1); // ceil of log2 + assert (1 << compressionLevel) >= blockSize; + assert blockSize * 2 > (1 << compressionLevel); + compressionLevel = Math.max(0, compressionLevel - COMPRESSION_LEVEL_BASE); + assert compressionLevel >= 0 && compressionLevel <= 0x0F; + return compressionLevel; + } + + private final int blockSize; + private final int compressionLevel; + private final LZ4Compressor compressor; + private final Checksum checksum; + private final byte[] buffer; + private final byte[] compressedBuffer; + private final boolean syncFlush; + private boolean finished; + private int o; + + /** + * Create a new {@link OutputStream} with configurable block size. Large + * blocks require more memory at compression and decompression time but + * should improve the compression ratio. 
+ * + * @param out the {@link OutputStream} to feed + * @param blockSize the maximum number of bytes to try to compress at once, + * must be >= 64 and <= 32 M + * @param compressor the {@link LZ4Compressor} instance to use to compress + * data + * @param checksum the {@link Checksum} instance to use to check data for + * integrity. + * @param syncFlush true if pending data should also be flushed on {@link #flush()} + */ + public LZ4BlockOutputStream(OutputStream out, int blockSize, LZ4Compressor compressor, Checksum checksum, boolean syncFlush) { + super(out); + this.blockSize = blockSize; + this.compressor = compressor; + this.checksum = checksum; + this.compressionLevel = compressionLevel(blockSize); + this.buffer = new byte[blockSize]; + final int compressedBlockSize = HEADER_LENGTH + compressor.maxCompressedLength(blockSize); + this.compressedBuffer = new byte[compressedBlockSize]; + this.syncFlush = syncFlush; + o = 0; + finished = false; + System.arraycopy(MAGIC, 0, compressedBuffer, 0, MAGIC_LENGTH); + } + + /** + * Create a new instance which checks stream integrity using + * {@link StreamingXXHash32} and doesn't sync flush. + * @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor, Checksum, boolean) + * @see StreamingXXHash32#asChecksum() + */ + public LZ4BlockOutputStream(OutputStream out, int blockSize, LZ4Compressor compressor) { + this(out, blockSize, compressor, XXHashFactory.fastestInstance().newStreamingHash32(DEFAULT_SEED).asChecksum(), false); + } + + /** + * Create a new instance which compresses with the standard LZ4 compression + * algorithm. + * @see #LZ4BlockOutputStream(OutputStream, int, LZ4Compressor) + * @see LZ4Factory#fastCompressor() + */ + public LZ4BlockOutputStream(OutputStream out, int blockSize) { + this(out, blockSize, LZ4Factory.fastestInstance().fastCompressor()); + } + + /** + * Create a new instance which compresses into blocks of 64 KB. 
+ * @see #LZ4BlockOutputStream(OutputStream, int) + */ + public LZ4BlockOutputStream(OutputStream out) { + this(out, 1 << 16); + } + + private void ensureNotFinished() { + if (finished) { + throw new IllegalStateException("This stream is already closed"); + } + } + + @Override + public void write(int b) throws IOException { + ensureNotFinished(); + if (o == blockSize) { + flushBufferedData(); + } + buffer[o++] = (byte) b; + } + + @Override + public void write(byte[] b, int off, int len) throws IOException { + Utils.checkRange(b, off, len); + ensureNotFinished(); + + while (o + len > blockSize) { + final int l = blockSize - o; + System.arraycopy(b, off, buffer, o, blockSize - o); + o = blockSize; + flushBufferedData(); + off += l; + len -= l; + } + System.arraycopy(b, off, buffer, o, len); + o += len; + } + + @Override + public void write(byte[] b) throws IOException { + ensureNotFinished(); + write(b, 0, b.length); + } + + @Override + public void close() throws IOException { + if (!finished) { + finish(); + } + if (out != null) { + out.close(); + out = null; + } + } + + private void flushBufferedData() throws IOException { + if (o == 0) { + return; + } + checksum.reset(); + checksum.update(buffer, 0, o); + final int check = (int) checksum.getValue(); + int compressedLength = compressor.compress(buffer, 0, o, compressedBuffer, HEADER_LENGTH); + final int compressMethod; + if (compressedLength >= o) { + compressMethod = COMPRESSION_METHOD_RAW; + compressedLength = o; + System.arraycopy(buffer, 0, compressedBuffer, HEADER_LENGTH, o); + } else { + compressMethod = COMPRESSION_METHOD_LZ4; + } + + compressedBuffer[MAGIC_LENGTH] = (byte) (compressMethod | compressionLevel); + writeIntLE(compressedLength, compressedBuffer, MAGIC_LENGTH + 1); + writeIntLE(o, compressedBuffer, MAGIC_LENGTH + 5); + writeIntLE(check, compressedBuffer, MAGIC_LENGTH + 9); + assert MAGIC_LENGTH + 13 == HEADER_LENGTH; + out.write(compressedBuffer, 0, HEADER_LENGTH + compressedLength); + o = 0; + } 
+ + /** + * Flush this compressed {@link OutputStream}. + * + * If the stream has been created with syncFlush=true, pending + * data will be compressed and appended to the underlying {@link OutputStream} + * before calling {@link OutputStream#flush()} on the underlying stream. + * Otherwise, this method just flushes the underlying stream, so pending + * data might not be available for reading until {@link #finish()} or + * {@link #close()} is called. + */ + @Override + public void flush() throws IOException { + if (syncFlush) { + flushBufferedData(); + } + out.flush(); + } + + /** + * Same as {@link #close()} except that it doesn't close the underlying stream. + * This can be useful if you want to keep on using the underlying stream. + */ + public void finish() throws IOException { + ensureNotFinished(); + flushBufferedData(); + compressedBuffer[MAGIC_LENGTH] = (byte) (COMPRESSION_METHOD_RAW | compressionLevel); + writeIntLE(0, compressedBuffer, MAGIC_LENGTH + 1); + writeIntLE(0, compressedBuffer, MAGIC_LENGTH + 5); + writeIntLE(0, compressedBuffer, MAGIC_LENGTH + 9); + assert MAGIC_LENGTH + 13 == HEADER_LENGTH; + out.write(compressedBuffer, 0, HEADER_LENGTH); + finished = true; + out.flush(); + } + + private static void writeIntLE(int i, byte[] buf, int off) { + buf[off++] = (byte) i; + buf[off++] = (byte) (i >>> 8); + buf[off++] = (byte) (i >>> 16); + buf[off++] = (byte) (i >>> 24); + } + + @Override + public String toString() { + return getClass().getSimpleName() + "(out=" + out + ", blockSize=" + blockSize + + ", compressor=" + compressor + ", checksum=" + checksum + ")"; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Compressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Compressor.java new file mode 100644 index 000000000..53fc7764a --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Compressor.java @@ -0,0 +1,98 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may 
not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.Arrays; + +/** + * LZ4 compressor. + *

+ * Instances of this class are thread-safe. + */ +public abstract class LZ4Compressor { + + /** Return the maximum compressed length for an input of size length. */ + @SuppressWarnings("static-method") + public final int maxCompressedLength(int length) { + return LZ4Utils.maxCompressedLength(length); + } + + /** + * Compress src[srcOff:srcOff+srcLen] into + * dest[destOff:destOff+destLen] and return the compressed + * length. + * + * This method will throw a {@link LZ4Exception} if this compressor is unable + * to compress the input into less than maxDestLen bytes. To + * prevent this exception to be thrown, you should make sure that + * maxDestLen >= maxCompressedLength(srcLen). + * + * @throws LZ4Exception if maxDestLen is too small + * @return the compressed size + */ + public abstract int compress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + + /** + * Convenience method, equivalent to calling + * {@link #compress(byte[], int, int, byte[], int, int) compress(src, srcOff, srcLen, dest, destOff, dest.length - destOff)}. + */ + public final int compress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff) { + return compress(src, srcOff, srcLen, dest, destOff, dest.length - destOff); + } + + /** + * Convenience method, equivalent to calling + * {@link #compress(byte[], int, int, byte[], int) compress(src, 0, src.length, dest, 0)}. + */ + public final int compress(byte[] src, byte[] dest) { + return compress(src, 0, src.length, dest, 0); + } + + /** + * Convenience method which returns src[srcOff:srcOff+srcLen] + * compressed. + *

Warning: this method has an + * important overhead due to the fact that it needs to allocate a buffer to + * compress into, and then needs to resize this buffer to the actual + * compressed length.

+ *

Here is how this method is implemented:

+ *
+   * final int maxCompressedLength = maxCompressedLength(srcLen);
+   * final byte[] compressed = new byte[maxCompressedLength];
+   * final int compressedLength = compress(src, srcOff, srcLen, compressed, 0);
+   * return Arrays.copyOf(compressed, compressedLength);
+   * 
+ */ + public final byte[] compress(byte[] src, int srcOff, int srcLen) { + final int maxCompressedLength = maxCompressedLength(srcLen); + final byte[] compressed = new byte[maxCompressedLength]; + final int compressedLength = compress(src, srcOff, srcLen, compressed, 0); + return Arrays.copyOf(compressed, compressedLength); + } + + /** + * Convenience method, equivalent to calling + * {@link #compress(byte[], int, int) compress(src, 0, src.length)}. + */ + public final byte[] compress(byte[] src) { + return compress(src, 0, src.length); + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Constants.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Constants.java new file mode 100644 index 000000000..6e642b63b --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Constants.java @@ -0,0 +1,50 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + +enum LZ4Constants { + ; + + static final int MEMORY_USAGE = 14; + static final int NOT_COMPRESSIBLE_DETECTION_LEVEL = 6; + + static final int MIN_MATCH = 4; + + static final int HASH_LOG = MEMORY_USAGE - 2; + static final int HASH_TABLE_SIZE = 1 << HASH_LOG; + + static final int SKIP_STRENGTH = Math.max(NOT_COMPRESSIBLE_DETECTION_LEVEL, 2); + static final int COPY_LENGTH = 8; + static final int LAST_LITERALS = 5; + static final int MF_LIMIT = COPY_LENGTH + MIN_MATCH; + static final int MIN_LENGTH = MF_LIMIT + 1; + + static final int MAX_DISTANCE = 1 << 16; + + static final int ML_BITS = 4; + static final int ML_MASK = (1 << ML_BITS) - 1; + static final int RUN_BITS = 8 - ML_BITS; + static final int RUN_MASK = (1 << RUN_BITS) - 1; + + static final int LZ4_64K_LIMIT = (1 << 16) + (MF_LIMIT - 1); + static final int HASH_LOG_64K = HASH_LOG + 1; + static final int HASH_TABLE_SIZE_64K = 1 << HASH_LOG_64K; + + static final int HASH_LOG_HC = 15; + static final int HASH_TABLE_SIZE_HC = 1 << HASH_LOG_HC; + static final int OPTIMAL_ML = ML_MASK - 1 + MIN_MATCH; + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Decompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Decompressor.java new file mode 100644 index 000000000..6b2c18335 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Decompressor.java @@ -0,0 +1,25 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/** + * @deprecated Use {@link LZ4FastDecompressor} instead. + */ +@Deprecated +public interface LZ4Decompressor { + + int decompress(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Exception.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Exception.java new file mode 100644 index 000000000..cb45c0aa3 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Exception.java @@ -0,0 +1,36 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * LZ4 compression or decompression error. + */ +public class LZ4Exception extends RuntimeException { + + private static final long serialVersionUID = 1L; + + public LZ4Exception(String msg, Throwable t) { + super(msg, t); + } + + public LZ4Exception(String msg) { + super(msg); + } + + public LZ4Exception() { + super(); + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Factory.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Factory.java new file mode 100644 index 000000000..223c95841 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Factory.java @@ -0,0 +1,222 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.lang.reflect.Field; +import java.util.Arrays; + +import net.jpountz.util.Native; + +/** + * Entry point for the LZ4 API. + *

+ * This class has 3 instances

    + *
  • a {@link #nativeInstance() native} instance which is a JNI binding to + * the original LZ4 C implementation. + *
  • a {@link #safeInstance() safe Java} instance which is a pure Java port + * of the original C library,
  • + *
  • an {@link #unsafeInstance() unsafe Java} instance which is a Java port + * using the unofficial {@link sun.misc.Unsafe} API. + *
+ *

+ * Only the {@link #safeInstance() safe instance} is guaranteed to work on your + * JVM, as a consequence it is advised to use the {@link #fastestInstance()} or + * {@link #fastestJavaInstance()} to pull a {@link LZ4Factory} instance. + *

+ * All methods from this class are very costly, so you should get an instance + * once, and then reuse it whenever possible. This is typically done by storing + * a {@link LZ4Factory} instance in a static field. + */ +public final class LZ4Factory { + + private static LZ4Factory instance(String impl) { + try { + return new LZ4Factory(impl); + } catch (Exception e) { + throw new AssertionError(e); + } + } + + private static LZ4Factory NATIVE_INSTANCE, + JAVA_UNSAFE_INSTANCE, + JAVA_SAFE_INSTANCE; + + /** + * Return a {@link LZ4Factory} instance that returns compressors and + * decompressors that are native bindings to the original C library. + *

+ * Please note that this instance has some traps you should be aware of:

    + *
  1. Upon loading this instance, files will be written to the temporary + * directory of the system. Although these files are supposed to be deleted + * when the JVM exits, they might remain on systems that don't support + * removal of files being used such as Windows. + *
  2. The instance can only be loaded once per JVM. This can be a problem + * if your application uses multiple class loaders (such as most servlet + * containers): this instance will only be available to the children of the + * class loader which has loaded it. As a consequence, it is advised to + * either not use this instance in webapps or to put this library in the lib + * directory of your servlet container so that it is loaded by the system + * class loader. + *
+ */ + public static synchronized LZ4Factory nativeInstance() { + if (NATIVE_INSTANCE == null) { + NATIVE_INSTANCE = instance("JNI"); + } + return NATIVE_INSTANCE; + } + + /** Return a {@link LZ4Factory} instance that returns compressors and + * decompressors that are written with Java's official API. */ + public static synchronized LZ4Factory safeInstance() { + if (JAVA_SAFE_INSTANCE == null) { + JAVA_SAFE_INSTANCE = instance("JavaSafe"); + } + return JAVA_SAFE_INSTANCE; + } + + /** Return a {@link LZ4Factory} instance that returns compressors and + * decompressors that may use {@link sun.misc.Unsafe} to speed up compression + * and decompression. */ + public static synchronized LZ4Factory unsafeInstance() { + if (JAVA_UNSAFE_INSTANCE == null) { + JAVA_UNSAFE_INSTANCE = instance("JavaUnsafe"); + } + return JAVA_UNSAFE_INSTANCE; + } + + /** + * Return the fastest available {@link LZ4Factory} instance which does not + * rely on JNI bindings. It first tries to load the + * {@link #unsafeInstance() unsafe instance}, and then the + * {@link #safeInstance() safe Java instance} if the JVM doesn't have a + * working {@link sun.misc.Unsafe}. + */ + public static LZ4Factory fastestJavaInstance() { + try { + return unsafeInstance(); + } catch (Throwable t) { + return safeInstance(); + } + } + + /** + * Return the fastest available {@link LZ4Factory} instance. If the class + * loader is the system class loader and if the + * {@link #nativeInstance() native instance} loads successfully, then the + * {@link #nativeInstance() native instance} is returned, otherwise the + * {@link #fastestJavaInstance() fastest Java instance} is returned. + *

+ * Please read {@link #nativeInstance() javadocs of nativeInstance()} before + * using this method. + */ + public static LZ4Factory fastestInstance() { + if (Native.isLoaded() + || Native.class.getClassLoader() == ClassLoader.getSystemClassLoader()) { + try { + return nativeInstance(); + } catch (Throwable t) { + return fastestJavaInstance(); + } + } else { + return fastestJavaInstance(); + } + } + + @SuppressWarnings("unchecked") + private static T classInstance(String cls) throws NoSuchFieldException, SecurityException, ClassNotFoundException, IllegalArgumentException, IllegalAccessException { + final Class c = Class.forName(cls); + Field f = c.getField("INSTANCE"); + return (T) f.get(null); + } + + private final String impl; + private final LZ4Compressor fastCompressor; + private final LZ4Compressor highCompressor; + private final LZ4FastDecompressor fastDecompressor; + private final LZ4SafeDecompressor safeDecompressor; + + private LZ4Factory(String impl) throws ClassNotFoundException, NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException { + this.impl = impl; + fastCompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "Compressor"); + highCompressor = classInstance("net.jpountz.lz4.LZ4HC" + impl + "Compressor"); + fastDecompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "FastDecompressor"); + safeDecompressor = classInstance("net.jpountz.lz4.LZ4" + impl + "SafeDecompressor"); + + // quickly test that everything works as expected + final byte[] original = new byte[] {'a','b','c','d',' ',' ',' ',' ',' ',' ','a','b','c','d','e','f','g','h','i','j'}; + for (LZ4Compressor compressor : Arrays.asList(fastCompressor, highCompressor)) { + final int maxCompressedLength = compressor.maxCompressedLength(original.length); + final byte[] compressed = new byte[maxCompressedLength]; + final int compressedLength = compressor.compress(original, 0, original.length, compressed, 0, maxCompressedLength); + final byte[] restored = new 
byte[original.length]; + fastDecompressor.decompress(compressed, 0, restored, 0, original.length); + if (!Arrays.equals(original, restored)) { + throw new AssertionError(); + } + Arrays.fill(restored, (byte) 0); + final int decompressedLength = safeDecompressor.decompress(compressed, 0, compressedLength, restored, 0); + if (decompressedLength != original.length || !Arrays.equals(original, restored)) { + throw new AssertionError(); + } + } + + } + + /** Return a blazing fast {@link LZ4Compressor}. */ + public LZ4Compressor fastCompressor() { + return fastCompressor; + } + + /** Return a {@link LZ4Compressor} which requires more memory than + * {@link #fastCompressor()} and is slower but compresses more efficiently. */ + public LZ4Compressor highCompressor() { + return highCompressor; + } + + /** Return a {@link LZ4FastDecompressor} instance. */ + public LZ4FastDecompressor fastDecompressor() { + return fastDecompressor; + } + + /** Return a {@link LZ4SafeDecompressor} instance. */ + public LZ4SafeDecompressor safeDecompressor() { + return safeDecompressor; + } + + /** Return a {@link LZ4UnknownSizeDecompressor} instance. + * @deprecated use {@link #safeDecompressor()} */ + public LZ4UnknownSizeDecompressor unknwonSizeDecompressor() { + return safeDecompressor(); + } + + /** Return a {@link LZ4Decompressor} instance. + * @deprecated use {@link #fastDecompressor()} */ + public LZ4Decompressor decompressor() { + return fastDecompressor(); + } + + /** Prints the fastest instance. 
*/ + public static void main(String[] args) { + System.out.println("Fastest instance is " + fastestInstance()); + System.out.println("Fastest Java instance is " + fastestJavaInstance()); + } + + @Override + public String toString() { + return getClass().getSimpleName() + ":" + impl; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4FastDecompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4FastDecompressor.java new file mode 100644 index 000000000..3281e823c --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4FastDecompressor.java @@ -0,0 +1,90 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * LZ4 decompressor that requires the size of the original input to be known. + * Use {@link LZ4SafeDecompressor} if you only know the size of the + * compressed stream. + *

+ * Instances of this class are thread-safe. + */ +public abstract class LZ4FastDecompressor implements LZ4Decompressor { + + /** Decompress src[srcOff:] into dest[destOff:destOff+destLen] + * and return the number of bytes read from src. + * destLen must be exactly the size of the decompressed data. + * + * @param destLen the exact size of the original input + * @return the number of bytes read to restore the original input + */ + public abstract int decompress(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); + + /** + * Same as {@link #decompress(byte[], int, byte[], int, int)} except that up + * to 64 KB before srcOff in src. This is useful for + * providing LZ4 with a dictionary that can be reused during decompression. + */ + public abstract int decompressWithPrefix64k(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], int, byte[], int, int) decompress(src, 0, dest, 0, destLen)}. + */ + public final int decompress(byte[] src, byte[] dest, int destLen) { + return decompress(src, 0, dest, 0, destLen); + } + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], byte[], int) decompress(src, dest, dest.length)}. + */ + public final int decompress(byte[] src, byte[] dest) { + return decompress(src, dest, dest.length); + } + + /** + * Convenience method which returns src[srcOff:?] + * decompressed. + *

Warning: this method has an + * important overhead due to the fact that it needs to allocate a buffer to + * decompress into.

+ *

Here is how this method is implemented:

+ *
+   * final byte[] decompressed = new byte[destLen];
+   * decompress(src, srcOff, decompressed, 0, destLen);
+   * return decompressed;
+   * 
+ */ + public final byte[] decompress(byte[] src, int srcOff, int destLen) { + final byte[] decompressed = new byte[destLen]; + decompress(src, srcOff, decompressed, 0, destLen); + return decompressed; + } + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], int, int) decompress(src, 0, destLen)}. + */ + public final byte[] decompress(byte[] src, int destLen) { + return decompress(src, 0, destLen); + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4HCJNICompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4HCJNICompressor.java new file mode 100644 index 000000000..ec09353d4 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4HCJNICompressor.java @@ -0,0 +1,38 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.util.Utils.checkRange; + +/** + * High compression {@link LZ4Compressor}s implemented with JNI bindings to the + * original C implementation of LZ4. 
+ */ +final class LZ4HCJNICompressor extends LZ4Compressor { + + public static final LZ4Compressor INSTANCE = new LZ4HCJNICompressor(); + + @Override + public int compress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen) { + checkRange(src, srcOff, srcLen); + checkRange(dest, destOff, maxDestLen); + final int result = LZ4JNI.LZ4_compressHC(src, srcOff, srcLen, dest, destOff, maxDestLen); + if (result <= 0) { + throw new LZ4Exception(); + } + return result; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNI.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNI.java new file mode 100644 index 000000000..1c02ff1c7 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNI.java @@ -0,0 +1,41 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import net.jpountz.util.Native; + + +/** + * JNI bindings to the original C implementation of LZ4. 
+ */ +enum LZ4JNI { + ; + + static { + Native.load(); + init(); + } + + static native void init(); + static native int LZ4_compress_limitedOutput(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + static native int LZ4_compressHC(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + static native int LZ4_decompress_fast(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); + static native int LZ4_decompress_fast_withPrefix64k(byte[] src, int srcOff, byte[] dest, int destOff, int destLen); + static native int LZ4_decompress_safe(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + static native int LZ4_decompress_safe_withPrefix64k(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + static native int LZ4_compressBound(int len); + +} + diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNICompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNICompressor.java new file mode 100644 index 000000000..abfdb8850 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNICompressor.java @@ -0,0 +1,37 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.util.Utils.checkRange; + +/** + * Fast {@link LZ4FastCompressor}s implemented with JNI bindings to the original C + * implementation of LZ4. 
+ */ +final class LZ4JNICompressor extends LZ4Compressor { + + public static final LZ4Compressor INSTANCE = new LZ4JNICompressor(); + + @Override + public int compress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen) { + checkRange(src, srcOff, srcLen); + checkRange(dest, destOff, maxDestLen); + final int result = LZ4JNI.LZ4_compress_limitedOutput(src, srcOff, srcLen, dest, destOff, maxDestLen); + if (result <= 0) { + throw new LZ4Exception("maxDestLen is too small"); + } + return result; + } +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNIFastDecompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNIFastDecompressor.java new file mode 100644 index 000000000..6aa1963fc --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNIFastDecompressor.java @@ -0,0 +1,48 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.util.Utils.checkRange; + +/** + * {@link LZ4FastDecompressor} implemented with JNI bindings to the original C + * implementation of LZ4. 
+ */ +final class LZ4JNIFastDecompressor extends LZ4FastDecompressor { + + public static final LZ4JNIFastDecompressor INSTANCE = new LZ4JNIFastDecompressor(); + + @Override + public final int decompress(byte[] src, int srcOff, byte[] dest, int destOff, int destLen) { + checkRange(src, srcOff); + checkRange(dest, destOff, destLen); + final int result = LZ4JNI.LZ4_decompress_fast(src, srcOff, dest, destOff, destLen); + if (result < 0) { + throw new LZ4Exception("Error decoding offset " + (srcOff - result) + " of input buffer"); + } + return result; + } + + @Override + public final int decompressWithPrefix64k(byte[] src, int srcOff, byte[] dest, int destOff, int destLen) { + checkRange(src, srcOff); + checkRange(dest, destOff, destLen); + final int result = LZ4JNI.LZ4_decompress_fast_withPrefix64k(src, srcOff, dest, destOff, destLen); + if (result < 0) { + throw new LZ4Exception("Error decoding offset " + (srcOff - result) + " of input buffer"); + } + return result; + } +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNISafeDecompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNISafeDecompressor.java new file mode 100644 index 000000000..02802facb --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4JNISafeDecompressor.java @@ -0,0 +1,48 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import static net.jpountz.util.Utils.checkRange; + +/** + * {@link LZ4SafeDecompressor} implemented with JNI bindings to the original C + * implementation of LZ4. + */ +final class LZ4JNISafeDecompressor extends LZ4SafeDecompressor { + + public static final LZ4SafeDecompressor INSTANCE = new LZ4JNISafeDecompressor(); + + @Override + public final int decompress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen) { + checkRange(src, srcOff, srcLen); + checkRange(dest, destOff, maxDestLen); + final int result = LZ4JNI.LZ4_decompress_safe(src, srcOff, srcLen, dest, destOff, maxDestLen); + if (result < 0) { + throw new LZ4Exception("Error decoding offset " + (srcOff - result) + " of input buffer"); + } + return result; + } + + @Override + public final int decompressWithPrefix64k(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen) { + checkRange(src, srcOff, srcLen); + checkRange(dest, destOff, maxDestLen); + final int result = LZ4JNI.LZ4_decompress_safe_withPrefix64k(src, srcOff, srcLen, dest, destOff, maxDestLen); + if (result < 0) { + throw new LZ4Exception("Error decoding offset " + (srcOff - result) + " of input buffer"); + } + return result; + } +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4SafeDecompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4SafeDecompressor.java new file mode 100644 index 000000000..c4ad28a54 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4SafeDecompressor.java @@ -0,0 +1,109 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.util.Arrays; + +/** + * LZ4 decompressor that requires the size of the compressed data to be known. + *

+ * Implementations of this class are usually a little slower than those of + * {@link LZ4FastDecompressor} but do not require the size of the original data to + * be known. + */ +public abstract class LZ4SafeDecompressor implements LZ4UnknownSizeDecompressor { + + /** + * Uncompress src[srcOff:srcLen] into + * dest[destOff:destOff+maxDestLen] and returns the number of + * decompressed bytes written into dest. + * + * @param srcLen the exact size of the compressed stream + * @return the original input size + * @throws LZ4Exception if maxDestLen is too small + */ + public abstract int decompress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + + /** + * Same as {@link #decompress(byte[], int, int, byte[], int, int) except that + * up to 64 KB before srcOff in src. This is useful + * for providing LZ4 with a dictionary that can be reused during decompression. + */ + public abstract int decompressWithPrefix64k(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], int, int, byte[], int, int) decompress(src, srcOff, srcLen, dest, destOff, dest.length - destOff)}. + */ + public final int decompress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff) { + return decompress(src, srcOff, srcLen, dest, destOff, dest.length - destOff); + } + + /** + * Convenience method, equivalent to calling + * {@link #decompressWithPrefix64k(byte[], int, int, byte[], int, int) decompress(src, srcOff, srcLen, dest, destOff, dest.length - destOff)}. 
+ */ + public final int decompressWithPrefix64k(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff) { + return decompressWithPrefix64k(src, srcOff, srcLen, dest, destOff, dest.length - destOff); + } + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], int, int, byte[], int) decompress(src, 0, src.length, dest, 0)} + */ + public final int decompress(byte[] src, byte[] dest) { + return decompress(src, 0, src.length, dest, 0); + } + + /** + * Convenience method which returns src[srcOff:srcOff+srcLen] + * decompressed. + *

Warning: this method has an + * important overhead due to the fact that it needs to allocate a buffer to + * decompress into, and then needs to resize this buffer to the actual + * decompressed length.

+ *

Here is how this method is implemented:

+ *
+   * byte[] decompressed = new byte[maxDestLen];
+   * final int decompressedLength = decompress(src, srcOff, srcLen, decompressed, 0, maxDestLen);
+   * if (decompressedLength != decompressed.length) {
+   *   decompressed = Arrays.copyOf(decompressed, decompressedLength);
+   * }
+   * return decompressed;
+   * 
+ */ + public final byte[] decompress(byte[] src, int srcOff, int srcLen, int maxDestLen) { + byte[] decompressed = new byte[maxDestLen]; + final int decompressedLength = decompress(src, srcOff, srcLen, decompressed, 0, maxDestLen); + if (decompressedLength != decompressed.length) { + decompressed = Arrays.copyOf(decompressed, decompressedLength); + } + return decompressed; + } + + /** + * Convenience method, equivalent to calling + * {@link #decompress(byte[], int, int, int) decompress(src, 0, src.length, maxDestLen)}. + */ + public final byte[] decompress(byte[] src, int maxDestLen) { + return decompress(src, 0, src.length, maxDestLen); + } + + @Override + public String toString() { + return getClass().getSimpleName(); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4UnknownSizeDecompressor.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4UnknownSizeDecompressor.java new file mode 100644 index 000000000..10fde1c4d --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4UnknownSizeDecompressor.java @@ -0,0 +1,27 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @deprecated Use {@link LZ4SafeDecompressor} instead. 
+ */ +@Deprecated +public interface LZ4UnknownSizeDecompressor { + + int decompress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff, int maxDestLen); + + int decompress(byte[] src, int srcOff, int srcLen, byte[] dest, int destOff); + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Utils.java b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Utils.java new file mode 100644 index 000000000..a46374bfe --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/LZ4Utils.java @@ -0,0 +1,206 @@ +package net.jpountz.lz4; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +import static net.jpountz.lz4.LZ4Constants.HASH_LOG; +import static net.jpountz.lz4.LZ4Constants.HASH_LOG_64K; +import static net.jpountz.lz4.LZ4Constants.HASH_LOG_HC; +import static net.jpountz.lz4.LZ4Constants.LAST_LITERALS; +import static net.jpountz.lz4.LZ4Constants.MIN_MATCH; +import static net.jpountz.lz4.LZ4Constants.ML_BITS; +import static net.jpountz.lz4.LZ4Constants.ML_MASK; +import static net.jpountz.lz4.LZ4Constants.RUN_MASK; +import static net.jpountz.util.Utils.readInt; + +enum LZ4Utils { + ; + + static final int maxCompressedLength(int length) { + if (length < 0) { + throw new IllegalArgumentException("length must be >= 0, got " + length); + } + return length + length / 255 + 16; + } + + static int hash(int i) { + return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG); + } + + static int hash64k(int i) { + return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG_64K); + } + + static int hashHC(int i) { + return (i * -1640531535) >>> ((MIN_MATCH * 8) - HASH_LOG_HC); + } + + static int readShortLittleEndian(byte[] buf, int i) { + return (buf[i] & 0xFF) | ((buf[i+1] & 0xFF) << 8); + } + + static int hash(byte[] buf, int i) { + return hash(readInt(buf, i)); + } + + static int hash64k(byte[] buf, int i) { + return hash64k(readInt(buf, i)); + } + + static boolean readIntEquals(byte[] buf, int i, int j) { + return buf[i] == buf[j] && buf[i+1] == buf[j+1] && buf[i+2] == buf[j+2] && buf[i+3] == buf[j+3]; + } + + static void safeIncrementalCopy(byte[] dest, int matchOff, int dOff, int matchLen) { + for (int i = 0; i < matchLen; ++i) { + dest[dOff + i] = dest[matchOff + i]; + } + } + + static void wildIncrementalCopy(byte[] dest, int matchOff, int dOff, int matchCopyEnd) { + do { + copy8Bytes(dest, matchOff, dest, dOff); + matchOff += 8; + dOff += 8; + } while (dOff < matchCopyEnd); + } + + static void copy8Bytes(byte[] src, int sOff, byte[] dest, int dOff) { + for (int i = 0; i < 8; ++i) { + dest[dOff + i] = src[sOff + i]; + } + } + + static int 
commonBytes(byte[] b, int o1, int o2, int limit) { + int count = 0; + while (o2 < limit && b[o1++] == b[o2++]) { + ++count; + } + return count; + } + + static int commonBytesBackward(byte[] b, int o1, int o2, int l1, int l2) { + int count = 0; + while (o1 > l1 && o2 > l2 && b[--o1] == b[--o2]) { + ++count; + } + return count; + } + + static void safeArraycopy(byte[] src, int sOff, byte[] dest, int dOff, int len) { + System.arraycopy(src, sOff, dest, dOff, len); + } + + static void wildArraycopy(byte[] src, int sOff, byte[] dest, int dOff, int len) { + try { + for (int i = 0; i < len; i += 8) { + copy8Bytes(src, sOff + i, dest, dOff + i); + } + } catch (ArrayIndexOutOfBoundsException e) { + throw new LZ4Exception("Malformed input at offset " + sOff); + } + } + + static int encodeSequence(byte[] src, int anchor, int matchOff, int matchRef, int matchLen, byte[] dest, int dOff, int destEnd) { + final int runLen = matchOff - anchor; + final int tokenOff = dOff++; + + if (dOff + runLen + (2 + 1 + LAST_LITERALS) + (runLen >>> 8) > destEnd) { + throw new LZ4Exception("maxDestLen is too small"); + } + + int token; + if (runLen >= RUN_MASK) { + token = (byte) (RUN_MASK << ML_BITS); + dOff = writeLen(runLen - RUN_MASK, dest, dOff); + } else { + token = runLen << ML_BITS; + } + + // copy literals + wildArraycopy(src, anchor, dest, dOff, runLen); + dOff += runLen; + + // encode offset + final int matchDec = matchOff - matchRef; + dest[dOff++] = (byte) matchDec; + dest[dOff++] = (byte) (matchDec >>> 8); + + // encode match len + matchLen -= 4; + if (dOff + (1 + LAST_LITERALS) + (matchLen >>> 8) > destEnd) { + throw new LZ4Exception("maxDestLen is too small"); + } + if (matchLen >= ML_MASK) { + token |= ML_MASK; + dOff = writeLen(matchLen - RUN_MASK, dest, dOff); + } else { + token |= matchLen; + } + + dest[tokenOff] = (byte) token; + + return dOff; + } + + static int lastLiterals(byte[] src, int sOff, int srcLen, byte[] dest, int dOff, int destEnd) { + final int runLen = srcLen; 
+ + if (dOff + runLen + 1 + (runLen + 255 - RUN_MASK) / 255 > destEnd) { + throw new LZ4Exception(); + } + + if (runLen >= RUN_MASK) { + dest[dOff++] = (byte) (RUN_MASK << ML_BITS); + dOff = writeLen(runLen - RUN_MASK, dest, dOff); + } else { + dest[dOff++] = (byte) (runLen << ML_BITS); + } + // copy literals + System.arraycopy(src, sOff, dest, dOff, runLen); + dOff += runLen; + + return dOff; + } + + static int writeLen(int len, byte[] dest, int dOff) { + while (len >= 0xFF) { + dest[dOff++] = (byte) 0xFF; + len -= 0xFF; + } + dest[dOff++] = (byte) len; + return dOff; + } + + static class Match { + int start, ref, len; + + void fix(int correction) { + start += correction; + ref += correction; + len -= correction; + } + + int end() { + return start + len; + } + } + + static void copyTo(Match m1, Match m2) { + m2.len = m1.len; + m2.start = m1.start; + m2.ref = m1.ref; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/lz4/package.html b/Tools/Cache Editor/src/net/jpountz/lz4/package.html new file mode 100644 index 000000000..e53410674 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/lz4/package.html @@ -0,0 +1,55 @@ + + + + + + + +

LZ4 compression. The entry point of the API is the +{@link net.jpountz.lz4.LZ4Factory} class, which gives access to +{@link net.jpountz.lz4.LZ4Compressor compressors} and +{@link net.jpountz.lz4.LZ4SafeDecompressor decompressors}.

+ + +

Sample usage:

+ +
+    LZ4Factory factory = LZ4Factory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+    final int decompressedLength = data.length;
+
+    // compress data
+    LZ4Compressor compressor = factory.fastCompressor();
+    int maxCompressedLength = compressor.maxCompressedLength(decompressedLength);
+    byte[] compressed = new byte[maxCompressedLength];
+    int compressedLength = compressor.compress(data, 0, decompressedLength, compressed, 0, maxCompressedLength);
+
+    // decompress data
+    // - method 1: when the decompressed length is known
+    LZ4FastDecompressor decompressor = factory.fastDecompressor();
+    byte[] restored = new byte[decompressedLength];
+    int compressedLength2 = decompressor.decompress(compressed, 0, restored, 0, decompressedLength);
+    // compressedLength == compressedLength2
+
+    // - method 2: when the compressed length is known (a little slower)
+    // the destination buffer needs to be over-sized
+    LZ4SafeDecompressor decompressor2 = factory.safeDecompressor();
+    int decompressedLength2 = decompressor2.decompress(compressed, 0, compressedLength, restored, 0);
+    // decompressedLength == decompressedLength2
+
+ + + diff --git a/Tools/Cache Editor/src/net/jpountz/util/Native.java b/Tools/Cache Editor/src/net/jpountz/util/Native.java new file mode 100644 index 000000000..34f7877bc --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/util/Native.java @@ -0,0 +1,121 @@ +package net.jpountz.util; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; + +/** FOR INTERNAL USE ONLY */ +public enum Native { + ; + + private enum OS { + // Even on Windows, the default compiler from cpptasks (gcc) uses .so as a shared lib extension + WINDOWS("win32", "so"), LINUX("linux", "so"), MAC("darwin", "dylib"), SOLARIS("solaris", "so"); + public final String name, libExtension; + + private OS(String name, String libExtension) { + this.name = name; + this.libExtension = libExtension; + } + } + + private static String arch() { + return System.getProperty("os.arch"); + } + + private static OS os() { + String osName = System.getProperty("os.name"); + if (osName.contains("Linux")) { + return OS.LINUX; + } else if (osName.contains("Mac")) { + return OS.MAC; + } else if (osName.contains("Windows")) { + return OS.WINDOWS; + } else if (osName.contains("Solaris")) { + return OS.SOLARIS; + } else { + throw new UnsupportedOperationException("Unsupported operating system: " + + osName); + } + } + + private static String resourceName() { + OS os = os(); + return "/" + os.name + 
"/" + arch() + "/liblz4-java." + os.libExtension; + } + + private static boolean loaded = false; + + public static synchronized boolean isLoaded() { + return loaded; + } + + public static synchronized void load() { + if (loaded) { + return; + } + String resourceName = resourceName(); + InputStream is = Native.class.getResourceAsStream(resourceName); + if (is == null) { + throw new UnsupportedOperationException("Unsupported OS/arch, cannot find " + resourceName + ". Please try building from source."); + } + File tempLib; + try { + tempLib = File.createTempFile("liblz4-java", "." + os().libExtension); + // copy to tempLib + FileOutputStream out = new FileOutputStream(tempLib); + try { + byte[] buf = new byte[4096]; + while (true) { + int read = is.read(buf); + if (read == -1) { + break; + } + out.write(buf, 0, read); + } + try { + out.close(); + out = null; + } catch (IOException e) { + // ignore + } + System.load(tempLib.getAbsolutePath()); + loaded = true; + } finally { + try { + if (out != null) { + out.close(); + } + } catch (IOException e) { + // ignore + } + if (tempLib != null && tempLib.exists()) { + if (!loaded) { + tempLib.delete(); + } else { + // try to delete on exit, does it work on Windows? + tempLib.deleteOnExit(); + } + } + } + } catch (IOException e) { + throw new ExceptionInInitializerError("Cannot unpack liblz4-java"); + } + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/util/Utils.java b/Tools/Cache Editor/src/net/jpountz/util/Utils.java new file mode 100644 index 000000000..acc2ba007 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/util/Utils.java @@ -0,0 +1,89 @@ +package net.jpountz.util; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.nio.ByteOrder; + +public enum Utils { + ; + + public static final ByteOrder NATIVE_BYTE_ORDER = ByteOrder.nativeOrder(); + + public static void checkRange(byte[] buf, int off) { + if (off < 0 || off >= buf.length) { + throw new ArrayIndexOutOfBoundsException(off); + } + } + + public static void checkRange(byte[] buf, int off, int len) { + checkLength(len); + if (len > 0) { + checkRange(buf, off); + checkRange(buf, off + len - 1); + } + } + + public static void checkLength(int len) { + if (len < 0) { + throw new IllegalArgumentException("lengths must be >= 0"); + } + } + + public static byte readByte(byte[] buf, int i) { + return buf[i]; + } + + public static int readIntBE(byte[] buf, int i) { + return ((buf[i] & 0xFF) << 24) | ((buf[i+1] & 0xFF) << 16) | ((buf[i+2] & 0xFF) << 8) | (buf[i+3] & 0xFF); + } + + public static int readIntLE(byte[] buf, int i) { + return (buf[i] & 0xFF) | ((buf[i+1] & 0xFF) << 8) | ((buf[i+2] & 0xFF) << 16) | ((buf[i+3] & 0xFF) << 24); + } + + public static int readInt(byte[] buf, int i) { + if (NATIVE_BYTE_ORDER == ByteOrder.BIG_ENDIAN) { + return readIntBE(buf, i); + } else { + return readIntLE(buf, i); + } + } + + public static void writeShortLittleEndian(byte[] buf, int off, int v) { + buf[off++] = (byte) v; + buf[off++] = (byte) (v >>> 8); + } + + public static void writeInt(int[] buf, int off, int v) { + buf[off] = v; + } + + public static int readInt(int[] buf, int off) { + return buf[off]; + } + + public static void writeByte(byte[] dest, int tokenOff, int i) { + dest[tokenOff] = (byte) i; + } 
+ + public static void writeShort(short[] buf, int off, int v) { + buf[off] = (short) v; + } + + public static int readShort(short[] buf, int off) { + return buf[off] & 0xFFFF; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/util/package.html b/Tools/Cache Editor/src/net/jpountz/util/package.html new file mode 100644 index 000000000..4b3ceb980 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/util/package.html @@ -0,0 +1,22 @@ + + + + + + + +

Utility classes.

+ + \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/AbstractStreamingXXHash32Java.java b/Tools/Cache Editor/src/net/jpountz/xxhash/AbstractStreamingXXHash32Java.java new file mode 100644 index 000000000..3dd3ed4f3 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/AbstractStreamingXXHash32Java.java @@ -0,0 +1,42 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.xxhash.XXHashConstants.PRIME1; +import static net.jpountz.xxhash.XXHashConstants.PRIME2; + +abstract class AbstractStreamingXXHash32Java extends StreamingXXHash32 { + + int v1, v2, v3, v4, memSize; + long totalLen; + final byte[] memory; + + AbstractStreamingXXHash32Java(int seed) { + super(seed); + memory = new byte[16]; + reset(); + } + + @Override + public void reset() { + v1 = seed + PRIME1 + PRIME2; + v2 = seed + PRIME2; + v3 = seed + 0; + v4 = seed - PRIME1; + totalLen = 0; + memSize = 0; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32.java b/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32.java new file mode 100644 index 000000000..c59f03466 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32.java @@ -0,0 +1,111 @@ +package net.jpountz.xxhash; + +import java.util.zip.Checksum; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + + +/** + * Streaming interface for {@link XXHash32}. + *

+ * This API is compatible with the {@link XXHash32 block API} and the following + * code samples are equivalent: + *

+ *   int hash(XXHashFactory xxhashFactory, byte[] buf, int off, int len, int seed) {
+ *     return xxhashFactory.hash32().hash(buf, off, len, seed);
+ *   }
+ * 
+ *
+ *   int hash(XXHashFactory xxhashFactory, byte[] buf, int off, int len, int seed) {
+ *     StreamingXXHash32 sh32 = xxhashFactory.newStreamingHash32(seed);
+ *     sh32.update(buf, off, len);
+ *     return sh32.getValue();
+ *   }
+ * 
+ *

+ * Instances of this class are not thread-safe. + */ +public abstract class StreamingXXHash32 { + + interface Factory { + + StreamingXXHash32 newStreamingHash(int seed); + + } + + final int seed; + + StreamingXXHash32(int seed) { + this.seed = seed; + } + + /** + * Get the value of the checksum. + */ + public abstract int getValue(); + + /** + * Update the value of the hash with buf[off:off+len]. + */ + public abstract void update(byte[] buf, int off, int len); + + /** + * Reset this instance to the state it had right after instantiation. The + * seed remains unchanged. + */ + public abstract void reset(); + + @Override + public String toString() { + return getClass().getSimpleName() + "(seed=" + seed + ")"; + } + + /** + * Return a {@link Checksum} view of this instance. Modifications to the view + * will modify this instance too and vice-versa. + */ + public final Checksum asChecksum() { + return new Checksum() { + + @Override + public long getValue() { + return StreamingXXHash32.this.getValue() & 0xFFFFFFFL; + } + + @Override + public void reset() { + StreamingXXHash32.this.reset(); + } + + @Override + public void update(int b) { + StreamingXXHash32.this.update(new byte[] {(byte) b}, 0, 1); + } + + @Override + public void update(byte[] b, int off, int len) { + StreamingXXHash32.this.update(b, off, len); + } + + @Override + public String toString() { + return StreamingXXHash32.this.toString(); + } + + }; + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32JNI.java b/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32JNI.java new file mode 100644 index 000000000..98f417cde --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/StreamingXXHash32JNI.java @@ -0,0 +1,71 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +final class StreamingXXHash32JNI extends StreamingXXHash32 { + + static class Factory implements StreamingXXHash32.Factory { + + public static final StreamingXXHash32.Factory INSTANCE = new Factory(); + + @Override + public StreamingXXHash32 newStreamingHash(int seed) { + return new StreamingXXHash32JNI(seed); + } + + } + + private long state; + + StreamingXXHash32JNI(int seed) { + super(seed); + state = XXHashJNI.XXH32_init(seed); + } + + private void checkState() { + if (state == 0) { + throw new AssertionError("Already finalized"); + } + } + + @Override + public void reset() { + checkState(); + XXHashJNI.XXH32_free(state); + state = XXHashJNI.XXH32_init(seed); + } + + @Override + public int getValue() { + checkState(); + return XXHashJNI.XXH32_intermediateDigest(state); + } + + @Override + public void update(byte[] bytes, int off, int len) { + checkState(); + XXHashJNI.XXH32_update(state, bytes, off, len); + } + + @Override + protected void finalize() throws Throwable { + super.finalize(); + // free memory + XXHashJNI.XXH32_free(state); + state = 0; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32.java b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32.java new file mode 100644 index 000000000..1a80023d9 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32.java @@ -0,0 +1,35 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * A 32-bits hash. + *

+ * Instances of this class are thread-safe. + */ +public abstract class XXHash32 { + + /** + * Compute the 32-bits hash of buf[off:off+len] using seed + * seed. + */ + public abstract int hash(byte[] buf, int off, int len, int seed); + + @Override + public String toString() { + return getClass().getSimpleName(); + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32JNI.java b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32JNI.java new file mode 100644 index 000000000..bb75980b4 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHash32JNI.java @@ -0,0 +1,29 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import static net.jpountz.util.Utils.checkRange; + +final class XXHash32JNI extends XXHash32 { + + public static final XXHash32 INSTANCE = new XXHash32JNI(); + + @Override + public int hash(byte[] buf, int off, int len, int seed) { + checkRange(buf, off, len); + return XXHashJNI.XXH32(buf, off, len, seed); + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashConstants.java b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashConstants.java new file mode 100644 index 000000000..771a0d2c8 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashConstants.java @@ -0,0 +1,26 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +enum XXHashConstants { + ; + + static final int PRIME1 = -1640531535; + static final int PRIME2 = -2048144777; + static final int PRIME3 = -1028477379; + static final int PRIME4 = 668265263; + static final int PRIME5 = 374761393; + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashFactory.java b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashFactory.java new file mode 100644 index 000000000..a7750b679 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashFactory.java @@ -0,0 +1,190 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import java.lang.reflect.Field; +import java.util.Random; + +import net.jpountz.util.Native; + +/** + * Entry point to get {@link XXHash32} and {@link StreamingXXHash32} instances. + *

+ * This class has 3 instances

    + *
  • a {@link #nativeInstance() native} instance which is a JNI binding to + * the original LZ4 C implementation. + *
  • a {@link #safeInstance() safe Java} instance which is a pure Java port + * of the original C library,
  • + *
  • an {@link #unsafeInstance() unsafe Java} instance which is a Java port + * using the unofficial {@link sun.misc.Unsafe} API. + *
+ *

+ * Only the {@link #safeInstance() safe instance} is guaranteed to work on your + * JVM, as a consequence it is advised to use the {@link #fastestInstance()} or + * {@link #fastestJavaInstance()} to pull a {@link XXHashFactory} instance. + *

+ * All methods from this class are very costly, so you should get an instance + * once, and then reuse it whenever possible. This is typically done by storing + * a {@link XXHashFactory} instance in a static field. + */ +public final class XXHashFactory { + + private static XXHashFactory instance(String impl) { + try { + return new XXHashFactory(impl); + } catch (Exception e) { + throw new AssertionError(e); + } + } + + private static XXHashFactory NATIVE_INSTANCE, + JAVA_UNSAFE_INSTANCE, + JAVA_SAFE_INSTANCE; + + /** Return a {@link XXHashFactory} that returns {@link XXHash32} instances that + * are native bindings to the original C API. + *

+ * Please note that this instance has some traps you should be aware of:

    + *
  1. Upon loading this instance, files will be written to the temporary + * directory of the system. Although these files are supposed to be deleted + * when the JVM exits, they might remain on systems that don't support + * removal of files being used such as Windows. + *
  2. The instance can only be loaded once per JVM. This can be a problem + * if your application uses multiple class loaders (such as most servlet + * containers): this instance will only be available to the children of the + * class loader which has loaded it. As a consequence, it is advised to + * either not use this instance in webapps or to put this library in the lib + * directory of your servlet container so that it is loaded by the system + * class loader. + *
+ */ + public static synchronized XXHashFactory nativeInstance() { + if (NATIVE_INSTANCE == null) { + NATIVE_INSTANCE = instance("JNI"); + } + return NATIVE_INSTANCE; + } + + /** Return a {@link XXHashFactory} that returns {@link XXHash32} instances that + * are written with Java's official API. */ + public static synchronized XXHashFactory safeInstance() { + if (JAVA_SAFE_INSTANCE == null) { + JAVA_SAFE_INSTANCE = instance("JavaSafe"); + } + return JAVA_SAFE_INSTANCE; + } + + /** Return a {@link XXHashFactory} that returns {@link XXHash32} instances that + * may use {@link sun.misc.Unsafe} to speed up hashing. */ + public static synchronized XXHashFactory unsafeInstance() { + if (JAVA_UNSAFE_INSTANCE == null) { + JAVA_UNSAFE_INSTANCE = instance("JavaUnsafe"); + } + return JAVA_UNSAFE_INSTANCE; + } + + /** + * Return the fastest available {@link XXHashFactory} instance which does not + * rely on JNI bindings. It first tries to load the + * {@link #unsafeInstance() unsafe instance}, and then the + * {@link #safeInstance() safe Java instance} if the JVM doesn't have a + * working {@link sun.misc.Unsafe}. + */ + public static XXHashFactory fastestJavaInstance() { + try { + return unsafeInstance(); + } catch (Throwable t) { + return safeInstance(); + } + } + + /** + * Return the fastest available {@link XXHashFactory} instance. If the class + * loader is the system class loader and if the + * {@link #nativeInstance() native instance} loads successfully, then the + * {@link #nativeInstance() native instance} is returned, otherwise the + * {@link #fastestJavaInstance() fastest Java instance} is returned. + *

+ * Please read {@link #nativeInstance() javadocs of nativeInstance()} before + * using this method. + */ + public static XXHashFactory fastestInstance() { + if (Native.isLoaded() + || Native.class.getClassLoader() == ClassLoader.getSystemClassLoader()) { + try { + return nativeInstance(); + } catch (Throwable t) { + return fastestJavaInstance(); + } + } else { + return fastestJavaInstance(); + } + } + + @SuppressWarnings("unchecked") + private static T classInstance(String cls) throws NoSuchFieldException, SecurityException, ClassNotFoundException, IllegalArgumentException, IllegalAccessException { + final Class c = Class.forName(cls); + Field f = c.getField("INSTANCE"); + return (T) f.get(null); + } + + private final String impl; + private final XXHash32 hash32; + private final StreamingXXHash32.Factory streamingHash32Factory; + + private XXHashFactory(String impl) throws ClassNotFoundException, NoSuchFieldException, SecurityException, IllegalArgumentException, IllegalAccessException { + this.impl = impl; + hash32 = classInstance("net.jpountz.xxhash.XXHash32" + impl); + streamingHash32Factory = classInstance("net.jpountz.xxhash.StreamingXXHash32" + impl + "$Factory"); + + // make sure it can run + final byte[] bytes = new byte[100]; + final Random random = new Random(); + random.nextBytes(bytes); + final int seed = random.nextInt(); + + final int h1 = hash32.hash(bytes, 0, bytes.length, seed); + final StreamingXXHash32 streamingHash32 = newStreamingHash32(seed); + streamingHash32.update(bytes, 0, bytes.length); + final int h2 = streamingHash32.getValue(); + if (h1 != h2) { + throw new AssertionError(); + } + } + + /** Return a {@link XXHash32} instance. */ + public XXHash32 hash32() { + return hash32; + } + + /** + * Return a new {@link StreamingXXHash32} instance. + */ + public StreamingXXHash32 newStreamingHash32(int seed) { + return streamingHash32Factory.newStreamingHash(seed); + } + + /** Prints the fastest instance. 
*/ + public static void main(String[] args) { + System.out.println("Fastest instance is " + fastestInstance()); + System.out.println("Fastest Java instance is " + fastestJavaInstance()); + } + + @Override + public String toString() { + return getClass().getSimpleName() + ":" + impl; + } + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashJNI.java b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashJNI.java new file mode 100644 index 000000000..74c40211e --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/XXHashJNI.java @@ -0,0 +1,35 @@ +package net.jpountz.xxhash; + +/* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +import net.jpountz.util.Native; + +enum XXHashJNI { + ; + + static { + Native.load(); + init(); + } + + private static native void init(); + static native int XXH32(byte[] input, int offset, int len, int seed); + static native long XXH32_init(int seed); + static native void XXH32_update(long state, byte[] input, int offset, int len); + static native int XXH32_intermediateDigest(long state); + static native int XXH32_digest(long state); + static native void XXH32_free(long state); + +} diff --git a/Tools/Cache Editor/src/net/jpountz/xxhash/package.html b/Tools/Cache Editor/src/net/jpountz/xxhash/package.html new file mode 100644 index 000000000..f595d25a8 --- /dev/null +++ b/Tools/Cache Editor/src/net/jpountz/xxhash/package.html @@ -0,0 +1,65 @@ + + + + + + + +

xxhash hashing. This package supports both block hashing via +{@link net.jpountz.xxhash.XXHash32} and streaming hashing via +{@link net.jpountz.xxhash.StreamingXXHash32}. Have a look at +{@link net.jpountz.xxhash.XXHashFactory} to know how to get instances of these +interfaces.

+ +

Streaming hashing is a little slower but doesn't require to load the whole +stream into memory.

+ +

Sample block usage:

+ +
+    XXHashFactory factory = XXHashFactory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+
+    XXHash32 hash32 = factory.hash32();
+    int seed = 0x9747b28c; // used to initialize the hash value, use whatever
+                           // value you want, but always the same
+    int hash = hash32.hash(data, 0, data.length, seed);
+
+ +

Sample streaming usage:

+ +
+    XXHashFactory factory = XXHashFactory.fastestInstance();
+
+    byte[] data = "12345345234572".getBytes("UTF-8");
+    ByteArrayInputStream in = new ByteArrayInputStream(data);
+
+    int seed = 0x9747b28c; // used to initialize the hash value, use whatever
+                           // value you want, but always the same
+    StreamingXXHash32 hash32 = factory.newStreamingHash32(seed);
+    byte[] buf = new byte[8]; // for real-world usage, use a larger buffer, like 8192 bytes
+    for (;;) {
+      int read = in.read(buf);
+      if (read == -1) {
+        break;
+      }
+      hash32.update(buf, 0, read);
+    }
+    int hash = hash32.getValue();
+
+ + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveEntry.java new file mode 100644 index 000000000..0060215dd --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveEntry.java @@ -0,0 +1,59 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers; + +import java.util.Date; + +/** + * Represents an entry of an archive. + */ +public interface ArchiveEntry { + + /** + * Gets the name of the entry in this archive. May refer to a file or directory or other item. + * + * @return The name of this entry in the archive. + */ + public String getName(); + + /** + * Gets the uncompressed size of this entry. May be -1 (SIZE_UNKNOWN) if the size is unknown + * + * @return the uncompressed size of this entry. + */ + public long getSize(); + + /** Special value indicating that the size is unknown */ + public static final long SIZE_UNKNOWN = -1; + + /** + * Returns true if this entry refers to a directory. + * + * @return true if this entry refers to a directory. 
+ */ + public boolean isDirectory(); + + /** + * Gets the last modified date of this entry. + * + * @return the last modified date of this entry. + * @since 1.1 + */ + public Date getLastModifiedDate(); +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveException.java new file mode 100644 index 000000000..0c2fce3ea --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveException.java @@ -0,0 +1,52 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers; + +/** + * Archiver related Exception + */ +public class ArchiveException extends Exception { + + /** Serial */ + private static final long serialVersionUID = 2772690708123267100L; + + /** + * Constructs a new exception with the specified detail message. The cause + * is not initialized. + * + * @param message + * the detail message + */ + public ArchiveException(String message) { + super(message); + } + + /** + * Constructs a new exception with the specified detail message and cause. 
+ * + * @param message + * the detail message + * @param cause + * the cause + */ + public ArchiveException(String message, Exception cause) { + super(message); + this.initCause(cause); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveInputStream.java new file mode 100644 index 000000000..4d54c2d5b --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveInputStream.java @@ -0,0 +1,155 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Archive input streams MUST override the + * {@link #read(byte[], int, int)} - or {@link #read()} - + * method so that reading from the stream generates EOF for the end of + * data in each entry as well as at the end of the file proper. + *

+ * The {@link #getNextEntry()} method is used to reset the input stream + * ready for reading the data from the next entry. + *

+ * The input stream classes must also implement a method with the signature: + *

+ * public static boolean matches(byte[] signature, int length)
+ * 
+ * which is used by the {@link ArchiveStreamFactory} to autodetect + * the archive type from the first few bytes of a stream. + */ +public abstract class ArchiveInputStream extends InputStream { + + private final byte[] SINGLE = new byte[1]; + private static final int BYTE_MASK = 0xFF; + + /** holds the number of bytes read in this stream */ + private long bytesRead = 0; + + /** + * Returns the next Archive Entry in this Stream. + * + * @return the next entry, + * or {@code null} if there are no more entries + * @throws IOException if the next entry could not be read + */ + public abstract ArchiveEntry getNextEntry() throws IOException; + + /* + * Note that subclasses also implement specific get() methods which + * return the appropriate class without need for a cast. + * See SVN revision r743259 + * @return + * @throws IOException + */ + // public abstract XXXArchiveEntry getNextXXXEntry() throws IOException; + + /** + * Reads a byte of data. This method will block until enough input is + * available. + * + * Simply calls the {@link #read(byte[], int, int)} method. + * + * MUST be overridden if the {@link #read(byte[], int, int)} method + * is not overridden; may be overridden otherwise. + * + * @return the byte read, or -1 if end of input is reached + * @throws IOException + * if an I/O error has occurred + */ + @Override + public int read() throws IOException { + int num = read(SINGLE, 0, 1); + return num == -1 ? -1 : SINGLE[0] & BYTE_MASK; + } + + /** + * Increments the counter of already read bytes. + * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + */ + protected void count(int read) { + count((long) read); + } + + /** + * Increments the counter of already read bytes. 
+ * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + * @since 1.1 + */ + protected void count(long read) { + if (read != -1) { + bytesRead = bytesRead + read; + } + } + + /** + * Decrements the counter of already read bytes. + * + * @param pushedBack the number of bytes pushed back. + * @since 1.1 + */ + protected void pushedBackBytes(long pushedBack) { + bytesRead -= pushedBack; + } + + /** + * Returns the current number of bytes read from this stream. + * @return the number of read bytes + * @deprecated this method may yield wrong results for large + * archives, use #getBytesRead instead + */ + @Deprecated + public int getCount() { + return (int) bytesRead; + } + + /** + * Returns the current number of bytes read from this stream. + * @return the number of read bytes + * @since 1.1 + */ + public long getBytesRead() { + return bytesRead; + } + + /** + * Whether this stream is able to read the given entry. + * + *

+ * Some archive formats support variants or details that are not supported (yet). + *

+ * + * @param archiveEntry + * the entry to test + * @return This implementation always returns true. + * + * @since 1.1 + */ + public boolean canReadEntryData(ArchiveEntry archiveEntry) { + return true; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveOutputStream.java new file mode 100644 index 000000000..3a5084a08 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveOutputStream.java @@ -0,0 +1,169 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; + +/** + * Archive output stream implementations are expected to override the + * {@link #write(byte[], int, int)} method to improve performance. + * They should also override {@link #close()} to ensure that any necessary + * trailers are added. + * + *

The normal sequence of calls when working with ArchiveOutputStreams is:

+ *
    + *
  • Create ArchiveOutputStream object,
  • + *
  • optionally write SFX header (Zip only),
  • + *
  • repeat as needed: + *
      + *
    • {@link #putArchiveEntry(ArchiveEntry)} (writes entry header), + *
    • {@link #write(byte[])} (writes entry data, as often as needed), + *
    • {@link #closeArchiveEntry()} (closes entry), + *
    + *
  • + *
  • {@link #finish()} (ends the addition of entries),
  • + *
  • optionally write additional data, provided format supports it,
  • + *
  • {@link #close()}.
  • + *
+ */ +public abstract class ArchiveOutputStream extends OutputStream { + + /** Temporary buffer used for the {@link #write(int)} method */ + private final byte[] oneByte = new byte[1]; + static final int BYTE_MASK = 0xFF; + + /** holds the number of bytes written to this stream */ + private long bytesWritten = 0; + // Methods specific to ArchiveOutputStream + + /** + * Writes the headers for an archive entry to the output stream. + * The caller must then write the content to the stream and call + * {@link #closeArchiveEntry()} to complete the process. + * + * @param entry describes the entry + * @throws IOException + */ + public abstract void putArchiveEntry(ArchiveEntry entry) throws IOException; + + /** + * Closes the archive entry, writing any trailer information that may + * be required. + * @throws IOException + */ + public abstract void closeArchiveEntry() throws IOException; + + /** + * Finishes the addition of entries to this stream, without closing it. + * Additional data can be written, if the format supports it. + * + * @throws IOException if the user forgets to close the entry. + */ + public abstract void finish() throws IOException; + + /** + * Create an archive entry using the inputFile and entryName provided. + * + * @param inputFile + * @param entryName + * @return the ArchiveEntry set up with details from the file + * + * @throws IOException + */ + public abstract ArchiveEntry createArchiveEntry(File inputFile, String entryName) throws IOException; + + // Generic implementations of OutputStream methods that may be useful to sub-classes + + /** + * Writes a byte to the current archive entry. + * + *

This method simply calls {@code write( byte[], 0, 1 )}. + * + *

MUST be overridden if the {@link #write(byte[], int, int)} method + * is not overridden; may be overridden otherwise. + * + * @param b The byte to be written. + * @throws IOException on error + */ + @Override + public void write(int b) throws IOException { + oneByte[0] = (byte) (b & BYTE_MASK); + write(oneByte, 0, 1); + } + + /** + * Increments the counter of already written bytes. + * Doesn't increment if EOF has been hit ({@code written == -1}). + * + * @param written the number of bytes written + */ + protected void count(int written) { + count((long) written); + } + + /** + * Increments the counter of already written bytes. + * Doesn't increment if EOF has been hit ({@code written == -1}). + * + * @param written the number of bytes written + * @since 1.1 + */ + protected void count(long written) { + if (written != -1) { + bytesWritten = bytesWritten + written; + } + } + + /** + * Returns the current number of bytes written to this stream. + * @return the number of written bytes + * @deprecated this method may yield wrong results for large + * archives, use #getBytesWritten instead + */ + @Deprecated + public int getCount() { + return (int) bytesWritten; + } + + /** + * Returns the current number of bytes written to this stream. + * @return the number of written bytes + * @since 1.1 + */ + public long getBytesWritten() { + return bytesWritten; + } + + /** + * Whether this stream is able to write the given entry. + * + *

Some archive formats support variants or details that are + * not supported (yet).

+ * + * @param archiveEntry + * the entry to test + * @return This implementation always returns true. + * @since 1.1 + */ + public boolean canWriteEntryData(ArchiveEntry archiveEntry) { + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveStreamFactory.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveStreamFactory.java new file mode 100644 index 000000000..bf9bd3797 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/ArchiveStreamFactory.java @@ -0,0 +1,342 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.archivers; + +import java.io.ByteArrayInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import org.apache.commons.compress.archivers.ar.ArArchiveInputStream; +import org.apache.commons.compress.archivers.ar.ArArchiveOutputStream; +import org.apache.commons.compress.archivers.arj.ArjArchiveInputStream; +import org.apache.commons.compress.archivers.cpio.CpioArchiveInputStream; +import org.apache.commons.compress.archivers.cpio.CpioArchiveOutputStream; +import org.apache.commons.compress.archivers.dump.DumpArchiveInputStream; +import org.apache.commons.compress.archivers.jar.JarArchiveInputStream; +import org.apache.commons.compress.archivers.jar.JarArchiveOutputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveInputStream; +import org.apache.commons.compress.archivers.tar.TarArchiveOutputStream; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; +import org.apache.commons.compress.utils.IOUtils; + +/** + * Factory to create Archive[In|Out]putStreams from names or the first bytes of + * the InputStream. In order to add other implementations, you should extend + * ArchiveStreamFactory and override the appropriate methods (and call their + * implementation from super of course). + * + * Compressing a ZIP-File: + * + *
+ * final OutputStream out = new FileOutputStream(output); 
+ * ArchiveOutputStream os = new ArchiveStreamFactory().createArchiveOutputStream(ArchiveStreamFactory.ZIP, out);
+ * 
+ * os.putArchiveEntry(new ZipArchiveEntry("testdata/test1.xml"));
+ * IOUtils.copy(new FileInputStream(file1), os);
+ * os.closeArchiveEntry();
+ *
+ * os.putArchiveEntry(new ZipArchiveEntry("testdata/test2.xml"));
+ * IOUtils.copy(new FileInputStream(file2), os);
+ * os.closeArchiveEntry();
+ * os.close();
+ * 
+ * + * Decompressing a ZIP-File: + * + *
+ * final InputStream is = new FileInputStream(input); 
+ * ArchiveInputStream in = new ArchiveStreamFactory().createArchiveInputStream(ArchiveStreamFactory.ZIP, is);
+ * ZipArchiveEntry entry = (ZipArchiveEntry)in.getNextEntry();
+ * OutputStream out = new FileOutputStream(new File(dir, entry.getName()));
+ * IOUtils.copy(in, out);
+ * out.close();
+ * in.close();
+ * 
/**
 * Factory that creates archive input/output streams either from an archiver
 * name or, for input streams, by auto-detecting the archive format from the
 * first bytes of the stream.
 *
 * @Immutable
 */
public class ArchiveStreamFactory {

    /**
     * Constant used to identify the AR archive format.
     * @since 1.1
     */
    public static final String AR = "ar";
    /**
     * Constant used to identify the ARJ archive format.
     * @since 1.6
     */
    public static final String ARJ = "arj";
    /**
     * Constant used to identify the CPIO archive format.
     * @since 1.1
     */
    public static final String CPIO = "cpio";
    /**
     * Constant used to identify the Unix DUMP archive format.
     * @since 1.3
     */
    public static final String DUMP = "dump";
    /**
     * Constant used to identify the JAR archive format.
     * @since 1.1
     */
    public static final String JAR = "jar";
    /**
     * Constant used to identify the TAR archive format.
     * @since 1.1
     */
    public static final String TAR = "tar";
    /**
     * Constant used to identify the ZIP archive format.
     * @since 1.1
     */
    public static final String ZIP = "zip";

    /**
     * Entry encoding, null for the default.
     */
    private String entryEncoding = null;

    /**
     * Returns the encoding to use for arj, zip, dump, cpio and tar
     * files, or null for the default.
     *
     * @return entry encoding, or null
     * @since 1.5
     */
    public String getEntryEncoding() {
        return entryEncoding;
    }

    /**
     * Sets the encoding to use for arj, zip, dump, cpio and tar files.
     * Use null for the default.
     *
     * @param entryEncoding the entry encoding, null uses the default.
     * @since 1.5
     */
    public void setEntryEncoding(String entryEncoding) {
        this.entryEncoding = entryEncoding;
    }

    /**
     * Create an archive input stream from an archiver name and an input stream.
     *
     * @param archiverName the archive name, i.e. "ar", "arj", "zip", "tar", "jar", "dump" or "cpio"
     * @param in the input stream
     * @return the archive input stream
     * @throws ArchiveException if the archiver name is not known
     * @throws IllegalArgumentException if the archiver name or stream is null
     */
    public ArchiveInputStream createArchiveInputStream(
            final String archiverName, final InputStream in)
            throws ArchiveException {

        if (archiverName == null) {
            throw new IllegalArgumentException("Archivername must not be null.");
        }

        if (in == null) {
            throw new IllegalArgumentException("InputStream must not be null.");
        }

        if (AR.equalsIgnoreCase(archiverName)) {
            return new ArArchiveInputStream(in);
        }
        if (ARJ.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new ArjArchiveInputStream(in, entryEncoding);
            } else {
                return new ArjArchiveInputStream(in);
            }
        }
        if (ZIP.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new ZipArchiveInputStream(in, entryEncoding);
            } else {
                return new ZipArchiveInputStream(in);
            }
        }
        if (TAR.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new TarArchiveInputStream(in, entryEncoding);
            } else {
                return new TarArchiveInputStream(in);
            }
        }
        if (JAR.equalsIgnoreCase(archiverName)) {
            return new JarArchiveInputStream(in);
        }
        if (CPIO.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new CpioArchiveInputStream(in, entryEncoding);
            } else {
                return new CpioArchiveInputStream(in);
            }
        }
        if (DUMP.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new DumpArchiveInputStream(in, entryEncoding);
            } else {
                return new DumpArchiveInputStream(in);
            }
        }

        throw new ArchiveException("Archiver: " + archiverName + " not found.");
    }

    /**
     * Create an archive output stream from an archiver name and an output stream.
     *
     * @param archiverName the archive name, i.e. "ar", "zip", "tar", "jar" or "cpio"
     * @param out the output stream
     * @return the archive output stream
     * @throws ArchiveException if the archiver name is not known
     * @throws IllegalArgumentException if the archiver name or stream is null
     */
    public ArchiveOutputStream createArchiveOutputStream(
            final String archiverName, final OutputStream out)
            throws ArchiveException {
        if (archiverName == null) {
            throw new IllegalArgumentException("Archivername must not be null.");
        }
        if (out == null) {
            throw new IllegalArgumentException("OutputStream must not be null.");
        }

        if (AR.equalsIgnoreCase(archiverName)) {
            return new ArArchiveOutputStream(out);
        }
        if (ZIP.equalsIgnoreCase(archiverName)) {
            ZipArchiveOutputStream zip = new ZipArchiveOutputStream(out);
            if (entryEncoding != null) {
                zip.setEncoding(entryEncoding);
            }
            return zip;
        }
        if (TAR.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new TarArchiveOutputStream(out, entryEncoding);
            } else {
                return new TarArchiveOutputStream(out);
            }
        }
        if (JAR.equalsIgnoreCase(archiverName)) {
            return new JarArchiveOutputStream(out);
        }
        if (CPIO.equalsIgnoreCase(archiverName)) {
            if (entryEncoding != null) {
                return new CpioArchiveOutputStream(out, entryEncoding);
            } else {
                return new CpioArchiveOutputStream(out);
            }
        }
        throw new ArchiveException("Archiver: " + archiverName + " not found.");
    }

    /**
     * Create an archive input stream from an input stream, autodetecting
     * the archive type from the first few bytes of the stream. The InputStream
     * must support marks, like BufferedInputStream.
     *
     * @param in the input stream
     * @return the archive input stream
     * @throws ArchiveException if the archiver name is not known
     * @throws IllegalArgumentException if the stream is null or does not support mark
     */
    public ArchiveInputStream createArchiveInputStream(final InputStream in)
            throws ArchiveException {
        if (in == null) {
            throw new IllegalArgumentException("Stream must not be null.");
        }

        if (!in.markSupported()) {
            throw new IllegalArgumentException("Mark is not supported.");
        }

        // 12 bytes are enough to distinguish zip/jar/ar/cpio/arj signatures.
        final byte[] signature = new byte[12];
        in.mark(signature.length);
        try {
            int signatureLength = IOUtils.readFully(in, signature);
            in.reset();
            if (ZipArchiveInputStream.matches(signature, signatureLength)) {
                if (entryEncoding != null) {
                    return new ZipArchiveInputStream(in, entryEncoding);
                } else {
                    return new ZipArchiveInputStream(in);
                }
            } else if (JarArchiveInputStream.matches(signature, signatureLength)) {
                return new JarArchiveInputStream(in);
            } else if (ArArchiveInputStream.matches(signature, signatureLength)) {
                return new ArArchiveInputStream(in);
            } else if (CpioArchiveInputStream.matches(signature, signatureLength)) {
                return new CpioArchiveInputStream(in);
            } else if (ArjArchiveInputStream.matches(signature, signatureLength)) {
                return new ArjArchiveInputStream(in);
            }

            // Dump needs a bigger buffer to check the signature;
            final byte[] dumpsig = new byte[32];
            in.mark(dumpsig.length);
            signatureLength = IOUtils.readFully(in, dumpsig);
            in.reset();
            if (DumpArchiveInputStream.matches(dumpsig, signatureLength)) {
                return new DumpArchiveInputStream(in);
            }

            // Tar needs an even bigger buffer to check the signature; read the first block
            final byte[] tarheader = new byte[512];
            in.mark(tarheader.length);
            signatureLength = IOUtils.readFully(in, tarheader);
            in.reset();
            if (TarArchiveInputStream.matches(tarheader, signatureLength)) {
                if (entryEncoding != null) {
                    return new TarArchiveInputStream(in, entryEncoding);
                } else {
                    return new TarArchiveInputStream(in);
                }
            }
            // COMPRESS-117 - improve auto-recognition: a tar header with no
            // recognisable magic may still be a valid old-style tar block.
            if (signatureLength >= 512) {
                TarArchiveInputStream tais = null;
                try {
                    tais = new TarArchiveInputStream(new ByteArrayInputStream(tarheader));
                    // COMPRESS-191 - verify the header checksum
                    if (tais.getNextTarEntry().isCheckSumOK()) {
                        return new TarArchiveInputStream(in);
                    }
                } catch (Exception e) { // NOPMD
                    // can generate IllegalArgumentException as well
                    // as IOException
                    // autodetection, simply not a TAR
                    // ignored
                } finally {
                    IOUtils.closeQuietly(tais);
                }
            }
        } catch (IOException e) {
            throw new ArchiveException("Could not use reset and mark operations.", e);
        }

        throw new ArchiveException("No Archiver found for the stream signature");
    }

}
package org.apache.commons.compress.archivers;

import java.io.BufferedInputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

/**
 * Simple command line application that lists the contents of an archive.
 *
 * <p>The name of the archive must be given as a command line argument.</p>
 * <p>The optional second argument defines the archive type, in case the
 * format is not recognised.</p>
 *
 * @since 1.1
 */
public final class Lister {
    private static final ArchiveStreamFactory factory = new ArchiveStreamFactory();

    /**
     * Lists the entries of the archive named by {@code args[0]}, optionally
     * forcing the archive type given in {@code args[1]}.
     *
     * @param args archive-name [archive-type]
     * @throws Exception if the archive cannot be read
     */
    public static void main(String[] args) throws Exception {
        if (args.length == 0) {
            usage();
            return;
        }
        System.out.println("Analysing " + args[0]);
        File f = new File(args[0]);
        if (!f.isFile()) {
            System.err.println(f + " doesn't exist or is a directory");
            // FIX: previously fell through and crashed with
            // FileNotFoundException right after printing the error.
            return;
        }
        InputStream fis = new BufferedInputStream(new FileInputStream(f));
        try {
            ArchiveInputStream ais;
            if (args.length > 1) {
                ais = factory.createArchiveInputStream(args[1], fis);
            } else {
                ais = factory.createArchiveInputStream(fis);
            }
            System.out.println("Created " + ais.toString());
            ArchiveEntry ae;
            while ((ae = ais.getNextEntry()) != null) {
                System.out.println(ae.getName());
            }
            ais.close();
        } finally {
            // Guarantees release of the file handle on every exit path;
            // closing twice after ais.close() is harmless.
            fis.close();
        }
    }

    /** Prints the expected command line parameters to stdout. */
    private static void usage() {
        System.out.println("Parameters: archive-name [archive-type]");
    }

}
package org.apache.commons.compress.archivers.ar;

import java.io.File;
import java.util.Date;

import org.apache.commons.compress.archivers.ArchiveEntry;

/**
 * Represents an archive entry in the "ar" format.
 *
 * <p>Each AR archive starts with "!&lt;arch&gt;" followed by a LF. After these
 * 8 bytes the archive entries are listed. The format of an entry header is:</p>
 *
 * <pre>
 * START BYTE   END BYTE    NAME                    FORMAT      LENGTH
 * 0            15          File name               ASCII       16
 * 16           27          Modification timestamp  Decimal     12
 * 28           33          Owner ID                Decimal     6
 * 34           39          Group ID                Decimal     6
 * 40           47          File mode               Octal       8
 * 48           57          File size (bytes)       Decimal     10
 * 58           59          File magic              \140\012    2
 * </pre>
 *
 * <p>This specifies that an ar archive entry header contains 60 bytes.</p>
 *
 * <p>Due to the limitation of the file name length to 16 bytes GNU and
 * BSD have their own variants of this format. Currently Commons
 * Compress can read but not write the GNU variant. It fully supports
 * the BSD variant.</p>
 *
 * @Immutable
 */
public class ArArchiveEntry implements ArchiveEntry {

    /**
     * The global archive header: ASCII "!&lt;arch&gt;" followed by LF.
     *
     * <p>FIX: this constant had been corrupted to "!\n" (the "&lt;arch&gt;"
     * part was lost). ArArchiveInputStream.matches() checks the full 8-byte
     * magic 0x21 0x3c 0x61 0x72 0x63 0x68 0x3e 0x0a and getNextArEntry()
     * validates the stream against this constant, so the truncated value
     * made every ar archive unreadable and every written archive invalid.</p>
     */
    public static final String HEADER = "!<arch>\n";

    /** The trailer for each entry: a backquote followed by LF. */
    public static final String TRAILER = "`\012";

    /**
     * SVR4/GNU adds a trailing / to names; BSD does not.
     * They also vary in how names longer than 16 characters are represented.
     * (Not yet fully supported by this implementation)
     */
    private final String name;
    private final int userId;
    private final int groupId;
    private final int mode;
    private static final int DEFAULT_MODE = 33188; // = (octal) 0100644
    private final long lastModified;
    private final long length;

    /**
     * Create a new instance using a couple of default values.
     *
     * <p>Sets userId and groupId to 0, the octal file mode to 644 and
     * the last modified time to the current time.</p>
     *
     * @param name name of the entry
     * @param length length of the entry in bytes
     */
    public ArArchiveEntry(String name, long length) {
        this(name, length, 0, 0, DEFAULT_MODE,
             System.currentTimeMillis() / 1000);
    }

    /**
     * Create a new instance.
     *
     * @param name name of the entry
     * @param length length of the entry in bytes
     * @param userId numeric user id
     * @param groupId numeric group id
     * @param mode file mode
     * @param lastModified last modified time in seconds since the epoch
     */
    public ArArchiveEntry(String name, long length, int userId, int groupId,
                          int mode, long lastModified) {
        this.name = name;
        this.length = length;
        this.userId = userId;
        this.groupId = groupId;
        this.mode = mode;
        this.lastModified = lastModified;
    }

    /**
     * Create a new instance using the attributes of the given file.
     *
     * @param inputFile file to read size and timestamp from
     * @param entryName name to store in the archive
     */
    public ArArchiveEntry(File inputFile, String entryName) {
        // TODO sort out mode
        this(entryName, inputFile.isFile() ? inputFile.length() : 0,
             0, 0, DEFAULT_MODE, inputFile.lastModified() / 1000);
    }

    public long getSize() {
        return this.getLength();
    }

    public String getName() {
        return name;
    }

    public int getUserId() {
        return userId;
    }

    public int getGroupId() {
        return groupId;
    }

    public int getMode() {
        return mode;
    }

    /**
     * Last modified time in seconds since the epoch.
     */
    public long getLastModified() {
        return lastModified;
    }

    public Date getLastModifiedDate() {
        return new Date(1000 * getLastModified());
    }

    public long getLength() {
        return length;
    }

    public boolean isDirectory() {
        return false;
    }

    @Override
    public int hashCode() {
        // Identity is intentionally based on the name only, mirroring equals().
        final int prime = 31;
        int result = 1;
        result = prime * result + (name == null ? 0 : name.hashCode());
        return result;
    }

    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (obj == null || getClass() != obj.getClass()) {
            return false;
        }
        ArArchiveEntry other = (ArArchiveEntry) obj;
        if (name == null) {
            if (other.name != null) {
                return false;
            }
        } else if (!name.equals(other.name)) {
            return false;
        }
        return true;
    }
}
package org.apache.commons.compress.archivers.ar;

import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveInputStream;
import org.apache.commons.compress.utils.ArchiveUtils;
import org.apache.commons.compress.utils.IOUtils;

/**
 * Implements the "ar" archive format as an input stream.
 *
 * @NotThreadSafe
 */
public class ArArchiveInputStream extends ArchiveInputStream {

    private final InputStream input;
    // Number of bytes consumed from the underlying stream so far.
    private long offset = 0;
    private boolean closed;

    /*
     * If getNextEntry has been called, the entry metadata is stored in
     * currentEntry.
     */
    private ArArchiveEntry currentEntry = null;

    // Storage area for extra long names (GNU ar)
    private byte[] namebuffer = null;

    /*
     * The offset where the current entry started. -1 if no entry has been
     * called
     */
    private long entryOffset = -1;

    // cached buffers - must only be used locally in the class
    // (COMPRESS-172 - reduce garbage collection)
    private final byte[] NAME_BUF = new byte[16];
    private final byte[] LAST_MODIFIED_BUF = new byte[12];
    private final byte[] ID_BUF = new byte[6];
    private final byte[] FILE_MODE_BUF = new byte[8];
    private final byte[] LENGTH_BUF = new byte[10];

    /**
     * Constructs an Ar input stream with the referenced stream
     *
     * @param pInput the ar input stream
     */
    public ArArchiveInputStream(final InputStream pInput) {
        input = pInput;
        closed = false;
    }

    /**
     * Returns the next AR entry in this stream.
     *
     * @return the next AR entry, or null at end of archive.
     * @throws IOException if the entry could not be read
     */
    public ArArchiveEntry getNextArEntry() throws IOException {
        // Skip any unread remainder of the previous entry's data.
        if (currentEntry != null) {
            final long entryEnd = entryOffset + currentEntry.getLength();
            IOUtils.skip(this, entryEnd - offset);
            currentEntry = null;
        }

        // At the very start of the archive, validate the global magic header.
        if (offset == 0) {
            final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
            final byte[] realized = new byte[expected.length];
            final int read = IOUtils.readFully(this, realized);
            if (read != expected.length) {
                throw new IOException("failed to read header. Occured at byte: " + getBytesRead());
            }
            for (int i = 0; i < expected.length; i++) {
                if (expected[i] != realized[i]) {
                    throw new IOException("invalid header " + ArchiveUtils.toAsciiString(realized));
                }
            }
        }

        // Entry headers start on even offsets; consume the pad byte if present.
        if (offset % 2 != 0 && read() < 0) {
            // hit eof
            return null;
        }

        if (input.available() == 0) {
            return null;
        }

        // Read the fixed-width header fields in order (see ArArchiveEntry docs).
        IOUtils.readFully(this, NAME_BUF);
        IOUtils.readFully(this, LAST_MODIFIED_BUF);
        IOUtils.readFully(this, ID_BUF);
        int userId = asInt(ID_BUF, true);
        IOUtils.readFully(this, ID_BUF);
        IOUtils.readFully(this, FILE_MODE_BUF);
        IOUtils.readFully(this, LENGTH_BUF);

        {
            final byte[] expected = ArchiveUtils.toAsciiBytes(ArArchiveEntry.TRAILER);
            final byte[] realized = new byte[expected.length];
            final int read = IOUtils.readFully(this, realized);
            if (read != expected.length) {
                throw new IOException("failed to read entry trailer. Occured at byte: " + getBytesRead());
            }
            for (int i = 0; i < expected.length; i++) {
                if (expected[i] != realized[i]) {
                    throw new IOException("invalid entry trailer. not read the content? Occured at byte: " + getBytesRead());
                }
            }
        }

        entryOffset = offset;

        // GNU ar uses a '/' to mark the end of the filename; this allows for
        // the use of spaces without the use of an extended filename.

        // entry name is stored as ASCII string
        String temp = ArchiveUtils.toAsciiString(NAME_BUF).trim();
        if (isGNUStringTable(temp)) { // GNU extended filenames entry
            currentEntry = readGNUStringTable(LENGTH_BUF);
            return getNextArEntry();
        }

        long len = asLong(LENGTH_BUF);
        if (temp.endsWith("/")) { // GNU terminator
            temp = temp.substring(0, temp.length() - 1);
        } else if (isGNULongName(temp)) {
            int off = Integer.parseInt(temp.substring(1)); // get the offset
            temp = getExtendedName(off); // convert to the long name
        } else if (isBSDLongName(temp)) {
            temp = getBSDLongName(temp);
            // entry length contained the length of the file name in
            // addition to the real length of the entry.
            // assume file name was ASCII, there is no "standard" otherwise
            int nameLen = temp.length();
            len -= nameLen;
            entryOffset += nameLen;
        }

        currentEntry = new ArArchiveEntry(temp, len, userId,
                                          asInt(ID_BUF, true),
                                          asInt(FILE_MODE_BUF, 8),
                                          asLong(LAST_MODIFIED_BUF));
        return currentEntry;
    }

    /**
     * Get an extended name from the GNU extended name buffer.
     *
     * @param offset pointer to entry within the buffer
     * @return the extended file name; without trailing "/" if present.
     * @throws IOException if name not found or buffer not set up
     */
    private String getExtendedName(int offset) throws IOException {
        if (namebuffer == null) {
            throw new IOException("Cannot process GNU long filename as no // record was found");
        }
        for (int i = offset; i < namebuffer.length; i++) {
            if (namebuffer[i] == '\012') {
                if (namebuffer[i - 1] == '/') {
                    i--; // drop trailing /
                }
                return ArchiveUtils.toAsciiString(namebuffer, offset, i - offset);
            }
        }
        throw new IOException("Failed to read entry: " + offset);
    }

    // Parses a decimal ASCII field into a long.
    private long asLong(byte[] input) {
        return Long.parseLong(ArchiveUtils.toAsciiString(input).trim());
    }

    private int asInt(byte[] input) {
        return asInt(input, 10, false);
    }

    private int asInt(byte[] input, boolean treatBlankAsZero) {
        return asInt(input, 10, treatBlankAsZero);
    }

    private int asInt(byte[] input, int base) {
        return asInt(input, base, false);
    }

    // Parses an ASCII field in the given base; an all-blank field is
    // optionally treated as 0 (some tools leave uid/gid blank).
    private int asInt(byte[] input, int base, boolean treatBlankAsZero) {
        String string = ArchiveUtils.toAsciiString(input).trim();
        if (string.length() == 0 && treatBlankAsZero) {
            return 0;
        }
        return Integer.parseInt(string, base);
    }

    /*
     * (non-Javadoc)
     *
     * @see
     * org.apache.commons.compress.archivers.ArchiveInputStream#getNextEntry()
     */
    @Override
    public ArchiveEntry getNextEntry() throws IOException {
        return getNextArEntry();
    }

    /*
     * (non-Javadoc)
     *
     * @see java.io.InputStream#close()
     */
    @Override
    public void close() throws IOException {
        if (!closed) {
            closed = true;
            input.close();
        }
        currentEntry = null;
    }

    /*
     * (non-Javadoc)
     *
     * @see java.io.InputStream#read(byte[], int, int)
     */
    @Override
    public int read(byte[] b, final int off, final int len) throws IOException {
        int toRead = len;
        if (currentEntry != null) {
            // Never read past the end of the current entry's data.
            final long entryEnd = entryOffset + currentEntry.getLength();
            if (len > 0 && entryEnd > offset) {
                toRead = (int) Math.min(len, entryEnd - offset);
            } else {
                return -1;
            }
        }
        final int ret = this.input.read(b, off, toRead);
        count(ret);
        offset += ret > 0 ? ret : 0;
        return ret;
    }

    /**
     * Checks if the signature matches ASCII "!&lt;arch&gt;" followed by a
     * single LF control character
     *
     * @param signature the bytes to check
     * @param length the number of bytes to check
     * @return true, if this stream is an Ar archive stream, false otherwise
     */
    public static boolean matches(byte[] signature, int length) {
        // 3c21 7261 6863 0a3e = "!<arch>\n" read as little-endian shorts

        if (length < 8) {
            return false;
        }
        if (signature[0] != 0x21) { // '!'
            return false;
        }
        if (signature[1] != 0x3c) { // '<'
            return false;
        }
        if (signature[2] != 0x61) { // 'a'
            return false;
        }
        if (signature[3] != 0x72) { // 'r'
            return false;
        }
        if (signature[4] != 0x63) { // 'c'
            return false;
        }
        if (signature[5] != 0x68) { // 'h'
            return false;
        }
        if (signature[6] != 0x3e) { // '>'
            return false;
        }
        if (signature[7] != 0x0a) { // '\n'
            return false;
        }

        return true;
    }

    static final String BSD_LONGNAME_PREFIX = "#1/";
    private static final int BSD_LONGNAME_PREFIX_LEN =
        BSD_LONGNAME_PREFIX.length();
    private static final String BSD_LONGNAME_PATTERN =
        "^" + BSD_LONGNAME_PREFIX + "\\d+";

    /**
     * Does the name look like it is a long name (or a name containing
     * spaces) as encoded by BSD ar?
     *
     * <p>From the FreeBSD ar(5) man page:</p>
     * <pre>
     * BSD   In the BSD variant, names that are shorter than 16
     *       characters and without embedded spaces are stored
     *       directly in this field.  If a name has an embedded
     *       space, or if it is longer than 16 characters, then
     *       the string "#1/" followed by the decimal represen-
     *       tation of the length of the file name is placed in
     *       this field. The actual file name is stored immedi-
     *       ately after the archive header.  The content of the
     *       archive member follows the file name.  The ar_size
     *       field of the header (see below) will then hold the
     *       sum of the size of the file name and the size of
     *       the member.
     * </pre>
     *
     * @since 1.3
     */
    private static boolean isBSDLongName(String name) {
        return name != null && name.matches(BSD_LONGNAME_PATTERN);
    }

    /**
     * Reads the real name from the current stream assuming the very
     * first bytes to be read are the real file name.
     *
     * @see #isBSDLongName
     *
     * @since 1.3
     */
    private String getBSDLongName(String bsdLongName) throws IOException {
        int nameLen =
            Integer.parseInt(bsdLongName.substring(BSD_LONGNAME_PREFIX_LEN));
        byte[] name = new byte[nameLen];
        int read = IOUtils.readFully(input, name);
        count(read);
        if (read != nameLen) {
            throw new EOFException();
        }
        return ArchiveUtils.toAsciiString(name);
    }

    private static final String GNU_STRING_TABLE_NAME = "//";

    /**
     * Is this the name of the "Archive String Table" as used by
     * SVR4/GNU to store long file names?
     *
     * <p>GNU ar stores multiple extended filenames in the data section
     * of a file with the name "//", this record is referred to by
     * future headers.</p>
     *
     * <p>A header references an extended filename by storing a "/"
     * followed by a decimal offset to the start of the filename in
     * the extended filename data section.</p>
     *
     * <p>The format of the "//" file itself is simply a list of the
     * long filenames, each separated by one or more LF
     * characters. Note that the decimal offsets are number of
     * characters, not line or string number within the "//" file.</p>
     */
    private static boolean isGNUStringTable(String name) {
        return GNU_STRING_TABLE_NAME.equals(name);
    }

    /**
     * Reads the GNU archive String Table.
     *
     * @see #isGNUStringTable
     */
    private ArArchiveEntry readGNUStringTable(byte[] length) throws IOException {
        int bufflen = asInt(length); // Assume length will fit in an int
        namebuffer = new byte[bufflen];
        int read = read(namebuffer, 0, bufflen);
        if (read != bufflen) {
            throw new IOException("Failed to read complete // record: expected="
                                  + bufflen + " read=" + read);
        }
        return new ArArchiveEntry(GNU_STRING_TABLE_NAME, bufflen);
    }

    private static final String GNU_LONGNAME_PATTERN = "^/\\d+";

    /**
     * Does the name look like it is a long name (or a name containing
     * spaces) as encoded by SVR4/GNU ar?
     *
     * @see #isGNUStringTable
     */
    private boolean isGNULongName(String name) {
        return name != null && name.matches(GNU_LONGNAME_PATTERN);
    }
}
package org.apache.commons.compress.archivers.ar;

import java.io.File;
import java.io.IOException;
import java.io.OutputStream;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.ArchiveOutputStream;
import org.apache.commons.compress.utils.ArchiveUtils;

/**
 * Implements the "ar" archive format as an output stream.
 *
 * @NotThreadSafe
 */
public class ArArchiveOutputStream extends ArchiveOutputStream {
    /** Fail if a long file name is required in the archive. */
    public static final int LONGFILE_ERROR = 0;

    /** BSD ar extensions are used to store long file names in the archive. */
    public static final int LONGFILE_BSD = 1;

    private final OutputStream out;
    // Bytes of entry data written since the last putArchiveEntry().
    private long entryOffset = 0;
    private ArArchiveEntry prevEntry;
    private boolean haveUnclosedEntry = false;
    private int longFileMode = LONGFILE_ERROR;

    /** indicates if this archive is finished */
    private boolean finished = false;

    public ArArchiveOutputStream(final OutputStream pOut) {
        this.out = pOut;
    }

    /**
     * Set the long file mode.
     * This can be LONGFILE_ERROR(0) or LONGFILE_BSD(1).
     * This specifies the treatment of long file names (names &gt;= 16).
     * Default is LONGFILE_ERROR.
     *
     * @param longFileMode the mode to use
     * @since 1.3
     */
    public void setLongFileMode(int longFileMode) {
        this.longFileMode = longFileMode;
    }

    // Writes the global "!<arch>\n" magic; returns the number of bytes written.
    private long writeArchiveHeader() throws IOException {
        byte[] header = ArchiveUtils.toAsciiBytes(ArArchiveEntry.HEADER);
        out.write(header);
        return header.length;
    }

    @Override
    public void closeArchiveEntry() throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        if (prevEntry == null || !haveUnclosedEntry) {
            throw new IOException("No current entry to close");
        }
        if (entryOffset % 2 != 0) {
            out.write('\n'); // Pad byte so the next header starts on an even offset
        }
        haveUnclosedEntry = false;
    }

    @Override
    public void putArchiveEntry(final ArchiveEntry pEntry) throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }

        ArArchiveEntry pArEntry = (ArArchiveEntry) pEntry;
        if (prevEntry == null) {
            writeArchiveHeader();
        } else {
            if (prevEntry.getLength() != entryOffset) {
                // FIX: message previously lacked the closing parenthesis.
                throw new IOException("length does not match entry ("
                        + prevEntry.getLength() + " != " + entryOffset + ")");
            }

            if (haveUnclosedEntry) {
                closeArchiveEntry();
            }
        }

        prevEntry = pArEntry;

        writeEntryHeader(pArEntry);

        entryOffset = 0;
        haveUnclosedEntry = true;
    }

    // Pads with pFill up to pNewOffset; returns pNewOffset.
    private long fill(final long pOffset, final long pNewOffset, final char pFill) throws IOException {
        final long diff = pNewOffset - pOffset;

        if (diff > 0) {
            for (int i = 0; i < diff; i++) {
                write(pFill);
            }
        }

        return pNewOffset;
    }

    // Writes an ASCII string; returns the number of bytes written.
    private long write(final String data) throws IOException {
        final byte[] bytes = data.getBytes("ascii");
        write(bytes);
        return bytes.length;
    }

    // Writes the 60-byte fixed-width entry header (see ArArchiveEntry docs);
    // in BSD long-name mode the actual name follows the header.
    private long writeEntryHeader(final ArArchiveEntry pEntry) throws IOException {

        long offset = 0;
        boolean mustAppendName = false;

        final String n = pEntry.getName();
        if (LONGFILE_ERROR == longFileMode && n.length() > 16) {
            throw new IOException("filename too long, > 16 chars: " + n);
        }
        if (LONGFILE_BSD == longFileMode &&
            (n.length() > 16 || n.indexOf(" ") > -1)) {
            mustAppendName = true;
            offset += write(ArArchiveInputStream.BSD_LONGNAME_PREFIX
                            + String.valueOf(n.length()));
        } else {
            offset += write(n);
        }

        offset = fill(offset, 16, ' ');
        final String m = "" + pEntry.getLastModified();
        if (m.length() > 12) {
            throw new IOException("modified too long");
        }
        offset += write(m);

        offset = fill(offset, 28, ' ');
        final String u = "" + pEntry.getUserId();
        if (u.length() > 6) {
            throw new IOException("userid too long");
        }
        offset += write(u);

        offset = fill(offset, 34, ' ');
        final String g = "" + pEntry.getGroupId();
        if (g.length() > 6) {
            throw new IOException("groupid too long");
        }
        offset += write(g);

        offset = fill(offset, 40, ' ');
        final String fm = "" + Integer.toString(pEntry.getMode(), 8);
        if (fm.length() > 8) {
            throw new IOException("filemode too long");
        }
        offset += write(fm);

        offset = fill(offset, 48, ' ');
        final String s =
            String.valueOf(pEntry.getLength()
                           + (mustAppendName ? n.length() : 0));
        if (s.length() > 10) {
            throw new IOException("size too long");
        }
        offset += write(s);

        offset = fill(offset, 58, ' ');

        offset += write(ArArchiveEntry.TRAILER);

        if (mustAppendName) {
            offset += write(n);
        }

        return offset;
    }

    @Override
    public void write(byte[] b, int off, int len) throws IOException {
        out.write(b, off, len);
        count(len);
        entryOffset += len;
    }

    /**
     * Calls finish if necessary, and then closes the OutputStream
     */
    @Override
    public void close() throws IOException {
        if (!finished) {
            finish();
        }
        out.close();
        prevEntry = null;
    }

    @Override
    public ArchiveEntry createArchiveEntry(File inputFile, String entryName)
            throws IOException {
        if (finished) {
            throw new IOException("Stream has already been finished");
        }
        return new ArArchiveEntry(inputFile, entryName);
    }

    @Override
    public void finish() throws IOException {
        if (haveUnclosedEntry) {
            throw new IOException("This archive contains unclosed entries.");
        } else if (finished) {
            throw new IOException("This archive has already been finished");
        }
        finished = true;
    }
}

<p>Provides stream classes for reading and writing archives using
the AR format.</p>

package org.apache.commons.compress.archivers.arj;

import java.io.File;
import java.util.Date;
import java.util.regex.Matcher;

import org.apache.commons.compress.archivers.ArchiveEntry;
import org.apache.commons.compress.archivers.zip.ZipUtil;

/**
 * An entry in an ARJ archive.
 *
 * @NotThreadSafe
 * @since 1.6
 */
public class ArjArchiveEntry implements ArchiveEntry {
    // Raw header parsed by ArjArchiveInputStream; this class is a read-only view.
    private final LocalFileHeader localFileHeader;

    public ArjArchiveEntry() {
        localFileHeader = new LocalFileHeader();
    }

    ArjArchiveEntry(final LocalFileHeader localFileHeader) {
        this.localFileHeader = localFileHeader;
    }

    /**
     * Get this entry's name.
     *
     * <p>If the PATHSYM flag is set, '/' separators stored in the archive are
     * translated to the platform's separator.</p>
     *
     * @return This entry's name.
     */
    public String getName() {
        if ((localFileHeader.arjFlags & LocalFileHeader.Flags.PATHSYM) != 0) {
            return localFileHeader.name.replaceAll("/",
                    Matcher.quoteReplacement(File.separator));
        } else {
            return localFileHeader.name;
        }
    }

    /**
     * Get this entry's file size.
     *
     * @return This entry's file size (uncompressed).
     */
    public long getSize() {
        return localFileHeader.originalSize;
    }

    /** True if the entry refers to a directory. */
    public boolean isDirectory() {
        return localFileHeader.fileType == LocalFileHeader.FileTypes.DIRECTORY;
    }

    /**
     * The last modified date of the entry.
     *
     * <p>Note the interpretation of time is different depending on
     * the HostOS that has created the archive. While an OS that is
     * {@link #isHostOsUnix considered to be Unix} stores time in a
     * timezone independent manner, other platforms only use the local
     * time. I.e. if an archive has been created at midnight UTC on a
     * machine in timezone UTC this method will return midnight
     * regardless of timezone if the archive has been created on a
     * non-Unix system and a time taking the current timezone into
     * account if the archive has been created on Unix.</p>
     */
    public Date getLastModifiedDate() {
        long ts = isHostOsUnix() ? localFileHeader.dateTimeModified * 1000l
            : ZipUtil.dosToJavaTime(0xFFFFFFFFL & localFileHeader.dateTimeModified);
        return new Date(ts);
    }

    /**
     * File mode of this entry.
     *
     * <p>The format depends on the host os that created the entry.</p>
     */
    public int getMode() {
        return localFileHeader.fileAccessMode;
    }

    /**
     * File mode of this entry as Unix stat value.
     *
     * <p>Will only be non-zero if the host os was UNIX.</p>
     */
    public int getUnixMode() {
        return isHostOsUnix() ? getMode() : 0;
    }

    /**
     * The operating system the archive has been created on.
     * @see HostOs
     */
    public int getHostOs() {
        return localFileHeader.hostOS;
    }

    /**
     * Is the operating system the archive has been created on one
     * that is considered a UNIX OS by arj?
     */
    public boolean isHostOsUnix() {
        return getHostOs() == HostOs.UNIX || getHostOs() == HostOs.NEXT;
    }

    /**
     * The known values for HostOs.
     */
    public static class HostOs {
        public static final int DOS = 0;
        public static final int PRIMOS = 1;
        public static final int UNIX = 2;
        public static final int AMIGA = 3;
        public static final int MAC_OS = 4;
        public static final int OS_2 = 5;
        public static final int APPLE_GS = 6;
        public static final int ATARI_ST = 7;
        public static final int NEXT = 8;
        public static final int VAX_VMS = 9;
        public static final int WIN95 = 10;
        public static final int WIN32 = 11;
    }

}
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.arj; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.ArrayList; +import java.util.zip.CRC32; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.utils.BoundedInputStream; +import org.apache.commons.compress.utils.CRC32VerifyingInputStream; +import org.apache.commons.compress.utils.IOUtils; + +/** + * Implements the "arj" archive format as an InputStream. + *

+ * Reference + * @NotThreadSafe + * @since 1.6 + */ +public class ArjArchiveInputStream extends ArchiveInputStream { + private static final int ARJ_MAGIC_1 = 0x60; + private static final int ARJ_MAGIC_2 = 0xEA; + private final DataInputStream in; + private final String charsetName; + private final MainHeader mainHeader; + private LocalFileHeader currentLocalFileHeader = null; + private InputStream currentInputStream = null; + + /** + * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in. + * @param inputStream the underlying stream, whose ownership is taken + * @param charsetName the charset used for file names and comments + * in the archive + * @throws ArchiveException + */ + public ArjArchiveInputStream(final InputStream inputStream, + final String charsetName) throws ArchiveException { + in = new DataInputStream(inputStream); + this.charsetName = charsetName; + try { + mainHeader = readMainHeader(); + if ((mainHeader.arjFlags & MainHeader.Flags.GARBLED) != 0) { + throw new ArchiveException("Encrypted ARJ files are unsupported"); + } + if ((mainHeader.arjFlags & MainHeader.Flags.VOLUME) != 0) { + throw new ArchiveException("Multi-volume ARJ files are unsupported"); + } + } catch (IOException ioException) { + throw new ArchiveException(ioException.getMessage(), ioException); + } + } + + /** + * Constructs the ArjInputStream, taking ownership of the inputStream that is passed in, + * and using the CP437 character encoding. 
+ * @param inputStream the underlying stream, whose ownership is taken + * @throws ArchiveException + */ + public ArjArchiveInputStream(final InputStream inputStream) + throws ArchiveException { + this(inputStream, "CP437"); + } + + @Override + public void close() throws IOException { + in.close(); + } + + private int read8(final DataInputStream dataIn) throws IOException { + int value = dataIn.readUnsignedByte(); + count(1); + return value; + } + + private int read16(final DataInputStream dataIn) throws IOException { + final int value = dataIn.readUnsignedShort(); + count(2); + return Integer.reverseBytes(value) >>> 16; + } + + private int read32(final DataInputStream dataIn) throws IOException { + final int value = dataIn.readInt(); + count(4); + return Integer.reverseBytes(value); + } + + private String readString(final DataInputStream dataIn) throws IOException { + final ByteArrayOutputStream buffer = new ByteArrayOutputStream(); + int nextByte; + while ((nextByte = dataIn.readUnsignedByte()) != 0) { + buffer.write(nextByte); + } + if (charsetName != null) { + return new String(buffer.toByteArray(), charsetName); + } else { + // intentionally using the default encoding as that's the contract for a null charsetName + return new String(buffer.toByteArray()); + } + } + + private void readFully(final DataInputStream dataIn, byte[] b) + throws IOException { + dataIn.readFully(b); + count(b.length); + } + + private byte[] readHeader() throws IOException { + boolean found = false; + byte[] basicHeaderBytes = null; + do { + int first = 0; + int second = read8(in); + do { + first = second; + second = read8(in); + } while (first != ARJ_MAGIC_1 && second != ARJ_MAGIC_2); + final int basicHeaderSize = read16(in); + if (basicHeaderSize == 0) { + // end of archive + return null; + } + if (basicHeaderSize <= 2600) { + basicHeaderBytes = new byte[basicHeaderSize]; + readFully(in, basicHeaderBytes); + final long basicHeaderCrc32 = read32(in) & 0xFFFFFFFFL; + final CRC32 crc32 = 
new CRC32(); + crc32.update(basicHeaderBytes); + if (basicHeaderCrc32 == crc32.getValue()) { + found = true; + } + } + } while (!found); + return basicHeaderBytes; + } + + private MainHeader readMainHeader() throws IOException { + final byte[] basicHeaderBytes = readHeader(); + if (basicHeaderBytes == null) { + throw new IOException("Archive ends without any headers"); + } + final DataInputStream basicHeader = new DataInputStream( + new ByteArrayInputStream(basicHeaderBytes)); + + final int firstHeaderSize = basicHeader.readUnsignedByte(); + final byte[] firstHeaderBytes = new byte[firstHeaderSize - 1]; + basicHeader.readFully(firstHeaderBytes); + final DataInputStream firstHeader = new DataInputStream( + new ByteArrayInputStream(firstHeaderBytes)); + + final MainHeader hdr = new MainHeader(); + hdr.archiverVersionNumber = firstHeader.readUnsignedByte(); + hdr.minVersionToExtract = firstHeader.readUnsignedByte(); + hdr.hostOS = firstHeader.readUnsignedByte(); + hdr.arjFlags = firstHeader.readUnsignedByte(); + hdr.securityVersion = firstHeader.readUnsignedByte(); + hdr.fileType = firstHeader.readUnsignedByte(); + hdr.reserved = firstHeader.readUnsignedByte(); + hdr.dateTimeCreated = read32(firstHeader); + hdr.dateTimeModified = read32(firstHeader); + hdr.archiveSize = 0xffffFFFFL & read32(firstHeader); + hdr.securityEnvelopeFilePosition = read32(firstHeader); + hdr.fileSpecPosition = read16(firstHeader); + hdr.securityEnvelopeLength = read16(firstHeader); + pushedBackBytes(20); // count has already counted them via readFully + hdr.encryptionVersion = firstHeader.readUnsignedByte(); + hdr.lastChapter = firstHeader.readUnsignedByte(); + + if (firstHeaderSize >= 33) { + hdr.arjProtectionFactor = firstHeader.readUnsignedByte(); + hdr.arjFlags2 = firstHeader.readUnsignedByte(); + firstHeader.readUnsignedByte(); + firstHeader.readUnsignedByte(); + } + + hdr.name = readString(basicHeader); + hdr.comment = readString(basicHeader); + + final int extendedHeaderSize = 
read16(in); + if (extendedHeaderSize > 0) { + hdr.extendedHeaderBytes = new byte[extendedHeaderSize]; + readFully(in, hdr.extendedHeaderBytes); + final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in); + final CRC32 crc32 = new CRC32(); + crc32.update(hdr.extendedHeaderBytes); + if (extendedHeaderCrc32 != crc32.getValue()) { + throw new IOException("Extended header CRC32 verification failure"); + } + } + + return hdr; + } + + private LocalFileHeader readLocalFileHeader() throws IOException { + final byte[] basicHeaderBytes = readHeader(); + if (basicHeaderBytes == null) { + return null; + } + final DataInputStream basicHeader = new DataInputStream( + new ByteArrayInputStream(basicHeaderBytes)); + + final int firstHeaderSize = basicHeader.readUnsignedByte(); + final byte[] firstHeaderBytes = new byte[firstHeaderSize - 1]; + basicHeader.readFully(firstHeaderBytes); + final DataInputStream firstHeader = new DataInputStream( + new ByteArrayInputStream(firstHeaderBytes)); + + final LocalFileHeader localFileHeader = new LocalFileHeader(); + localFileHeader.archiverVersionNumber = firstHeader.readUnsignedByte(); + localFileHeader.minVersionToExtract = firstHeader.readUnsignedByte(); + localFileHeader.hostOS = firstHeader.readUnsignedByte(); + localFileHeader.arjFlags = firstHeader.readUnsignedByte(); + localFileHeader.method = firstHeader.readUnsignedByte(); + localFileHeader.fileType = firstHeader.readUnsignedByte(); + localFileHeader.reserved = firstHeader.readUnsignedByte(); + localFileHeader.dateTimeModified = read32(firstHeader); + localFileHeader.compressedSize = 0xffffFFFFL & read32(firstHeader); + localFileHeader.originalSize = 0xffffFFFFL & read32(firstHeader); + localFileHeader.originalCrc32 = 0xffffFFFFL & read32(firstHeader); + localFileHeader.fileSpecPosition = read16(firstHeader); + localFileHeader.fileAccessMode = read16(firstHeader); + pushedBackBytes(20); + localFileHeader.firstChapter = firstHeader.readUnsignedByte(); + localFileHeader.lastChapter = 
firstHeader.readUnsignedByte(); + + readExtraData(firstHeaderSize, firstHeader, localFileHeader); + + localFileHeader.name = readString(basicHeader); + localFileHeader.comment = readString(basicHeader); + + ArrayList extendedHeaders = new ArrayList(); + int extendedHeaderSize; + while ((extendedHeaderSize = read16(in)) > 0) { + final byte[] extendedHeaderBytes = new byte[extendedHeaderSize]; + readFully(in, extendedHeaderBytes); + final long extendedHeaderCrc32 = 0xffffFFFFL & read32(in); + final CRC32 crc32 = new CRC32(); + crc32.update(extendedHeaderBytes); + if (extendedHeaderCrc32 != crc32.getValue()) { + throw new IOException("Extended header CRC32 verification failure"); + } + extendedHeaders.add(extendedHeaderBytes); + } + localFileHeader.extendedHeaders = extendedHeaders.toArray(new byte[extendedHeaders.size()][]); + + return localFileHeader; + } + + private void readExtraData(int firstHeaderSize, DataInputStream firstHeader, + LocalFileHeader localFileHeader) throws IOException { + if (firstHeaderSize >= 33) { + localFileHeader.extendedFilePosition = read32(firstHeader); + if (firstHeaderSize >= 45) { + localFileHeader.dateTimeAccessed = read32(firstHeader); + localFileHeader.dateTimeCreated = read32(firstHeader); + localFileHeader.originalSizeEvenForVolumes = read32(firstHeader); + pushedBackBytes(12); + } + pushedBackBytes(4); + } + } + + /** + * Checks if the signature matches what is expected for an arj file. + * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is an arj archive stream, false otherwise + */ + public static boolean matches(final byte[] signature, final int length) { + return length >= 2 && + (0xff & signature[0]) == ARJ_MAGIC_1 && + (0xff & signature[1]) == ARJ_MAGIC_2; + } + + /** + * Gets the archive's recorded name. + */ + public String getArchiveName() { + return mainHeader.name; + } + + /** + * Gets the archive's comment. 
+ */ + public String getArchiveComment() { + return mainHeader.comment; + } + + @Override + public ArjArchiveEntry getNextEntry() throws IOException { + if (currentInputStream != null) { + // return value ignored as IOUtils.skip ensures the stream is drained completely + IOUtils.skip(currentInputStream, Long.MAX_VALUE); + currentInputStream.close(); + currentLocalFileHeader = null; + currentInputStream = null; + } + + currentLocalFileHeader = readLocalFileHeader(); + if (currentLocalFileHeader != null) { + currentInputStream = new BoundedInputStream(in, currentLocalFileHeader.compressedSize); + if (currentLocalFileHeader.method == LocalFileHeader.Methods.STORED) { + currentInputStream = new CRC32VerifyingInputStream(currentInputStream, + currentLocalFileHeader.originalSize, currentLocalFileHeader.originalCrc32); + } + return new ArjArchiveEntry(currentLocalFileHeader); + } else { + currentInputStream = null; + return null; + } + } + + @Override + public boolean canReadEntryData(ArchiveEntry ae) { + return currentLocalFileHeader.method == LocalFileHeader.Methods.STORED; + } + + @Override + public int read(final byte[] b, final int off, final int len) throws IOException { + if (currentLocalFileHeader.method != LocalFileHeader.Methods.STORED) { + throw new IOException("Unsupported compression method " + currentLocalFileHeader.method); + } + return currentInputStream.read(b, off, len); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/LocalFileHeader.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/LocalFileHeader.java new file mode 100644 index 000000000..d48dc1748 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/LocalFileHeader.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.arj; + +import java.util.Arrays; + +class LocalFileHeader { + int archiverVersionNumber; + int minVersionToExtract; + int hostOS; + int arjFlags; + int method; + int fileType; + int reserved; + int dateTimeModified; + long compressedSize; + long originalSize; + long originalCrc32; + int fileSpecPosition; + int fileAccessMode; + int firstChapter; + int lastChapter; + + int extendedFilePosition; + int dateTimeAccessed; + int dateTimeCreated; + int originalSizeEvenForVolumes; + + String name; + String comment; + + byte[][] extendedHeaders = null; + + static class Flags { + static final int GARBLED = 0x01; + static final int VOLUME = 0x04; + static final int EXTFILE = 0x08; + static final int PATHSYM = 0x10; + static final int BACKUP = 0x20; + } + + static class FileTypes { + static final int BINARY = 0; + static final int SEVEN_BIT_TEXT = 1; + static final int DIRECTORY = 3; + static final int VOLUME_LABEL = 4; + static final int CHAPTER_LABEL = 5; + } + + static class Methods { + static final int STORED = 0; + static final int COMPRESSED_MOST = 1; + static final int COMPRESSED_FASTEST = 4; + static final int NO_DATA_NO_CRC = 8; + static final int NO_DATA = 9; + } + + @Override + public String toString() { + StringBuilder builder = new StringBuilder(); + builder.append("LocalFileHeader [archiverVersionNumber="); + 
builder.append(archiverVersionNumber); + builder.append(", minVersionToExtract="); + builder.append(minVersionToExtract); + builder.append(", hostOS="); + builder.append(hostOS); + builder.append(", arjFlags="); + builder.append(arjFlags); + builder.append(", method="); + builder.append(method); + builder.append(", fileType="); + builder.append(fileType); + builder.append(", reserved="); + builder.append(reserved); + builder.append(", dateTimeModified="); + builder.append(dateTimeModified); + builder.append(", compressedSize="); + builder.append(compressedSize); + builder.append(", originalSize="); + builder.append(originalSize); + builder.append(", originalCrc32="); + builder.append(originalCrc32); + builder.append(", fileSpecPosition="); + builder.append(fileSpecPosition); + builder.append(", fileAccessMode="); + builder.append(fileAccessMode); + builder.append(", firstChapter="); + builder.append(firstChapter); + builder.append(", lastChapter="); + builder.append(lastChapter); + builder.append(", extendedFilePosition="); + builder.append(extendedFilePosition); + builder.append(", dateTimeAccessed="); + builder.append(dateTimeAccessed); + builder.append(", dateTimeCreated="); + builder.append(dateTimeCreated); + builder.append(", originalSizeEvenForVolumes="); + builder.append(originalSizeEvenForVolumes); + builder.append(", name="); + builder.append(name); + builder.append(", comment="); + builder.append(comment); + builder.append(", extendedHeaders="); + builder.append(Arrays.toString(extendedHeaders)); + builder.append("]"); + return builder.toString(); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/MainHeader.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/MainHeader.java new file mode 100644 index 000000000..a41aa72c2 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/MainHeader.java @@ -0,0 +1,102 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under 
one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.arj; + +import java.util.Arrays; + +class MainHeader { + int archiverVersionNumber; + int minVersionToExtract; + int hostOS; + int arjFlags; + int securityVersion; + int fileType; + int reserved; + int dateTimeCreated; + int dateTimeModified; + long archiveSize; + int securityEnvelopeFilePosition; + int fileSpecPosition; + int securityEnvelopeLength; + int encryptionVersion; + int lastChapter; + int arjProtectionFactor; + int arjFlags2; + String name; + String comment; + byte[] extendedHeaderBytes = null; + + static class Flags { + static final int GARBLED = 0x01; + static final int OLD_SECURED_NEW_ANSI_PAGE = 0x02; + static final int VOLUME = 0x04; + static final int ARJPROT = 0x08; + static final int PATHSYM = 0x10; + static final int BACKUP = 0x20; + static final int SECURED = 0x40; + static final int ALTNAME = 0x80; + } + + + @Override + public String toString() { + final StringBuilder builder = new StringBuilder(); + builder.append("MainHeader [archiverVersionNumber="); + builder.append(archiverVersionNumber); + builder.append(", minVersionToExtract="); + builder.append(minVersionToExtract); + builder.append(", hostOS="); + builder.append(hostOS); + builder.append(", 
arjFlags="); + builder.append(arjFlags); + builder.append(", securityVersion="); + builder.append(securityVersion); + builder.append(", fileType="); + builder.append(fileType); + builder.append(", reserved="); + builder.append(reserved); + builder.append(", dateTimeCreated="); + builder.append(dateTimeCreated); + builder.append(", dateTimeModified="); + builder.append(dateTimeModified); + builder.append(", archiveSize="); + builder.append(archiveSize); + builder.append(", securityEnvelopeFilePosition="); + builder.append(securityEnvelopeFilePosition); + builder.append(", fileSpecPosition="); + builder.append(fileSpecPosition); + builder.append(", securityEnvelopeLength="); + builder.append(securityEnvelopeLength); + builder.append(", encryptionVersion="); + builder.append(encryptionVersion); + builder.append(", lastChapter="); + builder.append(lastChapter); + builder.append(", arjProtectionFactor="); + builder.append(arjProtectionFactor); + builder.append(", arjFlags2="); + builder.append(arjFlags2); + builder.append(", name="); + builder.append(name); + builder.append(", comment="); + builder.append(comment); + builder.append(", extendedHeaderBytes="); + builder.append(Arrays.toString(extendedHeaderBytes)); + builder.append("]"); + return builder.toString(); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/package.html new file mode 100644 index 000000000..de18f61d8 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/arj/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading archives using + the ARJ format.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java new file mode 100644 index 000000000..641fae447 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveEntry.java @@ -0,0 +1,892 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.cpio; + +import java.io.File; +import java.util.Date; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * A cpio archive consists of a sequence of files. There are several types of + * headers defided in two categories of new and old format. The headers are + * recognized by magic numbers: + * + *
    + *
  • "070701" ASCII for new portable format
  • + *
  • "070702" ASCII for new portable format with CRC
  • + *
  • "070707" ASCII for old ascii (also known as Portable ASCII, odc or old + * character format
  • + *
  • 070707 binary for old binary
  • + *
+ * + *

The old binary format is limited to 16 bits for user id, group + * id, device, and inode numbers. It is limited to 4 gigabyte file + * sizes. + * + * The old ASCII format is limited to 18 bits for the user id, group + * id, device, and inode numbers. It is limited to 8 gigabyte file + * sizes. + * + * The new ASCII format is limited to 4 gigabyte file sizes. + * + * CPIO 2.5 knows also about tar, but it is not recognized here.

+ * + * + *

OLD FORMAT

+ * + *

Each file has a 76 (ascii) / 26 (binary) byte header, a variable + * length, NUL terminated filename, and variable length file data. A + * header for a filename "TRAILER!!!" indicates the end of the + * archive.

+ * + *

All the fields in the header are ISO 646 (approximately ASCII) + * strings of octal numbers, left padded, not NUL terminated.

+ * + *
+ * FIELDNAME        NOTES 
+ * c_magic          The integer value octal 070707.  This value can be used to deter-
+ *                  mine whether this archive is written with little-endian or big-
+ *                  endian integers.
+ * c_dev            Device that contains a directory entry for this file 
+ * c_ino            I-node number that identifies the input file to the file system 
+ * c_mode           The mode specifies both the regular permissions and the file type.
+ * c_uid            Numeric User ID of the owner of the input file 
+ * c_gid            Numeric Group ID of the owner of the input file 
+ * c_nlink          Number of links that are connected to the input file 
+ * c_rdev           For block special and character special entries, this field 
+ *                  contains the associated device number.  For all other entry types,
+ *                  it should be set to zero by writers and ignored by readers.
+ * c_mtime[2]       Modification time of the file, indicated as the number of seconds
+ *                  since the start of the epoch, 00:00:00 UTC January 1, 1970.  The
+ *                  four-byte integer is stored with the most-significant 16 bits
+ *                  first followed by the least-significant 16 bits.  Each of the two
+ *                  16 bit values are stored in machine-native byte order.
+ * c_namesize       Length of the path name, including the terminating null byte 
+ * c_filesize[2]    Length of the file in bytes. This is the length of the data 
+ *                  section that follows the header structure. Must be 0 for 
+ *                  FIFOs and directories
+ *
+ * All fields are unsigned short fields with 16-bit integer values
+ * apart from c_mtime and c_filesize which are 32-bit integer values
+ * 
+ * + *

If necessary, the filename and file data are padded with a NUL byte to an even length

+ * + *

Special files, directories, and the trailer are recorded with + * the h_filesize field equal to 0.

+ * + *

In the ASCII version of this format, the 16-bit entries are represented as 6-byte octal numbers, + * and the 32-bit entries are represented as 11-byte octal numbers. No padding is added.

+ * + *

NEW FORMAT

+ * + *

Each file has a 110 byte header, a variable length, NUL + * terminated filename, and variable length file data. A header for a + * filename "TRAILER!!!" indicates the end of the archive. All the + * fields in the header are ISO 646 (approximately ASCII) strings of + * hexadecimal numbers, left padded, not NUL terminated.

+ * + *
+ * FIELDNAME        NOTES 
+ * c_magic[6]       The string 070701 for new ASCII, the string 070702 for new ASCII with CRC
+ * c_ino[8]
+ * c_mode[8]
+ * c_uid[8]
+ * c_gid[8]
+ * c_nlink[8]
+ * c_mtim[8]
+ * c_filesize[8]    must be 0 for FIFOs and directories 
+ * c_maj[8]
+ * c_min[8] 
+ * c_rmaj[8]        only valid for chr and blk special files 
+ * c_rmin[8]        only valid for chr and blk special files 
+ * c_namesize[8]    count includes terminating NUL in pathname 
+ * c_check[8]       0 for "new" portable format; for CRC format
+ *                  the sum of all the bytes in the file
+ * 
+ * + *

New ASCII Format The "new" ASCII format uses 8-byte hexadecimal + * fields for all numbers and separates device numbers into separate + * fields for major and minor numbers.

+ * + *

The pathname is followed by NUL bytes so that the total size of + * the fixed header plus pathname is a multiple of four. Likewise, the + * file data is padded to a multiple of four bytes.

+ * + *

This class uses mutable fields and is not considered to be + * threadsafe.

+ * + *

Based on code from the jRPM project (http://jrpm.sourceforge.net).

+ * + *

The MAGIC numbers and other constants are defined in {@link CpioConstants}

+ * + *

+ * N.B. does not handle the cpio "tar" format + *

+ * @NotThreadSafe + * @see http://people.freebsd.org/~kientzle/libarchive/man/cpio.5.txt + */ +public class CpioArchiveEntry implements CpioConstants, ArchiveEntry { + + // Header description fields - should be same throughout an archive + + /** + * See constructor documenation for possible values. + */ + private final short fileFormat; + + /** The number of bytes in each header record; depends on the file format */ + private final int headerSize; + + /** The boundary to which the header and data elements are aligned: 0, 2 or 4 bytes */ + private final int alignmentBoundary; + + // Header fields + + private long chksum = 0; + + /** Number of bytes in the file */ + private long filesize = 0; + + private long gid = 0; + + private long inode = 0; + + private long maj = 0; + + private long min = 0; + + private long mode = 0; + + private long mtime = 0; + + private String name; + + private long nlink = 0; + + private long rmaj = 0; + + private long rmin = 0; + + private long uid = 0; + + /** + * Creates a CPIOArchiveEntry with a specified format. + * + * @param format + * The cpio format for this entry. + *

+ * Possible format values are: + *

+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * 
+ */ + public CpioArchiveEntry(final short format) { + switch (format) { + case FORMAT_NEW: + this.headerSize = 110; + this.alignmentBoundary = 4; + break; + case FORMAT_NEW_CRC: + this.headerSize = 110; + this.alignmentBoundary = 4; + break; + case FORMAT_OLD_ASCII: + this.headerSize = 76; + this.alignmentBoundary = 0; + break; + case FORMAT_OLD_BINARY: + this.headerSize = 26; + this.alignmentBoundary = 2; + break; + default: + throw new IllegalArgumentException("Unknown header type"); + } + this.fileFormat = format; + } + + /** + * Creates a CPIOArchiveEntry with a specified name. The format of + * this entry will be the new format. + * + * @param name + * The name of this entry. + */ + public CpioArchiveEntry(final String name) { + this(FORMAT_NEW, name); + } + + /** + * Creates a CPIOArchiveEntry with a specified name. + * + * @param format + * The cpio format for this entry. + * @param name + * The name of this entry. + *

+ * Possible format values are: + *

+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * 
+ * + * @since 1.1 + */ + public CpioArchiveEntry(final short format, final String name) { + this(format); + this.name = name; + } + + /** + * Creates a CPIOArchiveEntry with a specified name. The format of + * this entry will be the new format. + * + * @param name + * The name of this entry. + * @param size + * The size of this entry + */ + public CpioArchiveEntry(final String name, final long size) { + this(name); + this.setSize(size); + } + + /** + * Creates a CPIOArchiveEntry with a specified name. + * + * @param format + * The cpio format for this entry. + * @param name + * The name of this entry. + * @param size + * The size of this entry + *

+ * Possible format values are: + *

+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * 
+ * + * @since 1.1 + */ + public CpioArchiveEntry(final short format, final String name, + final long size) { + this(format, name); + this.setSize(size); + } + + /** + * Creates a CPIOArchiveEntry with a specified name for a + * specified file. The format of this entry will be the new + * format. + * + * @param inputFile + * The file to gather information from. + * @param entryName + * The name of this entry. + */ + public CpioArchiveEntry(File inputFile, String entryName) { + this(FORMAT_NEW, inputFile, entryName); + } + + /** + * Creates a CPIOArchiveEntry with a specified name for a + * specified file. + * + * @param format + * The cpio format for this entry. + * @param inputFile + * The file to gather information from. + * @param entryName + * The name of this entry. + *

+ * Possible format values are: + *

+     * CpioConstants.FORMAT_NEW
+     * CpioConstants.FORMAT_NEW_CRC
+     * CpioConstants.FORMAT_OLD_BINARY
+     * CpioConstants.FORMAT_OLD_ASCII
+     * 
+ * + * @since 1.1 + */ + public CpioArchiveEntry(final short format, File inputFile, + String entryName) { + this(format, entryName, inputFile.isFile() ? inputFile.length() : 0); + long mode=0; + if (inputFile.isDirectory()){ + mode |= C_ISDIR; + } else if (inputFile.isFile()){ + mode |= C_ISREG; + } else { + throw new IllegalArgumentException("Cannot determine type of file " + + inputFile.getName()); + } + // TODO set other fields as needed + setMode(mode); + setTime(inputFile.lastModified() / 1000); + } + + /** + * Check if the method is allowed for the defined format. + */ + private void checkNewFormat() { + if ((this.fileFormat & FORMAT_NEW_MASK) == 0) { + throw new UnsupportedOperationException(); + } + } + + /** + * Check if the method is allowed for the defined format. + */ + private void checkOldFormat() { + if ((this.fileFormat & FORMAT_OLD_MASK) == 0) { + throw new UnsupportedOperationException(); + } + } + + /** + * Get the checksum. + * Only supported for the new formats. + * + * @return Returns the checksum. + * @throws UnsupportedOperationException if the format is not a new format + */ + public long getChksum() { + checkNewFormat(); + return this.chksum; + } + + /** + * Get the device id. + * + * @return Returns the device id. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with a new + * format. + */ + public long getDevice() { + checkOldFormat(); + return this.min; + } + + /** + * Get the major device id. + * + * @return Returns the major device id. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with an old + * format. + */ + public long getDeviceMaj() { + checkNewFormat(); + return this.maj; + } + + /** + * Get the minor device id + * + * @return Returns the minor device id. + * @throws UnsupportedOperationException if format is not a new format + */ + public long getDeviceMin() { + checkNewFormat(); + return this.min; + } + + /** + * Get the filesize. 
+ * + * @return Returns the filesize. + * @see org.apache.commons.compress.archivers.ArchiveEntry#getSize() + */ + public long getSize() { + return this.filesize; + } + + /** + * Get the format for this entry. + * + * @return Returns the format. + */ + public short getFormat() { + return this.fileFormat; + } + + /** + * Get the group id. + * + * @return Returns the group id. + */ + public long getGID() { + return this.gid; + } + + /** + * Get the header size for this CPIO format + * + * @return Returns the header size in bytes. + */ + public int getHeaderSize() { + return this.headerSize; + } + + /** + * Get the alignment boundary for this CPIO format + * + * @return Returns the aligment boundary (0, 2, 4) in bytes + */ + public int getAlignmentBoundary() { + return this.alignmentBoundary; + } + + /** + * Get the number of bytes needed to pad the header to the alignment boundary. + * + * @return the number of bytes needed to pad the header (0,1,2,3) + */ + public int getHeaderPadCount(){ + if (this.alignmentBoundary == 0) { return 0; } + int size = this.headerSize+this.name.length()+1; // Name has terminating null + int remain = size % this.alignmentBoundary; + if (remain > 0){ + return this.alignmentBoundary - remain; + } + return 0; + } + + /** + * Get the number of bytes needed to pad the data to the alignment boundary. + * + * @return the number of bytes needed to pad the data (0,1,2,3) + */ + public int getDataPadCount(){ + if (this.alignmentBoundary == 0) { return 0; } + long size = this.filesize; + int remain = (int) (size % this.alignmentBoundary); + if (remain > 0){ + return this.alignmentBoundary - remain; + } + return 0; + } + + /** + * Set the inode. + * + * @return Returns the inode. + */ + public long getInode() { + return this.inode; + } + + /** + * Get the mode of this entry (e.g. directory, regular file). + * + * @return Returns the mode. + */ + public long getMode() { + return mode == 0 && !CPIO_TRAILER.equals(name) ? 
C_ISREG : mode; + } + + /** + * Get the name. + * + * @return Returns the name. + */ + public String getName() { + return this.name; + } + + /** + * Get the number of links. + * + * @return Returns the number of links. + */ + public long getNumberOfLinks() { + return nlink == 0 ? + isDirectory() ? 2 : 1 + : nlink; + } + + /** + * Get the remote device id. + * + * @return Returns the remote device id. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with a new + * format. + */ + public long getRemoteDevice() { + checkOldFormat(); + return this.rmin; + } + + /** + * Get the remote major device id. + * + * @return Returns the remote major device id. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with an old + * format. + */ + public long getRemoteDeviceMaj() { + checkNewFormat(); + return this.rmaj; + } + + /** + * Get the remote minor device id. + * + * @return Returns the remote minor device id. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with an old + * format. + */ + public long getRemoteDeviceMin() { + checkNewFormat(); + return this.rmin; + } + + /** + * Get the time in seconds. + * + * @return Returns the time. + */ + public long getTime() { + return this.mtime; + } + + public Date getLastModifiedDate() { + return new Date(1000 * getTime()); + } + + /** + * Get the user id. + * + * @return Returns the user id. + */ + public long getUID() { + return this.uid; + } + + /** + * Check if this entry represents a block device. + * + * @return TRUE if this entry is a block device. + */ + public boolean isBlockDevice() { + return CpioUtil.fileType(mode) == C_ISBLK; + } + + /** + * Check if this entry represents a character device. + * + * @return TRUE if this entry is a character device. 
+ */ + public boolean isCharacterDevice() { + return CpioUtil.fileType(mode) == C_ISCHR; + } + + /** + * Check if this entry represents a directory. + * + * @return TRUE if this entry is a directory. + */ + public boolean isDirectory() { + return CpioUtil.fileType(mode) == C_ISDIR; + } + + /** + * Check if this entry represents a network device. + * + * @return TRUE if this entry is a network device. + */ + public boolean isNetwork() { + return CpioUtil.fileType(mode) == C_ISNWK; + } + + /** + * Check if this entry represents a pipe. + * + * @return TRUE if this entry is a pipe. + */ + public boolean isPipe() { + return CpioUtil.fileType(mode) == C_ISFIFO; + } + + /** + * Check if this entry represents a regular file. + * + * @return TRUE if this entry is a regular file. + */ + public boolean isRegularFile() { + return CpioUtil.fileType(mode) == C_ISREG; + } + + /** + * Check if this entry represents a socket. + * + * @return TRUE if this entry is a socket. + */ + public boolean isSocket() { + return CpioUtil.fileType(mode) == C_ISSOCK; + } + + /** + * Check if this entry represents a symbolic link. + * + * @return TRUE if this entry is a symbolic link. + */ + public boolean isSymbolicLink() { + return CpioUtil.fileType(mode) == C_ISLNK; + } + + /** + * Set the checksum. The checksum is calculated by adding all bytes of a + * file to transfer (crc += buf[pos] & 0xFF). + * + * @param chksum + * The checksum to set. + */ + public void setChksum(final long chksum) { + checkNewFormat(); + this.chksum = chksum; + } + + /** + * Set the device id. + * + * @param device + * The device id to set. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with a new + * format. + */ + public void setDevice(final long device) { + checkOldFormat(); + this.min = device; + } + + /** + * Set major device id. + * + * @param maj + * The major device id to set. 
+ */ + public void setDeviceMaj(final long maj) { + checkNewFormat(); + this.maj = maj; + } + + /** + * Set the minor device id + * + * @param min + * The minor device id to set. + */ + public void setDeviceMin(final long min) { + checkNewFormat(); + this.min = min; + } + + /** + * Set the filesize. + * + * @param size + * The filesize to set. + */ + public void setSize(final long size) { + if (size < 0 || size > 0xFFFFFFFFL) { + throw new IllegalArgumentException("invalid entry size <" + size + + ">"); + } + this.filesize = size; + } + + /** + * Set the group id. + * + * @param gid + * The group id to set. + */ + public void setGID(final long gid) { + this.gid = gid; + } + + /** + * Set the inode. + * + * @param inode + * The inode to set. + */ + public void setInode(final long inode) { + this.inode = inode; + } + + /** + * Set the mode of this entry (e.g. directory, regular file). + * + * @param mode + * The mode to set. + */ + public void setMode(final long mode) { + final long maskedMode = mode & S_IFMT; + switch ((int) maskedMode) { + case C_ISDIR: + case C_ISLNK: + case C_ISREG: + case C_ISFIFO: + case C_ISCHR: + case C_ISBLK: + case C_ISSOCK: + case C_ISNWK: + break; + default: + throw new IllegalArgumentException( + "Unknown mode. " + + "Full: " + Long.toHexString(mode) + + " Masked: " + Long.toHexString(maskedMode)); + } + + this.mode = mode; + } + + /** + * Set the name. + * + * @param name + * The name to set. + */ + public void setName(final String name) { + this.name = name; + } + + /** + * Set the number of links. + * + * @param nlink + * The number of links to set. + */ + public void setNumberOfLinks(final long nlink) { + this.nlink = nlink; + } + + /** + * Set the remote device id. + * + * @param device + * The remote device id to set. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with a new + * format. 
+ */ + public void setRemoteDevice(final long device) { + checkOldFormat(); + this.rmin = device; + } + + /** + * Set the remote major device id. + * + * @param rmaj + * The remote major device id to set. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with an old + * format. + */ + public void setRemoteDeviceMaj(final long rmaj) { + checkNewFormat(); + this.rmaj = rmaj; + } + + /** + * Set the remote minor device id. + * + * @param rmin + * The remote minor device id to set. + * @throws UnsupportedOperationException + * if this method is called for a CPIOArchiveEntry with an old + * format. + */ + public void setRemoteDeviceMin(final long rmin) { + checkNewFormat(); + this.rmin = rmin; + } + + /** + * Set the time in seconds. + * + * @param time + * The time to set. + */ + public void setTime(final long time) { + this.mtime = time; + } + + /** + * Set the user id. + * + * @param uid + * The user id to set. + */ + public void setUID(final long uid) { + this.uid = uid; + } + + /* (non-Javadoc) + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + final int prime = 31; + int result = 1; + result = prime * result + (name == null ? 
0 : name.hashCode()); + return result; + } + + /* (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + CpioArchiveEntry other = (CpioArchiveEntry) obj; + if (name == null) { + if (other.name != null) { + return false; + } + } else if (!name.equals(other.name)) { + return false; + } + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java new file mode 100644 index 000000000..0d7e4ba1f --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveInputStream.java @@ -0,0 +1,562 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.archivers.cpio; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; +import org.apache.commons.compress.utils.ArchiveUtils; +import org.apache.commons.compress.utils.CharsetNames; +import org.apache.commons.compress.utils.IOUtils; + +/** + * CPIOArchiveInputStream is a stream for reading cpio streams. All formats of + * cpio are supported (old ascii, old binary, new portable format and the new + * portable format with crc). + * + *

+ * The stream can be read by extracting a cpio entry (containing all + * informations about a entry) and afterwards reading from the stream the file + * specified by the entry. + *

+ *
+ * CPIOArchiveInputStream cpioIn = new CPIOArchiveInputStream(
+ *         new FileInputStream(new File("test.cpio")));
+ * CPIOArchiveEntry cpioEntry;
+ *
+ * while ((cpioEntry = cpioIn.getNextEntry()) != null) {
+ *     System.out.println(cpioEntry.getName());
+ *     int tmp;
+ *     StringBuilder buf = new StringBuilder();
+ *     while ((tmp = cpIn.read()) != -1) {
+ *         buf.append((char) tmp);
+ *     }
+ *     System.out.println(buf.toString());
+ * }
+ * cpioIn.close();
+ * 
+ *

+ * Note: This implementation should be compatible to cpio 2.5 + * + *

This class uses mutable fields and is not considered to be threadsafe. + * + *

Based on code from the jRPM project (jrpm.sourceforge.net) + */ + +public class CpioArchiveInputStream extends ArchiveInputStream implements + CpioConstants { + + private boolean closed = false; + + private CpioArchiveEntry entry; + + private long entryBytesRead = 0; + + private boolean entryEOF = false; + + private final byte tmpbuf[] = new byte[4096]; + + private long crc = 0; + + private final InputStream in; + + // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) + private final byte[] TWO_BYTES_BUF = new byte[2]; + private final byte[] FOUR_BYTES_BUF = new byte[4]; + private final byte[] SIX_BYTES_BUF = new byte[6]; + + private final int blockSize; + + /** + * The encoding to use for filenames and labels. + */ + private final ZipEncoding encoding; + + /** + * Construct the cpio input stream with a blocksize of {@link + * CpioConstants#BLOCK_SIZE BLOCK_SIZE} and expecting ASCII file + * names. + * + * @param in + * The cpio stream + */ + public CpioArchiveInputStream(final InputStream in) { + this(in, BLOCK_SIZE, CharsetNames.US_ASCII); + } + + /** + * Construct the cpio input stream with a blocksize of {@link + * CpioConstants#BLOCK_SIZE BLOCK_SIZE}. + * + * @param in + * The cpio stream + * @param encoding + * The encoding of file names to expect - use null for + * the platform's default. + * @since 1.6 + */ + public CpioArchiveInputStream(final InputStream in, String encoding) { + this(in, BLOCK_SIZE, encoding); + } + + /** + * Construct the cpio input stream with a blocksize of {@link + * CpioConstants#BLOCK_SIZE BLOCK_SIZE} expecting ASCII file + * names. + * + * @param in + * The cpio stream + * @param blockSize + * The block size of the archive. + * @since 1.5 + */ + public CpioArchiveInputStream(final InputStream in, int blockSize) { + this(in, blockSize, CharsetNames.US_ASCII); + } + + /** + * Construct the cpio input stream with a blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE}. 
+ * + * @param in + * The cpio stream + * @param blockSize + * The block size of the archive. + * @param encoding + * The encoding of file names to expect - use null for + * the platform's default. + * @since 1.6 + */ + public CpioArchiveInputStream(final InputStream in, int blockSize, String encoding) { + this.in = in; + this.blockSize = blockSize; + this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + } + + /** + * Returns 0 after EOF has reached for the current entry data, otherwise + * always return 1. + *

+ * Programs should not count on this method to return the actual number of + * bytes that could be read without blocking. + * + * @return 1 before EOF and 0 after EOF has reached for current entry. + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + @Override + public int available() throws IOException { + ensureOpen(); + if (this.entryEOF) { + return 0; + } + return 1; + } + + /** + * Closes the CPIO input stream. + * + * @throws IOException + * if an I/O error has occurred + */ + @Override + public void close() throws IOException { + if (!this.closed) { + in.close(); + this.closed = true; + } + } + + /** + * Closes the current CPIO entry and positions the stream for reading the + * next entry. + * + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + private void closeEntry() throws IOException { + // the skip implementation of this class will not skip more + // than Integer.MAX_VALUE bytes + while (skip((long) Integer.MAX_VALUE) == Integer.MAX_VALUE) { // NOPMD + // do nothing + } + } + + /** + * Check to make sure that this stream has not been closed + * + * @throws IOException + * if the stream is already closed + */ + private void ensureOpen() throws IOException { + if (this.closed) { + throw new IOException("Stream closed"); + } + } + + /** + * Reads the next CPIO file entry and positions stream at the beginning of + * the entry data. 
+ * + * @return the CPIOArchiveEntry just read + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + public CpioArchiveEntry getNextCPIOEntry() throws IOException { + ensureOpen(); + if (this.entry != null) { + closeEntry(); + } + readFully(TWO_BYTES_BUF, 0, TWO_BYTES_BUF.length); + if (CpioUtil.byteArray2long(TWO_BYTES_BUF, false) == MAGIC_OLD_BINARY) { + this.entry = readOldBinaryEntry(false); + } else if (CpioUtil.byteArray2long(TWO_BYTES_BUF, true) + == MAGIC_OLD_BINARY) { + this.entry = readOldBinaryEntry(true); + } else { + System.arraycopy(TWO_BYTES_BUF, 0, SIX_BYTES_BUF, 0, + TWO_BYTES_BUF.length); + readFully(SIX_BYTES_BUF, TWO_BYTES_BUF.length, + FOUR_BYTES_BUF.length); + String magicString = ArchiveUtils.toAsciiString(SIX_BYTES_BUF); + if (magicString.equals(MAGIC_NEW)) { + this.entry = readNewEntry(false); + } else if (magicString.equals(MAGIC_NEW_CRC)) { + this.entry = readNewEntry(true); + } else if (magicString.equals(MAGIC_OLD_ASCII)) { + this.entry = readOldAsciiEntry(); + } else { + throw new IOException("Unknown magic [" + magicString + "]. Occured at byte: " + getBytesRead()); + } + } + + this.entryBytesRead = 0; + this.entryEOF = false; + this.crc = 0; + + if (this.entry.getName().equals(CPIO_TRAILER)) { + this.entryEOF = true; + skipRemainderOfLastBlock(); + return null; + } + return this.entry; + } + + private void skip(int bytes) throws IOException{ + // bytes cannot be more than 3 bytes + if (bytes > 0) { + readFully(FOUR_BYTES_BUF, 0, bytes); + } + } + + /** + * Reads from the current CPIO entry into an array of bytes. Blocks until + * some input is available. 
+ * + * @param b + * the buffer into which the data is read + * @param off + * the start offset of the data + * @param len + * the maximum number of bytes read + * @return the actual number of bytes read, or -1 if the end of the entry is + * reached + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + @Override + public int read(final byte[] b, final int off, final int len) + throws IOException { + ensureOpen(); + if (off < 0 || len < 0 || off > b.length - len) { + throw new IndexOutOfBoundsException(); + } else if (len == 0) { + return 0; + } + + if (this.entry == null || this.entryEOF) { + return -1; + } + if (this.entryBytesRead == this.entry.getSize()) { + skip(entry.getDataPadCount()); + this.entryEOF = true; + if (this.entry.getFormat() == FORMAT_NEW_CRC + && this.crc != this.entry.getChksum()) { + throw new IOException("CRC Error. Occured at byte: " + + getBytesRead()); + } + return -1; // EOF for this entry + } + int tmplength = (int) Math.min(len, this.entry.getSize() + - this.entryBytesRead); + if (tmplength < 0) { + return -1; + } + + int tmpread = readFully(b, off, tmplength); + if (this.entry.getFormat() == FORMAT_NEW_CRC) { + for (int pos = 0; pos < tmpread; pos++) { + this.crc += b[pos] & 0xFF; + } + } + this.entryBytesRead += tmpread; + + return tmpread; + } + + private final int readFully(final byte[] b, final int off, final int len) + throws IOException { + int count = IOUtils.readFully(in, b, off, len); + count(count); + if (count < len) { + throw new EOFException(); + } + return count; + } + + private long readBinaryLong(final int length, final boolean swapHalfWord) + throws IOException { + byte tmp[] = new byte[length]; + readFully(tmp, 0, tmp.length); + return CpioUtil.byteArray2long(tmp, swapHalfWord); + } + + private long readAsciiLong(final int length, final int radix) + throws IOException { + byte tmpBuffer[] = new byte[length]; + readFully(tmpBuffer, 0, tmpBuffer.length); + return 
Long.parseLong(ArchiveUtils.toAsciiString(tmpBuffer), radix); + } + + private CpioArchiveEntry readNewEntry(final boolean hasCrc) + throws IOException { + CpioArchiveEntry ret; + if (hasCrc) { + ret = new CpioArchiveEntry(FORMAT_NEW_CRC); + } else { + ret = new CpioArchiveEntry(FORMAT_NEW); + } + + ret.setInode(readAsciiLong(8, 16)); + long mode = readAsciiLong(8, 16); + if (CpioUtil.fileType(mode) != 0){ // mode is initialised to 0 + ret.setMode(mode); + } + ret.setUID(readAsciiLong(8, 16)); + ret.setGID(readAsciiLong(8, 16)); + ret.setNumberOfLinks(readAsciiLong(8, 16)); + ret.setTime(readAsciiLong(8, 16)); + ret.setSize(readAsciiLong(8, 16)); + ret.setDeviceMaj(readAsciiLong(8, 16)); + ret.setDeviceMin(readAsciiLong(8, 16)); + ret.setRemoteDeviceMaj(readAsciiLong(8, 16)); + ret.setRemoteDeviceMin(readAsciiLong(8, 16)); + long namesize = readAsciiLong(8, 16); + ret.setChksum(readAsciiLong(8, 16)); + String name = readCString((int) namesize); + ret.setName(name); + if (CpioUtil.fileType(mode) == 0 && !name.equals(CPIO_TRAILER)){ + throw new IOException("Mode 0 only allowed in the trailer. 
Found entry name: "+name + " Occured at byte: " + getBytesRead()); + } + skip(ret.getHeaderPadCount()); + + return ret; + } + + private CpioArchiveEntry readOldAsciiEntry() throws IOException { + CpioArchiveEntry ret = new CpioArchiveEntry(FORMAT_OLD_ASCII); + + ret.setDevice(readAsciiLong(6, 8)); + ret.setInode(readAsciiLong(6, 8)); + final long mode = readAsciiLong(6, 8); + if (CpioUtil.fileType(mode) != 0) { + ret.setMode(mode); + } + ret.setUID(readAsciiLong(6, 8)); + ret.setGID(readAsciiLong(6, 8)); + ret.setNumberOfLinks(readAsciiLong(6, 8)); + ret.setRemoteDevice(readAsciiLong(6, 8)); + ret.setTime(readAsciiLong(11, 8)); + long namesize = readAsciiLong(6, 8); + ret.setSize(readAsciiLong(11, 8)); + final String name = readCString((int) namesize); + ret.setName(name); + if (CpioUtil.fileType(mode) == 0 && !name.equals(CPIO_TRAILER)){ + throw new IOException("Mode 0 only allowed in the trailer. Found entry: "+ name + " Occured at byte: " + getBytesRead()); + } + + return ret; + } + + private CpioArchiveEntry readOldBinaryEntry(final boolean swapHalfWord) + throws IOException { + CpioArchiveEntry ret = new CpioArchiveEntry(FORMAT_OLD_BINARY); + + ret.setDevice(readBinaryLong(2, swapHalfWord)); + ret.setInode(readBinaryLong(2, swapHalfWord)); + final long mode = readBinaryLong(2, swapHalfWord); + if (CpioUtil.fileType(mode) != 0){ + ret.setMode(mode); + } + ret.setUID(readBinaryLong(2, swapHalfWord)); + ret.setGID(readBinaryLong(2, swapHalfWord)); + ret.setNumberOfLinks(readBinaryLong(2, swapHalfWord)); + ret.setRemoteDevice(readBinaryLong(2, swapHalfWord)); + ret.setTime(readBinaryLong(4, swapHalfWord)); + long namesize = readBinaryLong(2, swapHalfWord); + ret.setSize(readBinaryLong(4, swapHalfWord)); + final String name = readCString((int) namesize); + ret.setName(name); + if (CpioUtil.fileType(mode) == 0 && !name.equals(CPIO_TRAILER)){ + throw new IOException("Mode 0 only allowed in the trailer. 
Found entry: "+name + "Occured at byte: " + getBytesRead()); + } + skip(ret.getHeaderPadCount()); + + return ret; + } + + private String readCString(final int length) throws IOException { + // don't include trailing NUL in file name to decode + byte tmpBuffer[] = new byte[length - 1]; + readFully(tmpBuffer, 0, tmpBuffer.length); + this.in.read(); + return encoding.decode(tmpBuffer); + } + + /** + * Skips specified number of bytes in the current CPIO entry. + * + * @param n + * the number of bytes to skip + * @return the actual number of bytes skipped + * @throws IOException + * if an I/O error has occurred + * @throws IllegalArgumentException + * if n < 0 + */ + @Override + public long skip(final long n) throws IOException { + if (n < 0) { + throw new IllegalArgumentException("negative skip length"); + } + ensureOpen(); + int max = (int) Math.min(n, Integer.MAX_VALUE); + int total = 0; + + while (total < max) { + int len = max - total; + if (len > this.tmpbuf.length) { + len = this.tmpbuf.length; + } + len = read(this.tmpbuf, 0, len); + if (len == -1) { + this.entryEOF = true; + break; + } + total += len; + } + return total; + } + + @Override + public ArchiveEntry getNextEntry() throws IOException { + return getNextCPIOEntry(); + } + + /** + * Skips the padding zeros written after the TRAILER!!! entry. + */ + private void skipRemainderOfLastBlock() throws IOException { + long readFromLastBlock = getBytesRead() % blockSize; + long remainingBytes = readFromLastBlock == 0 ? 
0 + : blockSize - readFromLastBlock; + while (remainingBytes > 0) { + long skipped = skip(blockSize - readFromLastBlock); + if (skipped <= 0) { + break; + } + remainingBytes -= skipped; + } + } + + /** + * Checks if the signature matches one of the following magic values: + * + * Strings: + * + * "070701" - MAGIC_NEW + * "070702" - MAGIC_NEW_CRC + * "070707" - MAGIC_OLD_ASCII + * + * Octal Binary value: + * + * 070707 - MAGIC_OLD_BINARY (held as a short) = 0x71C7 or 0xC771 + */ + public static boolean matches(byte[] signature, int length) { + if (length < 6) { + return false; + } + + // Check binary values + if (signature[0] == 0x71 && (signature[1] & 0xFF) == 0xc7) { + return true; + } + if (signature[1] == 0x71 && (signature[0] & 0xFF) == 0xc7) { + return true; + } + + // Check Ascii (String) values + // 3037 3037 30nn + if (signature[0] != 0x30) { + return false; + } + if (signature[1] != 0x37) { + return false; + } + if (signature[2] != 0x30) { + return false; + } + if (signature[3] != 0x37) { + return false; + } + if (signature[4] != 0x30) { + return false; + } + // Check last byte + if (signature[5] == 0x31) { + return true; + } + if (signature[5] == 0x32) { + return true; + } + if (signature[5] == 0x37) { + return true; + } + + return false; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java new file mode 100644 index 000000000..ff86ddf9f --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioArchiveOutputStream.java @@ -0,0 +1,558 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.cpio; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.util.HashMap; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveOutputStream; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; +import org.apache.commons.compress.utils.ArchiveUtils; +import org.apache.commons.compress.utils.CharsetNames; + +/** + * CPIOArchiveOutputStream is a stream for writing CPIO streams. All formats of + * CPIO are supported (old ASCII, old binary, new portable format and the new + * portable format with CRC). + * + *

An entry can be written by creating an instance of CpioArchiveEntry and fill + * it with the necessary values and put it into the CPIO stream. Afterwards + * write the contents of the file into the CPIO stream. Either close the stream + * by calling finish() or put a next entry into the cpio stream.

+ * + *
+ * CpioArchiveOutputStream out = new CpioArchiveOutputStream(
+ *         new FileOutputStream(new File("test.cpio")));
+ * CpioArchiveEntry entry = new CpioArchiveEntry();
+ * entry.setName("testfile");
+ * String contents = "12345";
+ * entry.setFileSize(contents.length());
+ * entry.setMode(CpioConstants.C_ISREG); // regular file
+ * ... set other attributes, e.g. time, number of links
+ * out.putArchiveEntry(entry);
+ * out.write(testContents.getBytes());
+ * out.close();
+ * 
+ * + *

Note: This implementation should be compatible with cpio 2.5

+ * + *

This class uses mutable fields and is not considered threadsafe.

+ * + *

based on code from the jRPM project (jrpm.sourceforge.net)

+ */ +public class CpioArchiveOutputStream extends ArchiveOutputStream implements + CpioConstants { + + private CpioArchiveEntry entry; + + private boolean closed = false; + + /** indicates if this archive is finished */ + private boolean finished; + + /** + * See {@link CpioArchiveEntry#setFormat(short)} for possible values. + */ + private final short entryFormat; + + private final HashMap names = + new HashMap(); + + private long crc = 0; + + private long written; + + private final OutputStream out; + + private final int blockSize; + + private long nextArtificalDeviceAndInode = 1; + + /** + * The encoding to use for filenames and labels. + */ + private final ZipEncoding encoding; + + /** + * Construct the cpio output stream with a specified format, a + * blocksize of {@link CpioConstants#BLOCK_SIZE BLOCK_SIZE} and + * using ASCII as the file name encoding. + * + * @param out + * The cpio stream + * @param format + * The format of the stream + */ + public CpioArchiveOutputStream(final OutputStream out, final short format) { + this(out, format, BLOCK_SIZE, CharsetNames.US_ASCII); + } + + /** + * Construct the cpio output stream with a specified format using + * ASCII as the file name encoding. + * + * @param out + * The cpio stream + * @param format + * The format of the stream + * @param blockSize + * The block size of the archive. + * + * @since 1.1 + */ + public CpioArchiveOutputStream(final OutputStream out, final short format, + final int blockSize) { + this(out, format, blockSize, CharsetNames.US_ASCII); + } + + /** + * Construct the cpio output stream with a specified format using + * ASCII as the file name encoding. + * + * @param out + * The cpio stream + * @param format + * The format of the stream + * @param blockSize + * The block size of the archive. + * @param encoding + * The encoding of file names to write - use null for + * the platform's default. 
+ * + * @since 1.6 + */ + public CpioArchiveOutputStream(final OutputStream out, final short format, + final int blockSize, final String encoding) { + this.out = out; + switch (format) { + case FORMAT_NEW: + case FORMAT_NEW_CRC: + case FORMAT_OLD_ASCII: + case FORMAT_OLD_BINARY: + break; + default: + throw new IllegalArgumentException("Unknown format: "+format); + + } + this.entryFormat = format; + this.blockSize = blockSize; + this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + } + + /** + * Construct the cpio output stream. The format for this CPIO stream is the + * "new" format using ASCII encoding for file names + * + * @param out + * The cpio stream + */ + public CpioArchiveOutputStream(final OutputStream out) { + this(out, FORMAT_NEW); + } + + /** + * Construct the cpio output stream. The format for this CPIO stream is the + * "new" format. + * + * @param out + * The cpio stream + * @param encoding + * The encoding of file names to write - use null for + * the platform's default. + * @since 1.6 + */ + public CpioArchiveOutputStream(final OutputStream out, String encoding) { + this(out, FORMAT_NEW, BLOCK_SIZE, encoding); + } + + /** + * Check to make sure that this stream has not been closed + * + * @throws IOException + * if the stream is already closed + */ + private void ensureOpen() throws IOException { + if (this.closed) { + throw new IOException("Stream closed"); + } + } + + /** + * Begins writing a new CPIO file entry and positions the stream to the + * start of the entry data. Closes the current entry if still active. The + * current time will be used if the entry has no set modification time and + * the default header format will be used if no other format is specified in + * the entry. 
+ * + * @param entry + * the CPIO cpioEntry to be written + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + * @throws ClassCastException if entry is not an instance of CpioArchiveEntry + */ + @Override + public void putArchiveEntry(ArchiveEntry entry) throws IOException { + if(finished) { + throw new IOException("Stream has already been finished"); + } + + CpioArchiveEntry e = (CpioArchiveEntry) entry; + ensureOpen(); + if (this.entry != null) { + closeArchiveEntry(); // close previous entry + } + if (e.getTime() == -1) { + e.setTime(System.currentTimeMillis() / 1000); + } + + final short format = e.getFormat(); + if (format != this.entryFormat){ + throw new IOException("Header format: "+format+" does not match existing format: "+this.entryFormat); + } + + if (this.names.put(e.getName(), e) != null) { + throw new IOException("duplicate entry: " + e.getName()); + } + + writeHeader(e); + this.entry = e; + this.written = 0; + } + + private void writeHeader(final CpioArchiveEntry e) throws IOException { + switch (e.getFormat()) { + case FORMAT_NEW: + out.write(ArchiveUtils.toAsciiBytes(MAGIC_NEW)); + count(6); + writeNewEntry(e); + break; + case FORMAT_NEW_CRC: + out.write(ArchiveUtils.toAsciiBytes(MAGIC_NEW_CRC)); + count(6); + writeNewEntry(e); + break; + case FORMAT_OLD_ASCII: + out.write(ArchiveUtils.toAsciiBytes(MAGIC_OLD_ASCII)); + count(6); + writeOldAsciiEntry(e); + break; + case FORMAT_OLD_BINARY: + boolean swapHalfWord = true; + writeBinaryLong(MAGIC_OLD_BINARY, 2, swapHalfWord); + writeOldBinaryEntry(e, swapHalfWord); + break; + default: + throw new IOException("unknown format " + e.getFormat()); + } + } + + private void writeNewEntry(final CpioArchiveEntry entry) throws IOException { + long inode = entry.getInode(); + long devMin = entry.getDeviceMin(); + if (CPIO_TRAILER.equals(entry.getName())) { + inode = devMin = 0; + } else { + if (inode == 0 && devMin == 0) { + inode = nextArtificalDeviceAndInode & 
0xFFFFFFFF; + devMin = (nextArtificalDeviceAndInode++ >> 32) & 0xFFFFFFFF; + } else { + nextArtificalDeviceAndInode = + Math.max(nextArtificalDeviceAndInode, + inode + 0x100000000L * devMin) + 1; + } + } + + writeAsciiLong(inode, 8, 16); + writeAsciiLong(entry.getMode(), 8, 16); + writeAsciiLong(entry.getUID(), 8, 16); + writeAsciiLong(entry.getGID(), 8, 16); + writeAsciiLong(entry.getNumberOfLinks(), 8, 16); + writeAsciiLong(entry.getTime(), 8, 16); + writeAsciiLong(entry.getSize(), 8, 16); + writeAsciiLong(entry.getDeviceMaj(), 8, 16); + writeAsciiLong(devMin, 8, 16); + writeAsciiLong(entry.getRemoteDeviceMaj(), 8, 16); + writeAsciiLong(entry.getRemoteDeviceMin(), 8, 16); + writeAsciiLong(entry.getName().length() + 1, 8, 16); + writeAsciiLong(entry.getChksum(), 8, 16); + writeCString(entry.getName()); + pad(entry.getHeaderPadCount()); + } + + private void writeOldAsciiEntry(final CpioArchiveEntry entry) + throws IOException { + long inode = entry.getInode(); + long device = entry.getDevice(); + if (CPIO_TRAILER.equals(entry.getName())) { + inode = device = 0; + } else { + if (inode == 0 && device == 0) { + inode = nextArtificalDeviceAndInode & 0777777; + device = (nextArtificalDeviceAndInode++ >> 18) & 0777777; + } else { + nextArtificalDeviceAndInode = + Math.max(nextArtificalDeviceAndInode, + inode + 01000000 * device) + 1; + } + } + + writeAsciiLong(device, 6, 8); + writeAsciiLong(inode, 6, 8); + writeAsciiLong(entry.getMode(), 6, 8); + writeAsciiLong(entry.getUID(), 6, 8); + writeAsciiLong(entry.getGID(), 6, 8); + writeAsciiLong(entry.getNumberOfLinks(), 6, 8); + writeAsciiLong(entry.getRemoteDevice(), 6, 8); + writeAsciiLong(entry.getTime(), 11, 8); + writeAsciiLong(entry.getName().length() + 1, 6, 8); + writeAsciiLong(entry.getSize(), 11, 8); + writeCString(entry.getName()); + } + + private void writeOldBinaryEntry(final CpioArchiveEntry entry, + final boolean swapHalfWord) throws IOException { + long inode = entry.getInode(); + long device = 
entry.getDevice(); + if (CPIO_TRAILER.equals(entry.getName())) { + inode = device = 0; + } else { + if (inode == 0 && device == 0) { + inode = nextArtificalDeviceAndInode & 0xFFFF; + device = (nextArtificalDeviceAndInode++ >> 16) & 0xFFFF; + } else { + nextArtificalDeviceAndInode = + Math.max(nextArtificalDeviceAndInode, + inode + 0x10000 * device) + 1; + } + } + + writeBinaryLong(device, 2, swapHalfWord); + writeBinaryLong(inode, 2, swapHalfWord); + writeBinaryLong(entry.getMode(), 2, swapHalfWord); + writeBinaryLong(entry.getUID(), 2, swapHalfWord); + writeBinaryLong(entry.getGID(), 2, swapHalfWord); + writeBinaryLong(entry.getNumberOfLinks(), 2, swapHalfWord); + writeBinaryLong(entry.getRemoteDevice(), 2, swapHalfWord); + writeBinaryLong(entry.getTime(), 4, swapHalfWord); + writeBinaryLong(entry.getName().length() + 1, 2, swapHalfWord); + writeBinaryLong(entry.getSize(), 4, swapHalfWord); + writeCString(entry.getName()); + pad(entry.getHeaderPadCount()); + } + + /*(non-Javadoc) + * + * @see + * org.apache.commons.compress.archivers.ArchiveOutputStream#closeArchiveEntry + * () + */ + @Override + public void closeArchiveEntry() throws IOException { + if(finished) { + throw new IOException("Stream has already been finished"); + } + + ensureOpen(); + + if (entry == null) { + throw new IOException("Trying to close non-existent entry"); + } + + if (this.entry.getSize() != this.written) { + throw new IOException("invalid entry size (expected " + + this.entry.getSize() + " but got " + this.written + + " bytes)"); + } + pad(this.entry.getDataPadCount()); + if (this.entry.getFormat() == FORMAT_NEW_CRC + && this.crc != this.entry.getChksum()) { + throw new IOException("CRC Error"); + } + this.entry = null; + this.crc = 0; + this.written = 0; + } + + /** + * Writes an array of bytes to the current CPIO entry data. This method will + * block until all the bytes are written. 
+ * + * @param b + * the data to be written + * @param off + * the start offset in the data + * @param len + * the number of bytes that are written + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + @Override + public void write(final byte[] b, final int off, final int len) + throws IOException { + ensureOpen(); + if (off < 0 || len < 0 || off > b.length - len) { + throw new IndexOutOfBoundsException(); + } else if (len == 0) { + return; + } + + if (this.entry == null) { + throw new IOException("no current CPIO entry"); + } + if (this.written + len > this.entry.getSize()) { + throw new IOException("attempt to write past end of STORED entry"); + } + out.write(b, off, len); + this.written += len; + if (this.entry.getFormat() == FORMAT_NEW_CRC) { + for (int pos = 0; pos < len; pos++) { + this.crc += b[pos] & 0xFF; + } + } + count(len); + } + + /** + * Finishes writing the contents of the CPIO output stream without closing + * the underlying stream. Use this method when applying multiple filters in + * succession to the same output stream. + * + * @throws IOException + * if an I/O exception has occurred or if a CPIO file error has + * occurred + */ + @Override + public void finish() throws IOException { + ensureOpen(); + if (finished) { + throw new IOException("This archive has already been finished"); + } + + if (this.entry != null) { + throw new IOException("This archive contains unclosed entries."); + } + this.entry = new CpioArchiveEntry(this.entryFormat); + this.entry.setName(CPIO_TRAILER); + this.entry.setNumberOfLinks(1); + writeHeader(this.entry); + closeArchiveEntry(); + + int lengthOfLastBlock = (int) (getBytesWritten() % blockSize); + if (lengthOfLastBlock != 0) { + pad(blockSize - lengthOfLastBlock); + } + + finished = true; + } + + /** + * Closes the CPIO output stream as well as the stream being filtered. 
+ * + * @throws IOException + * if an I/O error has occurred or if a CPIO file error has + * occurred + */ + @Override + public void close() throws IOException { + if(!finished) { + finish(); + } + + if (!this.closed) { + out.close(); + this.closed = true; + } + } + + private void pad(int count) throws IOException{ + if (count > 0){ + byte buff[] = new byte[count]; + out.write(buff); + count(count); + } + } + + private void writeBinaryLong(final long number, final int length, + final boolean swapHalfWord) throws IOException { + byte tmp[] = CpioUtil.long2byteArray(number, length, swapHalfWord); + out.write(tmp); + count(tmp.length); + } + + private void writeAsciiLong(final long number, final int length, + final int radix) throws IOException { + StringBuilder tmp = new StringBuilder(); + String tmpStr; + if (radix == 16) { + tmp.append(Long.toHexString(number)); + } else if (radix == 8) { + tmp.append(Long.toOctalString(number)); + } else { + tmp.append(Long.toString(number)); + } + + if (tmp.length() <= length) { + long insertLength = length - tmp.length(); + for (int pos = 0; pos < insertLength; pos++) { + tmp.insert(0, "0"); + } + tmpStr = tmp.toString(); + } else { + tmpStr = tmp.substring(tmp.length() - length); + } + byte[] b = ArchiveUtils.toAsciiBytes(tmpStr); + out.write(b); + count(b.length); + } + + /** + * Writes an ASCII string to the stream followed by \0 + * @param str the String to write + * @throws IOException if the string couldn't be written + */ + private void writeCString(final String str) throws IOException { + ByteBuffer buf = encoding.encode(str); + final int len = buf.limit() - buf.position(); + out.write(buf.array(), buf.arrayOffset(), len); + out.write('\0'); + count(len + 1); + } + + /** + * Creates a new ArchiveEntry. The entryName must be an ASCII encoded string. 
+ * + * @see org.apache.commons.compress.archivers.ArchiveOutputStream#createArchiveEntry(java.io.File, java.lang.String) + */ + @Override + public ArchiveEntry createArchiveEntry(File inputFile, String entryName) + throws IOException { + if(finished) { + throw new IOException("Stream has already been finished"); + } + return new CpioArchiveEntry(inputFile, entryName); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioConstants.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioConstants.java new file mode 100644 index 000000000..b480d79c1 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioConstants.java @@ -0,0 +1,143 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.cpio; + +/** + * All constants needed by CPIO. 
+ * + * based on code from the jRPM project (jrpm.sourceforge.net) + * + */ +public interface CpioConstants { + /** magic number of a cpio entry in the new format */ + final String MAGIC_NEW = "070701"; + + /** magic number of a cpio entry in the new format with crc */ + final String MAGIC_NEW_CRC = "070702"; + + /** magic number of a cpio entry in the old ascii format */ + final String MAGIC_OLD_ASCII = "070707"; + + /** magic number of a cpio entry in the old binary format */ + final int MAGIC_OLD_BINARY = 070707; + + // These FORMAT_ constants are internal to the code + + /** write/read a CPIOArchiveEntry in the new format */ + final short FORMAT_NEW = 1; + + /** write/read a CPIOArchiveEntry in the new format with crc */ + final short FORMAT_NEW_CRC = 2; + + /** write/read a CPIOArchiveEntry in the old ascii format */ + final short FORMAT_OLD_ASCII = 4; + + /** write/read a CPIOArchiveEntry in the old binary format */ + final short FORMAT_OLD_BINARY = 8; + + /** Mask for both new formats */ + final short FORMAT_NEW_MASK = 3; + + /** Mask for both old formats */ + final short FORMAT_OLD_MASK = 12; + + /* + * Constants for the MODE bits + */ + + /** Mask for all file type bits. 
*/ + final int S_IFMT = 0170000; + + // http://www.opengroup.org/onlinepubs/9699919799/basedefs/cpio.h.html + // has a list of the C_xxx constatnts + + /** Defines a socket */ + final int C_ISSOCK = 0140000; + + /** Defines a symbolic link */ + final int C_ISLNK = 0120000; + + /** HP/UX network special (C_ISCTG) */ + final int C_ISNWK = 0110000; + + /** Defines a regular file */ + final int C_ISREG = 0100000; + + /** Defines a block device */ + final int C_ISBLK = 0060000; + + /** Defines a directory */ + final int C_ISDIR = 0040000; + + /** Defines a character device */ + final int C_ISCHR = 0020000; + + /** Defines a pipe */ + final int C_ISFIFO = 0010000; + + + /** Set user ID */ + final int C_ISUID = 0004000; + + /** Set group ID */ + final int C_ISGID = 0002000; + + /** On directories, restricted deletion flag. */ + final int C_ISVTX = 0001000; + + + /** Permits the owner of a file to read the file */ + final int C_IRUSR = 0000400; + + /** Permits the owner of a file to write to the file */ + final int C_IWUSR = 0000200; + + /** Permits the owner of a file to execute the file or to search the directory */ + final int C_IXUSR = 0000100; + + + /** Permits a file's group to read the file */ + final int C_IRGRP = 0000040; + + /** Permits a file's group to write to the file */ + final int C_IWGRP = 0000020; + + /** Permits a file's group to execute the file or to search the directory */ + final int C_IXGRP = 0000010; + + + /** Permits others to read the file */ + final int C_IROTH = 0000004; + + /** Permits others to write to the file */ + final int C_IWOTH = 0000002; + + /** Permits others to execute the file or to search the directory */ + final int C_IXOTH = 0000001; + + /** The special trailer marker */ + final String CPIO_TRAILER = "TRAILER!!!"; + + /** + * The default block size. 
+ * + * @since 1.1 + */ + final int BLOCK_SIZE = 512; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioUtil.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioUtil.java new file mode 100644 index 000000000..26b51fc48 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/CpioUtil.java @@ -0,0 +1,114 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.cpio; + +/** + * Package private utility class for Cpio + * + * @Immutable + */ +class CpioUtil { + + /** + * Extracts the file type bits from a mode. + */ + static long fileType(long mode) { + return mode & CpioConstants.S_IFMT; + } + + /** + * Converts a byte array to a long. Halfwords can be swapped by setting + * swapHalfWord=true. 
+ * + * @param number + * An array of bytes containing a number + * @param swapHalfWord + * Swap halfwords ([0][1][2][3]->[1][0][3][2]) + * @return The long value + * @throws UnsupportedOperationException if number length is not a multiple of 2 + */ + static long byteArray2long(final byte[] number, final boolean swapHalfWord) { + if (number.length % 2 != 0) { + throw new UnsupportedOperationException(); + } + + long ret = 0; + int pos = 0; + byte tmp_number[] = new byte[number.length]; + System.arraycopy(number, 0, tmp_number, 0, number.length); + + if (!swapHalfWord) { + byte tmp = 0; + for (pos = 0; pos < tmp_number.length; pos++) { + tmp = tmp_number[pos]; + tmp_number[pos++] = tmp_number[pos]; + tmp_number[pos] = tmp; + } + } + + ret = tmp_number[0] & 0xFF; + for (pos = 1; pos < tmp_number.length; pos++) { + ret <<= 8; + ret |= tmp_number[pos] & 0xFF; + } + return ret; + } + + /** + * Converts a long number to a byte array + * Halfwords can be swapped by setting swapHalfWord=true. + * + * @param number + * the input long number to be converted + * + * @param length + * The length of the returned array + * @param swapHalfWord + * Swap halfwords ([0][1][2][3]->[1][0][3][2]) + * @return The long value + * @throws UnsupportedOperationException if the length is not a positive multiple of two + */ + static byte[] long2byteArray(final long number, final int length, + final boolean swapHalfWord) { + byte[] ret = new byte[length]; + int pos = 0; + long tmp_number = 0; + + if (length % 2 != 0 || length < 2) { + throw new UnsupportedOperationException(); + } + + tmp_number = number; + for (pos = length - 1; pos >= 0; pos--) { + ret[pos] = (byte) (tmp_number & 0xFF); + tmp_number >>= 8; + } + + if (!swapHalfWord) { + byte tmp = 0; + for (pos = 0; pos < length; pos++) { + tmp = ret[pos]; + ret[pos++] = ret[pos]; + ret[pos] = tmp; + } + } + + return ret; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/package.html b/Tools/Cache 
Editor/src/org/apache/commons/compress/archivers/cpio/package.html new file mode 100644 index 000000000..985828725 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/cpio/package.html @@ -0,0 +1,24 @@ + + + +

Provides stream classes for reading and writing archives using + the CPIO format.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/Dirent.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/Dirent.java new file mode 100644 index 000000000..34e0ef791 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/Dirent.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +/** + * Directory entry. + */ +class Dirent { + private final int ino; + private final int parentIno; + private final int type; + private final String name; + + /** + * Constructor + * + * @param ino + * @param parentIno + * @param type + * @param name + */ + Dirent(int ino, int parentIno, int type, String name) { + this.ino = ino; + this.parentIno = parentIno; + this.type = type; + this.name = name; + } + + /** + * Get ino. + * @return the i-node + */ + int getIno() { + return ino; + } + + /** + * Get ino of parent directory. + * @return the parent i-node + */ + int getParentIno() { + return parentIno; + } + + /** + * Get entry type. + * @return the entry type + */ + int getType() { + return type; + } + + /** + * Get name of directory entry. 
+ * @return the directory name + */ + String getName() { + return name; + } + + /** + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return String.format("[%d]: %s", Integer.valueOf(ino), name); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java new file mode 100644 index 000000000..87ca8d913 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveConstants.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +/** + * Various constants associated with dump archives. 
+ */ +public final class DumpArchiveConstants { + public static final int TP_SIZE = 1024; + public static final int NTREC = 10; + public static final int HIGH_DENSITY_NTREC = 32; + public static final int OFS_MAGIC = 60011; + public static final int NFS_MAGIC = 60012; + public static final int FS_UFS2_MAGIC = 0x19540119; + public static final int CHECKSUM = 84446; + public static final int LBLSIZE = 16; + public static final int NAMELEN = 64; + + /* do not instantiate */ + private DumpArchiveConstants() { + } + + /** + * The type of tape segment. + */ + public enum SEGMENT_TYPE { + TAPE(1), + INODE(2), + BITS(3), + ADDR(4), + END(5), + CLRI(6); + + int code; + + private SEGMENT_TYPE(int code) { + this.code = code; + } + + public static SEGMENT_TYPE find(int code) { + for (SEGMENT_TYPE t : values()) { + if (t.code == code) { + return t; + } + } + + return null; + } + } + + /** + * The type of compression. + */ + public enum COMPRESSION_TYPE { + ZLIB(0), + BZLIB(1), + LZO(2); + + int code; + + private COMPRESSION_TYPE(int code) { + this.code = code; + } + + public static COMPRESSION_TYPE find(int code) { + for (COMPRESSION_TYPE t : values()) { + if (t.code == code) { + return t; + } + } + + return null; + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java new file mode 100644 index 000000000..1cb62c75d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveEntry.java @@ -0,0 +1,809 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import java.util.Collections; +import java.util.Date; +import java.util.EnumSet; +import java.util.HashSet; +import java.util.Set; +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * This class represents an entry in a Dump archive. It consists + * of the entry's header, the entry's File and any extended attributes. + *

+ * DumpEntries that are created from the header bytes read from + * an archive are instantiated with the DumpArchiveEntry( byte[] ) + * constructor. These entries will be used when extracting from + * or listing the contents of an archive. These entries have their + * header filled in using the header bytes. They also set the File + * to null, since they reference an archive entry not a file. + *

+ * DumpEntries can also be constructed from nothing but a name. + * This allows the programmer to construct the entry by hand, for + * instance when only an InputStream is available for writing to + * the archive, and the header information is constructed from + * other information. In this case the header fields are set to + * defaults and the File is set to null. + * + *

+ * The C structure for a Dump Entry's header is: + *

+ * #define TP_BSIZE    1024          // size of each file block
+ * #define NTREC       10            // number of blocks to write at once
+ * #define HIGHDENSITYTREC 32        // number of blocks to write on high-density tapes
+ * #define TP_NINDIR   (TP_BSIZE/2)  // number of indirect inodes in record
+ * #define TP_NINOS    (TP_NINDIR / sizeof (int32_t))
+ * #define LBLSIZE     16
+ * #define NAMELEN     64
+ *
+ * #define OFS_MAGIC     (int)60011  // old format magic value
+ * #define NFS_MAGIC     (int)60012  // new format magic value
+ * #define FS_UFS2_MAGIC (int)0x19540119
+ * #define CHECKSUM      (int)84446  // constant used in checksum algorithm
+ *
+ * struct  s_spcl {
+ *   int32_t c_type;             // record type (see below)
+ *   int32_t c_date;             // date of this dump
+ *   int32_t c_ddate;            // date of previous dump
+ *   int32_t c_volume;           // dump volume number
+ *   u_int32_t c_tapea;          // logical block of this record
+ *   dump_ino_t c_ino;           // number of inode
+ *   int32_t c_magic;            // magic number (see above)
+ *   int32_t c_checksum;         // record checksum
+ * #ifdef  __linux__
+ *   struct  new_bsd_inode c_dinode;
+ * #else
+ * #ifdef sunos
+ *   struct  new_bsd_inode c_dinode;
+ * #else
+ *   struct  dinode  c_dinode;   // ownership and mode of inode
+ * #endif
+ * #endif
+ *   int32_t c_count;            // number of valid c_addr entries
+ *   union u_data c_data;        // see above
+ *   char    c_label[LBLSIZE];   // dump label
+ *   int32_t c_level;            // level of this dump
+ *   char    c_filesys[NAMELEN]; // name of dumped file system
+ *   char    c_dev[NAMELEN];     // name of dumped device
+ *   char    c_host[NAMELEN];    // name of dumped host
+ *   int32_t c_flags;            // additional information (see below)
+ *   int32_t c_firstrec;         // first record on volume
+ *   int32_t c_ntrec;            // blocksize on volume
+ *   int32_t c_extattributes;    // additional inode info (see below)
+ *   int32_t c_spare[30];        // reserved for future uses
+ * } s_spcl;
+ *
+ * //
+ * // flag values
+ * //
+ * #define DR_NEWHEADER     0x0001  // new format tape header
+ * #define DR_NEWINODEFMT   0x0002  // new format inodes on tape
+ * #define DR_COMPRESSED    0x0080  // dump tape is compressed
+ * #define DR_METAONLY      0x0100  // only the metadata of the inode has been dumped
+ * #define DR_INODEINFO     0x0002  // [SIC] TS_END header contains c_inos information
+ * #define DR_EXTATTRIBUTES 0x8000
+ *
+ * //
+ * // extattributes inode info
+ * //
+ * #define EXT_REGULAR         0
+ * #define EXT_MACOSFNDRINFO   1
+ * #define EXT_MACOSRESFORK    2
+ * #define EXT_XATTR           3
+ *
+ * // used for EA on tape
+ * #define EXT2_GOOD_OLD_INODE_SIZE    128
+ * #define EXT2_XATTR_MAGIC        0xEA020000  // block EA
+ * #define EXT2_XATTR_MAGIC2       0xEA020001  // in inode EA
+ * 
+ *

+ * The fields in bold are the same for all blocks. (This permitted + * multiple dumps to be written to a single tape.) + *

+ * + *

+ * The C structure for the inode (file) information is: + *

+ * struct bsdtimeval {           //  **** alpha-*-linux is deviant
+ *   __u32   tv_sec;
+ *   __u32   tv_usec;
+ * };
+ *
+ * #define NDADDR      12
+ * #define NIADDR       3
+ *
+ * //
+ * // This is the new (4.4) BSD inode structure
+ * // copied from the FreeBSD 2.0 <ufs/ufs/dinode.h> include file
+ * //
+ * struct new_bsd_inode {
+ *   __u16       di_mode;           // file type, standard Unix permissions
+ *   __s16       di_nlink;          // number of hard links to file.
+ *   union {
+ *      __u16       oldids[2];
+ *      __u32       inumber;
+ *   }           di_u;
+ *   u_quad_t    di_size;           // file size
+ *   struct bsdtimeval   di_atime;  // time file was last accessed
+ *   struct bsdtimeval   di_mtime;  // time file was last modified
+ *   struct bsdtimeval   di_ctime;  // time file was created
+ *   __u32       di_db[NDADDR];
+ *   __u32       di_ib[NIADDR];
+ *   __u32       di_flags;          //
+ *   __s32       di_blocks;         // number of disk blocks
+ *   __s32       di_gen;            // generation number
+ *   __u32       di_uid;            // user id (see /etc/passwd)
+ *   __u32       di_gid;            // group id (see /etc/group)
+ *   __s32       di_spare[2];       // unused
+ * };
+ * 
+ *

+ * It is important to note that the header DOES NOT have the name of the + * file. It can't since hard links mean that you may have multiple filenames + * for a single physical file. You must read the contents of the directory + * entries to learn the mapping(s) from filename to inode. + *

+ * + *

+ * The C structure that indicates if a specific block is a real block + * that contains data or is a sparse block that is not persisted to the + * disk is:

+ *
+ * #define TP_BSIZE    1024
+ * #define TP_NINDIR   (TP_BSIZE/2)
+ *
+ * union u_data {
+ *   char    s_addrs[TP_NINDIR]; // 1 => data; 0 => hole in inode
+ *   int32_t s_inos[TP_NINOS];   // table of first inode on each volume
+ * } u_data;
+ * 
+ * + * @NotThreadSafe + */ +public class DumpArchiveEntry implements ArchiveEntry { + private String name; + private TYPE type = TYPE.UNKNOWN; + private int mode; + private Set permissions = Collections.emptySet(); + private long size; + private long atime; + private long mtime; + private int uid; + private int gid; + + /** + * Currently unused + */ + private final DumpArchiveSummary summary = null; + + // this information is available from standard index. + private final TapeSegmentHeader header = new TapeSegmentHeader(); + private String simpleName; + private String originalName; + + // this information is available from QFA index + private int volume; + private long offset; + private int ino; + private int nlink; + private long ctime; + private int generation; + private boolean isDeleted; + + /** + * Default constructor. + */ + public DumpArchiveEntry() { + } + + /** + * Constructor taking only filename. + * @param name pathname + * @param simpleName actual filename. + */ + public DumpArchiveEntry(String name, String simpleName) { + setName(name); + this.simpleName = simpleName; + } + + /** + * Constructor taking name, inode and type. + * + * @param name + * @param simpleName + * @param ino + * @param type + */ + protected DumpArchiveEntry(String name, String simpleName, int ino, + TYPE type) { + setType(type); + setName(name); + this.simpleName = simpleName; + this.ino = ino; + this.offset = 0; + } + + /** + * Constructor taking tape buffer. + * @param buffer + * @param offset + */ + + /** + * Returns the path of the entry. + * @return the path of the entry. + */ + public String getSimpleName() { + return simpleName; + } + + /** + * Sets the path of the entry. + */ + protected void setSimpleName(String simpleName) { + this.simpleName = simpleName; + } + + /** + * Returns the ino of the entry. + */ + public int getIno() { + return header.getIno(); + } + + /** + * Return the number of hard links to the entry. 
+ */ + public int getNlink() { + return nlink; + } + + /** + * Set the number of hard links. + */ + public void setNlink(int nlink) { + this.nlink = nlink; + } + + /** + * Get file creation time. + */ + public Date getCreationTime() { + return new Date(ctime); + } + + /** + * Set the file creation time. + */ + public void setCreationTime(Date ctime) { + this.ctime = ctime.getTime(); + } + + /** + * Return the generation of the file. + */ + public int getGeneration() { + return generation; + } + + /** + * Set the generation of the file. + */ + public void setGeneration(int generation) { + this.generation = generation; + } + + /** + * Has this file been deleted? (On valid on incremental dumps.) + */ + public boolean isDeleted() { + return isDeleted; + } + + /** + * Set whether this file has been deleted. + */ + public void setDeleted(boolean isDeleted) { + this.isDeleted = isDeleted; + } + + /** + * Return the offset within the archive + */ + public long getOffset() { + return offset; + } + + /** + * Set the offset within the archive. + */ + public void setOffset(long offset) { + this.offset = offset; + } + + /** + * Return the tape volume where this file is located. + */ + public int getVolume() { + return volume; + } + + /** + * Set the tape volume. + */ + public void setVolume(int volume) { + this.volume = volume; + } + + /** + * Return the type of the tape segment header. + */ + public DumpArchiveConstants.SEGMENT_TYPE getHeaderType() { + return header.getType(); + } + + /** + * Return the number of records in this segment. + */ + public int getHeaderCount() { + return header.getCount(); + } + + /** + * Return the number of sparse records in this segment. + */ + public int getHeaderHoles() { + return header.getHoles(); + } + + /** + * Is this a sparse record? 
+ */ + public boolean isSparseRecord(int idx) { + return (header.getCdata(idx) & 0x01) == 0; + } + + /** + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + return ino; + } + + /** + * @see java.lang.Object#equals(Object o) + */ + @Override + public boolean equals(Object o) { + if (o == this) { + return true; + } else if (o == null || !o.getClass().equals(getClass())) { + return false; + } + + DumpArchiveEntry rhs = (DumpArchiveEntry) o; + + if ((header == null) || (rhs.header == null)) { + return false; + } + + if (ino != rhs.ino) { + return false; + } + + if ((summary == null && rhs.summary != null) + || (summary != null && !summary.equals(rhs.summary))) { + return false; + } + + return true; + } + + /** + * @see java.lang.Object#toString() + */ + @Override + public String toString() { + return getName(); + } + + /** + * Populate the dump archive entry and tape segment header with + * the contents of the buffer. + * + * @param buffer + * @throws Exception + */ + static DumpArchiveEntry parse(byte[] buffer) { + DumpArchiveEntry entry = new DumpArchiveEntry(); + TapeSegmentHeader header = entry.header; + + header.type = DumpArchiveConstants.SEGMENT_TYPE.find(DumpArchiveUtil.convert32( + buffer, 0)); + + //header.dumpDate = new Date(1000L * DumpArchiveUtil.convert32(buffer, 4)); + //header.previousDumpDate = new Date(1000L * DumpArchiveUtil.convert32( + // buffer, 8)); + header.volume = DumpArchiveUtil.convert32(buffer, 12); + //header.tapea = DumpArchiveUtil.convert32(buffer, 16); + entry.ino = header.ino = DumpArchiveUtil.convert32(buffer, 20); + + //header.magic = DumpArchiveUtil.convert32(buffer, 24); + //header.checksum = DumpArchiveUtil.convert32(buffer, 28); + int m = DumpArchiveUtil.convert16(buffer, 32); + + // determine the type of the file. + entry.setType(TYPE.find((m >> 12) & 0x0F)); + + // determine the standard permissions + entry.setMode(m); + + entry.nlink = DumpArchiveUtil.convert16(buffer, 34); + // inumber, oldids? 
+ entry.setSize(DumpArchiveUtil.convert64(buffer, 40)); + + long t = (1000L * DumpArchiveUtil.convert32(buffer, 48)) + + (DumpArchiveUtil.convert32(buffer, 52) / 1000); + entry.setAccessTime(new Date(t)); + t = (1000L * DumpArchiveUtil.convert32(buffer, 56)) + + (DumpArchiveUtil.convert32(buffer, 60) / 1000); + entry.setLastModifiedDate(new Date(t)); + t = (1000L * DumpArchiveUtil.convert32(buffer, 64)) + + (DumpArchiveUtil.convert32(buffer, 68) / 1000); + entry.ctime = t; + + // db: 72-119 - direct blocks + // id: 120-131 - indirect blocks + //entry.flags = DumpArchiveUtil.convert32(buffer, 132); + //entry.blocks = DumpArchiveUtil.convert32(buffer, 136); + entry.generation = DumpArchiveUtil.convert32(buffer, 140); + entry.setUserId(DumpArchiveUtil.convert32(buffer, 144)); + entry.setGroupId(DumpArchiveUtil.convert32(buffer, 148)); + // two 32-bit spare values. + header.count = DumpArchiveUtil.convert32(buffer, 160); + + header.holes = 0; + + for (int i = 0; (i < 512) && (i < header.count); i++) { + if (buffer[164 + i] == 0) { + header.holes++; + } + } + + System.arraycopy(buffer, 164, header.cdata, 0, 512); + + entry.volume = header.getVolume(); + + //entry.isSummaryOnly = false; + return entry; + } + + /** + * Update entry with information from next tape segment header. + */ + void update(byte[] buffer) { + header.volume = DumpArchiveUtil.convert32(buffer, 16); + header.count = DumpArchiveUtil.convert32(buffer, 160); + + header.holes = 0; + + for (int i = 0; (i < 512) && (i < header.count); i++) { + if (buffer[164 + i] == 0) { + header.holes++; + } + } + + System.arraycopy(buffer, 164, header.cdata, 0, 512); + } + + /** + * Archive entry as stored on tape. There is one TSH for (at most) + * every 512k in the file. 
+ */ + static class TapeSegmentHeader { + private DumpArchiveConstants.SEGMENT_TYPE type; + private int volume; + private int ino; + private int count; + private int holes; + private final byte[] cdata = new byte[512]; // map of any 'holes' + + public DumpArchiveConstants.SEGMENT_TYPE getType() { + return type; + } + + public int getVolume() { + return volume; + } + + public int getIno() { + return ino; + } + + void setIno(int ino) { + this.ino = ino; + } + + public int getCount() { + return count; + } + + public int getHoles() { + return holes; + } + + public int getCdata(int idx) { + return cdata[idx]; + } + } + + /** + * Returns the name of the entry. + * @return the name of the entry. + */ + public String getName() { + return name; + } + + /** + * Returns the unmodified name of the entry. + * @return the name of the entry. + */ + String getOriginalName() { + return originalName; + } + + /** + * Sets the name of the entry. + */ + public final void setName(String name) { + this.originalName = name; + if (name != null) { + if (isDirectory() && !name.endsWith("/")) { + name += "/"; + } + if (name.startsWith("./")) { + name = name.substring(2); + } + } + this.name = name; + } + + public Date getLastModifiedDate() { + return new Date(mtime); + } + + /** + * Is this a directory? + */ + public boolean isDirectory() { + return type == TYPE.DIRECTORY; + } + + /** + * Is this a regular file? + */ + public boolean isFile() { + return type == TYPE.FILE; + } + + /** + * Is this a network device? + */ + public boolean isSocket() { + return type == TYPE.SOCKET; + } + + /** + * Is this a character device? + */ + public boolean isChrDev() { + return type == TYPE.CHRDEV; + } + + /** + * Is this a block device? + */ + public boolean isBlkDev() { + return type == TYPE.BLKDEV; + } + + /** + * Is this a fifo/pipe? + */ + public boolean isFifo() { + return type == TYPE.FIFO; + } + + /** + * Get the type of the entry. 
+ */ + public TYPE getType() { + return type; + } + + /** + * Set the type of the entry. + */ + public void setType(TYPE type) { + this.type = type; + } + + /** + * Return the access permissions on the entry. + */ + public int getMode() { + return mode; + } + + /** + * Set the access permissions on the entry. + */ + public void setMode(int mode) { + this.mode = mode & 07777; + this.permissions = PERMISSION.find(mode); + } + + /** + * Returns the permissions on the entry. + */ + public Set getPermissions() { + return permissions; + } + + /** + * Returns the size of the entry. + */ + public long getSize() { + return isDirectory() ? SIZE_UNKNOWN : size; + } + + /** + * Returns the size of the entry as read from the archive. + */ + long getEntrySize() { + return size; + } + + /** + * Set the size of the entry. + */ + public void setSize(long size) { + this.size = size; + } + + /** + * Set the time the file was last modified. + */ + public void setLastModifiedDate(Date mtime) { + this.mtime = mtime.getTime(); + } + + /** + * Returns the time the file was last accessed. + */ + public Date getAccessTime() { + return new Date(atime); + } + + /** + * Set the time the file was last accessed. + */ + public void setAccessTime(Date atime) { + this.atime = atime.getTime(); + } + + /** + * Return the user id. + */ + public int getUserId() { + return uid; + } + + /** + * Set the user id. + */ + public void setUserId(int uid) { + this.uid = uid; + } + + /** + * Return the group id + */ + public int getGroupId() { + return gid; + } + + /** + * Set the group id. 
+ */ + public void setGroupId(int gid) { + this.gid = gid; + } + + public enum TYPE { + WHITEOUT(14), + SOCKET(12), + LINK(10), + FILE(8), + BLKDEV(6), + DIRECTORY(4), + CHRDEV(2), + FIFO(1), + UNKNOWN(15); + + private int code; + + private TYPE(int code) { + this.code = code; + } + + public static TYPE find(int code) { + TYPE type = UNKNOWN; + + for (TYPE t : TYPE.values()) { + if (code == t.code) { + type = t; + } + } + + return type; + } + } + + public enum PERMISSION { + SETUID(04000), + SETGUI(02000), + STICKY(01000), + USER_READ(00400), + USER_WRITE(00200), + USER_EXEC(00100), + GROUP_READ(00040), + GROUP_WRITE(00020), + GROUP_EXEC(00010), + WORLD_READ(00004), + WORLD_WRITE(00002), + WORLD_EXEC(00001); + + private int code; + + private PERMISSION(int code) { + this.code = code; + } + + public static Set find(int code) { + Set set = new HashSet(); + + for (PERMISSION p : PERMISSION.values()) { + if ((code & p.code) == p.code) { + set.add(p); + } + } + + if (set.isEmpty()) { + return Collections.emptySet(); + } + + return EnumSet.copyOf(set); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveException.java new file mode 100644 index 000000000..8e6a9937f --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveException.java @@ -0,0 +1,45 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import java.io.IOException; + + +/** + * Dump Archive Exception + */ +public class DumpArchiveException extends IOException { + private static final long serialVersionUID = 1L; + + public DumpArchiveException() { + } + + public DumpArchiveException(String msg) { + super(msg); + } + + public DumpArchiveException(Throwable cause) { + initCause(cause); + } + + public DumpArchiveException(String msg, Throwable cause) { + super(msg); + initCause(cause); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java new file mode 100644 index 000000000..0f804545d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveInputStream.java @@ -0,0 +1,548 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import org.apache.commons.compress.archivers.ArchiveException; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; + +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; + +import java.util.Arrays; +import java.util.Comparator; +import java.util.HashMap; +import java.util.Map; +import java.util.PriorityQueue; +import java.util.Queue; +import java.util.Stack; + +/** + * The DumpArchiveInputStream reads a UNIX dump archive as an InputStream. + * Methods are provided to position at each successive entry in + * the archive, and the read each entry as a normal input stream + * using read(). + * + * There doesn't seem to exist a hint on the encoding of string values + * in any piece documentation. Given the main purpose of dump/restore + * is backing up a system it seems very likely the format uses the + * current default encoding of the system. 
+ * + * @NotThreadSafe + */ +public class DumpArchiveInputStream extends ArchiveInputStream { + private DumpArchiveSummary summary; + private DumpArchiveEntry active; + private boolean isClosed; + private boolean hasHitEOF; + private long entrySize; + private long entryOffset; + private int readIdx; + private final byte[] readBuf = new byte[DumpArchiveConstants.TP_SIZE]; + private byte[] blockBuffer; + private int recordOffset; + private long filepos; + protected TapeInputStream raw; + + // map of ino -> dirent entry. We can use this to reconstruct full paths. + private final Map names = new HashMap(); + + // map of ino -> (directory) entry when we're missing one or more elements in the path. + private final Map pending = new HashMap(); + + // queue of (directory) entries where we now have the full path. + private Queue queue; + + /** + * The encoding to use for filenames and labels. + */ + private final ZipEncoding encoding; + + /** + * Constructor using the platform's default encoding for file + * names. + * + * @param is + * @throws ArchiveException + */ + public DumpArchiveInputStream(InputStream is) throws ArchiveException { + this(is, null); + } + + /** + * Constructor. + * + * @param is + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * @since 1.6 + */ + public DumpArchiveInputStream(InputStream is, String encoding) + throws ArchiveException { + this.raw = new TapeInputStream(is); + this.hasHitEOF = false; + this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + + try { + // read header, verify it's a dump archive. + byte[] headerBytes = raw.readRecord(); + + if (!DumpArchiveUtil.verify(headerBytes)) { + throw new UnrecognizedFormatException(); + } + + // get summary information + summary = new DumpArchiveSummary(headerBytes, this.encoding); + + // reset buffer with actual block size. + raw.resetBlockSize(summary.getNTRec(), summary.isCompressed()); + + // allocate our read buffer. 
+ blockBuffer = new byte[4 * DumpArchiveConstants.TP_SIZE]; + + // skip past CLRI and BITS segments since we don't handle them yet. + readCLRI(); + readBITS(); + } catch (IOException ex) { + throw new ArchiveException(ex.getMessage(), ex); + } + + // put in a dummy record for the root node. + Dirent root = new Dirent(2, 2, 4, "."); + names.put(Integer.valueOf(2), root); + + // use priority based on queue to ensure parent directories are + // released first. + queue = new PriorityQueue(10, + new Comparator() { + public int compare(DumpArchiveEntry p, DumpArchiveEntry q) { + if (p.getOriginalName() == null || q.getOriginalName() == null) { + return Integer.MAX_VALUE; + } + + return p.getOriginalName().compareTo(q.getOriginalName()); + } + }); + } + + @Deprecated + @Override + public int getCount() { + return (int) getBytesRead(); + } + + @Override + public long getBytesRead() { + return raw.getBytesRead(); + } + + /** + * Return the archive summary information. + */ + public DumpArchiveSummary getSummary() { + return summary; + } + + /** + * Read CLRI (deleted inode) segment. + */ + private void readCLRI() throws IOException { + byte[] buffer = raw.readRecord(); + + if (!DumpArchiveUtil.verify(buffer)) { + throw new InvalidFormatException(); + } + + active = DumpArchiveEntry.parse(buffer); + + if (DumpArchiveConstants.SEGMENT_TYPE.CLRI != active.getHeaderType()) { + throw new InvalidFormatException(); + } + + // we don't do anything with this yet. + if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) + == -1) { + throw new EOFException(); + } + readIdx = active.getHeaderCount(); + } + + /** + * Read BITS segment. 
+ */ + private void readBITS() throws IOException { + byte[] buffer = raw.readRecord(); + + if (!DumpArchiveUtil.verify(buffer)) { + throw new InvalidFormatException(); + } + + active = DumpArchiveEntry.parse(buffer); + + if (DumpArchiveConstants.SEGMENT_TYPE.BITS != active.getHeaderType()) { + throw new InvalidFormatException(); + } + + // we don't do anything with this yet. + if (raw.skip(DumpArchiveConstants.TP_SIZE * active.getHeaderCount()) + == -1) { + throw new EOFException(); + } + readIdx = active.getHeaderCount(); + } + + /** + * Read the next entry. + */ + public DumpArchiveEntry getNextDumpEntry() throws IOException { + return getNextEntry(); + } + + /** + * Read the next entry. + */ + @Override + public DumpArchiveEntry getNextEntry() throws IOException { + DumpArchiveEntry entry = null; + String path = null; + + // is there anything in the queue? + if (!queue.isEmpty()) { + return queue.remove(); + } + + while (entry == null) { + if (hasHitEOF) { + return null; + } + + // skip any remaining records in this segment for prior file. + // we might still have holes... easiest to do it + // block by block. We may want to revisit this if + // the unnecessary decompression time adds up. + while (readIdx < active.getHeaderCount()) { + if (!active.isSparseRecord(readIdx++) + && raw.skip(DumpArchiveConstants.TP_SIZE) == -1) { + throw new EOFException(); + } + } + + readIdx = 0; + filepos = raw.getBytesRead(); + + byte[] headerBytes = raw.readRecord(); + + if (!DumpArchiveUtil.verify(headerBytes)) { + throw new InvalidFormatException(); + } + + active = DumpArchiveEntry.parse(headerBytes); + + // skip any remaining segments for prior file. 
+ while (DumpArchiveConstants.SEGMENT_TYPE.ADDR == active.getHeaderType()) { + if (raw.skip(DumpArchiveConstants.TP_SIZE + * (active.getHeaderCount() + - active.getHeaderHoles())) == -1) { + throw new EOFException(); + } + + filepos = raw.getBytesRead(); + headerBytes = raw.readRecord(); + + if (!DumpArchiveUtil.verify(headerBytes)) { + throw new InvalidFormatException(); + } + + active = DumpArchiveEntry.parse(headerBytes); + } + + // check if this is an end-of-volume marker. + if (DumpArchiveConstants.SEGMENT_TYPE.END == active.getHeaderType()) { + hasHitEOF = true; + + return null; + } + + entry = active; + + if (entry.isDirectory()) { + readDirectoryEntry(active); + + // now we create an empty InputStream. + entryOffset = 0; + entrySize = 0; + readIdx = active.getHeaderCount(); + } else { + entryOffset = 0; + entrySize = active.getEntrySize(); + readIdx = 0; + } + + recordOffset = readBuf.length; + + path = getPath(entry); + + if (path == null) { + entry = null; + } + } + + entry.setName(path); + entry.setSimpleName(names.get(Integer.valueOf(entry.getIno())).getName()); + entry.setOffset(filepos); + + return entry; + } + + /** + * Read directory entry. + */ + private void readDirectoryEntry(DumpArchiveEntry entry) + throws IOException { + long size = entry.getEntrySize(); + boolean first = true; + + while (first || + DumpArchiveConstants.SEGMENT_TYPE.ADDR == entry.getHeaderType()) { + // read the header that we just peeked at. 
+ if (!first) { + raw.readRecord(); + } + + if (!names.containsKey(Integer.valueOf(entry.getIno())) && + DumpArchiveConstants.SEGMENT_TYPE.INODE == entry.getHeaderType()) { + pending.put(Integer.valueOf(entry.getIno()), entry); + } + + int datalen = DumpArchiveConstants.TP_SIZE * entry.getHeaderCount(); + + if (blockBuffer.length < datalen) { + blockBuffer = new byte[datalen]; + } + + if (raw.read(blockBuffer, 0, datalen) != datalen) { + throw new EOFException(); + } + + int reclen = 0; + + for (int i = 0; i < datalen - 8 && i < size - 8; + i += reclen) { + int ino = DumpArchiveUtil.convert32(blockBuffer, i); + reclen = DumpArchiveUtil.convert16(blockBuffer, i + 4); + + byte type = blockBuffer[i + 6]; + + String name = DumpArchiveUtil.decode(encoding, blockBuffer, i + 8, blockBuffer[i + 7]); + + if (".".equals(name) || "..".equals(name)) { + // do nothing... + continue; + } + + Dirent d = new Dirent(ino, entry.getIno(), type, name); + + /* + if ((type == 4) && names.containsKey(ino)) { + System.out.println("we already have ino: " + + names.get(ino)); + } + */ + + names.put(Integer.valueOf(ino), d); + + // check whether this allows us to fill anything in the pending list. + for (Map.Entry e : pending.entrySet()) { + String path = getPath(e.getValue()); + + if (path != null) { + e.getValue().setName(path); + e.getValue() + .setSimpleName(names.get(e.getKey()).getName()); + queue.add(e.getValue()); + } + } + + // remove anything that we found. (We can't do it earlier + // because of concurrent modification exceptions.) + for (DumpArchiveEntry e : queue) { + pending.remove(Integer.valueOf(e.getIno())); + } + } + + byte[] peekBytes = raw.peek(); + + if (!DumpArchiveUtil.verify(peekBytes)) { + throw new InvalidFormatException(); + } + + entry = DumpArchiveEntry.parse(peekBytes); + first = false; + size -= DumpArchiveConstants.TP_SIZE; + } + } + + /** + * Get full path for specified archive entry, or null if there's a gap. 
+ * + * @param entry + * @return full path for specified archive entry, or null if there's a gap. + */ + private String getPath(DumpArchiveEntry entry) { + // build the stack of elements. It's possible that we're + // still missing an intermediate value and if so we + Stack elements = new Stack(); + Dirent dirent = null; + + for (int i = entry.getIno();; i = dirent.getParentIno()) { + if (!names.containsKey(Integer.valueOf(i))) { + elements.clear(); + break; + } + + dirent = names.get(Integer.valueOf(i)); + elements.push(dirent.getName()); + + if (dirent.getIno() == dirent.getParentIno()) { + break; + } + } + + // if an element is missing defer the work and read next entry. + if (elements.isEmpty()) { + pending.put(Integer.valueOf(entry.getIno()), entry); + + return null; + } + + // generate full path from stack of elements. + StringBuilder sb = new StringBuilder(elements.pop()); + + while (!elements.isEmpty()) { + sb.append('/'); + sb.append(elements.pop()); + } + + return sb.toString(); + } + + /** + * Reads bytes from the current dump archive entry. + * + * This method is aware of the boundaries of the current + * entry in the archive and will deal with them as if they + * were this stream's start and EOF. + * + * @param buf The buffer into which to place bytes read. + * @param off The offset at which to place bytes read. + * @param len The number of bytes to read. + * @return The number of bytes read, or -1 at EOF. + * @throws IOException on error + */ + @Override + public int read(byte[] buf, int off, int len) throws IOException { + int totalRead = 0; + + if (hasHitEOF || isClosed || entryOffset >= entrySize) { + return -1; + } + + if (len + entryOffset > entrySize) { + len = (int) (entrySize - entryOffset); + } + + while (len > 0) { + int sz = len > readBuf.length - recordOffset + ? 
readBuf.length - recordOffset : len; + + // copy any data we have + if (recordOffset + sz <= readBuf.length) { + System.arraycopy(readBuf, recordOffset, buf, off, sz); + totalRead += sz; + recordOffset += sz; + len -= sz; + off += sz; + } + + // load next block if necessary. + if (len > 0) { + if (readIdx >= 512) { + byte[] headerBytes = raw.readRecord(); + + if (!DumpArchiveUtil.verify(headerBytes)) { + throw new InvalidFormatException(); + } + + active = DumpArchiveEntry.parse(headerBytes); + readIdx = 0; + } + + if (!active.isSparseRecord(readIdx++)) { + int r = raw.read(readBuf, 0, readBuf.length); + if (r != readBuf.length) { + throw new EOFException(); + } + } else { + Arrays.fill(readBuf, (byte) 0); + } + + recordOffset = 0; + } + } + + entryOffset += totalRead; + + return totalRead; + } + + /** + * Closes the stream for this entry. + */ + @Override + public void close() throws IOException { + if (!isClosed) { + isClosed = true; + raw.close(); + } + } + + /** + * Look at the first few bytes of the file to decide if it's a dump + * archive. With 32 bytes we can look at the magic value, with a full + * 1k we can verify the checksum. + */ + public static boolean matches(byte[] buffer, int length) { + // do we have enough of the header? + if (length < 32) { + return false; + } + + // this is the best test + if (length >= DumpArchiveConstants.TP_SIZE) { + return DumpArchiveUtil.verify(buffer); + } + + // this will work in a pinch. 
+ return DumpArchiveConstants.NFS_MAGIC == DumpArchiveUtil.convert32(buffer, + 24); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java new file mode 100644 index 000000000..08b9e8f20 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveSummary.java @@ -0,0 +1,335 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import java.io.IOException; +import java.util.Date; + +import org.apache.commons.compress.archivers.zip.ZipEncoding; + +/** + * This class represents identifying information about a Dump archive volume. + * It consists the archive's dump date, label, hostname, device name and possibly + * last mount point plus the volume's volume id andfirst record number. + * + * For the corresponding C structure see the header of {@link DumpArchiveEntry}. 
+ */ +public class DumpArchiveSummary { + private long dumpDate; + private long previousDumpDate; + private int volume; + private String label; + private int level; + private String filesys; + private String devname; + private String hostname; + private int flags; + private int firstrec; + private int ntrec; + + DumpArchiveSummary(byte[] buffer, ZipEncoding encoding) throws IOException { + dumpDate = 1000L * DumpArchiveUtil.convert32(buffer, 4); + previousDumpDate = 1000L * DumpArchiveUtil.convert32(buffer, 8); + volume = DumpArchiveUtil.convert32(buffer, 12); + label = DumpArchiveUtil.decode(encoding, buffer, 676, DumpArchiveConstants.LBLSIZE).trim(); + level = DumpArchiveUtil.convert32(buffer, 692); + filesys = DumpArchiveUtil.decode(encoding, buffer, 696, DumpArchiveConstants.NAMELEN).trim(); + devname = DumpArchiveUtil.decode(encoding, buffer, 760, DumpArchiveConstants.NAMELEN).trim(); + hostname = DumpArchiveUtil.decode(encoding, buffer, 824, DumpArchiveConstants.NAMELEN).trim(); + flags = DumpArchiveUtil.convert32(buffer, 888); + firstrec = DumpArchiveUtil.convert32(buffer, 892); + ntrec = DumpArchiveUtil.convert32(buffer, 896); + + //extAttributes = DumpArchiveUtil.convert32(buffer, 900); + } + + /** + * Get the date of this dump. + * @return the date of this dump. + */ + public Date getDumpDate() { + return new Date(dumpDate); + } + + /** + * Set dump date. + */ + public void setDumpDate(Date dumpDate) { + this.dumpDate = dumpDate.getTime(); + } + + /** + * Get the date of the previous dump at this level higher. + * @return dumpdate may be null + */ + public Date getPreviousDumpDate() { + return new Date(previousDumpDate); + } + + /** + * Set previous dump date. + */ + public void setPreviousDumpDate(Date previousDumpDate) { + this.previousDumpDate = previousDumpDate.getTime(); + } + + /** + * Get volume (tape) number. + * @return volume (tape) number. + */ + public int getVolume() { + return volume; + } + + /** + * Set volume (tape) number. 
+ */ + public void setVolume(int volume) { + this.volume = volume; + } + + /** + * Get the level of this dump. This is a number between 0 and 9, inclusive, + * and a level 0 dump is a complete dump of the partition. For any other dump + * 'n' this dump contains all files that have changed since the last dump + * at this level or lower. This is used to support different levels of + * incremental backups. + * @return dump level + */ + public int getLevel() { + return level; + } + + /** + * Set level. + */ + public void setLevel(int level) { + this.level = level; + } + + /** + * Get dump label. This may be autogenerated or it may be specified + * bu the user. + * @return dump label + */ + public String getLabel() { + return label; + } + + /** + * Set dump label. + * @param label + */ + public void setLabel(String label) { + this.label = label; + } + + /** + * Get the last mountpoint, e.g., /home. + * @return last mountpoint + */ + public String getFilesystem() { + return filesys; + } + + /** + * Set the last mountpoint. + */ + public void setFilesystem(String filesystem) { + this.filesys = filesystem; + } + + /** + * Get the device name, e.g., /dev/sda3 or /dev/mapper/vg0-home. + * @return device name + */ + public String getDevname() { + return devname; + } + + /** + * Set the device name. + * @param devname + */ + public void setDevname(String devname) { + this.devname = devname; + } + + /** + * Get the hostname of the system where the dump was performed. + * @return hostname + */ + public String getHostname() { + return hostname; + } + + /** + * Set the hostname. + */ + public void setHostname(String hostname) { + this.hostname = hostname; + } + + /** + * Get the miscellaneous flags. See below. + * @return flags + */ + public int getFlags() { + return flags; + } + + /** + * Set the miscellaneous flags. + * @param flags + */ + public void setFlags(int flags) { + this.flags = flags; + } + + /** + * Get the inode of the first record on this volume. 
+ * @return inode of the first record on this volume. + */ + public int getFirstRecord() { + return firstrec; + } + + /** + * Set the inode of the first record. + * @param firstrec + */ + public void setFirstRecord(int firstrec) { + this.firstrec = firstrec; + } + + /** + * Get the number of records per tape block. This is typically + * between 10 and 32. + * @return the number of records per tape block + */ + public int getNTRec() { + return ntrec; + } + + /** + * Set the number of records per tape block. + */ + public void setNTRec(int ntrec) { + this.ntrec = ntrec; + } + + /** + * Is this the new header format? (We do not currently support the + * old format.) + * + * @return true if using new header format + */ + public boolean isNewHeader() { + return (flags & 0x0001) == 0x0001; + } + + /** + * Is this the new inode format? (We do not currently support the + * old format.) + * @return true if using new inode format + */ + public boolean isNewInode() { + return (flags & 0x0002) == 0x0002; + } + + /** + * Is this volume compressed? N.B., individual blocks may or may not be compressed. + * The first block is never compressed. + * @return true if volume is compressed + */ + public boolean isCompressed() { + return (flags & 0x0080) == 0x0080; + } + + /** + * Does this volume only contain metadata? + * @return true if volume only contains meta-data + */ + public boolean isMetaDataOnly() { + return (flags & 0x0100) == 0x0100; + } + + /** + * Does this volume cotain extended attributes. + * @return true if volume cotains extended attributes. 
+ */ + public boolean isExtendedAttributes() { + return (flags & 0x8000) == 0x8000; + } + + /** + * @see java.lang.Object#hashCode() + */ + @Override + public int hashCode() { + int hash = 17; + + if (label != null) { + hash = label.hashCode(); + } + + hash += 31 * dumpDate; + + if (hostname != null) { + hash = (31 * hostname.hashCode()) + 17; + } + + if (devname != null) { + hash = (31 * devname.hashCode()) + 17; + } + + return hash; + } + + /** + * @see java.lang.Object#equals(Object) + */ + @Override + public boolean equals(Object o) { + if (this == o) { + return true; + } + + if (o == null || !o.getClass().equals(getClass())) { + return false; + } + + DumpArchiveSummary rhs = (DumpArchiveSummary) o; + + if (dumpDate != rhs.dumpDate) { + return false; + } + + if ((getHostname() == null) || + !getHostname().equals(rhs.getHostname())) { + return false; + } + + if ((getDevname() == null) || !getDevname().equals(rhs.getDevname())) { + return false; + } + + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java new file mode 100644 index 000000000..5b7494499 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/DumpArchiveUtil.java @@ -0,0 +1,145 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import java.io.IOException; +import org.apache.commons.compress.archivers.zip.ZipEncoding; + +/** + * Various utilities for dump archives. + */ +class DumpArchiveUtil { + /** + * Private constructor to prevent instantiation. + */ + private DumpArchiveUtil() { + } + + /** + * Calculate checksum for buffer. + * + * @param buffer buffer containing tape segment header + * @returns checksum + */ + public static int calculateChecksum(byte[] buffer) { + int calc = 0; + + for (int i = 0; i < 256; i++) { + calc += DumpArchiveUtil.convert32(buffer, 4 * i); + } + + return DumpArchiveConstants.CHECKSUM - + (calc - DumpArchiveUtil.convert32(buffer, 28)); + } + + /** + * Verify that the buffer contains a tape segment header. + * + * @param buffer + */ + public static final boolean verify(byte[] buffer) { + // verify magic. for now only accept NFS_MAGIC. + int magic = convert32(buffer, 24); + + if (magic != DumpArchiveConstants.NFS_MAGIC) { + return false; + } + + //verify checksum... + int checksum = convert32(buffer, 28); + + if (checksum != calculateChecksum(buffer)) { + return false; + } + + return true; + } + + /** + * Get the ino associated with this buffer. + * + * @param buffer + */ + public static final int getIno(byte[] buffer) { + return convert32(buffer, 20); + } + + /** + * Read 8-byte integer from buffer. 
+ * + * @param buffer + * @param offset + * @return the 8-byte entry as a long + */ + public static final long convert64(byte[] buffer, int offset) { + long i = 0; + i += (((long) buffer[offset + 7]) << 56); + i += (((long) buffer[offset + 6] << 48) & 0x00FF000000000000L); + i += (((long) buffer[offset + 5] << 40) & 0x0000FF0000000000L); + i += (((long) buffer[offset + 4] << 32) & 0x000000FF00000000L); + i += (((long) buffer[offset + 3] << 24) & 0x00000000FF000000L); + i += (((long) buffer[offset + 2] << 16) & 0x0000000000FF0000L); + i += (((long) buffer[offset + 1] << 8) & 0x000000000000FF00L); + i += (buffer[offset] & 0x00000000000000FFL); + + return i; + } + + /** + * Read 4-byte integer from buffer. + * + * @param buffer + * @param offset + * @return the 4-byte entry as an int + */ + public static final int convert32(byte[] buffer, int offset) { + int i = 0; + i = buffer[offset + 3] << 24; + i += (buffer[offset + 2] << 16) & 0x00FF0000; + i += (buffer[offset + 1] << 8) & 0x0000FF00; + i += buffer[offset] & 0x000000FF; + + return i; + } + + /** + * Read 2-byte integer from buffer. + * + * @param buffer + * @param offset + * @return the 2-byte entry as an int + */ + public static final int convert16(byte[] buffer, int offset) { + int i = 0; + i += (buffer[offset + 1] << 8) & 0x0000FF00; + i += buffer[offset] & 0x000000FF; + + return i; + } + + /** + * Decodes a byte array to a string. 
+ */ + static String decode(ZipEncoding encoding, byte[] b, int offset, int len) + throws IOException { + byte[] copy = new byte[len]; + System.arraycopy(b, offset, copy, 0, len); + return encoding.decode(copy); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/InvalidFormatException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/InvalidFormatException.java new file mode 100644 index 000000000..2d8a9ac4a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/InvalidFormatException.java @@ -0,0 +1,43 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + + +/** + * Invalid Format Exception. There was an error decoding a + * tape segment header. 
+ */ +public class InvalidFormatException extends DumpArchiveException { + private static final long serialVersionUID = 1L; + protected long offset; + + public InvalidFormatException() { + super("there was an error decoding a tape segment"); + } + + public InvalidFormatException(long offset) { + super("there was an error decoding a tape segment header at offset " + + offset + "."); + this.offset = offset; + } + + public long getOffset() { + return offset; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/ShortFileException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/ShortFileException.java new file mode 100644 index 000000000..e06c97cc0 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/ShortFileException.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + + +/** + * Short File Exception. There was an unexpected EOF when reading + * the input stream. 
+ */ +public class ShortFileException extends DumpArchiveException { + private static final long serialVersionUID = 1L; + + public ShortFileException() { + super("unexpected EOF"); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/TapeInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/TapeInputStream.java new file mode 100644 index 000000000..d7a9a2bd7 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/TapeInputStream.java @@ -0,0 +1,351 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; + +import java.util.Arrays; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; + +import org.apache.commons.compress.utils.IOUtils; + +/** + * Filter stream that mimics a physical tape drive capable of compressing + * the data stream. 
+ * + * @NotThreadSafe + */ +class TapeInputStream extends FilterInputStream { + private byte[] blockBuffer = new byte[DumpArchiveConstants.TP_SIZE]; + private int currBlkIdx = -1; + private int blockSize = DumpArchiveConstants.TP_SIZE; + private static final int recordSize = DumpArchiveConstants.TP_SIZE; + private int readOffset = DumpArchiveConstants.TP_SIZE; + private boolean isCompressed = false; + private long bytesRead = 0; + + /** + * Constructor + */ + public TapeInputStream(InputStream in) { + super(in); + } + + /** + * Set the DumpArchive Buffer's block size. We need to sync the block size with the + * dump archive's actual block size since compression is handled at the + * block level. + * + * @param recsPerBlock + * records per block + * @param isCompressed + * true if the archive is compressed + * @throws IOException + * more than one block has been read + * @throws IOException + * there was an error reading additional blocks. + */ + public void resetBlockSize(int recsPerBlock, boolean isCompressed) + throws IOException { + this.isCompressed = isCompressed; + + blockSize = recordSize * recsPerBlock; + + // save first block in case we need it again + byte[] oldBuffer = blockBuffer; + + // read rest of new block + blockBuffer = new byte[blockSize]; + System.arraycopy(oldBuffer, 0, blockBuffer, 0, recordSize); + readFully(blockBuffer, recordSize, blockSize - recordSize); + + this.currBlkIdx = 0; + this.readOffset = recordSize; + } + + /** + * @see java.io.InputStream#available + */ + @Override + public int available() throws IOException { + if (readOffset < blockSize) { + return blockSize - readOffset; + } + + return in.available(); + } + + /** + * @see java.io.InputStream#read() + */ + @Override + public int read() throws IOException { + throw new IllegalArgumentException( + "all reads must be multiple of record size (" + recordSize + + " bytes."); + } + + /** + * {@inheritDoc} + * + *

reads the full given length unless EOF is reached.

+ * + * @param len length to read, must be a multiple of the stream's + * record size + */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + if ((len % recordSize) != 0) { + throw new IllegalArgumentException( + "all reads must be multiple of record size (" + recordSize + + " bytes."); + } + + int bytes = 0; + + while (bytes < len) { + // we need to read from the underlying stream. + // this will reset readOffset value. + // return -1 if there's a problem. + if ((readOffset == blockSize) && !readBlock(true)) { + return -1; + } + + int n = 0; + + if ((readOffset + (len - bytes)) <= blockSize) { + // we can read entirely from the buffer. + n = len - bytes; + } else { + // copy what we can from the buffer. + n = blockSize - readOffset; + } + + // copy data, increment counters. + System.arraycopy(blockBuffer, readOffset, b, off, n); + readOffset += n; + bytes += n; + off += n; + } + + return bytes; + } + + /** + * Skip bytes. Same as read but without the arraycopy. + * + *

skips the full given length unless EOF is reached.

+ * + * @param len length to read, must be a multiple of the stream's + * record size + */ + @Override + public long skip(long len) throws IOException { + if ((len % recordSize) != 0) { + throw new IllegalArgumentException( + "all reads must be multiple of record size (" + recordSize + + " bytes."); + } + + long bytes = 0; + + while (bytes < len) { + // we need to read from the underlying stream. + // this will reset readOffset value. We do not perform + // any decompression if we won't eventually read the data. + // return -1 if there's a problem. + if ((readOffset == blockSize) && + !readBlock((len - bytes) < blockSize)) { + return -1; + } + + long n = 0; + + if ((readOffset + (len - bytes)) <= blockSize) { + // we can read entirely from the buffer. + n = len - bytes; + } else { + // copy what we can from the buffer. + n = blockSize - readOffset; + } + + // do not copy data but still increment counters. + readOffset += n; + bytes += n; + } + + return bytes; + } + + /** + * Close the input stream. + * + * @throws IOException on error + */ + @Override + public void close() throws IOException { + if (in != null && in != System.in) { + in.close(); + } + } + + /** + * Peek at the next record from the input stream and return the data. + * + * @return The record data. + * @throws IOException on error + */ + public byte[] peek() throws IOException { + // we need to read from the underlying stream. This + // isn't a problem since it would be the first step in + // any subsequent read() anyway. + if ((readOffset == blockSize) && !readBlock(true)) { + return null; + } + + // copy data, increment counters. + byte[] b = new byte[recordSize]; + System.arraycopy(blockBuffer, readOffset, b, 0, b.length); + + return b; + } + + /** + * Read a record from the input stream and return the data. + * + * @return The record data. 
+ * @throws IOException on error + */ + public byte[] readRecord() throws IOException { + byte[] result = new byte[recordSize]; + + if (-1 == read(result, 0, result.length)) { + throw new ShortFileException(); + } + + return result; + } + + /** + * Read next block. All decompression is handled here. + * + * @param decompress if false the buffer will not be decompressed. + * This is an optimization for longer seeks. + * @return false if End-Of-File, else true + */ + private boolean readBlock(boolean decompress) throws IOException { + boolean success = true; + + if (in == null) { + throw new IOException("input buffer is closed"); + } + + if (!isCompressed || (currBlkIdx == -1)) { + // file is not compressed + success = readFully(blockBuffer, 0, blockSize); + bytesRead += blockSize; + } else { + if (!readFully(blockBuffer, 0, 4)) { + return false; + } + bytesRead += 4; + + int h = DumpArchiveUtil.convert32(blockBuffer, 0); + boolean compressed = (h & 0x01) == 0x01; + + if (!compressed) { + // file is compressed but this block is not. + success = readFully(blockBuffer, 0, blockSize); + bytesRead += blockSize; + } else { + // this block is compressed. + int flags = (h >> 1) & 0x07; + int length = (h >> 4) & 0x0FFFFFFF; + byte[] compBuffer = new byte[length]; + success = readFully(compBuffer, 0, length); + bytesRead += length; + + if (!decompress) { + // just in case someone reads the data. 
+ Arrays.fill(blockBuffer, (byte) 0); + } else { + switch (DumpArchiveConstants.COMPRESSION_TYPE.find(flags & + 0x03)) { + case ZLIB: + + try { + Inflater inflator = new Inflater(); + inflator.setInput(compBuffer, 0, compBuffer.length); + length = inflator.inflate(blockBuffer); + + if (length != blockSize) { + throw new ShortFileException(); + } + + inflator.end(); + } catch (DataFormatException e) { + throw new DumpArchiveException("bad data", e); + } + + break; + + case BZLIB: + throw new UnsupportedCompressionAlgorithmException( + "BZLIB2"); + + case LZO: + throw new UnsupportedCompressionAlgorithmException( + "LZO"); + + default: + throw new UnsupportedCompressionAlgorithmException(); + } + } + } + } + + currBlkIdx++; + readOffset = 0; + + return success; + } + + /** + * Read buffer + */ + private boolean readFully(byte[] b, int off, int len) + throws IOException { + int count = IOUtils.readFully(in, b, off, len); + if (count < len) { + throw new ShortFileException(); + } + + return true; + } + + /** + * Get number of bytes read. + */ + public long getBytesRead() { + return bytesRead; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.java new file mode 100644 index 000000000..333aeacd6 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnrecognizedFormatException.java @@ -0,0 +1,32 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + + +/** + * Unrecognized Format Exception. This is either not a recognized dump archive or there's + * a bad tape segment header. + */ +public class UnrecognizedFormatException extends DumpArchiveException { + private static final long serialVersionUID = 1L; + + public UnrecognizedFormatException() { + super("this is not a recognized format."); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.java new file mode 100644 index 000000000..3160feb24 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/UnsupportedCompressionAlgorithmException.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.dump; + + +/** + * Unsupported compression algorithm. The dump archive uses an unsupported + * compression algorithm (BZLIB2 or LZO). + */ +public class UnsupportedCompressionAlgorithmException + extends DumpArchiveException { + private static final long serialVersionUID = 1L; + + public UnsupportedCompressionAlgorithmException() { + super("this file uses an unsupported compression algorithm."); + } + + public UnsupportedCompressionAlgorithmException(String alg) { + super("this file uses an unsupported compression algorithm: " + alg + + "."); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/package.html new file mode 100644 index 000000000..72f3c68c4 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/dump/package.html @@ -0,0 +1,56 @@ + + + +

This package provides stream classes for reading archives + using the Unix DUMP format. This format is similar to (and + contemporary with) TAR but reads the raw filesystem directly. + This means that writers are filesystem-specific even though the + created archives are filesystem-agnostic. +

+ +

Unlike other formats DUMP offers clean support for sparse files, + extended attributes, and other file metadata. In addition DUMP + supports incremental dump files can capture (most) file deletion. + It also provides a native form of compression and will soon support + native encryption as well. +

+ +

In practice TAR archives are used for both distribution + and backups. DUMP archives are used exclusively for backups. +

+ +

Like any 30+-year-old application there are a number of variants. + For pragmatic reasons we will only support archives with the + 'new' tape header and inode formats. Other restrictions: + +

    +
  • We only support ZLIB compression. The format + also permits LZO and BZLIB compression.
  • +
  • Sparse files will have the holes filled.
  • +
  • MacOS finder and resource streams are ignored.
  • +
  • Extended attributes are not currently provided.
  • +
  • SELinux labels are not currently provided.
  • +
+

+ +

As of Apache Commons Compress 1.3 support for the dump format is + read-only.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java new file mode 100644 index 000000000..d284ad988 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveEntry.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.archivers.jar; + +import java.security.cert.Certificate; +import java.util.jar.Attributes; +import java.util.jar.JarEntry; +import java.util.zip.ZipEntry; +import java.util.zip.ZipException; + +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; + +/** + * + * @NotThreadSafe (parent is not thread-safe) + */ +public class JarArchiveEntry extends ZipArchiveEntry { + + // These are always null - see https://issues.apache.org/jira/browse/COMPRESS-18 for discussion + private final Attributes manifestAttributes = null; + private final Certificate[] certificates = null; + + public JarArchiveEntry(ZipEntry entry) throws ZipException { + super(entry); + } + + public JarArchiveEntry(String name) { + super(name); + } + + public JarArchiveEntry(ZipArchiveEntry entry) throws ZipException { + super(entry); + } + + public JarArchiveEntry(JarEntry entry) throws ZipException { + super(entry); + + } + + /** + * This method is not implemented and won't ever be. + * The JVM equivalent has a different name {@link java.util.jar.JarEntry#getAttributes()} + * + * @deprecated since 1.5, do not use; always returns null + * @return Always returns null. + */ + @Deprecated + public Attributes getManifestAttributes() { + return manifestAttributes; + } + + /** + * Return a copy of the list of certificates or null if there are none. 
+ * + * @return Always returns null in the current implementation + * + * @deprecated since 1.5, not currently implemented + */ + @Deprecated + public Certificate[] getCertificates() { + if (certificates != null) { // never true currently + Certificate[] certs = new Certificate[certificates.length]; + System.arraycopy(certificates, 0, certs, 0, certs.length); + return certs; + } + /* + * Note, the method + * Certificate[] java.util.jar.JarEntry.getCertificates() + * also returns null or the list of certificates (but not copied) + */ + return null; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java new file mode 100644 index 000000000..d051a4b6d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveInputStream.java @@ -0,0 +1,62 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.archivers.jar; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveInputStream; + +/** + * Implements an input stream that can read entries from jar files. + * + * @NotThreadSafe + */ +public class JarArchiveInputStream extends ZipArchiveInputStream { + + public JarArchiveInputStream( final InputStream inputStream ) { + super(inputStream); + } + + public JarArchiveEntry getNextJarEntry() throws IOException { + ZipArchiveEntry entry = getNextZipEntry(); + return entry == null ? null : new JarArchiveEntry(entry); + } + + @Override + public ArchiveEntry getNextEntry() throws IOException { + return getNextJarEntry(); + } + + /** + * Checks if the signature matches what is expected for a jar file + * (in this case it is the same as for a zip file). + * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is a jar archive stream, false otherwise + */ + public static boolean matches(byte[] signature, int length ) { + return ZipArchiveInputStream.matches(signature, length); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java new file mode 100644 index 000000000..f372ad760 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/JarArchiveOutputStream.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.jar; + +import java.io.IOException; +import java.io.OutputStream; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.zip.JarMarker; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipArchiveOutputStream; + +/** + * Subclass that adds a special extra field to the very first entry + * which allows the created archive to be used as an executable jar on + * Solaris. 
+ * + * @NotThreadSafe + */ +public class JarArchiveOutputStream extends ZipArchiveOutputStream { + + private boolean jarMarkerAdded = false; + + public JarArchiveOutputStream(final OutputStream out) { + super(out); + } + + // @throws ClassCastException if entry is not an instance of ZipArchiveEntry + @Override + public void putArchiveEntry(ArchiveEntry ze) throws IOException { + if (!jarMarkerAdded) { + ((ZipArchiveEntry)ze).addAsFirstExtraField(JarMarker.getInstance()); + jarMarkerAdded = true; + } + super.putArchiveEntry(ze); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/package.html new file mode 100644 index 000000000..09829ae6a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/jar/package.html @@ -0,0 +1,25 @@ + + + +

Provides stream classes for reading and writing archives using + the ZIP format with some extensions for the special case of JAR + archives.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/package.html new file mode 100644 index 000000000..df1922b4a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/package.html @@ -0,0 +1,24 @@ + + + +

Provides a unified API and factories for dealing with archives + in different formats.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java new file mode 100644 index 000000000..cc5db1ca6 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveEntry.java @@ -0,0 +1,1073 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.tar; + +import java.io.File; +import java.io.IOException; +import java.util.Date; +import java.util.Locale; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.utils.ArchiveUtils; + +/** + * This class represents an entry in a Tar archive. It consists + * of the entry's header, as well as the entry's File. Entries + * can be instantiated in one of three ways, depending on how + * they are to be used. + *

+ * TarEntries that are created from the header bytes read from + * an archive are instantiated with the TarEntry( byte[] ) + * constructor. These entries will be used when extracting from + * or listing the contents of an archive. These entries have their + * header filled in using the header bytes. They also set the File + * to null, since they reference an archive entry not a file. + *

+ * TarEntries that are created from Files that are to be written + * into an archive are instantiated with the TarEntry( File ) + * constructor. These entries have their header filled in using + * the File's information. They also keep a reference to the File + * for convenience when writing entries. + *

+ * Finally, TarEntries can be constructed from nothing but a name. + * This allows the programmer to construct the entry by hand, for + * instance when only an InputStream is available for writing to + * the archive, and the header information is constructed from + * other information. In this case the header fields are set to + * defaults and the File is set to null. + * + *

+ * The C structure for a Tar Entry's header is: + *

+ * struct header {
+ * char name[100];     // TarConstants.NAMELEN    - offset   0
+ * char mode[8];       // TarConstants.MODELEN    - offset 100
+ * char uid[8];        // TarConstants.UIDLEN     - offset 108
+ * char gid[8];        // TarConstants.GIDLEN     - offset 116
+ * char size[12];      // TarConstants.SIZELEN    - offset 124
+ * char mtime[12];     // TarConstants.MODTIMELEN - offset 136
+ * char chksum[8];     // TarConstants.CHKSUMLEN  - offset 148
+ * char linkflag[1];   //                         - offset 156
+ * char linkname[100]; // TarConstants.NAMELEN    - offset 157
+ * The following fields are only present in new-style POSIX tar archives:
+ * char magic[6];      // TarConstants.MAGICLEN   - offset 257
+ * char version[2];    // TarConstants.VERSIONLEN - offset 263
+ * char uname[32];     // TarConstants.UNAMELEN   - offset 265
+ * char gname[32];     // TarConstants.GNAMELEN   - offset 297
+ * char devmajor[8];   // TarConstants.DEVLEN     - offset 329
+ * char devminor[8];   // TarConstants.DEVLEN     - offset 337
+ * char prefix[155];   // TarConstants.PREFIXLEN  - offset 345
+ * // Used if "name" field is not long enough to hold the path
+ * char pad[12];       // NULs                    - offset 500
+ * } header;
+ * All unused bytes are set to null.
+ * New-style GNU tar files are slightly different from the above.
+ * For values of size larger than 077777777777L (11 7s)
+ * or uid and gid larger than 07777777L (7 7s)
+ * the sign bit of the first byte is set, and the rest of the
+ * field is the binary representation of the number.
+ * See TarUtils.parseOctalOrBinary.
+ * 
+ * + *

+ * The C structure for a old GNU Tar Entry's header is: + *

+ * struct oldgnu_header {
+ * char unused_pad1[345]; // TarConstants.PAD1LEN_GNU       - offset 0
+ * char atime[12];        // TarConstants.ATIMELEN_GNU      - offset 345
+ * char ctime[12];        // TarConstants.CTIMELEN_GNU      - offset 357
+ * char offset[12];       // TarConstants.OFFSETLEN_GNU     - offset 369
+ * char longnames[4];     // TarConstants.LONGNAMESLEN_GNU  - offset 381
+ * char unused_pad2;      // TarConstants.PAD2LEN_GNU       - offset 385
+ * struct sparse sp[4];   // TarConstants.SPARSELEN_GNU     - offset 386
+ * char isextended;       // TarConstants.ISEXTENDEDLEN_GNU - offset 482
+ * char realsize[12];     // TarConstants.REALSIZELEN_GNU   - offset 483
+ * char unused_pad[17];   // TarConstants.PAD3LEN_GNU       - offset 495
+ * };
+ * 
+ * Whereas, "struct sparse" is: + *
+ * struct sparse {
+ * char offset[12];   // offset 0
+ * char numbytes[12]; // offset 12
+ * };
+ * 
+ * + * @NotThreadSafe + */ + +public class TarArchiveEntry implements TarConstants, ArchiveEntry { + /** The entry's name. */ + private String name = ""; + + /** The entry's permission mode. */ + private int mode; + + /** The entry's user id. */ + private int userId = 0; + + /** The entry's group id. */ + private int groupId = 0; + + /** The entry's size. */ + private long size = 0; + + /** The entry's modification time. */ + private long modTime; + + /** If the header checksum is reasonably correct. */ + private boolean checkSumOK; + + /** The entry's link flag. */ + private byte linkFlag; + + /** The entry's link name. */ + private String linkName = ""; + + /** The entry's magic tag. */ + private String magic = MAGIC_POSIX; + /** The version of the format */ + private String version = VERSION_POSIX; + + /** The entry's user name. */ + private String userName; + + /** The entry's group name. */ + private String groupName = ""; + + /** The entry's major device number. */ + private int devMajor = 0; + + /** The entry's minor device number. */ + private int devMinor = 0; + + /** If an extension sparse header follows. */ + private boolean isExtended; + + /** The entry's real size in case of a sparse file. */ + private long realSize; + + /** The entry's file reference */ + private final File file; + + /** Maximum length of a user's name in the tar file */ + public static final int MAX_NAMELEN = 31; + + /** Default permissions bits for directories */ + public static final int DEFAULT_DIR_MODE = 040755; + + /** Default permissions bits for files */ + public static final int DEFAULT_FILE_MODE = 0100644; + + /** Convert millis to seconds */ + public static final int MILLIS_PER_SECOND = 1000; + + /** + * Construct an empty entry and prepares the header values. 
+ */ + private TarArchiveEntry() { + String user = System.getProperty("user.name", ""); + + if (user.length() > MAX_NAMELEN) { + user = user.substring(0, MAX_NAMELEN); + } + + this.userName = user; + this.file = null; + } + + /** + * Construct an entry with only a name. This allows the programmer + * to construct the entry's header "by hand". File is set to null. + * + * @param name the entry name + */ + public TarArchiveEntry(String name) { + this(name, false); + } + + /** + * Construct an entry with only a name. This allows the programmer + * to construct the entry's header "by hand". File is set to null. + * + * @param name the entry name + * @param preserveLeadingSlashes whether to allow leading slashes + * in the name. + * + * @since 1.1 + */ + public TarArchiveEntry(String name, boolean preserveLeadingSlashes) { + this(); + + name = normalizeFileName(name, preserveLeadingSlashes); + boolean isDir = name.endsWith("/"); + + this.name = name; + this.mode = isDir ? DEFAULT_DIR_MODE : DEFAULT_FILE_MODE; + this.linkFlag = isDir ? LF_DIR : LF_NORMAL; + this.modTime = new Date().getTime() / MILLIS_PER_SECOND; + this.userName = ""; + } + + /** + * Construct an entry with a name and a link flag. + * + * @param name the entry name + * @param linkFlag the entry link flag. + */ + public TarArchiveEntry(String name, byte linkFlag) { + this(name, linkFlag, false); + } + + /** + * Construct an entry with a name and a link flag. + * + * @param name the entry name + * @param linkFlag the entry link flag. + * @param preserveLeadingSlashes whether to allow leading slashes + * in the name. + * + * @since 1.5 + */ + public TarArchiveEntry(String name, byte linkFlag, boolean preserveLeadingSlashes) { + this(name, preserveLeadingSlashes); + this.linkFlag = linkFlag; + if (linkFlag == LF_GNUTYPE_LONGNAME) { + magic = MAGIC_GNU; + version = VERSION_GNU_SPACE; + } + } + + /** + * Construct an entry for a file. 
File is set to file, and the + * header is constructed from information from the file. + * The name is set from the normalized file path. + * + * @param file The file that the entry represents. + */ + public TarArchiveEntry(File file) { + this(file, normalizeFileName(file.getPath(), false)); + } + + /** + * Construct an entry for a file. File is set to file, and the + * header is constructed from information from the file. + * + * @param file The file that the entry represents. + * @param fileName the name to be used for the entry. + */ + public TarArchiveEntry(File file, String fileName) { + this.file = file; + + if (file.isDirectory()) { + this.mode = DEFAULT_DIR_MODE; + this.linkFlag = LF_DIR; + + int nameLength = fileName.length(); + if (nameLength == 0 || fileName.charAt(nameLength - 1) != '/') { + this.name = fileName + "/"; + } else { + this.name = fileName; + } + } else { + this.mode = DEFAULT_FILE_MODE; + this.linkFlag = LF_NORMAL; + this.size = file.length(); + this.name = fileName; + } + + this.modTime = file.lastModified() / MILLIS_PER_SECOND; + this.userName = ""; + } + + /** + * Construct an entry from an archive's header bytes. File is set + * to null. + * + * @param headerBuf The header bytes from a tar archive entry. + * @throws IllegalArgumentException if any of the numeric fields have an invalid format + */ + public TarArchiveEntry(byte[] headerBuf) { + this(); + parseTarHeader(headerBuf); + } + + /** + * Construct an entry from an archive's header bytes. File is set + * to null. + * + * @param headerBuf The header bytes from a tar archive entry. + * @param encoding encoding to use for file names + * @since 1.4 + * @throws IllegalArgumentException if any of the numeric fields have an invalid format + */ + public TarArchiveEntry(byte[] headerBuf, ZipEncoding encoding) + throws IOException { + this(); + parseTarHeader(headerBuf, encoding); + } + + /** + * Determine if the two entries are equal. 
Equality is determined + * by the header names being equal. + * + * @param it Entry to be checked for equality. + * @return True if the entries are equal. + */ + public boolean equals(TarArchiveEntry it) { + return getName().equals(it.getName()); + } + + /** + * Determine if the two entries are equal. Equality is determined + * by the header names being equal. + * + * @param it Entry to be checked for equality. + * @return True if the entries are equal. + */ + @Override + public boolean equals(Object it) { + if (it == null || getClass() != it.getClass()) { + return false; + } + return equals((TarArchiveEntry) it); + } + + /** + * Hashcodes are based on entry names. + * + * @return the entry hashcode + */ + @Override + public int hashCode() { + return getName().hashCode(); + } + + /** + * Determine if the given entry is a descendant of this entry. + * Descendancy is determined by the name of the descendant + * starting with this entry's name. + * + * @param desc Entry to be checked as a descendent of this. + * @return True if entry is a descendant of this. + */ + public boolean isDescendent(TarArchiveEntry desc) { + return desc.getName().startsWith(getName()); + } + + /** + * Get this entry's name. + * + * @return This entry's name. + */ + public String getName() { + return name.toString(); + } + + /** + * Set this entry's name. + * + * @param name This entry's new name. + */ + public void setName(String name) { + this.name = normalizeFileName(name, false); + } + + /** + * Set the mode for this entry + * + * @param mode the mode for this entry + */ + public void setMode(int mode) { + this.mode = mode; + } + + /** + * Get this entry's link name. + * + * @return This entry's link name. + */ + public String getLinkName() { + return linkName.toString(); + } + + /** + * Set this entry's link name. + * + * @param link the link name to use. + * + * @since 1.1 + */ + public void setLinkName(String link) { + this.linkName = link; + } + + /** + * Get this entry's user id. 
+ * + * @return This entry's user id. + */ + public int getUserId() { + return userId; + } + + /** + * Set this entry's user id. + * + * @param userId This entry's new user id. + */ + public void setUserId(int userId) { + this.userId = userId; + } + + /** + * Get this entry's group id. + * + * @return This entry's group id. + */ + public int getGroupId() { + return groupId; + } + + /** + * Set this entry's group id. + * + * @param groupId This entry's new group id. + */ + public void setGroupId(int groupId) { + this.groupId = groupId; + } + + /** + * Get this entry's user name. + * + * @return This entry's user name. + */ + public String getUserName() { + return userName.toString(); + } + + /** + * Set this entry's user name. + * + * @param userName This entry's new user name. + */ + public void setUserName(String userName) { + this.userName = userName; + } + + /** + * Get this entry's group name. + * + * @return This entry's group name. + */ + public String getGroupName() { + return groupName.toString(); + } + + /** + * Set this entry's group name. + * + * @param groupName This entry's new group name. + */ + public void setGroupName(String groupName) { + this.groupName = groupName; + } + + /** + * Convenience method to set this entry's group and user ids. + * + * @param userId This entry's new user id. + * @param groupId This entry's new group id. + */ + public void setIds(int userId, int groupId) { + setUserId(userId); + setGroupId(groupId); + } + + /** + * Convenience method to set this entry's group and user names. + * + * @param userName This entry's new user name. + * @param groupName This entry's new group name. + */ + public void setNames(String userName, String groupName) { + setUserName(userName); + setGroupName(groupName); + } + + /** + * Set this entry's modification time. The parameter passed + * to this method is in "Java time". + * + * @param time This entry's new modification time. 
+ */ + public void setModTime(long time) { + modTime = time / MILLIS_PER_SECOND; + } + + /** + * Set this entry's modification time. + * + * @param time This entry's new modification time. + */ + public void setModTime(Date time) { + modTime = time.getTime() / MILLIS_PER_SECOND; + } + + /** + * Set this entry's modification time. + * + * @return time This entry's new modification time. + */ + public Date getModTime() { + return new Date(modTime * MILLIS_PER_SECOND); + } + + public Date getLastModifiedDate() { + return getModTime(); + } + + /** + * Get this entry's checksum status. + * + * @return if the header checksum is reasonably correct + * @see TarUtils#verifyCheckSum(byte[]) + * @since 1.5 + */ + public boolean isCheckSumOK() { + return checkSumOK; + } + + /** + * Get this entry's file. + * + * @return This entry's file. + */ + public File getFile() { + return file; + } + + /** + * Get this entry's mode. + * + * @return This entry's mode. + */ + public int getMode() { + return mode; + } + + /** + * Get this entry's file size. + * + * @return This entry's file size. + */ + public long getSize() { + return size; + } + + /** + * Set this entry's file size. + * + * @param size This entry's new file size. + * @throws IllegalArgumentException if the size is < 0. + */ + public void setSize(long size) { + if (size < 0){ + throw new IllegalArgumentException("Size is out of range: "+size); + } + this.size = size; + } + + /** + * Get this entry's major device number. + * + * @return This entry's major device number. + * @since 1.4 + */ + public int getDevMajor() { + return devMajor; + } + + /** + * Set this entry's major device number. + * + * @param devNo This entry's major device number. + * @throws IllegalArgumentException if the devNo is < 0. 
+ * @since 1.4 + */ + public void setDevMajor(int devNo) { + if (devNo < 0){ + throw new IllegalArgumentException("Major device number is out of " + + "range: " + devNo); + } + this.devMajor = devNo; + } + + /** + * Get this entry's minor device number. + * + * @return This entry's minor device number. + * @since 1.4 + */ + public int getDevMinor() { + return devMinor; + } + + /** + * Set this entry's minor device number. + * + * @param devNo This entry's minor device number. + * @throws IllegalArgumentException if the devNo is < 0. + * @since 1.4 + */ + public void setDevMinor(int devNo) { + if (devNo < 0){ + throw new IllegalArgumentException("Minor device number is out of " + + "range: " + devNo); + } + this.devMinor = devNo; + } + + /** + * Indicates in case of a sparse file if an extension sparse header + * follows. + * + * @return true if an extension sparse header follows. + */ + public boolean isExtended() { + return isExtended; + } + + /** + * Get this entry's real file size in case of a sparse file. + * + * @return This entry's real file size. + */ + public long getRealSize() { + return realSize; + } + + /** + * Indicate if this entry is a GNU sparse block + * + * @return true if this is a sparse extension provided by GNU tar + */ + public boolean isGNUSparse() { + return linkFlag == LF_GNUTYPE_SPARSE; + } + + /** + * Indicate if this entry is a GNU long linkname block + * + * @return true if this is a long name extension provided by GNU tar + */ + public boolean isGNULongLinkEntry() { + return linkFlag == LF_GNUTYPE_LONGLINK + && name.equals(GNU_LONGLINK); + } + + /** + * Indicate if this entry is a GNU long name block + * + * @return true if this is a long name extension provided by GNU tar + */ + public boolean isGNULongNameEntry() { + return linkFlag == LF_GNUTYPE_LONGNAME + && name.equals(GNU_LONGLINK); + } + + /** + * Check if this is a Pax header. + * + * @return {@code true} if this is a Pax header. 
+ * + * @since 1.1 + * + */ + public boolean isPaxHeader(){ + return linkFlag == LF_PAX_EXTENDED_HEADER_LC + || linkFlag == LF_PAX_EXTENDED_HEADER_UC; + } + + /** + * Check if this is a Pax header. + * + * @return {@code true} if this is a Pax header. + * + * @since 1.1 + */ + public boolean isGlobalPaxHeader(){ + return linkFlag == LF_PAX_GLOBAL_EXTENDED_HEADER; + } + + /** + * Return whether or not this entry represents a directory. + * + * @return True if this entry is a directory. + */ + public boolean isDirectory() { + if (file != null) { + return file.isDirectory(); + } + + if (linkFlag == LF_DIR) { + return true; + } + + if (getName().endsWith("/")) { + return true; + } + + return false; + } + + /** + * Check if this is a "normal file" + * + * @since 1.2 + */ + public boolean isFile() { + if (file != null) { + return file.isFile(); + } + if (linkFlag == LF_OLDNORM || linkFlag == LF_NORMAL) { + return true; + } + return !getName().endsWith("/"); + } + + /** + * Check if this is a symbolic link entry. + * + * @since 1.2 + */ + public boolean isSymbolicLink() { + return linkFlag == LF_SYMLINK; + } + + /** + * Check if this is a link entry. + * + * @since 1.2 + */ + public boolean isLink() { + return linkFlag == LF_LINK; + } + + /** + * Check if this is a character device entry. + * + * @since 1.2 + */ + public boolean isCharacterDevice() { + return linkFlag == LF_CHR; + } + + /** + * Check if this is a block device entry. + * + * @since 1.2 + */ + public boolean isBlockDevice() { + return linkFlag == LF_BLK; + } + + /** + * Check if this is a FIFO (pipe) entry. + * + * @since 1.2 + */ + public boolean isFIFO() { + return linkFlag == LF_FIFO; + } + + /** + * If this entry represents a file, and the file is a directory, return + * an array of TarEntries for this entry's children. + * + * @return An array of TarEntry's for this entry's children. 
+ */ + public TarArchiveEntry[] getDirectoryEntries() { + if (file == null || !file.isDirectory()) { + return new TarArchiveEntry[0]; + } + + String[] list = file.list(); + TarArchiveEntry[] result = new TarArchiveEntry[list.length]; + + for (int i = 0; i < list.length; ++i) { + result[i] = new TarArchiveEntry(new File(file, list[i])); + } + + return result; + } + + /** + * Write an entry's header information to a header buffer. + * + *

This method does not use the star/GNU tar/BSD tar extensions.

+ * + * @param outbuf The tar entry header buffer to fill in. + */ + public void writeEntryHeader(byte[] outbuf) { + try { + writeEntryHeader(outbuf, TarUtils.DEFAULT_ENCODING, false); + } catch (IOException ex) { + try { + writeEntryHeader(outbuf, TarUtils.FALLBACK_ENCODING, false); + } catch (IOException ex2) { + // impossible + throw new RuntimeException(ex2); + } + } + } + + /** + * Write an entry's header information to a header buffer. + * + * @param outbuf The tar entry header buffer to fill in. + * @param encoding encoding to use when writing the file name. + * @param starMode whether to use the star/GNU tar/BSD tar + * extension for numeric fields if their value doesn't fit in the + * maximum size of standard tar archives + * @since 1.4 + */ + public void writeEntryHeader(byte[] outbuf, ZipEncoding encoding, + boolean starMode) throws IOException { + int offset = 0; + + offset = TarUtils.formatNameBytes(name, outbuf, offset, NAMELEN, + encoding); + offset = writeEntryHeaderField(mode, outbuf, offset, MODELEN, starMode); + offset = writeEntryHeaderField(userId, outbuf, offset, UIDLEN, + starMode); + offset = writeEntryHeaderField(groupId, outbuf, offset, GIDLEN, + starMode); + offset = writeEntryHeaderField(size, outbuf, offset, SIZELEN, starMode); + offset = writeEntryHeaderField(modTime, outbuf, offset, MODTIMELEN, + starMode); + + int csOffset = offset; + + for (int c = 0; c < CHKSUMLEN; ++c) { + outbuf[offset++] = (byte) ' '; + } + + outbuf[offset++] = linkFlag; + offset = TarUtils.formatNameBytes(linkName, outbuf, offset, NAMELEN, + encoding); + offset = TarUtils.formatNameBytes(magic, outbuf, offset, MAGICLEN); + offset = TarUtils.formatNameBytes(version, outbuf, offset, VERSIONLEN); + offset = TarUtils.formatNameBytes(userName, outbuf, offset, UNAMELEN, + encoding); + offset = TarUtils.formatNameBytes(groupName, outbuf, offset, GNAMELEN, + encoding); + offset = writeEntryHeaderField(devMajor, outbuf, offset, DEVLEN, + starMode); + offset = 
writeEntryHeaderField(devMinor, outbuf, offset, DEVLEN, + starMode); + + while (offset < outbuf.length) { + outbuf[offset++] = 0; + } + + long chk = TarUtils.computeCheckSum(outbuf); + + TarUtils.formatCheckSumOctalBytes(chk, outbuf, csOffset, CHKSUMLEN); + } + + private int writeEntryHeaderField(long value, byte[] outbuf, int offset, + int length, boolean starMode) { + if (!starMode && (value < 0 + || value >= 1l << 3 * (length - 1))) { + // value doesn't fit into field when written as octal + // number, will be written to PAX header or causes an + // error + return TarUtils.formatLongOctalBytes(0, outbuf, offset, length); + } + return TarUtils.formatLongOctalOrBinaryBytes(value, outbuf, offset, + length); + } + + /** + * Parse an entry's header information from a header buffer. + * + * @param header The tar entry header buffer to get information from. + * @throws IllegalArgumentException if any of the numeric fields have an invalid format + */ + public void parseTarHeader(byte[] header) { + try { + parseTarHeader(header, TarUtils.DEFAULT_ENCODING); + } catch (IOException ex) { + try { + parseTarHeader(header, TarUtils.DEFAULT_ENCODING, true); + } catch (IOException ex2) { + // not really possible + throw new RuntimeException(ex2); + } + } + } + + /** + * Parse an entry's header information from a header buffer. + * + * @param header The tar entry header buffer to get information from. + * @param encoding encoding to use for file names + * @since 1.4 + * @throws IllegalArgumentException if any of the numeric fields + * have an invalid format + */ + public void parseTarHeader(byte[] header, ZipEncoding encoding) + throws IOException { + parseTarHeader(header, encoding, false); + } + + private void parseTarHeader(byte[] header, ZipEncoding encoding, + final boolean oldStyle) + throws IOException { + int offset = 0; + + name = oldStyle ? 
TarUtils.parseName(header, offset, NAMELEN) + : TarUtils.parseName(header, offset, NAMELEN, encoding); + offset += NAMELEN; + mode = (int) TarUtils.parseOctalOrBinary(header, offset, MODELEN); + offset += MODELEN; + userId = (int) TarUtils.parseOctalOrBinary(header, offset, UIDLEN); + offset += UIDLEN; + groupId = (int) TarUtils.parseOctalOrBinary(header, offset, GIDLEN); + offset += GIDLEN; + size = TarUtils.parseOctalOrBinary(header, offset, SIZELEN); + offset += SIZELEN; + modTime = TarUtils.parseOctalOrBinary(header, offset, MODTIMELEN); + offset += MODTIMELEN; + checkSumOK = TarUtils.verifyCheckSum(header); + offset += CHKSUMLEN; + linkFlag = header[offset++]; + linkName = oldStyle ? TarUtils.parseName(header, offset, NAMELEN) + : TarUtils.parseName(header, offset, NAMELEN, encoding); + offset += NAMELEN; + magic = TarUtils.parseName(header, offset, MAGICLEN); + offset += MAGICLEN; + version = TarUtils.parseName(header, offset, VERSIONLEN); + offset += VERSIONLEN; + userName = oldStyle ? TarUtils.parseName(header, offset, UNAMELEN) + : TarUtils.parseName(header, offset, UNAMELEN, encoding); + offset += UNAMELEN; + groupName = oldStyle ? TarUtils.parseName(header, offset, GNAMELEN) + : TarUtils.parseName(header, offset, GNAMELEN, encoding); + offset += GNAMELEN; + devMajor = (int) TarUtils.parseOctalOrBinary(header, offset, DEVLEN); + offset += DEVLEN; + devMinor = (int) TarUtils.parseOctalOrBinary(header, offset, DEVLEN); + offset += DEVLEN; + + int type = evaluateType(header); + switch (type) { + case FORMAT_OLDGNU: { + offset += ATIMELEN_GNU; + offset += CTIMELEN_GNU; + offset += OFFSETLEN_GNU; + offset += LONGNAMESLEN_GNU; + offset += PAD2LEN_GNU; + offset += SPARSELEN_GNU; + isExtended = TarUtils.parseBoolean(header, offset); + offset += ISEXTENDEDLEN_GNU; + realSize = TarUtils.parseOctal(header, offset, REALSIZELEN_GNU); + offset += REALSIZELEN_GNU; + break; + } + case FORMAT_POSIX: + default: { + String prefix = oldStyle + ? 
TarUtils.parseName(header, offset, PREFIXLEN) + : TarUtils.parseName(header, offset, PREFIXLEN, encoding); + // SunOS tar -E does not add / to directory names, so fix + // up to be consistent + if (isDirectory() && !name.endsWith("/")){ + name = name + "/"; + } + if (prefix.length() > 0){ + name = prefix + "/" + name; + } + } + } + } + + /** + * Strips Windows' drive letter as well as any leading slashes, + * turns path separators into forward slashes. + */ + private static String normalizeFileName(String fileName, + boolean preserveLeadingSlashes) { + String osname = System.getProperty("os.name").toLowerCase(Locale.ENGLISH); + + if (osname != null) { + + // Strip off drive letters! + // REVIEW Would a better check be "(File.separator == '\')"? + + if (osname.startsWith("windows")) { + if (fileName.length() > 2) { + char ch1 = fileName.charAt(0); + char ch2 = fileName.charAt(1); + + if (ch2 == ':' + && (ch1 >= 'a' && ch1 <= 'z' + || ch1 >= 'A' && ch1 <= 'Z')) { + fileName = fileName.substring(2); + } + } + } else if (osname.indexOf("netware") > -1) { + int colon = fileName.indexOf(':'); + if (colon != -1) { + fileName = fileName.substring(colon + 1); + } + } + } + + fileName = fileName.replace(File.separatorChar, '/'); + + // No absolute pathnames + // Windows (and Posix?) paths can start with "\\NetworkDrive\", + // so we loop on starting /'s. + while (!preserveLeadingSlashes && fileName.startsWith("/")) { + fileName = fileName.substring(1); + } + return fileName; + } + + /** + * Evaluate an entry's header format from a header buffer. + * + * @param header The tar entry header buffer to evaluate the format for.
+ * @return format type + */ + private int evaluateType(byte[] header) { + if (ArchiveUtils.matchAsciiBuffer(MAGIC_GNU, header, MAGIC_OFFSET, MAGICLEN)) { + return FORMAT_OLDGNU; + } + if (ArchiveUtils.matchAsciiBuffer(MAGIC_POSIX, header, MAGIC_OFFSET, MAGICLEN)) { + return FORMAT_POSIX; + } + return 0; + } +} + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java new file mode 100644 index 000000000..2f4ce6eab --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveInputStream.java @@ -0,0 +1,685 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +/* + * This package is based on the work done by Timothy Gerard Endres + * (time@ice.com) to whom the Ant project is very grateful for his great code. 
+ */ + +package org.apache.commons.compress.archivers.tar; + +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.Map.Entry; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; +import org.apache.commons.compress.utils.ArchiveUtils; +import org.apache.commons.compress.utils.CharsetNames; +import org.apache.commons.compress.utils.IOUtils; + +/** + * The TarInputStream reads a UNIX tar archive as an InputStream. + * methods are provided to position at each successive entry in + * the archive, and the read each entry as a normal input stream + * using read(). + * @NotThreadSafe + */ +public class TarArchiveInputStream extends ArchiveInputStream { + + private static final int SMALL_BUFFER_SIZE = 256; + + private final byte[] SMALL_BUF = new byte[SMALL_BUFFER_SIZE]; + + /** The size the TAR header */ + private final int recordSize; + + /** The size of a block */ + private final int blockSize; + + /** True if file has hit EOF */ + private boolean hasHitEOF; + + /** Size of the current entry */ + private long entrySize; + + /** How far into the entry the stream is at */ + private long entryOffset; + + /** An input stream to read from */ + private final InputStream is; + + /** The meta-data about the current entry */ + private TarArchiveEntry currEntry; + + /** The encoding of the file */ + private final ZipEncoding encoding; + + /** + * Constructor for TarInputStream. + * @param is the input stream to use + */ + public TarArchiveInputStream(InputStream is) { + this(is, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE); + } + + /** + * Constructor for TarInputStream. 
+ * @param is the input stream to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveInputStream(InputStream is, String encoding) { + this(is, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE, + encoding); + } + + /** + * Constructor for TarInputStream. + * @param is the input stream to use + * @param blockSize the block size to use + */ + public TarArchiveInputStream(InputStream is, int blockSize) { + this(is, blockSize, TarConstants.DEFAULT_RCDSIZE); + } + + /** + * Constructor for TarInputStream. + * @param is the input stream to use + * @param blockSize the block size to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveInputStream(InputStream is, int blockSize, + String encoding) { + this(is, blockSize, TarConstants.DEFAULT_RCDSIZE, encoding); + } + + /** + * Constructor for TarInputStream. + * @param is the input stream to use + * @param blockSize the block size to use + * @param recordSize the record size to use + */ + public TarArchiveInputStream(InputStream is, int blockSize, int recordSize) { + this(is, blockSize, recordSize, null); + } + + /** + * Constructor for TarInputStream. + * @param is the input stream to use + * @param blockSize the block size to use + * @param recordSize the record size to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveInputStream(InputStream is, int blockSize, int recordSize, + String encoding) { + this.is = is; + this.hasHitEOF = false; + this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + this.recordSize = recordSize; + this.blockSize = blockSize; + } + + /** + * Closes this stream. Calls the TarBuffer's close() method. + * @throws IOException on error + */ + @Override + public void close() throws IOException { + is.close(); + } + + /** + * Get the record size being used by this stream's buffer. + * + * @return The TarBuffer record size. 
+ */ + public int getRecordSize() { + return recordSize; + } + + /** + * Get the available data that can be read from the current + * entry in the archive. This does not indicate how much data + * is left in the entire archive, only in the current entry. + * This value is determined from the entry's size header field + * and the amount of data already read from the current entry. + * Integer.MAX_VALUE is returned in case more than Integer.MAX_VALUE + * bytes are left in the current entry in the archive. + * + * @return The number of available bytes for the current entry. + * @throws IOException for signature + */ + @Override + public int available() throws IOException { + if (entrySize - entryOffset > Integer.MAX_VALUE) { + return Integer.MAX_VALUE; + } + return (int) (entrySize - entryOffset); + } + + /** + * Skip bytes in the input buffer. This skips bytes in the + * current entry's data, not the entire archive, and will + * stop at the end of the current entry's data if the number + * to skip extends beyond that point. + * + * @param numToSkip The number of bytes to skip. + * @return the number actually skipped + * @throws IOException on error + */ + @Override + public long skip(long numToSkip) throws IOException { + + long available = entrySize - entryOffset; + numToSkip = Math.min(numToSkip, available); + + long skipped = IOUtils.skip(is, numToSkip); + count(skipped); + entryOffset += skipped; + return skipped; + } + + /** + * Since we do not support marking just yet, we do nothing. + */ + @Override + public synchronized void reset() { + } + + /** + * Get the next entry in this tar archive. This will skip + * over any remaining data in the current entry, if there + * is one, and place the input stream at the header of the + * next entry, and read the header and instantiate a new + * TarEntry from the header bytes and return that entry. 
+ * If there are no more entries in the archive, null will + * be returned to indicate that the end of the archive has + * been reached. + * + * @return The next TarEntry in the archive, or null. + * @throws IOException on error + */ + public TarArchiveEntry getNextTarEntry() throws IOException { + if (hasHitEOF) { + return null; + } + + if (currEntry != null) { + /* Skip will only go to the end of the current entry */ + skip(Long.MAX_VALUE); + + /* skip to the end of the last record */ + skipRecordPadding(); + } + + byte[] headerBuf = getRecord(); + + if (headerBuf == null) { + /* hit EOF */ + currEntry = null; + return null; + } + + try { + currEntry = new TarArchiveEntry(headerBuf, encoding); + } catch (IllegalArgumentException e) { + IOException ioe = new IOException("Error detected parsing the header"); + ioe.initCause(e); + throw ioe; + } + + entryOffset = 0; + entrySize = currEntry.getSize(); + + if (currEntry.isGNULongLinkEntry()) { + byte[] longLinkData = getLongNameData(); + if (longLinkData == null) { + // Bugzilla: 40334 + // Malformed tar file - long link entry name not followed by + // entry + return null; + } + currEntry.setLinkName(encoding.decode(longLinkData)); + } + + if (currEntry.isGNULongNameEntry()) { + byte[] longNameData = getLongNameData(); + if (longNameData == null) { + // Bugzilla: 40334 + // Malformed tar file - long entry name not followed by + // entry + return null; + } + currEntry.setName(encoding.decode(longNameData)); + } + + if (currEntry.isPaxHeader()){ // Process Pax headers + paxHeaders(); + } + + if (currEntry.isGNUSparse()){ // Process sparse files + readGNUSparse(); + } + + // If the size of the next element in the archive has changed + // due to a new size being reported in the posix header + // information, we update entrySize here so that it contains + // the correct value. 
+ entrySize = currEntry.getSize(); + + return currEntry; + } + + /** + * The last record block should be written at the full size, so skip any + * additional space used to fill a record after an entry + */ + private void skipRecordPadding() throws IOException { + if (this.entrySize > 0 && this.entrySize % this.recordSize != 0) { + long numRecords = (this.entrySize / this.recordSize) + 1; + long padding = (numRecords * this.recordSize) - this.entrySize; + long skipped = IOUtils.skip(is, padding); + count(skipped); + } + } + + /** + * Get the next entry in this tar archive as longname data. + * + * @return The next entry in the archive as longname data, or null. + * @throws IOException on error + */ + protected byte[] getLongNameData() throws IOException { + // read in the name + ByteArrayOutputStream longName = new ByteArrayOutputStream(); + int length = 0; + while ((length = read(SMALL_BUF)) >= 0) { + longName.write(SMALL_BUF, 0, length); + } + getNextEntry(); + if (currEntry == null) { + // Bugzilla: 40334 + // Malformed tar file - long entry name not followed by entry + return null; + } + byte[] longNameData = longName.toByteArray(); + // remove trailing null terminator(s) + length = longNameData.length; + while (length > 0 && longNameData[length - 1] == 0) { + --length; + } + if (length != longNameData.length) { + byte[] l = new byte[length]; + System.arraycopy(longNameData, 0, l, 0, length); + longNameData = l; + } + return longNameData; + } + + /** + * Get the next record in this tar archive. This will skip + * over any remaining data in the current entry, if there + * is one, and place the input stream at the header of the + * next entry. + * + *

If there are no more entries in the archive, null will be + * returned to indicate that the end of the archive has been + * reached. At the same time the {@code hasHitEOF} marker will be + * set to true.

+ * + * @return The next header in the archive, or null. + * @throws IOException on error + */ + private byte[] getRecord() throws IOException { + byte[] headerBuf = readRecord(); + hasHitEOF = isEOFRecord(headerBuf); + if (hasHitEOF && headerBuf != null) { + tryToConsumeSecondEOFRecord(); + consumeRemainderOfLastBlock(); + headerBuf = null; + } + return headerBuf; + } + + /** + * Determine if an archive record indicate End of Archive. End of + * archive is indicated by a record that consists entirely of null bytes. + * + * @param record The record data to check. + * @return true if the record data is an End of Archive + */ + protected boolean isEOFRecord(byte[] record) { + return record == null || ArchiveUtils.isArrayZero(record, recordSize); + } + + /** + * Read a record from the input stream and return the data. + * + * @return The record data or null if EOF has been hit. + * @throws IOException on error + */ + protected byte[] readRecord() throws IOException { + + byte[] record = new byte[recordSize]; + + int readNow = IOUtils.readFully(is, record); + count(readNow); + if (readNow != recordSize) { + return null; + } + + return record; + } + + private void paxHeaders() throws IOException{ + Map headers = parsePaxHeaders(this); + getNextEntry(); // Get the actual file entry + applyPaxHeadersToCurrentEntry(headers); + } + + Map parsePaxHeaders(InputStream i) throws IOException { + Map headers = new HashMap(); + // Format is "length keyword=value\n"; + while(true){ // get length + int ch; + int len = 0; + int read = 0; + while((ch = i.read()) != -1) { + read++; + if (ch == ' '){ // End of length string + // Get keyword + ByteArrayOutputStream coll = new ByteArrayOutputStream(); + while((ch = i.read()) != -1) { + read++; + if (ch == '='){ // end of keyword + String keyword = coll.toString(CharsetNames.UTF_8); + // Get rest of entry + byte[] rest = new byte[len - read]; + int got = i.read(rest); + if (got != len - read){ + throw new IOException("Failed to read " + + 
"Paxheader. Expected " + + (len - read) + + " bytes, read " + + got); + } + // Drop trailing NL + String value = new String(rest, 0, + len - read - 1, CharsetNames.UTF_8); + headers.put(keyword, value); + break; + } + coll.write((byte) ch); + } + break; // Processed single header + } + len *= 10; + len += ch - '0'; + } + if (ch == -1){ // EOF + break; + } + } + return headers; + } + + private void applyPaxHeadersToCurrentEntry(Map headers) { + /* + * The following headers are defined for Pax. + * atime, ctime, charset: cannot use these without changing TarArchiveEntry fields + * mtime + * comment + * gid, gname + * linkpath + * size + * uid,uname + * SCHILY.devminor, SCHILY.devmajor: don't have setters/getters for those + */ + for (Entry ent : headers.entrySet()){ + String key = ent.getKey(); + String val = ent.getValue(); + if ("path".equals(key)){ + currEntry.setName(val); + } else if ("linkpath".equals(key)){ + currEntry.setLinkName(val); + } else if ("gid".equals(key)){ + currEntry.setGroupId(Integer.parseInt(val)); + } else if ("gname".equals(key)){ + currEntry.setGroupName(val); + } else if ("uid".equals(key)){ + currEntry.setUserId(Integer.parseInt(val)); + } else if ("uname".equals(key)){ + currEntry.setUserName(val); + } else if ("size".equals(key)){ + currEntry.setSize(Long.parseLong(val)); + } else if ("mtime".equals(key)){ + currEntry.setModTime((long) (Double.parseDouble(val) * 1000)); + } else if ("SCHILY.devminor".equals(key)){ + currEntry.setDevMinor(Integer.parseInt(val)); + } else if ("SCHILY.devmajor".equals(key)){ + currEntry.setDevMajor(Integer.parseInt(val)); + } + } + } + + /** + * Adds the sparse chunks from the current entry to the sparse chunks, + * including any additional sparse entries following the current entry. + * + * @throws IOException on error + * + * @todo Sparse files get not yet really processed. 
+ */ + private void readGNUSparse() throws IOException { + /* we do not really process sparse files yet + sparses = new ArrayList(); + sparses.addAll(currEntry.getSparses()); + */ + if (currEntry.isExtended()) { + TarArchiveSparseEntry entry; + do { + byte[] headerBuf = getRecord(); + if (headerBuf == null) { + currEntry = null; + break; + } + entry = new TarArchiveSparseEntry(headerBuf); + /* we do not really process sparse files yet + sparses.addAll(entry.getSparses()); + */ + } while (entry.isExtended()); + } + } + + /** + * Returns the next Archive Entry in this Stream. + * + * @return the next entry, + * or {@code null} if there are no more entries + * @throws IOException if the next entry could not be read + */ + @Override + public ArchiveEntry getNextEntry() throws IOException { + return getNextTarEntry(); + } + + /** + * Tries to read the next record rewinding the stream if it is not a EOF record. + * + *

This is meant to protect against cases where a tar + * implementation has written only one EOF record when two are + * expected. Actually this won't help since a non-conforming + * implementation likely won't fill full blocks consisting of - by + * default - ten records either so we probably have already read + * beyond the archive anyway.

+ */ + private void tryToConsumeSecondEOFRecord() throws IOException { + boolean shouldReset = true; + boolean marked = is.markSupported(); + if (marked) { + is.mark(recordSize); + } + try { + shouldReset = !isEOFRecord(readRecord()); + } finally { + if (shouldReset && marked) { + pushedBackBytes(recordSize); + is.reset(); + } + } + } + + /** + * Reads bytes from the current tar archive entry. + * + * This method is aware of the boundaries of the current + * entry in the archive and will deal with them as if they + * were this stream's start and EOF. + * + * @param buf The buffer into which to place bytes read. + * @param offset The offset at which to place bytes read. + * @param numToRead The number of bytes to read. + * @return The number of bytes read, or -1 at EOF. + * @throws IOException on error + */ + @Override + public int read(byte[] buf, int offset, int numToRead) throws IOException { + int totalRead = 0; + + if (hasHitEOF || entryOffset >= entrySize) { + return -1; + } + + numToRead = Math.min(numToRead, available()); + + totalRead = is.read(buf, offset, numToRead); + count(totalRead); + + if (totalRead == -1) { + hasHitEOF = true; + } else { + entryOffset += totalRead; + } + + return totalRead; + } + + /** + * Whether this class is able to read the given entry. + * + *

May return false if the current entry is a sparse file.

+ */ + @Override + public boolean canReadEntryData(ArchiveEntry ae) { + if (ae instanceof TarArchiveEntry) { + TarArchiveEntry te = (TarArchiveEntry) ae; + return !te.isGNUSparse(); + } + return false; + } + + /** + * Get the current TAR Archive Entry that this input stream is processing + * + * @return The current Archive Entry + */ + public TarArchiveEntry getCurrentEntry() { + return currEntry; + } + + protected final void setCurrentEntry(TarArchiveEntry e) { + currEntry = e; + } + + protected final boolean isAtEOF() { + return hasHitEOF; + } + + protected final void setAtEOF(boolean b) { + hasHitEOF = b; + } + + /** + * This method is invoked once the end of the archive is hit, it + * tries to consume the remaining bytes under the assumption that + * the tool creating this archive has padded the last block. + */ + private void consumeRemainderOfLastBlock() throws IOException { + long bytesReadOfLastBlock = getBytesRead() % blockSize; + if (bytesReadOfLastBlock > 0) { + long skipped = IOUtils.skip(is, blockSize - bytesReadOfLastBlock); + count(skipped); + } + } + + /** + * Checks if the signature matches what is expected for a tar file. 
+ * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is a tar archive stream, false otherwise + */ + public static boolean matches(byte[] signature, int length) { + if (length < TarConstants.VERSION_OFFSET+TarConstants.VERSIONLEN) { + return false; + } + + if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_POSIX, + signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) + && + ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_POSIX, + signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) + ){ + return true; + } + if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_GNU, + signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) + && + ( + ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_GNU_SPACE, + signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) + || + ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_GNU_ZERO, + signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) + ) + ){ + return true; + } + // COMPRESS-107 - recognise Ant tar files + if (ArchiveUtils.matchAsciiBuffer(TarConstants.MAGIC_ANT, + signature, TarConstants.MAGIC_OFFSET, TarConstants.MAGICLEN) + && + ArchiveUtils.matchAsciiBuffer(TarConstants.VERSION_ANT, + signature, TarConstants.VERSION_OFFSET, TarConstants.VERSIONLEN) + ){ + return true; + } + return false; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java new file mode 100644 index 000000000..8dbc4aaf3 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveOutputStream.java @@ -0,0 +1,666 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.tar; + +import java.io.File; +import java.io.IOException; +import java.io.OutputStream; +import java.io.StringWriter; +import java.nio.ByteBuffer; +import java.util.Arrays; +import java.util.HashMap; +import java.util.Map; +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveOutputStream; +import org.apache.commons.compress.archivers.zip.ZipEncoding; +import org.apache.commons.compress.archivers.zip.ZipEncodingHelper; +import org.apache.commons.compress.utils.CharsetNames; +import org.apache.commons.compress.utils.CountingOutputStream; + +/** + * The TarOutputStream writes a UNIX tar archive as an OutputStream. + * Methods are provided to put entries, and then write their contents + * by writing to this stream using write(). + * @NotThreadSafe + */ +public class TarArchiveOutputStream extends ArchiveOutputStream { + /** Fail if a long file name is required in the archive. */ + public static final int LONGFILE_ERROR = 0; + + /** Long paths will be truncated in the archive. */ + public static final int LONGFILE_TRUNCATE = 1; + + /** GNU tar extensions are used to store long file names in the archive. */ + public static final int LONGFILE_GNU = 2; + + /** POSIX/PAX extensions are used to store long file names in the archive. 
*/ + public static final int LONGFILE_POSIX = 3; + + /** Fail if a big number (e.g. size > 8GiB) is required in the archive. */ + public static final int BIGNUMBER_ERROR = 0; + + /** star/GNU tar/BSD tar extensions are used to store big number in the archive. */ + public static final int BIGNUMBER_STAR = 1; + + /** POSIX/PAX extensions are used to store big numbers in the archive. */ + public static final int BIGNUMBER_POSIX = 2; + + private long currSize; + private String currName; + private long currBytes; + private final byte[] recordBuf; + private int assemLen; + private final byte[] assemBuf; + private int longFileMode = LONGFILE_ERROR; + private int bigNumberMode = BIGNUMBER_ERROR; + private int recordsWritten; + private final int recordsPerBlock; + private final int recordSize; + + private boolean closed = false; + + /** Indicates if putArchiveEntry has been called without closeArchiveEntry */ + private boolean haveUnclosedEntry = false; + + /** indicates if this archive is finished */ + private boolean finished = false; + + private final OutputStream out; + + private final ZipEncoding encoding; + + private boolean addPaxHeadersForNonAsciiNames = false; + private static final ZipEncoding ASCII = + ZipEncodingHelper.getZipEncoding("ASCII"); + + /** + * Constructor for TarInputStream. + * @param os the output stream to use + */ + public TarArchiveOutputStream(OutputStream os) { + this(os, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE); + } + + /** + * Constructor for TarInputStream. + * @param os the output stream to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveOutputStream(OutputStream os, String encoding) { + this(os, TarConstants.DEFAULT_BLKSIZE, TarConstants.DEFAULT_RCDSIZE, encoding); + } + + /** + * Constructor for TarInputStream. 
+ * @param os the output stream to use + * @param blockSize the block size to use + */ + public TarArchiveOutputStream(OutputStream os, int blockSize) { + this(os, blockSize, TarConstants.DEFAULT_RCDSIZE); + } + + /** + * Constructor for TarInputStream. + * @param os the output stream to use + * @param blockSize the block size to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveOutputStream(OutputStream os, int blockSize, + String encoding) { + this(os, blockSize, TarConstants.DEFAULT_RCDSIZE, encoding); + } + + /** + * Constructor for TarInputStream. + * @param os the output stream to use + * @param blockSize the block size to use + * @param recordSize the record size to use + */ + public TarArchiveOutputStream(OutputStream os, int blockSize, int recordSize) { + this(os, blockSize, recordSize, null); + } + + /** + * Constructor for TarInputStream. + * @param os the output stream to use + * @param blockSize the block size to use + * @param recordSize the record size to use + * @param encoding name of the encoding to use for file names + * @since 1.4 + */ + public TarArchiveOutputStream(OutputStream os, int blockSize, + int recordSize, String encoding) { + out = new CountingOutputStream(os); + this.encoding = ZipEncodingHelper.getZipEncoding(encoding); + + this.assemLen = 0; + this.assemBuf = new byte[recordSize]; + this.recordBuf = new byte[recordSize]; + this.recordSize = recordSize; + this.recordsPerBlock = blockSize / recordSize; + } + + /** + * Set the long file mode. + * This can be LONGFILE_ERROR(0), LONGFILE_TRUNCATE(1) or LONGFILE_GNU(2). + * This specifies the treatment of long file names (names >= TarConstants.NAMELEN). + * Default is LONGFILE_ERROR. + * @param longFileMode the mode to use + */ + public void setLongFileMode(int longFileMode) { + this.longFileMode = longFileMode; + } + + /** + * Set the big number mode. + * This can be BIGNUMBER_ERROR(0), BIGNUMBER_POSIX(1) or BIGNUMBER_STAR(2). 
+ * This specifies the treatment of big files (sizes > TarConstants.MAXSIZE) and other numeric values to big to fit into a traditional tar header. + * Default is BIGNUMBER_ERROR. + * @param bigNumberMode the mode to use + * @since 1.4 + */ + public void setBigNumberMode(int bigNumberMode) { + this.bigNumberMode = bigNumberMode; + } + + /** + * Whether to add a PAX extension header for non-ASCII file names. + * @since 1.4 + */ + public void setAddPaxHeadersForNonAsciiNames(boolean b) { + addPaxHeadersForNonAsciiNames = b; + } + + @Deprecated + @Override + public int getCount() { + return (int) getBytesWritten(); + } + + @Override + public long getBytesWritten() { + return ((CountingOutputStream) out).getBytesWritten(); + } + + /** + * Ends the TAR archive without closing the underlying OutputStream. + * + * An archive consists of a series of file entries terminated by an + * end-of-archive entry, which consists of two 512 blocks of zero bytes. + * POSIX.1 requires two EOF records, like some other implementations. + * + * @throws IOException on error + */ + @Override + public void finish() throws IOException { + if (finished) { + throw new IOException("This archive has already been finished"); + } + + if (haveUnclosedEntry) { + throw new IOException("This archives contains unclosed entries."); + } + writeEOFRecord(); + writeEOFRecord(); + padAsNeeded(); + out.flush(); + finished = true; + } + + /** + * Closes the underlying OutputStream. + * @throws IOException on error + */ + @Override + public void close() throws IOException { + if (!finished) { + finish(); + } + + if (!closed) { + out.close(); + closed = true; + } + } + + /** + * Get the record size being used by this stream's TarBuffer. + * + * @return The TarBuffer record size. + */ + public int getRecordSize() { + return this.recordSize; + } + + /** + * Put an entry on the output stream. This writes the entry's + * header record and positions the output stream for writing + * the contents of the entry. 
Once this method is called, the + * stream is ready for calls to write() to write the entry's + * contents. Once the contents are written, closeArchiveEntry() + * MUST be called to ensure that all buffered data + * is completely written to the output stream. + * + * @param archiveEntry The TarEntry to be written to the archive. + * @throws IOException on error + * @throws ClassCastException if archiveEntry is not an instance of TarArchiveEntry + */ + @Override + public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { + if(finished) { + throw new IOException("Stream has already been finished"); + } + TarArchiveEntry entry = (TarArchiveEntry) archiveEntry; + Map paxHeaders = new HashMap(); + final String entryName = entry.getName(); + boolean paxHeaderContainsPath = handleLongName(entryName, paxHeaders, "path", + TarConstants.LF_GNUTYPE_LONGNAME, "file name"); + + final String linkName = entry.getLinkName(); + boolean paxHeaderContainsLinkPath = linkName != null && linkName.length() > 0 + && handleLongName(linkName, paxHeaders, "linkpath", + TarConstants.LF_GNUTYPE_LONGLINK, "link name"); + + if (bigNumberMode == BIGNUMBER_POSIX) { + addPaxHeadersForBigNumbers(paxHeaders, entry); + } else if (bigNumberMode != BIGNUMBER_STAR) { + failForBigNumbers(entry); + } + + if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsPath + && !ASCII.canEncode(entryName)) { + paxHeaders.put("path", entryName); + } + + if (addPaxHeadersForNonAsciiNames && !paxHeaderContainsLinkPath + && (entry.isLink() || entry.isSymbolicLink()) + && !ASCII.canEncode(linkName)) { + paxHeaders.put("linkpath", linkName); + } + + if (paxHeaders.size() > 0) { + writePaxHeaders(entryName, paxHeaders); + } + + entry.writeEntryHeader(recordBuf, encoding, + bigNumberMode == BIGNUMBER_STAR); + writeRecord(recordBuf); + + currBytes = 0; + + if (entry.isDirectory()) { + currSize = 0; + } else { + currSize = entry.getSize(); + } + currName = entryName; + haveUnclosedEntry = true; + } + + /** + 
* Close an entry. This method MUST be called for all file + * entries that contain data. The reason is that we must + * buffer data written to the stream in order to satisfy + * the buffer's record based writes. Thus, there may be + * data fragments still being assembled that must be written + * to the output stream before this entry is closed and the + * next entry written. + * @throws IOException on error + */ + @Override + public void closeArchiveEntry() throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + if (!haveUnclosedEntry){ + throw new IOException("No current entry to close"); + } + if (assemLen > 0) { + for (int i = assemLen; i < assemBuf.length; ++i) { + assemBuf[i] = 0; + } + + writeRecord(assemBuf); + + currBytes += assemLen; + assemLen = 0; + } + + if (currBytes < currSize) { + throw new IOException("entry '" + currName + "' closed at '" + + currBytes + + "' before the '" + currSize + + "' bytes specified in the header were written"); + } + haveUnclosedEntry = false; + } + + /** + * Writes bytes to the current tar archive entry. This method + * is aware of the current entry and will throw an exception if + * you attempt to write bytes past the length specified for the + * current entry. The method is also (painfully) aware of the + * record buffering required by TarBuffer, and manages buffers + * that are not a multiple of recordsize in length, including + * assembling records from small buffers. + * + * @param wBuf The buffer to write to the archive. + * @param wOffset The offset in the buffer from which to get bytes. + * @param numToWrite The number of bytes to write. 
+ * @throws IOException on error + */ + @Override + public void write(byte[] wBuf, int wOffset, int numToWrite) throws IOException { + if (currBytes + numToWrite > currSize) { + throw new IOException("request to write '" + numToWrite + + "' bytes exceeds size in header of '" + + currSize + "' bytes for entry '" + + currName + "'"); + + // + // We have to deal with assembly!!! + // The programmer can be writing little 32 byte chunks for all + // we know, and we must assemble complete records for writing. + // REVIEW Maybe this should be in TarBuffer? Could that help to + // eliminate some of the buffer copying. + // + } + + if (assemLen > 0) { + if (assemLen + numToWrite >= recordBuf.length) { + int aLen = recordBuf.length - assemLen; + + System.arraycopy(assemBuf, 0, recordBuf, 0, + assemLen); + System.arraycopy(wBuf, wOffset, recordBuf, + assemLen, aLen); + writeRecord(recordBuf); + + currBytes += recordBuf.length; + wOffset += aLen; + numToWrite -= aLen; + assemLen = 0; + } else { + System.arraycopy(wBuf, wOffset, assemBuf, assemLen, + numToWrite); + + wOffset += numToWrite; + assemLen += numToWrite; + numToWrite = 0; + } + } + + // + // When we get here we have EITHER: + // o An empty "assemble" buffer. + // o No bytes to write (numToWrite == 0) + // + while (numToWrite > 0) { + if (numToWrite < recordBuf.length) { + System.arraycopy(wBuf, wOffset, assemBuf, assemLen, + numToWrite); + + assemLen += numToWrite; + + break; + } + + writeRecord(wBuf, wOffset); + + int num = recordBuf.length; + + currBytes += num; + numToWrite -= num; + wOffset += num; + } + } + + /** + * Writes a PAX extended header with the given map as contents. 
+ * @since 1.4 + */ + void writePaxHeaders(String entryName, + Map headers) throws IOException { + String name = "./PaxHeaders.X/" + stripTo7Bits(entryName); + if (name.length() >= TarConstants.NAMELEN) { + name = name.substring(0, TarConstants.NAMELEN - 1); + } + while (name.endsWith("/")) { + // TarEntry's constructor would think this is a directory + // and not allow any data to be written + name = name.substring(0, name.length() - 1); + } + TarArchiveEntry pex = new TarArchiveEntry(name, + TarConstants.LF_PAX_EXTENDED_HEADER_LC); + + StringWriter w = new StringWriter(); + for (Map.Entry h : headers.entrySet()) { + String key = h.getKey(); + String value = h.getValue(); + int len = key.length() + value.length() + + 3 /* blank, equals and newline */ + + 2 /* guess 9 < actual length < 100 */; + String line = len + " " + key + "=" + value + "\n"; + int actualLength = line.getBytes(CharsetNames.UTF_8).length; + while (len != actualLength) { + // Adjust for cases where length < 10 or > 100 + // or where UTF-8 encoding isn't a single octet + // per character. + // Must be in loop as size may go from 99 to 100 in + // first pass so we'd need a second. + len = actualLength; + line = len + " " + key + "=" + value + "\n"; + actualLength = line.getBytes(CharsetNames.UTF_8).length; + } + w.write(line); + } + byte[] data = w.toString().getBytes(CharsetNames.UTF_8); + pex.setSize(data.length); + putArchiveEntry(pex); + write(data); + closeArchiveEntry(); + } + + private String stripTo7Bits(String name) { + final int length = name.length(); + StringBuilder result = new StringBuilder(length); + for (int i = 0; i < length; i++) { + char stripped = (char) (name.charAt(i) & 0x7F); + if (stripped != 0) { // would be read as Trailing null + result.append(stripped); + } + } + return result.toString(); + } + + /** + * Write an EOF (end of archive) record to the tar archive. + * An EOF record consists of a record of all zeros. 
+ */ + private void writeEOFRecord() throws IOException { + Arrays.fill(recordBuf, (byte) 0); + writeRecord(recordBuf); + } + + @Override + public void flush() throws IOException { + out.flush(); + } + + @Override + public ArchiveEntry createArchiveEntry(File inputFile, String entryName) + throws IOException { + if(finished) { + throw new IOException("Stream has already been finished"); + } + return new TarArchiveEntry(inputFile, entryName); + } + + /** + * Write an archive record to the archive. + * + * @param record The record data to write to the archive. + * @throws IOException on error + */ + private void writeRecord(byte[] record) throws IOException { + if (record.length != recordSize) { + throw new IOException("record to write has length '" + + record.length + + "' which is not the record size of '" + + recordSize + "'"); + } + + out.write(record); + recordsWritten++; + } + + /** + * Write an archive record to the archive, where the record may be + * inside of a larger array buffer. The buffer must be "offset plus + * record size" long. + * + * @param buf The buffer containing the record data to write. + * @param offset The offset of the record data within buf. 
+ * @throws IOException on error + */ + private void writeRecord(byte[] buf, int offset) throws IOException { + + if (offset + recordSize > buf.length) { + throw new IOException("record has length '" + buf.length + + "' with offset '" + offset + + "' which is less than the record size of '" + + recordSize + "'"); + } + + out.write(buf, offset, recordSize); + recordsWritten++; + } + + private void padAsNeeded() throws IOException { + int start = recordsWritten % recordsPerBlock; + if (start != 0) { + for (int i = start; i < recordsPerBlock; i++) { + writeEOFRecord(); + } + } + } + + private void addPaxHeadersForBigNumbers(Map paxHeaders, + TarArchiveEntry entry) { + addPaxHeaderForBigNumber(paxHeaders, "size", entry.getSize(), + TarConstants.MAXSIZE); + addPaxHeaderForBigNumber(paxHeaders, "gid", entry.getGroupId(), + TarConstants.MAXID); + addPaxHeaderForBigNumber(paxHeaders, "mtime", + entry.getModTime().getTime() / 1000, + TarConstants.MAXSIZE); + addPaxHeaderForBigNumber(paxHeaders, "uid", entry.getUserId(), + TarConstants.MAXID); + // star extensions by J\u00f6rg Schilling + addPaxHeaderForBigNumber(paxHeaders, "SCHILY.devmajor", + entry.getDevMajor(), TarConstants.MAXID); + addPaxHeaderForBigNumber(paxHeaders, "SCHILY.devminor", + entry.getDevMinor(), TarConstants.MAXID); + // there is no PAX header for file mode + failForBigNumber("mode", entry.getMode(), TarConstants.MAXID); + } + + private void addPaxHeaderForBigNumber(Map paxHeaders, + String header, long value, + long maxValue) { + if (value < 0 || value > maxValue) { + paxHeaders.put(header, String.valueOf(value)); + } + } + + private void failForBigNumbers(TarArchiveEntry entry) { + failForBigNumber("entry size", entry.getSize(), TarConstants.MAXSIZE); + failForBigNumber("group id", entry.getGroupId(), TarConstants.MAXID); + failForBigNumber("last modification time", + entry.getModTime().getTime() / 1000, + TarConstants.MAXSIZE); + failForBigNumber("user id", entry.getUserId(), TarConstants.MAXID); + 
failForBigNumber("mode", entry.getMode(), TarConstants.MAXID); + failForBigNumber("major device number", entry.getDevMajor(), + TarConstants.MAXID); + failForBigNumber("minor device number", entry.getDevMinor(), + TarConstants.MAXID); + } + + private void failForBigNumber(String field, long value, long maxValue) { + if (value < 0 || value > maxValue) { + throw new RuntimeException(field + " '" + value + + "' is too big ( > " + + maxValue + " )"); + } + } + + /** + * Handles long file or link names according to the longFileMode setting. + * + *

I.e. if the given name is too long to be written to a plain + * tar header then + *

    + *
  • it creates a pax header who's name is given by the + * paxHeaderName parameter if longFileMode is POSIX
  • + *
  • it creates a GNU longlink entry who's type is given by + * the linkType parameter if longFileMode is GNU
  • + *
  • it throws an exception if longFileMode is ERROR
  • + *
  • it truncates the name if longFileMode is TRUNCATE
  • + *

+ * + * @param name the name to write + * @param paxHeaders current map of pax headers + * @param paxHeaderName name of the pax header to write + * @param linkType type of the GNU entry to write + * @param fieldName the name of the field + * @return whether a pax header has been written. + */ + private boolean handleLongName(String name, + Map paxHeaders, + String paxHeaderName, byte linkType, String fieldName) + throws IOException { + final ByteBuffer encodedName = encoding.encode(name); + final int len = encodedName.limit() - encodedName.position(); + if (len >= TarConstants.NAMELEN) { + + if (longFileMode == LONGFILE_POSIX) { + paxHeaders.put(paxHeaderName, name); + return true; + } else if (longFileMode == LONGFILE_GNU) { + // create a TarEntry for the LongLink, the contents + // of which are the link's name + TarArchiveEntry longLinkEntry = new TarArchiveEntry(TarConstants.GNU_LONGLINK, linkType); + + longLinkEntry.setSize(len + 1); // +1 for NUL + putArchiveEntry(longLinkEntry); + write(encodedName.array(), encodedName.arrayOffset(), len); + write(0); // NUL terminator + closeArchiveEntry(); + } else if (longFileMode != LONGFILE_TRUNCATE) { + throw new RuntimeException(fieldName + " '" + name + + "' is too long ( > " + + TarConstants.NAMELEN + " bytes)"); + } + } + return false; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.java new file mode 100644 index 000000000..79b36ac8f --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarArchiveSparseEntry.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.tar; + +import java.io.IOException; + +/** + * This class represents a sparse entry in a Tar archive. + * + *

+ * The C structure for a sparse entry is: + *

+ * struct posix_header {
+ * struct sparse sp[21]; // TarConstants.SPARSELEN_GNU_SPARSE     - offset 0
+ * char isextended;      // TarConstants.ISEXTENDEDLEN_GNU_SPARSE - offset 504
+ * };
+ * 
+ * Whereas, "struct sparse" is: + *
+ * struct sparse {
+ * char offset[12];   // offset 0
+ * char numbytes[12]; // offset 12
+ * };
+ * 
+ */ + +public class TarArchiveSparseEntry implements TarConstants { + /** If an extension sparse header follows. */ + private final boolean isExtended; + + /** + * Construct an entry from an archive's header bytes. File is set + * to null. + * + * @param headerBuf The header bytes from a tar archive entry. + * @throws IOException on unknown format + */ + public TarArchiveSparseEntry(byte[] headerBuf) throws IOException { + int offset = 0; + offset += SPARSELEN_GNU_SPARSE; + isExtended = TarUtils.parseBoolean(headerBuf, offset); + } + + public boolean isExtended() { + return isExtended; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarConstants.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarConstants.java new file mode 100644 index 000000000..000d12654 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarConstants.java @@ -0,0 +1,314 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.tar; + +/** + * This interface contains all the definitions used in the package. + * + * For tar formats (FORMAT_OLDGNU, FORMAT_POSIX, etc.) 
see GNU tar
 * tar.h type enum archive_format
 */
// CheckStyle:InterfaceIsTypeCheck OFF (bc)
public interface TarConstants {

    /** Default record size (bytes). */
    int DEFAULT_RCDSIZE = 512;

    /** Default block size: 20 records. */
    int DEFAULT_BLKSIZE = DEFAULT_RCDSIZE * 20;

    /** GNU format as per before tar 1.12. */
    int FORMAT_OLDGNU = 2;

    /** Pure Posix format. */
    int FORMAT_POSIX = 3;

    /** The length of the name field in a header buffer. */
    int NAMELEN = 100;

    /** The length of the mode field in a header buffer. */
    int MODELEN = 8;

    /** The length of the user id field in a header buffer. */
    int UIDLEN = 8;

    /** The length of the group id field in a header buffer. */
    int GIDLEN = 8;

    /**
     * The maximum value of gid/uid in a tar archive which can
     * be expressed in octal char notation (that's 7 sevens, octal).
     */
    long MAXID = 07777777L;

    /** The length of the checksum field in a header buffer. */
    int CHKSUMLEN = 8;

    /**
     * Offset of the checksum field within header record.
     * @since 1.5
     */
    int CHKSUM_OFFSET = 148;

    /**
     * The length of the size field in a header buffer.
     * Includes the trailing space or NUL.
     */
    int SIZELEN = 12;

    /**
     * The maximum size of a file in a tar archive
     * which can be expressed in octal char notation (that's 11 sevens, octal).
     */
    long MAXSIZE = 077777777777L;

    /** Offset of start of magic field within header record */
    int MAGIC_OFFSET = 257;

    /** The length of the magic field in a header buffer. */
    int MAGICLEN = 6;

    /** Offset of start of version field within header record */
    int VERSION_OFFSET = 263;

    /** Previously this was regarded as part of "magic" field, but it is separate. */
    int VERSIONLEN = 2;

    /** The length of the modification time field in a header buffer. */
    int MODTIMELEN = 12;

    /** The length of the user name field in a header buffer. */
    int UNAMELEN = 32;

    /** The length of the group name field in a header buffer. */
    int GNAMELEN = 32;

    /** The length of each of the device fields (major and minor) in a header buffer. */
    int DEVLEN = 8;

    /** Length of the prefix field. */
    int PREFIXLEN = 155;

    /** The length of the access time field in an old GNU header buffer. */
    int ATIMELEN_GNU = 12;

    /** The length of the created time field in an old GNU header buffer. */
    int CTIMELEN_GNU = 12;

    /** The length of the multivolume start offset field in an old GNU header buffer. */
    int OFFSETLEN_GNU = 12;

    /** The length of the long names field in an old GNU header buffer. */
    int LONGNAMESLEN_GNU = 4;

    /** The length of the padding field in an old GNU header buffer. */
    int PAD2LEN_GNU = 1;

    /** The sum of the length of all sparse headers in an old GNU header buffer. */
    int SPARSELEN_GNU = 96;

    /** The length of the is extension field in an old GNU header buffer. */
    int ISEXTENDEDLEN_GNU = 1;

    /** The length of the real size field in an old GNU header buffer. */
    int REALSIZELEN_GNU = 12;

    /** The sum of the length of all sparse headers in a sparse header buffer. */
    int SPARSELEN_GNU_SPARSE = 504;

    /** The length of the is extension field in a sparse header buffer. */
    int ISEXTENDEDLEN_GNU_SPARSE = 1;

    /**
     * LF_ constants represent the "link flag" of an entry, or more commonly,
     * the "entry type". This is the "old way" of indicating a normal file.
     */
    byte LF_OLDNORM = 0;

    /** Normal file type. */
    byte LF_NORMAL = (byte) '0';

    /** Link file type. */
    byte LF_LINK = (byte) '1';

    /** Symbolic link file type. */
    byte LF_SYMLINK = (byte) '2';

    /** Character device file type. */
    byte LF_CHR = (byte) '3';

    /** Block device file type. */
    byte LF_BLK = (byte) '4';

    /** Directory file type. */
    byte LF_DIR = (byte) '5';

    /** FIFO (pipe) file type. */
    byte LF_FIFO = (byte) '6';

    /** Contiguous file type. */
    byte LF_CONTIG = (byte) '7';

    /** Identifies the *next* file on the tape as having a long linkname. */
    byte LF_GNUTYPE_LONGLINK = (byte) 'K';

    /** Identifies the *next* file on the tape as having a long name. */
    byte LF_GNUTYPE_LONGNAME = (byte) 'L';

    /**
     * Sparse file type.
     * @since 1.1.1
     */
    byte LF_GNUTYPE_SPARSE = (byte) 'S';

    // See "http://www.opengroup.org/onlinepubs/009695399/utilities/pax.html#tag_04_100_13_02"

    /**
     * Identifies the entry as a Pax extended header.
     * @since 1.1
     */
    byte LF_PAX_EXTENDED_HEADER_LC = (byte) 'x';

    /**
     * Identifies the entry as a Pax extended header (SunOS tar -E).
     * @since 1.1
     */
    byte LF_PAX_EXTENDED_HEADER_UC = (byte) 'X';

    /**
     * Identifies the entry as a Pax global extended header.
     * @since 1.1
     */
    byte LF_PAX_GLOBAL_EXTENDED_HEADER = (byte) 'g';

    /** The magic tag representing a POSIX tar archive. */
    String MAGIC_POSIX = "ustar\0";
    String VERSION_POSIX = "00";

    /** The magic tag representing a GNU tar archive. */
    String MAGIC_GNU = "ustar ";
    // Appear to be two possible GNU versions
    String VERSION_GNU_SPACE = " \0";
    String VERSION_GNU_ZERO = "0\0";

    /**
     * The magic tag representing an Ant tar archive.
     * @since 1.1
     */
    String MAGIC_ANT = "ustar\0";

    /**
     * The "version" representing an Ant tar archive.
     * @since 1.1
     */
    // Does not appear to have a version, however Ant does write 8 bytes,
    // so assume the version is 2 nulls
    String VERSION_ANT = "\0\0";

    /** The name of the GNU tar entry which contains a long name. */
    String GNU_LONGLINK = "././@LongLink"; // TODO rename as LONGLINK_GNU ?

}
diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarUtils.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarUtils.java
new file mode 100644
index 000000000..1579dcbb8
--- /dev/null
+++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/TarUtils.java
@@ -0,0 +1,632 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied.  See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.commons.compress.archivers.tar;

import static org.apache.commons.compress.archivers.tar.TarConstants.CHKSUMLEN;
import static org.apache.commons.compress.archivers.tar.TarConstants.CHKSUM_OFFSET;

import java.io.IOException;
import java.math.BigInteger;
import java.nio.ByteBuffer;
import org.apache.commons.compress.archivers.zip.ZipEncoding;
import org.apache.commons.compress.archivers.zip.ZipEncodingHelper;

/**
 * This class provides static utility methods to work with byte streams.
+ * + * @Immutable + */ +// CheckStyle:HideUtilityClassConstructorCheck OFF (bc) +public class TarUtils { + + private static final int BYTE_MASK = 255; + + static final ZipEncoding DEFAULT_ENCODING = + ZipEncodingHelper.getZipEncoding(null); + + /** + * Encapsulates the algorithms used up to Commons Compress 1.3 as + * ZipEncoding. + */ + static final ZipEncoding FALLBACK_ENCODING = new ZipEncoding() { + public boolean canEncode(String name) { return true; } + + public ByteBuffer encode(String name) { + final int length = name.length(); + byte[] buf = new byte[length]; + + // copy until end of input or output is reached. + for (int i = 0; i < length; ++i) { + buf[i] = (byte) name.charAt(i); + } + return ByteBuffer.wrap(buf); + } + + public String decode(byte[] buffer) { + final int length = buffer.length; + StringBuilder result = new StringBuilder(length); + + for (int i = 0; i < length; ++i) { + byte b = buffer[i]; + if (b == 0) { // Trailing null + break; + } + result.append((char) (b & 0xFF)); // Allow for sign-extension + } + + return result.toString(); + } + }; + + /** Private constructor to prevent instantiation of this utility class. */ + private TarUtils(){ + } + + /** + * Parse an octal string from a buffer. + * + *

Leading spaces are ignored. + * The buffer must contain a trailing space or NUL, + * and may contain an additional trailing space or NUL.

+ * + *

The input buffer is allowed to contain all NULs, + * in which case the method returns 0L + * (this allows for missing fields).

+ * + *

To work-around some tar implementations that insert a + * leading NUL this method returns 0 if it detects a leading NUL + * since Commons Compress 1.4.

+ * + * @param buffer The buffer from which to parse. + * @param offset The offset into the buffer from which to parse. + * @param length The maximum number of bytes to parse - must be at least 2 bytes. + * @return The long value of the octal string. + * @throws IllegalArgumentException if the trailing space/NUL is missing or if a invalid byte is detected. + */ + public static long parseOctal(final byte[] buffer, final int offset, final int length) { + long result = 0; + int end = offset + length; + int start = offset; + + if (length < 2){ + throw new IllegalArgumentException("Length "+length+" must be at least 2"); + } + + if (buffer[start] == 0) { + return 0L; + } + + // Skip leading spaces + while (start < end){ + if (buffer[start] == ' '){ + start++; + } else { + break; + } + } + + // Must have trailing NUL or space + byte trailer; + trailer = buffer[end-1]; + if (trailer == 0 || trailer == ' '){ + end--; + } else { + throw new IllegalArgumentException( + exceptionMessage(buffer, offset, length, end-1, trailer)); + } + // May have additional NULs or spaces + trailer = buffer[end - 1]; + while (start < end - 1 && (trailer == 0 || trailer == ' ')) { + end--; + trailer = buffer[end - 1]; + } + + for ( ;start < end; start++) { + final byte currentByte = buffer[start]; + // CheckStyle:MagicNumber OFF + if (currentByte < '0' || currentByte > '7'){ + throw new IllegalArgumentException( + exceptionMessage(buffer, offset, length, start, currentByte)); + } + result = (result << 3) + (currentByte - '0'); // convert from ASCII + // CheckStyle:MagicNumber ON + } + + return result; + } + + /** + * Compute the value contained in a byte buffer. If the most + * significant bit of the first byte in the buffer is set, this + * bit is ignored and the rest of the buffer is interpreted as a + * binary number. Otherwise, the buffer is interpreted as an + * octal number as per the parseOctal function above. + * + * @param buffer The buffer from which to parse. 
 * @param offset The offset into the buffer from which to parse.
     * @param length The maximum number of bytes to parse.
     * @return The long value of the octal or binary string.
     * @throws IllegalArgumentException if the trailing space/NUL is
     * missing or an invalid byte is detected in an octal number, or
     * if a binary number would exceed the size of a signed long
     * 64-bit integer.
     * @since 1.4
     */
    public static long parseOctalOrBinary(final byte[] buffer, final int offset,
                                          final int length) {

        // A clear high bit in the first byte means ordinary octal text.
        if ((buffer[offset] & 0x80) == 0) {
            return parseOctal(buffer, offset, length);
        }
        // Marker byte 0xff flags a negative base-256 (binary) number.
        final boolean negative = buffer[offset] == (byte) 0xff;
        if (length < 9) {
            // At most 8 bytes fit into a signed long without overflow.
            return parseBinaryLong(buffer, offset, length, negative);
        }
        return parseBinaryBigInteger(buffer, offset, length, negative);
    }

    // Decodes a base-256 number of fewer than 9 bytes into a long,
    // skipping the marker byte at buffer[offset].
    private static long parseBinaryLong(final byte[] buffer, final int offset,
                                        final int length,
                                        final boolean negative) {
        if (length >= 9) {
            throw new IllegalArgumentException("At offset " + offset + ", "
                                               + length + " byte binary number"
                                               + " exceeds maximum signed long"
                                               + " value");
        }
        long val = 0;
        // Fold in the payload bytes big-endian.
        for (int i = 1; i < length; i++) {
            val = (val << 8) + (buffer[offset + i] & 0xff);
        }
        if (negative) {
            // 2's complement: undo within the (length-1)*8-bit field.
            val--;
            val ^= (long) Math.pow(2, (length - 1) * 8) - 1;
        }
        return negative ? -val : val;
    }

    // Decodes a base-256 number of 9+ bytes via BigInteger, rejecting
    // magnitudes that cannot be represented in a signed 64-bit long.
    private static long parseBinaryBigInteger(final byte[] buffer,
                                              final int offset,
                                              final int length,
                                              final boolean negative) {
        byte[] remainder = new byte[length - 1];
        System.arraycopy(buffer, offset + 1, remainder, 0, length - 1);
        BigInteger val = new BigInteger(remainder);
        if (negative) {
            // 2's complement
            val = val.add(BigInteger.valueOf(-1)).not();
        }
        if (val.bitLength() > 63) {
            throw new IllegalArgumentException("At offset " + offset + ", "
                                               + length + " byte binary number"
                                               + " exceeds maximum signed long"
                                               + " value");
        }
        return negative ? -val.longValue() : val.longValue();
    }

    /**
     * Parse a boolean byte from a buffer.
     * Leading spaces and NUL are ignored.
     * The buffer may contain trailing spaces or NULs.
     *
     * @param buffer The buffer from which to parse.
     * @param offset The offset into the buffer from which to parse.
     * @return The boolean value of the bytes.
     * @throws IllegalArgumentException if an invalid byte is detected.
     */
    public static boolean parseBoolean(final byte[] buffer, final int offset) {
        return buffer[offset] == 1;
    }

    // Helper method to generate the exception message
    private static String exceptionMessage(byte[] buffer, final int offset,
                                           final int length, int current, final byte currentByte) {
        // default charset is good enough for an exception message,
        //
        // the alternative was to modify parseOctal and
        // parseOctalOrBinary to receive the ZipEncoding of the
        // archive (deprecating the existing public methods, of
        // course) and dealing with the fact that ZipEncoding#decode
        // can throw an IOException which parseOctal* doesn't declare
        String string = new String(buffer, offset, length);

        string=string.replaceAll("\0", "{NUL}"); // Replace NULs to allow string to be printed
        final String s = "Invalid byte "+currentByte+" at offset "+(current-offset)+" in '"+string+"' len="+length;
        return s;
    }

    /**
     * Parse an entry name from a buffer.
     * Parsing stops when a NUL is found
     * or the buffer length is reached.
     *
     * @param buffer The buffer from which to parse.
     * @param offset The offset into the buffer from which to parse.
     * @param length The maximum number of bytes to parse.
     * @return The entry name.
+ */ + public static String parseName(byte[] buffer, final int offset, final int length) { + try { + return parseName(buffer, offset, length, DEFAULT_ENCODING); + } catch (IOException ex) { + try { + return parseName(buffer, offset, length, FALLBACK_ENCODING); + } catch (IOException ex2) { + // impossible + throw new RuntimeException(ex2); + } + } + } + + /** + * Parse an entry name from a buffer. + * Parsing stops when a NUL is found + * or the buffer length is reached. + * + * @param buffer The buffer from which to parse. + * @param offset The offset into the buffer from which to parse. + * @param length The maximum number of bytes to parse. + * @param encoding name of the encoding to use for file names + * @since 1.4 + * @return The entry name. + */ + public static String parseName(byte[] buffer, final int offset, + final int length, + final ZipEncoding encoding) + throws IOException { + + int len = length; + for (; len > 0; len--) { + if (buffer[offset + len - 1] != 0) { + break; + } + } + if (len > 0) { + byte[] b = new byte[len]; + System.arraycopy(buffer, offset, b, 0, len); + return encoding.decode(b); + } + return ""; + } + + /** + * Copy a name into a buffer. + * Copies characters from the name into the buffer + * starting at the specified offset. + * If the buffer is longer than the name, the buffer + * is filled with trailing NULs. + * If the name is longer than the buffer, + * the output is truncated. + * + * @param name The header name from which to copy the characters. + * @param buf The buffer where the name is to be stored. + * @param offset The starting offset into the buffer + * @param length The maximum number of header bytes to copy. + * @return The updated offset, i.e. 
offset + length + */ + public static int formatNameBytes(String name, byte[] buf, final int offset, final int length) { + try { + return formatNameBytes(name, buf, offset, length, DEFAULT_ENCODING); + } catch (IOException ex) { + try { + return formatNameBytes(name, buf, offset, length, + FALLBACK_ENCODING); + } catch (IOException ex2) { + // impossible + throw new RuntimeException(ex2); + } + } + } + + /** + * Copy a name into a buffer. + * Copies characters from the name into the buffer + * starting at the specified offset. + * If the buffer is longer than the name, the buffer + * is filled with trailing NULs. + * If the name is longer than the buffer, + * the output is truncated. + * + * @param name The header name from which to copy the characters. + * @param buf The buffer where the name is to be stored. + * @param offset The starting offset into the buffer + * @param length The maximum number of header bytes to copy. + * @param encoding name of the encoding to use for file names + * @since 1.4 + * @return The updated offset, i.e. offset + length + */ + public static int formatNameBytes(String name, byte[] buf, final int offset, + final int length, + final ZipEncoding encoding) + throws IOException { + int len = name.length(); + ByteBuffer b = encoding.encode(name); + while (b.limit() > length && len > 0) { + b = encoding.encode(name.substring(0, --len)); + } + final int limit = b.limit() - b.position(); + System.arraycopy(b.array(), b.arrayOffset(), buf, offset, limit); + + // Pad any remaining output bytes with NUL + for (int i = limit; i < length; ++i) { + buf[offset + i] = 0; + } + + return offset + length; + } + + /** + * Fill buffer with unsigned octal number, padded with leading zeroes. 
+ * + * @param value number to convert to octal - treated as unsigned + * @param buffer destination buffer + * @param offset starting offset in buffer + * @param length length of buffer to fill + * @throws IllegalArgumentException if the value will not fit in the buffer + */ + public static void formatUnsignedOctalString(final long value, byte[] buffer, + final int offset, final int length) { + int remaining = length; + remaining--; + if (value == 0) { + buffer[offset + remaining--] = (byte) '0'; + } else { + long val = value; + for (; remaining >= 0 && val != 0; --remaining) { + // CheckStyle:MagicNumber OFF + buffer[offset + remaining] = (byte) ((byte) '0' + (byte) (val & 7)); + val = val >>> 3; + // CheckStyle:MagicNumber ON + } + if (val != 0){ + throw new IllegalArgumentException + (value+"="+Long.toOctalString(value)+ " will not fit in octal number buffer of length "+length); + } + } + + for (; remaining >= 0; --remaining) { // leading zeros + buffer[offset + remaining] = (byte) '0'; + } + } + + /** + * Write an octal integer into a buffer. + * + * Uses {@link #formatUnsignedOctalString} to format + * the value as an octal string with leading zeros. + * The converted number is followed by space and NUL + * + * @param value The value to write + * @param buf The buffer to receive the output + * @param offset The starting offset into the buffer + * @param length The size of the output buffer + * @return The updated offset, i.e offset+length + * @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer + */ + public static int formatOctalBytes(final long value, byte[] buf, final int offset, final int length) { + + int idx=length-2; // For space and trailing null + formatUnsignedOctalString(value, buf, offset, idx); + + buf[offset + idx++] = (byte) ' '; // Trailing space + buf[offset + idx] = 0; // Trailing null + + return offset + length; + } + + /** + * Write an octal long integer into a buffer. 
+ * + * Uses {@link #formatUnsignedOctalString} to format + * the value as an octal string with leading zeros. + * The converted number is followed by a space. + * + * @param value The value to write as octal + * @param buf The destinationbuffer. + * @param offset The starting offset into the buffer. + * @param length The length of the buffer + * @return The updated offset + * @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer + */ + public static int formatLongOctalBytes(final long value, byte[] buf, final int offset, final int length) { + + int idx=length-1; // For space + + formatUnsignedOctalString(value, buf, offset, idx); + buf[offset + idx] = (byte) ' '; // Trailing space + + return offset + length; + } + + /** + * Write an long integer into a buffer as an octal string if this + * will fit, or as a binary number otherwise. + * + * Uses {@link #formatUnsignedOctalString} to format + * the value as an octal string with leading zeros. + * The converted number is followed by a space. + * + * @param value The value to write into the buffer. + * @param buf The destination buffer. + * @param offset The starting offset into the buffer. + * @param length The length of the buffer. + * @return The updated offset. + * @throws IllegalArgumentException if the value (and trailer) + * will not fit in the buffer. + * @since 1.4 + */ + public static int formatLongOctalOrBinaryBytes( + final long value, byte[] buf, final int offset, final int length) { + + // Check whether we are dealing with UID/GID or SIZE field + final long maxAsOctalChar = length == TarConstants.UIDLEN ? 
TarConstants.MAXID : TarConstants.MAXSIZE; + + final boolean negative = value < 0; + if (!negative && value <= maxAsOctalChar) { // OK to store as octal chars + return formatLongOctalBytes(value, buf, offset, length); + } + + if (length < 9) { + formatLongBinary(value, buf, offset, length, negative); + } + formatBigIntegerBinary(value, buf, offset, length, negative); + + buf[offset] = (byte) (negative ? 0xff : 0x80); + return offset + length; + } + + private static void formatLongBinary(final long value, byte[] buf, + final int offset, final int length, + final boolean negative) { + final int bits = (length - 1) * 8; + final long max = 1l << bits; + long val = Math.abs(value); + if (val >= max) { + throw new IllegalArgumentException("Value " + value + + " is too large for " + length + " byte field."); + } + if (negative) { + val ^= max - 1; + val |= 0xff << bits; + val++; + } + for (int i = offset + length - 1; i >= offset; i--) { + buf[i] = (byte) val; + val >>= 8; + } + } + + private static void formatBigIntegerBinary(final long value, byte[] buf, + final int offset, + final int length, + final boolean negative) { + BigInteger val = BigInteger.valueOf(value); + final byte[] b = val.toByteArray(); + final int len = b.length; + final int off = offset + length - len; + System.arraycopy(b, 0, buf, off, len); + final byte fill = (byte) (negative ? 0xff : 0); + for (int i = offset + 1; i < off; i++) { + buf[i] = fill; + } + } + + /** + * Writes an octal value into a buffer. + * + * Uses {@link #formatUnsignedOctalString} to format + * the value as an octal string with leading zeros. + * The converted number is followed by NUL and then space. + * + * @param value The value to convert + * @param buf The destination buffer + * @param offset The starting offset into the buffer. + * @param length The size of the buffer. + * @return The updated value of offset, i.e. 
offset+length + * @throws IllegalArgumentException if the value (and trailer) will not fit in the buffer + */ + public static int formatCheckSumOctalBytes(final long value, byte[] buf, final int offset, final int length) { + + int idx=length-2; // for NUL and space + formatUnsignedOctalString(value, buf, offset, idx); + + buf[offset + idx++] = 0; // Trailing null + buf[offset + idx] = (byte) ' '; // Trailing space + + return offset + length; + } + + /** + * Compute the checksum of a tar entry header. + * + * @param buf The tar entry's header buffer. + * @return The computed checksum. + */ + public static long computeCheckSum(final byte[] buf) { + long sum = 0; + + for (byte element : buf) { + sum += BYTE_MASK & element; + } + + return sum; + } + + /** + * Wikipedia says: + *
+ * The checksum is calculated by taking the sum of the unsigned byte values + * of the header block with the eight checksum bytes taken to be ascii + * spaces (decimal value 32). It is stored as a six digit octal number with + * leading zeroes followed by a NUL and then a space. Various + * implementations do not adhere to this format. For better compatibility, + * ignore leading and trailing whitespace, and get the first six digits. In + * addition, some historic tar implementations treated bytes as signed. + * Implementations typically calculate the checksum both ways, and treat it + * as good if either the signed or unsigned sum matches the included + * checksum. + *
+ *

+ * In addition there are + * some tar files + * that seem to have parts of their header cleared to zero (no detectable + * magic bytes, etc.) but still have a reasonable-looking checksum field + * present. It looks like we can detect such cases reasonably well by + * checking whether the stored checksum is greater than the + * computed unsigned checksum. That check is unlikely to pass on some + * random file header, as it would need to have a valid sequence of + * octal digits in just the right place. + *

+ * The return value of this method should be treated as a best-effort + * heuristic rather than an absolute and final truth. The checksum + * verification logic may well evolve over time as more special cases + * are encountered. + * + * @param header tar header + * @return whether the checksum is reasonably good + * @see COMPRESS-191 + * @since 1.5 + */ + public static boolean verifyCheckSum(byte[] header) { + long storedSum = 0; + long unsignedSum = 0; + long signedSum = 0; + + int digits = 0; + for (int i = 0; i < header.length; i++) { + byte b = header[i]; + if (CHKSUM_OFFSET <= i && i < CHKSUM_OFFSET + CHKSUMLEN) { + if ('0' <= b && b <= '7' && digits++ < 6) { + storedSum = storedSum * 8 + b - '0'; + } else if (digits > 0) { + digits = 6; // only look at the first octal digit sequence + } + b = ' '; + } + unsignedSum += 0xff & b; + signedSum += b; + } + + return storedSum == unsignedSum || storedSum == signedSum + || storedSum > unsignedSum; // COMPRESS-177 + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/package.html new file mode 100644 index 000000000..141f33b61 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/tar/package.html @@ -0,0 +1,30 @@ + + + +

Provides stream classes for reading and writing archives using + the TAR format.

+ +

There are many different format dialects that call themselves + TAR. The classes of this package can read and write archives in + the traditional pre-POSIX ustar format and support GNU + specific extensions for long filenames that GNU tar itself by + now refers to as oldgnu.

+ + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java new file mode 100644 index 000000000..a0a76f114 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AbstractUnicodeExtraField.java @@ -0,0 +1,186 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.UnsupportedEncodingException; +import java.util.zip.CRC32; +import java.util.zip.ZipException; + +import org.apache.commons.compress.utils.CharsetNames; + +/** + * A common base class for Unicode extra information extra fields. + * @NotThreadSafe + */ +public abstract class AbstractUnicodeExtraField implements ZipExtraField { + private long nameCRC32; + private byte[] unicodeName; + private byte[] data; + + protected AbstractUnicodeExtraField() { + } + + /** + * Assemble as unicode extension from the name/comment and + * encoding of the original zip entry. + * + * @param text The file name or comment. + * @param bytes The encoded of the filename or comment in the zip + * file. 
+ * @param off The offset of the encoded filename or comment in + * bytes. + * @param len The length of the encoded filename or commentin + * bytes. + */ + protected AbstractUnicodeExtraField(String text, byte[] bytes, int off, int len) { + CRC32 crc32 = new CRC32(); + crc32.update(bytes, off, len); + nameCRC32 = crc32.getValue(); + + try { + unicodeName = text.getBytes(CharsetNames.UTF_8); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException("FATAL: UTF-8 encoding not supported.", e); + } + } + + /** + * Assemble as unicode extension from the name/comment and + * encoding of the original zip entry. + * + * @param text The file name or comment. + * @param bytes The encoded of the filename or comment in the zip + * file. + */ + protected AbstractUnicodeExtraField(String text, byte[] bytes) { + this(text, bytes, 0, bytes.length); + } + + private void assembleData() { + if (unicodeName == null) { + return; + } + + data = new byte[5 + unicodeName.length]; + // version 1 + data[0] = 0x01; + System.arraycopy(ZipLong.getBytes(nameCRC32), 0, data, 1, 4); + System.arraycopy(unicodeName, 0, data, 5, unicodeName.length); + } + + /** + * @return The CRC32 checksum of the filename or comment as + * encoded in the central directory of the zip file. + */ + public long getNameCRC32() { + return nameCRC32; + } + + /** + * @param nameCRC32 The CRC32 checksum of the filename as encoded + * in the central directory of the zip file to set. + */ + public void setNameCRC32(long nameCRC32) { + this.nameCRC32 = nameCRC32; + data = null; + } + + /** + * @return The UTF-8 encoded name. + */ + public byte[] getUnicodeName() { + byte[] b = null; + if (unicodeName != null) { + b = new byte[unicodeName.length]; + System.arraycopy(unicodeName, 0, b, 0, b.length); + } + return b; + } + + /** + * @param unicodeName The UTF-8 encoded name to set. 
+ */ + public void setUnicodeName(byte[] unicodeName) { + if (unicodeName != null) { + this.unicodeName = new byte[unicodeName.length]; + System.arraycopy(unicodeName, 0, this.unicodeName, 0, + unicodeName.length); + } else { + this.unicodeName = null; + } + data = null; + } + + public byte[] getCentralDirectoryData() { + if (data == null) { + this.assembleData(); + } + byte[] b = null; + if (data != null) { + b = new byte[data.length]; + System.arraycopy(data, 0, b, 0, b.length); + } + return b; + } + + public ZipShort getCentralDirectoryLength() { + if (data == null) { + assembleData(); + } + return new ZipShort(data.length); + } + + public byte[] getLocalFileDataData() { + return getCentralDirectoryData(); + } + + public ZipShort getLocalFileDataLength() { + return getCentralDirectoryLength(); + } + + public void parseFromLocalFileData(byte[] buffer, int offset, int length) + throws ZipException { + + if (length < 5) { + throw new ZipException("UniCode path extra data must have at least 5 bytes."); + } + + int version = buffer[offset]; + + if (version != 0x01) { + throw new ZipException("Unsupported version [" + version + + "] for UniCode path extra data."); + } + + nameCRC32 = ZipLong.getValue(buffer, offset + 1); + unicodeName = new byte[length - 5]; + System.arraycopy(buffer, offset + 5, unicodeName, 0, length - 5); + data = null; + } + + /** + * Doesn't do anything special since this class always uses the + * same data in central directory and local file data. 
+ */ + public void parseFromCentralDirectoryData(byte[] buffer, int offset, + int length) + throws ZipException { + parseFromLocalFileData(buffer, offset, length); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AsiExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AsiExtraField.java new file mode 100644 index 000000000..a2dc1c3b3 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/AsiExtraField.java @@ -0,0 +1,330 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.CRC32; +import java.util.zip.ZipException; + +/** + * Adds Unix file permission and UID/GID fields as well as symbolic + * link handling. + * + *

This class uses the ASi extra field in the format:

+ *
+ *         Value         Size            Description
+ *         -----         ----            -----------
+ * (Unix3) 0x756e        Short           tag for this extra block type
+ *         TSize         Short           total data size for this block
+ *         CRC           Long            CRC-32 of the remaining data
+ *         Mode          Short           file permissions
+ *         SizDev        Long            symlink'd size OR major/minor dev num
+ *         UID           Short           user ID
+ *         GID           Short           group ID
+ *         (var.)        variable        symbolic link filename
+ * 
+ *

taken from appnote.iz (Info-ZIP note, 981119) found at ftp://ftp.uu.net/pub/archiving/zip/doc/

+ * + *

Short is two bytes and Long is four bytes in big endian byte and + * word order, device numbers are currently not supported.

+ * @NotThreadSafe + * + *

Since the documentation this class is based upon doesn't mention + * the character encoding of the file name at all, it is assumed that + * it uses the current platform's default encoding.

+ */ +public class AsiExtraField implements ZipExtraField, UnixStat, Cloneable { + + private static final ZipShort HEADER_ID = new ZipShort(0x756E); + private static final int WORD = 4; + /** + * Standard Unix stat(2) file mode. + */ + private int mode = 0; + /** + * User ID. + */ + private int uid = 0; + /** + * Group ID. + */ + private int gid = 0; + /** + * File this entry points to, if it is a symbolic link. + * + *

empty string - if entry is not a symbolic link.

+ */ + private String link = ""; + /** + * Is this an entry for a directory? + */ + private boolean dirFlag = false; + + /** + * Instance used to calculate checksums. + */ + private CRC32 crc = new CRC32(); + + /** Constructor for AsiExtraField. */ + public AsiExtraField() { + } + + /** + * The Header-ID. + * @return the value for the header id for this extrafield + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * @return a ZipShort for the length of the data of this extra field + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(WORD // CRC + + 2 // Mode + + WORD // SizDev + + 2 // UID + + 2 // GID + + getLinkedFile().getBytes().length); + // Uses default charset - see class Javadoc + } + + /** + * Delegate to local file data. + * @return the centralDirectory length + */ + public ZipShort getCentralDirectoryLength() { + return getLocalFileDataLength(); + } + + /** + * The actual data to put into local file data - without Header-ID + * or length specifier. 
+ * @return get the data + */ + public byte[] getLocalFileDataData() { + // CRC will be added later + byte[] data = new byte[getLocalFileDataLength().getValue() - WORD]; + System.arraycopy(ZipShort.getBytes(getMode()), 0, data, 0, 2); + + byte[] linkArray = getLinkedFile().getBytes(); // Uses default charset - see class Javadoc + // CheckStyle:MagicNumber OFF + System.arraycopy(ZipLong.getBytes(linkArray.length), + 0, data, 2, WORD); + + System.arraycopy(ZipShort.getBytes(getUserId()), + 0, data, 6, 2); + System.arraycopy(ZipShort.getBytes(getGroupId()), + 0, data, 8, 2); + + System.arraycopy(linkArray, 0, data, 10, linkArray.length); + // CheckStyle:MagicNumber ON + + crc.reset(); + crc.update(data); + long checksum = crc.getValue(); + + byte[] result = new byte[data.length + WORD]; + System.arraycopy(ZipLong.getBytes(checksum), 0, result, 0, WORD); + System.arraycopy(data, 0, result, WORD, data.length); + return result; + } + + /** + * Delegate to local file data. + * @return the local file data + */ + public byte[] getCentralDirectoryData() { + return getLocalFileDataData(); + } + + /** + * Set the user id. + * @param uid the user id + */ + public void setUserId(int uid) { + this.uid = uid; + } + + /** + * Get the user id. + * @return the user id + */ + public int getUserId() { + return uid; + } + + /** + * Set the group id. + * @param gid the group id + */ + public void setGroupId(int gid) { + this.gid = gid; + } + + /** + * Get the group id. + * @return the group id + */ + public int getGroupId() { + return gid; + } + + /** + * Indicate that this entry is a symbolic link to the given filename. + * + * @param name Name of the file this entry links to, empty String + * if it is not a symbolic link. + */ + public void setLinkedFile(String name) { + link = name; + mode = getMode(mode); + } + + /** + * Name of linked file + * + * @return name of the file this entry links to if it is a + * symbolic link, the empty string otherwise. 
+ */ + public String getLinkedFile() { + return link; + } + + /** + * Is this entry a symbolic link? + * @return true if this is a symbolic link + */ + public boolean isLink() { + return getLinkedFile().length() != 0; + } + + /** + * File mode of this file. + * @param mode the file mode + */ + public void setMode(int mode) { + this.mode = getMode(mode); + } + + /** + * File mode of this file. + * @return the file mode + */ + public int getMode() { + return mode; + } + + /** + * Indicate whether this entry is a directory. + * @param dirFlag if true, this entry is a directory + */ + public void setDirectory(boolean dirFlag) { + this.dirFlag = dirFlag; + mode = getMode(mode); + } + + /** + * Is this entry a directory? + * @return true if this entry is a directory + */ + public boolean isDirectory() { + return dirFlag && !isLink(); + } + + /** + * Populate data from this array as if it was in local file data. + * @param data an array of bytes + * @param offset the start offset + * @param length the number of bytes in the array from offset + * @throws ZipException on error + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) + throws ZipException { + + long givenChecksum = ZipLong.getValue(data, offset); + byte[] tmp = new byte[length - WORD]; + System.arraycopy(data, offset + WORD, tmp, 0, length - WORD); + crc.reset(); + crc.update(tmp); + long realChecksum = crc.getValue(); + if (givenChecksum != realChecksum) { + throw new ZipException("bad CRC checksum " + + Long.toHexString(givenChecksum) + + " instead of " + + Long.toHexString(realChecksum)); + } + + int newMode = ZipShort.getValue(tmp, 0); + // CheckStyle:MagicNumber OFF + byte[] linkArray = new byte[(int) ZipLong.getValue(tmp, 2)]; + uid = ZipShort.getValue(tmp, 6); + gid = ZipShort.getValue(tmp, 8); + + if (linkArray.length == 0) { + link = ""; + } else { + System.arraycopy(tmp, 10, linkArray, 0, linkArray.length); + link = new String(linkArray); // Uses default charset - see class 
Javadoc + } + // CheckStyle:MagicNumber ON + setDirectory((newMode & DIR_FLAG) != 0); + setMode(newMode); + } + + /** + * Doesn't do anything special since this class always uses the + * same data in central directory and local file data. + */ + public void parseFromCentralDirectoryData(byte[] buffer, int offset, + int length) + throws ZipException { + parseFromLocalFileData(buffer, offset, length); + } + + /** + * Get the file mode for given permissions with the correct file type. + * @param mode the mode + * @return the type with the mode + */ + protected int getMode(int mode) { + int type = FILE_FLAG; + if (isLink()) { + type = LINK_FLAG; + } else if (isDirectory()) { + type = DIR_FLAG; + } + return type | (mode & PERM_MASK); + } + + @Override + public Object clone() { + try { + AsiExtraField cloned = (AsiExtraField) super.clone(); + cloned.crc = new CRC32(); + return cloned; + } catch (CloneNotSupportedException cnfe) { + // impossible + throw new RuntimeException(cnfe); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BinaryTree.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BinaryTree.java new file mode 100644 index 000000000..a4ac4b5b7 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BinaryTree.java @@ -0,0 +1,189 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.DataInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Arrays; + +/** + * Binary tree of positive values. + * + * @author Emmanuel Bourg + * @since 1.7 + */ +class BinaryTree { + + /** Value in the array indicating an undefined node */ + private static final int UNDEFINED = -1; + + /** Value in the array indicating a non leaf node */ + private static final int NODE = -2; + + /** + * The array representing the binary tree. The root is at index 0, + * the left children are at 2*i+1 and the right children at 2*i+2. + */ + private final int[] tree; + + public BinaryTree(int depth) { + tree = new int[(1 << (depth + 1)) - 1]; + Arrays.fill(tree, UNDEFINED); + } + + /** + * Adds a leaf to the tree. 
+ * + * @param node the index of the node where the path is appended + * @param path the path to the leaf (bits are parsed from the right to the left) + * @param depth the number of nodes in the path + * @param value the value of the leaf (must be positive) + */ + public void addLeaf(int node, int path, int depth, int value) { + if (depth == 0) { + // end of the path reached, add the value to the current node + if (tree[node] == UNDEFINED) { + tree[node] = value; + } else { + throw new IllegalArgumentException("Tree value at index " + node + " has already been assigned (" + tree[node] + ")"); + } + } else { + // mark the current node as a non leaf node + tree[node] = NODE; + + // move down the path recursively + int nextChild = 2 * node + 1 + (path & 1); + addLeaf(nextChild, path >>> 1, depth - 1, value); + } + } + + /** + * Reads a value from the specified bit stream. + * + * @param stream + * @return the value decoded, or -1 if the end of the stream is reached + */ + public int read(BitStream stream) throws IOException { + int currentIndex = 0; + + while (true) { + int bit = stream.nextBit(); + if (bit == -1) { + return -1; + } + + int childIndex = 2 * currentIndex + 1 + bit; + int value = tree[childIndex]; + if (value == NODE) { + // consume the next bit + currentIndex = childIndex; + } else if (value != UNDEFINED) { + return value; + } else { + throw new IOException("The child " + bit + " of node at index " + currentIndex + " is not defined"); + } + } + } + + + /** + * Decodes the packed binary tree from the specified stream. 
+ */ + static BinaryTree decode(InputStream in, final int totalNumberOfValues) throws IOException { + // the first byte contains the size of the structure minus one + int size = in.read() + 1; + if (size == 0) { + throw new IOException("Cannot read the size of the encoded tree, unexpected end of stream"); + } + + byte[] encodedTree = new byte[size]; + new DataInputStream(in).readFully(encodedTree); + + /** The maximum bit length for a value (16 or lower) */ + int maxLength = 0; + + int[] originalBitLengths = new int[totalNumberOfValues]; + int pos = 0; + for (byte b : encodedTree) { + // each byte encodes the number of values (upper 4 bits) for a bit length (lower 4 bits) + int numberOfValues = ((b & 0xF0) >> 4) + 1; + int bitLength = (b & 0x0F) + 1; + + for (int j = 0; j < numberOfValues; j++) { + originalBitLengths[pos++] = bitLength; + } + + maxLength = Math.max(maxLength, bitLength); + } + + // sort the array of bit lengths and memorize the permutation used to restore the order of the codes + int[] permutation = new int[originalBitLengths.length]; + for (int k = 0; k < permutation.length; k++) { + permutation[k] = k; + } + + int c = 0; + int[] sortedBitLengths = new int[originalBitLengths.length]; + for (int k = 0; k < originalBitLengths.length; k++) { + // iterate over the values + for (int l = 0; l < originalBitLengths.length; l++) { + // look for the value in the original array + if (originalBitLengths[l] == k) { + // put the value at the current position in the sorted array... 
+ sortedBitLengths[c] = k; + + // ...and memorize the permutation + permutation[c] = l; + + c++; + } + } + } + + // decode the values of the tree + int code = 0; + int codeIncrement = 0; + int lastBitLength = 0; + + int[] codes = new int[totalNumberOfValues]; + + for (int i = totalNumberOfValues - 1; i >= 0; i--) { + code = code + codeIncrement; + if (sortedBitLengths[i] != lastBitLength) { + lastBitLength = sortedBitLengths[i]; + codeIncrement = 1 << (16 - lastBitLength); + } + codes[permutation[i]] = code; + } + + // build the tree + BinaryTree tree = new BinaryTree(maxLength); + + for (int k = 0; k < codes.length; k++) { + int bitLength = originalBitLengths[k]; + if (bitLength > 0) { + tree.addLeaf(0, Integer.reverse(codes[k] << 16), bitLength, k); + } + } + + return tree; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BitStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BitStream.java new file mode 100644 index 000000000..c9e6ccf74 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/BitStream.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.io.InputStream; + +/** + * Iterates over the bits of an InputStream. For each byte the bits + * are read from the right to the left. + * + * @since 1.7 + */ +class BitStream { + + private final InputStream in; + + /** The bits read from the underlying stream but not consumed by nextBits() */ + private long bitCache; + + /** The number of bits available in the bit cache */ + private int bitCacheSize; + + /** Bit masks for extracting the right most bits from a byte */ + private static final int[] MASKS = new int[]{ + 0x00, // 00000000 + 0x01, // 00000001 + 0x03, // 00000011 + 0x07, // 00000111 + 0x0F, // 00001111 + 0x1F, // 00011111 + 0x3F, // 00111111 + 0x7F, // 01111111 + 0xFF // 11111111 + }; + + BitStream(InputStream in) { + this.in = in; + } + + private boolean fillCache() throws IOException { + boolean filled = false; + + while (bitCacheSize <= 56) { + long nextByte = in.read(); + if (nextByte == -1) { + break; + } + + filled = true; + bitCache = bitCache | (nextByte << bitCacheSize); + bitCacheSize += 8; + } + + return filled; + } + + /** + * Returns the next bit. + * + * @return The next bit (0 or 1) or -1 if the end of the stream has been reached + */ + int nextBit() throws IOException { + if (bitCacheSize == 0 && !fillCache()) { + return -1; + } + + int bit = (int) (bitCache & 1); // extract the right most bit + + bitCache = (bitCache >>> 1); // shift the remaning bits to the right + bitCacheSize--; + + return bit; + } + + /** + * Returns the integer value formed by the n next bits (up to 8 bits). 
+ * + * @param n the number of bits read (up to 8) + * @return The value formed by the n bits, or -1 if the end of the stream has been reached + */ + int nextBits(final int n) throws IOException { + if (bitCacheSize < n && !fillCache()) { + return -1; + } + + final int bits = (int) (bitCache & MASKS[n]); // extract the right most bits + + bitCache = (bitCache >>> n); // shift the remaning bits to the right + bitCacheSize = bitCacheSize - n; + + return bits; + } + + int nextByte() throws IOException { + return nextBits(8); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/CircularBuffer.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/CircularBuffer.java new file mode 100644 index 000000000..af64a8574 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/CircularBuffer.java @@ -0,0 +1,89 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +/** + * Circular byte buffer. 
/**
 * Circular byte buffer.
 *
 * @author Emmanuel Bourg
 * @since 1.7
 */
class CircularBuffer {

    /** Capacity of the ring. */
    private final int size;

    /** Backing storage. */
    private final byte[] buffer;

    /** Position of the next byte to be handed out. */
    private int readIndex;

    /** Position where the next byte will be stored. */
    private int writeIndex;

    CircularBuffer(int size) {
        this.size = size;
        this.buffer = new byte[size];
    }

    /**
     * Tells if a new byte can be read from the buffer.
     */
    public boolean available() {
        return readIndex != writeIndex;
    }

    /**
     * Writes a byte to the buffer.
     */
    public void put(int value) {
        buffer[writeIndex] = (byte) value;
        writeIndex = (writeIndex + 1) % size;
    }

    /**
     * Reads a byte from the buffer.
     *
     * @return the next byte as an unsigned value, or -1 if the buffer is empty
     */
    public int get() {
        if (!available()) {
            return -1;
        }
        final int value = buffer[readIndex] & 0xFF;
        readIndex = (readIndex + 1) % size;
        return value;
    }

    /**
     * Copy a previous interval in the buffer to the current position.
     * The source may overlap the destination, which repeats the copied
     * run (standard LZ-style back reference behavior).
     *
     * @param distance the distance from the current write position
     * @param length the number of bytes to copy
     */
    public void copy(int distance, int length) {
        final int start = writeIndex - distance; // may be negative; wrapped below
        for (int offset = 0; offset < length; offset++) {
            buffer[writeIndex] = buffer[(start + offset + size) % size];
            writeIndex = (writeIndex + 1) % size;
        }
    }
}
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.io.InputStream; + +/** + * The implode compression method was added to PKZIP 1.01 released in 1989. + * It was then dropped from PKZIP 2.0 released in 1993 in favor of the deflate + * method. + *

+ * The algorithm is described in the ZIP File Format Specification. + * + * @see ZIP File Format Specification + * + * @author Emmanuel Bourg + * @since 1.7 + */ +class ExplodingInputStream extends InputStream { + + /** The underlying stream containing the compressed data */ + private final InputStream in; + + /** The stream of bits read from the input stream */ + private BitStream bits; + + /** The size of the sliding dictionary (4K or 8K) */ + private final int dictionarySize; + + /** The number of Shannon-Fano trees (2 or 3) */ + private final int numberOfTrees; + + private final int minimumMatchLength; + + /** The binary tree containing the 256 encoded literals (null when only two trees are used) */ + private BinaryTree literalTree; + + /** The binary tree containing the 64 encoded lengths */ + private BinaryTree lengthTree; + + /** The binary tree containing the 64 encoded distances */ + private BinaryTree distanceTree; + + /** Output buffer holding the decompressed data */ + private final CircularBuffer buffer = new CircularBuffer(32 * 1024); + + /** + * Create a new stream decompressing the content of the specified stream + * using the explode algorithm. + * + * @param dictionarySize the size of the sliding dictionary (4096 or 8192) + * @param numberOfTrees the number of trees (2 or 3) + * @param in the compressed data stream + */ + public ExplodingInputStream(int dictionarySize, int numberOfTrees, InputStream in) { + if (dictionarySize != 4096 && dictionarySize != 8192) { + throw new IllegalArgumentException("The dictionary size must be 4096 or 8192"); + } + if (numberOfTrees != 2 && numberOfTrees != 3) { + throw new IllegalArgumentException("The number of trees must be 2 or 3"); + } + this.dictionarySize = dictionarySize; + this.numberOfTrees = numberOfTrees; + this.minimumMatchLength = numberOfTrees; + this.in = in; + } + + /** + * Reads the encoded binary trees and prepares the bit stream. 
+ * + * @throws IOException + */ + private void init() throws IOException { + if (bits == null) { + if (numberOfTrees == 3) { + literalTree = BinaryTree.decode(in, 256); + } + + lengthTree = BinaryTree.decode(in, 64); + distanceTree = BinaryTree.decode(in, 64); + + bits = new BitStream(in); + } + } + + @Override + public int read() throws IOException { + if (!buffer.available()) { + fillBuffer(); + } + + return buffer.get(); + } + + /** + * Fill the sliding dictionary with more data. + * @throws IOException + */ + private void fillBuffer() throws IOException { + init(); + + int bit = bits.nextBit(); + if (bit == 1) { + // literal value + int literal; + if (literalTree != null) { + literal = literalTree.read(bits); + } else { + literal = bits.nextBits(8); + } + + if (literal == -1) { + // end of stream reached, nothing left to decode + return; + } + + buffer.put(literal); + + } else if (bit == 0) { + // back reference + int distanceLowSize = dictionarySize == 4096 ? 6 : 7; + int distanceLow = bits.nextBits(distanceLowSize); + int distanceHigh = distanceTree.read(bits); + if (distanceHigh == -1 && distanceLow <= 0) { + // end of stream reached, nothing left to decode + return; + } + int distance = distanceHigh << distanceLowSize | distanceLow; + + int length = lengthTree.read(bits); + if (length == 63) { + length += bits.nextBits(8); + } + length += minimumMatchLength; + + buffer.copy(distance + 1, length); + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java new file mode 100644 index 000000000..b2fa1dde7 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ExtraFieldUtils.java @@ -0,0 +1,308 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.ArrayList; +import java.util.List; +import java.util.Map; +import java.util.concurrent.ConcurrentHashMap; +import java.util.zip.ZipException; + +/** + * ZipExtraField related methods + * @NotThreadSafe because the HashMap is not synch. + */ +// CheckStyle:HideUtilityClassConstructorCheck OFF (bc) +public class ExtraFieldUtils { + + private static final int WORD = 4; + + /** + * Static registry of known extra fields. + */ + private static final Map> implementations; + + static { + implementations = new ConcurrentHashMap>(); + register(AsiExtraField.class); + register(X5455_ExtendedTimestamp.class); + register(X7875_NewUnix.class); + register(JarMarker.class); + register(UnicodePathExtraField.class); + register(UnicodeCommentExtraField.class); + register(Zip64ExtendedInformationExtraField.class); + } + + /** + * Register a ZipExtraField implementation. + * + *

The given class must have a no-arg constructor and implement + * the {@link ZipExtraField ZipExtraField interface}.

+ * @param c the class to register + */ + public static void register(Class c) { + try { + ZipExtraField ze = (ZipExtraField) c.newInstance(); + implementations.put(ze.getHeaderId(), c); + } catch (ClassCastException cc) { + throw new RuntimeException(c + " doesn\'t implement ZipExtraField"); + } catch (InstantiationException ie) { + throw new RuntimeException(c + " is not a concrete class"); + } catch (IllegalAccessException ie) { + throw new RuntimeException(c + "\'s no-arg constructor is not public"); + } + } + + /** + * Create an instance of the appropriate ExtraField, falls back to + * {@link UnrecognizedExtraField UnrecognizedExtraField}. + * @param headerId the header identifier + * @return an instance of the appropriate ExtraField + * @exception InstantiationException if unable to instantiate the class + * @exception IllegalAccessException if not allowed to instantiate the class + */ + public static ZipExtraField createExtraField(ZipShort headerId) + throws InstantiationException, IllegalAccessException { + Class c = implementations.get(headerId); + if (c != null) { + return (ZipExtraField) c.newInstance(); + } + UnrecognizedExtraField u = new UnrecognizedExtraField(); + u.setHeaderId(headerId); + return u; + } + + /** + * Split the array into ExtraFields and populate them with the + * given data as local file data, throwing an exception if the + * data cannot be parsed. + * @param data an array of bytes as it appears in local file data + * @return an array of ExtraFields + * @throws ZipException on error + */ + public static ZipExtraField[] parse(byte[] data) throws ZipException { + return parse(data, true, UnparseableExtraField.THROW); + } + + /** + * Split the array into ExtraFields and populate them with the + * given data, throwing an exception if the data cannot be parsed. 
+ * @param data an array of bytes + * @param local whether data originates from the local file data + * or the central directory + * @return an array of ExtraFields + * @throws ZipException on error + */ + public static ZipExtraField[] parse(byte[] data, boolean local) + throws ZipException { + return parse(data, local, UnparseableExtraField.THROW); + } + + /** + * Split the array into ExtraFields and populate them with the + * given data. + * @param data an array of bytes + * @param local whether data originates from the local file data + * or the central directory + * @param onUnparseableData what to do if the extra field data + * cannot be parsed. + * @return an array of ExtraFields + * @throws ZipException on error + * + * @since 1.1 + */ + public static ZipExtraField[] parse(byte[] data, boolean local, + UnparseableExtraField onUnparseableData) + throws ZipException { + List v = new ArrayList(); + int start = 0; + LOOP: + while (start <= data.length - WORD) { + ZipShort headerId = new ZipShort(data, start); + int length = new ZipShort(data, start + 2).getValue(); + if (start + WORD + length > data.length) { + switch(onUnparseableData.getKey()) { + case UnparseableExtraField.THROW_KEY: + throw new ZipException("bad extra field starting at " + + start + ". 
Block length of " + + length + " bytes exceeds remaining" + + " data of " + + (data.length - start - WORD) + + " bytes."); + case UnparseableExtraField.READ_KEY: + UnparseableExtraFieldData field = + new UnparseableExtraFieldData(); + if (local) { + field.parseFromLocalFileData(data, start, + data.length - start); + } else { + field.parseFromCentralDirectoryData(data, start, + data.length - start); + } + v.add(field); + //$FALL-THROUGH$ + case UnparseableExtraField.SKIP_KEY: + // since we cannot parse the data we must assume + // the extra field consumes the whole rest of the + // available data + break LOOP; + default: + throw new ZipException("unknown UnparseableExtraField key: " + + onUnparseableData.getKey()); + } + } + try { + ZipExtraField ze = createExtraField(headerId); + if (local) { + ze.parseFromLocalFileData(data, start + WORD, length); + } else { + ze.parseFromCentralDirectoryData(data, start + WORD, + length); + } + v.add(ze); + } catch (InstantiationException ie) { + throw (ZipException) new ZipException(ie.getMessage()).initCause(ie); + } catch (IllegalAccessException iae) { + throw (ZipException) new ZipException(iae.getMessage()).initCause(iae); + } + start += length + WORD; + } + + ZipExtraField[] result = new ZipExtraField[v.size()]; + return v.toArray(result); + } + + /** + * Merges the local file data fields of the given ZipExtraFields. + * @param data an array of ExtraFiles + * @return an array of bytes + */ + public static byte[] mergeLocalFileDataData(ZipExtraField[] data) { + final boolean lastIsUnparseableHolder = data.length > 0 + && data[data.length - 1] instanceof UnparseableExtraFieldData; + int regularExtraFieldCount = + lastIsUnparseableHolder ? 
data.length - 1 : data.length; + + int sum = WORD * regularExtraFieldCount; + for (ZipExtraField element : data) { + sum += element.getLocalFileDataLength().getValue(); + } + + byte[] result = new byte[sum]; + int start = 0; + for (int i = 0; i < regularExtraFieldCount; i++) { + System.arraycopy(data[i].getHeaderId().getBytes(), + 0, result, start, 2); + System.arraycopy(data[i].getLocalFileDataLength().getBytes(), + 0, result, start + 2, 2); + byte[] local = data[i].getLocalFileDataData(); + System.arraycopy(local, 0, result, start + WORD, local.length); + start += local.length + WORD; + } + if (lastIsUnparseableHolder) { + byte[] local = data[data.length - 1].getLocalFileDataData(); + System.arraycopy(local, 0, result, start, local.length); + } + return result; + } + + /** + * Merges the central directory fields of the given ZipExtraFields. + * @param data an array of ExtraFields + * @return an array of bytes + */ + public static byte[] mergeCentralDirectoryData(ZipExtraField[] data) { + final boolean lastIsUnparseableHolder = data.length > 0 + && data[data.length - 1] instanceof UnparseableExtraFieldData; + int regularExtraFieldCount = + lastIsUnparseableHolder ? 
data.length - 1 : data.length; + + int sum = WORD * regularExtraFieldCount; + for (ZipExtraField element : data) { + sum += element.getCentralDirectoryLength().getValue(); + } + byte[] result = new byte[sum]; + int start = 0; + for (int i = 0; i < regularExtraFieldCount; i++) { + System.arraycopy(data[i].getHeaderId().getBytes(), + 0, result, start, 2); + System.arraycopy(data[i].getCentralDirectoryLength().getBytes(), + 0, result, start + 2, 2); + byte[] local = data[i].getCentralDirectoryData(); + System.arraycopy(local, 0, result, start + WORD, local.length); + start += local.length + WORD; + } + if (lastIsUnparseableHolder) { + byte[] local = data[data.length - 1].getCentralDirectoryData(); + System.arraycopy(local, 0, result, start, local.length); + } + return result; + } + + /** + * "enum" for the possible actions to take if the extra field + * cannot be parsed. + * + * @since 1.1 + */ + public static final class UnparseableExtraField { + /** + * Key for "throw an exception" action. + */ + public static final int THROW_KEY = 0; + /** + * Key for "skip" action. + */ + public static final int SKIP_KEY = 1; + /** + * Key for "read" action. + */ + public static final int READ_KEY = 2; + + /** + * Throw an exception if field cannot be parsed. + */ + public static final UnparseableExtraField THROW + = new UnparseableExtraField(THROW_KEY); + + /** + * Skip the extra field entirely and don't make its data + * available - effectively removing the extra field data. + */ + public static final UnparseableExtraField SKIP + = new UnparseableExtraField(SKIP_KEY); + + /** + * Read the extra field data into an instance of {@link + * UnparseableExtraFieldData UnparseableExtraFieldData}. + */ + public static final UnparseableExtraField READ + = new UnparseableExtraField(READ_KEY); + + private final int key; + + private UnparseableExtraField(int k) { + key = k; + } + + /** + * Key of the action to take. 
+ */ + public int getKey() { return key; } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.java new file mode 100644 index 000000000..4baae8aba --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/FallbackZipEncoding.java @@ -0,0 +1,95 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * A fallback ZipEncoding, which uses a java.io means to encode names. + * + *

This implementation is not suitable for encodings other than + * UTF-8, because java.io encodes unmappable character as question + * marks leading to unreadable ZIP entries on some operating + * systems.

+ * + *

Furthermore this implementation is unable to tell whether a + * given name can be safely encoded or not.

+ * + *

This implementation acts as a last resort implementation, when + * neither {@link Simple8BitZipEnoding} nor {@link NioZipEncoding} is + * available.

+ * + *

The methods of this class are reentrant.

+ * @Immutable + */ +class FallbackZipEncoding implements ZipEncoding { + private final String charsetName; + + /** + * Construct a fallback zip encoding, which uses the platform's + * default charset. + */ + public FallbackZipEncoding() { + this.charsetName = null; + } + + /** + * Construct a fallback zip encoding, which uses the given charset. + * + * @param charsetName The name of the charset or {@code null} for + * the platform's default character set. + */ + public FallbackZipEncoding(String charsetName) { + this.charsetName = charsetName; + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#canEncode(java.lang.String) + */ + public boolean canEncode(String name) { + return true; + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#encode(java.lang.String) + */ + public ByteBuffer encode(String name) throws IOException { + if (this.charsetName == null) { // i.e. use default charset, see no-args constructor + return ByteBuffer.wrap(name.getBytes()); + } else { + return ByteBuffer.wrap(name.getBytes(this.charsetName)); + } + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#decode(byte[]) + */ + public String decode(byte[] data) throws IOException { + if (this.charsetName == null) { // i.e. use default charset, see no-args constructor + return new String(data); + } else { + return new String(data,this.charsetName); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java new file mode 100644 index 000000000..d4b4c3dd3 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/GeneralPurposeBit.java @@ -0,0 +1,209 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. 
See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Parser/encoder for the "general purpose bit" field in ZIP's local + * file and central directory headers. + * + * @since 1.1 + * @NotThreadSafe + */ +public final class GeneralPurposeBit { + + /** + * Indicates that the file is encrypted. + */ + private static final int ENCRYPTION_FLAG = 1 << 0; + + /** + * Indicates the size of the sliding dictionary used by the compression method 6 (imploding). + *
    + *
  • 0: 4096 bytes
  • + *
  • 1: 8192 bytes
  • + *
+ */ + private static final int SLIDING_DICTIONARY_SIZE_FLAG = 1 << 1; + + /** + * Indicates the number of Shannon-Fano trees used by the compression method 6 (imploding). + *
    + *
  • 0: 2 trees (lengths, distances)
  • + *
  • 1: 3 trees (literals, lengths, distances)
  • + *
+ */ + private static final int NUMBER_OF_SHANNON_FANO_TREES_FLAG = 1 << 2; + + /** + * Indicates that a data descriptor stored after the file contents + * will hold CRC and size information. + */ + private static final int DATA_DESCRIPTOR_FLAG = 1 << 3; + + /** + * Indicates strong encryption. + */ + private static final int STRONG_ENCRYPTION_FLAG = 1 << 6; + + /** + * Indicates that filenames are written in UTF-8. + * + *

The only reason this is public is that {@link + * ZipArchiveOutputStream#EFS_FLAG} was public in Apache Commons + * Compress 1.0 and we needed a substitute for it.

+ */ + public static final int UFT8_NAMES_FLAG = 1 << 11; + + private boolean languageEncodingFlag = false; + private boolean dataDescriptorFlag = false; + private boolean encryptionFlag = false; + private boolean strongEncryptionFlag = false; + private int slidingDictionarySize; + private int numberOfShannonFanoTrees; + + public GeneralPurposeBit() { + } + + /** + * whether the current entry uses UTF8 for file name and comment. + */ + public boolean usesUTF8ForNames() { + return languageEncodingFlag; + } + + /** + * whether the current entry will use UTF8 for file name and comment. + */ + public void useUTF8ForNames(boolean b) { + languageEncodingFlag = b; + } + + /** + * whether the current entry uses the data descriptor to store CRC + * and size information + */ + public boolean usesDataDescriptor() { + return dataDescriptorFlag; + } + + /** + * whether the current entry will use the data descriptor to store + * CRC and size information + */ + public void useDataDescriptor(boolean b) { + dataDescriptorFlag = b; + } + + /** + * whether the current entry is encrypted + */ + public boolean usesEncryption() { + return encryptionFlag; + } + + /** + * whether the current entry will be encrypted + */ + public void useEncryption(boolean b) { + encryptionFlag = b; + } + + /** + * whether the current entry is encrypted using strong encryption + */ + public boolean usesStrongEncryption() { + return encryptionFlag && strongEncryptionFlag; + } + + /** + * whether the current entry will be encrypted using strong encryption + */ + public void useStrongEncryption(boolean b) { + strongEncryptionFlag = b; + if (b) { + useEncryption(true); + } + } + + /** + * Returns the sliding dictionary size used by the compression method 6 (imploding). + */ + int getSlidingDictionarySize() { + return slidingDictionarySize; + } + + /** + * Returns the number of trees used by the compression method 6 (imploding). 
+ */ + int getNumberOfShannonFanoTrees() { + return numberOfShannonFanoTrees; + } + + /** + * Encodes the set bits in a form suitable for ZIP archives. + */ + public byte[] encode() { + return + ZipShort.getBytes((dataDescriptorFlag ? DATA_DESCRIPTOR_FLAG : 0) + | + (languageEncodingFlag ? UFT8_NAMES_FLAG : 0) + | + (encryptionFlag ? ENCRYPTION_FLAG : 0) + | + (strongEncryptionFlag ? STRONG_ENCRYPTION_FLAG : 0) + ); + } + + /** + * Parses the supported flags from the given archive data. + * + * @param data local file header or a central directory entry. + * @param offset offset at which the general purpose bit starts + */ + public static GeneralPurposeBit parse(final byte[] data, final int offset) { + final int generalPurposeFlag = ZipShort.getValue(data, offset); + GeneralPurposeBit b = new GeneralPurposeBit(); + b.useDataDescriptor((generalPurposeFlag & DATA_DESCRIPTOR_FLAG) != 0); + b.useUTF8ForNames((generalPurposeFlag & UFT8_NAMES_FLAG) != 0); + b.useStrongEncryption((generalPurposeFlag & STRONG_ENCRYPTION_FLAG) != 0); + b.useEncryption((generalPurposeFlag & ENCRYPTION_FLAG) != 0); + b.slidingDictionarySize = (generalPurposeFlag & SLIDING_DICTIONARY_SIZE_FLAG) != 0 ? 8192 : 4096; + b.numberOfShannonFanoTrees = (generalPurposeFlag & NUMBER_OF_SHANNON_FANO_TREES_FLAG) != 0 ? 3 : 2; + return b; + } + + @Override + public int hashCode() { + return 3 * (7 * (13 * (17 * (encryptionFlag ? 1 : 0) + + (strongEncryptionFlag ? 1 : 0)) + + (languageEncodingFlag ? 1 : 0)) + + (dataDescriptorFlag ? 
1 : 0)); + } + + @Override + public boolean equals(Object o) { + if (!(o instanceof GeneralPurposeBit)) { + return false; + } + GeneralPurposeBit g = (GeneralPurposeBit) o; + return g.encryptionFlag == encryptionFlag + && g.strongEncryptionFlag == strongEncryptionFlag + && g.languageEncodingFlag == languageEncodingFlag + && g.dataDescriptorFlag == dataDescriptorFlag; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/JarMarker.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/JarMarker.java new file mode 100644 index 000000000..f5dde8510 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/JarMarker.java @@ -0,0 +1,115 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.ZipException; + +/** + * If this extra field is added as the very first extra field of the + * archive, Solaris will consider it an executable jar file. 
+ * @Immutable + */ +public final class JarMarker implements ZipExtraField { + + private static final ZipShort ID = new ZipShort(0xCAFE); + private static final ZipShort NULL = new ZipShort(0); + private static final byte[] NO_BYTES = new byte[0]; + private static final JarMarker DEFAULT = new JarMarker(); + + /** No-arg constructor */ + public JarMarker() { + // empty + } + + /** + * Since JarMarker is stateless we can always use the same instance. + * @return the DEFAULT jarmaker. + */ + public static JarMarker getInstance() { + return DEFAULT; + } + + /** + * The Header-ID. + * @return the header id + */ + public ZipShort getHeaderId() { + return ID; + } + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * @return 0 + */ + public ZipShort getLocalFileDataLength() { + return NULL; + } + + /** + * Length of the extra field in the central directory - without + * Header-ID or length specifier. + * @return 0 + */ + public ZipShort getCentralDirectoryLength() { + return NULL; + } + + /** + * The actual data to put into local file data - without Header-ID + * or length specifier. + * @return the data + */ + public byte[] getLocalFileDataData() { + return NO_BYTES; + } + + /** + * The actual data to put central directory - without Header-ID or + * length specifier. + * @return the data + */ + public byte[] getCentralDirectoryData() { + return NO_BYTES; + } + + /** + * Populate data from this array as if it was in local file data. + * @param data an array of bytes + * @param offset the start offset + * @param length the number of bytes in the array from offset + * + * @throws ZipException on error + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) + throws ZipException { + if (length != 0) { + throw new ZipException("JarMarker doesn't expect any data"); + } + } + + /** + * Doesn't do anything special since this class always uses the + * same data in central directory and local file data. 
+ */ + public void parseFromCentralDirectoryData(byte[] buffer, int offset, + int length) + throws ZipException { + parseFromLocalFileData(buffer, offset, length); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/NioZipEncoding.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/NioZipEncoding.java new file mode 100644 index 000000000..f93192c34 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/NioZipEncoding.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.nio.ByteBuffer; +import java.nio.CharBuffer; +import java.nio.charset.Charset; +import java.nio.charset.CharsetEncoder; +import java.nio.charset.CoderResult; +import java.nio.charset.CodingErrorAction; + +/** + * A ZipEncoding, which uses a java.nio {@link + * java.nio.charset.Charset Charset} to encode names. + * + *

This implementation works for all cases under java-1.5 or + * later. However, in java-1.4, some charsets don't have a java.nio + * implementation, most notably the default ZIP encoding Cp437.

+ * + *

The methods of this class are reentrant.

+ * @Immutable + */ +class NioZipEncoding implements ZipEncoding { + private final Charset charset; + + /** + * Construct an NIO based zip encoding, which wraps the given + * charset. + * + * @param charset The NIO charset to wrap. + */ + public NioZipEncoding(Charset charset) { + this.charset = charset; + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#canEncode(java.lang.String) + */ + public boolean canEncode(String name) { + CharsetEncoder enc = this.charset.newEncoder(); + enc.onMalformedInput(CodingErrorAction.REPORT); + enc.onUnmappableCharacter(CodingErrorAction.REPORT); + + return enc.canEncode(name); + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#encode(java.lang.String) + */ + public ByteBuffer encode(String name) { + CharsetEncoder enc = this.charset.newEncoder(); + + enc.onMalformedInput(CodingErrorAction.REPORT); + enc.onUnmappableCharacter(CodingErrorAction.REPORT); + + CharBuffer cb = CharBuffer.wrap(name); + ByteBuffer out = ByteBuffer.allocate(name.length() + + (name.length() + 1) / 2); + + while (cb.remaining() > 0) { + CoderResult res = enc.encode(cb, out,true); + + if (res.isUnmappable() || res.isMalformed()) { + + // write the unmappable characters in utf-16 + // pseudo-URL encoding style to ByteBuffer. + if (res.length() * 6 > out.remaining()) { + out = ZipEncodingHelper.growBuffer(out, out.position() + + res.length() * 6); + } + + for (int i=0; i + *
  • Characters 0x0000 to 0x007f are encoded as the corresponding + * byte values 0x00 to 0x7f.
  • + *
  • All byte codes from 0x80 to 0xff are mapped to a unique unicode + * character in the range 0x0080 to 0x7fff. (No support for + * UTF-16 surrogates) + * + * + *

    These restrictions most notably apply to the most prominent + * omissions of java-1.4's {@link java.nio.charset.Charset Charset} + * implementation, Cp437 and Cp850.

    + * + *

    The methods of this class are reentrant.

    + * @Immutable + */ +class Simple8BitZipEncoding implements ZipEncoding { + + /** + * A character entity, which is put to the reverse mapping table + * of a simple encoding. + */ + private static final class Simple8BitChar implements Comparable { + public final char unicode; + public final byte code; + + Simple8BitChar(byte code, char unicode) { + this.code = code; + this.unicode = unicode; + } + + public int compareTo(Simple8BitChar a) { + return this.unicode - a.unicode; + } + + @Override + public String toString() { + return "0x" + Integer.toHexString(0xffff & unicode) + + "->0x" + Integer.toHexString(0xff & code); + } + + @Override + public boolean equals(Object o) { + if (o instanceof Simple8BitChar) { + Simple8BitChar other = (Simple8BitChar) o; + return unicode == other.unicode && code == other.code; + } + return false; + } + + @Override + public int hashCode() { + return unicode; + } + } + + /** + * The characters for byte values of 128 to 255 stored as an array of + * 128 chars. + */ + private final char[] highChars; + + /** + * A list of {@link Simple8BitChar} objects sorted by the unicode + * field. This list is used to binary search reverse mapping of + * unicode characters with a character code greater than 127. + */ + private final List reverseMapping; + + /** + * @param highChars The characters for byte values of 128 to 255 + * stored as an array of 128 chars. + */ + public Simple8BitZipEncoding(char[] highChars) { + this.highChars = highChars.clone(); + List temp = + new ArrayList(this.highChars.length); + + byte code = 127; + + for (char highChar : this.highChars) { + temp.add(new Simple8BitChar(++code, highChar)); + } + + Collections.sort(temp); + this.reverseMapping = Collections.unmodifiableList(temp); + } + + /** + * Return the character code for a given encoded byte. + * + * @param b The byte to decode. + * @return The associated character value. 
+ */ + public char decodeByte(byte b) { + // code 0-127 + if (b >= 0) { + return (char) b; + } + + // byte is signed, so 128 == -128 and 255 == -1 + return this.highChars[128 + b]; + } + + /** + * @param c The character to encode. + * @return Whether the given unicode character is covered by this encoding. + */ + public boolean canEncodeChar(char c) { + + if (c >= 0 && c < 128) { + return true; + } + + Simple8BitChar r = this.encodeHighChar(c); + return r != null; + } + + /** + * Pushes the encoded form of the given character to the given byte buffer. + * + * @param bb The byte buffer to write to. + * @param c The character to encode. + * @return Whether the given unicode character is covered by this encoding. + * If {@code false} is returned, nothing is pushed to the + * byte buffer. + */ + public boolean pushEncodedChar(ByteBuffer bb, char c) { + + if (c >= 0 && c < 128) { + bb.put((byte) c); + return true; + } + + Simple8BitChar r = this.encodeHighChar(c); + if (r == null) { + return false; + } + bb.put(r.code); + return true; + } + + /** + * @param c A unicode character in the range from 0x0080 to 0x7f00 + * @return A Simple8BitChar, if this character is covered by this encoding. + * A {@code null} value is returned, if this character is not + * covered by this encoding. + */ + private Simple8BitChar encodeHighChar(char c) { + // for performance an simplicity, yet another reincarnation of + // binary search... 
+ int i0 = 0; + int i1 = this.reverseMapping.size(); + + while (i1 > i0) { + + int i = i0 + (i1 - i0) / 2; + + Simple8BitChar m = this.reverseMapping.get(i); + + if (m.unicode == c) { + return m; + } + + if (m.unicode < c) { + i0 = i + 1; + } else { + i1 = i; + } + } + + if (i0 >= this.reverseMapping.size()) { + return null; + } + + Simple8BitChar r = this.reverseMapping.get(i0); + + if (r.unicode != c) { + return null; + } + + return r; + } + + /** + * @see + * org.apache.commons.compress.archivers.zip.ZipEncoding#canEncode(java.lang.String) + */ + public boolean canEncode(String name) { + + for (int i=0;iStores the UTF-8 version of the file comment as stored in the + * central directory header.

    + * + * @see PKWARE's + * APPNOTE.TXT, section 4.6.8 + * + * @NotThreadSafe super-class is not thread-safe + */ +public class UnicodeCommentExtraField extends AbstractUnicodeExtraField { + + public static final ZipShort UCOM_ID = new ZipShort(0x6375); + + public UnicodeCommentExtraField () { + } + + /** + * Assemble as unicode comment extension from the name given as + * text as well as the encoded bytes actually written to the archive. + * + * @param text The file name + * @param bytes the bytes actually written to the archive + * @param off The offset of the encoded comment in bytes. + * @param len The length of the encoded comment or comment in + * bytes. + */ + public UnicodeCommentExtraField(String text, byte[] bytes, int off, + int len) { + super(text, bytes, off, len); + } + + /** + * Assemble as unicode comment extension from the comment given as + * text as well as the bytes actually written to the archive. + * + * @param comment The file comment + * @param bytes the bytes actually written to the archive + */ + public UnicodeCommentExtraField(String comment, byte[] bytes) { + super(comment, bytes); + } + + public ZipShort getHeaderId() { + return UCOM_ID; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.java new file mode 100644 index 000000000..a60ccb2d8 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnicodePathExtraField.java @@ -0,0 +1,67 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers.zip; + +/** + * Info-ZIP Unicode Path Extra Field (0x7075): + * + *

    Stores the UTF-8 version of the file name field as stored in the + * local header and central directory header.

    + * + * @see PKWARE's + * APPNOTE.TXT, section 4.6.9 + * + * @NotThreadSafe super-class is not thread-safe + */ +public class UnicodePathExtraField extends AbstractUnicodeExtraField { + + public static final ZipShort UPATH_ID = new ZipShort(0x7075); + + public UnicodePathExtraField () { + } + + /** + * Assemble as unicode path extension from the name given as + * text as well as the encoded bytes actually written to the archive. + * + * @param text The file name + * @param bytes the bytes actually written to the archive + * @param off The offset of the encoded filename in bytes. + * @param len The length of the encoded filename or comment in + * bytes. + */ + public UnicodePathExtraField(String text, byte[] bytes, int off, int len) { + super(text, bytes, off, len); + } + + /** + * Assemble as unicode path extension from the name given as + * text as well as the encoded bytes actually written to the archive. + * + * @param name The file name + * @param bytes the bytes actually written to the archive + */ + public UnicodePathExtraField(String name, byte[] bytes) { + super(name, bytes); + } + + public ZipShort getHeaderId() { + return UPATH_ID; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnixStat.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnixStat.java new file mode 100644 index 000000000..b8afc6bc8 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnixStat.java @@ -0,0 +1,63 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Constants from stat.h on Unix systems. + */ +// CheckStyle:InterfaceIsTypeCheck OFF - backward compatible +public interface UnixStat { + + /** + * Bits used for permissions (and sticky bit) + */ + int PERM_MASK = 07777; + /** + * Indicates symbolic links. + */ + int LINK_FLAG = 0120000; + /** + * Indicates plain files. + */ + int FILE_FLAG = 0100000; + /** + * Indicates directories. + */ + int DIR_FLAG = 040000; + + // ---------------------------------------------------------- + // somewhat arbitrary choices that are quite common for shared + // installations + // ----------------------------------------------------------- + + /** + * Default permissions for symbolic links. + */ + int DEFAULT_LINK_PERM = 0777; + + /** + * Default permissions for directories. + */ + int DEFAULT_DIR_PERM = 0755; + + /** + * Default permissions for plain files. + */ + int DEFAULT_FILE_PERM = 0644; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.java new file mode 100644 index 000000000..029cfcdaf --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnparseableExtraFieldData.java @@ -0,0 +1,113 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Wrapper for extra field data that doesn't conform to the recommended format of header-tag + size + data. + * + *

    The header-id is artificial (and not listed as a known ID in APPNOTE.TXT). Since it isn't used anywhere + * except to satisfy the ZipExtraField contract it shouldn't matter anyway.

    + * + * @since 1.1 + * @NotThreadSafe + */ +public final class UnparseableExtraFieldData implements ZipExtraField { + private static final ZipShort HEADER_ID = new ZipShort(0xACC1); + + private byte[] localFileData; + private byte[] centralDirectoryData; + + /** + * The Header-ID. + * + * @return a completely arbitrary value that should be ignored. + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Length of the complete extra field in the local file data. + * + * @return The LocalFileDataLength value + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localFileData == null ? 0 : localFileData.length); + } + + /** + * Length of the complete extra field in the central directory. + * + * @return The CentralDirectoryLength value + */ + public ZipShort getCentralDirectoryLength() { + return centralDirectoryData == null + ? getLocalFileDataLength() + : new ZipShort(centralDirectoryData.length); + } + + /** + * The actual data to put into local file data. + * + * @return The LocalFileDataData value + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localFileData); + } + + /** + * The actual data to put into central directory. + * + * @return The CentralDirectoryData value + */ + public byte[] getCentralDirectoryData() { + return centralDirectoryData == null + ? getLocalFileDataData() : ZipUtil.copy(centralDirectoryData); + } + + /** + * Populate data from this array as if it was in local file data. + * + * @param buffer the buffer to read data from + * @param offset offset into buffer to read data + * @param length the length of data + */ + public void parseFromLocalFileData(byte[] buffer, int offset, int length) { + localFileData = new byte[length]; + System.arraycopy(buffer, offset, localFileData, 0, length); + } + + /** + * Populate data from this array as if it was in central directory data. 
+ * + * @param buffer the buffer to read data from + * @param offset offset into buffer to read data + * @param length the length of data + */ + public void parseFromCentralDirectoryData(byte[] buffer, int offset, + int length) { + centralDirectoryData = new byte[length]; + System.arraycopy(buffer, offset, centralDirectoryData, 0, length); + if (localFileData == null) { + parseFromLocalFileData(buffer, offset, length); + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.java new file mode 100644 index 000000000..e0bdf199d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnrecognizedExtraField.java @@ -0,0 +1,148 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Simple placeholder for all those extra fields we don't want to deal + * with. + * + *

    Assumes local file data and central directory entries are + * identical - unless told the opposite.

    + * @NotThreadSafe + */ +public class UnrecognizedExtraField implements ZipExtraField { + + /** + * The Header-ID. + */ + private ZipShort headerId; + + /** + * Set the header id. + * @param headerId the header id to use + */ + public void setHeaderId(ZipShort headerId) { + this.headerId = headerId; + } + + /** + * Get the header id. + * @return the header id + */ + public ZipShort getHeaderId() { + return headerId; + } + + /** + * Extra field data in local file data - without + * Header-ID or length specifier. + */ + private byte[] localData; + + /** + * Set the extra field data in the local file data - + * without Header-ID or length specifier. + * @param data the field data to use + */ + public void setLocalFileDataData(byte[] data) { + localData = ZipUtil.copy(data); + } + + /** + * Get the length of the local data. + * @return the length of the local data + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(localData.length); + } + + /** + * Get the local data. + * @return the local data + */ + public byte[] getLocalFileDataData() { + return ZipUtil.copy(localData); + } + + /** + * Extra field data in central directory - without + * Header-ID or length specifier. + */ + private byte[] centralData; + + /** + * Set the extra field data in central directory. + * @param data the data to use + */ + public void setCentralDirectoryData(byte[] data) { + centralData = ZipUtil.copy(data); + } + + /** + * Get the central data length. + * If there is no central data, get the local file data length. + * @return the central data length + */ + public ZipShort getCentralDirectoryLength() { + if (centralData != null) { + return new ZipShort(centralData.length); + } + return getLocalFileDataLength(); + } + + /** + * Get the central data. 
+ * @return the central data if present, else return the local file data + */ + public byte[] getCentralDirectoryData() { + if (centralData != null) { + return ZipUtil.copy(centralData); + } + return getLocalFileDataData(); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromLocalFileData(byte[], int, int) + */ + public void parseFromLocalFileData(byte[] data, int offset, int length) { + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setLocalFileDataData(tmp); + } + + /** + * @param data the array of bytes. + * @param offset the source location in the data array. + * @param length the number of bytes to use in the data array. + * @see ZipExtraField#parseFromCentralDirectoryData(byte[], int, int) + */ + public void parseFromCentralDirectoryData(byte[] data, int offset, + int length) { + byte[] tmp = new byte[length]; + System.arraycopy(data, offset, tmp, 0, length); + setCentralDirectoryData(tmp); + if (localData == null) { + setLocalFileDataData(tmp); + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java new file mode 100644 index 000000000..0bd4db347 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnshrinkingInputStream.java @@ -0,0 +1,118 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.compressors.z._internal_.InternalLZWInputStream; + +/** + * Input stream that decompresses ZIP method 1 (unshrinking). A variation of the LZW algorithm, with some twists. + * @NotThreadSafe + * @since 1.7 + */ +class UnshrinkingInputStream extends InternalLZWInputStream { + private static final int MAX_CODE_SIZE = 13; + private static final int MAX_TABLE_SIZE = 1 << MAX_CODE_SIZE; + private final boolean[] isUsed; + + public UnshrinkingInputStream(InputStream inputStream) throws IOException { + super(inputStream); + setClearCode(codeSize); + initializeTables(MAX_CODE_SIZE); + isUsed = new boolean[prefixes.length]; + for (int i = 0; i < (1 << 8); i++) { + isUsed[i] = true; + } + tableSize = clearCode + 1; + } + + @Override + protected int addEntry(int previousCode, byte character) throws IOException { + while ((tableSize < MAX_TABLE_SIZE) && isUsed[tableSize]) { + tableSize++; + } + int idx = addEntry(previousCode, character, MAX_TABLE_SIZE); + if (idx >= 0) { + isUsed[idx] = true; + } + return idx; + } + + private void partialClear() { + final boolean[] isParent = new boolean[MAX_TABLE_SIZE]; + for (int i = 0; i < isUsed.length; i++) { + if (isUsed[i] && prefixes[i] != -1) { + isParent[prefixes[i]] = true; + } + } + for (int i = clearCode + 1; i < isParent.length; i++) { + if (!isParent[i]) { + isUsed[i] = false; + prefixes[i] = -1; + } + } + } + + @Override + protected int decompressNextSymbol() 
throws IOException { + // + // table entry table entry + // _____________ _____ + // table entry / \ / \ + // ____________/ \ \ + // / / \ / \ \ + // +---+---+---+---+---+---+---+---+---+---+ + // | . | . | . | . | . | . | . | . | . | . | + // +---+---+---+---+---+---+---+---+---+---+ + // |<--------->|<------------->|<----->|<->| + // symbol symbol symbol symbol + // + final int code = readNextCode(); + if (code < 0) { + return -1; + } else if (code == clearCode) { + final int subCode = readNextCode(); + if (subCode < 0) { + throw new IOException("Unexpected EOF;"); + } else if (subCode == 1) { + if (codeSize < MAX_CODE_SIZE) { + codeSize++; + } else { + throw new IOException("Attempt to increase code size beyond maximum"); + } + } else if (subCode == 2) { + partialClear(); + tableSize = clearCode + 1; + } else { + throw new IOException("Invalid clear code subcode " + subCode); + } + return 0; + } else { + boolean addedUnfinishedEntry = false; + int effectiveCode = code; + if (!isUsed[code]) { + effectiveCode = addRepeatOfPreviousCode(); + addedUnfinishedEntry = true; + } + return expandCodeToOutputStack(effectiveCode, addedUnfinishedEntry); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java new file mode 100644 index 000000000..b1aad770c --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/UnsupportedZipFeatureException.java @@ -0,0 +1,123 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.ZipException; + +/** + * Exception thrown when attempting to read or write data for a zip + * entry that uses ZIP features not supported by this library. + * @since 1.1 + */ +public class UnsupportedZipFeatureException extends ZipException { + + private final Feature reason; + private final ZipArchiveEntry entry; + private static final long serialVersionUID = 20130101L; + + /** + * Creates an exception. + * @param reason the feature that is not supported + * @param entry the entry using the feature + */ + public UnsupportedZipFeatureException(Feature reason, + ZipArchiveEntry entry) { + super("unsupported feature " + reason + " used in entry " + + entry.getName()); + this.reason = reason; + this.entry = entry; + } + + /** + * Creates an exception for archives that use an unsupported + * compression algorithm. + * @param method the method that is not supported + * @param entry the entry using the feature + * @since 1.5 + */ + public UnsupportedZipFeatureException(ZipMethod method, + ZipArchiveEntry entry) { + super("unsupported feature method '" + method.name() + + "' used in entry " + entry.getName()); + this.reason = Feature.METHOD; + this.entry = entry; + } + + /** + * Creates an exception when the whole archive uses an unsupported + * feature. 
+ * + * @param reason the feature that is not supported + * @since 1.5 + */ + public UnsupportedZipFeatureException(Feature reason) { + super("unsupported feature " + reason + " used in archive."); + this.reason = reason; + this.entry = null; + } + + /** + * The unsupported feature that has been used. + */ + public Feature getFeature() { + return reason; + } + + /** + * The entry using the unsupported feature. + */ + public ZipArchiveEntry getEntry() { + return entry; + } + + /** + * ZIP Features that may or may not be supported. + * @since 1.1 + */ + public static class Feature { + /** + * The entry is encrypted. + */ + public static final Feature ENCRYPTION = new Feature("encryption"); + /** + * The entry used an unsupported compression method. + */ + public static final Feature METHOD = new Feature("compression method"); + /** + * The entry uses a data descriptor. + */ + public static final Feature DATA_DESCRIPTOR = new Feature("data descriptor"); + /** + * The archive uses splitting or spanning. + * @since 1.5 + */ + public static final Feature SPLITTING = new Feature("splitting"); + + private final String name; + + private Feature(String name) { + this.name = name; + } + + @Override + public String toString() { + return name; + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java new file mode 100644 index 000000000..b1ac3ee43 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X5455_ExtendedTimestamp.java @@ -0,0 +1,587 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Serializable; +import java.util.Date; +import java.util.zip.ZipException; + +/** + *

    An extra field that stores additional file and directory timestamp data + * for zip entries. Each zip entry can include up to three timestamps + * (modify, access, create*). The timestamps are stored as 32 bit unsigned + * integers representing seconds since UNIX epoch (Jan 1st, 1970, UTC). + * This field improves on zip's default timestamp granularity, since it + * allows one to store additional timestamps, and, in addition, the timestamps + * are stored using per-second granularity (zip's default behaviour can only store + * timestamps to the nearest even second). + *

    + * Unfortunately, 32 (unsigned) bits can only store dates up to the year 2106, + * and so this extra field will eventually be obsolete. Enjoy it while it lasts! + *

    + *
      + *
    • modifyTime: + * most recent time of file/directory modification + * (or file/dir creation if the entry has not been + * modified since it was created). + *
    • + *
    • accessTime: + * most recent time file/directory was opened + * (e.g., read from disk). Many people disable + * their operating systems from updating this value + * using the NOATIME mount option to optimize disk behaviour, + * and thus it's not always reliable. In those cases + * it's always equal to modifyTime. + *
    • + *
    • *createTime: + * modern linux file systems (e.g., ext2 and newer) + * do not appear to store a value like this, and so + * it's usually omitted altogether in the zip extra + * field. Perhaps other unix systems track this. + *
    + *

    + * We're using the field definition given in Info-Zip's source archive: + * zip-3.0.tar.gz/proginfo/extrafld.txt + *

    + *
    + * Value         Size        Description
    + * -----         ----        -----------
    + * 0x5455        Short       tag for this extra block type ("UT")
    + * TSize         Short       total data size for this block
    + * Flags         Byte        info bits
    + * (ModTime)     Long        time of last modification (UTC/GMT)
    + * (AcTime)      Long        time of last access (UTC/GMT)
    + * (CrTime)      Long        time of original creation (UTC/GMT)
    + *
    + * Central-header version:
    + *
    + * Value         Size        Description
    + * -----         ----        -----------
    + * 0x5455        Short       tag for this extra block type ("UT")
    + * TSize         Short       total data size for this block
    + * Flags         Byte        info bits (refers to local header!)
    + * (ModTime)     Long        time of last modification (UTC/GMT)
    + * 
    + * @since 1.5 + */ +public class X5455_ExtendedTimestamp implements ZipExtraField, Cloneable, Serializable { + private static final ZipShort HEADER_ID = new ZipShort(0x5455); + private static final long serialVersionUID = 1L; + + /** + * The bit set inside the flags by when the last modification time + * is present in this extra field. + */ + public static final byte MODIFY_TIME_BIT = 1; + /** + * The bit set inside the flags by when the lasr access time is + * present in this extra field. + */ + public static final byte ACCESS_TIME_BIT = 2; + /** + * The bit set inside the flags by when the original creation time + * is present in this extra field. + */ + public static final byte CREATE_TIME_BIT = 4; + + // The 3 boolean fields (below) come from this flags byte. The remaining 5 bits + // are ignored according to the current version of the spec (December 2012). + private byte flags; + + // Note: even if bit1 and bit2 are set, the Central data will still not contain + // access/create fields: only local data ever holds those! This causes + // some of our implementation to look a little odd, with seemingly spurious + // != null and length checks. + private boolean bit0_modifyTimePresent; + private boolean bit1_accessTimePresent; + private boolean bit2_createTimePresent; + + private ZipLong modifyTime; + private ZipLong accessTime; + private ZipLong createTime; + + /** + * Constructor for X5455_ExtendedTimestamp. + */ + public X5455_ExtendedTimestamp() {} + + /** + * The Header-ID. + * + * @return the value for the header id for this extrafield + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * + * @return a ZipShort for the length of the data of this extra field + */ + public ZipShort getLocalFileDataLength() { + return new ZipShort(1 + + (bit0_modifyTimePresent ? 4 : 0) + + (bit1_accessTimePresent && accessTime != null ? 
4 : 0) + + (bit2_createTimePresent && createTime != null ? 4 : 0) + ); + } + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * + *

    For X5455 the central length is often smaller than the + * local length, because central cannot contain access or create + * timestamps.

    + * + * @return a ZipShort for the length of the data of this extra field + */ + public ZipShort getCentralDirectoryLength() { + return new ZipShort(1 + + (bit0_modifyTimePresent ? 4 : 0) + ); + } + + /** + * The actual data to put into local file data - without Header-ID + * or length specifier. + * + * @return get the data + */ + public byte[] getLocalFileDataData() { + byte[] data = new byte[getLocalFileDataLength().getValue()]; + int pos = 0; + data[pos++] = 0; + if (bit0_modifyTimePresent) { + data[0] |= MODIFY_TIME_BIT; + System.arraycopy(modifyTime.getBytes(), 0, data, pos, 4); + pos += 4; + } + if (bit1_accessTimePresent && accessTime != null) { + data[0] |= ACCESS_TIME_BIT; + System.arraycopy(accessTime.getBytes(), 0, data, pos, 4); + pos += 4; + } + if (bit2_createTimePresent && createTime != null) { + data[0] |= CREATE_TIME_BIT; + System.arraycopy(createTime.getBytes(), 0, data, pos, 4); + pos += 4; + } + return data; + } + + /** + * The actual data to put into central directory data - without Header-ID + * or length specifier. + * + * @return the central directory data + */ + public byte[] getCentralDirectoryData() { + byte[] centralData = new byte[getCentralDirectoryLength().getValue()]; + byte[] localData = getLocalFileDataData(); + + // Truncate out create & access time (last 8 bytes) from + // the copy of the local data we obtained: + System.arraycopy(localData, 0, centralData, 0, centralData.length); + return centralData; + } + + /** + * Populate data from this array as if it was in local file data. 
+ * + * @param data an array of bytes + * @param offset the start offset + * @param length the number of bytes in the array from offset + * @throws java.util.zip.ZipException on error + */ + public void parseFromLocalFileData( + byte[] data, int offset, int length + ) throws ZipException { + reset(); + final int len = offset + length; + setFlags(data[offset++]); + if (bit0_modifyTimePresent) { + modifyTime = new ZipLong(data, offset); + offset += 4; + } + + // Notice the extra length check in case we are parsing the shorter + // central data field (for both access and create timestamps). + if (bit1_accessTimePresent && offset + 4 <= len) { + accessTime = new ZipLong(data, offset); + offset += 4; + } + if (bit2_createTimePresent && offset + 4 <= len) { + createTime = new ZipLong(data, offset); + offset += 4; + } + } + + /** + * Doesn't do anything special since this class always uses the + * same parsing logic for both central directory and local file data. + */ + public void parseFromCentralDirectoryData( + byte[] buffer, int offset, int length + ) throws ZipException { + reset(); + parseFromLocalFileData(buffer, offset, length); + } + + /** + * Reset state back to newly constructed state. Helps us make sure + * parse() calls always generate clean results. + */ + private void reset() { + setFlags((byte) 0); + this.modifyTime = null; + this.accessTime = null; + this.createTime = null; + } + + /** + * Sets flags byte. The flags byte tells us which of the + * three datestamp fields are present in the data: + *
    +     * bit0 - modify time
    +     * bit1 - access time
    +     * bit2 - create time
    +     * 
    + * Only first 3 bits of flags are used according to the + * latest version of the spec (December 2012). + * + * @param flags flags byte indicating which of the + * three datestamp fields are present. + */ + public void setFlags(byte flags) { + this.flags = flags; + this.bit0_modifyTimePresent = (flags & MODIFY_TIME_BIT) == MODIFY_TIME_BIT; + this.bit1_accessTimePresent = (flags & ACCESS_TIME_BIT) == ACCESS_TIME_BIT; + this.bit2_createTimePresent = (flags & CREATE_TIME_BIT) == CREATE_TIME_BIT; + } + + /** + * Gets flags byte. The flags byte tells us which of the + * three datestamp fields are present in the data: + *
    +     * bit0 - modify time
    +     * bit1 - access time
    +     * bit2 - create time
    +     * 
    + * Only first 3 bits of flags are used according to the + * latest version of the spec (December 2012). + * + * @return flags byte indicating which of the + * three datestamp fields are present. + */ + public byte getFlags() { return flags; } + + /** + * Returns whether bit0 of the flags byte is set or not, + * which should correspond to the presence or absence of + * a modify timestamp in this particular zip entry. + * + * @return true if bit0 of the flags byte is set. + */ + public boolean isBit0_modifyTimePresent() { return bit0_modifyTimePresent; } + + /** + * Returns whether bit1 of the flags byte is set or not, + * which should correspond to the presence or absence of + * a "last access" timestamp in this particular zip entry. + * + * @return true if bit1 of the flags byte is set. + */ + public boolean isBit1_accessTimePresent() { return bit1_accessTimePresent; } + + /** + * Returns whether bit2 of the flags byte is set or not, + * which should correspond to the presence or absence of + * a create timestamp in this particular zip entry. + * + * @return true if bit2 of the flags byte is set. + */ + public boolean isBit2_createTimePresent() { return bit2_createTimePresent; } + + /** + * Returns the modify time (seconds since epoch) of this zip entry + * as a ZipLong object, or null if no such timestamp exists in the + * zip entry. + * + * @return modify time (seconds since epoch) or null. + */ + public ZipLong getModifyTime() { return modifyTime; } + + /** + * Returns the access time (seconds since epoch) of this zip entry + * as a ZipLong object, or null if no such timestamp exists in the + * zip entry. + * + * @return access time (seconds since epoch) or null. + */ + public ZipLong getAccessTime() { return accessTime; } + + /** + *

    + * Returns the create time (seconds since epoch) of this zip entry + * as a ZipLong object, or null if no such timestamp exists in the + * zip entry. + *

    + * Note: modern linux file systems (e.g., ext2) + * do not appear to store a "create time" value, and so + * it's usually omitted altogether in the zip extra + * field. Perhaps other unix systems track this. + * + * @return create time (seconds since epoch) or null. + */ + public ZipLong getCreateTime() { return createTime; } + + /** + * Returns the modify time as a java.util.Date + * of this zip entry, or null if no such timestamp exists in the zip entry. + * The milliseconds are always zeroed out, since the underlying data + * offers only per-second precision. + * + * @return modify time as java.util.Date or null. + */ + public Date getModifyJavaTime() { + return modifyTime != null ? new Date(modifyTime.getValue() * 1000) : null; + } + + /** + * Returns the access time as a java.util.Date + * of this zip entry, or null if no such timestamp exists in the zip entry. + * The milliseconds are always zeroed out, since the underlying data + * offers only per-second precision. + * + * @return access time as java.util.Date or null. + */ + public Date getAccessJavaTime() { + return accessTime != null ? new Date(accessTime.getValue() * 1000) : null; + } + + /** + *

    + * Returns the create time as a a java.util.Date + * of this zip entry, or null if no such timestamp exists in the zip entry. + * The milliseconds are always zeroed out, since the underlying data + * offers only per-second precision. + *

    + * Note: modern linux file systems (e.g., ext2) + * do not appear to store a "create time" value, and so + * it's usually omitted altogether in the zip extra + * field. Perhaps other unix systems track this. + * + * @return create time as java.util.Date or null. + */ + public Date getCreateJavaTime() { + return createTime != null ? new Date(createTime.getValue() * 1000) : null; + } + + /** + *

    + * Sets the modify time (seconds since epoch) of this zip entry + * using a ZipLong object. + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param l ZipLong of the modify time (seconds per epoch) + */ + public void setModifyTime(ZipLong l) { + bit0_modifyTimePresent = l != null; + flags = (byte) (l != null ? (flags | MODIFY_TIME_BIT) + : (flags & ~MODIFY_TIME_BIT)); + this.modifyTime = l; + } + + /** + *

    + * Sets the access time (seconds since epoch) of this zip entry + * using a ZipLong object + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param l ZipLong of the access time (seconds per epoch) + */ + public void setAccessTime(ZipLong l) { + bit1_accessTimePresent = l != null; + flags = (byte) (l != null ? (flags | ACCESS_TIME_BIT) + : (flags & ~ACCESS_TIME_BIT)); + this.accessTime = l; + } + + /** + *

    + * Sets the create time (seconds since epoch) of this zip entry + * using a ZipLong object + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param l ZipLong of the create time (seconds per epoch) + */ + public void setCreateTime(ZipLong l) { + bit2_createTimePresent = l != null; + flags = (byte) (l != null ? (flags | CREATE_TIME_BIT) + : (flags & ~CREATE_TIME_BIT)); + this.createTime = l; + } + + /** + *

    + * Sets the modify time as a java.util.Date + * of this zip entry. Supplied value is truncated to per-second + * precision (milliseconds zeroed-out). + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param d modify time as java.util.Date + */ + public void setModifyJavaTime(Date d) { setModifyTime(dateToZipLong(d)); } + + /** + *

    + * Sets the access time as a java.util.Date + * of this zip entry. Supplied value is truncated to per-second + * precision (milliseconds zeroed-out). + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param d access time as java.util.Date + */ + public void setAccessJavaTime(Date d) { setAccessTime(dateToZipLong(d)); } + + /** + *

    + * Sets the create time as a java.util.Date + * of this zip entry. Supplied value is truncated to per-second + * precision (milliseconds zeroed-out). + *

    + * Note: the setters for flags and timestamps are decoupled. + * Even if the timestamp is not-null, it will only be written + * out if the corresponding bit in the flags is also set. + *

    + * + * @param d create time as java.util.Date + */ + public void setCreateJavaTime(Date d) { setCreateTime(dateToZipLong(d)); } + + /** + * Utility method converts java.util.Date (milliseconds since epoch) + * into a ZipLong (seconds since epoch). + *

    + * Also makes sure the converted ZipLong is not too big to fit + * in 32 unsigned bits. + * + * @param d java.util.Date to convert to ZipLong + * @return ZipLong + */ + private static ZipLong dateToZipLong(final Date d) { + if (d == null) { return null; } + + final long TWO_TO_32 = 0x100000000L; + final long l = d.getTime() / 1000; + if (l >= TWO_TO_32) { + throw new IllegalArgumentException("Cannot set an X5455 timestamp larger than 2^32: " + l); + } + return new ZipLong(l); + } + + /** + * Returns a String representation of this class useful for + * debugging purposes. + * + * @return A String representation of this class useful for + * debugging purposes. + */ + @Override + public String toString() { + StringBuilder buf = new StringBuilder(); + buf.append("0x5455 Zip Extra Field: Flags="); + buf.append(Integer.toBinaryString(ZipUtil.unsignedIntToSignedByte(flags))).append(" "); + if (bit0_modifyTimePresent && modifyTime != null) { + Date m = getModifyJavaTime(); + buf.append(" Modify:[").append(m).append("] "); + } + if (bit1_accessTimePresent && accessTime != null) { + Date a = getAccessJavaTime(); + buf.append(" Access:[").append(a).append("] "); + } + if (bit2_createTimePresent && createTime != null) { + Date c = getCreateJavaTime(); + buf.append(" Create:[").append(c).append("] "); + } + return buf.toString(); + } + + @Override + public Object clone() throws CloneNotSupportedException { + return super.clone(); + } + + @Override + public boolean equals(Object o) { + if (o instanceof X5455_ExtendedTimestamp) { + X5455_ExtendedTimestamp xf = (X5455_ExtendedTimestamp) o; + + // The ZipLong==ZipLong clauses handle the cases where both are null. + // and only last 3 bits of flags matter. 
+ return ((flags & 0x07) == (xf.flags & 0x07)) && + (modifyTime == xf.modifyTime || (modifyTime != null && modifyTime.equals(xf.modifyTime))) && + (accessTime == xf.accessTime || (accessTime != null && accessTime.equals(xf.accessTime))) && + (createTime == xf.createTime || (createTime != null && createTime.equals(xf.createTime))); + } else { + return false; + } + } + + @Override + public int hashCode() { + int hc = (-123 * (flags & 0x07)); // only last 3 bits of flags matter + if (modifyTime != null) { + hc ^= modifyTime.hashCode(); + } + if (accessTime != null) { + // Since accessTime is often same as modifyTime, + // this prevents them from XOR negating each other. + hc ^= Integer.rotateLeft(accessTime.hashCode(), 11); + } + if (createTime != null) { + hc ^= Integer.rotateLeft(createTime.hashCode(), 22); + } + return hc; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java new file mode 100644 index 000000000..87d1e1d40 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/X7875_NewUnix.java @@ -0,0 +1,338 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Serializable; +import java.math.BigInteger; +import java.util.zip.ZipException; + +import static org.apache.commons.compress.archivers.zip.ZipUtil.reverse; +import static org.apache.commons.compress.archivers.zip.ZipUtil.signedByteToUnsignedInt; +import static org.apache.commons.compress.archivers.zip.ZipUtil.unsignedIntToSignedByte; + +/** + * An extra field that stores UNIX UID/GID data (owner & group ownership) for a given + * zip entry. We're using the field definition given in Info-Zip's source archive: + * zip-3.0.tar.gz/proginfo/extrafld.txt + * + *

    + * Value         Size        Description
    + * -----         ----        -----------
    + * 0x7875        Short       tag for this extra block type ("ux")
    + * TSize         Short       total data size for this block
    + * Version       1 byte      version of this extra field, currently 1
    + * UIDSize       1 byte      Size of UID field
    + * UID           Variable    UID for this entry (little endian)
    + * GIDSize       1 byte      Size of GID field
    + * GID           Variable    GID for this entry (little endian)
    + * 
    + * @since 1.5 + */ +public class X7875_NewUnix implements ZipExtraField, Cloneable, Serializable { + private static final ZipShort HEADER_ID = new ZipShort(0x7875); + private static final BigInteger ONE_THOUSAND = BigInteger.valueOf(1000); + private static final long serialVersionUID = 1L; + + private int version = 1; // always '1' according to current info-zip spec. + + // BigInteger helps us with little-endian / big-endian conversions. + // (thanks to BigInteger.toByteArray() and a reverse() method we created). + // Also, the spec theoretically allows UID/GID up to 255 bytes long! + // + // NOTE: equals() and hashCode() currently assume these can never be null. + private BigInteger uid; + private BigInteger gid; + + /** + * Constructor for X7875_NewUnix. + */ + public X7875_NewUnix() { + reset(); + } + + /** + * The Header-ID. + * + * @return the value for the header id for this extrafield + */ + public ZipShort getHeaderId() { + return HEADER_ID; + } + + /** + * Gets the UID as a long. UID is typically a 32 bit unsigned + * value on most UNIX systems, so we return a long to avoid + * integer overflow into the negatives in case values above + * and including 2^31 are being used. + * + * @return the UID value. + */ + public long getUID() { return ZipUtil.bigToLong(uid); } + + /** + * Gets the GID as a long. GID is typically a 32 bit unsigned + * value on most UNIX systems, so we return a long to avoid + * integer overflow into the negatives in case values above + * and including 2^31 are being used. + * + * @return the GID value. + */ + public long getGID() { return ZipUtil.bigToLong(gid); } + + /** + * Sets the UID. + * + * @param l UID value to set on this extra field. + */ + public void setUID(long l) { + this.uid = ZipUtil.longToBig(l); + } + + /** + * Sets the GID. + * + * @param l GID value to set on this extra field. 
+ */ + public void setGID(long l) { + this.gid = ZipUtil.longToBig(l); + } + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * + * @return a ZipShort for the length of the data of this extra field + */ + public ZipShort getLocalFileDataLength() { + int uidSize = trimLeadingZeroesForceMinLength(uid.toByteArray()).length; + int gidSize = trimLeadingZeroesForceMinLength(gid.toByteArray()).length; + + // The 3 comes from: version=1 + uidsize=1 + gidsize=1 + return new ZipShort(3 + uidSize + gidSize); + } + + /** + * Length of the extra field in the central directory data - without + * Header-ID or length specifier. + * + * @return a ZipShort for the length of the data of this extra field + */ + public ZipShort getCentralDirectoryLength() { + return getLocalFileDataLength(); // No different than local version. + } + + /** + * The actual data to put into local file data - without Header-ID + * or length specifier. + * + * @return get the data + */ + public byte[] getLocalFileDataData() { + byte[] uidBytes = uid.toByteArray(); + byte[] gidBytes = gid.toByteArray(); + + // BigInteger might prepend a leading-zero to force a positive representation + // (e.g., so that the sign-bit is set to zero). We need to remove that + // before sending the number over the wire. + uidBytes = trimLeadingZeroesForceMinLength(uidBytes); + gidBytes = trimLeadingZeroesForceMinLength(gidBytes); + + // Couldn't bring myself to just call getLocalFileDataLength() when we've + // already got the arrays right here. Yeah, yeah, I know, premature + // optimization is the root of all... + // + // The 3 comes from: version=1 + uidsize=1 + gidsize=1 + byte[] data = new byte[3 + uidBytes.length + gidBytes.length]; + + // reverse() switches byte array from big-endian to little-endian. 
+ reverse(uidBytes); + reverse(gidBytes); + + int pos = 0; + data[pos++] = unsignedIntToSignedByte(version); + data[pos++] = unsignedIntToSignedByte(uidBytes.length); + System.arraycopy(uidBytes, 0, data, pos, uidBytes.length); + pos += uidBytes.length; + data[pos++] = unsignedIntToSignedByte(gidBytes.length); + System.arraycopy(gidBytes, 0, data, pos, gidBytes.length); + return data; + } + + /** + * The actual data to put into central directory data - without Header-ID + * or length specifier. + * + * @return get the data + */ + public byte[] getCentralDirectoryData() { + return getLocalFileDataData(); + } + + /** + * Populate data from this array as if it was in local file data. + * + * @param data an array of bytes + * @param offset the start offset + * @param length the number of bytes in the array from offset + * @throws java.util.zip.ZipException on error + */ + public void parseFromLocalFileData( + byte[] data, int offset, int length + ) throws ZipException { + reset(); + this.version = signedByteToUnsignedInt(data[offset++]); + int uidSize = signedByteToUnsignedInt(data[offset++]); + byte[] uidBytes = new byte[uidSize]; + System.arraycopy(data, offset, uidBytes, 0, uidSize); + offset += uidSize; + this.uid = new BigInteger(1, reverse(uidBytes)); // sign-bit forced positive + + int gidSize = signedByteToUnsignedInt(data[offset++]); + byte[] gidBytes = new byte[gidSize]; + System.arraycopy(data, offset, gidBytes, 0, gidSize); + this.gid = new BigInteger(1, reverse(gidBytes)); // sign-bit forced positive + } + + /** + * Doesn't do anything special since this class always uses the + * same data in central directory and local file data. + */ + public void parseFromCentralDirectoryData( + byte[] buffer, int offset, int length + ) throws ZipException { + reset(); + parseFromLocalFileData(buffer, offset, length); + } + + /** + * Reset state back to newly constructed state. Helps us make sure + * parse() calls always generate clean results. 
+ */ + private void reset() { + // Typical UID/GID of the first non-root user created on a unix system. + uid = ONE_THOUSAND; + gid = ONE_THOUSAND; + } + + /** + * Returns a String representation of this class useful for + * debugging purposes. + * + * @return A String representation of this class useful for + * debugging purposes. + */ + @Override + public String toString() { + return "0x7875 Zip Extra Field: UID=" + uid + " GID=" + gid; + } + + @Override + public Object clone() throws CloneNotSupportedException { + return super.clone(); + } + + @Override + public boolean equals(Object o) { + if (o instanceof X7875_NewUnix) { + X7875_NewUnix xf = (X7875_NewUnix) o; + // We assume uid and gid can never be null. + return version == xf.version && uid.equals(xf.uid) && gid.equals(xf.gid); + } + return false; + } + + @Override + public int hashCode() { + int hc = -1234567 * version; + // Since most UID's and GID's are below 65,536, this is (hopefully!) + // a nice way to make sure typical UID and GID values impact the hash + // as much as possible. + hc ^= Integer.rotateLeft(uid.hashCode(), 16); + hc ^= gid.hashCode(); + return hc; + } + + /** + * Not really for external usage, but marked "package" visibility + * to help us JUnit it. Trims a byte array of leading zeroes while + * also enforcing a minimum length, and thus it really trims AND pads + * at the same time. + * + * @param array byte[] array to trim & pad. + * @return trimmed & padded byte[] array. + */ + static byte[] trimLeadingZeroesForceMinLength(byte[] array) { + if (array == null) { + return array; + } + + int pos = 0; + for (byte b : array) { + if (b == 0) { + pos++; + } else { + break; + } + } + + /* + + I agonized over my choice of MIN_LENGTH=1. Here's the situation: + InfoZip (the tool I am using to test interop) always sets these + to length=4. 
And so a UID of 0 (typically root) for example is + encoded as {4,0,0,0,0} (len=4, 32 bits of zero), when it could just + as easily be encoded as {1,0} (len=1, 8 bits of zero) according to + the spec. + + In the end I decided on MIN_LENGTH=1 for four reasons: + + 1.) We are adhering to the spec as far as I can tell, and so + a consumer that cannot parse this is broken. + + 2.) Fundamentally, zip files are about shrinking things, so + let's save a few bytes per entry while we can. + + 3.) Of all the people creating zip files using commons- + compress, how many care about UNIX UID/GID attributes + of the files they store? (e.g., I am probably thinking + way too hard about this and no one cares!) + + 4.) InfoZip's tool, even though it carefully stores every UID/GID + for every file zipped on a unix machine (by default) currently + appears unable to ever restore UID/GID. + unzip -X has no effect on my machine, even when run as root!!!! + + And thus it is decided: MIN_LENGTH=1. + + If anyone runs into interop problems from this, feel free to set + it to MIN_LENGTH=4 at some future time, and then we will behave + exactly like InfoZip (requires changes to unit tests, though). + + And I am sorry that the time you spent reading this comment is now + gone and you can never have it back. 
+ + */ + final int MIN_LENGTH = 1; + + byte[] trimmedArray = new byte[Math.max(MIN_LENGTH, array.length - pos)]; + int startPos = trimmedArray.length - (array.length - pos); + System.arraycopy(array, pos, trimmedArray, startPos, trimmedArray.length - startPos); + return trimmedArray; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java new file mode 100644 index 000000000..a75395863 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64ExtendedInformationExtraField.java @@ -0,0 +1,317 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.ZipException; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; + +/** + * Holds size and other extended information for entries that use Zip64 + * features. + * + *

    Currently Commons Compress doesn't support encrypting the + * central directory so the note in APPNOTE.TXT about masking doesn't + * apply.

    + * + *

    The implementation relies on data being read from the local file + * header and assumes that both size values are always present.

    + * + * @see PKWARE's + * APPNOTE.TXT, section 4.5.3 + * + * @since 1.2 + * @NotThreadSafe + */ +public class Zip64ExtendedInformationExtraField implements ZipExtraField { + + static final ZipShort HEADER_ID = new ZipShort(0x0001); + + private static final String LFH_MUST_HAVE_BOTH_SIZES_MSG = + "Zip64 extended information must contain" + + " both size values in the local file header."; + private static final byte[] EMPTY = new byte[0]; + + private ZipEightByteInteger size, compressedSize, relativeHeaderOffset; + private ZipLong diskStart; + + /** + * Stored in {@link #parseFromCentralDirectoryData + * parseFromCentralDirectoryData} so it can be reused when ZipFile + * calls {@link #reparseCentralDirectoryData + * reparseCentralDirectoryData}. + * + *

    Not used for anything else

    + * + * @since 1.3 + */ + private byte[] rawCentralDirectoryData; + + /** + * This constructor should only be used by the code that reads + * archives inside of Commons Compress. + */ + public Zip64ExtendedInformationExtraField() { } + + /** + * Creates an extra field based on the original and compressed size. + * + * @param size the entry's original size + * @param compressedSize the entry's compressed size + * + * @throws IllegalArgumentException if size or compressedSize is null + */ + public Zip64ExtendedInformationExtraField(ZipEightByteInteger size, + ZipEightByteInteger compressedSize) { + this(size, compressedSize, null, null); + } + + /** + * Creates an extra field based on all four possible values. + * + * @param size the entry's original size + * @param compressedSize the entry's compressed size + * + * @throws IllegalArgumentException if size or compressedSize is null + */ + public Zip64ExtendedInformationExtraField(ZipEightByteInteger size, + ZipEightByteInteger compressedSize, + ZipEightByteInteger relativeHeaderOffset, + ZipLong diskStart) { + this.size = size; + this.compressedSize = compressedSize; + this.relativeHeaderOffset = relativeHeaderOffset; + this.diskStart = diskStart; + } + + public ZipShort getHeaderId() { + return HEADER_ID; + } + + public ZipShort getLocalFileDataLength() { + return new ZipShort(size != null ? 2 * DWORD : 0); + } + + public ZipShort getCentralDirectoryLength() { + return new ZipShort((size != null ? DWORD : 0) + + (compressedSize != null ? DWORD : 0) + + (relativeHeaderOffset != null ? DWORD : 0) + + (diskStart != null ? 
WORD : 0)); + } + + public byte[] getLocalFileDataData() { + if (size != null || compressedSize != null) { + if (size == null || compressedSize == null) { + throw new IllegalArgumentException(LFH_MUST_HAVE_BOTH_SIZES_MSG); + } + byte[] data = new byte[2 * DWORD]; + addSizes(data); + return data; + } + return EMPTY; + } + + public byte[] getCentralDirectoryData() { + byte[] data = new byte[getCentralDirectoryLength().getValue()]; + int off = addSizes(data); + if (relativeHeaderOffset != null) { + System.arraycopy(relativeHeaderOffset.getBytes(), 0, data, off, DWORD); + off += DWORD; + } + if (diskStart != null) { + System.arraycopy(diskStart.getBytes(), 0, data, off, WORD); + off += WORD; + } + return data; + } + + public void parseFromLocalFileData(byte[] buffer, int offset, int length) + throws ZipException { + if (length == 0) { + // no local file data at all, may happen if an archive + // only holds a ZIP64 extended information extra field + // inside the central directory but not inside the local + // file header + return; + } + if (length < 2 * DWORD) { + throw new ZipException(LFH_MUST_HAVE_BOTH_SIZES_MSG); + } + size = new ZipEightByteInteger(buffer, offset); + offset += DWORD; + compressedSize = new ZipEightByteInteger(buffer, offset); + offset += DWORD; + int remaining = length - 2 * DWORD; + if (remaining >= DWORD) { + relativeHeaderOffset = new ZipEightByteInteger(buffer, offset); + offset += DWORD; + remaining -= DWORD; + } + if (remaining >= WORD) { + diskStart = new ZipLong(buffer, offset); + offset += WORD; + remaining -= WORD; + } + } + + public void parseFromCentralDirectoryData(byte[] buffer, int offset, + int length) + throws ZipException { + // store for processing in reparseCentralDirectoryData + rawCentralDirectoryData = new byte[length]; + System.arraycopy(buffer, offset, rawCentralDirectoryData, 0, length); + + // if there is no size information in here, we are screwed and + // can only hope things will get resolved by LFH data later + // 
But there are some cases that can be detected + // * all data is there + // * length == 24 -> both sizes and offset + // * length % 8 == 4 -> at least we can identify the diskStart field + if (length >= 3 * DWORD + WORD) { + parseFromLocalFileData(buffer, offset, length); + } else if (length == 3 * DWORD) { + size = new ZipEightByteInteger(buffer, offset); + offset += DWORD; + compressedSize = new ZipEightByteInteger(buffer, offset); + offset += DWORD; + relativeHeaderOffset = new ZipEightByteInteger(buffer, offset); + } else if (length % DWORD == WORD) { + diskStart = new ZipLong(buffer, offset + length - WORD); + } + } + + /** + * Parses the raw bytes read from the central directory extra + * field with knowledge which fields are expected to be there. + * + *

    All four fields inside the zip64 extended information extra + * field are optional and must only be present if their corresponding + * entry inside the central directory contains the correct magic + * value.

    + */ + public void reparseCentralDirectoryData(boolean hasUncompressedSize, + boolean hasCompressedSize, + boolean hasRelativeHeaderOffset, + boolean hasDiskStart) + throws ZipException { + if (rawCentralDirectoryData != null) { + int expectedLength = (hasUncompressedSize ? DWORD : 0) + + (hasCompressedSize ? DWORD : 0) + + (hasRelativeHeaderOffset ? DWORD : 0) + + (hasDiskStart ? WORD : 0); + if (rawCentralDirectoryData.length < expectedLength) { + throw new ZipException("central directory zip64 extended" + + " information extra field's length" + + " doesn't match central directory" + + " data. Expected length " + + expectedLength + " but is " + + rawCentralDirectoryData.length); + } + int offset = 0; + if (hasUncompressedSize) { + size = new ZipEightByteInteger(rawCentralDirectoryData, offset); + offset += DWORD; + } + if (hasCompressedSize) { + compressedSize = new ZipEightByteInteger(rawCentralDirectoryData, + offset); + offset += DWORD; + } + if (hasRelativeHeaderOffset) { + relativeHeaderOffset = + new ZipEightByteInteger(rawCentralDirectoryData, offset); + offset += DWORD; + } + if (hasDiskStart) { + diskStart = new ZipLong(rawCentralDirectoryData, offset); + offset += WORD; + } + } + } + + /** + * The uncompressed size stored in this extra field. + */ + public ZipEightByteInteger getSize() { + return size; + } + + /** + * The uncompressed size stored in this extra field. + */ + public void setSize(ZipEightByteInteger size) { + this.size = size; + } + + /** + * The compressed size stored in this extra field. + */ + public ZipEightByteInteger getCompressedSize() { + return compressedSize; + } + + /** + * The uncompressed size stored in this extra field. + */ + public void setCompressedSize(ZipEightByteInteger compressedSize) { + this.compressedSize = compressedSize; + } + + /** + * The relative header offset stored in this extra field. 
+ */ + public ZipEightByteInteger getRelativeHeaderOffset() { + return relativeHeaderOffset; + } + + /** + * The relative header offset stored in this extra field. + */ + public void setRelativeHeaderOffset(ZipEightByteInteger rho) { + relativeHeaderOffset = rho; + } + + /** + * The disk start number stored in this extra field. + */ + public ZipLong getDiskStartNumber() { + return diskStart; + } + + /** + * The disk start number stored in this extra field. + */ + public void setDiskStartNumber(ZipLong ds) { + diskStart = ds; + } + + private int addSizes(byte[] data) { + int off = 0; + if (size != null) { + System.arraycopy(size.getBytes(), 0, data, 0, DWORD); + off += DWORD; + } + if (compressedSize != null) { + System.arraycopy(compressedSize.getBytes(), 0, data, off, DWORD); + off += DWORD; + } + return off; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64Mode.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64Mode.java new file mode 100644 index 000000000..d051e8982 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64Mode.java @@ -0,0 +1,47 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.apache.commons.compress.archivers.zip; + +/** + * The different modes {@link ZipArchiveOutputStream} can operate in. + * + * @see ZipArchiveOutputStream#setUseZip64 + * + * @since 1.3 + */ +public enum Zip64Mode { + /** + * Use Zip64 extensions for all entries, even if it is clear it is + * not required. + */ + Always, + /** + * Don't use Zip64 extensions for any entries. + * + *

    This will cause a {@link Zip64RequiredException} to be + * thrown if {@link ZipArchiveOutputStream} detects it needs Zip64 + * support.

    + */ + Never, + /** + * Use Zip64 extensions for all entries where they are required, + * don't use them for entries that clearly don't require them. + */ + AsNeeded +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64RequiredException.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64RequiredException.java new file mode 100644 index 000000000..677b4e6e5 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/Zip64RequiredException.java @@ -0,0 +1,49 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.ZipException; + +/** + * Exception thrown when attempting to write data that requires Zip64 + * support to an archive and {@link ZipArchiveOutputStream#setUseZip64 + * UseZip64} has been set to {@link Zip64Mode#Never Never}. + * @since 1.3 + */ +public class Zip64RequiredException extends ZipException { + + private static final long serialVersionUID = 20110809L; + + /** + * Helper to format "entry too big" messages. 
+ */ + static String getEntryTooBigMessage(ZipArchiveEntry ze) { + return ze.getName() + "'s size exceeds the limit of 4GByte."; + } + + static final String ARCHIVE_TOO_BIG_MESSAGE = + "archive's size exceeds the limit of 4GByte."; + + static final String TOO_MANY_ENTRIES_MESSAGE = + "archive contains more than 65535 entries."; + + public Zip64RequiredException(String reason) { + super(reason); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java new file mode 100644 index 000000000..8dbc10191 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveEntry.java @@ -0,0 +1,701 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +import java.io.File; +import java.util.ArrayList; +import java.util.Arrays; +import java.util.Date; +import java.util.LinkedHashMap; +import java.util.List; +import java.util.zip.ZipException; + +/** + * Extension that adds better handling of extra fields and provides + * access to the internal and external file attributes. + * + *

    The extra data is expected to follow the recommendation of + * APPNOTE.TXT:

    + *
      + *
    • the extra byte array consists of a sequence of extra fields
    • + *
    • each extra fields starts by a two byte header id followed by + * a two byte sequence holding the length of the remainder of + * data.
    • + *
    + * + *

    Any extra data that cannot be parsed by the rules above will be + * consumed as "unparseable" extra data and treated differently by the + * methods of this class. Versions prior to Apache Commons Compress + * 1.1 would have thrown an exception if any attempt was made to read + * or write extra data not conforming to the recommendation.

    + * + * @NotThreadSafe + */ +public class ZipArchiveEntry extends java.util.zip.ZipEntry + implements ArchiveEntry { + + public static final int PLATFORM_UNIX = 3; + public static final int PLATFORM_FAT = 0; + private static final int SHORT_MASK = 0xFFFF; + private static final int SHORT_SHIFT = 16; + private static final byte[] EMPTY = new byte[0]; + + /** + * The {@link java.util.zip.ZipEntry} base class only supports + * the compression methods STORED and DEFLATED. We override the + * field so that any compression methods can be used. + *

    + * The default value -1 means that the method has not been specified. + * + * @see COMPRESS-93 + */ + private int method = -1; + + /** + * The {@link java.util.zip.ZipEntry#setSize} method in the base + * class throws an IllegalArgumentException if the size is bigger + * than 2GB for Java versions < 7. Need to keep our own size + * information for Zip64 support. + */ + private long size = SIZE_UNKNOWN; + + private int internalAttributes = 0; + private int platform = PLATFORM_FAT; + private long externalAttributes = 0; + private LinkedHashMap extraFields = null; + private UnparseableExtraFieldData unparseableExtra = null; + private String name = null; + private byte[] rawName = null; + private GeneralPurposeBit gpb = new GeneralPurposeBit(); + + /** + * Creates a new zip entry with the specified name. + * + *

    Assumes the entry represents a directory if and only if the + * name ends with a forward slash "/".

    + * + * @param name the name of the entry + */ + public ZipArchiveEntry(String name) { + super(name); + setName(name); + } + + /** + * Creates a new zip entry with fields taken from the specified zip entry. + * + *

    Assumes the entry represents a directory if and only if the + * name ends with a forward slash "/".

    + * + * @param entry the entry to get fields from + * @throws ZipException on error + */ + public ZipArchiveEntry(java.util.zip.ZipEntry entry) throws ZipException { + super(entry); + setName(entry.getName()); + byte[] extra = entry.getExtra(); + if (extra != null) { + setExtraFields(ExtraFieldUtils.parse(extra, true, + ExtraFieldUtils + .UnparseableExtraField.READ)); + } else { + // initializes extra data to an empty byte array + setExtra(); + } + setMethod(entry.getMethod()); + this.size = entry.getSize(); + } + + /** + * Creates a new zip entry with fields taken from the specified zip entry. + * + *

    Assumes the entry represents a directory if and only if the + * name ends with a forward slash "/".

    + * + * @param entry the entry to get fields from + * @throws ZipException on error + */ + public ZipArchiveEntry(ZipArchiveEntry entry) throws ZipException { + this((java.util.zip.ZipEntry) entry); + setInternalAttributes(entry.getInternalAttributes()); + setExternalAttributes(entry.getExternalAttributes()); + setExtraFields(entry.getExtraFields(true)); + } + + /** + */ + protected ZipArchiveEntry() { + this(""); + } + + /** + * Creates a new zip entry taking some information from the given + * file and using the provided name. + * + *

    The name will be adjusted to end with a forward slash "/" if + * the file is a directory. If the file is not a directory a + * potential trailing forward slash will be stripped from the + * entry name.

    + */ + public ZipArchiveEntry(File inputFile, String entryName) { + this(inputFile.isDirectory() && !entryName.endsWith("/") ? + entryName + "/" : entryName); + if (inputFile.isFile()){ + setSize(inputFile.length()); + } + setTime(inputFile.lastModified()); + // TODO are there any other fields we can set here? + } + + /** + * Overwrite clone. + * @return a cloned copy of this ZipArchiveEntry + */ + @Override + public Object clone() { + ZipArchiveEntry e = (ZipArchiveEntry) super.clone(); + + e.setInternalAttributes(getInternalAttributes()); + e.setExternalAttributes(getExternalAttributes()); + e.setExtraFields(getExtraFields(true)); + return e; + } + + /** + * Returns the compression method of this entry, or -1 if the + * compression method has not been specified. + * + * @return compression method + * + * @since 1.1 + */ + @Override + public int getMethod() { + return method; + } + + /** + * Sets the compression method of this entry. + * + * @param method compression method + * + * @since 1.1 + */ + @Override + public void setMethod(int method) { + if (method < 0) { + throw new IllegalArgumentException( + "ZIP compression method can not be negative: " + method); + } + this.method = method; + } + + /** + * Retrieves the internal file attributes. + * + * @return the internal file attributes + */ + public int getInternalAttributes() { + return internalAttributes; + } + + /** + * Sets the internal file attributes. + * @param value an int value + */ + public void setInternalAttributes(int value) { + internalAttributes = value; + } + + /** + * Retrieves the external file attributes. + * @return the external file attributes + */ + public long getExternalAttributes() { + return externalAttributes; + } + + /** + * Sets the external file attributes. + * @param value an long value + */ + public void setExternalAttributes(long value) { + externalAttributes = value; + } + + /** + * Sets Unix permissions in a way that is understood by Info-Zip's + * unzip command. 
+ * @param mode an int value + */ + public void setUnixMode(int mode) { + // CheckStyle:MagicNumberCheck OFF - no point + setExternalAttributes((mode << SHORT_SHIFT) + // MS-DOS read-only attribute + | ((mode & 0200) == 0 ? 1 : 0) + // MS-DOS directory flag + | (isDirectory() ? 0x10 : 0)); + // CheckStyle:MagicNumberCheck ON + platform = PLATFORM_UNIX; + } + + /** + * Unix permission. + * @return the unix permissions + */ + public int getUnixMode() { + return platform != PLATFORM_UNIX ? 0 : + (int) ((getExternalAttributes() >> SHORT_SHIFT) & SHORT_MASK); + } + + /** + * Returns true if this entry represents a unix symlink, + * in which case the entry's content contains the target path + * for the symlink. + * + * @since 1.5 + * @return true if the entry represents a unix symlink, false otherwise. + */ + public boolean isUnixSymlink() { + return (getUnixMode() & UnixStat.LINK_FLAG) == UnixStat.LINK_FLAG; + } + + /** + * Platform specification to put into the "version made + * by" part of the central file header. + * + * @return PLATFORM_FAT unless {@link #setUnixMode setUnixMode} + * has been called, in which case PLATFORM_UNIX will be returned. + */ + public int getPlatform() { + return platform; + } + + /** + * Set the platform (UNIX or FAT). + * @param platform an int value - 0 is FAT, 3 is UNIX + */ + protected void setPlatform(int platform) { + this.platform = platform; + } + + /** + * Replaces all currently attached extra fields with the new array. + * @param fields an array of extra fields + */ + public void setExtraFields(ZipExtraField[] fields) { + extraFields = new LinkedHashMap(); + for (ZipExtraField field : fields) { + if (field instanceof UnparseableExtraFieldData) { + unparseableExtra = (UnparseableExtraFieldData) field; + } else { + extraFields.put(field.getHeaderId(), field); + } + } + setExtra(); + } + + /** + * Retrieves all extra fields that have been parsed successfully. 
+ * @return an array of the extra fields + */ + public ZipExtraField[] getExtraFields() { + return getExtraFields(false); + } + + /** + * Retrieves extra fields. + * @param includeUnparseable whether to also return unparseable + * extra fields as {@link UnparseableExtraFieldData} if such data + * exists. + * @return an array of the extra fields + * + * @since 1.1 + */ + public ZipExtraField[] getExtraFields(boolean includeUnparseable) { + if (extraFields == null) { + return !includeUnparseable || unparseableExtra == null + ? new ZipExtraField[0] + : new ZipExtraField[] { unparseableExtra }; + } + List result = + new ArrayList(extraFields.values()); + if (includeUnparseable && unparseableExtra != null) { + result.add(unparseableExtra); + } + return result.toArray(new ZipExtraField[0]); + } + + /** + * Adds an extra field - replacing an already present extra field + * of the same type. + * + *

    If no extra field of the same type exists, the field will be + * added as last field.

    + * @param ze an extra field + */ + public void addExtraField(ZipExtraField ze) { + if (ze instanceof UnparseableExtraFieldData) { + unparseableExtra = (UnparseableExtraFieldData) ze; + } else { + if (extraFields == null) { + extraFields = new LinkedHashMap(); + } + extraFields.put(ze.getHeaderId(), ze); + } + setExtra(); + } + + /** + * Adds an extra field - replacing an already present extra field + * of the same type. + * + *

    The new extra field will be the first one.

    + * @param ze an extra field + */ + public void addAsFirstExtraField(ZipExtraField ze) { + if (ze instanceof UnparseableExtraFieldData) { + unparseableExtra = (UnparseableExtraFieldData) ze; + } else { + LinkedHashMap copy = extraFields; + extraFields = new LinkedHashMap(); + extraFields.put(ze.getHeaderId(), ze); + if (copy != null) { + copy.remove(ze.getHeaderId()); + extraFields.putAll(copy); + } + } + setExtra(); + } + + /** + * Remove an extra field. + * @param type the type of extra field to remove + */ + public void removeExtraField(ZipShort type) { + if (extraFields == null) { + throw new java.util.NoSuchElementException(); + } + if (extraFields.remove(type) == null) { + throw new java.util.NoSuchElementException(); + } + setExtra(); + } + + /** + * Removes unparseable extra field data. + * + * @since 1.1 + */ + public void removeUnparseableExtraFieldData() { + if (unparseableExtra == null) { + throw new java.util.NoSuchElementException(); + } + unparseableExtra = null; + setExtra(); + } + + /** + * Looks up an extra field by its header id. + * + * @return null if no such field exists. + */ + public ZipExtraField getExtraField(ZipShort type) { + if (extraFields != null) { + return extraFields.get(type); + } + return null; + } + + /** + * Looks up extra field data that couldn't be parsed correctly. + * + * @return null if no such field exists. + * + * @since 1.1 + */ + public UnparseableExtraFieldData getUnparseableExtraFieldData() { + return unparseableExtra; + } + + /** + * Parses the given bytes as extra field data and consumes any + * unparseable data as an {@link UnparseableExtraFieldData} + * instance. 
+ * @param extra an array of bytes to be parsed into extra fields + * @throws RuntimeException if the bytes cannot be parsed + * @throws RuntimeException on error + */ + @Override + public void setExtra(byte[] extra) throws RuntimeException { + try { + ZipExtraField[] local = + ExtraFieldUtils.parse(extra, true, + ExtraFieldUtils.UnparseableExtraField.READ); + mergeExtraFields(local, true); + } catch (ZipException e) { + // actually this is not possible as of Commons Compress 1.1 + throw new RuntimeException("Error parsing extra fields for entry: " + + getName() + " - " + e.getMessage(), e); + } + } + + /** + * Unfortunately {@link java.util.zip.ZipOutputStream + * java.util.zip.ZipOutputStream} seems to access the extra data + * directly, so overriding getExtra doesn't help - we need to + * modify super's data directly. + */ + protected void setExtra() { + super.setExtra(ExtraFieldUtils.mergeLocalFileDataData(getExtraFields(true))); + } + + /** + * Sets the central directory part of extra fields. + */ + public void setCentralDirectoryExtra(byte[] b) { + try { + ZipExtraField[] central = + ExtraFieldUtils.parse(b, false, + ExtraFieldUtils.UnparseableExtraField.READ); + mergeExtraFields(central, false); + } catch (ZipException e) { + throw new RuntimeException(e.getMessage(), e); + } + } + + /** + * Retrieves the extra data for the local file data. + * @return the extra data for local file + */ + public byte[] getLocalFileDataExtra() { + byte[] extra = getExtra(); + return extra != null ? extra : EMPTY; + } + + /** + * Retrieves the extra data for the central directory. + * @return the central directory extra data + */ + public byte[] getCentralDirectoryExtra() { + return ExtraFieldUtils.mergeCentralDirectoryData(getExtraFields(true)); + } + + /** + * Get the name of the entry. + * @return the entry name + */ + @Override + public String getName() { + return name == null ? super.getName() : name; + } + + /** + * Is this entry a directory? 
+ * @return true if the entry is a directory + */ + @Override + public boolean isDirectory() { + return getName().endsWith("/"); + } + + /** + * Set the name of the entry. + * @param name the name to use + */ + protected void setName(String name) { + if (name != null && getPlatform() == PLATFORM_FAT + && name.indexOf("/") == -1) { + name = name.replace('\\', '/'); + } + this.name = name; + } + + /** + * Gets the uncompressed size of the entry data. + * @return the entry size + */ + @Override + public long getSize() { + return size; + } + + /** + * Sets the uncompressed size of the entry data. + * @param size the uncompressed size in bytes + * @exception IllegalArgumentException if the specified size is less + * than 0 + */ + @Override + public void setSize(long size) { + if (size < 0) { + throw new IllegalArgumentException("invalid entry size"); + } + this.size = size; + } + + /** + * Sets the name using the raw bytes and the string created from + * it by guessing or using the configured encoding. + * @param name the name to use created from the raw bytes using + * the guessed or configured encoding + * @param rawName the bytes originally read as name from the + * archive + * @since 1.2 + */ + protected void setName(String name, byte[] rawName) { + setName(name); + this.rawName = rawName; + } + + /** + * Returns the raw bytes that made up the name before it has been + * converted using the configured or guessed encoding. + * + *

    This method will return null if this instance has not been + * read from an archive.

    + * + * @since 1.2 + */ + public byte[] getRawName() { + if (rawName != null) { + byte[] b = new byte[rawName.length]; + System.arraycopy(rawName, 0, b, 0, rawName.length); + return b; + } + return null; + } + + /** + * Get the hashCode of the entry. + * This uses the name as the hashcode. + * @return a hashcode. + */ + @Override + public int hashCode() { + // this method has severe consequences on performance. We cannot rely + // on the super.hashCode() method since super.getName() always return + // the empty string in the current implemention (there's no setter) + // so it is basically draining the performance of a hashmap lookup + return getName().hashCode(); + } + + /** + * The "general purpose bit" field. + * @since 1.1 + */ + public GeneralPurposeBit getGeneralPurposeBit() { + return gpb; + } + + /** + * The "general purpose bit" field. + * @since 1.1 + */ + public void setGeneralPurposeBit(GeneralPurposeBit b) { + gpb = b; + } + + /** + * If there are no extra fields, use the given fields as new extra + * data - otherwise merge the fields assuming the existing fields + * and the new fields stem from different locations inside the + * archive. 
+ * @param f the extra fields to merge + * @param local whether the new fields originate from local data + */ + private void mergeExtraFields(ZipExtraField[] f, boolean local) + throws ZipException { + if (extraFields == null) { + setExtraFields(f); + } else { + for (ZipExtraField element : f) { + ZipExtraField existing; + if (element instanceof UnparseableExtraFieldData) { + existing = unparseableExtra; + } else { + existing = getExtraField(element.getHeaderId()); + } + if (existing == null) { + addExtraField(element); + } else { + if (local) { + byte[] b = element.getLocalFileDataData(); + existing.parseFromLocalFileData(b, 0, b.length); + } else { + byte[] b = element.getCentralDirectoryData(); + existing.parseFromCentralDirectoryData(b, 0, b.length); + } + } + } + setExtra(); + } + } + + public Date getLastModifiedDate() { + return new Date(getTime()); + } + + /* (non-Javadoc) + * @see java.lang.Object#equals(java.lang.Object) + */ + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null || getClass() != obj.getClass()) { + return false; + } + ZipArchiveEntry other = (ZipArchiveEntry) obj; + String myName = getName(); + String otherName = other.getName(); + if (myName == null) { + if (otherName != null) { + return false; + } + } else if (!myName.equals(otherName)) { + return false; + } + String myComment = getComment(); + String otherComment = other.getComment(); + if (myComment == null) { + myComment = ""; + } + if (otherComment == null) { + otherComment = ""; + } + return getTime() == other.getTime() + && myComment.equals(otherComment) + && getInternalAttributes() == other.getInternalAttributes() + && getPlatform() == other.getPlatform() + && getExternalAttributes() == other.getExternalAttributes() + && getMethod() == other.getMethod() + && getSize() == other.getSize() + && getCrc() == other.getCrc() + && getCompressedSize() == other.getCompressedSize() + && Arrays.equals(getCentralDirectoryExtra(), + 
other.getCentralDirectoryExtra()) + && Arrays.equals(getLocalFileDataExtra(), + other.getLocalFileDataExtra()) + && gpb.equals(other.gpb); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java new file mode 100644 index 000000000..202eeb691 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveInputStream.java @@ -0,0 +1,1072 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.archivers.zip; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.EOFException; +import java.io.IOException; +import java.io.InputStream; +import java.io.PushbackInputStream; +import java.nio.ByteBuffer; +import java.util.zip.CRC32; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; +import java.util.zip.ZipEntry; +import java.util.zip.ZipException; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.utils.IOUtils; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT; +import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC; + +/** + * Implements an input stream that can read Zip archives. + * + *

    Note that {@link ZipArchiveEntry#getSize()} may return -1 if the + * DEFLATE algorithm is used, as the size information is not available + * from the header.

    + * + *

    The {@link ZipFile} class is preferred when reading from files.

    + * + *

    As of Apache Commons Compress it transparently supports Zip64 + * extensions and thus individual entries and archives larger than 4 + * GB or with more than 65536 entries.

    + * + * @see ZipFile + * @NotThreadSafe + */ +public class ZipArchiveInputStream extends ArchiveInputStream { + + /** The zip encoding to use for filenames and the file comment. */ + private final ZipEncoding zipEncoding; + + /** Whether to look for and use Unicode extra fields. */ + private final boolean useUnicodeExtraFields; + + /** Wrapped stream, will always be a PushbackInputStream. */ + private final InputStream in; + + /** Inflater used for all deflated entries. */ + private final Inflater inf = new Inflater(true); + + /** Buffer used to read from the wrapped stream. */ + private final ByteBuffer buf = ByteBuffer.allocate(ZipArchiveOutputStream.BUFFER_SIZE); + + /** The entry that is currently being read. */ + private CurrentEntry current = null; + + /** Whether the stream has been closed. */ + private boolean closed = false; + + /** Whether the stream has reached the central directory - and thus found all entries. */ + private boolean hitCentralDirectory = false; + + /** + * When reading a stored entry that uses the data descriptor this + * stream has to read the full entry and caches it. This is the + * cache. + */ + private ByteArrayInputStream lastStoredEntry = null; + + /** Whether the stream will try to read STORED entries that use a data descriptor. 
*/ + private boolean allowStoredEntriesWithDataDescriptor = false; + + private static final int LFH_LEN = 30; + /* + local file header signature WORD + version needed to extract SHORT + general purpose bit flag SHORT + compression method SHORT + last mod file time SHORT + last mod file date SHORT + crc-32 WORD + compressed size WORD + uncompressed size WORD + file name length SHORT + extra field length SHORT + */ + + private static final int CFH_LEN = 46; + /* + central file header signature WORD + version made by SHORT + version needed to extract SHORT + general purpose bit flag SHORT + compression method SHORT + last mod file time SHORT + last mod file date SHORT + crc-32 WORD + compressed size WORD + uncompressed size WORD + file name length SHORT + extra field length SHORT + file comment length SHORT + disk number start SHORT + internal file attributes SHORT + external file attributes WORD + relative offset of local header WORD + */ + + private static final long TWO_EXP_32 = ZIP64_MAGIC + 1; + + // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) + private final byte[] LFH_BUF = new byte[LFH_LEN]; + private final byte[] SKIP_BUF = new byte[1024]; + private final byte[] SHORT_BUF = new byte[SHORT]; + private final byte[] WORD_BUF = new byte[WORD]; + private final byte[] TWO_DWORD_BUF = new byte[2 * DWORD]; + + private int entriesRead = 0; + + public ZipArchiveInputStream(InputStream inputStream) { + this(inputStream, ZipEncodingHelper.UTF8); + } + + /** + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * @since 1.5 + */ + public ZipArchiveInputStream(InputStream inputStream, String encoding) { + this(inputStream, encoding, true); + } + + /** + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * @param useUnicodeExtraFields whether to use InfoZIP Unicode + * Extra Fields (if present) to set the file 
     * names.
     */
    public ZipArchiveInputStream(InputStream inputStream, String encoding, boolean useUnicodeExtraFields) {
        this(inputStream, encoding, useUnicodeExtraFields, false);
    }

    /**
     * @param encoding the encoding to use for file names, use null
     * for the platform's default encoding
     * @param useUnicodeExtraFields whether to use InfoZIP Unicode
     * Extra Fields (if present) to set the file names.
     * @param allowStoredEntriesWithDataDescriptor whether the stream
     * will try to read STORED entries that use a data descriptor
     * @since 1.1
     */
    public ZipArchiveInputStream(InputStream inputStream,
                                 String encoding,
                                 boolean useUnicodeExtraFields,
                                 boolean allowStoredEntriesWithDataDescriptor) {
        zipEncoding = ZipEncodingHelper.getZipEncoding(encoding);
        this.useUnicodeExtraFields = useUnicodeExtraFields;
        // wrap in a PushbackInputStream as large as the read buffer so that
        // bytes read past an entry's end can always be pushed back
        in = new PushbackInputStream(inputStream, buf.capacity());
        this.allowStoredEntriesWithDataDescriptor =
            allowStoredEntriesWithDataDescriptor;
    }

    /**
     * Reads the next local file header from the wrapped stream and returns
     * the entry it describes, or null once the central directory (or EOF)
     * has been reached.
     *
     * @return the next entry, or null if there are no more entries
     * @throws IOException on read error or on unsupported features
     */
    public ZipArchiveEntry getNextZipEntry() throws IOException {
        boolean firstEntry = true;
        if (closed || hitCentralDirectory) {
            return null;
        }
        if (current != null) {
            // finish off the previous entry so the stream is positioned at
            // the next local file header
            closeEntry();
            firstEntry = false;
        }

        try {
            if (firstEntry) {
                // split archives have a special signature before the
                // first local file header - look for it and fail with
                // the appropriate error message if this is a split
                // archive.
                readFirstLocalFileHeader(LFH_BUF);
            } else {
                readFully(LFH_BUF);
            }
        } catch (EOFException e) {
            // no more headers - treat as end of archive
            return null;
        }

        ZipLong sig = new ZipLong(LFH_BUF);
        if (sig.equals(ZipLong.CFH_SIG) || sig.equals(ZipLong.AED_SIG)) {
            // start of the central directory (or an archive-extra-data
            // record) - all entries have been seen; consume the remainder
            hitCentralDirectory = true;
            skipRemainderOfArchive();
        }
        if (!sig.equals(ZipLong.LFH_SIG)) {
            return null;
        }

        int off = WORD;
        current = new CurrentEntry();

        int versionMadeBy = ZipShort.getValue(LFH_BUF, off);
        off += SHORT;
        current.entry.setPlatform((versionMadeBy >> ZipFile.BYTE_SHIFT) & ZipFile.NIBLET_MASK);

        final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(LFH_BUF, off);
        final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames();
        // the UTF-8 flag overrides the encoding configured for this stream
        final ZipEncoding entryEncoding = hasUTF8Flag ? ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding;
        current.hasDataDescriptor = gpFlag.usesDataDescriptor();
        current.entry.setGeneralPurposeBit(gpFlag);

        off += SHORT;

        current.entry.setMethod(ZipShort.getValue(LFH_BUF, off));
        off += SHORT;

        long time = ZipUtil.dosToJavaTime(ZipLong.getValue(LFH_BUF, off));
        current.entry.setTime(time);
        off += WORD;

        ZipLong size = null, cSize = null;
        if (!current.hasDataDescriptor) {
            // CRC and sizes are present in the header itself
            current.entry.setCrc(ZipLong.getValue(LFH_BUF, off));
            off += WORD;

            cSize = new ZipLong(LFH_BUF, off);
            off += WORD;

            size = new ZipLong(LFH_BUF, off);
            off += WORD;
        } else {
            // CRC and sizes follow the entry data in a data descriptor;
            // the header fields here are meaningless - skip them
            off += 3 * WORD;
        }

        int fileNameLen = ZipShort.getValue(LFH_BUF, off);

        off += SHORT;

        int extraLen = ZipShort.getValue(LFH_BUF, off);
        off += SHORT;

        byte[] fileName = new byte[fileNameLen];
        readFully(fileName);
        current.entry.setName(entryEncoding.decode(fileName), fileName);

        byte[] extraData = new byte[extraLen];
        readFully(extraData);
        current.entry.setExtra(extraData);

        if (!hasUTF8Flag && useUnicodeExtraFields) {
            ZipUtil.setNameAndCommentFromExtraFields(current.entry, fileName, null);
        }

        processZip64Extra(size, cSize);

        if (current.entry.getCompressedSize() != -1) {
            // methods other than STORED/DEFLATED need a dedicated
            // decompressing stream bounded to the compressed size
            if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode()) {
                current.in = new UnshrinkingInputStream(new BoundedInputStream(in, current.entry.getCompressedSize()));
            } else if (current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) {
                current.in = new ExplodingInputStream(
                    current.entry.getGeneralPurposeBit().getSlidingDictionarySize(),
                    current.entry.getGeneralPurposeBit().getNumberOfShannonFanoTrees(),
                    new BoundedInputStream(in, current.entry.getCompressedSize()));
            }
        }

        entriesRead++;
        return current.entry;
    }

    /**
     * Fills the given array with the first local file header and
     * deals with splitting/spanning markers that may prefix the first
     * LFH.
     */
    private void readFirstLocalFileHeader(byte[] lfh) throws IOException {
        readFully(lfh);
        ZipLong sig = new ZipLong(lfh);
        if (sig.equals(ZipLong.DD_SIG)) {
            // a real multi-segment split archive - not supported
            throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.SPLITTING);
        }

        if (sig.equals(ZipLong.SINGLE_SEGMENT_SPLIT_MARKER)) {
            // The archive is not really split as only one segment was
            // needed in the end.  Just skip over the marker and shift
            // the header bytes already read into place.
            byte[] missedLfhBytes = new byte[4];
            readFully(missedLfhBytes);
            System.arraycopy(lfh, 4, lfh, 0, LFH_LEN - 4);
            System.arraycopy(missedLfhBytes, 0, lfh, LFH_LEN - 4, 4);
        }
    }

    /**
     * Records whether a Zip64 extra is present and sets the size
     * information from it if sizes are 0xFFFFFFFF and the entry
     * doesn't use a data descriptor.
     */
    private void processZip64Extra(ZipLong size, ZipLong cSize) {
        Zip64ExtendedInformationExtraField z64 =
            (Zip64ExtendedInformationExtraField)
            current.entry.getExtraField(Zip64ExtendedInformationExtraField.HEADER_ID);
        current.usesZip64 = z64 != null;
        if (!current.hasDataDescriptor) {
            if (z64 != null // same as current.usesZip64 but avoids NPE warning
                && (cSize.equals(ZipLong.ZIP64_MAGIC) || size.equals(ZipLong.ZIP64_MAGIC)) ) {
                // sizes in the LFH are the ZIP64 sentinel - take the real
                // values from the extra field
                current.entry.setCompressedSize(z64.getCompressedSize().getLongValue());
                current.entry.setSize(z64.getSize().getLongValue());
            } else {
                current.entry.setCompressedSize(cSize.getValue());
                current.entry.setSize(size.getValue());
            }
        }
    }

    /** Delegates to {@link #getNextZipEntry}. */
    @Override
    public ArchiveEntry getNextEntry() throws IOException {
        return getNextZipEntry();
    }

    /**
     * Whether this class is able to read the given entry.
     *
     * <p>

    May return false if it is set up to use encryption or a + * compression method that hasn't been implemented yet.

    + * @since 1.1 + */ + @Override + public boolean canReadEntryData(ArchiveEntry ae) { + if (ae instanceof ZipArchiveEntry) { + ZipArchiveEntry ze = (ZipArchiveEntry) ae; + return ZipUtil.canHandleEntryData(ze) + && supportsDataDescriptorFor(ze); + + } + return false; + } + + @Override + public int read(byte[] buffer, int offset, int length) throws IOException { + if (closed) { + throw new IOException("The stream is closed"); + } + + if (current == null) { + return -1; + } + + // avoid int overflow, check null buffer + if (offset > buffer.length || length < 0 || offset < 0 || buffer.length - offset < length) { + throw new ArrayIndexOutOfBoundsException(); + } + + ZipUtil.checkRequestedFeatures(current.entry); + if (!supportsDataDescriptorFor(current.entry)) { + throw new UnsupportedZipFeatureException(UnsupportedZipFeatureException.Feature.DATA_DESCRIPTOR, + current.entry); + } + + int read; + if (current.entry.getMethod() == ZipArchiveOutputStream.STORED) { + read = readStored(buffer, offset, length); + } else if (current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED) { + read = readDeflated(buffer, offset, length); + } else if (current.entry.getMethod() == ZipMethod.UNSHRINKING.getCode() + || current.entry.getMethod() == ZipMethod.IMPLODING.getCode()) { + read = current.in.read(buffer, offset, length); + } else { + throw new UnsupportedZipFeatureException(ZipMethod.getMethodByCode(current.entry.getMethod()), + current.entry); + } + + if (read >= 0) { + current.crc.update(buffer, offset, read); + } + + return read; + } + + /** + * Implementation of read for STORED entries. 
     */
    private int readStored(byte[] buffer, int offset, int length) throws IOException {

        if (current.hasDataDescriptor) {
            // size is unknown until the data descriptor is seen, so the whole
            // entry has been (or will be) cached in lastStoredEntry
            if (lastStoredEntry == null) {
                readStoredEntry();
            }
            return lastStoredEntry.read(buffer, offset, length);
        }

        // for STORED entries compressed and uncompressed size are the same
        long csize = current.entry.getSize();
        if (current.bytesRead >= csize) {
            return -1;
        }

        if (buf.position() >= buf.limit()) {
            // local buffer is exhausted - refill from the wrapped stream
            buf.position(0);
            int l = in.read(buf.array());
            if (l == -1) {
                return -1;
            }
            buf.limit(l);

            count(buf.limit());
            current.bytesReadFromStream += buf.limit();
        }

        int toRead = Math.min(buf.remaining(), length);
        if ((csize - current.bytesRead) < toRead) {
            // if it is smaller than toRead then it fits into an int
            toRead = (int) (csize - current.bytesRead);
        }
        buf.get(buffer, offset, toRead);
        current.bytesRead += toRead;
        return toRead;
    }

    /**
     * Implementation of read for DEFLATED entries.
     */
    private int readDeflated(byte[] buffer, int offset, int length) throws IOException {
        int read = readFromInflater(buffer, offset, length);
        if (read <= 0) {
            if (inf.finished()) {
                // all compressed data consumed - end of this entry
                return -1;
            } else if (inf.needsDictionary()) {
                throw new ZipException("This archive needs a preset dictionary"
                                       + " which is not supported by Commons"
                                       + " Compress.");
            } else if (read == -1) {
                // the wrapped stream ran dry before the inflater finished
                throw new IOException("Truncated ZIP file");
            }
        }
        return read;
    }

    /**
     * Potentially reads more bytes to fill the inflater's buffer and
     * reads from it.
+ */ + private int readFromInflater(byte[] buffer, int offset, int length) throws IOException { + int read = 0; + do { + if (inf.needsInput()) { + int l = fill(); + if (l > 0) { + current.bytesReadFromStream += buf.limit(); + } else if (l == -1) { + return -1; + } else { + break; + } + } + try { + read = inf.inflate(buffer, offset, length); + } catch (DataFormatException e) { + throw (IOException) new ZipException(e.getMessage()).initCause(e); + } + } while (read == 0 && inf.needsInput()); + return read; + } + + @Override + public void close() throws IOException { + if (!closed) { + closed = true; + in.close(); + inf.end(); + } + } + + /** + * Skips over and discards value bytes of data from this input + * stream. + * + *

    This implementation may end up skipping over some smaller + * number of bytes, possibly 0, if and only if it reaches the end + * of the underlying stream.

    + * + *

    The actual number of bytes skipped is returned.

    + * + * @param value the number of bytes to be skipped. + * @return the actual number of bytes skipped. + * @throws IOException - if an I/O error occurs. + * @throws IllegalArgumentException - if value is negative. + */ + @Override + public long skip(long value) throws IOException { + if (value >= 0) { + long skipped = 0; + while (skipped < value) { + long rem = value - skipped; + int x = read(SKIP_BUF, 0, (int) (SKIP_BUF.length > rem ? rem : SKIP_BUF.length)); + if (x == -1) { + return skipped; + } + skipped += x; + } + return skipped; + } + throw new IllegalArgumentException(); + } + + /** + * Checks if the signature matches what is expected for a zip file. + * Does not currently handle self-extracting zips which may have arbitrary + * leading content. + * + * @param signature the bytes to check + * @param length the number of bytes to check + * @return true, if this stream is a zip archive stream, false otherwise + */ + public static boolean matches(byte[] signature, int length) { + if (length < ZipArchiveOutputStream.LFH_SIG.length) { + return false; + } + + return checksig(signature, ZipArchiveOutputStream.LFH_SIG) // normal file + || checksig(signature, ZipArchiveOutputStream.EOCD_SIG) // empty zip + || checksig(signature, ZipArchiveOutputStream.DD_SIG) // split zip + || checksig(signature, ZipLong.SINGLE_SEGMENT_SPLIT_MARKER.getBytes()); + } + + private static boolean checksig(byte[] signature, byte[] expected) { + for (int i = 0; i < expected.length; i++) { + if (signature[i] != expected[i]) { + return false; + } + } + return true; + } + + /** + * Closes the current ZIP archive entry and positions the underlying + * stream to the beginning of the next entry. All per-entry variables + * and data structures are cleared. + *

    + * If the compressed size of this entry is included in the entry header, + * then any outstanding bytes are simply skipped from the underlying + * stream without uncompressing them. This allows an entry to be safely + * closed even if the compression method is unsupported. + *

     * In case we don't know the compressed size of this entry or have
     * already buffered too much data from the underlying stream to support
     * uncompression, then the uncompression process is completed and the
     * end position of the stream is adjusted based on the result of that
     * process.
     *
     * @throws IOException if an error occurs
     */
    private void closeEntry() throws IOException {
        if (closed) {
            throw new IOException("The stream is closed");
        }
        if (current == null) {
            return;
        }

        // Ensure all entry bytes are read
        if (current.bytesReadFromStream <= current.entry.getCompressedSize()
            && !current.hasDataDescriptor) {
            // compressed size is known and nothing beyond it has been
            // buffered - just skip the remaining raw bytes
            drainCurrentEntryData();
        } else {
            // finish decompressing, then push back whatever was read from
            // the wrapped stream but not consumed by this entry
            skip(Long.MAX_VALUE);

            long inB = current.entry.getMethod() == ZipArchiveOutputStream.DEFLATED
                ? getBytesInflated() : current.bytesRead;

            // this is at most a single read() operation and can't
            // exceed the range of int
            int diff = (int) (current.bytesReadFromStream - inB);

            // Pushback any required bytes
            if (diff > 0) {
                pushback(buf.array(), buf.limit() - diff, diff);
            }
        }

        if (lastStoredEntry == null && current.hasDataDescriptor) {
            readDataDescriptor();
        }

        // reset per-entry state for the next entry
        inf.reset();
        buf.clear().flip();
        current = null;
        lastStoredEntry = null;
    }

    /**
     * Read all data of the current entry from the underlying stream
     * that hasn't been read, yet.
     */
    private void drainCurrentEntryData() throws IOException {
        long remaining = current.entry.getCompressedSize() - current.bytesReadFromStream;
        while (remaining > 0) {
            long n = in.read(buf.array(), 0, (int) Math.min(buf.capacity(), remaining));
            if (n < 0) {
                throw new EOFException("Truncated ZIP entry: " + current.entry.getName());
            } else {
                count(n);
                remaining -= n;
            }
        }
    }

    /**
     * Get the number of bytes Inflater has actually processed.
     *
     * <p>

    for Java < Java7 the getBytes* methods in + * Inflater/Deflater seem to return unsigned ints rather than + * longs that start over with 0 at 2^32.

    + * + *

    The stream knows how many bytes it has read, but not how + * many the Inflater actually consumed - it should be between the + * total number of bytes read for the entry and the total number + * minus the last read operation. Here we just try to make the + * value close enough to the bytes we've read by assuming the + * number of bytes consumed must be smaller than (or equal to) the + * number of bytes read but not smaller by more than 2^32.

     */
    private long getBytesInflated() {
        long inB = inf.getBytesRead();
        if (current.bytesReadFromStream >= TWO_EXP_32) {
            // compensate for the 2^32 wrap-around described in the javadoc
            while (inB + TWO_EXP_32 <= current.bytesReadFromStream) {
                inB += TWO_EXP_32;
            }
        }
        return inB;
    }

    /**
     * Refills the local buffer from the wrapped stream and hands the bytes
     * to the inflater.
     *
     * @return number of bytes read from the wrapped stream, -1 on EOF
     */
    private int fill() throws IOException {
        if (closed) {
            throw new IOException("The stream is closed");
        }
        int length = in.read(buf.array());
        if (length > 0) {
            buf.limit(length);
            count(buf.limit());
            inf.setInput(buf.array(), 0, buf.limit());
        }
        return length;
    }

    /**
     * Reads exactly b.length bytes from the wrapped stream.
     *
     * @throws EOFException if the stream ends before the array is filled
     */
    private void readFully(byte[] b) throws IOException {
        int count = IOUtils.readFully(in, b);
        count(count);
        if (count < b.length) {
            throw new EOFException();
        }
    }

    /**
     * Reads the data descriptor that follows the current entry's data and
     * copies its CRC and size information into the entry.
     */
    private void readDataDescriptor() throws IOException {
        readFully(WORD_BUF);
        ZipLong val = new ZipLong(WORD_BUF);
        if (ZipLong.DD_SIG.equals(val)) {
            // data descriptor with signature, skip sig
            readFully(WORD_BUF);
            val = new ZipLong(WORD_BUF);
        }
        current.entry.setCrc(val.getValue());

        // if there is a ZIP64 extra field, sizes are eight bytes
        // each, otherwise four bytes each.  Unfortunately some
        // implementations - namely Java7 - use eight bytes without
        // using a ZIP64 extra field -
        // http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=7073588

        // just read 16 bytes and check whether bytes nine to twelve
        // look like one of the signatures of what could follow a data
        // descriptor (ignoring archive decryption headers for now).
        // If so, push back eight bytes and assume sizes are four
        // bytes, otherwise sizes are eight bytes each.
        readFully(TWO_DWORD_BUF);
        ZipLong potentialSig = new ZipLong(TWO_DWORD_BUF, DWORD);
        if (potentialSig.equals(ZipLong.CFH_SIG) || potentialSig.equals(ZipLong.LFH_SIG)) {
            pushback(TWO_DWORD_BUF, DWORD, DWORD);
            current.entry.setCompressedSize(ZipLong.getValue(TWO_DWORD_BUF));
            current.entry.setSize(ZipLong.getValue(TWO_DWORD_BUF, WORD));
        } else {
            current.entry.setCompressedSize(ZipEightByteInteger.getLongValue(TWO_DWORD_BUF));
            current.entry.setSize(ZipEightByteInteger.getLongValue(TWO_DWORD_BUF, DWORD));
        }
    }

    /**
     * Whether this entry requires a data descriptor this library can work with.
     *
     * @return true if allowStoredEntriesWithDataDescriptor is true,
     * the entry doesn't require any data descriptor or the method is
     * DEFLATED.
     */
    private boolean supportsDataDescriptorFor(ZipArchiveEntry entry) {
        return !entry.getGeneralPurposeBit().usesDataDescriptor()
            || (allowStoredEntriesWithDataDescriptor && entry.getMethod() == ZipEntry.STORED)
            || entry.getMethod() == ZipEntry.DEFLATED;
    }

    /**
     * Caches a stored entry that uses the data descriptor.
     *
     * <p>
     * <ul>
     * <li>Reads a stored entry until the signature of a local file
     * header, central directory header or data descriptor has been
     * found.</li>
     * <li>Stores all entry data in lastStoredEntry.</li>
     * <li>Rewinds the stream to position at the data
     * descriptor.</li>
     * <li>reads the data descriptor</li>
     * </ul>
     *
     * <p>

    After calling this method the entry should know its size, + * the entry's data is cached and the stream is positioned at the + * next local file or central directory header.

     */
    private void readStoredEntry() throws IOException {
        ByteArrayOutputStream bos = new ByteArrayOutputStream();
        int off = 0;
        boolean done = false;

        // length of DD without signature
        int ddLen = current.usesZip64 ? WORD + 2 * DWORD : 3 * WORD;

        while (!done) {
            int r = in.read(buf.array(), off, ZipArchiveOutputStream.BUFFER_SIZE - off);
            if (r <= 0) {
                // read the whole archive without ever finding a
                // central directory
                throw new IOException("Truncated ZIP file");
            }
            if (r + off < 4) {
                // buffer too small to check for a signature, loop
                off += r;
                continue;
            }

            done = bufferContainsSignature(bos, off, r, ddLen);
            if (!done) {
                off = cacheBytesRead(bos, off, r, ddLen);
            }
        }

        byte[] b = bos.toByteArray();
        lastStoredEntry = new ByteArrayInputStream(b);
    }

    // the three four-byte signatures scanned for while caching a stored entry
    private static final byte[] LFH = ZipLong.LFH_SIG.getBytes();
    private static final byte[] CFH = ZipLong.CFH_SIG.getBytes();
    private static final byte[] DD = ZipLong.DD_SIG.getBytes();

    /**
     * Checks whether the current buffer contains the signature of a
     * "data descriptor", "local file header" or
     * "central directory entry".
     *
     * <p>

    If it contains such a signature, reads the data descriptor + * and positions the stream right after the data descriptor.

    + */ + private boolean bufferContainsSignature(ByteArrayOutputStream bos, int offset, int lastRead, int expectedDDLen) + throws IOException { + + boolean done = false; + int readTooMuch = 0; + for (int i = 0; !done && i < lastRead - 4; i++) { + if (buf.array()[i] == LFH[0] && buf.array()[i + 1] == LFH[1]) { + if ((buf.array()[i + 2] == LFH[2] && buf.array()[i + 3] == LFH[3]) + || (buf.array()[i] == CFH[2] && buf.array()[i + 3] == CFH[3])) { + // found a LFH or CFH: + readTooMuch = offset + lastRead - i - expectedDDLen; + done = true; + } + else if (buf.array()[i + 2] == DD[2] && buf.array()[i + 3] == DD[3]) { + // found DD: + readTooMuch = offset + lastRead - i; + done = true; + } + if (done) { + // * push back bytes read in excess as well as the data + // descriptor + // * copy the remaining bytes to cache + // * read data descriptor + pushback(buf.array(), offset + lastRead - readTooMuch, readTooMuch); + bos.write(buf.array(), 0, i); + readDataDescriptor(); + } + } + } + return done; + } + + /** + * If the last read bytes could hold a data descriptor and an + * incomplete signature then save the last bytes to the front of + * the buffer and cache everything in front of the potential data + * descriptor into the given ByteArrayOutputStream. + * + *

    Data descriptor plus incomplete signature (3 bytes in the + * worst case) can be 20 bytes max.

    + */ + private int cacheBytesRead(ByteArrayOutputStream bos, int offset, int lastRead, int expecteDDLen) { + final int cacheable = offset + lastRead - expecteDDLen - 3; + if (cacheable > 0) { + bos.write(buf.array(), 0, cacheable); + System.arraycopy(buf.array(), cacheable, buf.array(), 0, expecteDDLen + 3); + offset = expecteDDLen + 3; + } else { + offset += lastRead; + } + return offset; + } + + private void pushback(byte[] buf, int offset, int length) throws IOException { + ((PushbackInputStream) in).unread(buf, offset, length); + pushedBackBytes(length); + } + + // End of Central Directory Record + // end of central dir signature WORD + // number of this disk SHORT + // number of the disk with the + // start of the central directory SHORT + // total number of entries in the + // central directory on this disk SHORT + // total number of entries in + // the central directory SHORT + // size of the central directory WORD + // offset of start of central + // directory with respect to + // the starting disk number WORD + // .ZIP file comment length SHORT + // .ZIP file comment up to 64KB + // + + /** + * Reads the stream until it find the "End of central directory + * record" and consumes it as well. + */ + private void skipRemainderOfArchive() throws IOException { + // skip over central directory. One LFH has been read too much + // already. The calculation discounts file names and extra + // data so it will be too short. + realSkip(entriesRead * CFH_LEN - LFH_LEN); + findEocdRecord(); + realSkip(ZipFile.MIN_EOCD_SIZE - WORD /* signature */ - SHORT /* comment len */); + readFully(SHORT_BUF); + // file comment + realSkip(ZipShort.getValue(SHORT_BUF)); + } + + /** + * Reads forward until the signature of the "End of central + * directory" record is found. 
     */
    private void findEocdRecord() throws IOException {
        int currentByte = -1;
        boolean skipReadCall = false;
        // scan byte by byte for the four-byte EOCD signature; when a partial
        // match fails on a byte that is itself the first signature byte,
        // re-examine that byte instead of reading a fresh one
        while (skipReadCall || (currentByte = readOneByte()) > -1) {
            skipReadCall = false;
            if (!isFirstByteOfEocdSig(currentByte)) {
                continue;
            }
            currentByte = readOneByte();
            if (currentByte != ZipArchiveOutputStream.EOCD_SIG[1]) {
                if (currentByte == -1) {
                    break;
                }
                skipReadCall = isFirstByteOfEocdSig(currentByte);
                continue;
            }
            currentByte = readOneByte();
            if (currentByte != ZipArchiveOutputStream.EOCD_SIG[2]) {
                if (currentByte == -1) {
                    break;
                }
                skipReadCall = isFirstByteOfEocdSig(currentByte);
                continue;
            }
            currentByte = readOneByte();
            if (currentByte == -1
                || currentByte == ZipArchiveOutputStream.EOCD_SIG[3]) {
                // full signature matched - or EOF; stop scanning either way
                break;
            }
            skipReadCall = isFirstByteOfEocdSig(currentByte);
        }
    }

    /**
     * Skips bytes by reading from the underlying stream rather than
     * the (potentially inflating) archive stream - which {@link
     * #skip} would do.
     *
     * Also updates bytes-read counter.
     */
    private void realSkip(long value) throws IOException {
        if (value >= 0) {
            long skipped = 0;
            while (skipped < value) {
                long rem = value - skipped;
                int x = in.read(SKIP_BUF, 0, (int) (SKIP_BUF.length > rem ? rem : SKIP_BUF.length));
                if (x == -1) {
                    // EOF before the requested count - silently stop
                    return;
                }
                count(x);
                skipped += x;
            }
            return;
        }
        throw new IllegalArgumentException();
    }

    /**
     * Reads bytes by reading from the underlying stream rather than
     * the (potentially inflating) archive stream - which {@link #read} would do.
     *
     * Also updates bytes-read counter.
     */
    private int readOneByte() throws IOException {
        int b = in.read();
        if (b != -1) {
            count(1);
        }
        return b;
    }

    // true if b is the first byte of the EOCD signature
    private boolean isFirstByteOfEocdSig(int b) {
        return b == ZipArchiveOutputStream.EOCD_SIG[0];
    }

    /**
     * Structure collecting information for the entry that is
     * currently being read
+ */ + private static final class CurrentEntry { + + /** + * Current ZIP entry. + */ + private final ZipArchiveEntry entry = new ZipArchiveEntry(); + + /** + * Does the entry use a data descriptor? + */ + private boolean hasDataDescriptor; + + /** + * Does the entry have a ZIP64 extended information extra field. + */ + private boolean usesZip64; + + /** + * Number of bytes of entry content read by the client if the + * entry is STORED. + */ + private long bytesRead; + + /** + * Number of bytes of entry content read so from the stream. + * + *

    This may be more than the actual entry's length as some + * stuff gets buffered up and needs to be pushed back when the + * end of the entry has been reached.

    + */ + private long bytesReadFromStream; + + /** + * The checksum calculated as the current entry is read. + */ + private final CRC32 crc = new CRC32(); + + /** + * The input stream decompressing the data for shrunk and imploded entries. + */ + private InputStream in; + } + + /** + * Bounded input stream adapted from commons-io + */ + private class BoundedInputStream extends InputStream { + + /** the wrapped input stream */ + private final InputStream in; + + /** the max length to provide */ + private final long max; + + /** the number of bytes already returned */ + private long pos = 0; + + /** + * Creates a new BoundedInputStream that wraps the given input + * stream and limits it to a certain size. + * + * @param in The wrapped input stream + * @param size The maximum number of bytes to return + */ + public BoundedInputStream(final InputStream in, final long size) { + this.max = size; + this.in = in; + } + + @Override + public int read() throws IOException { + if (max >= 0 && pos >= max) { + return -1; + } + final int result = in.read(); + pos++; + count(1); + current.bytesReadFromStream++; + return result; + } + + @Override + public int read(final byte[] b) throws IOException { + return this.read(b, 0, b.length); + } + + @Override + public int read(final byte[] b, final int off, final int len) throws IOException { + if (max >= 0 && pos >= max) { + return -1; + } + final long maxRead = max >= 0 ? Math.min(len, max - pos) : len; + final int bytesRead = in.read(b, off, (int) maxRead); + + if (bytesRead == -1) { + return -1; + } + + pos += bytesRead; + count(bytesRead); + current.bytesReadFromStream += bytesRead; + return bytesRead; + } + + @Override + public long skip(final long n) throws IOException { + final long toSkip = max >= 0 ? 
Math.min(n, max - pos) : n; + final long skippedBytes = in.skip(toSkip); + pos += skippedBytes; + return skippedBytes; + } + + @Override + public int available() throws IOException { + if (max >= 0 && pos >= max) { + return 0; + } + return in.available(); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java new file mode 100644 index 000000000..12a1c66e7 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipArchiveOutputStream.java @@ -0,0 +1,1501 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.zip.CRC32; +import java.util.zip.Deflater; +import java.util.zip.ZipException; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveOutputStream; +import org.apache.commons.compress.utils.IOUtils; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.DATA_DESCRIPTOR_MIN_VERSION; +import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.INITIAL_VERSION; +import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT; +import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC_SHORT; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MIN_VERSION; + +/** + * Reimplementation of {@link java.util.zip.ZipOutputStream + * java.util.zip.ZipOutputStream} that does handle the extended + * functionality of this package, especially internal/external file + * attributes and extra fields with different layouts for local file + * data and central directory entries. + * + *

    This class will try to use {@link java.io.RandomAccessFile + * RandomAccessFile} when you know that the output is going to go to a + * file.

    + * + *

    If RandomAccessFile cannot be used, this implementation will use + * a Data Descriptor to store size and CRC information for {@link + * #DEFLATED DEFLATED} entries, this means, you don't need to + * calculate them yourself. Unfortunately this is not possible for + * the {@link #STORED STORED} method, here setting the CRC and + * uncompressed size information is required before {@link + * #putArchiveEntry(ArchiveEntry)} can be called.

    + * + *

    As of Apache Commons Compress 1.3 it transparently supports Zip64 + * extensions and thus individual entries and archives larger than 4 + * GB or with more than 65536 entries in most cases but explicit + * control is provided via {@link #setUseZip64}. If the stream can not + * user RandomAccessFile and you try to write a ZipArchiveEntry of + * unknown size then Zip64 extensions will be disabled by default.

    + * + * @NotThreadSafe + */ +public class ZipArchiveOutputStream extends ArchiveOutputStream { + + static final int BUFFER_SIZE = 512; + + /** indicates if this archive is finished. protected for use in Jar implementation */ + protected boolean finished = false; + + /* + * Apparently Deflater.setInput gets slowed down a lot on Sun JVMs + * when it gets handed a really big buffer. See + * https://issues.apache.org/bugzilla/show_bug.cgi?id=45396 + * + * Using a buffer size of 8 kB proved to be a good compromise + */ + private static final int DEFLATER_BLOCK_SIZE = 8192; + + /** + * Compression method for deflated entries. + */ + public static final int DEFLATED = java.util.zip.ZipEntry.DEFLATED; + + /** + * Default compression level for deflated entries. + */ + public static final int DEFAULT_COMPRESSION = Deflater.DEFAULT_COMPRESSION; + + /** + * Compression method for stored entries. + */ + public static final int STORED = java.util.zip.ZipEntry.STORED; + + /** + * default encoding for file names and comment. + */ + static final String DEFAULT_ENCODING = ZipEncodingHelper.UTF8; + + /** + * General purpose flag, which indicates that filenames are + * written in UTF-8. + * @deprecated use {@link GeneralPurposeBit#UFT8_NAMES_FLAG} instead + */ + @Deprecated + public static final int EFS_FLAG = GeneralPurposeBit.UFT8_NAMES_FLAG; + + private static final byte[] EMPTY = new byte[0]; + + /** + * Current entry. + */ + private CurrentEntry entry; + + /** + * The file comment. + */ + private String comment = ""; + + /** + * Compression level for next entry. + */ + private int level = DEFAULT_COMPRESSION; + + /** + * Has the compression level changed when compared to the last + * entry? + */ + private boolean hasCompressionLevelChanged = false; + + /** + * Default compression method for next entry. + */ + private int method = java.util.zip.ZipEntry.DEFLATED; + + /** + * List of ZipArchiveEntries written so far. 
+ */ + private final List entries = + new LinkedList(); + + /** + * CRC instance to avoid parsing DEFLATED data twice. + */ + private final CRC32 crc = new CRC32(); + + /** + * Count the bytes written to out. + */ + private long written = 0; + + /** + * Start of central directory. + */ + private long cdOffset = 0; + + /** + * Length of central directory. + */ + private long cdLength = 0; + + /** + * Helper, a 0 as ZipShort. + */ + private static final byte[] ZERO = {0, 0}; + + /** + * Helper, a 0 as ZipLong. + */ + private static final byte[] LZERO = {0, 0, 0, 0}; + + /** + * Holds the offsets of the LFH starts for each entry. + */ + private final Map offsets = + new HashMap(); + + /** + * The encoding to use for filenames and the file comment. + * + *

    For a list of possible values see http://java.sun.com/j2se/1.5.0/docs/guide/intl/encoding.doc.html. + * Defaults to UTF-8.

    + */ + private String encoding = DEFAULT_ENCODING; + + /** + * The zip encoding to use for filenames and the file comment. + * + * This field is of internal use and will be set in {@link + * #setEncoding(String)}. + */ + private ZipEncoding zipEncoding = + ZipEncodingHelper.getZipEncoding(DEFAULT_ENCODING); + + /** + * This Deflater object is used for output. + * + */ + protected final Deflater def = new Deflater(level, true); + + /** + * This buffer serves as a Deflater. + * + */ + private final byte[] buf = new byte[BUFFER_SIZE]; + + /** + * Optional random access output. + */ + private final RandomAccessFile raf; + + private final OutputStream out; + + /** + * whether to use the general purpose bit flag when writing UTF-8 + * filenames or not. + */ + private boolean useUTF8Flag = true; + + /** + * Whether to encode non-encodable file names as UTF-8. + */ + private boolean fallbackToUTF8 = false; + + /** + * whether to create UnicodePathExtraField-s for each entry. + */ + private UnicodeExtraFieldPolicy createUnicodeExtraFields = UnicodeExtraFieldPolicy.NEVER; + + /** + * Whether anything inside this archive has used a ZIP64 feature. + * + * @since 1.3 + */ + private boolean hasUsedZip64 = false; + + private Zip64Mode zip64Mode = Zip64Mode.AsNeeded; + + /** + * Creates a new ZIP OutputStream filtering the underlying stream. + * @param out the outputstream to zip + */ + public ZipArchiveOutputStream(OutputStream out) { + this.out = out; + this.raf = null; + } + + /** + * Creates a new ZIP OutputStream writing to a File. Will use + * random access if possible. 
+ * @param file the file to zip to + * @throws IOException on error + */ + public ZipArchiveOutputStream(File file) throws IOException { + OutputStream o = null; + RandomAccessFile _raf = null; + try { + _raf = new RandomAccessFile(file, "rw"); + _raf.setLength(0); + } catch (IOException e) { + IOUtils.closeQuietly(_raf); + _raf = null; + o = new FileOutputStream(file); + } + out = o; + raf = _raf; + } + + /** + * This method indicates whether this archive is writing to a + * seekable stream (i.e., to a random access file). + * + *

    For seekable streams, you don't need to calculate the CRC or + * uncompressed size for {@link #STORED} entries before + * invoking {@link #putArchiveEntry(ArchiveEntry)}. + * @return true if seekable + */ + public boolean isSeekable() { + return raf != null; + } + + /** + * The encoding to use for filenames and the file comment. + * + *

    For a list of possible values see http://java.sun.com/j2se/1.5.0/docs/guide/intl/encoding.doc.html. + * Defaults to UTF-8.

    + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + */ + public void setEncoding(final String encoding) { + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); + if (useUTF8Flag && !ZipEncodingHelper.isUTF8(encoding)) { + useUTF8Flag = false; + } + } + + /** + * The encoding to use for filenames and the file comment. + * + * @return null if using the platform's default character encoding. + */ + public String getEncoding() { + return encoding; + } + + /** + * Whether to set the language encoding flag if the file name + * encoding is UTF-8. + * + *

    Defaults to true.

    + */ + public void setUseLanguageEncodingFlag(boolean b) { + useUTF8Flag = b && ZipEncodingHelper.isUTF8(encoding); + } + + /** + * Whether to create Unicode Extra Fields. + * + *

    Defaults to NEVER.

    + */ + public void setCreateUnicodeExtraFields(UnicodeExtraFieldPolicy b) { + createUnicodeExtraFields = b; + } + + /** + * Whether to fall back to UTF and the language encoding flag if + * the file name cannot be encoded using the specified encoding. + * + *

    Defaults to false.

    + */ + public void setFallbackToUTF8(boolean b) { + fallbackToUTF8 = b; + } + + /** + * Whether Zip64 extensions will be used. + * + *

    When setting the mode to {@link Zip64Mode#Never Never}, + * {@link #putArchiveEntry}, {@link #closeArchiveEntry}, {@link + * #finish} or {@link #close} may throw a {@link + * Zip64RequiredException} if the entry's size or the total size + * of the archive exceeds 4GB or there are more than 65536 entries + * inside the archive. Any archive created in this mode will be + * readable by implementations that don't support Zip64.

    + * + *

    When setting the mode to {@link Zip64Mode#Always Always}, + * Zip64 extensions will be used for all entries. Any archive + * created in this mode may be unreadable by implementations that + * don't support Zip64 even if all its contents would be.

    + * + *

    When setting the mode to {@link Zip64Mode#AsNeeded + * AsNeeded}, Zip64 extensions will transparently be used for + * those entries that require them. This mode can only be used if + * the uncompressed size of the {@link ZipArchiveEntry} is known + * when calling {@link #putArchiveEntry} or the archive is written + * to a seekable output (i.e. you have used the {@link + * #ZipArchiveOutputStream(java.io.File) File-arg constructor}) - + * this mode is not valid when the output stream is not seekable + * and the uncompressed size is unknown when {@link + * #putArchiveEntry} is called.

    + * + *

    If no entry inside the resulting archive requires Zip64 + * extensions then {@link Zip64Mode#Never Never} will create the + * smallest archive. {@link Zip64Mode#AsNeeded AsNeeded} will + * create a slightly bigger archive if the uncompressed size of + * any entry has initially been unknown and create an archive + * identical to {@link Zip64Mode#Never Never} otherwise. {@link + * Zip64Mode#Always Always} will create an archive that is at + * least 24 bytes per entry bigger than the one {@link + * Zip64Mode#Never Never} would create.

    + * + *

    Defaults to {@link Zip64Mode#AsNeeded AsNeeded} unless + * {@link #putArchiveEntry} is called with an entry of unknown + * size and data is written to a non-seekable stream - in this + * case the default is {@link Zip64Mode#Never Never}.

    + * + * @since 1.3 + */ + public void setUseZip64(Zip64Mode mode) { + zip64Mode = mode; + } + + /** + * {@inheritDoc} + * @throws Zip64RequiredException if the archive's size exceeds 4 + * GByte or there are more than 65535 entries inside the archive + * and {@link #setUseZip64} is {@link Zip64Mode#Never}. + */ + @Override + public void finish() throws IOException { + if (finished) { + throw new IOException("This archive has already been finished"); + } + + if (entry != null) { + throw new IOException("This archive contains unclosed entries."); + } + + cdOffset = written; + for (ZipArchiveEntry ze : entries) { + writeCentralFileHeader(ze); + } + cdLength = written - cdOffset; + writeZip64CentralDirectory(); + writeCentralDirectoryEnd(); + offsets.clear(); + entries.clear(); + def.end(); + finished = true; + } + + /** + * Writes all necessary data for this entry. + * @throws IOException on error + * @throws Zip64RequiredException if the entry's uncompressed or + * compressed size exceeds 4 GByte and {@link #setUseZip64} + * is {@link Zip64Mode#Never}. + */ + @Override + public void closeArchiveEntry() throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + + if (entry == null) { + throw new IOException("No current entry to close"); + } + + if (!entry.hasWritten) { + write(EMPTY, 0, 0); + } + + flushDeflater(); + + final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); + long bytesWritten = written - entry.dataStart; + long realCrc = crc.getValue(); + crc.reset(); + + final boolean actuallyNeedsZip64 = + handleSizesAndCrc(bytesWritten, realCrc, effectiveMode); + + if (raf != null) { + rewriteSizesAndCrc(actuallyNeedsZip64); + } + + writeDataDescriptor(entry.entry); + entry = null; + } + + /** + * Ensures all bytes sent to the deflater are written to the stream. 
+ */ + private void flushDeflater() throws IOException { + if (entry.entry.getMethod() == DEFLATED) { + def.finish(); + while (!def.finished()) { + deflate(); + } + } + } + + /** + * Ensures the current entry's size and CRC information is set to + * the values just written, verifies it isn't too big in the + * Zip64Mode.Never case and returns whether the entry would + * require a Zip64 extra field. + */ + private boolean handleSizesAndCrc(long bytesWritten, long crc, + Zip64Mode effectiveMode) + throws ZipException { + if (entry.entry.getMethod() == DEFLATED) { + /* It turns out def.getBytesRead() returns wrong values if + * the size exceeds 4 GB on Java < Java7 + entry.entry.setSize(def.getBytesRead()); + */ + entry.entry.setSize(entry.bytesRead); + entry.entry.setCompressedSize(bytesWritten); + entry.entry.setCrc(crc); + + def.reset(); + } else if (raf == null) { + if (entry.entry.getCrc() != crc) { + throw new ZipException("bad CRC checksum for entry " + + entry.entry.getName() + ": " + + Long.toHexString(entry.entry.getCrc()) + + " instead of " + + Long.toHexString(crc)); + } + + if (entry.entry.getSize() != bytesWritten) { + throw new ZipException("bad size for entry " + + entry.entry.getName() + ": " + + entry.entry.getSize() + + " instead of " + + bytesWritten); + } + } else { /* method is STORED and we used RandomAccessFile */ + entry.entry.setSize(bytesWritten); + entry.entry.setCompressedSize(bytesWritten); + entry.entry.setCrc(crc); + } + + final boolean actuallyNeedsZip64 = effectiveMode == Zip64Mode.Always + || entry.entry.getSize() >= ZIP64_MAGIC + || entry.entry.getCompressedSize() >= ZIP64_MAGIC; + if (actuallyNeedsZip64 && effectiveMode == Zip64Mode.Never) { + throw new Zip64RequiredException(Zip64RequiredException + .getEntryTooBigMessage(entry.entry)); + } + return actuallyNeedsZip64; + } + + /** + * When using random access output, write the local file header + * and potentiall the ZIP64 extra containing the correct CRC and + * 
compressed/uncompressed sizes. + */ + private void rewriteSizesAndCrc(boolean actuallyNeedsZip64) + throws IOException { + long save = raf.getFilePointer(); + + raf.seek(entry.localDataStart); + writeOut(ZipLong.getBytes(entry.entry.getCrc())); + if (!hasZip64Extra(entry.entry) || !actuallyNeedsZip64) { + writeOut(ZipLong.getBytes(entry.entry.getCompressedSize())); + writeOut(ZipLong.getBytes(entry.entry.getSize())); + } else { + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + } + + if (hasZip64Extra(entry.entry)) { + // seek to ZIP64 extra, skip header and size information + raf.seek(entry.localDataStart + 3 * WORD + 2 * SHORT + + getName(entry.entry).limit() + 2 * SHORT); + // inside the ZIP64 extra uncompressed size comes + // first, unlike the LFH, CD or data descriptor + writeOut(ZipEightByteInteger.getBytes(entry.entry.getSize())); + writeOut(ZipEightByteInteger.getBytes(entry.entry.getCompressedSize())); + + if (!actuallyNeedsZip64) { + // do some cleanup: + // * rewrite version needed to extract + raf.seek(entry.localDataStart - 5 * SHORT); + writeOut(ZipShort.getBytes(INITIAL_VERSION)); + + // * remove ZIP64 extra so it doesn't get written + // to the central directory + entry.entry.removeExtraField(Zip64ExtendedInformationExtraField + .HEADER_ID); + entry.entry.setExtra(); + + // * reset hasUsedZip64 if it has been set because + // of this entry + if (entry.causedUseOfZip64) { + hasUsedZip64 = false; + } + } + } + raf.seek(save); + } + + /** + * {@inheritDoc} + * @throws ClassCastException if entry is not an instance of ZipArchiveEntry + * @throws Zip64RequiredException if the entry's uncompressed or + * compressed size is known to exceed 4 GByte and {@link #setUseZip64} + * is {@link Zip64Mode#Never}. 
+ */ + @Override + public void putArchiveEntry(ArchiveEntry archiveEntry) throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + + if (entry != null) { + closeArchiveEntry(); + } + + entry = new CurrentEntry((ZipArchiveEntry) archiveEntry); + entries.add(entry.entry); + + setDefaults(entry.entry); + + final Zip64Mode effectiveMode = getEffectiveZip64Mode(entry.entry); + validateSizeInformation(effectiveMode); + + if (shouldAddZip64Extra(entry.entry, effectiveMode)) { + + Zip64ExtendedInformationExtraField z64 = getZip64Extra(entry.entry); + + // just a placeholder, real data will be in data + // descriptor or inserted later via RandomAccessFile + ZipEightByteInteger size = ZipEightByteInteger.ZERO; + if (entry.entry.getMethod() == STORED + && entry.entry.getSize() != ArchiveEntry.SIZE_UNKNOWN) { + // actually, we already know the sizes + size = new ZipEightByteInteger(entry.entry.getSize()); + } + z64.setSize(size); + z64.setCompressedSize(size); + entry.entry.setExtra(); + } + + if (entry.entry.getMethod() == DEFLATED && hasCompressionLevelChanged) { + def.setLevel(level); + hasCompressionLevelChanged = false; + } + writeLocalFileHeader(entry.entry); + } + + /** + * Provides default values for compression method and last + * modification time. + */ + private void setDefaults(ZipArchiveEntry entry) { + if (entry.getMethod() == -1) { // not specified + entry.setMethod(method); + } + + if (entry.getTime() == -1) { // not specified + entry.setTime(System.currentTimeMillis()); + } + } + + /** + * Throws an exception if the size is unknown for a stored entry + * that is written to a non-seekable output or the entry is too + * big to be written without Zip64 extra but the mode has been set + * to Never. 
+ */ + private void validateSizeInformation(Zip64Mode effectiveMode) + throws ZipException { + // Size/CRC not required if RandomAccessFile is used + if (entry.entry.getMethod() == STORED && raf == null) { + if (entry.entry.getSize() == ArchiveEntry.SIZE_UNKNOWN) { + throw new ZipException("uncompressed size is required for" + + " STORED method when not writing to a" + + " file"); + } + if (entry.entry.getCrc() == -1) { + throw new ZipException("crc checksum is required for STORED" + + " method when not writing to a file"); + } + entry.entry.setCompressedSize(entry.entry.getSize()); + } + + if ((entry.entry.getSize() >= ZIP64_MAGIC + || entry.entry.getCompressedSize() >= ZIP64_MAGIC) + && effectiveMode == Zip64Mode.Never) { + throw new Zip64RequiredException(Zip64RequiredException + .getEntryTooBigMessage(entry.entry)); + } + } + + /** + * Whether to addd a Zip64 extended information extra field to the + * local file header. + * + *

    Returns true if

    + * + *
      + *
    • mode is Always
    • + *
    • or we already know it is going to be needed
    • + *
    • or the size is unknown and we can ensure it won't hurt + * other implementations if we add it (i.e. we can erase its + * usage
    • + *
    + */ + private boolean shouldAddZip64Extra(ZipArchiveEntry entry, Zip64Mode mode) { + return mode == Zip64Mode.Always + || entry.getSize() >= ZIP64_MAGIC + || entry.getCompressedSize() >= ZIP64_MAGIC + || (entry.getSize() == ArchiveEntry.SIZE_UNKNOWN + && raf != null && mode != Zip64Mode.Never); + } + + /** + * Set the file comment. + * @param comment the comment + */ + public void setComment(String comment) { + this.comment = comment; + } + + /** + * Sets the compression level for subsequent entries. + * + *

    Default is Deflater.DEFAULT_COMPRESSION.

    + * @param level the compression level. + * @throws IllegalArgumentException if an invalid compression + * level is specified. + */ + public void setLevel(int level) { + if (level < Deflater.DEFAULT_COMPRESSION + || level > Deflater.BEST_COMPRESSION) { + throw new IllegalArgumentException("Invalid compression level: " + + level); + } + hasCompressionLevelChanged = (this.level != level); + this.level = level; + } + + /** + * Sets the default compression method for subsequent entries. + * + *

    Default is DEFLATED.

    + * @param method an int from java.util.zip.ZipEntry + */ + public void setMethod(int method) { + this.method = method; + } + + /** + * Whether this stream is able to write the given entry. + * + *

    May return false if it is set up to use encryption or a + * compression method that hasn't been implemented yet.

    + * @since 1.1 + */ + @Override + public boolean canWriteEntryData(ArchiveEntry ae) { + if (ae instanceof ZipArchiveEntry) { + ZipArchiveEntry zae = (ZipArchiveEntry) ae; + return zae.getMethod() != ZipMethod.IMPLODING.getCode() + && zae.getMethod() != ZipMethod.UNSHRINKING.getCode() + && ZipUtil.canHandleEntryData(zae); + } + return false; + } + + /** + * Writes bytes to ZIP entry. + * @param b the byte array to write + * @param offset the start position to write from + * @param length the number of bytes to write + * @throws IOException on error + */ + @Override + public void write(byte[] b, int offset, int length) throws IOException { + ZipUtil.checkRequestedFeatures(entry.entry); + entry.hasWritten = true; + if (entry.entry.getMethod() == DEFLATED) { + writeDeflated(b, offset, length); + } else { + writeOut(b, offset, length); + written += length; + } + crc.update(b, offset, length); + count(length); + } + + /** + * write implementation for DEFLATED entries. + */ + private void writeDeflated(byte[]b, int offset, int length) + throws IOException { + if (length > 0 && !def.finished()) { + entry.bytesRead += length; + if (length <= DEFLATER_BLOCK_SIZE) { + def.setInput(b, offset, length); + deflateUntilInputIsNeeded(); + } else { + final int fullblocks = length / DEFLATER_BLOCK_SIZE; + for (int i = 0; i < fullblocks; i++) { + def.setInput(b, offset + i * DEFLATER_BLOCK_SIZE, + DEFLATER_BLOCK_SIZE); + deflateUntilInputIsNeeded(); + } + final int done = fullblocks * DEFLATER_BLOCK_SIZE; + if (done < length) { + def.setInput(b, offset + done, length - done); + deflateUntilInputIsNeeded(); + } + } + } + } + + /** + * Closes this output stream and releases any system resources + * associated with the stream. + * + * @exception IOException if an I/O error occurs. + * @throws Zip64RequiredException if the archive's size exceeds 4 + * GByte or there are more than 65535 entries inside the archive + * and {@link #setUseZip64} is {@link Zip64Mode#Never}. 
+ */ + @Override + public void close() throws IOException { + if (!finished) { + finish(); + } + destroy(); + } + + /** + * Flushes this output stream and forces any buffered output bytes + * to be written out to the stream. + * + * @exception IOException if an I/O error occurs. + */ + @Override + public void flush() throws IOException { + if (out != null) { + out.flush(); + } + } + + /* + * Various ZIP constants + */ + /** + * local file header signature + */ + static final byte[] LFH_SIG = ZipLong.LFH_SIG.getBytes(); + /** + * data descriptor signature + */ + static final byte[] DD_SIG = ZipLong.DD_SIG.getBytes(); + /** + * central file header signature + */ + static final byte[] CFH_SIG = ZipLong.CFH_SIG.getBytes(); + /** + * end of central dir signature + */ + static final byte[] EOCD_SIG = ZipLong.getBytes(0X06054B50L); + /** + * ZIP64 end of central dir signature + */ + static final byte[] ZIP64_EOCD_SIG = ZipLong.getBytes(0X06064B50L); + /** + * ZIP64 end of central dir locator signature + */ + static final byte[] ZIP64_EOCD_LOC_SIG = ZipLong.getBytes(0X07064B50L); + + /** + * Writes next block of compressed data to the output stream. 
+ * @throws IOException on error + */ + protected final void deflate() throws IOException { + int len = def.deflate(buf, 0, buf.length); + if (len > 0) { + writeOut(buf, 0, len); + written += len; + } + } + + /** + * Writes the local file header entry + * @param ze the entry to write + * @throws IOException on error + */ + protected void writeLocalFileHeader(ZipArchiveEntry ze) throws IOException { + + boolean encodable = zipEncoding.canEncode(ze.getName()); + ByteBuffer name = getName(ze); + + if (createUnicodeExtraFields != UnicodeExtraFieldPolicy.NEVER) { + addUnicodeExtraFields(ze, encodable, name); + } + + offsets.put(ze, Long.valueOf(written)); + + writeOut(LFH_SIG); + written += WORD; + + //store method in local variable to prevent multiple method calls + final int zipMethod = ze.getMethod(); + + writeVersionNeededToExtractAndGeneralPurposeBits(zipMethod, + !encodable + && fallbackToUTF8, + hasZip64Extra(ze)); + written += WORD; + + // compression method + writeOut(ZipShort.getBytes(zipMethod)); + written += SHORT; + + // last mod. 
time and date + writeOut(ZipUtil.toDosTime(ze.getTime())); + written += WORD; + + // CRC + // compressed length + // uncompressed length + entry.localDataStart = written; + if (zipMethod == DEFLATED || raf != null) { + writeOut(LZERO); + if (hasZip64Extra(entry.entry)) { + // point to ZIP64 extended information extra field for + // sizes, may get rewritten once sizes are known if + // stream is seekable + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + } else { + writeOut(LZERO); + writeOut(LZERO); + } + } else { + writeOut(ZipLong.getBytes(ze.getCrc())); + byte[] size = ZipLong.ZIP64_MAGIC.getBytes(); + if (!hasZip64Extra(ze)) { + size = ZipLong.getBytes(ze.getSize()); + } + writeOut(size); + writeOut(size); + } + // CheckStyle:MagicNumber OFF + written += 12; + // CheckStyle:MagicNumber ON + + // file name length + writeOut(ZipShort.getBytes(name.limit())); + written += SHORT; + + // extra field length + byte[] extra = ze.getLocalFileDataExtra(); + writeOut(ZipShort.getBytes(extra.length)); + written += SHORT; + + // file name + writeOut(name.array(), name.arrayOffset(), + name.limit() - name.position()); + written += name.limit(); + + // extra field + writeOut(extra); + written += extra.length; + + entry.dataStart = written; + } + + /** + * Adds UnicodeExtra fields for name and file comment if mode is + * ALWAYS or the data cannot be encoded using the configured + * encoding. 
+ */ + private void addUnicodeExtraFields(ZipArchiveEntry ze, boolean encodable, + ByteBuffer name) + throws IOException { + if (createUnicodeExtraFields == UnicodeExtraFieldPolicy.ALWAYS + || !encodable) { + ze.addExtraField(new UnicodePathExtraField(ze.getName(), + name.array(), + name.arrayOffset(), + name.limit() + - name.position())); + } + + String comm = ze.getComment(); + if (comm != null && !"".equals(comm)) { + + boolean commentEncodable = zipEncoding.canEncode(comm); + + if (createUnicodeExtraFields == UnicodeExtraFieldPolicy.ALWAYS + || !commentEncodable) { + ByteBuffer commentB = getEntryEncoding(ze).encode(comm); + ze.addExtraField(new UnicodeCommentExtraField(comm, + commentB.array(), + commentB.arrayOffset(), + commentB.limit() + - commentB.position()) + ); + } + } + } + + /** + * Writes the data descriptor entry. + * @param ze the entry to write + * @throws IOException on error + */ + protected void writeDataDescriptor(ZipArchiveEntry ze) throws IOException { + if (ze.getMethod() != DEFLATED || raf != null) { + return; + } + writeOut(DD_SIG); + writeOut(ZipLong.getBytes(ze.getCrc())); + int sizeFieldSize = WORD; + if (!hasZip64Extra(ze)) { + writeOut(ZipLong.getBytes(ze.getCompressedSize())); + writeOut(ZipLong.getBytes(ze.getSize())); + } else { + sizeFieldSize = DWORD; + writeOut(ZipEightByteInteger.getBytes(ze.getCompressedSize())); + writeOut(ZipEightByteInteger.getBytes(ze.getSize())); + } + written += 2 * WORD + 2 * sizeFieldSize; + } + + /** + * Writes the central file header entry. + * @param ze the entry to write + * @throws IOException on error + * @throws Zip64RequiredException if the archive's size exceeds 4 + * GByte and {@link Zip64Mode #setUseZip64} is {@link + * Zip64Mode#Never}. 
+ */ + protected void writeCentralFileHeader(ZipArchiveEntry ze) throws IOException { + writeOut(CFH_SIG); + written += WORD; + + final long lfhOffset = offsets.get(ze).longValue(); + final boolean needsZip64Extra = hasZip64Extra(ze) + || ze.getCompressedSize() >= ZIP64_MAGIC + || ze.getSize() >= ZIP64_MAGIC + || lfhOffset >= ZIP64_MAGIC; + + if (needsZip64Extra && zip64Mode == Zip64Mode.Never) { + // must be the offset that is too big, otherwise an + // exception would have been throw in putArchiveEntry or + // closeArchiveEntry + throw new Zip64RequiredException(Zip64RequiredException + .ARCHIVE_TOO_BIG_MESSAGE); + } + + handleZip64Extra(ze, lfhOffset, needsZip64Extra); + + // version made by + // CheckStyle:MagicNumber OFF + writeOut(ZipShort.getBytes((ze.getPlatform() << 8) | + (!hasUsedZip64 ? DATA_DESCRIPTOR_MIN_VERSION + : ZIP64_MIN_VERSION))); + written += SHORT; + + final int zipMethod = ze.getMethod(); + final boolean encodable = zipEncoding.canEncode(ze.getName()); + writeVersionNeededToExtractAndGeneralPurposeBits(zipMethod, + !encodable + && fallbackToUTF8, + needsZip64Extra); + written += WORD; + + // compression method + writeOut(ZipShort.getBytes(zipMethod)); + written += SHORT; + + // last mod. 
time and date + writeOut(ZipUtil.toDosTime(ze.getTime())); + written += WORD; + + // CRC + // compressed length + // uncompressed length + writeOut(ZipLong.getBytes(ze.getCrc())); + if (ze.getCompressedSize() >= ZIP64_MAGIC + || ze.getSize() >= ZIP64_MAGIC) { + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + writeOut(ZipLong.ZIP64_MAGIC.getBytes()); + } else { + writeOut(ZipLong.getBytes(ze.getCompressedSize())); + writeOut(ZipLong.getBytes(ze.getSize())); + } + // CheckStyle:MagicNumber OFF + written += 12; + // CheckStyle:MagicNumber ON + + ByteBuffer name = getName(ze); + + writeOut(ZipShort.getBytes(name.limit())); + written += SHORT; + + // extra field length + byte[] extra = ze.getCentralDirectoryExtra(); + writeOut(ZipShort.getBytes(extra.length)); + written += SHORT; + + // file comment length + String comm = ze.getComment(); + if (comm == null) { + comm = ""; + } + + ByteBuffer commentB = getEntryEncoding(ze).encode(comm); + + writeOut(ZipShort.getBytes(commentB.limit())); + written += SHORT; + + // disk number start + writeOut(ZERO); + written += SHORT; + + // internal file attributes + writeOut(ZipShort.getBytes(ze.getInternalAttributes())); + written += SHORT; + + // external file attributes + writeOut(ZipLong.getBytes(ze.getExternalAttributes())); + written += WORD; + + // relative offset of LFH + writeOut(ZipLong.getBytes(Math.min(lfhOffset, ZIP64_MAGIC))); + written += WORD; + + // file name + writeOut(name.array(), name.arrayOffset(), + name.limit() - name.position()); + written += name.limit(); + + // extra field + writeOut(extra); + written += extra.length; + + // file comment + writeOut(commentB.array(), commentB.arrayOffset(), + commentB.limit() - commentB.position()); + written += commentB.limit(); + } + + /** + * If the entry needs Zip64 extra information inside the central + * directory then configure its data. 
+ */ + private void handleZip64Extra(ZipArchiveEntry ze, long lfhOffset, + boolean needsZip64Extra) { + if (needsZip64Extra) { + Zip64ExtendedInformationExtraField z64 = getZip64Extra(ze); + if (ze.getCompressedSize() >= ZIP64_MAGIC + || ze.getSize() >= ZIP64_MAGIC) { + z64.setCompressedSize(new ZipEightByteInteger(ze.getCompressedSize())); + z64.setSize(new ZipEightByteInteger(ze.getSize())); + } else { + // reset value that may have been set for LFH + z64.setCompressedSize(null); + z64.setSize(null); + } + if (lfhOffset >= ZIP64_MAGIC) { + z64.setRelativeHeaderOffset(new ZipEightByteInteger(lfhOffset)); + } + ze.setExtra(); + } + } + + /** + * Writes the "End of central dir record". + * @throws IOException on error + * @throws Zip64RequiredException if the archive's size exceeds 4 + * GByte or there are more than 65535 entries inside the archive + * and {@link Zip64Mode #setUseZip64} is {@link Zip64Mode#Never}. + */ + protected void writeCentralDirectoryEnd() throws IOException { + writeOut(EOCD_SIG); + + // disk numbers + writeOut(ZERO); + writeOut(ZERO); + + // number of entries + int numberOfEntries = entries.size(); + if (numberOfEntries > ZIP64_MAGIC_SHORT + && zip64Mode == Zip64Mode.Never) { + throw new Zip64RequiredException(Zip64RequiredException + .TOO_MANY_ENTRIES_MESSAGE); + } + if (cdOffset > ZIP64_MAGIC && zip64Mode == Zip64Mode.Never) { + throw new Zip64RequiredException(Zip64RequiredException + .ARCHIVE_TOO_BIG_MESSAGE); + } + + byte[] num = ZipShort.getBytes(Math.min(numberOfEntries, + ZIP64_MAGIC_SHORT)); + writeOut(num); + writeOut(num); + + // length and location of CD + writeOut(ZipLong.getBytes(Math.min(cdLength, ZIP64_MAGIC))); + writeOut(ZipLong.getBytes(Math.min(cdOffset, ZIP64_MAGIC))); + + // ZIP file comment + ByteBuffer data = this.zipEncoding.encode(comment); + writeOut(ZipShort.getBytes(data.limit())); + writeOut(data.array(), data.arrayOffset(), + data.limit() - data.position()); + } + + private static final byte[] ONE = 
ZipLong.getBytes(1L); + + /** + * Writes the "ZIP64 End of central dir record" and + * "ZIP64 End of central dir locator". + * @throws IOException on error + * @since 1.3 + */ + protected void writeZip64CentralDirectory() throws IOException { + if (zip64Mode == Zip64Mode.Never) { + return; + } + + if (!hasUsedZip64 + && (cdOffset >= ZIP64_MAGIC || cdLength >= ZIP64_MAGIC + || entries.size() >= ZIP64_MAGIC_SHORT)) { + // actually "will use" + hasUsedZip64 = true; + } + + if (!hasUsedZip64) { + return; + } + + long offset = written; + + writeOut(ZIP64_EOCD_SIG); + // size, we don't have any variable length as we don't support + // the extensible data sector, yet + writeOut(ZipEightByteInteger + .getBytes(SHORT /* version made by */ + + SHORT /* version needed to extract */ + + WORD /* disk number */ + + WORD /* disk with central directory */ + + DWORD /* number of entries in CD on this disk */ + + DWORD /* total number of entries */ + + DWORD /* size of CD */ + + DWORD /* offset of CD */ + )); + + // version made by and version needed to extract + writeOut(ZipShort.getBytes(ZIP64_MIN_VERSION)); + writeOut(ZipShort.getBytes(ZIP64_MIN_VERSION)); + + // disk numbers - four bytes this time + writeOut(LZERO); + writeOut(LZERO); + + // number of entries + byte[] num = ZipEightByteInteger.getBytes(entries.size()); + writeOut(num); + writeOut(num); + + // length and location of CD + writeOut(ZipEightByteInteger.getBytes(cdLength)); + writeOut(ZipEightByteInteger.getBytes(cdOffset)); + + // no "zip64 extensible data sector" for now + + // and now the "ZIP64 end of central directory locator" + writeOut(ZIP64_EOCD_LOC_SIG); + + // disk number holding the ZIP64 EOCD record + writeOut(LZERO); + // relative offset of ZIP64 EOCD record + writeOut(ZipEightByteInteger.getBytes(offset)); + // total number of disks + writeOut(ONE); + } + + /** + * Write bytes to output or random access file. 
+ * @param data the byte array to write + * @throws IOException on error + */ + protected final void writeOut(byte[] data) throws IOException { + writeOut(data, 0, data.length); + } + + /** + * Write bytes to output or random access file. + * @param data the byte array to write + * @param offset the start position to write from + * @param length the number of bytes to write + * @throws IOException on error + */ + protected final void writeOut(byte[] data, int offset, int length) + throws IOException { + if (raf != null) { + raf.write(data, offset, length); + } else { + out.write(data, offset, length); + } + } + + private void deflateUntilInputIsNeeded() throws IOException { + while (!def.needsInput()) { + deflate(); + } + } + + private void writeVersionNeededToExtractAndGeneralPurposeBits(final int + zipMethod, + final boolean + utfFallback, + final boolean + zip64) + throws IOException { + + // CheckStyle:MagicNumber OFF + int versionNeededToExtract = INITIAL_VERSION; + GeneralPurposeBit b = new GeneralPurposeBit(); + b.useUTF8ForNames(useUTF8Flag || utfFallback); + if (zipMethod == DEFLATED && raf == null) { + // requires version 2 as we are going to store length info + // in the data descriptor + versionNeededToExtract = DATA_DESCRIPTOR_MIN_VERSION; + b.useDataDescriptor(true); + } + if (zip64) { + versionNeededToExtract = ZIP64_MIN_VERSION; + } + // CheckStyle:MagicNumber ON + + // version needed to extract + writeOut(ZipShort.getBytes(versionNeededToExtract)); + // general purpose bit flag + writeOut(b.encode()); + } + + /** + * Creates a new zip entry taking some information from the given + * file and using the provided name. + * + *

    The name will be adjusted to end with a forward slash "/" if + * the file is a directory. If the file is not a directory a + * potential trailing forward slash will be stripped from the + * entry name.

    + * + *

    Must not be used if the stream has already been closed.

    + */ + @Override + public ArchiveEntry createArchiveEntry(File inputFile, String entryName) + throws IOException { + if (finished) { + throw new IOException("Stream has already been finished"); + } + return new ZipArchiveEntry(inputFile, entryName); + } + + /** + * Get the existing ZIP64 extended information extra field or + * create a new one and add it to the entry. + * + * @since 1.3 + */ + private Zip64ExtendedInformationExtraField + getZip64Extra(ZipArchiveEntry ze) { + if (entry != null) { + entry.causedUseOfZip64 = !hasUsedZip64; + } + hasUsedZip64 = true; + Zip64ExtendedInformationExtraField z64 = + (Zip64ExtendedInformationExtraField) + ze.getExtraField(Zip64ExtendedInformationExtraField + .HEADER_ID); + if (z64 == null) { + /* + System.err.println("Adding z64 for " + ze.getName() + + ", method: " + ze.getMethod() + + " (" + (ze.getMethod() == STORED) + ")" + + ", raf: " + (raf != null)); + */ + z64 = new Zip64ExtendedInformationExtraField(); + } + + // even if the field is there already, make sure it is the first one + ze.addAsFirstExtraField(z64); + + return z64; + } + + /** + * Is there a ZIP64 extended information extra field for the + * entry? + * + * @since 1.3 + */ + private boolean hasZip64Extra(ZipArchiveEntry ze) { + return ze.getExtraField(Zip64ExtendedInformationExtraField + .HEADER_ID) + != null; + } + + /** + * If the mode is AsNeeded and the entry is a compressed entry of + * unknown size that gets written to a non-seekable stream the + * change the default to Never. + * + * @since 1.3 + */ + private Zip64Mode getEffectiveZip64Mode(ZipArchiveEntry ze) { + if (zip64Mode != Zip64Mode.AsNeeded + || raf != null + || ze.getMethod() != DEFLATED + || ze.getSize() != ArchiveEntry.SIZE_UNKNOWN) { + return zip64Mode; + } + return Zip64Mode.Never; + } + + private ZipEncoding getEntryEncoding(ZipArchiveEntry ze) { + boolean encodable = zipEncoding.canEncode(ze.getName()); + return !encodable && fallbackToUTF8 + ? 
ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; + } + + private ByteBuffer getName(ZipArchiveEntry ze) throws IOException { + return getEntryEncoding(ze).encode(ze.getName()); + } + + /** + * Closes the underlying stream/file without finishing the + * archive, the result will likely be a corrupt archive. + * + *

    This method only exists to support tests that generate + * corrupt archives so they can clean up any temporary files.

    + */ + void destroy() throws IOException { + if (raf != null) { + raf.close(); + } + if (out != null) { + out.close(); + } + } + + /** + * enum that represents the possible policies for creating Unicode + * extra fields. + */ + public static final class UnicodeExtraFieldPolicy { + /** + * Always create Unicode extra fields. + */ + public static final UnicodeExtraFieldPolicy ALWAYS = new UnicodeExtraFieldPolicy("always"); + /** + * Never create Unicode extra fields. + */ + public static final UnicodeExtraFieldPolicy NEVER = new UnicodeExtraFieldPolicy("never"); + /** + * Create Unicode extra fields for filenames that cannot be + * encoded using the specified encoding. + */ + public static final UnicodeExtraFieldPolicy NOT_ENCODEABLE = + new UnicodeExtraFieldPolicy("not encodeable"); + + private final String name; + private UnicodeExtraFieldPolicy(String n) { + name = n; + } + @Override + public String toString() { + return name; + } + } + + /** + * Structure collecting information for the entry that is + * currently being written. + */ + private static final class CurrentEntry { + private CurrentEntry(ZipArchiveEntry entry) { + this.entry = entry; + } + /** + * Current ZIP entry. + */ + private final ZipArchiveEntry entry; + /** + * Offset for CRC entry in the local file header data for the + * current entry starts here. + */ + private long localDataStart = 0; + /** + * Data for local header data + */ + private long dataStart = 0; + /** + * Number of bytes read for the current entry (can't rely on + * Deflater#getBytesRead) when using DEFLATED. + */ + private long bytesRead = 0; + /** + * Whether current entry was the first one using ZIP64 features. + */ + private boolean causedUseOfZip64 = false; + /** + * Whether write() has been called at all. + * + *

    In order to create a valid archive {@link + * #closeArchiveEntry closeArchiveEntry} will write an empty + * array to get the CRC right if nothing has been written to + * the stream at all.

    + */ + private boolean hasWritten; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipConstants.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipConstants.java new file mode 100644 index 000000000..b6e2d4536 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipConstants.java @@ -0,0 +1,61 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +/** + * Various constants used throughout the package. 
+ * + * @since 1.3 + */ +final class ZipConstants { + private ZipConstants() { } + + /** Masks last eight bits */ + static final int BYTE_MASK = 0xFF; + + /** length of a ZipShort in bytes */ + static final int SHORT = 2; + + /** length of a ZipLong in bytes */ + static final int WORD = 4; + + /** length of a ZipEightByteInteger in bytes */ + static final int DWORD = 8; + + /** Initial ZIP specification version */ + static final int INITIAL_VERSION = 10; + + /** ZIP specification version that introduced data descriptor method */ + static final int DATA_DESCRIPTOR_MIN_VERSION = 20; + + /** ZIP specification version that introduced ZIP64 */ + static final int ZIP64_MIN_VERSION = 45; + + /** + * Value stored in two-byte size and similar fields if ZIP64 + * extensions are used. + */ + static final int ZIP64_MAGIC_SHORT = 0xFFFF; + + /** + * Value stored in four-byte size and similar fields if ZIP64 + * extensions are used. + */ + static final long ZIP64_MAGIC = 0xFFFFFFFFL; + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.java new file mode 100644 index 000000000..0803e6508 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEightByteInteger.java @@ -0,0 +1,234 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Serializable; +import java.math.BigInteger; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.BYTE_MASK; + +/** + * Utility class that represents an eight byte integer with conversion + * rules for the big endian byte order of ZIP files. + * @Immutable + * + * @since 1.2 + */ +public final class ZipEightByteInteger implements Serializable { + private static final long serialVersionUID = 1L; + + private static final int BYTE_1 = 1; + private static final int BYTE_1_MASK = 0xFF00; + private static final int BYTE_1_SHIFT = 8; + + private static final int BYTE_2 = 2; + private static final int BYTE_2_MASK = 0xFF0000; + private static final int BYTE_2_SHIFT = 16; + + private static final int BYTE_3 = 3; + private static final long BYTE_3_MASK = 0xFF000000L; + private static final int BYTE_3_SHIFT = 24; + + private static final int BYTE_4 = 4; + private static final long BYTE_4_MASK = 0xFF00000000L; + private static final int BYTE_4_SHIFT = 32; + + private static final int BYTE_5 = 5; + private static final long BYTE_5_MASK = 0xFF0000000000L; + private static final int BYTE_5_SHIFT = 40; + + private static final int BYTE_6 = 6; + private static final long BYTE_6_MASK = 0xFF000000000000L; + private static final int BYTE_6_SHIFT = 48; + + private static final int BYTE_7 = 7; + private static final long BYTE_7_MASK = 0x7F00000000000000L; + private static final int BYTE_7_SHIFT = 56; + + private static final int LEFTMOST_BIT_SHIFT = 63; + private static final 
byte LEFTMOST_BIT = (byte) 0x80; + + private final BigInteger value; + + public static final ZipEightByteInteger ZERO = new ZipEightByteInteger(0); + + /** + * Create instance from a number. + * @param value the long to store as a ZipEightByteInteger + */ + public ZipEightByteInteger(long value) { + this(BigInteger.valueOf(value)); + } + + /** + * Create instance from a number. + * @param value the BigInteger to store as a ZipEightByteInteger + */ + public ZipEightByteInteger(BigInteger value) { + this.value = value; + } + + /** + * Create instance from bytes. + * @param bytes the bytes to store as a ZipEightByteInteger + */ + public ZipEightByteInteger (byte[] bytes) { + this(bytes, 0); + } + + /** + * Create instance from the eight bytes starting at offset. + * @param bytes the bytes to store as a ZipEightByteInteger + * @param offset the offset to start + */ + public ZipEightByteInteger (byte[] bytes, int offset) { + value = ZipEightByteInteger.getValue(bytes, offset); + } + + /** + * Get value as eight bytes in big endian byte order. + * @return value as eight bytes in big endian order + */ + public byte[] getBytes() { + return ZipEightByteInteger.getBytes(value); + } + + /** + * Get value as Java long. + * @return value as a long + */ + public long getLongValue() { + return value.longValue(); + } + + /** + * Get value as Java long. + * @return value as a long + */ + public BigInteger getValue() { + return value; + } + + /** + * Get value as eight bytes in big endian byte order. + * @param value the value to convert + * @return value as eight bytes in big endian byte order + */ + public static byte[] getBytes(long value) { + return getBytes(BigInteger.valueOf(value)); + } + + /** + * Get value as eight bytes in big endian byte order. 
+ * @param value the value to convert + * @return value as eight bytes in big endian byte order + */ + public static byte[] getBytes(BigInteger value) { + byte[] result = new byte[8]; + long val = value.longValue(); + result[0] = (byte) ((val & BYTE_MASK)); + result[BYTE_1] = (byte) ((val & BYTE_1_MASK) >> BYTE_1_SHIFT); + result[BYTE_2] = (byte) ((val & BYTE_2_MASK) >> BYTE_2_SHIFT); + result[BYTE_3] = (byte) ((val & BYTE_3_MASK) >> BYTE_3_SHIFT); + result[BYTE_4] = (byte) ((val & BYTE_4_MASK) >> BYTE_4_SHIFT); + result[BYTE_5] = (byte) ((val & BYTE_5_MASK) >> BYTE_5_SHIFT); + result[BYTE_6] = (byte) ((val & BYTE_6_MASK) >> BYTE_6_SHIFT); + result[BYTE_7] = (byte) ((val & BYTE_7_MASK) >> BYTE_7_SHIFT); + if (value.testBit(LEFTMOST_BIT_SHIFT)) { + result[BYTE_7] |= LEFTMOST_BIT; + } + return result; + } + + /** + * Helper method to get the value as a Java long from eight bytes + * starting at given array offset + * @param bytes the array of bytes + * @param offset the offset to start + * @return the corresponding Java long value + */ + public static long getLongValue(byte[] bytes, int offset) { + return getValue(bytes, offset).longValue(); + } + + /** + * Helper method to get the value as a Java BigInteger from eight + * bytes starting at given array offset + * @param bytes the array of bytes + * @param offset the offset to start + * @return the corresponding Java BigInteger value + */ + public static BigInteger getValue(byte[] bytes, int offset) { + long value = ((long) bytes[offset + BYTE_7] << BYTE_7_SHIFT) & BYTE_7_MASK; + value += ((long) bytes[offset + BYTE_6] << BYTE_6_SHIFT) & BYTE_6_MASK; + value += ((long) bytes[offset + BYTE_5] << BYTE_5_SHIFT) & BYTE_5_MASK; + value += ((long) bytes[offset + BYTE_4] << BYTE_4_SHIFT) & BYTE_4_MASK; + value += ((long) bytes[offset + BYTE_3] << BYTE_3_SHIFT) & BYTE_3_MASK; + value += ((long) bytes[offset + BYTE_2] << BYTE_2_SHIFT) & BYTE_2_MASK; + value += ((long) bytes[offset + BYTE_1] << BYTE_1_SHIFT) & BYTE_1_MASK; + 
value += ((long) bytes[offset] & BYTE_MASK); + BigInteger val = BigInteger.valueOf(value); + return (bytes[offset + BYTE_7] & LEFTMOST_BIT) == LEFTMOST_BIT + ? val.setBit(LEFTMOST_BIT_SHIFT) : val; + } + + /** + * Helper method to get the value as a Java long from an eight-byte array + * @param bytes the array of bytes + * @return the corresponding Java long value + */ + public static long getLongValue(byte[] bytes) { + return getLongValue(bytes, 0); + } + + /** + * Helper method to get the value as a Java long from an eight-byte array + * @param bytes the array of bytes + * @return the corresponding Java BigInteger value + */ + public static BigInteger getValue(byte[] bytes) { + return getValue(bytes, 0); + } + + /** + * Override to make two instances with same value equal. + * @param o an object to compare + * @return true if the objects are equal + */ + @Override + public boolean equals(Object o) { + if (o == null || !(o instanceof ZipEightByteInteger)) { + return false; + } + return value.equals(((ZipEightByteInteger) o).getValue()); + } + + /** + * Override to make two instances with same value equal. + * @return the hashCode of the value stored in the ZipEightByteInteger + */ + @Override + public int hashCode() { + return value.hashCode(); + } + + @Override + public String toString() { + return "ZipEightByteInteger value: " + value; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncoding.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncoding.java new file mode 100644 index 000000000..65d2044b5 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncoding.java @@ -0,0 +1,84 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. 
The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.nio.ByteBuffer; + +/** + * An interface for encoders that do a pretty encoding of ZIP + * filenames. + * + *

    There are mostly two implementations, one that uses java.nio + * {@link java.nio.charset.Charset Charset} and one implementation, + * which copes with simple 8 bit charsets, because java-1.4 did not + * support Cp437 in java.nio.

    + * + *

    The main reason for defining an own encoding layer comes from + * the problems with {@link java.lang.String#getBytes(String) + * String.getBytes}, which encodes unknown characters as ASCII + * quotation marks ('?'). Quotation marks are per definition an + * invalid filename on some operating systems like Windows, which + * leads to ignored ZIP entries.

    + * + *

    All implementations should implement this interface in a + * reentrant way.

    + */ +public interface ZipEncoding { + /** + * Check, whether the given string may be losslessly encoded using this + * encoding. + * + * @param name A filename or ZIP comment. + * @return Whether the given name may be encoded with out any losses. + */ + boolean canEncode(String name); + + /** + * Encode a filename or a comment to a byte array suitable for + * storing it to a serialized zip entry. + * + *

    Examples for CP 437 (in pseudo-notation, right hand side is + * C-style notation):

    + *
    +     *  encode("\u20AC_for_Dollar.txt") = "%U20AC_for_Dollar.txt"
    +     *  encode("\u00D6lf\u00E4sser.txt") = "\231lf\204sser.txt"
    +     * 
    + * + * @param name A filename or ZIP comment. + * @return A byte buffer with a backing array containing the + * encoded name. Unmappable characters or malformed + * character sequences are mapped to a sequence of utf-16 + * words encoded in the format %Uxxxx. It is + * assumed, that the byte buffer is positioned at the + * beginning of the encoded result, the byte buffer has a + * backing array and the limit of the byte buffer points + * to the end of the encoded result. + * @throws IOException + */ + ByteBuffer encode(String name) throws IOException; + + /** + * @param data The byte values to decode. + * @return The decoded string. + * @throws IOException + */ + String decode(byte [] data) throws IOException; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java new file mode 100644 index 000000000..c0f1cfaef --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipEncodingHelper.java @@ -0,0 +1,258 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ + +package org.apache.commons.compress.archivers.zip; + +import java.nio.ByteBuffer; +import java.nio.charset.Charset; +import java.nio.charset.UnsupportedCharsetException; +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; + +import org.apache.commons.compress.utils.Charsets; + +/** + * Static helper functions for robustly encoding filenames in zip files. + */ +public abstract class ZipEncodingHelper { + + /** + * A class, which holds the high characters of a simple encoding + * and lazily instantiates a Simple8BitZipEncoding instance in a + * thread-safe manner. + */ + private static class SimpleEncodingHolder { + + private final char [] highChars; + private Simple8BitZipEncoding encoding; + + /** + * Instantiate a simple encoding holder. + * + * @param highChars The characters for byte codes 128 to 255. + * + * @see Simple8BitZipEncoding#Simple8BitZipEncoding(char[]) + */ + SimpleEncodingHolder(char [] highChars) { + this.highChars = highChars; + } + + /** + * @return The associated {@link Simple8BitZipEncoding}, which + * is instantiated if not done so far. 
+ */ + public synchronized Simple8BitZipEncoding getEncoding() { + if (this.encoding == null) { + this.encoding = new Simple8BitZipEncoding(this.highChars); + } + return this.encoding; + } + } + + private static final Map simpleEncodings; + + static { + Map se = + new HashMap(); + + char[] cp437_high_chars = + new char[] { 0x00c7, 0x00fc, 0x00e9, 0x00e2, 0x00e4, 0x00e0, + 0x00e5, 0x00e7, 0x00ea, 0x00eb, 0x00e8, 0x00ef, + 0x00ee, 0x00ec, 0x00c4, 0x00c5, 0x00c9, 0x00e6, + 0x00c6, 0x00f4, 0x00f6, 0x00f2, 0x00fb, 0x00f9, + 0x00ff, 0x00d6, 0x00dc, 0x00a2, 0x00a3, 0x00a5, + 0x20a7, 0x0192, 0x00e1, 0x00ed, 0x00f3, 0x00fa, + 0x00f1, 0x00d1, 0x00aa, 0x00ba, 0x00bf, 0x2310, + 0x00ac, 0x00bd, 0x00bc, 0x00a1, 0x00ab, 0x00bb, + 0x2591, 0x2592, 0x2593, 0x2502, 0x2524, 0x2561, + 0x2562, 0x2556, 0x2555, 0x2563, 0x2551, 0x2557, + 0x255d, 0x255c, 0x255b, 0x2510, 0x2514, 0x2534, + 0x252c, 0x251c, 0x2500, 0x253c, 0x255e, 0x255f, + 0x255a, 0x2554, 0x2569, 0x2566, 0x2560, 0x2550, + 0x256c, 0x2567, 0x2568, 0x2564, 0x2565, 0x2559, + 0x2558, 0x2552, 0x2553, 0x256b, 0x256a, 0x2518, + 0x250c, 0x2588, 0x2584, 0x258c, 0x2590, 0x2580, + 0x03b1, 0x00df, 0x0393, 0x03c0, 0x03a3, 0x03c3, + 0x00b5, 0x03c4, 0x03a6, 0x0398, 0x03a9, 0x03b4, + 0x221e, 0x03c6, 0x03b5, 0x2229, 0x2261, 0x00b1, + 0x2265, 0x2264, 0x2320, 0x2321, 0x00f7, 0x2248, + 0x00b0, 0x2219, 0x00b7, 0x221a, 0x207f, 0x00b2, + 0x25a0, 0x00a0 }; + + SimpleEncodingHolder cp437 = new SimpleEncodingHolder(cp437_high_chars); + + se.put("CP437", cp437); + se.put("Cp437", cp437); + se.put("cp437", cp437); + se.put("IBM437", cp437); + se.put("ibm437", cp437); + + char[] cp850_high_chars = + new char[] { 0x00c7, 0x00fc, 0x00e9, 0x00e2, 0x00e4, 0x00e0, + 0x00e5, 0x00e7, 0x00ea, 0x00eb, 0x00e8, 0x00ef, + 0x00ee, 0x00ec, 0x00c4, 0x00c5, 0x00c9, 0x00e6, + 0x00c6, 0x00f4, 0x00f6, 0x00f2, 0x00fb, 0x00f9, + 0x00ff, 0x00d6, 0x00dc, 0x00f8, 0x00a3, 0x00d8, + 0x00d7, 0x0192, 0x00e1, 0x00ed, 0x00f3, 0x00fa, + 0x00f1, 0x00d1, 0x00aa, 0x00ba, 0x00bf, 0x00ae, + 
0x00ac, 0x00bd, 0x00bc, 0x00a1, 0x00ab, 0x00bb, + 0x2591, 0x2592, 0x2593, 0x2502, 0x2524, 0x00c1, + 0x00c2, 0x00c0, 0x00a9, 0x2563, 0x2551, 0x2557, + 0x255d, 0x00a2, 0x00a5, 0x2510, 0x2514, 0x2534, + 0x252c, 0x251c, 0x2500, 0x253c, 0x00e3, 0x00c3, + 0x255a, 0x2554, 0x2569, 0x2566, 0x2560, 0x2550, + 0x256c, 0x00a4, 0x00f0, 0x00d0, 0x00ca, 0x00cb, + 0x00c8, 0x0131, 0x00cd, 0x00ce, 0x00cf, 0x2518, + 0x250c, 0x2588, 0x2584, 0x00a6, 0x00cc, 0x2580, + 0x00d3, 0x00df, 0x00d4, 0x00d2, 0x00f5, 0x00d5, + 0x00b5, 0x00fe, 0x00de, 0x00da, 0x00db, 0x00d9, + 0x00fd, 0x00dd, 0x00af, 0x00b4, 0x00ad, 0x00b1, + 0x2017, 0x00be, 0x00b6, 0x00a7, 0x00f7, 0x00b8, + 0x00b0, 0x00a8, 0x00b7, 0x00b9, 0x00b3, 0x00b2, + 0x25a0, 0x00a0 }; + + SimpleEncodingHolder cp850 = new SimpleEncodingHolder(cp850_high_chars); + + se.put("CP850", cp850); + se.put("Cp850", cp850); + se.put("cp850", cp850); + se.put("IBM850", cp850); + se.put("ibm850", cp850); + simpleEncodings = Collections.unmodifiableMap(se); + } + + /** + * Grow a byte buffer, so it has a minimal capacity or at least + * the double capacity of the original buffer + * + * @param b The original buffer. + * @param newCapacity The minimal requested new capacity. + * @return A byte buffer r with + * r.capacity() = max(b.capacity()*2,newCapacity) and + * all the data contained in b copied to the beginning + * of r. + * + */ + static ByteBuffer growBuffer(ByteBuffer b, int newCapacity) { + b.limit(b.position()); + b.rewind(); + + int c2 = b.capacity() * 2; + ByteBuffer on = ByteBuffer.allocate(c2 < newCapacity ? newCapacity : c2); + + on.put(b); + return on; + } + + + /** + * The hexadecimal digits 0,...,9,A,...,F encoded as + * ASCII bytes. + */ + private static final byte[] HEX_DIGITS = + new byte [] { + 0x30, 0x31, 0x32, 0x33, 0x34, 0x35, 0x36, 0x37, 0x38, 0x39, 0x41, + 0x42, 0x43, 0x44, 0x45, 0x46 + }; + + /** + * Append %Uxxxx to the given byte buffer. + * The caller must assure, that bb.remaining()>=6. 
+ * + * @param bb The byte buffer to write to. + * @param c The character to write. + */ + static void appendSurrogate(ByteBuffer bb, char c) { + + bb.put((byte) '%'); + bb.put((byte) 'U'); + + bb.put(HEX_DIGITS[(c >> 12)&0x0f]); + bb.put(HEX_DIGITS[(c >> 8)&0x0f]); + bb.put(HEX_DIGITS[(c >> 4)&0x0f]); + bb.put(HEX_DIGITS[c & 0x0f]); + } + + + /** + * name of the encoding UTF-8 + */ + static final String UTF8 = "UTF8"; + + /** + * name of the encoding UTF-8 + */ + static final ZipEncoding UTF8_ZIP_ENCODING = new FallbackZipEncoding(UTF8); + + /** + * Instantiates a zip encoding. + * + * @param name The name of the zip encoding. Specify {@code null} for + * the platform's default encoding. + * @return A zip encoding for the given encoding name. + */ + public static ZipEncoding getZipEncoding(String name) { + + // fallback encoding is good enough for UTF-8. + if (isUTF8(name)) { + return UTF8_ZIP_ENCODING; + } + + if (name == null) { + return new FallbackZipEncoding(); + } + + SimpleEncodingHolder h = simpleEncodings.get(name); + + if (h!=null) { + return h.getEncoding(); + } + + try { + + Charset cs = Charset.forName(name); + return new NioZipEncoding(cs); + + } catch (UnsupportedCharsetException e) { + return new FallbackZipEncoding(name); + } + } + + /** + * Returns whether a given encoding is UTF-8. If the given name is null, then check the platform's default encoding. + * + * @param charsetName + * If the given name is null, then check the platform's default encoding. 
+ */ + static boolean isUTF8(String charsetName) { + if (charsetName == null) { + // check platform's default encoding + charsetName = System.getProperty("file.encoding"); + } + if (Charsets.UTF_8.name().equalsIgnoreCase(charsetName)) { + return true; + } + for (String alias : Charsets.UTF_8.aliases()) { + if (alias.equalsIgnoreCase(charsetName)) { + return true; + } + } + return false; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipExtraField.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipExtraField.java new file mode 100644 index 000000000..07d64a189 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipExtraField.java @@ -0,0 +1,90 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.zip.ZipException; + +/** + * General format of extra field data. + * + *

    Extra fields usually appear twice per file, once in the local + * file data and once in the central directory. Usually they are the + * same, but they don't have to be. {@link + * java.util.zip.ZipOutputStream java.util.zip.ZipOutputStream} will + * only use the local file data in both places.

    + * + */ +public interface ZipExtraField { + /** + * The Header-ID. + * + * @return The HeaderId value + */ + ZipShort getHeaderId(); + + /** + * Length of the extra field in the local file data - without + * Header-ID or length specifier. + * @return the length of the field in the local file data + */ + ZipShort getLocalFileDataLength(); + + /** + * Length of the extra field in the central directory - without + * Header-ID or length specifier. + * @return the length of the field in the central directory + */ + ZipShort getCentralDirectoryLength(); + + /** + * The actual data to put into local file data - without Header-ID + * or length specifier. + * @return the data + */ + byte[] getLocalFileDataData(); + + /** + * The actual data to put into central directory - without Header-ID or + * length specifier. + * @return the data + */ + byte[] getCentralDirectoryData(); + + /** + * Populate data from this array as if it was in local file data. + * + * @param buffer the buffer to read data from + * @param offset offset into buffer to read data + * @param length the length of data + * @exception ZipException on error + */ + void parseFromLocalFileData(byte[] buffer, int offset, int length) + throws ZipException; + + /** + * Populate data from this array as if it was in central directory data. 
+ * + * @param buffer the buffer to read data from + * @param offset offset into buffer to read data + * @param length the length of data + * @exception ZipException on error + */ + void parseFromCentralDirectoryData(byte[] buffer, int offset, int length) + throws ZipException; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipFile.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipFile.java new file mode 100644 index 000000000..bcde9c841 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipFile.java @@ -0,0 +1,1083 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.BufferedInputStream; +import java.io.Closeable; +import java.io.EOFException; +import java.io.File; +import java.io.IOException; +import java.io.InputStream; +import java.io.RandomAccessFile; +import java.util.Arrays; +import java.util.Collections; +import java.util.Comparator; +import java.util.Enumeration; +import java.util.HashMap; +import java.util.LinkedList; +import java.util.List; +import java.util.Map; +import java.util.zip.Inflater; +import java.util.zip.InflaterInputStream; +import java.util.zip.ZipException; + +import org.apache.commons.compress.utils.IOUtils; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.DWORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.SHORT; +import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC; +import static org.apache.commons.compress.archivers.zip.ZipConstants.ZIP64_MAGIC_SHORT; + +/** + * Replacement for java.util.ZipFile. + * + *

+ * <p>This class adds support for file name encodings other than UTF-8
+ * (which is required to work on ZIP files created by native zip tools
+ * and is able to skip a preamble like the one found in self
+ * extracting archives.  Furthermore it returns instances of
+ * <code>org.apache.commons.compress.archivers.zip.ZipArchiveEntry</code>
+ * instead of <code>java.util.zip.ZipEntry</code>.</p>
+ *

+ * <p>It doesn't extend <code>java.util.zip.ZipFile</code> as it would
+ * have to reimplement all methods anyway.  Like
+ * <code>java.util.ZipFile</code>, it uses RandomAccessFile under the
+ * covers and supports compressed and uncompressed entries.  As of
+ * Apache Commons Compress 1.3 it also transparently supports Zip64
+ * extensions and thus individual entries and archives larger than 4
+ * GB or with more than 65536 entries.</p>
+ *

+ * <p>The method signatures mimic the ones of
+ * <code>java.util.zip.ZipFile</code>, with a couple of exceptions:
+ *
+ * <ul>
+ *   <li>There is no getName method.</li>
+ *   <li>entries has been renamed to getEntries.</li>
+ *   <li>getEntries and getEntry return
+ *   <code>org.apache.commons.compress.archivers.zip.ZipArchiveEntry</code>
+ *   instances.</li>
+ *   <li>close is allowed to throw IOException.</li>
+ * </ul>
    + * + */ +public class ZipFile implements Closeable { + private static final int HASH_SIZE = 509; + static final int NIBLET_MASK = 0x0f; + static final int BYTE_SHIFT = 8; + private static final int POS_0 = 0; + private static final int POS_1 = 1; + private static final int POS_2 = 2; + private static final int POS_3 = 3; + + /** + * List of entries in the order they appear inside the central + * directory. + */ + private final List entries = + new LinkedList(); + + /** + * Maps String to list of ZipArchiveEntrys, name -> actual entries. + */ + private final Map> nameMap = + new HashMap>(HASH_SIZE); + + private static final class OffsetEntry { + private long headerOffset = -1; + private long dataOffset = -1; + } + + /** + * The encoding to use for filenames and the file comment. + * + *

+ * <p>For a list of possible values see <a
+ * href="http://java.sun.com/j2se/1.5.0/docs/guide/intl/encoding.doc.html">http://java.sun.com/j2se/1.5.0/docs/guide/intl/encoding.doc.html</a>.
+ * Defaults to UTF-8.</p>

    + */ + private final String encoding; + + /** + * The zip encoding to use for filenames and the file comment. + */ + private final ZipEncoding zipEncoding; + + /** + * File name of actual source. + */ + private final String archiveName; + + /** + * The actual data source. + */ + private final RandomAccessFile archive; + + /** + * Whether to look for and use Unicode extra fields. + */ + private final boolean useUnicodeExtraFields; + + /** + * Whether the file is closed. + */ + private boolean closed; + + // cached buffers - must only be used locally in the class (COMPRESS-172 - reduce garbage collection) + private final byte[] DWORD_BUF = new byte[DWORD]; + private final byte[] WORD_BUF = new byte[WORD]; + private final byte[] CFH_BUF = new byte[CFH_LEN]; + private final byte[] SHORT_BUF = new byte[SHORT]; + + /** + * Opens the given file for reading, assuming "UTF8" for file names. + * + * @param f the archive. + * + * @throws IOException if an error occurs while reading the file. + */ + public ZipFile(File f) throws IOException { + this(f, ZipEncodingHelper.UTF8); + } + + /** + * Opens the given file for reading, assuming "UTF8". + * + * @param name name of the archive. + * + * @throws IOException if an error occurs while reading the file. + */ + public ZipFile(String name) throws IOException { + this(new File(name), ZipEncodingHelper.UTF8); + } + + /** + * Opens the given file for reading, assuming the specified + * encoding for file names, scanning unicode extra fields. + * + * @param name name of the archive. + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * + * @throws IOException if an error occurs while reading the file. + */ + public ZipFile(String name, String encoding) throws IOException { + this(new File(name), encoding, true); + } + + /** + * Opens the given file for reading, assuming the specified + * encoding for file names and scanning for unicode extra fields. 
+ * + * @param f the archive. + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * + * @throws IOException if an error occurs while reading the file. + */ + public ZipFile(File f, String encoding) throws IOException { + this(f, encoding, true); + } + + /** + * Opens the given file for reading, assuming the specified + * encoding for file names. + * + * @param f the archive. + * @param encoding the encoding to use for file names, use null + * for the platform's default encoding + * @param useUnicodeExtraFields whether to use InfoZIP Unicode + * Extra Fields (if present) to set the file names. + * + * @throws IOException if an error occurs while reading the file. + */ + public ZipFile(File f, String encoding, boolean useUnicodeExtraFields) + throws IOException { + this.archiveName = f.getAbsolutePath(); + this.encoding = encoding; + this.zipEncoding = ZipEncodingHelper.getZipEncoding(encoding); + this.useUnicodeExtraFields = useUnicodeExtraFields; + archive = new RandomAccessFile(f, "r"); + boolean success = false; + try { + Map entriesWithoutUTF8Flag = + populateFromCentralDirectory(); + resolveLocalFileHeaderData(entriesWithoutUTF8Flag); + success = true; + } finally { + if (!success) { + closed = true; + IOUtils.closeQuietly(archive); + } + } + } + + /** + * The encoding to use for filenames and the file comment. + * + * @return null if using the platform's default character encoding. + */ + public String getEncoding() { + return encoding; + } + + /** + * Closes the archive. + * @throws IOException if an error occurs closing the archive. + */ + public void close() throws IOException { + // this flag is only written here and read in finalize() which + // can never be run in parallel. + // no synchronization needed. 
+ closed = true; + + archive.close(); + } + + /** + * close a zipfile quietly; throw no io fault, do nothing + * on a null parameter + * @param zipfile file to close, can be null + */ + public static void closeQuietly(ZipFile zipfile) { + IOUtils.closeQuietly(zipfile); + } + + /** + * Returns all entries. + * + *

+ * <p>Entries will be returned in the same order they appear
+ * within the archive's central directory.</p>

    + * + * @return all entries as {@link ZipArchiveEntry} instances + */ + public Enumeration getEntries() { + return Collections.enumeration(entries); + } + + /** + * Returns all entries in physical order. + * + *

+ * <p>Entries will be returned in the same order their contents
+ * appear within the archive.</p>

    + * + * @return all entries as {@link ZipArchiveEntry} instances + * + * @since 1.1 + */ + public Enumeration getEntriesInPhysicalOrder() { + ZipArchiveEntry[] allEntries = entries.toArray(new ZipArchiveEntry[0]); + Arrays.sort(allEntries, OFFSET_COMPARATOR); + return Collections.enumeration(Arrays.asList(allEntries)); + } + + /** + * Returns a named entry - or {@code null} if no entry by + * that name exists. + * + *

+ * <p>If multiple entries with the same name exist the first entry
+ * in the archive's central directory by that name is
+ * returned.</p>

    + * + * @param name name of the entry. + * @return the ZipArchiveEntry corresponding to the given name - or + * {@code null} if not present. + */ + public ZipArchiveEntry getEntry(String name) { + LinkedList entriesOfThatName = nameMap.get(name); + return entriesOfThatName != null ? entriesOfThatName.getFirst() : null; + } + + /** + * Returns all named entries in the same order they appear within + * the archive's central directory. + * + * @param name name of the entry. + * @return the Iterable<ZipArchiveEntry> corresponding to the + * given name + * @since 1.6 + */ + public Iterable getEntries(String name) { + List entriesOfThatName = nameMap.get(name); + return entriesOfThatName != null ? entriesOfThatName + : Collections.emptyList(); + } + + /** + * Returns all named entries in the same order their contents + * appear within the archive. + * + * @param name name of the entry. + * @return the Iterable<ZipArchiveEntry> corresponding to the + * given name + * @since 1.6 + */ + public Iterable getEntriesInPhysicalOrder(String name) { + ZipArchiveEntry[] entriesOfThatName = new ZipArchiveEntry[0]; + if (nameMap.containsKey(name)) { + entriesOfThatName = nameMap.get(name).toArray(entriesOfThatName); + Arrays.sort(entriesOfThatName, OFFSET_COMPARATOR); + } + return Arrays.asList(entriesOfThatName); + } + + /** + * Whether this class is able to read the given entry. + * + *

+ * <p>May return false if it is set up to use encryption or a
+ * compression method that hasn't been implemented yet.</p>

    + * @since 1.1 + */ + public boolean canReadEntryData(ZipArchiveEntry ze) { + return ZipUtil.canHandleEntryData(ze); + } + + /** + * Returns an InputStream for reading the contents of the given entry. + * + * @param ze the entry to get the stream for. + * @return a stream to read the entry from. + * @throws IOException if unable to create an input stream from the zipentry + * @throws ZipException if the zipentry uses an unsupported feature + */ + public InputStream getInputStream(ZipArchiveEntry ze) + throws IOException, ZipException { + if (!(ze instanceof Entry)) { + return null; + } + // cast valididty is checked just above + OffsetEntry offsetEntry = ((Entry) ze).getOffsetEntry(); + ZipUtil.checkRequestedFeatures(ze); + long start = offsetEntry.dataOffset; + BoundedInputStream bis = + new BoundedInputStream(start, ze.getCompressedSize()); + switch (ZipMethod.getMethodByCode(ze.getMethod())) { + case STORED: + return bis; + case UNSHRINKING: + return new UnshrinkingInputStream(bis); + case IMPLODING: + return new ExplodingInputStream(ze.getGeneralPurposeBit().getSlidingDictionarySize(), + ze.getGeneralPurposeBit().getNumberOfShannonFanoTrees(), new BufferedInputStream(bis)); + case DEFLATED: + bis.addDummy(); + final Inflater inflater = new Inflater(true); + return new InflaterInputStream(bis, inflater) { + @Override + public void close() throws IOException { + super.close(); + inflater.end(); + } + }; + default: + throw new ZipException("Found unsupported compression method " + + ze.getMethod()); + } + } + + /** + *

+ * <p>
+ * Convenience method to return the entry's content as a String if isUnixSymlink()
+ * returns true for it, otherwise returns null.
+ * </p>
+ *
+ * <p>This method assumes the symbolic link's file name uses the
+ * same encoding that has been specified for this ZipFile.</p>

    + * + * @param entry ZipArchiveEntry object that represents the symbolic link + * @return entry's content as a String + * @throws IOException problem with content's input stream + * @since 1.5 + */ + public String getUnixSymlink(ZipArchiveEntry entry) throws IOException { + if (entry != null && entry.isUnixSymlink()) { + InputStream in = null; + try { + in = getInputStream(entry); + byte[] symlinkBytes = IOUtils.toByteArray(in); + return zipEncoding.decode(symlinkBytes); + } finally { + if (in != null) { + in.close(); + } + } + } else { + return null; + } + } + + /** + * Ensures that the close method of this zipfile is called when + * there are no more references to it. + * @see #close() + */ + @Override + protected void finalize() throws Throwable { + try { + if (!closed) { + System.err.println("Cleaning up unclosed ZipFile for archive " + + archiveName); + close(); + } + } finally { + super.finalize(); + } + } + + /** + * Length of a "central directory" entry structure without file + * name, extra fields or comment. + */ + private static final int CFH_LEN = + /* version made by */ SHORT + /* version needed to extract */ + SHORT + /* general purpose bit flag */ + SHORT + /* compression method */ + SHORT + /* last mod file time */ + SHORT + /* last mod file date */ + SHORT + /* crc-32 */ + WORD + /* compressed size */ + WORD + /* uncompressed size */ + WORD + /* filename length */ + SHORT + /* extra field length */ + SHORT + /* file comment length */ + SHORT + /* disk number start */ + SHORT + /* internal file attributes */ + SHORT + /* external file attributes */ + WORD + /* relative offset of local header */ + WORD; + + private static final long CFH_SIG = + ZipLong.getValue(ZipArchiveOutputStream.CFH_SIG); + + /** + * Reads the central directory of the given archive and populates + * the internal tables with ZipArchiveEntry instances. + * + *

+ * <p>The ZipArchiveEntrys will know all data that can be obtained from
+ * the central directory alone, but not the data that requires the
+ * local file header or additional data to be read.</p>

    + * + * @return a map of zipentries that didn't have the language + * encoding flag set when read. + */ + private Map populateFromCentralDirectory() + throws IOException { + HashMap noUTF8Flag = + new HashMap(); + + positionAtCentralDirectory(); + + archive.readFully(WORD_BUF); + long sig = ZipLong.getValue(WORD_BUF); + + if (sig != CFH_SIG && startsWithLocalFileHeader()) { + throw new IOException("central directory is empty, can't expand" + + " corrupt archive."); + } + + while (sig == CFH_SIG) { + readCentralDirectoryEntry(noUTF8Flag); + archive.readFully(WORD_BUF); + sig = ZipLong.getValue(WORD_BUF); + } + return noUTF8Flag; + } + + /** + * Reads an individual entry of the central directory, creats an + * ZipArchiveEntry from it and adds it to the global maps. + * + * @param noUTF8Flag map used to collect entries that don't have + * their UTF-8 flag set and whose name will be set by data read + * from the local file header later. The current entry may be + * added to this map. + */ + private void + readCentralDirectoryEntry(Map noUTF8Flag) + throws IOException { + archive.readFully(CFH_BUF); + int off = 0; + OffsetEntry offset = new OffsetEntry(); + Entry ze = new Entry(offset); + + int versionMadeBy = ZipShort.getValue(CFH_BUF, off); + off += SHORT; + ze.setPlatform((versionMadeBy >> BYTE_SHIFT) & NIBLET_MASK); + + off += SHORT; // skip version info + + final GeneralPurposeBit gpFlag = GeneralPurposeBit.parse(CFH_BUF, off); + final boolean hasUTF8Flag = gpFlag.usesUTF8ForNames(); + final ZipEncoding entryEncoding = + hasUTF8Flag ? 
ZipEncodingHelper.UTF8_ZIP_ENCODING : zipEncoding; + ze.setGeneralPurposeBit(gpFlag); + + off += SHORT; + + ze.setMethod(ZipShort.getValue(CFH_BUF, off)); + off += SHORT; + + long time = ZipUtil.dosToJavaTime(ZipLong.getValue(CFH_BUF, off)); + ze.setTime(time); + off += WORD; + + ze.setCrc(ZipLong.getValue(CFH_BUF, off)); + off += WORD; + + ze.setCompressedSize(ZipLong.getValue(CFH_BUF, off)); + off += WORD; + + ze.setSize(ZipLong.getValue(CFH_BUF, off)); + off += WORD; + + int fileNameLen = ZipShort.getValue(CFH_BUF, off); + off += SHORT; + + int extraLen = ZipShort.getValue(CFH_BUF, off); + off += SHORT; + + int commentLen = ZipShort.getValue(CFH_BUF, off); + off += SHORT; + + int diskStart = ZipShort.getValue(CFH_BUF, off); + off += SHORT; + + ze.setInternalAttributes(ZipShort.getValue(CFH_BUF, off)); + off += SHORT; + + ze.setExternalAttributes(ZipLong.getValue(CFH_BUF, off)); + off += WORD; + + byte[] fileName = new byte[fileNameLen]; + archive.readFully(fileName); + ze.setName(entryEncoding.decode(fileName), fileName); + + // LFH offset, + offset.headerOffset = ZipLong.getValue(CFH_BUF, off); + // data offset will be filled later + entries.add(ze); + + byte[] cdExtraData = new byte[extraLen]; + archive.readFully(cdExtraData); + ze.setCentralDirectoryExtra(cdExtraData); + + setSizesAndOffsetFromZip64Extra(ze, offset, diskStart); + + byte[] comment = new byte[commentLen]; + archive.readFully(comment); + ze.setComment(entryEncoding.decode(comment)); + + if (!hasUTF8Flag && useUnicodeExtraFields) { + noUTF8Flag.put(ze, new NameAndComment(fileName, comment)); + } + } + + /** + * If the entry holds a Zip64 extended information extra field, + * read sizes from there if the entry's sizes are set to + * 0xFFFFFFFFF, do the same for the offset of the local file + * header. + * + *

+ * <p>Ensures the Zip64 extra either knows both compressed and
+ * uncompressed size or neither of both as the internal logic in
+ * ExtraFieldUtils forces the field to create local header data
+ * even if they are never used - and here a field with only one
+ * size would be invalid.</p>

    + */ + private void setSizesAndOffsetFromZip64Extra(ZipArchiveEntry ze, + OffsetEntry offset, + int diskStart) + throws IOException { + Zip64ExtendedInformationExtraField z64 = + (Zip64ExtendedInformationExtraField) + ze.getExtraField(Zip64ExtendedInformationExtraField.HEADER_ID); + if (z64 != null) { + boolean hasUncompressedSize = ze.getSize() == ZIP64_MAGIC; + boolean hasCompressedSize = ze.getCompressedSize() == ZIP64_MAGIC; + boolean hasRelativeHeaderOffset = + offset.headerOffset == ZIP64_MAGIC; + z64.reparseCentralDirectoryData(hasUncompressedSize, + hasCompressedSize, + hasRelativeHeaderOffset, + diskStart == ZIP64_MAGIC_SHORT); + + if (hasUncompressedSize) { + ze.setSize(z64.getSize().getLongValue()); + } else if (hasCompressedSize) { + z64.setSize(new ZipEightByteInteger(ze.getSize())); + } + + if (hasCompressedSize) { + ze.setCompressedSize(z64.getCompressedSize().getLongValue()); + } else if (hasUncompressedSize) { + z64.setCompressedSize(new ZipEightByteInteger(ze.getCompressedSize())); + } + + if (hasRelativeHeaderOffset) { + offset.headerOffset = + z64.getRelativeHeaderOffset().getLongValue(); + } + } + } + + /** + * Length of the "End of central directory record" - which is + * supposed to be the last structure of the archive - without file + * comment. + */ + static final int MIN_EOCD_SIZE = + /* end of central dir signature */ WORD + /* number of this disk */ + SHORT + /* number of the disk with the */ + /* start of the central directory */ + SHORT + /* total number of entries in */ + /* the central dir on this disk */ + SHORT + /* total number of entries in */ + /* the central dir */ + SHORT + /* size of the central directory */ + WORD + /* offset of start of central */ + /* directory with respect to */ + /* the starting disk number */ + WORD + /* zipfile comment length */ + SHORT; + + /** + * Maximum length of the "End of central directory record" with a + * file comment. 
+ */ + private static final int MAX_EOCD_SIZE = MIN_EOCD_SIZE + /* maximum length of zipfile comment */ + ZIP64_MAGIC_SHORT; + + /** + * Offset of the field that holds the location of the first + * central directory entry inside the "End of central directory + * record" relative to the start of the "End of central directory + * record". + */ + private static final int CFD_LOCATOR_OFFSET = + /* end of central dir signature */ WORD + /* number of this disk */ + SHORT + /* number of the disk with the */ + /* start of the central directory */ + SHORT + /* total number of entries in */ + /* the central dir on this disk */ + SHORT + /* total number of entries in */ + /* the central dir */ + SHORT + /* size of the central directory */ + WORD; + + /** + * Length of the "Zip64 end of central directory locator" - which + * should be right in front of the "end of central directory + * record" if one is present at all. + */ + private static final int ZIP64_EOCDL_LENGTH = + /* zip64 end of central dir locator sig */ WORD + /* number of the disk with the start */ + /* start of the zip64 end of */ + /* central directory */ + WORD + /* relative offset of the zip64 */ + /* end of central directory record */ + DWORD + /* total number of disks */ + WORD; + + /** + * Offset of the field that holds the location of the "Zip64 end + * of central directory record" inside the "Zip64 end of central + * directory locator" relative to the start of the "Zip64 end of + * central directory locator". + */ + private static final int ZIP64_EOCDL_LOCATOR_OFFSET = + /* zip64 end of central dir locator sig */ WORD + /* number of the disk with the start */ + /* start of the zip64 end of */ + /* central directory */ + WORD; + + /** + * Offset of the field that holds the location of the first + * central directory entry inside the "Zip64 end of central + * directory record" relative to the start of the "Zip64 end of + * central directory record". 
+ */ + private static final int ZIP64_EOCD_CFD_LOCATOR_OFFSET = + /* zip64 end of central dir */ + /* signature */ WORD + /* size of zip64 end of central */ + /* directory record */ + DWORD + /* version made by */ + SHORT + /* version needed to extract */ + SHORT + /* number of this disk */ + WORD + /* number of the disk with the */ + /* start of the central directory */ + WORD + /* total number of entries in the */ + /* central directory on this disk */ + DWORD + /* total number of entries in the */ + /* central directory */ + DWORD + /* size of the central directory */ + DWORD; + + /** + * Searches for either the "Zip64 end of central directory + * locator" or the "End of central dir record", parses + * it and positions the stream at the first central directory + * record. + */ + private void positionAtCentralDirectory() + throws IOException { + positionAtEndOfCentralDirectoryRecord(); + boolean found = false; + boolean searchedForZip64EOCD = + archive.getFilePointer() > ZIP64_EOCDL_LENGTH; + if (searchedForZip64EOCD) { + archive.seek(archive.getFilePointer() - ZIP64_EOCDL_LENGTH); + archive.readFully(WORD_BUF); + found = Arrays.equals(ZipArchiveOutputStream.ZIP64_EOCD_LOC_SIG, + WORD_BUF); + } + if (!found) { + // not a ZIP64 archive + if (searchedForZip64EOCD) { + skipBytes(ZIP64_EOCDL_LENGTH - WORD); + } + positionAtCentralDirectory32(); + } else { + positionAtCentralDirectory64(); + } + } + + /** + * Parses the "Zip64 end of central directory locator", + * finds the "Zip64 end of central directory record" using the + * parsed information, parses that and positions the stream at the + * first central directory record. + * + * Expects stream to be positioned right behind the "Zip64 + * end of central directory locator"'s signature. 
+ */ + private void positionAtCentralDirectory64() + throws IOException { + skipBytes(ZIP64_EOCDL_LOCATOR_OFFSET + - WORD /* signature has already been read */); + archive.readFully(DWORD_BUF); + archive.seek(ZipEightByteInteger.getLongValue(DWORD_BUF)); + archive.readFully(WORD_BUF); + if (!Arrays.equals(WORD_BUF, ZipArchiveOutputStream.ZIP64_EOCD_SIG)) { + throw new ZipException("archive's ZIP64 end of central " + + "directory locator is corrupt."); + } + skipBytes(ZIP64_EOCD_CFD_LOCATOR_OFFSET + - WORD /* signature has already been read */); + archive.readFully(DWORD_BUF); + archive.seek(ZipEightByteInteger.getLongValue(DWORD_BUF)); + } + + /** + * Parses the "End of central dir record" and positions + * the stream at the first central directory record. + * + * Expects stream to be positioned at the beginning of the + * "End of central dir record". + */ + private void positionAtCentralDirectory32() + throws IOException { + skipBytes(CFD_LOCATOR_OFFSET); + archive.readFully(WORD_BUF); + archive.seek(ZipLong.getValue(WORD_BUF)); + } + + /** + * Searches for the and positions the stream at the start of the + * "End of central dir record". + */ + private void positionAtEndOfCentralDirectoryRecord() + throws IOException { + boolean found = tryToLocateSignature(MIN_EOCD_SIZE, MAX_EOCD_SIZE, + ZipArchiveOutputStream.EOCD_SIG); + if (!found) { + throw new ZipException("archive is not a ZIP archive"); + } + } + + /** + * Searches the archive backwards from minDistance to maxDistance + * for the given signature, positions the RandomaccessFile right + * at the signature if it has been found. 
+ */ + private boolean tryToLocateSignature(long minDistanceFromEnd, + long maxDistanceFromEnd, + byte[] sig) throws IOException { + boolean found = false; + long off = archive.length() - minDistanceFromEnd; + final long stopSearching = + Math.max(0L, archive.length() - maxDistanceFromEnd); + if (off >= 0) { + for (; off >= stopSearching; off--) { + archive.seek(off); + int curr = archive.read(); + if (curr == -1) { + break; + } + if (curr == sig[POS_0]) { + curr = archive.read(); + if (curr == sig[POS_1]) { + curr = archive.read(); + if (curr == sig[POS_2]) { + curr = archive.read(); + if (curr == sig[POS_3]) { + found = true; + break; + } + } + } + } + } + } + if (found) { + archive.seek(off); + } + return found; + } + + /** + * Skips the given number of bytes or throws an EOFException if + * skipping failed. + */ + private void skipBytes(final int count) throws IOException { + int totalSkipped = 0; + while (totalSkipped < count) { + int skippedNow = archive.skipBytes(count - totalSkipped); + if (skippedNow <= 0) { + throw new EOFException(); + } + totalSkipped += skippedNow; + } + } + + /** + * Number of bytes in local file header up to the "length of + * filename" entry. + */ + private static final long LFH_OFFSET_FOR_FILENAME_LENGTH = + /* local file header signature */ WORD + /* version needed to extract */ + SHORT + /* general purpose bit flag */ + SHORT + /* compression method */ + SHORT + /* last mod file time */ + SHORT + /* last mod file date */ + SHORT + /* crc-32 */ + WORD + /* compressed size */ + WORD + /* uncompressed size */ + WORD; + + /** + * Walks through all recorded entries and adds the data available + * from the local file header. + * + *

+ * <p>Also records the offsets for the data to read from the
+ * entries.</p>

    + */ + private void resolveLocalFileHeaderData(Map + entriesWithoutUTF8Flag) + throws IOException { + for (ZipArchiveEntry zipArchiveEntry : entries) { + // entries is filled in populateFromCentralDirectory and + // never modified + Entry ze = (Entry) zipArchiveEntry; + OffsetEntry offsetEntry = ze.getOffsetEntry(); + long offset = offsetEntry.headerOffset; + archive.seek(offset + LFH_OFFSET_FOR_FILENAME_LENGTH); + archive.readFully(SHORT_BUF); + int fileNameLen = ZipShort.getValue(SHORT_BUF); + archive.readFully(SHORT_BUF); + int extraFieldLen = ZipShort.getValue(SHORT_BUF); + int lenToSkip = fileNameLen; + while (lenToSkip > 0) { + int skipped = archive.skipBytes(lenToSkip); + if (skipped <= 0) { + throw new IOException("failed to skip file name in" + + " local file header"); + } + lenToSkip -= skipped; + } + byte[] localExtraData = new byte[extraFieldLen]; + archive.readFully(localExtraData); + ze.setExtra(localExtraData); + offsetEntry.dataOffset = offset + LFH_OFFSET_FOR_FILENAME_LENGTH + + SHORT + SHORT + fileNameLen + extraFieldLen; + + if (entriesWithoutUTF8Flag.containsKey(ze)) { + NameAndComment nc = entriesWithoutUTF8Flag.get(ze); + ZipUtil.setNameAndCommentFromExtraFields(ze, nc.name, + nc.comment); + } + + String name = ze.getName(); + LinkedList entriesOfThatName = nameMap.get(name); + if (entriesOfThatName == null) { + entriesOfThatName = new LinkedList(); + nameMap.put(name, entriesOfThatName); + } + entriesOfThatName.addLast(ze); + } + } + + /** + * Checks whether the archive starts with a LFH. If it doesn't, + * it may be an empty archive. + */ + private boolean startsWithLocalFileHeader() throws IOException { + archive.seek(0); + archive.readFully(WORD_BUF); + return Arrays.equals(WORD_BUF, ZipArchiveOutputStream.LFH_SIG); + } + + /** + * InputStream that delegates requests to the underlying + * RandomAccessFile, making sure that only bytes from a certain + * range can be read. 
+ */ + private class BoundedInputStream extends InputStream { + private long remaining; + private long loc; + private boolean addDummyByte = false; + + BoundedInputStream(long start, long remaining) { + this.remaining = remaining; + loc = start; + } + + @Override + public int read() throws IOException { + if (remaining-- <= 0) { + if (addDummyByte) { + addDummyByte = false; + return 0; + } + return -1; + } + synchronized (archive) { + archive.seek(loc++); + return archive.read(); + } + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (remaining <= 0) { + if (addDummyByte) { + addDummyByte = false; + b[off] = 0; + return 1; + } + return -1; + } + + if (len <= 0) { + return 0; + } + + if (len > remaining) { + len = (int) remaining; + } + int ret = -1; + synchronized (archive) { + archive.seek(loc); + ret = archive.read(b, off, len); + } + if (ret > 0) { + loc += ret; + remaining -= ret; + } + return ret; + } + + /** + * Inflater needs an extra dummy byte for nowrap - see + * Inflater's javadocs. + */ + void addDummy() { + addDummyByte = true; + } + } + + private static final class NameAndComment { + private final byte[] name; + private final byte[] comment; + private NameAndComment(byte[] name, byte[] comment) { + this.name = name; + this.comment = comment; + } + } + + /** + * Compares two ZipArchiveEntries based on their offset within the archive. + * + *

+ * <p>Won't return any meaningful results if one of the entries
+ * isn't part of the archive at all.</p>

    + * + * @since 1.1 + */ + private final Comparator OFFSET_COMPARATOR = + new Comparator() { + public int compare(ZipArchiveEntry e1, ZipArchiveEntry e2) { + if (e1 == e2) { + return 0; + } + + Entry ent1 = e1 instanceof Entry ? (Entry) e1 : null; + Entry ent2 = e2 instanceof Entry ? (Entry) e2 : null; + if (ent1 == null) { + return 1; + } + if (ent2 == null) { + return -1; + } + long val = (ent1.getOffsetEntry().headerOffset + - ent2.getOffsetEntry().headerOffset); + return val == 0 ? 0 : val < 0 ? -1 : +1; + } + }; + + /** + * Extends ZipArchiveEntry to store the offset within the archive. + */ + private static class Entry extends ZipArchiveEntry { + + private final OffsetEntry offsetEntry; + + Entry(OffsetEntry offset) { + this.offsetEntry = offset; + } + + OffsetEntry getOffsetEntry() { + return offsetEntry; + } + + @Override + public int hashCode() { + return 3 * super.hashCode() + + (int) (offsetEntry.headerOffset % Integer.MAX_VALUE); + } + + @Override + public boolean equals(Object other) { + if (super.equals(other)) { + // super.equals would return false if other were not an Entry + Entry otherEntry = (Entry) other; + return offsetEntry.headerOffset + == otherEntry.offsetEntry.headerOffset + && offsetEntry.dataOffset + == otherEntry.offsetEntry.dataOffset; + } + return false; + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipLong.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipLong.java new file mode 100644 index 000000000..c3815441a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipLong.java @@ -0,0 +1,203 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. 
+ * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Serializable; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.BYTE_MASK; +import static org.apache.commons.compress.archivers.zip.ZipConstants.WORD; + +/** + * Utility class that represents a four byte integer with conversion + * rules for the big endian byte order of ZIP files. + * @Immutable + */ +public final class ZipLong implements Cloneable, Serializable { + private static final long serialVersionUID = 1L; + + //private static final int BYTE_BIT_SIZE = 8; + + private static final int BYTE_1 = 1; + private static final int BYTE_1_MASK = 0xFF00; + private static final int BYTE_1_SHIFT = 8; + + private static final int BYTE_2 = 2; + private static final int BYTE_2_MASK = 0xFF0000; + private static final int BYTE_2_SHIFT = 16; + + private static final int BYTE_3 = 3; + private static final long BYTE_3_MASK = 0xFF000000L; + private static final int BYTE_3_SHIFT = 24; + + private final long value; + + /** Central File Header Signature */ + public static final ZipLong CFH_SIG = new ZipLong(0X02014B50L); + + /** Local File Header Signature */ + public static final ZipLong LFH_SIG = new ZipLong(0X04034B50L); + + /** + * Data Descriptor signature. + * + *

    Actually, PKWARE uses this as marker for split/spanned + * archives and other archivers have started to use it as Data + * Descriptor signature (as well).

    + * @since 1.1 + */ + public static final ZipLong DD_SIG = new ZipLong(0X08074B50L); + + /** + * Value stored in size and similar fields if ZIP64 extensions are + * used. + * @since 1.3 + */ + static final ZipLong ZIP64_MAGIC = new ZipLong(ZipConstants.ZIP64_MAGIC); + + /** + * Marks ZIP archives that were supposed to be split or spanned + * but only needed a single segment in then end (so are actually + * neither split nor spanned). + * + *

    This is the "PK00" prefix found in some archives.

    + * @since 1.5 + */ + public static final ZipLong SINGLE_SEGMENT_SPLIT_MARKER = + new ZipLong(0X30304B50L); + + /** + * Archive extra data record signature. + * @since 1.5 + */ + public static final ZipLong AED_SIG = new ZipLong(0X08064B50L); + + /** + * Create instance from a number. + * @param value the long to store as a ZipLong + */ + public ZipLong(long value) { + this.value = value; + } + + /** + * Create instance from bytes. + * @param bytes the bytes to store as a ZipLong + */ + public ZipLong (byte[] bytes) { + this(bytes, 0); + } + + /** + * Create instance from the four bytes starting at offset. + * @param bytes the bytes to store as a ZipLong + * @param offset the offset to start + */ + public ZipLong (byte[] bytes, int offset) { + value = ZipLong.getValue(bytes, offset); + } + + /** + * Get value as four bytes in big endian byte order. + * @return value as four bytes in big endian order + */ + public byte[] getBytes() { + return ZipLong.getBytes(value); + } + + /** + * Get value as Java long. + * @return value as a long + */ + public long getValue() { + return value; + } + + /** + * Get value as four bytes in big endian byte order. 
+ * @param value the value to convert + * @return value as four bytes in big endian byte order + */ + public static byte[] getBytes(long value) { + byte[] result = new byte[WORD]; + result[0] = (byte) ((value & BYTE_MASK)); + result[BYTE_1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + result[BYTE_2] = (byte) ((value & BYTE_2_MASK) >> BYTE_2_SHIFT); + result[BYTE_3] = (byte) ((value & BYTE_3_MASK) >> BYTE_3_SHIFT); + return result; + } + + /** + * Helper method to get the value as a Java long from four bytes starting at given array offset + * @param bytes the array of bytes + * @param offset the offset to start + * @return the corresponding Java long value + */ + public static long getValue(byte[] bytes, int offset) { + long value = (bytes[offset + BYTE_3] << BYTE_3_SHIFT) & BYTE_3_MASK; + value += (bytes[offset + BYTE_2] << BYTE_2_SHIFT) & BYTE_2_MASK; + value += (bytes[offset + BYTE_1] << BYTE_1_SHIFT) & BYTE_1_MASK; + value += (bytes[offset] & BYTE_MASK); + return value; + } + + /** + * Helper method to get the value as a Java long from a four-byte array + * @param bytes the array of bytes + * @return the corresponding Java long value + */ + public static long getValue(byte[] bytes) { + return getValue(bytes, 0); + } + + /** + * Override to make two instances with same value equal. + * @param o an object to compare + * @return true if the objects are equal + */ + @Override + public boolean equals(Object o) { + if (o == null || !(o instanceof ZipLong)) { + return false; + } + return value == ((ZipLong) o).getValue(); + } + + /** + * Override to make two instances with same value equal. 
+ * @return the value stored in the ZipLong + */ + @Override + public int hashCode() { + return (int) value; + } + + @Override + public Object clone() { + try { + return super.clone(); + } catch (CloneNotSupportedException cnfe) { + // impossible + throw new RuntimeException(cnfe); + } + } + + @Override + public String toString() { + return "ZipLong value: " + value; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipMethod.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipMethod.java new file mode 100644 index 000000000..4dafafb31 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipMethod.java @@ -0,0 +1,207 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Map; +import java.util.zip.ZipEntry; + +/** + * List of known compression methods + * + * Many of these methods are currently not supported by commons compress + * + * @since 1.5 + */ +public enum ZipMethod { + + /** + * Compression method 0 for uncompressed entries. 
+ * + * @see ZipEntry#STORED + */ + STORED(ZipEntry.STORED), + + /** + * UnShrinking. + * dynamic Lempel-Ziv-Welch-Algorithm + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + UNSHRINKING(1), + + /** + * Reduced with compression factor 1. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + EXPANDING_LEVEL_1(2), + + /** + * Reduced with compression factor 2. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + EXPANDING_LEVEL_2(3), + + /** + * Reduced with compression factor 3. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + EXPANDING_LEVEL_3(4), + + /** + * Reduced with compression factor 4. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + EXPANDING_LEVEL_4(5), + + /** + * Imploding. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + IMPLODING(6), + + /** + * Tokenization. + * + * @see Explanation of fields: compression + * method: (2 bytes) + */ + TOKENIZATION(7), + + /** + * Compression method 8 for compressed (deflated) entries. + * + * @see ZipEntry#DEFLATED + */ + DEFLATED(ZipEntry.DEFLATED), + + /** + * Compression Method 9 for enhanced deflate. + * + * @see http://www.winzip.com/wz54.htm + */ + ENHANCED_DEFLATED(9), + + /** + * PKWARE Data Compression Library Imploding. + * + * @see http://www.winzip.com/wz54.htm + */ + PKWARE_IMPLODING(10), + + /** + * Compression Method 12 for bzip2. + * + * @see http://www.winzip.com/wz54.htm + */ + BZIP2(12), + + /** + * Compression Method 14 for LZMA. + * + * @see http://www.7-zip.org/sdk.html + * @see http://www.winzip.com/wz54.htm + */ + LZMA(14), + + + /** + * Compression Method 96 for Jpeg compression. + * + * @see http://www.winzip.com/wz54.htm + */ + JPEG(96), + + /** + * Compression Method 97 for WavPack. + * + * @see http://www.winzip.com/wz54.htm + */ + WAVPACK(97), + + /** + * Compression Method 98 for PPMd. 
+ * + * @see http://www.winzip.com/wz54.htm + */ + PPMD(98), + + + /** + * Compression Method 99 for AES encryption. + * + * @see http://www.winzip.com/wz54.htm + */ + AES_ENCRYPTED(99), + + /** + * Unknown compression method. + */ + UNKNOWN(-1); + + private final int code; + + private static final Map codeToEnum; + + static { + Map cte = new HashMap(); + for (ZipMethod method : values()) { + cte.put(Integer.valueOf(method.getCode()), method); + } + codeToEnum = Collections.unmodifiableMap(cte); + } + + /** + * private constructor for enum style class. + */ + ZipMethod(int code) { + this.code = code; + } + + /** + * the code of the compression method. + * + * @see ZipArchiveEntry#getMethod() + * + * @return an integer code for the method + */ + public int getCode() { + return code; + } + + + /** + * returns the {@link ZipMethod} for the given code or null if the + * method is not known. + */ + public static ZipMethod getMethodByCode(int code) { + return codeToEnum.get(Integer.valueOf(code)); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipShort.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipShort.java new file mode 100644 index 000000000..b74db86ab --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipShort.java @@ -0,0 +1,150 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.Serializable; + +import static org.apache.commons.compress.archivers.zip.ZipConstants.BYTE_MASK; + +/** + * Utility class that represents a two byte integer with conversion + * rules for the big endian byte order of ZIP files. + * @Immutable + */ +public final class ZipShort implements Cloneable, Serializable { + private static final long serialVersionUID = 1L; + + private static final int BYTE_1_MASK = 0xFF00; + private static final int BYTE_1_SHIFT = 8; + + private final int value; + + /** + * Create instance from a number. + * @param value the int to store as a ZipShort + */ + public ZipShort (int value) { + this.value = value; + } + + /** + * Create instance from bytes. + * @param bytes the bytes to store as a ZipShort + */ + public ZipShort (byte[] bytes) { + this(bytes, 0); + } + + /** + * Create instance from the two bytes starting at offset. + * @param bytes the bytes to store as a ZipShort + * @param offset the offset to start + */ + public ZipShort (byte[] bytes, int offset) { + value = ZipShort.getValue(bytes, offset); + } + + /** + * Get value as two bytes in big endian byte order. + * @return the value as a a two byte array in big endian byte order + */ + public byte[] getBytes() { + byte[] result = new byte[2]; + result[0] = (byte) (value & BYTE_MASK); + result[1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + return result; + } + + /** + * Get value as Java int. 
+ * @return value as a Java int + */ + public int getValue() { + return value; + } + + /** + * Get value as two bytes in big endian byte order. + * @param value the Java int to convert to bytes + * @return the converted int as a byte array in big endian byte order + */ + public static byte[] getBytes(int value) { + byte[] result = new byte[2]; + result[0] = (byte) (value & BYTE_MASK); + result[1] = (byte) ((value & BYTE_1_MASK) >> BYTE_1_SHIFT); + return result; + } + + /** + * Helper method to get the value as a java int from two bytes starting at given array offset + * @param bytes the array of bytes + * @param offset the offset to start + * @return the corresponding java int value + */ + public static int getValue(byte[] bytes, int offset) { + int value = (bytes[offset + 1] << BYTE_1_SHIFT) & BYTE_1_MASK; + value += (bytes[offset] & BYTE_MASK); + return value; + } + + /** + * Helper method to get the value as a java int from a two-byte array + * @param bytes the array of bytes + * @return the corresponding java int value + */ + public static int getValue(byte[] bytes) { + return getValue(bytes, 0); + } + + /** + * Override to make two instances with same value equal. + * @param o an object to compare + * @return true if the objects are equal + */ + @Override + public boolean equals(Object o) { + if (o == null || !(o instanceof ZipShort)) { + return false; + } + return value == ((ZipShort) o).getValue(); + } + + /** + * Override to make two instances with same value equal. 
+ * @return the value stored in the ZipShort + */ + @Override + public int hashCode() { + return value; + } + + @Override + public Object clone() { + try { + return super.clone(); + } catch (CloneNotSupportedException cnfe) { + // impossible + throw new RuntimeException(cnfe); + } + } + + @Override + public String toString() { + return "ZipShort value: " + value; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipUtil.java b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipUtil.java new file mode 100644 index 000000000..a138f6e40 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/ZipUtil.java @@ -0,0 +1,332 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.archivers.zip; + +import java.io.IOException; +import java.math.BigInteger; +import java.util.Calendar; +import java.util.Date; +import java.util.zip.CRC32; +import java.util.zip.ZipEntry; + +/** + * Utility class for handling DOS and Java time conversions. + * @Immutable + */ +public abstract class ZipUtil { + /** + * Smallest date/time ZIP can handle. 
+ */ + private static final byte[] DOS_TIME_MIN = ZipLong.getBytes(0x00002100L); + + /** + * Convert a Date object to a DOS date/time field. + * @param time the Date to convert + * @return the date as a ZipLong + */ + public static ZipLong toDosTime(Date time) { + return new ZipLong(toDosTime(time.getTime())); + } + + /** + * Convert a Date object to a DOS date/time field. + * + *

    Stolen from InfoZip's fileio.c

    + * @param t number of milliseconds since the epoch + * @return the date as a byte array + */ + public static byte[] toDosTime(long t) { + Calendar c = Calendar.getInstance(); + c.setTimeInMillis(t); + + int year = c.get(Calendar.YEAR); + if (year < 1980) { + return copy(DOS_TIME_MIN); // stop callers from changing the array + } + int month = c.get(Calendar.MONTH) + 1; + long value = ((year - 1980) << 25) + | (month << 21) + | (c.get(Calendar.DAY_OF_MONTH) << 16) + | (c.get(Calendar.HOUR_OF_DAY) << 11) + | (c.get(Calendar.MINUTE) << 5) + | (c.get(Calendar.SECOND) >> 1); + return ZipLong.getBytes(value); + } + + /** + * Assumes a negative integer really is a positive integer that + * has wrapped around and re-creates the original value. + * + * @param i the value to treat as unsigned int. + * @return the unsigned int as a long. + */ + public static long adjustToLong(int i) { + if (i < 0) { + return 2 * ((long) Integer.MAX_VALUE) + 2 + i; + } else { + return i; + } + } + + /** + * Reverses a byte[] array. Reverses in-place (thus provided array is + * mutated), but also returns same for convenience. + * + * @param array to reverse (mutated in-place, but also returned for + * convenience). + * + * @return the reversed array (mutated in-place, but also returned for + * convenience). + * @since 1.5 + */ + public static byte[] reverse(final byte[] array) { + final int z = array.length - 1; // position of last element + for (int i = 0; i < array.length / 2; i++) { + byte x = array[i]; + array[i] = array[z - i]; + array[z - i] = x; + } + return array; + } + + /** + * Converts a BigInteger into a long, and blows up + * (NumberFormatException) if the BigInteger is too big. + * + * @param big BigInteger to convert. + * @return long representation of the BigInteger. + */ + static long bigToLong(BigInteger big) { + if (big.bitLength() <= 63) { // bitLength() doesn't count the sign bit. 
+ return big.longValue(); + } else { + throw new NumberFormatException("The BigInteger cannot fit inside a 64 bit java long: [" + big + "]"); + } + } + + /** + *

    + * Converts a long into a BigInteger. Negative numbers between -1 and + * -2^31 are treated as unsigned 32 bit (e.g., positive) integers. + * Negative numbers below -2^31 cause an IllegalArgumentException + * to be thrown. + *

    + * + * @param l long to convert to BigInteger. + * @return BigInteger representation of the provided long. + */ + static BigInteger longToBig(long l) { + if (l < Integer.MIN_VALUE) { + throw new IllegalArgumentException("Negative longs < -2^31 not permitted: [" + l + "]"); + } else if (l < 0 && l >= Integer.MIN_VALUE) { + // If someone passes in a -2, they probably mean 4294967294 + // (For example, Unix UID/GID's are 32 bit unsigned.) + l = ZipUtil.adjustToLong((int) l); + } + return BigInteger.valueOf(l); + } + + /** + * Converts a signed byte into an unsigned integer representation + * (e.g., -1 becomes 255). + * + * @param b byte to convert to int + * @return int representation of the provided byte + * @since 1.5 + */ + public static int signedByteToUnsignedInt(byte b) { + if (b >= 0) { + return b; + } else { + return 256 + b; + } + } + + /** + * Converts an unsigned integer to a signed byte (e.g., 255 becomes -1). + * + * @param i integer to convert to byte + * @return byte representation of the provided int + * @throws IllegalArgumentException if the provided integer is not inside the range [0,255]. + * @since 1.5 + */ + public static byte unsignedIntToSignedByte(int i) { + if (i > 255 || i < 0) { + throw new IllegalArgumentException("Can only convert non-negative integers between [0,255] to byte: [" + i + "]"); + } + if (i < 128) { + return (byte) i; + } else { + return (byte) (i - 256); + } + } + + /** + * Convert a DOS date/time field to a Date object. + * + * @param zipDosTime contains the stored DOS time. + * @return a Date instance corresponding to the given time. + */ + public static Date fromDosTime(ZipLong zipDosTime) { + long dosTime = zipDosTime.getValue(); + return new Date(dosToJavaTime(dosTime)); + } + + /** + * Converts DOS time to Java time (number of milliseconds since + * epoch). 
+ */ + public static long dosToJavaTime(long dosTime) { + Calendar cal = Calendar.getInstance(); + // CheckStyle:MagicNumberCheck OFF - no point + cal.set(Calendar.YEAR, (int) ((dosTime >> 25) & 0x7f) + 1980); + cal.set(Calendar.MONTH, (int) ((dosTime >> 21) & 0x0f) - 1); + cal.set(Calendar.DATE, (int) (dosTime >> 16) & 0x1f); + cal.set(Calendar.HOUR_OF_DAY, (int) (dosTime >> 11) & 0x1f); + cal.set(Calendar.MINUTE, (int) (dosTime >> 5) & 0x3f); + cal.set(Calendar.SECOND, (int) (dosTime << 1) & 0x3e); + cal.set(Calendar.MILLISECOND, 0); + // CheckStyle:MagicNumberCheck ON + return cal.getTime().getTime(); + } + + /** + * If the entry has Unicode*ExtraFields and the CRCs of the + * names/comments match those of the extra fields, transfer the + * known Unicode values from the extra field. + */ + static void setNameAndCommentFromExtraFields(ZipArchiveEntry ze, + byte[] originalNameBytes, + byte[] commentBytes) { + UnicodePathExtraField name = (UnicodePathExtraField) + ze.getExtraField(UnicodePathExtraField.UPATH_ID); + String originalName = ze.getName(); + String newName = getUnicodeStringIfOriginalMatches(name, + originalNameBytes); + if (newName != null && !originalName.equals(newName)) { + ze.setName(newName); + } + + if (commentBytes != null && commentBytes.length > 0) { + UnicodeCommentExtraField cmt = (UnicodeCommentExtraField) + ze.getExtraField(UnicodeCommentExtraField.UCOM_ID); + String newComment = + getUnicodeStringIfOriginalMatches(cmt, commentBytes); + if (newComment != null) { + ze.setComment(newComment); + } + } + } + + /** + * If the stored CRC matches the one of the given name, return the + * Unicode name of the given field. + * + *

    If the field is null or the CRCs don't match, return null + * instead.

    + */ + private static + String getUnicodeStringIfOriginalMatches(AbstractUnicodeExtraField f, + byte[] orig) { + if (f != null) { + CRC32 crc32 = new CRC32(); + crc32.update(orig); + long origCRC32 = crc32.getValue(); + + if (origCRC32 == f.getNameCRC32()) { + try { + return ZipEncodingHelper + .UTF8_ZIP_ENCODING.decode(f.getUnicodeName()); + } catch (IOException ex) { + // UTF-8 unsupported? should be impossible the + // Unicode*ExtraField must contain some bad bytes + + // TODO log this anywhere? + return null; + } + } + } + return null; + } + + /** + * Create a copy of the given array - or return null if the + * argument is null. + */ + static byte[] copy(byte[] from) { + if (from != null) { + byte[] to = new byte[from.length]; + System.arraycopy(from, 0, to, 0, to.length); + return to; + } + return null; + } + + /** + * Whether this library is able to read or write the given entry. + */ + static boolean canHandleEntryData(ZipArchiveEntry entry) { + return supportsEncryptionOf(entry) && supportsMethodOf(entry); + } + + /** + * Whether this library supports the encryption used by the given + * entry. + * + * @return true if the entry isn't encrypted at all + */ + private static boolean supportsEncryptionOf(ZipArchiveEntry entry) { + return !entry.getGeneralPurposeBit().usesEncryption(); + } + + /** + * Whether this library supports the compression method used by + * the given entry. + * + * @return true if the compression method is STORED or DEFLATED + */ + private static boolean supportsMethodOf(ZipArchiveEntry entry) { + return entry.getMethod() == ZipEntry.STORED + || entry.getMethod() == ZipMethod.UNSHRINKING.getCode() + || entry.getMethod() == ZipMethod.IMPLODING.getCode() + || entry.getMethod() == ZipEntry.DEFLATED; + } + + /** + * Checks whether the entry requires features not (yet) supported + * by the library and throws an exception if it does. 
+ */ + static void checkRequestedFeatures(ZipArchiveEntry ze) + throws UnsupportedZipFeatureException { + if (!supportsEncryptionOf(ze)) { + throw + new UnsupportedZipFeatureException(UnsupportedZipFeatureException + .Feature.ENCRYPTION, ze); + } + if (!supportsMethodOf(ze)) { + ZipMethod m = ZipMethod.getMethodByCode(ze.getMethod()); + if (m == null) { + throw + new UnsupportedZipFeatureException(UnsupportedZipFeatureException + .Feature.METHOD, ze); + } else { + throw new UnsupportedZipFeatureException(m, ze); + } + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/package.html new file mode 100644 index 000000000..521687be6 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/archivers/zip/package.html @@ -0,0 +1,24 @@ + + + +

    Provides stream classes for reading and writing archives using + the ZIP format.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/changes/Change.java b/Tools/Cache Editor/src/org/apache/commons/compress/changes/Change.java new file mode 100644 index 000000000..c1b0a4c50 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/changes/Change.java @@ -0,0 +1,96 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.changes; + +import java.io.InputStream; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * Change holds meta information about a change. + * + * @Immutable + */ +class Change { + private final String targetFile; // entry name to delete + private final ArchiveEntry entry; // new entry to add + private final InputStream input; // source for new entry + private final boolean replaceMode; // change should replaceMode existing entries + + // Type of change + private final int type; + // Possible type values + static final int TYPE_DELETE = 1; + static final int TYPE_ADD = 2; + static final int TYPE_MOVE = 3; // NOT USED + static final int TYPE_DELETE_DIR = 4; + + /** + * Constructor. Takes the filename of the file to be deleted + * from the stream as argument. 
+ * @param pFilename the filename of the file to delete + */ + Change(final String pFilename, int type) { + if(pFilename == null) { + throw new NullPointerException(); + } + this.targetFile = pFilename; + this.type = type; + this.input = null; + this.entry = null; + this.replaceMode = true; + } + + /** + * Construct a change which adds an entry. + * + * @param pEntry the entry details + * @param pInput the InputStream for the entry data + */ + Change(final ArchiveEntry pEntry, final InputStream pInput, boolean replace) { + if(pEntry == null || pInput == null) { + throw new NullPointerException(); + } + this.entry = pEntry; + this.input = pInput; + type = TYPE_ADD; + targetFile = null; + this.replaceMode = replace; + } + + ArchiveEntry getEntry() { + return entry; + } + + InputStream getInput() { + return input; + } + + String targetFile() { + return targetFile; + } + + int type() { + return type; + } + + boolean isReplaceMode() { + return replaceMode; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSet.java b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSet.java new file mode 100644 index 000000000..31155f627 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSet.java @@ -0,0 +1,163 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.changes; + +import java.io.InputStream; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * ChangeSet collects and performs changes to an archive. + * Putting delete changes in this ChangeSet from multiple threads can + * cause conflicts. + * + * @NotThreadSafe + */ +public final class ChangeSet { + + private final Set changes = new LinkedHashSet(); + + /** + * Deletes the file with the filename from the archive. + * + * @param filename + * the filename of the file to delete + */ + public void delete(final String filename) { + addDeletion(new Change(filename, Change.TYPE_DELETE)); + } + + /** + * Deletes the directory tree from the archive. + * + * @param dirName + * the name of the directory tree to delete + */ + public void deleteDir(final String dirName) { + addDeletion(new Change(dirName, Change.TYPE_DELETE_DIR)); + } + + /** + * Adds a new archive entry to the archive. + * + * @param pEntry + * the entry to add + * @param pInput + * the datastream to add + */ + public void add(final ArchiveEntry pEntry, final InputStream pInput) { + this.add(pEntry, pInput, true); + } + + /** + * Adds a new archive entry to the archive. + * If replace is set to true, this change will replace all other additions + * done in this ChangeSet and all existing entries in the original stream. 
+ * + * @param pEntry + * the entry to add + * @param pInput + * the datastream to add + * @param replace + * indicates the this change should replace existing entries + */ + public void add(final ArchiveEntry pEntry, final InputStream pInput, final boolean replace) { + addAddition(new Change(pEntry, pInput, replace)); + } + + /** + * Adds an addition change. + * + * @param pChange + * the change which should result in an addition + */ + private void addAddition(Change pChange) { + if (Change.TYPE_ADD != pChange.type() || + pChange.getInput() == null) { + return; + } + + if (!changes.isEmpty()) { + for (Iterator it = changes.iterator(); it.hasNext();) { + Change change = it.next(); + if (change.type() == Change.TYPE_ADD + && change.getEntry() != null) { + ArchiveEntry entry = change.getEntry(); + + if(entry.equals(pChange.getEntry())) { + if(pChange.isReplaceMode()) { + it.remove(); + changes.add(pChange); + return; + } else { + // do not add this change + return; + } + } + } + } + } + changes.add(pChange); + } + + /** + * Adds an delete change. + * + * @param pChange + * the change which should result in a deletion + */ + private void addDeletion(Change pChange) { + if ((Change.TYPE_DELETE != pChange.type() && + Change.TYPE_DELETE_DIR != pChange.type()) || + pChange.targetFile() == null) { + return; + } + String source = pChange.targetFile(); + + if (!changes.isEmpty()) { + for (Iterator it = changes.iterator(); it.hasNext();) { + Change change = it.next(); + if (change.type() == Change.TYPE_ADD + && change.getEntry() != null) { + String target = change.getEntry().getName(); + + if (Change.TYPE_DELETE == pChange.type() && source.equals(target)) { + it.remove(); + } else if (Change.TYPE_DELETE_DIR == pChange.type() && + target.matches(source + "/.*")) { + it.remove(); + } + } + } + } + changes.add(pChange); + } + + /** + * Returns the list of changes as a copy. Changes on this set + * are not reflected on this ChangeSet and vice versa. 
+ * @return the changes as a copy + */ + Set getChanges() { + return new LinkedHashSet(changes); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetPerformer.java b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetPerformer.java new file mode 100644 index 000000000..8ed861ce8 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetPerformer.java @@ -0,0 +1,277 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.changes; + +import java.io.IOException; +import java.io.InputStream; +import java.util.Enumeration; +import java.util.Iterator; +import java.util.LinkedHashSet; +import java.util.Set; + +import org.apache.commons.compress.archivers.ArchiveEntry; +import org.apache.commons.compress.archivers.ArchiveInputStream; +import org.apache.commons.compress.archivers.ArchiveOutputStream; +import org.apache.commons.compress.archivers.zip.ZipArchiveEntry; +import org.apache.commons.compress.archivers.zip.ZipFile; +import org.apache.commons.compress.utils.IOUtils; + +/** + * Performs ChangeSet operations on a stream. + * This class is thread safe and can be used multiple times. 
+ * It operates on a copy of the ChangeSet. If the ChangeSet changes, + * a new Performer must be created. + * + * @ThreadSafe + * @Immutable + */ +public class ChangeSetPerformer { + private final Set changes; + + /** + * Constructs a ChangeSetPerformer with the changes from this ChangeSet + * @param changeSet the ChangeSet which operations are used for performing + */ + public ChangeSetPerformer(final ChangeSet changeSet) { + changes = changeSet.getChanges(); + } + + /** + * Performs all changes collected in this ChangeSet on the input stream and + * streams the result to the output stream. Perform may be called more than once. + * + * This method finishes the stream, no other entries should be added + * after that. + * + * @param in + * the InputStream to perform the changes on + * @param out + * the resulting OutputStream with all modifications + * @throws IOException + * if an read/write error occurs + * @return the results of this operation + */ + public ChangeSetResults perform(ArchiveInputStream in, ArchiveOutputStream out) + throws IOException { + return perform(new ArchiveInputStreamIterator(in), out); + } + + /** + * Performs all changes collected in this ChangeSet on the ZipFile and + * streams the result to the output stream. Perform may be called more than once. + * + * This method finishes the stream, no other entries should be added + * after that. + * + * @param in + * the ZipFile to perform the changes on + * @param out + * the resulting OutputStream with all modifications + * @throws IOException + * if an read/write error occurs + * @return the results of this operation + * @since 1.5 + */ + public ChangeSetResults perform(ZipFile in, ArchiveOutputStream out) + throws IOException { + return perform(new ZipFileIterator(in), out); + } + + /** + * Performs all changes collected in this ChangeSet on the input entries and + * streams the result to the output stream. 
+ * + * This method finishes the stream, no other entries should be added + * after that. + * + * @param entryIterator + * the entries to perform the changes on + * @param out + * the resulting OutputStream with all modifications + * @throws IOException + * if an read/write error occurs + * @return the results of this operation + */ + private ChangeSetResults perform(ArchiveEntryIterator entryIterator, + ArchiveOutputStream out) + throws IOException { + ChangeSetResults results = new ChangeSetResults(); + + Set workingSet = new LinkedHashSet(changes); + + for (Iterator it = workingSet.iterator(); it.hasNext();) { + Change change = it.next(); + + if (change.type() == Change.TYPE_ADD && change.isReplaceMode()) { + copyStream(change.getInput(), out, change.getEntry()); + it.remove(); + results.addedFromChangeSet(change.getEntry().getName()); + } + } + + while (entryIterator.hasNext()) { + ArchiveEntry entry = entryIterator.next(); + boolean copy = true; + + for (Iterator it = workingSet.iterator(); it.hasNext();) { + Change change = it.next(); + + final int type = change.type(); + final String name = entry.getName(); + if (type == Change.TYPE_DELETE && name != null) { + if (name.equals(change.targetFile())) { + copy = false; + it.remove(); + results.deleted(name); + break; + } + } else if (type == Change.TYPE_DELETE_DIR && name != null) { + // don't combine ifs to make future extensions more easy + if (name.startsWith(change.targetFile() + "/")) { // NOPMD + copy = false; + results.deleted(name); + break; + } + } + } + + if (copy + && !isDeletedLater(workingSet, entry) + && !results.hasBeenAdded(entry.getName())) { + copyStream(entryIterator.getInputStream(), out, entry); + results.addedFromStream(entry.getName()); + } + } + + // Adds files which hasn't been added from the original and do not have replace mode on + for (Iterator it = workingSet.iterator(); it.hasNext();) { + Change change = it.next(); + + if (change.type() == Change.TYPE_ADD && + 
!change.isReplaceMode() && + !results.hasBeenAdded(change.getEntry().getName())) { + copyStream(change.getInput(), out, change.getEntry()); + it.remove(); + results.addedFromChangeSet(change.getEntry().getName()); + } + } + out.finish(); + return results; + } + + /** + * Checks if an ArchiveEntry is deleted later in the ChangeSet. This is + * necessary if an file is added with this ChangeSet, but later became + * deleted in the same set. + * + * @param entry + * the entry to check + * @return true, if this entry has an deletion change later, false otherwise + */ + private boolean isDeletedLater(Set workingSet, ArchiveEntry entry) { + String source = entry.getName(); + + if (!workingSet.isEmpty()) { + for (Change change : workingSet) { + final int type = change.type(); + String target = change.targetFile(); + if (type == Change.TYPE_DELETE && source.equals(target)) { + return true; + } + + if (type == Change.TYPE_DELETE_DIR && source.startsWith(target + "/")){ + return true; + } + } + } + return false; + } + + /** + * Copies the ArchiveEntry to the Output stream + * + * @param in + * the stream to read the data from + * @param out + * the stream to write the data to + * @param entry + * the entry to write + * @throws IOException + * if data cannot be read or written + */ + private void copyStream(InputStream in, ArchiveOutputStream out, + ArchiveEntry entry) throws IOException { + out.putArchiveEntry(entry); + IOUtils.copy(in, out); + out.closeArchiveEntry(); + } + + /** + * Used in perform to abstract out getting entries and streams for + * those entries. + * + *

    Iterator#hasNext is not allowed to throw exceptions that's + * why we can't use Iterator<ArchiveEntry> directly - + * otherwise we'd need to convert exceptions thrown in + * ArchiveInputStream#getNextEntry.

    + */ + interface ArchiveEntryIterator { + boolean hasNext() throws IOException; + ArchiveEntry next(); + InputStream getInputStream() throws IOException; + } + + private static class ArchiveInputStreamIterator + implements ArchiveEntryIterator { + private final ArchiveInputStream in; + private ArchiveEntry next; + ArchiveInputStreamIterator(ArchiveInputStream in) { + this.in = in; + } + public boolean hasNext() throws IOException { + return (next = in.getNextEntry()) != null; + } + public ArchiveEntry next() { + return next; + } + public InputStream getInputStream() { + return in; + } + } + + private static class ZipFileIterator + implements ArchiveEntryIterator { + private final ZipFile in; + private final Enumeration nestedEnum; + private ZipArchiveEntry current; + ZipFileIterator(ZipFile in) { + this.in = in; + nestedEnum = in.getEntriesInPhysicalOrder(); + } + public boolean hasNext() { + return nestedEnum.hasMoreElements(); + } + public ArchiveEntry next() { + return current = nestedEnum.nextElement(); + } + public InputStream getInputStream() throws IOException { + return in.getInputStream(current); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetResults.java b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetResults.java new file mode 100644 index 000000000..d4f4cc20a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/changes/ChangeSetResults.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.changes; + +import java.util.ArrayList; +import java.util.List; + +/** + * Stores the results of an performed ChangeSet operation. + */ +public class ChangeSetResults { + private final List addedFromChangeSet = new ArrayList(); + private final List addedFromStream = new ArrayList(); + private final List deleted = new ArrayList(); + + /** + * Adds the filename of a recently deleted file to the result list. + * @param fileName the file which has been deleted + */ + void deleted(String fileName) { + deleted.add(fileName); + } + + /** + * Adds the name of a file to the result list which has been + * copied from the source stream to the target stream. 
+ * @param fileName the file name which has been added from the original stream + */ + void addedFromStream(String fileName) { + addedFromStream.add(fileName); + } + + /** + * Adds the name of a file to the result list which has been + * copied from the changeset to the target stream + * @param fileName the name of the file + */ + void addedFromChangeSet(String fileName) { + addedFromChangeSet.add(fileName); + } + + /** + * Returns a list of filenames which has been added from the changeset + * @return the list of filenames + */ + public List getAddedFromChangeSet() { + return addedFromChangeSet; + } + + /** + * Returns a list of filenames which has been added from the original stream + * @return the list of filenames + */ + public List getAddedFromStream() { + return addedFromStream; + } + + /** + * Returns a list of filenames which has been deleted + * @return the list of filenames + */ + public List getDeleted() { + return deleted; + } + + /** + * Checks if an filename already has been added to the result list + * @param filename the filename to check + * @return true, if this filename already has been added + */ + boolean hasBeenAdded(String filename) { + if(addedFromChangeSet.contains(filename) || addedFromStream.contains(filename)) { + return true; + } + return false; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/changes/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/changes/package.html new file mode 100644 index 000000000..4ba3e87d0 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/changes/package.html @@ -0,0 +1,27 @@ + + + +

    EXPERIMENTAL support for changesets that are applied to + archives.

    + +

    This API is considered unstable and may be modified or even + removed in future releases.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorException.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorException.java new file mode 100644 index 000000000..aea05b32b --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorException.java @@ -0,0 +1,51 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors; + +/** + * Compressor related exception + */ +public class CompressorException extends Exception { + + /** Serial */ + private static final long serialVersionUID = -2932901310255908814L; + + /** + * Constructs a new exception with the specified detail message. The cause + * is not initialized. + * + * @param message + * the detail message + */ + public CompressorException(String message) { + super(message); + } + + /** + * Constructs a new exception with the specified detail message and cause. 
+ * + * @param message + * the detail message + * @param cause + * the cause + */ + public CompressorException(String message, Throwable cause) { + super(message, cause); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorInputStream.java new file mode 100644 index 000000000..52b161ba5 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorInputStream.java @@ -0,0 +1,80 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors; + +import java.io.InputStream; + +public abstract class CompressorInputStream extends InputStream { + private long bytesRead = 0; + + /** + * Increments the counter of already read bytes. + * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + * + * @since 1.1 + */ + protected void count(int read) { + count((long) read); + } + + /** + * Increments the counter of already read bytes. 
+ * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + */ + protected void count(long read) { + if (read != -1) { + bytesRead = bytesRead + read; + } + } + + /** + * Decrements the counter of already read bytes. + * + * @param pushedBack the number of bytes pushed back. + * @since 1.7 + */ + protected void pushedBackBytes(long pushedBack) { + bytesRead -= pushedBack; + } + + /** + * Returns the current number of bytes read from this stream. + * @return the number of read bytes + * @deprecated this method may yield wrong results for large + * archives, use #getBytesRead instead + */ + @Deprecated + public int getCount() { + return (int) bytesRead; + } + + /** + * Returns the current number of bytes read from this stream. + * @return the number of read bytes + * + * @since 1.1 + */ + public long getBytesRead() { + return bytesRead; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorOutputStream.java new file mode 100644 index 000000000..51eee9cee --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorOutputStream.java @@ -0,0 +1,25 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors; + +import java.io.OutputStream; + +public abstract class CompressorOutputStream extends OutputStream { + // TODO +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorStreamFactory.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorStreamFactory.java new file mode 100644 index 000000000..d2ae16d13 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/CompressorStreamFactory.java @@ -0,0 +1,268 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.compressors; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorInputStream; +import org.apache.commons.compress.compressors.gzip.GzipCompressorOutputStream; +import org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream; +import org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream; +import org.apache.commons.compress.compressors.snappy.FramedSnappyCompressorInputStream; +import org.apache.commons.compress.compressors.snappy.SnappyCompressorInputStream; +import org.apache.commons.compress.compressors.z.ZCompressorInputStream; +import org.apache.commons.compress.utils.IOUtils; + +/** + *

    Factory to create Compressor[In|Out]putStreams from names. To add other + * implementations you should extend CompressorStreamFactory and override the + * appropriate methods (and call their implementation from super of course).

    + * + * Example (Compressing a file): + * + *
    + * final OutputStream out = new FileOutputStream(output); 
    + * CompressorOutputStream cos = 
    + *      new CompressorStreamFactory().createCompressorOutputStream(CompressorStreamFactory.BZIP2, out);
    + * IOUtils.copy(new FileInputStream(input), cos);
    + * cos.close();
    + * 
    + * + * Example (Decompressing a file): + *
    + * final InputStream is = new FileInputStream(input); 
    + * CompressorInputStream in = 
    + *      new CompressorStreamFactory().createCompressorInputStream(CompressorStreamFactory.BZIP2, is);
    + * IOUtils.copy(in, new FileOutputStream(output));
    + * in.close();
    + * 
    + * + * @Immutable + */ +public class CompressorStreamFactory { + + /** + * Constant used to identify the BZIP2 compression algorithm. + * @since 1.1 + */ + public static final String BZIP2 = "bzip2"; + + /** + * Constant used to identify the GZIP compression algorithm. + * @since 1.1 + */ + public static final String GZIP = "gz"; + /** + * Constant used to identify the PACK200 compression algorithm. + * @since 1.3 + */ + public static final String PACK200 = "pack200"; + + /** + * Constant used to identify the XZ compression method. + * @since 1.4 + */ + public static final String XZ = "xz"; + + /** + * Constant used to identify the LZMA compression method. + * @since 1.6 + */ + public static final String LZMA = "lzma"; + + /** + * Constant used to identify the "framed" Snappy compression method. + * @since 1.7 + */ + public static final String SNAPPY_FRAMED = "snappy-framed"; + + /** + * Constant used to identify the "raw" Snappy compression method. + * @since 1.7 + */ + public static final String SNAPPY_RAW = "snappy-raw"; + + /** + * Constant used to identify the traditional Unix compress method. + * @since 1.7 + */ + public static final String Z = "z"; + + private boolean decompressConcatenated = false; + + /** + * Whether to decompress the full input or only the first stream + * in formats supporting multiple concatenated input streams. + * + *

    This setting applies to the gzip, bzip2 and xz formats only.

    + * + * @param decompressConcatenated + * if true, decompress until the end of the + * input; if false, stop after the first + * stream and leave the input position to point + * to the next byte after the stream + * @since 1.5 + */ + public void setDecompressConcatenated(boolean decompressConcatenated) { + this.decompressConcatenated = decompressConcatenated; + } + + /** + * Create an compressor input stream from an input stream, autodetecting + * the compressor type from the first few bytes of the stream. The InputStream + * must support marks, like BufferedInputStream. + * + * @param in the input stream + * @return the compressor input stream + * @throws CompressorException if the compressor name is not known + * @throws IllegalArgumentException if the stream is null or does not support mark + * @since 1.1 + */ + public CompressorInputStream createCompressorInputStream(final InputStream in) + throws CompressorException { + if (in == null) { + throw new IllegalArgumentException("Stream must not be null."); + } + + if (!in.markSupported()) { + throw new IllegalArgumentException("Mark is not supported."); + } + + final byte[] signature = new byte[12]; + in.mark(signature.length); + try { + int signatureLength = IOUtils.readFully(in, signature); + in.reset(); + + if (BZip2CompressorInputStream.matches(signature, signatureLength)) { + return new BZip2CompressorInputStream(in, decompressConcatenated); + } + + if (GzipCompressorInputStream.matches(signature, signatureLength)) { + return new GzipCompressorInputStream(in, decompressConcatenated); + } + + + if (Pack200CompressorInputStream.matches(signature, signatureLength)) { + return new Pack200CompressorInputStream(in); + } + + if (FramedSnappyCompressorInputStream.matches(signature, signatureLength)) { + return new FramedSnappyCompressorInputStream(in); + } + + } catch (IOException e) { + throw new CompressorException("Failed to detect Compressor from InputStream.", e); + } + + throw new CompressorException("No 
Compressor found for the stream signature."); + } + + /** + * Create a compressor input stream from a compressor name and an input stream. + * + * @param name of the compressor, i.e. "gz", "bzip2", "xz", + * "lzma", "snappy-raw", "snappy-framed", "pack200", "z" + * @param in the input stream + * @return compressor input stream + * @throws CompressorException if the compressor name is not known + * @throws IllegalArgumentException if the name or input stream is null + */ + public CompressorInputStream createCompressorInputStream(final String name, + final InputStream in) throws CompressorException { + if (name == null || in == null) { + throw new IllegalArgumentException( + "Compressor name and stream must not be null."); + } + + try { + + if (GZIP.equalsIgnoreCase(name)) { + return new GzipCompressorInputStream(in); + } + + if (BZIP2.equalsIgnoreCase(name)) { + return new BZip2CompressorInputStream(in); + } + + if (PACK200.equalsIgnoreCase(name)) { + return new Pack200CompressorInputStream(in); + } + + if (SNAPPY_RAW.equalsIgnoreCase(name)) { + return new SnappyCompressorInputStream(in); + } + + if (SNAPPY_FRAMED.equalsIgnoreCase(name)) { + return new FramedSnappyCompressorInputStream(in); + } + + if (Z.equalsIgnoreCase(name)) { + return new ZCompressorInputStream(in); + } + + } catch (IOException e) { + throw new CompressorException( + "Could not create CompressorInputStream.", e); + } + throw new CompressorException("Compressor: " + name + " not found."); + } + + /** + * Create an compressor output stream from an compressor name and an input stream. + * + * @param name the compressor name, i.e. 
"gz", "bzip2", "xz", or "pack200" + * @param out the output stream + * @return the compressor output stream + * @throws CompressorException if the archiver name is not known + * @throws IllegalArgumentException if the archiver name or stream is null + */ + public CompressorOutputStream createCompressorOutputStream( + final String name, final OutputStream out) + throws CompressorException { + if (name == null || out == null) { + throw new IllegalArgumentException( + "Compressor name and stream must not be null."); + } + + try { + + if (GZIP.equalsIgnoreCase(name)) { + return new GzipCompressorOutputStream(out); + } + + if (BZIP2.equalsIgnoreCase(name)) { + return new BZip2CompressorOutputStream(out); + } + + if (PACK200.equalsIgnoreCase(name)) { + return new Pack200CompressorOutputStream(out); + } + + } catch (IOException e) { + throw new CompressorException( + "Could not create CompressorOutputStream", e); + } + throw new CompressorException("Compressor: " + name + " not found."); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/FileNameUtil.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/FileNameUtil.java new file mode 100644 index 000000000..6accafd4a --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/FileNameUtil.java @@ -0,0 +1,196 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors; + +import java.util.Collections; +import java.util.HashMap; +import java.util.Locale; +import java.util.Map; + +/** + * File name mapping code for the compression formats. + * @ThreadSafe + * @since 1.4 + */ +public class FileNameUtil { + + /** + * Map from common filename suffixes to the suffixes that identify compressed + * versions of those file types. For example: from ".tar" to ".tgz". + */ + private final Map compressSuffix = + new HashMap(); + + /** + * Map from common filename suffixes of compressed files to the + * corresponding suffixes of uncompressed files. For example: from + * ".tgz" to ".tar". + *

    + * This map also contains format-specific suffixes like ".gz" and "-z". + * These suffixes are mapped to the empty string, as they should simply + * be removed from the filename when the file is uncompressed. + */ + private final Map uncompressSuffix; + + /** + * Length of the longest compressed suffix. + */ + private final int longestCompressedSuffix; + + /** + * Length of the shortest compressed suffix. + */ + private final int shortestCompressedSuffix; + + /** + * Length of the longest uncompressed suffix. + */ + private final int longestUncompressedSuffix; + + /** + * Length of the shortest uncompressed suffix longer than the + * empty string. + */ + private final int shortestUncompressedSuffix; + + /** + * The format's default extension. + */ + private final String defaultExtension; + + /** + * sets up the utility with a map of known compressed to + * uncompressed suffix mappings and the default extension of the + * format. + * + * @param uncompressSuffix Map from common filename suffixes of + * compressed files to the corresponding suffixes of uncompressed + * files. For example: from ".tgz" to ".tar". This map also + * contains format-specific suffixes like ".gz" and "-z". These + * suffixes are mapped to the empty string, as they should simply + * be removed from the filename when the file is uncompressed. 
+ * + * @param defaultExtension the format's default extension like ".gz" + */ + public FileNameUtil(Map uncompressSuffix, + String defaultExtension) { + this.uncompressSuffix = Collections.unmodifiableMap(uncompressSuffix); + int lc = Integer.MIN_VALUE, sc = Integer.MAX_VALUE; + int lu = Integer.MIN_VALUE, su = Integer.MAX_VALUE; + for (Map.Entry ent : uncompressSuffix.entrySet()) { + int cl = ent.getKey().length(); + if (cl > lc) { + lc = cl; + } + if (cl < sc) { + sc = cl; + } + + String u = ent.getValue(); + int ul = u.length(); + if (ul > 0) { + if (!compressSuffix.containsKey(u)) { + compressSuffix.put(u, ent.getKey()); + } + if (ul > lu) { + lu = ul; + } + if (ul < su) { + su = ul; + } + } + } + longestCompressedSuffix = lc; + longestUncompressedSuffix = lu; + shortestCompressedSuffix = sc; + shortestUncompressedSuffix = su; + this.defaultExtension = defaultExtension; + } + + /** + * Detects common format suffixes in the given filename. + * + * @param filename name of a file + * @return {@code true} if the filename has a common format suffix, + * {@code false} otherwise + */ + public boolean isCompressedFilename(String filename) { + final String lower = filename.toLowerCase(Locale.ENGLISH); + final int n = lower.length(); + for (int i = shortestCompressedSuffix; + i <= longestCompressedSuffix && i < n; i++) { + if (uncompressSuffix.containsKey(lower.substring(n - i))) { + return true; + } + } + return false; + } + + /** + * Maps the given name of a compressed file to the name that the + * file should have after uncompression. Commonly used file type specific + * suffixes like ".tgz" or ".svgz" are automatically detected and + * correctly mapped. For example the name "package.tgz" is mapped to + * "package.tar". And any filenames with the generic ".gz" suffix + * (or any other generic gzip suffix) is mapped to a name without that + * suffix. If no format suffix is detected, then the filename is returned + * unmapped. 
+ * + * @param filename name of a file + * @return name of the corresponding uncompressed file + */ + public String getUncompressedFilename(String filename) { + final String lower = filename.toLowerCase(Locale.ENGLISH); + final int n = lower.length(); + for (int i = shortestCompressedSuffix; + i <= longestCompressedSuffix && i < n; i++) { + String suffix = uncompressSuffix.get(lower.substring(n - i)); + if (suffix != null) { + return filename.substring(0, n - i) + suffix; + } + } + return filename; + } + + /** + * Maps the given filename to the name that the file should have after + * compression. Common file types with custom suffixes for + * compressed versions are automatically detected and correctly mapped. + * For example the name "package.tar" is mapped to "package.tgz". If no + * custom mapping is applicable, then the default ".gz" suffix is appended + * to the filename. + * + * @param filename name of a file + * @return name of the corresponding compressed file + */ + public String getCompressedFilename(String filename) { + final String lower = filename.toLowerCase(Locale.ENGLISH); + final int n = lower.length(); + for (int i = shortestUncompressedSuffix; + i <= longestUncompressedSuffix && i < n; i++) { + String suffix = compressSuffix.get(lower.substring(n - i)); + if (suffix != null) { + return filename.substring(0, n - i) + suffix; + } + } + // No custom suffix found, just append the default + return filename + defaultExtension; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java new file mode 100644 index 000000000..1785d9405 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorInputStream.java @@ -0,0 +1,1046 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. 
See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +/* + * This package is based on the work done by Keiron Liddle, Aftex Software + * to whom the Ant project is very grateful for his + * great code. + */ +package org.apache.commons.compress.compressors.bzip2; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.compressors.CompressorInputStream; + +/** + * An input stream that decompresses from the BZip2 format to be read as any other stream. + * + * @NotThreadSafe + */ +public class BZip2CompressorInputStream extends CompressorInputStream implements + BZip2Constants { + + /** + * Index of the last char in the block, so the block size == last + 1. + */ + private int last; + + /** + * Index in zptr[] of original string after sorting. + */ + private int origPtr; + + /** + * always: in the range 0 .. 9. The current block size is 100000 * this + * number. 
+ */ + private int blockSize100k; + + private boolean blockRandomised; + + private int bsBuff; + private int bsLive; + private final CRC crc = new CRC(); + + private int nInUse; + + private InputStream in; + private final boolean decompressConcatenated; + + private int currentChar = -1; + + private static final int EOF = 0; + private static final int START_BLOCK_STATE = 1; + private static final int RAND_PART_A_STATE = 2; + private static final int RAND_PART_B_STATE = 3; + private static final int RAND_PART_C_STATE = 4; + private static final int NO_RAND_PART_A_STATE = 5; + private static final int NO_RAND_PART_B_STATE = 6; + private static final int NO_RAND_PART_C_STATE = 7; + + private int currentState = START_BLOCK_STATE; + + private int storedBlockCRC, storedCombinedCRC; + private int computedBlockCRC, computedCombinedCRC; + + // Variables used by setup* methods exclusively + + private int su_count; + private int su_ch2; + private int su_chPrev; + private int su_i2; + private int su_j2; + private int su_rNToGo; + private int su_rTPos; + private int su_tPos; + private char su_z; + + /** + * All memory intensive stuff. This field is initialized by initBlock(). + */ + private BZip2CompressorInputStream.Data data; + + /** + * Constructs a new BZip2CompressorInputStream which decompresses bytes + * read from the specified stream. This doesn't suppprt decompressing + * concatenated .bz2 files. + * + * @throws IOException + * if the stream content is malformed or an I/O error occurs. + * @throws NullPointerException + * if in == null + */ + public BZip2CompressorInputStream(final InputStream in) throws IOException { + this(in, false); + } + + /** + * Constructs a new BZip2CompressorInputStream which decompresses bytes + * read from the specified stream. 
+ * + * @param in the InputStream from which this object should be created + * @param decompressConcatenated + * if true, decompress until the end of the input; + * if false, stop after the first .bz2 stream and + * leave the input position to point to the next + * byte after the .bz2 stream + * + * @throws IOException + * if the stream content is malformed or an I/O error occurs. + * @throws NullPointerException + * if in == null + */ + public BZip2CompressorInputStream(final InputStream in, final boolean decompressConcatenated) throws IOException { + this.in = in; + this.decompressConcatenated = decompressConcatenated; + + init(true); + initBlock(); + setupBlock(); + } + + @Override + public int read() throws IOException { + if (this.in != null) { + int r = read0(); + count(r < 0 ? -1 : 1); + return r; + } else { + throw new IOException("stream closed"); + } + } + + /* + * (non-Javadoc) + * + * @see java.io.InputStream#read(byte[], int, int) + */ + @Override + public int read(final byte[] dest, final int offs, final int len) + throws IOException { + if (offs < 0) { + throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); + } + if (len < 0) { + throw new IndexOutOfBoundsException("len(" + len + ") < 0."); + } + if (offs + len > dest.length) { + throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + + len + ") > dest.length(" + dest.length + ")."); + } + if (this.in == null) { + throw new IOException("stream closed"); + } + + final int hi = offs + len; + int destOffs = offs; + for (int b; (destOffs < hi) && ((b = read0()) >= 0);) { + dest[destOffs++] = (byte) b; + } + + int c = (destOffs == offs) ? 
-1 : (destOffs - offs); + count(c); + return c; + } + + private void makeMaps() { + final boolean[] inUse = this.data.inUse; + final byte[] seqToUnseq = this.data.seqToUnseq; + + int nInUseShadow = 0; + + for (int i = 0; i < 256; i++) { + if (inUse[i]) { + seqToUnseq[nInUseShadow++] = (byte) i; + } + } + + this.nInUse = nInUseShadow; + } + + private int read0() throws IOException { + final int retChar = this.currentChar; + + switch (this.currentState) { + case EOF: + return -1; + + case START_BLOCK_STATE: + throw new IllegalStateException(); + + case RAND_PART_A_STATE: + throw new IllegalStateException(); + + case RAND_PART_B_STATE: + setupRandPartB(); + break; + + case RAND_PART_C_STATE: + setupRandPartC(); + break; + + case NO_RAND_PART_A_STATE: + throw new IllegalStateException(); + + case NO_RAND_PART_B_STATE: + setupNoRandPartB(); + break; + + case NO_RAND_PART_C_STATE: + setupNoRandPartC(); + break; + + default: + throw new IllegalStateException(); + } + + return retChar; + } + + private boolean init(boolean isFirstStream) throws IOException { + if (null == in) { + throw new IOException("No InputStream"); + } + + int magic0 = this.in.read(); + if (magic0 == -1 && !isFirstStream) { + return false; + } + int magic1 = this.in.read(); + int magic2 = this.in.read(); + + if (magic0 != 'B' || magic1 != 'Z' || magic2 != 'h') { + throw new IOException(isFirstStream + ? "Stream is not in the BZip2 format" + : "Garbage after a valid BZip2 stream"); + } + + int blockSize = this.in.read(); + if ((blockSize < '1') || (blockSize > '9')) { + throw new IOException("BZip2 block size is invalid"); + } + + this.blockSize100k = blockSize - '0'; + + this.bsLive = 0; + this.computedCombinedCRC = 0; + + return true; + } + + private void initBlock() throws IOException { + char magic0; + char magic1; + char magic2; + char magic3; + char magic4; + char magic5; + + while (true) { + // Get the block magic bytes. 
+ magic0 = bsGetUByte(); + magic1 = bsGetUByte(); + magic2 = bsGetUByte(); + magic3 = bsGetUByte(); + magic4 = bsGetUByte(); + magic5 = bsGetUByte(); + + // If isn't end of stream magic, break out of the loop. + if (magic0 != 0x17 || magic1 != 0x72 || magic2 != 0x45 + || magic3 != 0x38 || magic4 != 0x50 || magic5 != 0x90) { + break; + } + + // End of stream was reached. Check the combined CRC and + // advance to the next .bz2 stream if decoding concatenated + // streams. + if (complete()) { + return; + } + } + + if (magic0 != 0x31 || // '1' + magic1 != 0x41 || // ')' + magic2 != 0x59 || // 'Y' + magic3 != 0x26 || // '&' + magic4 != 0x53 || // 'S' + magic5 != 0x59 // 'Y' + ) { + this.currentState = EOF; + throw new IOException("bad block header"); + } else { + this.storedBlockCRC = bsGetInt(); + this.blockRandomised = bsR(1) == 1; + + /** + * Allocate data here instead in constructor, so we do not allocate + * it if the input file is empty. + */ + if (this.data == null) { + this.data = new Data(this.blockSize100k); + } + + // currBlockNo++; + getAndMoveToFrontDecode(); + + this.crc.initialiseCRC(); + this.currentState = START_BLOCK_STATE; + } + } + + private void endBlock() throws IOException { + this.computedBlockCRC = this.crc.getFinalCRC(); + + // A bad CRC is considered a fatal error. 
+ if (this.storedBlockCRC != this.computedBlockCRC) { + // make next blocks readable without error + // (repair feature, not yet documented, not tested) + this.computedCombinedCRC = (this.storedCombinedCRC << 1) + | (this.storedCombinedCRC >>> 31); + this.computedCombinedCRC ^= this.storedBlockCRC; + + throw new IOException("BZip2 CRC error"); + } + + this.computedCombinedCRC = (this.computedCombinedCRC << 1) + | (this.computedCombinedCRC >>> 31); + this.computedCombinedCRC ^= this.computedBlockCRC; + } + + private boolean complete() throws IOException { + this.storedCombinedCRC = bsGetInt(); + this.currentState = EOF; + this.data = null; + + if (this.storedCombinedCRC != this.computedCombinedCRC) { + throw new IOException("BZip2 CRC error"); + } + + // Look for the next .bz2 stream if decompressing + // concatenated files. + return !decompressConcatenated || !init(false); + } + + @Override + public void close() throws IOException { + InputStream inShadow = this.in; + if (inShadow != null) { + try { + if (inShadow != System.in) { + inShadow.close(); + } + } finally { + this.data = null; + this.in = null; + } + } + } + + private int bsR(final int n) throws IOException { + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + if (bsLiveShadow < n) { + final InputStream inShadow = this.in; + do { + int thech = inShadow.read(); + + if (thech < 0) { + throw new IOException("unexpected end of stream"); + } + + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + } while (bsLiveShadow < n); + + this.bsBuff = bsBuffShadow; + } + + this.bsLive = bsLiveShadow - n; + return (bsBuffShadow >> (bsLiveShadow - n)) & ((1 << n) - 1); + } + + private boolean bsGetBit() throws IOException { + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + if (bsLiveShadow < 1) { + int thech = this.in.read(); + + if (thech < 0) { + throw new IOException("unexpected end of stream"); + } + + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow 
+= 8; + this.bsBuff = bsBuffShadow; + } + + this.bsLive = bsLiveShadow - 1; + return ((bsBuffShadow >> (bsLiveShadow - 1)) & 1) != 0; + } + + private char bsGetUByte() throws IOException { + return (char) bsR(8); + } + + private int bsGetInt() throws IOException { + return (((((bsR(8) << 8) | bsR(8)) << 8) | bsR(8)) << 8) | bsR(8); + } + + /** + * Called by createHuffmanDecodingTables() exclusively. + */ + private static void hbCreateDecodeTables(final int[] limit, + final int[] base, final int[] perm, final char[] length, + final int minLen, final int maxLen, final int alphaSize) { + for (int i = minLen, pp = 0; i <= maxLen; i++) { + for (int j = 0; j < alphaSize; j++) { + if (length[j] == i) { + perm[pp++] = j; + } + } + } + + for (int i = MAX_CODE_LEN; --i > 0;) { + base[i] = 0; + limit[i] = 0; + } + + for (int i = 0; i < alphaSize; i++) { + base[length[i] + 1]++; + } + + for (int i = 1, b = base[0]; i < MAX_CODE_LEN; i++) { + b += base[i]; + base[i] = b; + } + + for (int i = minLen, vec = 0, b = base[i]; i <= maxLen; i++) { + final int nb = base[i + 1]; + vec += nb - b; + b = nb; + limit[i] = vec - 1; + vec <<= 1; + } + + for (int i = minLen + 1; i <= maxLen; i++) { + base[i] = ((limit[i - 1] + 1) << 1) - base[i]; + } + } + + private void recvDecodingTables() throws IOException { + final Data dataShadow = this.data; + final boolean[] inUse = dataShadow.inUse; + final byte[] pos = dataShadow.recvDecodingTables_pos; + final byte[] selector = dataShadow.selector; + final byte[] selectorMtf = dataShadow.selectorMtf; + + int inUse16 = 0; + + /* Receive the mapping table */ + for (int i = 0; i < 16; i++) { + if (bsGetBit()) { + inUse16 |= 1 << i; + } + } + + for (int i = 256; --i >= 0;) { + inUse[i] = false; + } + + for (int i = 0; i < 16; i++) { + if ((inUse16 & (1 << i)) != 0) { + final int i16 = i << 4; + for (int j = 0; j < 16; j++) { + if (bsGetBit()) { + inUse[i16 + j] = true; + } + } + } + } + + makeMaps(); + final int alphaSize = this.nInUse + 2; + + /* Now 
the selectors */ + final int nGroups = bsR(3); + final int nSelectors = bsR(15); + + for (int i = 0; i < nSelectors; i++) { + int j = 0; + while (bsGetBit()) { + j++; + } + selectorMtf[i] = (byte) j; + } + + /* Undo the MTF values for the selectors. */ + for (int v = nGroups; --v >= 0;) { + pos[v] = (byte) v; + } + + for (int i = 0; i < nSelectors; i++) { + int v = selectorMtf[i] & 0xff; + final byte tmp = pos[v]; + while (v > 0) { + // nearly all times v is zero, 4 in most other cases + pos[v] = pos[v - 1]; + v--; + } + pos[0] = tmp; + selector[i] = tmp; + } + + final char[][] len = dataShadow.temp_charArray2d; + + /* Now the coding tables */ + for (int t = 0; t < nGroups; t++) { + int curr = bsR(5); + final char[] len_t = len[t]; + for (int i = 0; i < alphaSize; i++) { + while (bsGetBit()) { + curr += bsGetBit() ? -1 : 1; + } + len_t[i] = (char) curr; + } + } + + // finally create the Huffman tables + createHuffmanDecodingTables(alphaSize, nGroups); + } + + /** + * Called by recvDecodingTables() exclusively. 
+ */ + private void createHuffmanDecodingTables(final int alphaSize, + final int nGroups) { + final Data dataShadow = this.data; + final char[][] len = dataShadow.temp_charArray2d; + final int[] minLens = dataShadow.minLens; + final int[][] limit = dataShadow.limit; + final int[][] base = dataShadow.base; + final int[][] perm = dataShadow.perm; + + for (int t = 0; t < nGroups; t++) { + int minLen = 32; + int maxLen = 0; + final char[] len_t = len[t]; + for (int i = alphaSize; --i >= 0;) { + final char lent = len_t[i]; + if (lent > maxLen) { + maxLen = lent; + } + if (lent < minLen) { + minLen = lent; + } + } + hbCreateDecodeTables(limit[t], base[t], perm[t], len[t], minLen, + maxLen, alphaSize); + minLens[t] = minLen; + } + } + + private void getAndMoveToFrontDecode() throws IOException { + this.origPtr = bsR(24); + recvDecodingTables(); + + final InputStream inShadow = this.in; + final Data dataShadow = this.data; + final byte[] ll8 = dataShadow.ll8; + final int[] unzftab = dataShadow.unzftab; + final byte[] selector = dataShadow.selector; + final byte[] seqToUnseq = dataShadow.seqToUnseq; + final char[] yy = dataShadow.getAndMoveToFrontDecode_yy; + final int[] minLens = dataShadow.minLens; + final int[][] limit = dataShadow.limit; + final int[][] base = dataShadow.base; + final int[][] perm = dataShadow.perm; + final int limitLast = this.blockSize100k * 100000; + + /* + * Setting up the unzftab entries here is not strictly necessary, but it + * does save having to do it later in a separate pass, and so saves a + * block's worth of cache misses. 
+ */ + for (int i = 256; --i >= 0;) { + yy[i] = (char) i; + unzftab[i] = 0; + } + + int groupNo = 0; + int groupPos = G_SIZE - 1; + final int eob = this.nInUse + 1; + int nextSym = getAndMoveToFrontDecode0(0); + int bsBuffShadow = this.bsBuff; + int bsLiveShadow = this.bsLive; + int lastShadow = -1; + int zt = selector[groupNo] & 0xff; + int[] base_zt = base[zt]; + int[] limit_zt = limit[zt]; + int[] perm_zt = perm[zt]; + int minLens_zt = minLens[zt]; + + while (nextSym != eob) { + if ((nextSym == RUNA) || (nextSym == RUNB)) { + int s = -1; + + for (int n = 1; true; n <<= 1) { + if (nextSym == RUNA) { + s += n; + } else if (nextSym == RUNB) { + s += n << 1; + } else { + break; + } + + if (groupPos == 0) { + groupPos = G_SIZE - 1; + zt = selector[++groupNo] & 0xff; + base_zt = base[zt]; + limit_zt = limit[zt]; + perm_zt = perm[zt]; + minLens_zt = minLens[zt]; + } else { + groupPos--; + } + + int zn = minLens_zt; + + // Inlined: + // int zvec = bsR(zn); + while (bsLiveShadow < zn) { + final int thech = inShadow.read(); + if (thech >= 0) { + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + continue; + } else { + throw new IOException("unexpected end of stream"); + } + } + int zvec = (bsBuffShadow >> (bsLiveShadow - zn)) + & ((1 << zn) - 1); + bsLiveShadow -= zn; + + while (zvec > limit_zt[zn]) { + zn++; + while (bsLiveShadow < 1) { + final int thech = inShadow.read(); + if (thech >= 0) { + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + continue; + } else { + throw new IOException( + "unexpected end of stream"); + } + } + bsLiveShadow--; + zvec = (zvec << 1) + | ((bsBuffShadow >> bsLiveShadow) & 1); + } + nextSym = perm_zt[zvec - base_zt[zn]]; + } + + final byte ch = seqToUnseq[yy[0]]; + unzftab[ch & 0xff] += s + 1; + + while (s-- >= 0) { + ll8[++lastShadow] = ch; + } + + if (lastShadow >= limitLast) { + throw new IOException("block overrun"); + } + } else { + if (++lastShadow >= limitLast) { + throw new IOException("block 
overrun"); + } + + final char tmp = yy[nextSym - 1]; + unzftab[seqToUnseq[tmp] & 0xff]++; + ll8[lastShadow] = seqToUnseq[tmp]; + + /* + * This loop is hammered during decompression, hence avoid + * native method call overhead of System.arraycopy for very + * small ranges to copy. + */ + if (nextSym <= 16) { + for (int j = nextSym - 1; j > 0;) { + yy[j] = yy[--j]; + } + } else { + System.arraycopy(yy, 0, yy, 1, nextSym - 1); + } + + yy[0] = tmp; + + if (groupPos == 0) { + groupPos = G_SIZE - 1; + zt = selector[++groupNo] & 0xff; + base_zt = base[zt]; + limit_zt = limit[zt]; + perm_zt = perm[zt]; + minLens_zt = minLens[zt]; + } else { + groupPos--; + } + + int zn = minLens_zt; + + // Inlined: + // int zvec = bsR(zn); + while (bsLiveShadow < zn) { + final int thech = inShadow.read(); + if (thech >= 0) { + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + continue; + } else { + throw new IOException("unexpected end of stream"); + } + } + int zvec = (bsBuffShadow >> (bsLiveShadow - zn)) + & ((1 << zn) - 1); + bsLiveShadow -= zn; + + while (zvec > limit_zt[zn]) { + zn++; + while (bsLiveShadow < 1) { + final int thech = inShadow.read(); + if (thech >= 0) { + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + continue; + } else { + throw new IOException("unexpected end of stream"); + } + } + bsLiveShadow--; + zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1); + } + nextSym = perm_zt[zvec - base_zt[zn]]; + } + } + + this.last = lastShadow; + this.bsLive = bsLiveShadow; + this.bsBuff = bsBuffShadow; + } + + private int getAndMoveToFrontDecode0(final int groupNo) throws IOException { + final InputStream inShadow = this.in; + final Data dataShadow = this.data; + final int zt = dataShadow.selector[groupNo] & 0xff; + final int[] limit_zt = dataShadow.limit[zt]; + int zn = dataShadow.minLens[zt]; + int zvec = bsR(zn); + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + while (zvec > limit_zt[zn]) { + zn++; + while 
(bsLiveShadow < 1) { + final int thech = inShadow.read(); + + if (thech >= 0) { + bsBuffShadow = (bsBuffShadow << 8) | thech; + bsLiveShadow += 8; + continue; + } else { + throw new IOException("unexpected end of stream"); + } + } + bsLiveShadow--; + zvec = (zvec << 1) | ((bsBuffShadow >> bsLiveShadow) & 1); + } + + this.bsLive = bsLiveShadow; + this.bsBuff = bsBuffShadow; + + return dataShadow.perm[zt][zvec - dataShadow.base[zt][zn]]; + } + + private void setupBlock() throws IOException { + if (this.data == null) { + return; + } + + final int[] cftab = this.data.cftab; + final int[] tt = this.data.initTT(this.last + 1); + final byte[] ll8 = this.data.ll8; + cftab[0] = 0; + System.arraycopy(this.data.unzftab, 0, cftab, 1, 256); + + for (int i = 1, c = cftab[0]; i <= 256; i++) { + c += cftab[i]; + cftab[i] = c; + } + + for (int i = 0, lastShadow = this.last; i <= lastShadow; i++) { + tt[cftab[ll8[i] & 0xff]++] = i; + } + + if ((this.origPtr < 0) || (this.origPtr >= tt.length)) { + throw new IOException("stream corrupted"); + } + + this.su_tPos = tt[this.origPtr]; + this.su_count = 0; + this.su_i2 = 0; + this.su_ch2 = 256; /* not a char and not EOF */ + + if (this.blockRandomised) { + this.su_rNToGo = 0; + this.su_rTPos = 0; + setupRandPartA(); + } else { + setupNoRandPartA(); + } + } + + private void setupRandPartA() throws IOException { + if (this.su_i2 <= this.last) { + this.su_chPrev = this.su_ch2; + int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff; + this.su_tPos = this.data.tt[this.su_tPos]; + if (this.su_rNToGo == 0) { + this.su_rNToGo = Rand.rNums(this.su_rTPos) - 1; + if (++this.su_rTPos == 512) { + this.su_rTPos = 0; + } + } else { + this.su_rNToGo--; + } + this.su_ch2 = su_ch2Shadow ^= (this.su_rNToGo == 1) ? 
1 : 0; + this.su_i2++; + this.currentChar = su_ch2Shadow; + this.currentState = RAND_PART_B_STATE; + this.crc.updateCRC(su_ch2Shadow); + } else { + endBlock(); + initBlock(); + setupBlock(); + } + } + + private void setupNoRandPartA() throws IOException { + if (this.su_i2 <= this.last) { + this.su_chPrev = this.su_ch2; + int su_ch2Shadow = this.data.ll8[this.su_tPos] & 0xff; + this.su_ch2 = su_ch2Shadow; + this.su_tPos = this.data.tt[this.su_tPos]; + this.su_i2++; + this.currentChar = su_ch2Shadow; + this.currentState = NO_RAND_PART_B_STATE; + this.crc.updateCRC(su_ch2Shadow); + } else { + this.currentState = NO_RAND_PART_A_STATE; + endBlock(); + initBlock(); + setupBlock(); + } + } + + private void setupRandPartB() throws IOException { + if (this.su_ch2 != this.su_chPrev) { + this.currentState = RAND_PART_A_STATE; + this.su_count = 1; + setupRandPartA(); + } else if (++this.su_count >= 4) { + this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff); + this.su_tPos = this.data.tt[this.su_tPos]; + if (this.su_rNToGo == 0) { + this.su_rNToGo = Rand.rNums(this.su_rTPos) - 1; + if (++this.su_rTPos == 512) { + this.su_rTPos = 0; + } + } else { + this.su_rNToGo--; + } + this.su_j2 = 0; + this.currentState = RAND_PART_C_STATE; + if (this.su_rNToGo == 1) { + this.su_z ^= 1; + } + setupRandPartC(); + } else { + this.currentState = RAND_PART_A_STATE; + setupRandPartA(); + } + } + + private void setupRandPartC() throws IOException { + if (this.su_j2 < this.su_z) { + this.currentChar = this.su_ch2; + this.crc.updateCRC(this.su_ch2); + this.su_j2++; + } else { + this.currentState = RAND_PART_A_STATE; + this.su_i2++; + this.su_count = 0; + setupRandPartA(); + } + } + + private void setupNoRandPartB() throws IOException { + if (this.su_ch2 != this.su_chPrev) { + this.su_count = 1; + setupNoRandPartA(); + } else if (++this.su_count >= 4) { + this.su_z = (char) (this.data.ll8[this.su_tPos] & 0xff); + this.su_tPos = this.data.tt[this.su_tPos]; + this.su_j2 = 0; + setupNoRandPartC(); 
+ } else { + setupNoRandPartA(); + } + } + + private void setupNoRandPartC() throws IOException { + if (this.su_j2 < this.su_z) { + int su_ch2Shadow = this.su_ch2; + this.currentChar = su_ch2Shadow; + this.crc.updateCRC(su_ch2Shadow); + this.su_j2++; + this.currentState = NO_RAND_PART_C_STATE; + } else { + this.su_i2++; + this.su_count = 0; + setupNoRandPartA(); + } + } + + private static final class Data extends Object { + + // (with blockSize 900k) + final boolean[] inUse = new boolean[256]; // 256 byte + + final byte[] seqToUnseq = new byte[256]; // 256 byte + final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte + final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte + + /** + * Freq table collected to save a pass over the data during + * decompression. + */ + final int[] unzftab = new int[256]; // 1024 byte + + final int[][] limit = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte + final int[][] base = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte + final int[][] perm = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 byte + final int[] minLens = new int[N_GROUPS]; // 24 byte + + final int[] cftab = new int[257]; // 1028 byte + final char[] getAndMoveToFrontDecode_yy = new char[256]; // 512 byte + final char[][] temp_charArray2d = new char[N_GROUPS][MAX_ALPHA_SIZE]; // 3096 + // byte + final byte[] recvDecodingTables_pos = new byte[N_GROUPS]; // 6 byte + // --------------- + // 60798 byte + + int[] tt; // 3600000 byte + byte[] ll8; // 900000 byte + + // --------------- + // 4560782 byte + // =============== + + Data(int blockSize100k) { + this.ll8 = new byte[blockSize100k * BZip2Constants.BASEBLOCKSIZE]; + } + + /** + * Initializes the {@link #tt} array. + * + * This method is called when the required length of the array is known. + * I don't initialize it at construction time to avoid unneccessary + * memory allocation when compressing small files. 
+ */ + int[] initTT(int length) { + int[] ttShadow = this.tt; + + // tt.length should always be >= length, but theoretically + // it can happen, if the compressor mixed small and large + // blocks. Normally only the last block will be smaller + // than others. + if ((ttShadow == null) || (ttShadow.length < length)) { + this.tt = ttShadow = new int[length]; + } + + return ttShadow; + } + + } + + /** + * Checks if the signature matches what is expected for a bzip2 file. + * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is a bzip2 compressed stream, false otherwise + * + * @since 1.1 + */ + public static boolean matches(byte[] signature, int length) { + + if (length < 3) { + return false; + } + + if (signature[0] != 'B') { + return false; + } + + if (signature[1] != 'Z') { + return false; + } + + if (signature[2] != 'h') { + return false; + } + + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java new file mode 100644 index 000000000..a5eedcf04 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2CompressorOutputStream.java @@ -0,0 +1,1329 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.bzip2; + +import java.io.IOException; +import java.io.OutputStream; + +import org.apache.commons.compress.compressors.CompressorOutputStream; + +/** + * An output stream that compresses into the BZip2 format into another stream. + * + *

    + * The compression requires large amounts of memory. Thus you should call the + * {@link #close() close()} method as soon as possible, to force + * BZip2CompressorOutputStream to release the allocated memory. + *

    + * + *

    You can shrink the amount of allocated memory and maybe raise + * the compression speed by choosing a lower blocksize, which in turn + * may cause a lower compression ratio. You can avoid unnecessary + * memory allocation by avoiding using a blocksize which is bigger + * than the size of the input.

    + * + *

    You can compute the memory usage for compressing by the + * following formula:

    + * + *
    + * <code>400k + (9 * blocksize)</code>.
    + * 
    + * + *

    To get the memory required for decompression by {@link + * BZip2CompressorInputStream} use

    + * + *
    + * <code>65k + (5 * blocksize)</code>.
    + * 
    + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + * + *
    Memory usage by blocksize
    Blocksize Compression
    + * memory usage
    Decompression
    + * memory usage
    100k1300k565k
    200k2200k1065k
    300k3100k1565k
    400k4000k2065k
    500k4900k2565k
    600k5800k3065k
    700k6700k3565k
    800k7600k4065k
    900k8500k4565k
    + * + *

    + * For decompression BZip2CompressorInputStream allocates less memory if the + * bzipped input is smaller than one block. + *

    + * + *

    + * Instances of this class are not threadsafe. + *

    + * + *

    + * TODO: Update to BZip2 1.0.1 + *

    + * @NotThreadSafe + */ +public class BZip2CompressorOutputStream extends CompressorOutputStream + implements BZip2Constants { + + /** + * The minimum supported blocksize == 1. + */ + public static final int MIN_BLOCKSIZE = 1; + + /** + * The maximum supported blocksize == 9. + */ + public static final int MAX_BLOCKSIZE = 9; + + private static final int GREATER_ICOST = 15; + private static final int LESSER_ICOST = 0; + + private static void hbMakeCodeLengths(final byte[] len, final int[] freq, + final Data dat, final int alphaSize, + final int maxLen) { + /* + * Nodes and heap entries run from 1. Entry 0 for both the heap and + * nodes is a sentinel. + */ + final int[] heap = dat.heap; + final int[] weight = dat.weight; + final int[] parent = dat.parent; + + for (int i = alphaSize; --i >= 0;) { + weight[i + 1] = (freq[i] == 0 ? 1 : freq[i]) << 8; + } + + for (boolean tooLong = true; tooLong;) { + tooLong = false; + + int nNodes = alphaSize; + int nHeap = 0; + heap[0] = 0; + weight[0] = 0; + parent[0] = -2; + + for (int i = 1; i <= alphaSize; i++) { + parent[i] = -1; + nHeap++; + heap[nHeap] = i; + + int zz = nHeap; + int tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + + while (nHeap > 1) { + int n1 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + + int yy = 0; + int zz = 1; + int tmp = heap[1]; + + while (true) { + yy = zz << 1; + + if (yy > nHeap) { + break; + } + + if ((yy < nHeap) + && (weight[heap[yy + 1]] < weight[heap[yy]])) { + yy++; + } + + if (weight[tmp] < weight[heap[yy]]) { + break; + } + + heap[zz] = heap[yy]; + zz = yy; + } + + heap[zz] = tmp; + + int n2 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + + yy = 0; + zz = 1; + tmp = heap[1]; + + while (true) { + yy = zz << 1; + + if (yy > nHeap) { + break; + } + + if ((yy < nHeap) + && (weight[heap[yy + 1]] < weight[heap[yy]])) { + yy++; + } + + if (weight[tmp] < weight[heap[yy]]) { + break; + } + + heap[zz] = 
heap[yy]; + zz = yy; + } + + heap[zz] = tmp; + nNodes++; + parent[n1] = parent[n2] = nNodes; + + final int weight_n1 = weight[n1]; + final int weight_n2 = weight[n2]; + weight[nNodes] = ((weight_n1 & 0xffffff00) + + (weight_n2 & 0xffffff00)) + | (1 + (((weight_n1 & 0x000000ff) + > (weight_n2 & 0x000000ff)) + ? (weight_n1 & 0x000000ff) + : (weight_n2 & 0x000000ff))); + + parent[nNodes] = -1; + nHeap++; + heap[nHeap] = nNodes; + + tmp = 0; + zz = nHeap; + tmp = heap[zz]; + final int weight_tmp = weight[tmp]; + while (weight_tmp < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + + } + + for (int i = 1; i <= alphaSize; i++) { + int j = 0; + int k = i; + + for (int parent_k; (parent_k = parent[k]) >= 0;) { + k = parent_k; + j++; + } + + len[i - 1] = (byte) j; + if (j > maxLen) { + tooLong = true; + } + } + + if (tooLong) { + for (int i = 1; i < alphaSize; i++) { + int j = weight[i] >> 8; + j = 1 + (j >> 1); + weight[i] = j << 8; + } + } + } + } + + /** + * Index of the last char in the block, so the block size == last + 1. + */ + private int last; + + /** + * Always: in the range 0 .. 9. The current block size is 100000 * this + * number. + */ + private final int blockSize100k; + + private int bsBuff; + private int bsLive; + private final CRC crc = new CRC(); + + private int nInUse; + + private int nMTF; + + private int currentChar = -1; + private int runLength = 0; + + private int blockCRC; + private int combinedCRC; + private final int allowableBlockSize; + + /** + * All memory intensive stuff. + */ + private Data data; + private BlockSort blockSorter; + + private OutputStream out; + + /** + * Chooses a blocksize based on the given length of the data to compress. + * + * @return The blocksize, between {@link #MIN_BLOCKSIZE} and + * {@link #MAX_BLOCKSIZE} both inclusive. For a negative + * inputLength this method returns MAX_BLOCKSIZE + * always. 
+ * + * @param inputLength + * The length of the data which will be compressed by + * BZip2CompressorOutputStream. + */ + public static int chooseBlockSize(long inputLength) { + return (inputLength > 0) ? (int) Math + .min((inputLength / 132000) + 1, 9) : MAX_BLOCKSIZE; + } + + /** + * Constructs a new BZip2CompressorOutputStream with a blocksize of 900k. + * + * @param out + * the destination stream. + * + * @throws IOException + * if an I/O error occurs in the specified stream. + * @throws NullPointerException + * if out == null. + */ + public BZip2CompressorOutputStream(final OutputStream out) + throws IOException { + this(out, MAX_BLOCKSIZE); + } + + /** + * Constructs a new BZip2CompressorOutputStream with specified blocksize. + * + * @param out + * the destination stream. + * @param blockSize + * the blockSize as 100k units. + * + * @throws IOException + * if an I/O error occurs in the specified stream. + * @throws IllegalArgumentException + * if (blockSize < 1) || (blockSize > 9). + * @throws NullPointerException + * if out == null. + * + * @see #MIN_BLOCKSIZE + * @see #MAX_BLOCKSIZE + */ + public BZip2CompressorOutputStream(final OutputStream out, final int blockSize) throws IOException { + if (blockSize < 1) { + throw new IllegalArgumentException("blockSize(" + blockSize + ") < 1"); + } + if (blockSize > 9) { + throw new IllegalArgumentException("blockSize(" + blockSize + ") > 9"); + } + + this.blockSize100k = blockSize; + this.out = out; + + /* 20 is just a paranoia constant */ + this.allowableBlockSize = (this.blockSize100k * BZip2Constants.BASEBLOCKSIZE) - 20; + init(); + } + + @Override + public void write(final int b) throws IOException { + if (this.out != null) { + write0(b); + } else { + throw new IOException("closed"); + } + } + + /** + * Writes the current byte to the buffer, run-length encoding it + * if it has been repeated at least four times (the first step + * RLEs sequences of four identical bytes). + * + *

    Flushes the current block before writing data if it is + * full.

    + * + *

    "write to the buffer" means adding to data.buffer starting + * two steps "after" this.last - initially starting at index 1 + * (not 0) - and updating this.last to point to the last index + * written minus 1.

    + */ + private void writeRun() throws IOException { + final int lastShadow = this.last; + + if (lastShadow < this.allowableBlockSize) { + final int currentCharShadow = this.currentChar; + final Data dataShadow = this.data; + dataShadow.inUse[currentCharShadow] = true; + final byte ch = (byte) currentCharShadow; + + int runLengthShadow = this.runLength; + this.crc.updateCRC(currentCharShadow, runLengthShadow); + + switch (runLengthShadow) { + case 1: + dataShadow.block[lastShadow + 2] = ch; + this.last = lastShadow + 1; + break; + + case 2: + dataShadow.block[lastShadow + 2] = ch; + dataShadow.block[lastShadow + 3] = ch; + this.last = lastShadow + 2; + break; + + case 3: { + final byte[] block = dataShadow.block; + block[lastShadow + 2] = ch; + block[lastShadow + 3] = ch; + block[lastShadow + 4] = ch; + this.last = lastShadow + 3; + } + break; + + default: { + runLengthShadow -= 4; + dataShadow.inUse[runLengthShadow] = true; + final byte[] block = dataShadow.block; + block[lastShadow + 2] = ch; + block[lastShadow + 3] = ch; + block[lastShadow + 4] = ch; + block[lastShadow + 5] = ch; + block[lastShadow + 6] = (byte) runLengthShadow; + this.last = lastShadow + 5; + } + break; + + } + } else { + endBlock(); + initBlock(); + writeRun(); + } + } + + /** + * Overriden to close the stream. 
+ */ + @Override + protected void finalize() throws Throwable { + finish(); + super.finalize(); + } + + + public void finish() throws IOException { + if (out != null) { + try { + if (this.runLength > 0) { + writeRun(); + } + this.currentChar = -1; + endBlock(); + endCompression(); + } finally { + this.out = null; + this.data = null; + this.blockSorter = null; + } + } + } + + @Override + public void close() throws IOException { + if (out != null) { + OutputStream outShadow = this.out; + finish(); + outShadow.close(); + } + } + + @Override + public void flush() throws IOException { + OutputStream outShadow = this.out; + if (outShadow != null) { + outShadow.flush(); + } + } + + /** + * Writes magic bytes like BZ on the first position of the stream + * and bytes indiciating the file-format, which is + * huffmanised, followed by a digit indicating blockSize100k. + * @throws IOException if the magic bytes could not been written + */ + private void init() throws IOException { + bsPutUByte('B'); + bsPutUByte('Z'); + + this.data = new Data(this.blockSize100k); + this.blockSorter = new BlockSort(this.data); + + // huffmanised magic bytes + bsPutUByte('h'); + bsPutUByte('0' + this.blockSize100k); + + this.combinedCRC = 0; + initBlock(); + } + + private void initBlock() { + // blockNo++; + this.crc.initialiseCRC(); + this.last = -1; + // ch = 0; + + boolean[] inUse = this.data.inUse; + for (int i = 256; --i >= 0;) { + inUse[i] = false; + } + + } + + private void endBlock() throws IOException { + this.blockCRC = this.crc.getFinalCRC(); + this.combinedCRC = (this.combinedCRC << 1) | (this.combinedCRC >>> 31); + this.combinedCRC ^= this.blockCRC; + + // empty block at end of file + if (this.last == -1) { + return; + } + + /* sort the block and establish posn of original string */ + blockSort(); + + /* + * A 6-byte block header, the value chosen arbitrarily as 0x314159265359 + * :-). 
A 32 bit value does not really give a strong enough guarantee + * that the value will not appear by chance in the compressed + * datastream. Worst-case probability of this event, for a 900k block, + * is about 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48 + * bits. For a compressed file of size 100Gb -- about 100000 blocks -- + * only a 48-bit marker will do. NB: normal compression/ decompression + * donot rely on these statistical properties. They are only important + * when trying to recover blocks from damaged files. + */ + bsPutUByte(0x31); + bsPutUByte(0x41); + bsPutUByte(0x59); + bsPutUByte(0x26); + bsPutUByte(0x53); + bsPutUByte(0x59); + + /* Now the block's CRC, so it is in a known place. */ + bsPutInt(this.blockCRC); + + /* Now a single bit indicating no randomisation. */ + bsW(1, 0); + + /* Finally, block's contents proper. */ + moveToFrontCodeAndSend(); + } + + private void endCompression() throws IOException { + /* + * Now another magic 48-bit number, 0x177245385090, to indicate the end + * of the last block. (sqrt(pi), if you want to know. I did want to use + * e, but it contains too much repetition -- 27 18 28 18 28 46 -- for me + * to feel statistically comfortable. Call me paranoid.) + */ + bsPutUByte(0x17); + bsPutUByte(0x72); + bsPutUByte(0x45); + bsPutUByte(0x38); + bsPutUByte(0x50); + bsPutUByte(0x90); + + bsPutInt(this.combinedCRC); + bsFinishedWithStream(); + } + + /** + * Returns the blocksize parameter specified at construction time. 
+ */ + public final int getBlockSize() { + return this.blockSize100k; + } + + @Override + public void write(final byte[] buf, int offs, final int len) + throws IOException { + if (offs < 0) { + throw new IndexOutOfBoundsException("offs(" + offs + ") < 0."); + } + if (len < 0) { + throw new IndexOutOfBoundsException("len(" + len + ") < 0."); + } + if (offs + len > buf.length) { + throw new IndexOutOfBoundsException("offs(" + offs + ") + len(" + + len + ") > buf.length(" + + buf.length + ")."); + } + if (this.out == null) { + throw new IOException("stream closed"); + } + + for (int hi = offs + len; offs < hi;) { + write0(buf[offs++]); + } + } + + /** + * Keeps track of the last bytes written and implicitly performs + * run-length encoding as the first step of the bzip2 algorithm. + */ + private void write0(int b) throws IOException { + if (this.currentChar != -1) { + b &= 0xff; + if (this.currentChar == b) { + if (++this.runLength > 254) { + writeRun(); + this.currentChar = -1; + this.runLength = 0; + } + // else nothing to do + } else { + writeRun(); + this.runLength = 1; + this.currentChar = b; + } + } else { + this.currentChar = b & 0xff; + this.runLength++; + } + } + + private static void hbAssignCodes(final int[] code, final byte[] length, + final int minLen, final int maxLen, + final int alphaSize) { + int vec = 0; + for (int n = minLen; n <= maxLen; n++) { + for (int i = 0; i < alphaSize; i++) { + if ((length[i] & 0xff) == n) { + code[i] = vec; + vec++; + } + } + vec <<= 1; + } + } + + private void bsFinishedWithStream() throws IOException { + while (this.bsLive > 0) { + int ch = this.bsBuff >> 24; + this.out.write(ch); // write 8-bit + this.bsBuff <<= 8; + this.bsLive -= 8; + } + } + + private void bsW(final int n, final int v) throws IOException { + final OutputStream outShadow = this.out; + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow 
<<= 8; + bsLiveShadow -= 8; + } + + this.bsBuff = bsBuffShadow | (v << (32 - bsLiveShadow - n)); + this.bsLive = bsLiveShadow + n; + } + + private void bsPutUByte(final int c) throws IOException { + bsW(8, c); + } + + private void bsPutInt(final int u) throws IOException { + bsW(8, (u >> 24) & 0xff); + bsW(8, (u >> 16) & 0xff); + bsW(8, (u >> 8) & 0xff); + bsW(8, u & 0xff); + } + + private void sendMTFValues() throws IOException { + final byte[][] len = this.data.sendMTFValues_len; + final int alphaSize = this.nInUse + 2; + + for (int t = N_GROUPS; --t >= 0;) { + byte[] len_t = len[t]; + for (int v = alphaSize; --v >= 0;) { + len_t[v] = GREATER_ICOST; + } + } + + /* Decide how many coding tables to use */ + // assert (this.nMTF > 0) : this.nMTF; + final int nGroups = (this.nMTF < 200) ? 2 : (this.nMTF < 600) ? 3 + : (this.nMTF < 1200) ? 4 : (this.nMTF < 2400) ? 5 : 6; + + /* Generate an initial set of coding tables */ + sendMTFValues0(nGroups, alphaSize); + + /* + * Iterate up to N_ITERS times to improve the tables. + */ + final int nSelectors = sendMTFValues1(nGroups, alphaSize); + + /* Compute MTF values for the selectors. */ + sendMTFValues2(nGroups, nSelectors); + + /* Assign actual codes for the tables. */ + sendMTFValues3(nGroups, alphaSize); + + /* Transmit the mapping table. */ + sendMTFValues4(); + + /* Now the selectors. */ + sendMTFValues5(nGroups, nSelectors); + + /* Now the coding tables. 
*/ + sendMTFValues6(nGroups, alphaSize); + + /* And finally, the block data proper */ + sendMTFValues7(); + } + + private void sendMTFValues0(final int nGroups, final int alphaSize) { + final byte[][] len = this.data.sendMTFValues_len; + final int[] mtfFreq = this.data.mtfFreq; + + int remF = this.nMTF; + int gs = 0; + + for (int nPart = nGroups; nPart > 0; nPart--) { + final int tFreq = remF / nPart; + int ge = gs - 1; + int aFreq = 0; + + for (final int a = alphaSize - 1; (aFreq < tFreq) && (ge < a);) { + aFreq += mtfFreq[++ge]; + } + + if ((ge > gs) && (nPart != nGroups) && (nPart != 1) + && (((nGroups - nPart) & 1) != 0)) { + aFreq -= mtfFreq[ge--]; + } + + final byte[] len_np = len[nPart - 1]; + for (int v = alphaSize; --v >= 0;) { + if ((v >= gs) && (v <= ge)) { + len_np[v] = LESSER_ICOST; + } else { + len_np[v] = GREATER_ICOST; + } + } + + gs = ge + 1; + remF -= aFreq; + } + } + + private int sendMTFValues1(final int nGroups, final int alphaSize) { + final Data dataShadow = this.data; + final int[][] rfreq = dataShadow.sendMTFValues_rfreq; + final int[] fave = dataShadow.sendMTFValues_fave; + final short[] cost = dataShadow.sendMTFValues_cost; + final char[] sfmap = dataShadow.sfmap; + final byte[] selector = dataShadow.selector; + final byte[][] len = dataShadow.sendMTFValues_len; + final byte[] len_0 = len[0]; + final byte[] len_1 = len[1]; + final byte[] len_2 = len[2]; + final byte[] len_3 = len[3]; + final byte[] len_4 = len[4]; + final byte[] len_5 = len[5]; + final int nMTFShadow = this.nMTF; + + int nSelectors = 0; + + for (int iter = 0; iter < N_ITERS; iter++) { + for (int t = nGroups; --t >= 0;) { + fave[t] = 0; + int[] rfreqt = rfreq[t]; + for (int i = alphaSize; --i >= 0;) { + rfreqt[i] = 0; + } + } + + nSelectors = 0; + + for (int gs = 0; gs < this.nMTF;) { + /* Set group start & end marks. */ + + /* + * Calculate the cost of this group as coded by each of the + * coding tables. 
+ */ + + final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1); + + if (nGroups == N_GROUPS) { + // unrolled version of the else-block + + short cost0 = 0; + short cost1 = 0; + short cost2 = 0; + short cost3 = 0; + short cost4 = 0; + short cost5 = 0; + + for (int i = gs; i <= ge; i++) { + final int icv = sfmap[i]; + cost0 += len_0[icv] & 0xff; + cost1 += len_1[icv] & 0xff; + cost2 += len_2[icv] & 0xff; + cost3 += len_3[icv] & 0xff; + cost4 += len_4[icv] & 0xff; + cost5 += len_5[icv] & 0xff; + } + + cost[0] = cost0; + cost[1] = cost1; + cost[2] = cost2; + cost[3] = cost3; + cost[4] = cost4; + cost[5] = cost5; + + } else { + for (int t = nGroups; --t >= 0;) { + cost[t] = 0; + } + + for (int i = gs; i <= ge; i++) { + final int icv = sfmap[i]; + for (int t = nGroups; --t >= 0;) { + cost[t] += len[t][icv] & 0xff; + } + } + } + + /* + * Find the coding table which is best for this group, and + * record its identity in the selector table. + */ + int bt = -1; + for (int t = nGroups, bc = 999999999; --t >= 0;) { + final int cost_t = cost[t]; + if (cost_t < bc) { + bc = cost_t; + bt = t; + } + } + + fave[bt]++; + selector[nSelectors] = (byte) bt; + nSelectors++; + + /* + * Increment the symbol frequencies for the selected table. + */ + final int[] rfreq_bt = rfreq[bt]; + for (int i = gs; i <= ge; i++) { + rfreq_bt[sfmap[i]]++; + } + + gs = ge + 1; + } + + /* + * Recompute the tables based on the accumulated frequencies. 
+ */ + for (int t = 0; t < nGroups; t++) { + hbMakeCodeLengths(len[t], rfreq[t], this.data, alphaSize, 20); + } + } + + return nSelectors; + } + + private void sendMTFValues2(final int nGroups, final int nSelectors) { + // assert (nGroups < 8) : nGroups; + + final Data dataShadow = this.data; + byte[] pos = dataShadow.sendMTFValues2_pos; + + for (int i = nGroups; --i >= 0;) { + pos[i] = (byte) i; + } + + for (int i = 0; i < nSelectors; i++) { + final byte ll_i = dataShadow.selector[i]; + byte tmp = pos[0]; + int j = 0; + + while (ll_i != tmp) { + j++; + byte tmp2 = tmp; + tmp = pos[j]; + pos[j] = tmp2; + } + + pos[0] = tmp; + dataShadow.selectorMtf[i] = (byte) j; + } + } + + private void sendMTFValues3(final int nGroups, final int alphaSize) { + int[][] code = this.data.sendMTFValues_code; + byte[][] len = this.data.sendMTFValues_len; + + for (int t = 0; t < nGroups; t++) { + int minLen = 32; + int maxLen = 0; + final byte[] len_t = len[t]; + for (int i = alphaSize; --i >= 0;) { + final int l = len_t[i] & 0xff; + if (l > maxLen) { + maxLen = l; + } + if (l < minLen) { + minLen = l; + } + } + + // assert (maxLen <= 20) : maxLen; + // assert (minLen >= 1) : minLen; + + hbAssignCodes(code[t], len[t], minLen, maxLen, alphaSize); + } + } + + private void sendMTFValues4() throws IOException { + final boolean[] inUse = this.data.inUse; + final boolean[] inUse16 = this.data.sentMTFValues4_inUse16; + + for (int i = 16; --i >= 0;) { + inUse16[i] = false; + final int i16 = i * 16; + for (int j = 16; --j >= 0;) { + if (inUse[i16 + j]) { + inUse16[i] = true; + } + } + } + + for (int i = 0; i < 16; i++) { + bsW(1, inUse16[i] ? 1 : 0); + } + + final OutputStream outShadow = this.out; + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int i = 0; i < 16; i++) { + if (inUse16[i]) { + final int i16 = i * 16; + for (int j = 0; j < 16; j++) { + // inlined: bsW(1, inUse[i16 + j] ? 
1 : 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + if (inUse[i16 + j]) { + bsBuffShadow |= 1 << (32 - bsLiveShadow - 1); + } + bsLiveShadow++; + } + } + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues5(final int nGroups, final int nSelectors) + throws IOException { + bsW(3, nGroups); + bsW(15, nSelectors); + + final OutputStream outShadow = this.out; + final byte[] selectorMtf = this.data.selectorMtf; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int i = 0; i < nSelectors; i++) { + for (int j = 0, hj = selectorMtf[i] & 0xff; j < hj; j++) { + // inlined: bsW(1, 1); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 1 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + + // inlined: bsW(1, 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues6(final int nGroups, final int alphaSize) + throws IOException { + final byte[][] len = this.data.sendMTFValues_len; + final OutputStream outShadow = this.out; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int t = 0; t < nGroups; t++) { + byte[] len_t = len[t]; + int curr = len_t[0] & 0xff; + + // inlined: bsW(5, curr); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= curr << (32 - bsLiveShadow - 5); + bsLiveShadow += 5; + + for (int i = 0; i < alphaSize; i++) { + int lti = len_t[i] & 0xff; + while (curr < lti) { + // inlined: bsW(2, 2); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 
8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 2 << (32 - bsLiveShadow - 2); + bsLiveShadow += 2; + + curr++; /* 10 */ + } + + while (curr > lti) { + // inlined: bsW(2, 3); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + bsBuffShadow |= 3 << (32 - bsLiveShadow - 2); + bsLiveShadow += 2; + + curr--; /* 11 */ + } + + // inlined: bsW(1, 0); + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); // write 8-bit + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + // bsBuffShadow |= 0 << (32 - bsLiveShadow - 1); + bsLiveShadow++; + } + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void sendMTFValues7() throws IOException { + final Data dataShadow = this.data; + final byte[][] len = dataShadow.sendMTFValues_len; + final int[][] code = dataShadow.sendMTFValues_code; + final OutputStream outShadow = this.out; + final byte[] selector = dataShadow.selector; + final char[] sfmap = dataShadow.sfmap; + final int nMTFShadow = this.nMTF; + + int selCtr = 0; + + int bsLiveShadow = this.bsLive; + int bsBuffShadow = this.bsBuff; + + for (int gs = 0; gs < nMTFShadow;) { + final int ge = Math.min(gs + G_SIZE - 1, nMTFShadow - 1); + final int selector_selCtr = selector[selCtr] & 0xff; + final int[] code_selCtr = code[selector_selCtr]; + final byte[] len_selCtr = len[selector_selCtr]; + + while (gs <= ge) { + final int sfmap_i = sfmap[gs]; + + // + // inlined: bsW(len_selCtr[sfmap_i] & 0xff, + // code_selCtr[sfmap_i]); + // + while (bsLiveShadow >= 8) { + outShadow.write(bsBuffShadow >> 24); + bsBuffShadow <<= 8; + bsLiveShadow -= 8; + } + final int n = len_selCtr[sfmap_i] & 0xFF; + bsBuffShadow |= code_selCtr[sfmap_i] << (32 - bsLiveShadow - n); + bsLiveShadow += n; + + gs++; + } + + gs = ge + 1; + selCtr++; + } + + this.bsBuff = bsBuffShadow; + this.bsLive = bsLiveShadow; + } + + private void moveToFrontCodeAndSend() throws IOException 
{ + bsW(24, this.data.origPtr); + generateMTFValues(); + sendMTFValues(); + } + + private void blockSort() { + blockSorter.blockSort(data, last); + } + + /* + * Performs Move-To-Front on the Burrows-Wheeler transformed + * buffer, storing the MTFed data in data.sfmap in RUNA/RUNB + * run-length-encoded form. + * + *

    Keeps track of byte frequencies in data.mtfFreq at the same time.

    + */ + private void generateMTFValues() { + final int lastShadow = this.last; + final Data dataShadow = this.data; + final boolean[] inUse = dataShadow.inUse; + final byte[] block = dataShadow.block; + final int[] fmap = dataShadow.fmap; + final char[] sfmap = dataShadow.sfmap; + final int[] mtfFreq = dataShadow.mtfFreq; + final byte[] unseqToSeq = dataShadow.unseqToSeq; + final byte[] yy = dataShadow.generateMTFValues_yy; + + // make maps + int nInUseShadow = 0; + for (int i = 0; i < 256; i++) { + if (inUse[i]) { + unseqToSeq[i] = (byte) nInUseShadow; + nInUseShadow++; + } + } + this.nInUse = nInUseShadow; + + final int eob = nInUseShadow + 1; + + for (int i = eob; i >= 0; i--) { + mtfFreq[i] = 0; + } + + for (int i = nInUseShadow; --i >= 0;) { + yy[i] = (byte) i; + } + + int wr = 0; + int zPend = 0; + + for (int i = 0; i <= lastShadow; i++) { + final byte ll_i = unseqToSeq[block[fmap[i]] & 0xff]; + byte tmp = yy[0]; + int j = 0; + + while (ll_i != tmp) { + j++; + byte tmp2 = tmp; + tmp = yy[j]; + yy[j] = tmp2; + } + yy[0] = tmp; + + if (j == 0) { + zPend++; + } else { + if (zPend > 0) { + zPend--; + while (true) { + if ((zPend & 1) == 0) { + sfmap[wr] = RUNA; + wr++; + mtfFreq[RUNA]++; + } else { + sfmap[wr] = RUNB; + wr++; + mtfFreq[RUNB]++; + } + + if (zPend >= 2) { + zPend = (zPend - 2) >> 1; + } else { + break; + } + } + zPend = 0; + } + sfmap[wr] = (char) (j + 1); + wr++; + mtfFreq[j + 1]++; + } + } + + if (zPend > 0) { + zPend--; + while (true) { + if ((zPend & 1) == 0) { + sfmap[wr] = RUNA; + wr++; + mtfFreq[RUNA]++; + } else { + sfmap[wr] = RUNB; + wr++; + mtfFreq[RUNB]++; + } + + if (zPend >= 2) { + zPend = (zPend - 2) >> 1; + } else { + break; + } + } + } + + sfmap[wr] = (char) eob; + mtfFreq[eob]++; + this.nMTF = wr + 1; + } + + static final class Data extends Object { + + // with blockSize 900k + /* maps unsigned byte => "does it occur in block" */ + final boolean[] inUse = new boolean[256]; // 256 byte + final byte[] unseqToSeq = new byte[256]; 
// 256 byte + final int[] mtfFreq = new int[MAX_ALPHA_SIZE]; // 1032 byte + final byte[] selector = new byte[MAX_SELECTORS]; // 18002 byte + final byte[] selectorMtf = new byte[MAX_SELECTORS]; // 18002 byte + + final byte[] generateMTFValues_yy = new byte[256]; // 256 byte + final byte[][] sendMTFValues_len = new byte[N_GROUPS][MAX_ALPHA_SIZE]; // 1548 + // byte + final int[][] sendMTFValues_rfreq = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 + // byte + final int[] sendMTFValues_fave = new int[N_GROUPS]; // 24 byte + final short[] sendMTFValues_cost = new short[N_GROUPS]; // 12 byte + final int[][] sendMTFValues_code = new int[N_GROUPS][MAX_ALPHA_SIZE]; // 6192 + // byte + final byte[] sendMTFValues2_pos = new byte[N_GROUPS]; // 6 byte + final boolean[] sentMTFValues4_inUse16 = new boolean[16]; // 16 byte + + final int[] heap = new int[MAX_ALPHA_SIZE + 2]; // 1040 byte + final int[] weight = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte + final int[] parent = new int[MAX_ALPHA_SIZE * 2]; // 2064 byte + + // ------------ + // 333408 byte + + /* holds the RLEd block of original data starting at index 1. + * After sorting the last byte added to the buffer is at index + * 0. */ + final byte[] block; // 900021 byte + /* maps index in Burrows-Wheeler transformed block => index of + * byte in original block */ + final int[] fmap; // 3600000 byte + final char[] sfmap; // 3600000 byte + // ------------ + // 8433529 byte + // ============ + + /** + * Index of original line in Burrows-Wheeler table. + * + *

    This is the index in fmap that points to the last byte + * of the original data.

    + */ + int origPtr; + + Data(int blockSize100k) { + final int n = blockSize100k * BZip2Constants.BASEBLOCKSIZE; + this.block = new byte[(n + 1 + NUM_OVERSHOOT_BYTES)]; + this.fmap = new int[n]; + this.sfmap = new char[2 * n]; + } + + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Constants.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Constants.java new file mode 100644 index 000000000..9a8b9c4c0 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Constants.java @@ -0,0 +1,38 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.bzip2; + +/** + * Constants for both the compress and decompress BZip2 classes. 
+ */ +interface BZip2Constants { + + int BASEBLOCKSIZE = 100000; + int MAX_ALPHA_SIZE = 258; + int MAX_CODE_LEN = 23; + int RUNA = 0; + int RUNB = 1; + int N_GROUPS = 6; + int G_SIZE = 50; + int N_ITERS = 4; + int MAX_SELECTORS = (2 + (900000 / G_SIZE)); + int NUM_OVERSHOOT_BYTES = 20; + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java new file mode 100644 index 000000000..e56283512 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BZip2Utils.java @@ -0,0 +1,93 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.bzip2; + +import java.util.LinkedHashMap; +import java.util.Map; +import org.apache.commons.compress.compressors.FileNameUtil; + +/** + * Utility code for the BZip2 compression format. 
+ * @ThreadSafe + * @since 1.1 + */ +public abstract class BZip2Utils { + + private static final FileNameUtil fileNameUtil; + + static { + Map uncompressSuffix = + new LinkedHashMap(); + // backwards compatibilty: BZip2Utils never created the short + // tbz form, so .tar.bz2 has to be added explicitly + uncompressSuffix.put(".tar.bz2", ".tar"); + uncompressSuffix.put(".tbz2", ".tar"); + uncompressSuffix.put(".tbz", ".tar"); + uncompressSuffix.put(".bz2", ""); + uncompressSuffix.put(".bz", ""); + fileNameUtil = new FileNameUtil(uncompressSuffix, ".bz2"); + } + + /** Private constructor to prevent instantiation of this utility class. */ + private BZip2Utils() { + } + + /** + * Detects common bzip2 suffixes in the given filename. + * + * @param filename name of a file + * @return {@code true} if the filename has a common bzip2 suffix, + * {@code false} otherwise + */ + public static boolean isCompressedFilename(String filename) { + return fileNameUtil.isCompressedFilename(filename); + } + + /** + * Maps the given name of a bzip2-compressed file to the name that the + * file should have after uncompression. Commonly used file type specific + * suffixes like ".tbz" or ".tbz2" are automatically detected and + * correctly mapped. For example the name "package.tbz2" is mapped to + * "package.tar". And any filenames with the generic ".bz2" suffix + * (or any other generic bzip2 suffix) is mapped to a name without that + * suffix. If no bzip2 suffix is detected, then the filename is returned + * unmapped. + * + * @param filename name of a file + * @return name of the corresponding uncompressed file + */ + public static String getUncompressedFilename(String filename) { + return fileNameUtil.getUncompressedFilename(filename); + } + + /** + * Maps the given filename to the name that the file should have after + * compression with bzip2. 
Currently this method simply appends the suffix + * ".bz2" to the filename based on the standard behaviour of the "bzip2" + * program, but a future version may implement a more complex mapping if + * a new widely used naming pattern emerges. + * + * @param filename name of a file + * @return name of the corresponding compressed file + */ + public static String getCompressedFilename(String filename) { + return fileNameUtil.getCompressedFilename(filename); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BlockSort.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BlockSort.java new file mode 100644 index 000000000..c7c26b23d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/BlockSort.java @@ -0,0 +1,1081 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.bzip2; + +import java.util.BitSet; + +/** + * Encapsulates the Burrows-Wheeler sorting algorithm needed by {@link + * BZip2CompressorOutputStream}. + * + *

    This class is based on a Java port of Julian Seward's + * blocksort.c in his libbzip2

    + * + *

    The Burrows-Wheeler transform is a reversible transform of the + * original data that is supposed to group similiar bytes close to + * each other. The idea is to sort all permutations of the input and + * only keep the last byte of each permutation. E.g. for "Commons + * Compress" you'd get:

    + * + *
    + *  CompressCommons
    + * Commons Compress
    + * CompressCommons 
    + * essCommons Compr
    + * mmons CompressCo
    + * mons CompressCom
    + * mpressCommons Co
    + * ns CompressCommo
    + * ommons CompressC
    + * ompressCommons C
    + * ons CompressComm
    + * pressCommons Com
    + * ressCommons Comp
    + * s CompressCommon
    + * sCommons Compres
    + * ssCommons Compre
    + * 
    + * + *

    Which results in a new text "ss romooCCmmpnse", in adition the + * index of the first line that contained the original text is kept - + * in this case it is 1. The idea is that in a long English text all + * permutations that start with "he" are likely suffixes of a "the" and + * thus they end in "t" leading to a larger block of "t"s that can + * better be compressed by the subsequent Move-to-Front, run-length + * und Huffman encoding steps.

    + * + *

    For more information see for example:

    + * + * + * @NotThreadSafe + */ +class BlockSort { + + /* + * Some of the constructs used in the C code cannot be ported + * literally to Java - for example macros, unsigned types. Some + * code has been hand-tuned to improve performance. In order to + * avoid memory pressure some structures are reused for several + * blocks and some memory is even shared between sorting and the + * MTF stage even though either algorithm uses it for its own + * purpose. + * + * Comments preserved from the actual C code are prefixed with + * "LBZ2:". + */ + + /* + * 2012-05-20 Stefan Bodewig: + * + * This class seems to mix several revisions of libbzip2's code. + * The mainSort function and those used by it look closer to the + * 0.9.5 version but show some variations introduced later. At + * the same time the logic of Compress 1.4 to randomize the block + * on bad input has been dropped after libbzip2 0.9.0 and replaced + * by a fallback sorting algorithm. + * + * I've added the fallbackSort function of 1.0.6 and tried to + * integrate it with the existing code without touching too much. + * I've also removed the now unused randomization code. + */ + + /* + * LBZ2: If you are ever unlucky/improbable enough to get a stack + * overflow whilst sorting, increase the following constant and + * try again. In practice I have never seen the stack go above 27 + * elems, so the following limit seems very generous. + */ + private static final int QSORT_STACK_SIZE = 1000; + + private static final int FALLBACK_QSORT_STACK_SIZE = 100; + + private static final int STACK_SIZE = + QSORT_STACK_SIZE < FALLBACK_QSORT_STACK_SIZE + ? FALLBACK_QSORT_STACK_SIZE : QSORT_STACK_SIZE; + + /* + * Used when sorting. If too many long comparisons happen, we stop sorting, + * and use fallbackSort instead. 
+ */ + private int workDone; + private int workLimit; + private boolean firstAttempt; + + private final int[] stack_ll = new int[STACK_SIZE]; // 4000 byte + private final int[] stack_hh = new int[STACK_SIZE]; // 4000 byte + private final int[] stack_dd = new int[QSORT_STACK_SIZE]; // 4000 byte + + private final int[] mainSort_runningOrder = new int[256]; // 1024 byte + private final int[] mainSort_copy = new int[256]; // 1024 byte + private final boolean[] mainSort_bigDone = new boolean[256]; // 256 byte + + private final int[] ftab = new int[65537]; // 262148 byte + + /** + * Array instance identical to Data's sfmap, both are used only + * temporarily and indepently, so we do not need to allocate + * additional memory. + */ + private final char[] quadrant; + + BlockSort(final BZip2CompressorOutputStream.Data data) { + this.quadrant = data.sfmap; + } + + void blockSort(final BZip2CompressorOutputStream.Data data, final int last) { + this.workLimit = WORK_FACTOR * last; + this.workDone = 0; + this.firstAttempt = true; + + if (last + 1 < 10000) { + fallbackSort(data, last); + } else { + mainSort(data, last); + + if (this.firstAttempt && (this.workDone > this.workLimit)) { + fallbackSort(data, last); + } + } + + final int[] fmap = data.fmap; + data.origPtr = -1; + for (int i = 0; i <= last; i++) { + if (fmap[i] == 0) { + data.origPtr = i; + break; + } + } + + // assert (data.origPtr != -1) : data.origPtr; + } + + /** + * Adapt fallbackSort to the expected interface of the rest of the + * code, in particular deal with the fact that block starts at + * offset 1 (in libbzip2 1.0.6 it starts at 0). 
+ */ + final void fallbackSort(final BZip2CompressorOutputStream.Data data, + final int last) { + data.block[0] = data.block[last + 1]; + fallbackSort(data.fmap, data.block, last + 1); + for (int i = 0; i < last + 1; i++) { + --data.fmap[i]; + } + for (int i = 0; i < last + 1; i++) { + if (data.fmap[i] == -1) { + data.fmap[i] = last; + break; + } + } + } + +/*---------------------------------------------*/ + +/*---------------------------------------------*/ +/*--- LBZ2: Fallback O(N log(N)^2) sorting ---*/ +/*--- algorithm, for repetitive blocks ---*/ +/*---------------------------------------------*/ + + /* + * This is the fallback sorting algorithm libbzip2 1.0.6 uses for + * repetitive or very short inputs. + * + * The idea is inspired by Manber-Myers string suffix sorting + * algorithm. First a bucket sort places each permutation of the + * block into a bucket based on its first byte. Permutations are + * represented by pointers to their first character kept in + * (partially) sorted order inside the array ftab. + * + * The next step visits all buckets in order and performs a + * quicksort on all permutations of the bucket based on the index + * of the bucket the second byte of the permutation belongs to, + * thereby forming new buckets. When arrived here the + * permutations are sorted up to the second character and we have + * buckets of permutations that are identical up to two + * characters. + * + * Repeat the step of quicksorting each bucket, now based on the + * bucket holding the sequence of the third and forth character + * leading to four byte buckets. Repeat this doubling of bucket + * sizes until all buckets only contain single permutations or the + * bucket size exceeds the block size. + * + * I.e. 
+ * + * "abraba" form three buckets for the chars "a", "b", and "r" in + * the first step with + * + * fmap = { 'a:' 5, 3, 0, 'b:' 4, 1, 'r', 2 } + * + * when looking at the bucket of "a"s the second characters are in + * the buckets that start with fmap-index 0 (rolled over), 3 and 3 + * respectively, forming two new buckets "aa" and "ab", so we get + * + * fmap = { 'aa:' 5, 'ab:' 3, 0, 'ba:' 4, 'br': 1, 'ra:' 2 } + * + * since the last bucket only contained a single item it didn't + * have to be sorted at all. + * + * There now is just one bucket with more than one permutation + * that remains to be sorted. For the permutation that starts + * with index 3 the third and forth char are in bucket 'aa' at + * index 0 and for the one starting at block index 0 they are in + * bucket 'ra' with sort index 5. The fully sorted order then becomes. + * + * fmap = { 5, 3, 0, 4, 1, 2 } + * + */ + + /** + * @param fmap points to the index of the starting point of a + * permutation inside the block of data in the current + * partially sorted order + * @param eclass points from the index of a character inside the + * block to the first index in fmap that contains the + * bucket of its suffix that is sorted in this step. 
+ * @param lo lower boundary of the fmap-interval to be sorted + * @param hi upper boundary of the fmap-interval to be sorted + */ + private void fallbackSimpleSort(int[] fmap, + int[] eclass, + int lo, + int hi) { + if (lo == hi) { + return; + } + + int j; + if (hi - lo > 3) { + for (int i = hi - 4; i >= lo; i--) { + int tmp = fmap[i]; + int ec_tmp = eclass[tmp]; + for (j = i + 4; j <= hi && ec_tmp > eclass[fmap[j]]; + j += 4) { + fmap[j - 4] = fmap[j]; + } + fmap[j - 4] = tmp; + } + } + + for (int i = hi - 1; i >= lo; i--) { + int tmp = fmap[i]; + int ec_tmp = eclass[tmp]; + for (j = i + 1; j <= hi && ec_tmp > eclass[fmap[j]]; j++) { + fmap[j - 1] = fmap[j]; + } + fmap[j-1] = tmp; + } + } + + private static final int FALLBACK_QSORT_SMALL_THRESH = 10; + + /** + * swaps two values in fmap + */ + private void fswap(int[] fmap, int zz1, int zz2) { + int zztmp = fmap[zz1]; + fmap[zz1] = fmap[zz2]; + fmap[zz2] = zztmp; + } + + /** + * swaps two intervals starting at yyp1 and yyp2 of length yyn inside fmap. + */ + private void fvswap(int[] fmap, int yyp1, int yyp2, int yyn) { + while (yyn > 0) { + fswap(fmap, yyp1, yyp2); + yyp1++; yyp2++; yyn--; + } + } + + private int fmin(int a, int b) { + return a < b ? a : b; + } + + private void fpush(int sp, int lz, int hz) { + stack_ll[sp] = lz; + stack_hh[sp] = hz; + } + + private int[] fpop(int sp) { + return new int[] { stack_ll[sp], stack_hh[sp] }; + } + + /** + * @param fmap points to the index of the starting point of a + * permutation inside the block of data in the current + * partially sorted order + * @param eclass points from the index of a character inside the + * block to the first index in fmap that contains the + * bucket of its suffix that is sorted in this step. 
+ * @param loSt lower boundary of the fmap-interval to be sorted + * @param hiSt upper boundary of the fmap-interval to be sorted + */ + private void fallbackQSort3(int[] fmap, + int[] eclass, + int loSt, + int hiSt) { + int lo, unLo, ltLo, hi, unHi, gtHi, n; + + long r = 0; + int sp = 0; + fpush(sp++, loSt, hiSt); + + while (sp > 0) { + int[] s = fpop(--sp); + lo = s[0]; hi = s[1]; + + if (hi - lo < FALLBACK_QSORT_SMALL_THRESH) { + fallbackSimpleSort(fmap, eclass, lo, hi); + continue; + } + + /* LBZ2: Random partitioning. Median of 3 sometimes fails to + avoid bad cases. Median of 9 seems to help but + looks rather expensive. This too seems to work but + is cheaper. Guidance for the magic constants + 7621 and 32768 is taken from Sedgewick's algorithms + book, chapter 35. + */ + r = ((r * 7621) + 1) % 32768; + long r3 = r % 3, med; + if (r3 == 0) { + med = eclass[fmap[lo]]; + } else if (r3 == 1) { + med = eclass[fmap[(lo + hi) >>> 1]]; + } else { + med = eclass[fmap[hi]]; + } + + unLo = ltLo = lo; + unHi = gtHi = hi; + + // looks like the ternary partition attributed to Wegner + // in the cited Sedgewick paper + while (true) { + while (true) { + if (unLo > unHi) { + break; + } + n = eclass[fmap[unLo]] - (int) med; + if (n == 0) { + fswap(fmap, unLo, ltLo); + ltLo++; unLo++; + continue; + } + if (n > 0) { + break; + } + unLo++; + } + while (true) { + if (unLo > unHi) { + break; + } + n = eclass[fmap[unHi]] - (int) med; + if (n == 0) { + fswap(fmap, unHi, gtHi); + gtHi--; unHi--; + continue; + } + if (n < 0) { + break; + } + unHi--; + } + if (unLo > unHi) { + break; + } + fswap(fmap, unLo, unHi); unLo++; unHi--; + } + + if (gtHi < ltLo) { + continue; + } + + n = fmin(ltLo - lo, unLo - ltLo); + fvswap(fmap, lo, unLo - n, n); + int m = fmin(hi - gtHi, gtHi - unHi); + fvswap(fmap, unHi + 1, hi - m + 1, m); + + n = lo + unLo - ltLo - 1; + m = hi - (gtHi - unHi) + 1; + + if (n - lo > hi - m) { + fpush(sp++, lo, n); + fpush(sp++, m, hi); + } else { + fpush(sp++, m, hi); + 
fpush(sp++, lo, n); + } + } + } + + +/*---------------------------------------------*/ + + private int[] eclass; + + private int[] getEclass() { + return eclass == null + ? (eclass = new int[quadrant.length / 2]) : eclass; + } + + /* + * The C code uses an array of ints (each int holding 32 flags) to + * represents the bucket-start flags (bhtab). It also contains + * optimizations to skip over 32 consecutively set or + * consecutively unset bits on word boundaries at once. For now + * I've chosen to use the simpler but potentially slower code + * using BitSet - also in the hope that using the BitSet#nextXXX + * methods may be fast enough. + */ + + /** + * @param fmap points to the index of the starting point of a + * permutation inside the block of data in the current + * partially sorted order + * @param block the original data + * @param nblock size of the block + * @param off offset of first byte to sort in block + */ + final void fallbackSort(int[] fmap, byte[] block, int nblock) { + final int[] ftab = new int[257]; + int H, i, j, k, l, r, cc, cc1; + int nNotDone; + int nBhtab; + final int[] eclass = getEclass(); + + for (i = 0; i < nblock; i++) { + eclass[i] = 0; + } + /*-- + LBZ2: Initial 1-char radix sort to generate + initial fmap and initial BH bits. + --*/ + for (i = 0; i < nblock; i++) { + ftab[block[i] & 0xff]++; + } + for (i = 1; i < 257; i++) { + ftab[i] += ftab[i - 1]; + } + + for (i = 0; i < nblock; i++) { + j = block[i] & 0xff; + k = ftab[j] - 1; + ftab[j] = k; + fmap[k] = i; + } + + nBhtab = 64 + nblock; + BitSet bhtab = new BitSet(nBhtab); + for (i = 0; i < 256; i++) { + bhtab.set(ftab[i]); + } + + /*-- + LBZ2: Inductively refine the buckets. Kind-of an + "exponential radix sort" (!), inspired by the + Manber-Myers suffix array construction algorithm. 
+ --*/ + + /*-- LBZ2: set sentinel bits for block-end detection --*/ + for (i = 0; i < 32; i++) { + bhtab.set(nblock + 2 * i); + bhtab.clear(nblock + 2 * i + 1); + } + + /*-- LBZ2: the log(N) loop --*/ + H = 1; + while (true) { + + j = 0; + for (i = 0; i < nblock; i++) { + if (bhtab.get(i)) { + j = i; + } + k = fmap[i] - H; + if (k < 0) { + k += nblock; + } + eclass[k] = j; + } + + nNotDone = 0; + r = -1; + while (true) { + + /*-- LBZ2: find the next non-singleton bucket --*/ + k = r + 1; + k = bhtab.nextClearBit(k); + l = k - 1; + if (l >= nblock) { + break; + } + k = bhtab.nextSetBit(k + 1); + r = k - 1; + if (r >= nblock) { + break; + } + + /*-- LBZ2: now [l, r] bracket current bucket --*/ + if (r > l) { + nNotDone += (r - l + 1); + fallbackQSort3(fmap, eclass, l, r); + + /*-- LBZ2: scan bucket and generate header bits-- */ + cc = -1; + for (i = l; i <= r; i++) { + cc1 = eclass[fmap[i]]; + if (cc != cc1) { + bhtab.set(i); + cc = cc1; + } + } + } + } + + H *= 2; + if (H > nblock || nNotDone == 0) { + break; + } + } + } + +/*---------------------------------------------*/ + + /* + * LBZ2: Knuth's increments seem to work better than Incerpi-Sedgewick here. + * Possibly because the number of elems to sort is usually small, typically + * <= 20. + */ + private static final int[] INCS = { 1, 4, 13, 40, 121, 364, 1093, 3280, + 9841, 29524, 88573, 265720, 797161, + 2391484 }; + + /** + * This is the most hammered method of this class. + * + *

    + * This is the version using unrolled loops. Normally I never use such ones + * in Java code. The unrolling has shown a noticeable performance improvement + * on JRE 1.4.2 (Linux i586 / HotSpot Client). Of course it depends on the + * JIT compiler of the vm. + *

    + */ + private boolean mainSimpleSort(final BZip2CompressorOutputStream.Data dataShadow, + final int lo, final int hi, final int d, + final int lastShadow) { + final int bigN = hi - lo + 1; + if (bigN < 2) { + return this.firstAttempt && (this.workDone > this.workLimit); + } + + int hp = 0; + while (INCS[hp] < bigN) { + hp++; + } + + final int[] fmap = dataShadow.fmap; + final char[] quadrant = this.quadrant; + final byte[] block = dataShadow.block; + final int lastPlus1 = lastShadow + 1; + final boolean firstAttemptShadow = this.firstAttempt; + final int workLimitShadow = this.workLimit; + int workDoneShadow = this.workDone; + + // Following block contains unrolled code which could be shortened by + // coding it in additional loops. + + HP: while (--hp >= 0) { + final int h = INCS[hp]; + final int mj = lo + h - 1; + + for (int i = lo + h; i <= hi;) { + // copy + for (int k = 3; (i <= hi) && (--k >= 0); i++) { + final int v = fmap[i]; + final int vd = v + d; + int j = i; + + // for (int a; + // (j > mj) && mainGtU((a = fmap[j - h]) + d, vd, + // block, quadrant, lastShadow); + // j -= h) { + // fmap[j] = a; + // } + // + // unrolled version: + + // start inline mainGTU + boolean onceRunned = false; + int a = 0; + + HAMMER: while (true) { + if (onceRunned) { + fmap[j] = a; + if ((j -= h) <= mj) { + break HAMMER; + } + } else { + onceRunned = true; + } + + a = fmap[j - h]; + int i1 = a + d; + int i2 = vd; + + // following could be done in a loop, but + // unrolled it for performance: + if (block[i1 + 1] == block[i2 + 1]) { + if (block[i1 + 2] == block[i2 + 2]) { + if (block[i1 + 3] == block[i2 + 3]) { + if (block[i1 + 4] == block[i2 + 4]) { + if (block[i1 + 5] == block[i2 + 5]) { + if (block[(i1 += 6)] == block[(i2 += 6)]) { + int x = lastShadow; + X: while (x > 0) { + x -= 4; + + if (block[i1 + 1] == block[i2 + 1]) { + if (quadrant[i1] == quadrant[i2]) { + if (block[i1 + 2] == block[i2 + 2]) { + if (quadrant[i1 + 1] == quadrant[i2 + 1]) { + if (block[i1 + 3] == 
block[i2 + 3]) { + if (quadrant[i1 + 2] == quadrant[i2 + 2]) { + if (block[i1 + 4] == block[i2 + 4]) { + if (quadrant[i1 + 3] == quadrant[i2 + 3]) { + if ((i1 += 4) >= lastPlus1) { + i1 -= lastPlus1; + } + if ((i2 += 4) >= lastPlus1) { + i2 -= lastPlus1; + } + workDoneShadow++; + continue X; + } else if ((quadrant[i1 + 3] > quadrant[i2 + 3])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1 + 2] > quadrant[i2 + 2])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1 + 1] > quadrant[i2 + 1])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((quadrant[i1] > quadrant[i2])) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + + } + break HAMMER; + } // while x > 0 + else { + if ((block[i1] & 0xff) > (block[i2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } + } else if ((block[i1 + 5] & 0xff) > (block[i2 + 5] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 4] & 0xff) > (block[i2 + 4] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 3] & 0xff) > (block[i2 + 3] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 2] & 0xff) > (block[i2 + 2] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + } else if ((block[i1 + 1] & 0xff) > (block[i2 + 1] & 0xff)) { + continue HAMMER; + } else { + break HAMMER; + } + + } // HAMMER + // end inline mainGTU + + fmap[j] = v; + } + + if (firstAttemptShadow && (i <= hi) + && 
(workDoneShadow > workLimitShadow)) { + break HP; + } + } + } + + this.workDone = workDoneShadow; + return firstAttemptShadow && (workDoneShadow > workLimitShadow); + } + +/*-- + LBZ2: The following is an implementation of + an elegant 3-way quicksort for strings, + described in a paper "Fast Algorithms for + Sorting and Searching Strings", by Robert + Sedgewick and Jon L. Bentley. +--*/ + + private static void vswap(int[] fmap, int p1, int p2, int n) { + n += p1; + while (p1 < n) { + int t = fmap[p1]; + fmap[p1++] = fmap[p2]; + fmap[p2++] = t; + } + } + + private static byte med3(byte a, byte b, byte c) { + return (a < b) ? (b < c ? b : a < c ? c : a) : (b > c ? b : a > c ? c + : a); + } + + private static final int SMALL_THRESH = 20; + private static final int DEPTH_THRESH = 10; + private static final int WORK_FACTOR = 30; + + /** + * Method "mainQSort3", file "blocksort.c", BZip2 1.0.2 + */ + private void mainQSort3(final BZip2CompressorOutputStream.Data dataShadow, + final int loSt, final int hiSt, final int dSt, + final int last) { + final int[] stack_ll = this.stack_ll; + final int[] stack_hh = this.stack_hh; + final int[] stack_dd = this.stack_dd; + final int[] fmap = dataShadow.fmap; + final byte[] block = dataShadow.block; + + stack_ll[0] = loSt; + stack_hh[0] = hiSt; + stack_dd[0] = dSt; + + for (int sp = 1; --sp >= 0;) { + final int lo = stack_ll[sp]; + final int hi = stack_hh[sp]; + final int d = stack_dd[sp]; + + if ((hi - lo < SMALL_THRESH) || (d > DEPTH_THRESH)) { + if (mainSimpleSort(dataShadow, lo, hi, d, last)) { + return; + } + } else { + final int d1 = d + 1; + final int med = med3(block[fmap[lo] + d1], + block[fmap[hi] + d1], block[fmap[(lo + hi) >>> 1] + d1]) & 0xff; + + int unLo = lo; + int unHi = hi; + int ltLo = lo; + int gtHi = hi; + + while (true) { + while (unLo <= unHi) { + final int n = (block[fmap[unLo] + d1] & 0xff) + - med; + if (n == 0) { + final int temp = fmap[unLo]; + fmap[unLo++] = fmap[ltLo]; + fmap[ltLo++] = temp; + } else if 
(n < 0) { + unLo++; + } else { + break; + } + } + + while (unLo <= unHi) { + final int n = (block[fmap[unHi] + d1] & 0xff) + - med; + if (n == 0) { + final int temp = fmap[unHi]; + fmap[unHi--] = fmap[gtHi]; + fmap[gtHi--] = temp; + } else if (n > 0) { + unHi--; + } else { + break; + } + } + + if (unLo <= unHi) { + final int temp = fmap[unLo]; + fmap[unLo++] = fmap[unHi]; + fmap[unHi--] = temp; + } else { + break; + } + } + + if (gtHi < ltLo) { + stack_ll[sp] = lo; + stack_hh[sp] = hi; + stack_dd[sp] = d1; + sp++; + } else { + int n = ((ltLo - lo) < (unLo - ltLo)) ? (ltLo - lo) + : (unLo - ltLo); + vswap(fmap, lo, unLo - n, n); + int m = ((hi - gtHi) < (gtHi - unHi)) ? (hi - gtHi) + : (gtHi - unHi); + vswap(fmap, unLo, hi - m + 1, m); + + n = lo + unLo - ltLo - 1; + m = hi - (gtHi - unHi) + 1; + + stack_ll[sp] = lo; + stack_hh[sp] = n; + stack_dd[sp] = d; + sp++; + + stack_ll[sp] = n + 1; + stack_hh[sp] = m - 1; + stack_dd[sp] = d1; + sp++; + + stack_ll[sp] = m; + stack_hh[sp] = hi; + stack_dd[sp] = d; + sp++; + } + } + } + } + + private static final int SETMASK = (1 << 21); + private static final int CLEARMASK = (~SETMASK); + + final void mainSort(final BZip2CompressorOutputStream.Data dataShadow, + final int lastShadow) { + final int[] runningOrder = this.mainSort_runningOrder; + final int[] copy = this.mainSort_copy; + final boolean[] bigDone = this.mainSort_bigDone; + final int[] ftab = this.ftab; + final byte[] block = dataShadow.block; + final int[] fmap = dataShadow.fmap; + final char[] quadrant = this.quadrant; + final int workLimitShadow = this.workLimit; + final boolean firstAttemptShadow = this.firstAttempt; + + // LBZ2: Set up the 2-byte frequency table + for (int i = 65537; --i >= 0;) { + ftab[i] = 0; + } + + /* + * In the various block-sized structures, live data runs from 0 to + * last+NUM_OVERSHOOT_BYTES inclusive. First, set up the overshoot area + * for block. 
+ */ + for (int i = 0; i < BZip2Constants.NUM_OVERSHOOT_BYTES; i++) { + block[lastShadow + i + 2] = block[(i % (lastShadow + 1)) + 1]; + } + for (int i = lastShadow + BZip2Constants.NUM_OVERSHOOT_BYTES +1; --i >= 0;) { + quadrant[i] = 0; + } + block[0] = block[lastShadow + 1]; + + // LBZ2: Complete the initial radix sort: + + int c1 = block[0] & 0xff; + for (int i = 0; i <= lastShadow; i++) { + final int c2 = block[i + 1] & 0xff; + ftab[(c1 << 8) + c2]++; + c1 = c2; + } + + for (int i = 1; i <= 65536; i++) { + ftab[i] += ftab[i - 1]; + } + + c1 = block[1] & 0xff; + for (int i = 0; i < lastShadow; i++) { + final int c2 = block[i + 2] & 0xff; + fmap[--ftab[(c1 << 8) + c2]] = i; + c1 = c2; + } + + fmap[--ftab[((block[lastShadow + 1] & 0xff) << 8) + (block[1] & 0xff)]] = lastShadow; + + /* + * LBZ2: Now ftab contains the first loc of every small bucket. Calculate the + * running order, from smallest to largest big bucket. + */ + for (int i = 256; --i >= 0;) { + bigDone[i] = false; + runningOrder[i] = i; + } + + for (int h = 364; h != 1;) { + h /= 3; + for (int i = h; i <= 255; i++) { + final int vv = runningOrder[i]; + final int a = ftab[(vv + 1) << 8] - ftab[vv << 8]; + final int b = h - 1; + int j = i; + for (int ro = runningOrder[j - h]; (ftab[(ro + 1) << 8] - ftab[ro << 8]) > a; ro = runningOrder[j + - h]) { + runningOrder[j] = ro; + j -= h; + if (j <= b) { + break; + } + } + runningOrder[j] = vv; + } + } + + /* + * LBZ2: The main sorting loop. + */ + for (int i = 0; i <= 255; i++) { + /* + * LBZ2: Process big buckets, starting with the least full. + */ + final int ss = runningOrder[i]; + + // Step 1: + /* + * LBZ2: Complete the big bucket [ss] by quicksorting any unsorted small + * buckets [ss, j]. Hopefully previous pointer-scanning phases have + * already completed many of the small buckets [ss, j], so we don't + * have to sort them at all. 
+ */ + for (int j = 0; j <= 255; j++) { + final int sb = (ss << 8) + j; + final int ftab_sb = ftab[sb]; + if ((ftab_sb & SETMASK) != SETMASK) { + final int lo = ftab_sb & CLEARMASK; + final int hi = (ftab[sb + 1] & CLEARMASK) - 1; + if (hi > lo) { + mainQSort3(dataShadow, lo, hi, 2, lastShadow); + if (firstAttemptShadow + && (this.workDone > workLimitShadow)) { + return; + } + } + ftab[sb] = ftab_sb | SETMASK; + } + } + + // Step 2: + // LBZ2: Now scan this big bucket so as to synthesise the + // sorted order for small buckets [t, ss] for all t != ss. + + for (int j = 0; j <= 255; j++) { + copy[j] = ftab[(j << 8) + ss] & CLEARMASK; + } + + for (int j = ftab[ss << 8] & CLEARMASK, hj = (ftab[(ss + 1) << 8] & CLEARMASK); j < hj; j++) { + final int fmap_j = fmap[j]; + c1 = block[fmap_j] & 0xff; + if (!bigDone[c1]) { + fmap[copy[c1]] = (fmap_j == 0) ? lastShadow : (fmap_j - 1); + copy[c1]++; + } + } + + for (int j = 256; --j >= 0;) { + ftab[(j << 8) + ss] |= SETMASK; + } + + // Step 3: + /* + * LBZ2: The ss big bucket is now done. Record this fact, and update the + * quadrant descriptors. Remember to update quadrants in the + * overshoot area too, if necessary. The "if (i < 255)" test merely + * skips this updating for the last bucket processed, since updating + * for the last bucket is pointless. 
+ */ + bigDone[ss] = true; + + if (i < 255) { + final int bbStart = ftab[ss << 8] & CLEARMASK; + final int bbSize = (ftab[(ss + 1) << 8] & CLEARMASK) - bbStart; + int shifts = 0; + + while ((bbSize >> shifts) > 65534) { + shifts++; + } + + for (int j = 0; j < bbSize; j++) { + final int a2update = fmap[bbStart + j]; + final char qVal = (char) (j >> shifts); + quadrant[a2update] = qVal; + if (a2update < BZip2Constants.NUM_OVERSHOOT_BYTES) { + quadrant[a2update + lastShadow + 1] = qVal; + } + } + } + + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/CRC.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/CRC.java new file mode 100644 index 000000000..ec0502b5d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/CRC.java @@ -0,0 +1,134 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.bzip2; + +/** + * A simple class the hold and calculate the CRC for sanity checking of the + * data. 
+ * @NotThreadSafe + */ +class CRC { + private static final int crc32Table[] = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 
0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + }; + + CRC() { + initialiseCRC(); + } + + void initialiseCRC() { + globalCrc = 0xffffffff; + } + + int getFinalCRC() { + return ~globalCrc; + } + + int getGlobalCRC() { + return globalCrc; + } + + void setGlobalCRC(int newCrc) { + globalCrc = newCrc; + } + + void updateCRC(int inCh) { + int temp = (globalCrc >> 24) ^ inCh; + if (temp < 0) { + temp = 256 + temp; + } + globalCrc = (globalCrc << 8) ^ CRC.crc32Table[temp]; + } + + void updateCRC(int inCh, int repeat) { + int globalCrcShadow = this.globalCrc; + while (repeat-- > 0) { + int temp = (globalCrcShadow >> 24) ^ inCh; + globalCrcShadow = (globalCrcShadow << 8) ^ crc32Table[(temp >= 0) + ? 
temp + : (temp + 256)]; + } + this.globalCrc = globalCrcShadow; + } + + private int globalCrc; +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/Rand.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/Rand.java new file mode 100644 index 000000000..0c08d1f7e --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/Rand.java @@ -0,0 +1,91 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.bzip2; + +/** + * Random numbers for both the compress and decompress BZip2 classes. 
+ */ +final class Rand { + + private static final int[] RNUMS = { + 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, + 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, + 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, + 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, + 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, + 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, + 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, + 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, + 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, + 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, + 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, + 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, + 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, + 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, + 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, + 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, + 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, + 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, + 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, + 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, + 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, + 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, + 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, + 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, + 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, + 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, + 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, + 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, + 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, + 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, + 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, + 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, + 946, 670, 656, 610, 738, 392, 760, 799, 887, 653, + 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, + 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, + 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, + 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, + 134, 108, 571, 364, 631, 
212, 174, 643, 304, 329, + 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, + 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, + 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, + 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, + 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, + 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, + 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, + 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, + 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, + 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, + 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, + 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, + 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, + 936, 638 + }; + + /** + * Return the random number at a specific index. + * + * @param i the index + * @return the random number + */ + static int rNums(int i){ + return RNUMS[i]; + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/package.html new file mode 100644 index 000000000..fe27e6e66 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/bzip2/package.html @@ -0,0 +1,24 @@ + + + +

    Provides stream classes for compressing and decompressing + streams using the BZip2 algorithm.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java new file mode 100644 index 000000000..bff7021a1 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorInputStream.java @@ -0,0 +1,357 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.gzip; + +import java.io.IOException; +import java.io.EOFException; +import java.io.InputStream; +import java.io.DataInputStream; +import java.io.BufferedInputStream; +import java.util.zip.DataFormatException; +import java.util.zip.Inflater; +import java.util.zip.CRC32; + +import org.apache.commons.compress.compressors.CompressorInputStream; + +/** + * Input stream that decompresses .gz files. + * This supports decompressing concatenated .gz files which is important + * when decompressing standalone .gz files. + *

    + * {@link java.util.zip.GZIPInputStream} doesn't decompress concatenated .gz + * files: it stops after the first member and silently ignores the rest. + * It doesn't leave the read position to point to the beginning of the next + * member, which makes it difficult workaround the lack of concatenation + * support. + *

    + * Instead of using GZIPInputStream, this class has its own .gz + * container format decoder. The actual decompression is done with + * {@link java.util.zip.Inflater}. + */ +public class GzipCompressorInputStream extends CompressorInputStream { + // Header flags + // private static final int FTEXT = 0x01; // Uninteresting for us + private static final int FHCRC = 0x02; + private static final int FEXTRA = 0x04; + private static final int FNAME = 0x08; + private static final int FCOMMENT = 0x10; + private static final int FRESERVED = 0xE0; + + // Compressed input stream, possibly wrapped in a BufferedInputStream + private final InputStream in; + + // True if decompressing multimember streams. + private final boolean decompressConcatenated; + + // Buffer to hold the input data + private final byte[] buf = new byte[8192]; + + // Amount of data in buf. + private int bufUsed = 0; + + // Decompressor + private Inflater inf = new Inflater(true); + + // CRC32 from uncompressed data + private final CRC32 crc = new CRC32(); + + private int memberSize; + + // True once everything has been decompressed + private boolean endReached = false; + + // used in no-arg read method + private final byte[] oneByte = new byte[1]; + + /** + * Constructs a new input stream that decompresses gzip-compressed data + * from the specified input stream. + *

    + * This is equivalent to + * GzipCompressorInputStream(inputStream, false) and thus + * will not decompress concatenated .gz files. + * + * @param inputStream the InputStream from which this object should + * be created of + * + * @throws IOException if the stream could not be created + */ + public GzipCompressorInputStream(InputStream inputStream) + throws IOException { + this(inputStream, false); + } + + /** + * Constructs a new input stream that decompresses gzip-compressed data + * from the specified input stream. + *

    + * If decompressConcatenated is {@code false}: + * This decompressor might read more input than it will actually use. + * If inputStream supports mark and + * reset, then the input position will be adjusted + * so that it is right after the last byte of the compressed stream. + * If mark isn't supported, the input position will be + * undefined. + * + * @param inputStream the InputStream from which this object should + * be created of + * @param decompressConcatenated + * if true, decompress until the end of the input; + * if false, stop after the first .gz member + * + * @throws IOException if the stream could not be created + */ + public GzipCompressorInputStream(InputStream inputStream, + boolean decompressConcatenated) + throws IOException { + // Mark support is strictly needed for concatenated files only, + // but it's simpler if it is always available. + if (inputStream.markSupported()) { + in = inputStream; + } else { + in = new BufferedInputStream(inputStream); + } + + this.decompressConcatenated = decompressConcatenated; + init(true); + } + + private boolean init(boolean isFirstMember) throws IOException { + assert isFirstMember || decompressConcatenated; + + // Check the magic bytes without a possibility of EOFException. + int magic0 = in.read(); + int magic1 = in.read(); + + // If end of input was reached after decompressing at least + // one .gz member, we have reached the end of the file successfully. + if (magic0 == -1 && !isFirstMember) { + return false; + } + + if (magic0 != 31 || magic1 != 139) { + throw new IOException(isFirstMember + ? "Input is not in the .gz format" + : "Garbage after a valid .gz stream"); + } + + // Parsing the rest of the header may throw EOFException. 
+ DataInputStream inData = new DataInputStream(in); + int method = inData.readUnsignedByte(); + if (method != 8) { + throw new IOException("Unsupported compression method " + + method + " in the .gz header"); + } + + int flg = inData.readUnsignedByte(); + if ((flg & FRESERVED) != 0) { + throw new IOException( + "Reserved flags are set in the .gz header"); + } + + inData.readInt(); // mtime, ignored + inData.readUnsignedByte(); // extra flags, ignored + inData.readUnsignedByte(); // operating system, ignored + + // Extra field, ignored + if ((flg & FEXTRA) != 0) { + int xlen = inData.readUnsignedByte(); + xlen |= inData.readUnsignedByte() << 8; + + // This isn't as efficient as calling in.skip would be, + // but it's lazier to handle unexpected end of input this way. + // Most files don't have an extra field anyway. + while (xlen-- > 0) { + inData.readUnsignedByte(); + } + } + + // Original file name, ignored + if ((flg & FNAME) != 0) { + readToNull(inData); + } + + // Comment, ignored + if ((flg & FCOMMENT) != 0) { + readToNull(inData); + } + + // Header "CRC16" which is actually a truncated CRC32 (which isn't + // as good as real CRC16). I don't know if any encoder implementation + // sets this, so it's not worth trying to verify it. GNU gzip 1.4 + // doesn't support this field, but zlib seems to be able to at least + // skip over it. + if ((flg & FHCRC) != 0) { + inData.readShort(); + } + + // Reset + inf.reset(); + crc.reset(); + memberSize = 0; + + return true; + } + + private void readToNull(DataInputStream inData) throws IOException { + while (inData.readUnsignedByte() != 0x00) { // NOPMD + } + } + + @Override + public int read() throws IOException { + return read(oneByte, 0, 1) == -1 ? 
-1 : oneByte[0] & 0xFF; + } + + /** + * {@inheritDoc} + * + * @since 1.1 + */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (endReached) { + return -1; + } + + int size = 0; + + while (len > 0) { + if (inf.needsInput()) { + // Remember the current position because we may need to + // rewind after reading too much input. + in.mark(buf.length); + + bufUsed = in.read(buf); + if (bufUsed == -1) { + throw new EOFException(); + } + + inf.setInput(buf, 0, bufUsed); + } + + int ret; + try { + ret = inf.inflate(b, off, len); + } catch (DataFormatException e) { + throw new IOException("Gzip-compressed data is corrupt"); + } + + crc.update(b, off, ret); + memberSize += ret; + off += ret; + len -= ret; + size += ret; + count(ret); + + if (inf.finished()) { + // We may have read too many bytes. Rewind the read + // position to match the actual amount used. + // + // NOTE: The "if" is there just in case. Since we used + // in.mark earler, it should always skip enough. + in.reset(); + + int skipAmount = bufUsed - inf.getRemaining(); + if (in.skip(skipAmount) != skipAmount) { + throw new IOException(); + } + + bufUsed = 0; + + DataInputStream inData = new DataInputStream(in); + + // CRC32 + long crcStored = 0; + for (int i = 0; i < 4; ++i) { + crcStored |= (long)inData.readUnsignedByte() << (i * 8); + } + + if (crcStored != crc.getValue()) { + throw new IOException("Gzip-compressed data is corrupt " + + "(CRC32 error)"); + } + + // Uncompressed size modulo 2^32 (ISIZE in the spec) + int isize = 0; + for (int i = 0; i < 4; ++i) { + isize |= inData.readUnsignedByte() << (i * 8); + } + + if (isize != memberSize) { + throw new IOException("Gzip-compressed data is corrupt" + + "(uncompressed size mismatch)"); + } + + // See if this is the end of the file. + if (!decompressConcatenated || !init(false)) { + inf.end(); + inf = null; + endReached = true; + return size == 0 ? 
-1 : size; + } + } + } + + return size; + } + + /** + * Checks if the signature matches what is expected for a .gz file. + * + * @param signature the bytes to check + * @param length the number of bytes to check + * @return true if this is a .gz stream, false otherwise + * + * @since 1.1 + */ + public static boolean matches(byte[] signature, int length) { + + if (length < 2) { + return false; + } + + if (signature[0] != 31) { + return false; + } + + if (signature[1] != -117) { + return false; + } + + return true; + } + + /** + * Closes the input stream (unless it is System.in). + * + * @since 1.2 + */ + @Override + public void close() throws IOException { + if (inf != null) { + inf.end(); + inf = null; + } + + if (this.in != System.in) { + this.in.close(); + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java new file mode 100644 index 000000000..43c62e7d6 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipCompressorOutputStream.java @@ -0,0 +1,207 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.compressors.gzip; + +import java.io.IOException; +import java.io.OutputStream; +import java.nio.ByteBuffer; +import java.nio.ByteOrder; +import java.util.zip.CRC32; +import java.util.zip.Deflater; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +import org.apache.commons.compress.compressors.CompressorOutputStream; + +/** + * Compressed output stream using the gzip format. This implementation improves + * over the standard {@link GZIPOutputStream} class by allowing + * the configuration of the compression level and the header metadata (filename, + * comment, modification time, operating system and extra flags). + * + * @see GZIP File Format Specification + */ +public class GzipCompressorOutputStream extends CompressorOutputStream { + + /** Header flag indicating a file name follows the header */ + private static final int FNAME = 1 << 3; + + /** Header flag indicating a comment follows the header */ + private static final int FCOMMENT = 1 << 4; + + /** The underlying stream */ + private final OutputStream out; + + /** Deflater used to compress the data */ + private final Deflater deflater; + + /** The buffer receiving the compressed data from the deflater */ + private final byte[] deflateBuffer = new byte[512]; + + /** Indicates if the stream has been closed */ + private boolean closed; + + /** The checksum of the uncompressed data */ + private final CRC32 crc = new CRC32(); + + /** + * Creates a gzip compressed output stream with the default parameters. + */ + public GzipCompressorOutputStream(OutputStream out) throws IOException { + this(out, new GzipParameters()); + } + + /** + * Creates a gzip compressed output stream with the specified parameters. 
+ * + * @since 1.7 + */ + public GzipCompressorOutputStream(OutputStream out, GzipParameters parameters) throws IOException { + this.out = out; + this.deflater = new Deflater(parameters.getCompressionLevel(), true); + + writeHeader(parameters); + } + + private void writeHeader(GzipParameters parameters) throws IOException { + String filename = parameters.getFilename(); + String comment = parameters.getComment(); + + ByteBuffer buffer = ByteBuffer.allocate(10); + buffer.order(ByteOrder.LITTLE_ENDIAN); + buffer.putShort((short) GZIPInputStream.GZIP_MAGIC); + buffer.put((byte) Deflater.DEFLATED); // compression method (8: deflate) + buffer.put((byte) ((filename != null ? FNAME : 0) | (comment != null ? FCOMMENT : 0))); // flags + buffer.putInt((int) (parameters.getModificationTime() / 1000)); + + // extra flags + int compressionLevel = parameters.getCompressionLevel(); + if (compressionLevel == Deflater.BEST_COMPRESSION) { + buffer.put((byte) 2); + } else if (compressionLevel == Deflater.BEST_SPEED) { + buffer.put((byte) 4); + } else { + buffer.put((byte) 0); + } + + buffer.put((byte) parameters.getOperatingSystem()); + + out.write(buffer.array()); + + if (filename != null) { + out.write(filename.getBytes("ISO-8859-1")); + out.write(0); + } + + if (comment != null) { + out.write(comment.getBytes("ISO-8859-1")); + out.write(0); + } + } + + private void writeTrailer() throws IOException { + ByteBuffer buffer = ByteBuffer.allocate(8); + buffer.order(ByteOrder.LITTLE_ENDIAN); + buffer.putInt((int) crc.getValue()); + buffer.putInt(deflater.getTotalIn()); + + out.write(buffer.array()); + } + + @Override + public void write(int b) throws IOException { + write(new byte[]{(byte) (b & 0xff)}, 0, 1); + } + + /** + * {@inheritDoc} + * + * @since 1.1 + */ + @Override + public void write(byte[] buffer) throws IOException { + write(buffer, 0, buffer.length); + } + + /** + * {@inheritDoc} + * + * @since 1.1 + */ + @Override + public void write(byte[] buffer, int offset, int length) 
throws IOException { + if (deflater.finished()) { + throw new IOException("Cannot write more data, the end of the compressed data stream has been reached"); + + } else if (length > 0) { + deflater.setInput(buffer, offset, length); + + while (!deflater.needsInput()) { + deflate(); + } + + crc.update(buffer, offset, length); + } + } + + private void deflate() throws IOException { + int length = deflater.deflate(deflateBuffer, 0, deflateBuffer.length); + if (length > 0) { + out.write(deflateBuffer, 0, length); + } + } + + /** + * Finishes writing compressed data to the underlying stream without closing it. + * + * @since 1.7 + */ + public void finish() throws IOException { + if (!deflater.finished()) { + deflater.finish(); + + while (!deflater.finished()) { + deflate(); + } + + writeTrailer(); + } + } + + /** + * {@inheritDoc} + * + * @since 1.7 + */ + @Override + public void flush() throws IOException { + out.flush(); + } + + @Override + public void close() throws IOException { + if (!closed) { + finish(); + deflater.end(); + out.close(); + closed = true; + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipParameters.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipParameters.java new file mode 100644 index 000000000..ef9a1afeb --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipParameters.java @@ -0,0 +1,121 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.gzip; + +import java.util.zip.Deflater; + +/** + * Parameters for the GZIP compressor. + * + * @since 1.7 + */ +public class GzipParameters { + + private int compressionLevel = Deflater.DEFAULT_COMPRESSION; + private long modificationTime; + private String filename; + private String comment; + private int operatingSystem = 255; // Unknown OS by default + + public int getCompressionLevel() { + return compressionLevel; + } + + /** + * Sets the compression level. + * + * @param compressionLevel the compression level (between 0 and 9) + * @see Deflater#NO_COMPRESSION + * @see Deflater#BEST_SPEED + * @see Deflater#DEFAULT_COMPRESSION + * @see Deflater#BEST_COMPRESSION + */ + public void setCompressionLevel(int compressionLevel) { + if (compressionLevel < -1 || compressionLevel > 9) { + throw new IllegalArgumentException("Invalid gzip compression level: " + compressionLevel); + } + this.compressionLevel = compressionLevel; + } + + public long getModificationTime() { + return modificationTime; + } + + /** + * Sets the modification time of the compressed file. + * + * @param modificationTime the modification time, in milliseconds + */ + public void setModificationTime(long modificationTime) { + this.modificationTime = modificationTime; + } + + public String getFilename() { + return filename; + } + + /** + * Sets the name of the compressed file. 
+ * + * @param filename the name of the file without the directory path + */ + public void setFilename(String filename) { + this.filename = filename; + } + + public String getComment() { + return comment; + } + + public void setComment(String comment) { + this.comment = comment; + } + + public int getOperatingSystem() { + return operatingSystem; + } + + /** + * Sets the operating system on which the compression took place. + * The defined values are: + *

      + *
    • 0: FAT filesystem (MS-DOS, OS/2, NT/Win32)
    • + *
    • 1: Amiga
    • + *
    • 2: VMS (or OpenVMS)
    • + *
    • 3: Unix
    • + *
    • 4: VM/CMS
    • + *
    • 5: Atari TOS
    • + *
    • 6: HPFS filesystem (OS/2, NT)
    • + *
    • 7: Macintosh
    • + *
    • 8: Z-System
    • + *
    • 9: CP/M
    • + *
    • 10: TOPS-20
    • + *
    • 11: NTFS filesystem (NT)
    • + *
    • 12: QDOS
    • + *
    • 13: Acorn RISCOS
    • + *
    • 255: Unknown
    • + *
    + * + * @param operatingSystem the code of the operating system + */ + public void setOperatingSystem(int operatingSystem) { + this.operatingSystem = operatingSystem; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipUtils.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipUtils.java new file mode 100644 index 000000000..40be4374c --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/GzipUtils.java @@ -0,0 +1,100 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.gzip; + +import java.util.LinkedHashMap; +import java.util.Map; +import org.apache.commons.compress.compressors.FileNameUtil; + +/** + * Utility code for the gzip compression format. 
+ * @ThreadSafe + */ +public class GzipUtils { + + private static final FileNameUtil fileNameUtil; + + static { + // using LinkedHashMap so .tgz is preferred over .taz as + // compressed extension of .tar as FileNameUtil will use the + // first one found + Map uncompressSuffix = + new LinkedHashMap(); + uncompressSuffix.put(".tgz", ".tar"); + uncompressSuffix.put(".taz", ".tar"); + uncompressSuffix.put(".svgz", ".svg"); + uncompressSuffix.put(".cpgz", ".cpio"); + uncompressSuffix.put(".wmz", ".wmf"); + uncompressSuffix.put(".emz", ".emf"); + uncompressSuffix.put(".gz", ""); + uncompressSuffix.put(".z", ""); + uncompressSuffix.put("-gz", ""); + uncompressSuffix.put("-z", ""); + uncompressSuffix.put("_z", ""); + fileNameUtil = new FileNameUtil(uncompressSuffix, ".gz"); + } + + /** Private constructor to prevent instantiation of this utility class. */ + private GzipUtils() { + } + + /** + * Detects common gzip suffixes in the given filename. + * + * @param filename name of a file + * @return {@code true} if the filename has a common gzip suffix, + * {@code false} otherwise + */ + public static boolean isCompressedFilename(String filename) { + return fileNameUtil.isCompressedFilename(filename); + } + + /** + * Maps the given name of a gzip-compressed file to the name that the + * file should have after uncompression. Commonly used file type specific + * suffixes like ".tgz" or ".svgz" are automatically detected and + * correctly mapped. For example the name "package.tgz" is mapped to + * "package.tar". And any filenames with the generic ".gz" suffix + * (or any other generic gzip suffix) is mapped to a name without that + * suffix. If no gzip suffix is detected, then the filename is returned + * unmapped. 
+ * + * @param filename name of a file + * @return name of the corresponding uncompressed file + */ + public static String getUncompressedFilename(String filename) { + return fileNameUtil.getUncompressedFilename(filename); + } + + /** + * Maps the given filename to the name that the file should have after + * compression with gzip. Common file types with custom suffixes for + * compressed versions are automatically detected and correctly mapped. + * For example the name "package.tar" is mapped to "package.tgz". If no + * custom mapping is applicable, then the default ".gz" suffix is appended + * to the filename. + * + * @param filename name of a file + * @return name of the corresponding compressed file + */ + public static String getCompressedFilename(String filename) { + return fileNameUtil.getCompressedFilename(filename); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/package.html new file mode 100644 index 000000000..e18b50f2f --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/gzip/package.html @@ -0,0 +1,29 @@ + + + +

    Provides stream classes for compressing and decompressing + streams using the GZip algorithm.

    + +

    The classes in this package are wrappers around {@link + java.util.zip.GZIPInputStream java.util.zip.GZIPInputStream} and + {@link java.util.zip.GZIPOutputStream + java.util.zip.GZIPOutputStream}.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.java new file mode 100644 index 000000000..e1fdc2cba --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/InMemoryCachingStreamBridge.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; + +/** + * StreamSwitcher that caches all data written to the output side in + * memory. 
+ * @since 1.3 + */ +class InMemoryCachingStreamBridge extends StreamBridge { + InMemoryCachingStreamBridge() { + super(new ByteArrayOutputStream()); + } + + @Override + InputStream getInputView() throws IOException { + return new ByteArrayInputStream(((ByteArrayOutputStream) out) + .toByteArray()); + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java new file mode 100644 index 000000000..fa04aef35 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorInputStream.java @@ -0,0 +1,250 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.File; +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; +import java.util.Map; +import java.util.jar.JarOutputStream; +import java.util.jar.Pack200; + +import org.apache.commons.compress.compressors.CompressorInputStream; + +/** + * An input stream that decompresses from the Pack200 format to be read + * as any other stream. + * + *

    The {@link CompressorInputStream#getCount getCount} and {@link + * CompressorInputStream#getBytesRead getBytesRead} methods always + * return 0.

    + * + * @NotThreadSafe + * @since 1.3 + */ +public class Pack200CompressorInputStream extends CompressorInputStream { + private final InputStream originalInput; + private final StreamBridge streamBridge; + + /** + * Decompresses the given stream, caching the decompressed data in + * memory. + * + *

    When reading from a file the File-arg constructor may + * provide better performance.

    + */ + public Pack200CompressorInputStream(final InputStream in) + throws IOException { + this(in, Pack200Strategy.IN_MEMORY); + } + + /** + * Decompresses the given stream using the given strategy to cache + * the results. + * + *

    When reading from a file the File-arg constructor may + * provide better performance.

    + */ + public Pack200CompressorInputStream(final InputStream in, + final Pack200Strategy mode) + throws IOException { + this(in, null, mode, null); + } + + /** + * Decompresses the given stream, caching the decompressed data in + * memory and using the given properties. + * + *

    When reading from a file the File-arg constructor may + * provide better performance.

    + */ + public Pack200CompressorInputStream(final InputStream in, + final Map props) + throws IOException { + this(in, Pack200Strategy.IN_MEMORY, props); + } + + /** + * Decompresses the given stream using the given strategy to cache + * the results and the given properties. + * + *

    When reading from a file the File-arg constructor may + * provide better performance.

    + */ + public Pack200CompressorInputStream(final InputStream in, + final Pack200Strategy mode, + final Map props) + throws IOException { + this(in, null, mode, props); + } + + /** + * Decompresses the given file, caching the decompressed data in + * memory. + */ + public Pack200CompressorInputStream(final File f) throws IOException { + this(f, Pack200Strategy.IN_MEMORY); + } + + /** + * Decompresses the given file using the given strategy to cache + * the results. + */ + public Pack200CompressorInputStream(final File f, final Pack200Strategy mode) + throws IOException { + this(null, f, mode, null); + } + + /** + * Decompresses the given file, caching the decompressed data in + * memory and using the given properties. + */ + public Pack200CompressorInputStream(final File f, + final Map props) + throws IOException { + this(f, Pack200Strategy.IN_MEMORY, props); + } + + /** + * Decompresses the given file using the given strategy to cache + * the results and the given properties. + */ + public Pack200CompressorInputStream(final File f, final Pack200Strategy mode, + final Map props) + throws IOException { + this(null, f, mode, props); + } + + private Pack200CompressorInputStream(final InputStream in, final File f, + final Pack200Strategy mode, + final Map props) + throws IOException { + originalInput = in; + streamBridge = mode.newStreamBridge(); + JarOutputStream jarOut = new JarOutputStream(streamBridge); + Pack200.Unpacker u = Pack200.newUnpacker(); + if (props != null) { + u.properties().putAll(props); + } + if (f == null) { + u.unpack(new FilterInputStream(in) { + @Override + public void close() { + // unpack would close this stream but we + // want to give the user code more control + } + }, + jarOut); + } else { + u.unpack(f, jarOut); + } + jarOut.close(); + } + + @Override + public int read() throws IOException { + return streamBridge.getInput().read(); + } + + @Override + public int read(byte[] b) throws IOException { + return 
streamBridge.getInput().read(b); + } + + @Override + public int read(byte[] b, int off, int count) throws IOException { + return streamBridge.getInput().read(b, off, count); + } + + @Override + public int available() throws IOException { + return streamBridge.getInput().available(); + } + + @Override + public boolean markSupported() { + try { + return streamBridge.getInput().markSupported(); + } catch (IOException ex) { + return false; + } + } + + @Override + public void mark(int limit) { + try { + streamBridge.getInput().mark(limit); + } catch (IOException ex) { + throw new RuntimeException(ex); + } + } + + @Override + public void reset() throws IOException { + streamBridge.getInput().reset(); + } + + @Override + public long skip(long count) throws IOException { + return streamBridge.getInput().skip(count); + } + + @Override + public void close() throws IOException { + try { + streamBridge.stop(); + } finally { + if (originalInput != null) { + originalInput.close(); + } + } + } + + private static final byte[] CAFE_DOOD = new byte[] { + (byte) 0xCA, (byte) 0xFE, (byte) 0xD0, (byte) 0x0D + }; + private static final int SIG_LENGTH = CAFE_DOOD.length; + + /** + * Checks if the signature matches what is expected for a pack200 + * file (0xCAFED00D). 
+ * + * @param signature + * the bytes to check + * @param length + * the number of bytes to check + * @return true, if this stream is a pack200 compressed stream, + * false otherwise + */ + public static boolean matches(byte[] signature, int length) { + if (length < SIG_LENGTH) { + return false; + } + + for (int i = 0; i < SIG_LENGTH; i++) { + if (signature[i] != CAFE_DOOD[i]) { + return false; + } + } + + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java new file mode 100644 index 000000000..cfb315d9e --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200CompressorOutputStream.java @@ -0,0 +1,130 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.IOException; +import java.io.OutputStream; +import java.util.Map; +import java.util.jar.JarInputStream; +import java.util.jar.Pack200; + +import org.apache.commons.compress.compressors.CompressorOutputStream; +import org.apache.commons.compress.utils.IOUtils; + +/** + * An output stream that compresses using the Pack200 format. + * + * @NotThreadSafe + * @since 1.3 + */ +public class Pack200CompressorOutputStream extends CompressorOutputStream { + private boolean finished = false; + private final OutputStream originalOutput; + private final StreamBridge streamBridge; + private final Map properties; + + /** + * Compresses the given stream, caching the compressed data in + * memory. + */ + public Pack200CompressorOutputStream(final OutputStream out) + throws IOException { + this(out, Pack200Strategy.IN_MEMORY); + } + + /** + * Compresses the given stream using the given strategy to cache + * the results. + */ + public Pack200CompressorOutputStream(final OutputStream out, + final Pack200Strategy mode) + throws IOException { + this(out, mode, null); + } + + /** + * Compresses the given stream, caching the compressed data in + * memory and using the given properties. + */ + public Pack200CompressorOutputStream(final OutputStream out, + final Map props) + throws IOException { + this(out, Pack200Strategy.IN_MEMORY, props); + } + + /** + * Compresses the given stream using the given strategy to cache + * the results and the given properties. 
+ */ + public Pack200CompressorOutputStream(final OutputStream out, + final Pack200Strategy mode, + final Map props) + throws IOException { + originalOutput = out; + streamBridge = mode.newStreamBridge(); + properties = props; + } + + @Override + public void write(int b) throws IOException { + streamBridge.write(b); + } + + @Override + public void write(byte[] b) throws IOException { + streamBridge.write(b); + } + + @Override + public void write(byte[] b, int from, int length) throws IOException { + streamBridge.write(b, from, length); + } + + @Override + public void close() throws IOException { + finish(); + try { + streamBridge.stop(); + } finally { + originalOutput.close(); + } + } + + public void finish() throws IOException { + if (!finished) { + finished = true; + Pack200.Packer p = Pack200.newPacker(); + if (properties != null) { + p.properties().putAll(properties); + } + JarInputStream ji = null; + boolean success = false; + try { + p.pack(ji = new JarInputStream(streamBridge.getInput()), + originalOutput); + success = true; + } finally { + if (!success) { + IOUtils.closeQuietly(ji); + } + } + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Strategy.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Strategy.java new file mode 100644 index 000000000..dba199296 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Strategy.java @@ -0,0 +1,46 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.IOException; + +/** + * The different modes the Pack200 streams can use to wrap input and + * output. + * @since 1.3 + */ +public enum Pack200Strategy { + /** Cache output in memory */ + IN_MEMORY() { + @Override + StreamBridge newStreamBridge() { + return new InMemoryCachingStreamBridge(); + } + }, + /** Cache output in a temporary file */ + TEMP_FILE() { + @Override + StreamBridge newStreamBridge() throws IOException { + return new TempFileCachingStreamBridge(); + } + }; + + abstract StreamBridge newStreamBridge() throws IOException; +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Utils.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Utils.java new file mode 100644 index 000000000..1f944dacc --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/Pack200Utils.java @@ -0,0 +1,158 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.OutputStream; +import java.util.HashMap; +import java.util.Map; +import java.util.jar.JarFile; +import java.util.jar.JarOutputStream; +import java.util.jar.Pack200; + +/** + * Utility methods for Pack200. + * + * @ThreadSafe + * @since 1.3 + */ +public class Pack200Utils { + private Pack200Utils() { } + + /** + * Normalizes a JAR archive in-place so it can be safely signed + * and packed. + * + *

    As stated in Pack200.Packer's + * javadocs applying a Pack200 compression to a JAR archive will + * in general make its sigantures invalid. In order to prepare a + * JAR for signing it should be "normalized" by packing and + * unpacking it. This is what this method does.

    + * + *

    Note this methods implicitly sets the segment length to + * -1.

    + * + * @param jar the JAR archive to normalize + */ + public static void normalize(File jar) + throws IOException { + normalize(jar, jar, null); + } + + /** + * Normalizes a JAR archive in-place so it can be safely signed + * and packed. + * + *

    As stated in Pack200.Packer's + * javadocs applying a Pack200 compression to a JAR archive will + * in general make its sigantures invalid. In order to prepare a + * JAR for signing it should be "normalized" by packing and + * unpacking it. This is what this method does.

    + * + * @param jar the JAR archive to normalize + * @param props properties to set for the pack operation. This + * method will implicitly set the segment limit to -1. + */ + public static void normalize(File jar, Map props) + throws IOException { + normalize(jar, jar, props); + } + + /** + * Normalizes a JAR archive so it can be safely signed and packed. + * + *

    As stated in Pack200.Packer's + * javadocs applying a Pack200 compression to a JAR archive will + * in general make its sigantures invalid. In order to prepare a + * JAR for signing it should be "normalized" by packing and + * unpacking it. This is what this method does.

    + * + *

    This method does not replace the existing archive but creates + * a new one.

    + * + *

    Note this methods implicitly sets the segment length to + * -1.

    + * + * @param from the JAR archive to normalize + * @param to the normalized archive + */ + public static void normalize(File from, File to) + throws IOException { + normalize(from, to, null); + } + + /** + * Normalizes a JAR archive so it can be safely signed and packed. + * + *

    As stated in Pack200.Packer's + * javadocs applying a Pack200 compression to a JAR archive will + * in general make its sigantures invalid. In order to prepare a + * JAR for signing it should be "normalized" by packing and + * unpacking it. This is what this method does.

    + * + *

    This method does not replace the existing archive but creates + * a new one.

    + * + * @param from the JAR archive to normalize + * @param to the normalized archive + * @param props properties to set for the pack operation. This + * method will implicitly set the segment limit to -1. + */ + public static void normalize(File from, File to, Map props) + throws IOException { + if (props == null) { + props = new HashMap(); + } + props.put(Pack200.Packer.SEGMENT_LIMIT, "-1"); + File f = File.createTempFile("commons-compress", "pack200normalize"); + f.deleteOnExit(); + try { + OutputStream os = new FileOutputStream(f); + JarFile j = null; + try { + Pack200.Packer p = Pack200.newPacker(); + p.properties().putAll(props); + p.pack(j = new JarFile(from), os); + j = null; + os.close(); + os = null; + + Pack200.Unpacker u = Pack200.newUnpacker(); + os = new JarOutputStream(new FileOutputStream(to)); + u.unpack(f, (JarOutputStream) os); + } finally { + if (j != null) { + j.close(); + } + if (os != null) { + os.close(); + } + } + } finally { + f.delete(); + } + } +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/StreamBridge.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/StreamBridge.java new file mode 100644 index 000000000..293e0bb1e --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/StreamBridge.java @@ -0,0 +1,75 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * Provides an InputStream to read all data written to this + * OutputStream. + * + * @ThreadSafe + * @since 1.3 + */ +abstract class StreamBridge extends FilterOutputStream { + private InputStream input; + private final Object INPUT_LOCK = new Object(); + + protected StreamBridge(OutputStream out) { + super(out); + } + + protected StreamBridge() { + this(null); + } + + /** + * Provides the input view. + */ + InputStream getInput() throws IOException { + synchronized (INPUT_LOCK) { + if (input == null) { + input = getInputView(); + } + } + return input; + } + + /** + * Creates the input view. + */ + abstract InputStream getInputView() throws IOException; + + /** + * Closes input and output and releases all associated resources. 
+ */ + void stop() throws IOException { + close(); + synchronized (INPUT_LOCK) { + if (input != null) { + input.close(); + input = null; + } + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.java new file mode 100644 index 000000000..b609b50fc --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/TempFileCachingStreamBridge.java @@ -0,0 +1,53 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ + +package org.apache.commons.compress.compressors.pack200; + +import java.io.File; +import java.io.FileInputStream; +import java.io.FileOutputStream; +import java.io.IOException; +import java.io.InputStream; + +/** + * StreamSwitcher that caches all data written to the output side in + * a temporary file. 
+ * @since 1.3 + */ +class TempFileCachingStreamBridge extends StreamBridge { + private final File f; + + TempFileCachingStreamBridge() throws IOException { + f = File.createTempFile("commons-compress", "packtemp"); + f.deleteOnExit(); + out = new FileOutputStream(f); + } + + @Override + InputStream getInputView() throws IOException { + out.close(); + return new FileInputStream(f) { + @Override + public void close() throws IOException { + super.close(); + f.delete(); + } + }; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/package.html new file mode 100644 index 000000000..dfbcb88e6 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/pack200/package.html @@ -0,0 +1,82 @@ + + + +

    Provides stream classes for compressing and decompressing + streams using the Pack200 algorithm used to compress Java + archives.

    + +

    The streams of this package only work on JAR archives, i.e. a + {@link + org.apache.commons.compress.compressors.pack200.Pack200CompressorOutputStream + Pack200CompressorOutputStream} expects to be wrapped around a + stream that a valid JAR archive will be written to and a {@link + org.apache.commons.compress.compressors.pack200.Pack200CompressorInputStream + Pack200CompressorInputStream} provides a stream to read from a + JAR archive.

    + +

    JAR archives compressed with Pack200 will in general be + different from the original archive when decompressed again. + For details see + the API + documentation of Pack200.

    + +

    The streams of this package work on non-deflated streams, + i.e. archives like those created with the --no-gzip + option of the JDK's pack200 command line tool. If + you want to work on deflated streams you must use an additional + stream layer - for example by using Apache Commons Compress' + gzip package.

    + +

    The Pack200 API provided by the Java class library doesn't lend + itself to real stream + processing. Pack200CompressorInputStream will + uncompress its input immediately and then provide + an InputStream to a cached result. + Likewise Pack200CompressorOutputStream will not + write anything to the given OutputStream + until finish or close is called - at + which point the cached output written so far gets + compressed.

    + +

    Two different caching modes are available - "in memory", which + is the default, and "temporary file". By default data is cached + in memory but you should switch to the temporary file option if + your archives are really big.

    + +

    Given there always is an intermediate result + the getBytesRead and getCount methods + of Pack200CompressorInputStream are meaningless + (read from the real stream or from the intermediate result?) + and always return 0.

    + +

    During development of the initial version several attempts have + been made to use a real streaming API based for example + on Piped(In|Out)putStream or explicit stream + pumping like Commons Exec's InputStreamPumper but + they have all failed because they rely on the output end to be + consumed completely or else the (un)pack will block + forever. Especially for Pack200InputStream it is + very likely that it will be wrapped in + a ZipArchiveInputStream which will never read the + archive completely as it is not interested in the ZIP central + directory data at the end of the JAR archive.

    + + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/package.html new file mode 100644 index 000000000..7b7d504b9 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/package.html @@ -0,0 +1,24 @@ + + + +

    Provides a unified API and factories for dealing with + compressed streams.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java new file mode 100644 index 000000000..65deab73c --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/FramedSnappyCompressorInputStream.java @@ -0,0 +1,290 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.snappy; + +import java.io.IOException; +import java.io.InputStream; +import java.io.PushbackInputStream; +import java.util.Arrays; + +import org.apache.commons.compress.compressors.CompressorInputStream; +import org.apache.commons.compress.utils.BoundedInputStream; +import org.apache.commons.compress.utils.IOUtils; + +/** + * CompressorInputStream for the framing Snappy format. + * + *

    Based on the "spec" in the version "Last revised: 2013-10-25"

    + * + * @see Snappy framing format description + * @since 1.7 + */ +public class FramedSnappyCompressorInputStream extends CompressorInputStream { + /** + * package private for tests only. + */ + static final long MASK_OFFSET = 0xa282ead8L; + + private static final int STREAM_IDENTIFIER_TYPE = 0xff; + private static final int COMPRESSED_CHUNK_TYPE = 0; + private static final int UNCOMPRESSED_CHUNK_TYPE = 1; + private static final int PADDING_CHUNK_TYPE = 0xfe; + private static final int MIN_UNSKIPPABLE_TYPE = 2; + private static final int MAX_UNSKIPPABLE_TYPE = 0x7f; + private static final int MAX_SKIPPABLE_TYPE = 0xfd; + + private static final byte[] SZ_SIGNATURE = new byte[] { + (byte) STREAM_IDENTIFIER_TYPE, // tag + 6, 0, 0, // length + 's', 'N', 'a', 'P', 'p', 'Y' + }; + + /** The underlying stream to read compressed data from */ + private final PushbackInputStream in; + + private SnappyCompressorInputStream currentCompressedChunk; + + // used in no-arg read method + private final byte[] oneByte = new byte[1]; + + private boolean endReached, inUncompressedChunk; + + private int uncompressedBytesRemaining; + private long expectedChecksum = -1; + private PureJavaCrc32C checksum = new PureJavaCrc32C(); + + /** + * Constructs a new input stream that decompresses snappy-framed-compressed data + * from the specified input stream. + * @param in the InputStream from which to read the compressed data + */ + public FramedSnappyCompressorInputStream(InputStream in) throws IOException { + this.in = new PushbackInputStream(in, 1); + readStreamIdentifier(); + } + + /** {@inheritDoc} */ + @Override + public int read() throws IOException { + return read(oneByte, 0, 1) == -1 ? 
-1 : oneByte[0] & 0xFF; + } + + /** {@inheritDoc} */ + @Override + public void close() throws IOException { + if (currentCompressedChunk != null) { + currentCompressedChunk.close(); + currentCompressedChunk = null; + } + in.close(); + } + + /** {@inheritDoc} */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + int read = readOnce(b, off, len); + if (read == -1) { + readNextBlock(); + if (endReached) { + return -1; + } + read = readOnce(b, off, len); + } + return read; + } + + /** {@inheritDoc} */ + @Override + public int available() throws IOException { + if (inUncompressedChunk) { + return Math.min(uncompressedBytesRemaining, + in.available()); + } else if (currentCompressedChunk != null) { + return currentCompressedChunk.available(); + } + return 0; + } + + /** + * Read from the current chunk into the given array. + * + * @return -1 if there is no current chunk or the number of bytes + * read from the current chunk (which may be -1 if the end of the + * chunk is reached). 
+ */ + private int readOnce(byte[] b, int off, int len) throws IOException { + int read = -1; + if (inUncompressedChunk) { + int amount = Math.min(uncompressedBytesRemaining, len); + if (amount == 0) { + return -1; + } + read = in.read(b, off, amount); + if (read != -1) { + uncompressedBytesRemaining -= read; + count(read); + } + } else if (currentCompressedChunk != null) { + long before = currentCompressedChunk.getBytesRead(); + read = currentCompressedChunk.read(b, off, len); + if (read == -1) { + currentCompressedChunk.close(); + currentCompressedChunk = null; + } else { + count(currentCompressedChunk.getBytesRead() - before); + } + } + if (read > 0) { + checksum.update(b, off, read); + } + return read; + } + + private void readNextBlock() throws IOException { + verifyLastChecksumAndReset(); + inUncompressedChunk = false; + int type = readOneByte(); + if (type == -1) { + endReached = true; + } else if (type == STREAM_IDENTIFIER_TYPE) { + in.unread(type); + pushedBackBytes(1); + readStreamIdentifier(); + readNextBlock(); + } else if (type == PADDING_CHUNK_TYPE + || (type > MAX_UNSKIPPABLE_TYPE && type <= MAX_SKIPPABLE_TYPE)) { + skipBlock(); + readNextBlock(); + } else if (type >= MIN_UNSKIPPABLE_TYPE && type <= MAX_UNSKIPPABLE_TYPE) { + throw new IOException("unskippable chunk with type " + type + + " (hex " + Integer.toHexString(type) + ")" + + " detected."); + } else if (type == UNCOMPRESSED_CHUNK_TYPE) { + inUncompressedChunk = true; + uncompressedBytesRemaining = readSize() - 4 /* CRC */; + expectedChecksum = unmask(readCrc()); + } else if (type == COMPRESSED_CHUNK_TYPE) { + long size = readSize() - 4 /* CRC */; + expectedChecksum = unmask(readCrc()); + currentCompressedChunk = + new SnappyCompressorInputStream(new BoundedInputStream(in, size)); + // constructor reads uncompressed size + count(currentCompressedChunk.getBytesRead()); + } else { + // impossible as all potential byte values have been covered + throw new IOException("unknown chunk type " + type 
+ + " detected."); + } + } + + private long readCrc() throws IOException { + byte[] b = new byte[4]; + int read = IOUtils.readFully(in, b); + count(read); + if (read != 4) { + throw new IOException("premature end of stream"); + } + long crc = 0; + for (int i = 0; i < 4; i++) { + crc |= (b[i] & 0xFFL) << (8 * i); + } + return crc; + } + + static long unmask(long x) { + // ugly, maybe we should just have used ints and deal with the + // overflow + x -= MASK_OFFSET; + x &= 0xffffFFFFL; + return ((x >> 17) | (x << 15)) & 0xffffFFFFL; + } + + private int readSize() throws IOException { + int b = 0; + int sz = 0; + for (int i = 0; i < 3; i++) { + b = readOneByte(); + if (b == -1) { + throw new IOException("premature end of stream"); + } + sz |= (b << (i * 8)); + } + return sz; + } + + private void skipBlock() throws IOException { + int size = readSize(); + long read = IOUtils.skip(in, size); + count(read); + if (read != size) { + throw new IOException("premature end of stream"); + } + } + + private void readStreamIdentifier() throws IOException { + byte[] b = new byte[10]; + int read = IOUtils.readFully(in, b); + count(read); + if (10 != read || !matches(b, 10)) { + throw new IOException("Not a framed Snappy stream"); + } + } + + private int readOneByte() throws IOException { + int b = in.read(); + if (b != -1) { + count(1); + return b & 0xFF; + } + return -1; + } + + private void verifyLastChecksumAndReset() throws IOException { + if (expectedChecksum >= 0 && expectedChecksum != checksum.getValue()) { + throw new IOException("Checksum verification failed"); + } + expectedChecksum = -1; + checksum.reset(); + } + + /** + * Checks if the signature matches what is expected for a .sz file. + * + *

    .sz files start with a chunk with tag 0xff and content sNaPpY.

    + * + * @param signature the bytes to check + * @param length the number of bytes to check + * @return true if this is a .sz stream, false otherwise + */ + public static boolean matches(byte[] signature, int length) { + + if (length < SZ_SIGNATURE.length) { + return false; + } + + byte[] shortenedSig = signature; + if (signature.length > SZ_SIGNATURE.length) { + shortenedSig = new byte[SZ_SIGNATURE.length]; + System.arraycopy(signature, 0, shortenedSig, 0, SZ_SIGNATURE.length); + } + + return Arrays.equals(shortenedSig, SZ_SIGNATURE); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java new file mode 100644 index 000000000..4163e7aac --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/PureJavaCrc32C.java @@ -0,0 +1,633 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + * Some portions of this file Copyright (c) 2004-2006 Intel Corportation + * and licensed under the BSD license. 
+ */ +package org.apache.commons.compress.compressors.snappy; + +import java.util.zip.Checksum; + +/** + * A pure-java implementation of the CRC32 checksum that uses + * the CRC32-C polynomial, the same polynomial used by iSCSI + * and implemented on many Intel chipsets supporting SSE4.2. + * + *

    This file is a copy of the implementation at the Apache Hadoop project.

    + * @see "http://svn.apache.org/repos/asf/hadoop/common/trunk/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/PureJavaCrc32C.java" + * @NotThreadSafe + * @since 1.7 + */ +class PureJavaCrc32C implements Checksum { + + /** the current CRC value, bit-flipped */ + private int crc; + + /** Create a new PureJavaCrc32 object. */ + public PureJavaCrc32C() { + reset(); + } + + public long getValue() { + long ret = crc; + return (~ret) & 0xffffffffL; + } + + public void reset() { + crc = 0xffffffff; + } + + public void update(byte[] b, int off, int len) { + int localCrc = crc; + + while(len > 7) { + final int c0 =(b[off+0] ^ localCrc) & 0xff; + final int c1 =(b[off+1] ^ (localCrc >>>= 8)) & 0xff; + final int c2 =(b[off+2] ^ (localCrc >>>= 8)) & 0xff; + final int c3 =(b[off+3] ^ (localCrc >>>= 8)) & 0xff; + localCrc = (T[T8_7_start + c0] ^ T[T8_6_start + c1]) + ^ (T[T8_5_start + c2] ^ T[T8_4_start + c3]); + + final int c4 = b[off+4] & 0xff; + final int c5 = b[off+5] & 0xff; + final int c6 = b[off+6] & 0xff; + final int c7 = b[off+7] & 0xff; + + localCrc ^= (T[T8_3_start + c4] ^ T[T8_2_start + c5]) + ^ (T[T8_1_start + c6] ^ T[T8_0_start + c7]); + + off += 8; + len -= 8; + } + + /* loop unroll - duff's device style */ + switch(len) { + case 7: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 6: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 5: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 4: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 3: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 2: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + case 1: localCrc = (localCrc >>> 8) ^ T[T8_0_start + ((localCrc ^ b[off++]) & 0xff)]; + default: + /* nothing */ + } + + // Publish crc out to object + crc = localCrc; + } + + final public void 
update(int b) { + crc = (crc >>> 8) ^ T[T8_0_start + ((crc ^ b) & 0xff)]; + } + + // CRC polynomial tables generated by: + // java -cp build/test/classes/:build/classes/ \ + // org.apache.hadoop.util.TestPureJavaCrc32\$Table 82F63B78 + + private static final int T8_0_start = 0*256; + private static final int T8_1_start = 1*256; + private static final int T8_2_start = 2*256; + private static final int T8_3_start = 3*256; + private static final int T8_4_start = 4*256; + private static final int T8_5_start = 5*256; + private static final int T8_6_start = 6*256; + private static final int T8_7_start = 7*256; + + private static final int[] T = new int[] { + /* T8_0 */ + 0x00000000, 0xF26B8303, 0xE13B70F7, 0x1350F3F4, + 0xC79A971F, 0x35F1141C, 0x26A1E7E8, 0xD4CA64EB, + 0x8AD958CF, 0x78B2DBCC, 0x6BE22838, 0x9989AB3B, + 0x4D43CFD0, 0xBF284CD3, 0xAC78BF27, 0x5E133C24, + 0x105EC76F, 0xE235446C, 0xF165B798, 0x030E349B, + 0xD7C45070, 0x25AFD373, 0x36FF2087, 0xC494A384, + 0x9A879FA0, 0x68EC1CA3, 0x7BBCEF57, 0x89D76C54, + 0x5D1D08BF, 0xAF768BBC, 0xBC267848, 0x4E4DFB4B, + 0x20BD8EDE, 0xD2D60DDD, 0xC186FE29, 0x33ED7D2A, + 0xE72719C1, 0x154C9AC2, 0x061C6936, 0xF477EA35, + 0xAA64D611, 0x580F5512, 0x4B5FA6E6, 0xB93425E5, + 0x6DFE410E, 0x9F95C20D, 0x8CC531F9, 0x7EAEB2FA, + 0x30E349B1, 0xC288CAB2, 0xD1D83946, 0x23B3BA45, + 0xF779DEAE, 0x05125DAD, 0x1642AE59, 0xE4292D5A, + 0xBA3A117E, 0x4851927D, 0x5B016189, 0xA96AE28A, + 0x7DA08661, 0x8FCB0562, 0x9C9BF696, 0x6EF07595, + 0x417B1DBC, 0xB3109EBF, 0xA0406D4B, 0x522BEE48, + 0x86E18AA3, 0x748A09A0, 0x67DAFA54, 0x95B17957, + 0xCBA24573, 0x39C9C670, 0x2A993584, 0xD8F2B687, + 0x0C38D26C, 0xFE53516F, 0xED03A29B, 0x1F682198, + 0x5125DAD3, 0xA34E59D0, 0xB01EAA24, 0x42752927, + 0x96BF4DCC, 0x64D4CECF, 0x77843D3B, 0x85EFBE38, + 0xDBFC821C, 0x2997011F, 0x3AC7F2EB, 0xC8AC71E8, + 0x1C661503, 0xEE0D9600, 0xFD5D65F4, 0x0F36E6F7, + 0x61C69362, 0x93AD1061, 0x80FDE395, 0x72966096, + 0xA65C047D, 0x5437877E, 0x4767748A, 0xB50CF789, + 0xEB1FCBAD, 0x197448AE, 
0x0A24BB5A, 0xF84F3859, + 0x2C855CB2, 0xDEEEDFB1, 0xCDBE2C45, 0x3FD5AF46, + 0x7198540D, 0x83F3D70E, 0x90A324FA, 0x62C8A7F9, + 0xB602C312, 0x44694011, 0x5739B3E5, 0xA55230E6, + 0xFB410CC2, 0x092A8FC1, 0x1A7A7C35, 0xE811FF36, + 0x3CDB9BDD, 0xCEB018DE, 0xDDE0EB2A, 0x2F8B6829, + 0x82F63B78, 0x709DB87B, 0x63CD4B8F, 0x91A6C88C, + 0x456CAC67, 0xB7072F64, 0xA457DC90, 0x563C5F93, + 0x082F63B7, 0xFA44E0B4, 0xE9141340, 0x1B7F9043, + 0xCFB5F4A8, 0x3DDE77AB, 0x2E8E845F, 0xDCE5075C, + 0x92A8FC17, 0x60C37F14, 0x73938CE0, 0x81F80FE3, + 0x55326B08, 0xA759E80B, 0xB4091BFF, 0x466298FC, + 0x1871A4D8, 0xEA1A27DB, 0xF94AD42F, 0x0B21572C, + 0xDFEB33C7, 0x2D80B0C4, 0x3ED04330, 0xCCBBC033, + 0xA24BB5A6, 0x502036A5, 0x4370C551, 0xB11B4652, + 0x65D122B9, 0x97BAA1BA, 0x84EA524E, 0x7681D14D, + 0x2892ED69, 0xDAF96E6A, 0xC9A99D9E, 0x3BC21E9D, + 0xEF087A76, 0x1D63F975, 0x0E330A81, 0xFC588982, + 0xB21572C9, 0x407EF1CA, 0x532E023E, 0xA145813D, + 0x758FE5D6, 0x87E466D5, 0x94B49521, 0x66DF1622, + 0x38CC2A06, 0xCAA7A905, 0xD9F75AF1, 0x2B9CD9F2, + 0xFF56BD19, 0x0D3D3E1A, 0x1E6DCDEE, 0xEC064EED, + 0xC38D26C4, 0x31E6A5C7, 0x22B65633, 0xD0DDD530, + 0x0417B1DB, 0xF67C32D8, 0xE52CC12C, 0x1747422F, + 0x49547E0B, 0xBB3FFD08, 0xA86F0EFC, 0x5A048DFF, + 0x8ECEE914, 0x7CA56A17, 0x6FF599E3, 0x9D9E1AE0, + 0xD3D3E1AB, 0x21B862A8, 0x32E8915C, 0xC083125F, + 0x144976B4, 0xE622F5B7, 0xF5720643, 0x07198540, + 0x590AB964, 0xAB613A67, 0xB831C993, 0x4A5A4A90, + 0x9E902E7B, 0x6CFBAD78, 0x7FAB5E8C, 0x8DC0DD8F, + 0xE330A81A, 0x115B2B19, 0x020BD8ED, 0xF0605BEE, + 0x24AA3F05, 0xD6C1BC06, 0xC5914FF2, 0x37FACCF1, + 0x69E9F0D5, 0x9B8273D6, 0x88D28022, 0x7AB90321, + 0xAE7367CA, 0x5C18E4C9, 0x4F48173D, 0xBD23943E, + 0xF36E6F75, 0x0105EC76, 0x12551F82, 0xE03E9C81, + 0x34F4F86A, 0xC69F7B69, 0xD5CF889D, 0x27A40B9E, + 0x79B737BA, 0x8BDCB4B9, 0x988C474D, 0x6AE7C44E, + 0xBE2DA0A5, 0x4C4623A6, 0x5F16D052, 0xAD7D5351, + /* T8_1 */ + 0x00000000, 0x13A29877, 0x274530EE, 0x34E7A899, + 0x4E8A61DC, 0x5D28F9AB, 0x69CF5132, 0x7A6DC945, + 
0x9D14C3B8, 0x8EB65BCF, 0xBA51F356, 0xA9F36B21, + 0xD39EA264, 0xC03C3A13, 0xF4DB928A, 0xE7790AFD, + 0x3FC5F181, 0x2C6769F6, 0x1880C16F, 0x0B225918, + 0x714F905D, 0x62ED082A, 0x560AA0B3, 0x45A838C4, + 0xA2D13239, 0xB173AA4E, 0x859402D7, 0x96369AA0, + 0xEC5B53E5, 0xFFF9CB92, 0xCB1E630B, 0xD8BCFB7C, + 0x7F8BE302, 0x6C297B75, 0x58CED3EC, 0x4B6C4B9B, + 0x310182DE, 0x22A31AA9, 0x1644B230, 0x05E62A47, + 0xE29F20BA, 0xF13DB8CD, 0xC5DA1054, 0xD6788823, + 0xAC154166, 0xBFB7D911, 0x8B507188, 0x98F2E9FF, + 0x404E1283, 0x53EC8AF4, 0x670B226D, 0x74A9BA1A, + 0x0EC4735F, 0x1D66EB28, 0x298143B1, 0x3A23DBC6, + 0xDD5AD13B, 0xCEF8494C, 0xFA1FE1D5, 0xE9BD79A2, + 0x93D0B0E7, 0x80722890, 0xB4958009, 0xA737187E, + 0xFF17C604, 0xECB55E73, 0xD852F6EA, 0xCBF06E9D, + 0xB19DA7D8, 0xA23F3FAF, 0x96D89736, 0x857A0F41, + 0x620305BC, 0x71A19DCB, 0x45463552, 0x56E4AD25, + 0x2C896460, 0x3F2BFC17, 0x0BCC548E, 0x186ECCF9, + 0xC0D23785, 0xD370AFF2, 0xE797076B, 0xF4359F1C, + 0x8E585659, 0x9DFACE2E, 0xA91D66B7, 0xBABFFEC0, + 0x5DC6F43D, 0x4E646C4A, 0x7A83C4D3, 0x69215CA4, + 0x134C95E1, 0x00EE0D96, 0x3409A50F, 0x27AB3D78, + 0x809C2506, 0x933EBD71, 0xA7D915E8, 0xB47B8D9F, + 0xCE1644DA, 0xDDB4DCAD, 0xE9537434, 0xFAF1EC43, + 0x1D88E6BE, 0x0E2A7EC9, 0x3ACDD650, 0x296F4E27, + 0x53028762, 0x40A01F15, 0x7447B78C, 0x67E52FFB, + 0xBF59D487, 0xACFB4CF0, 0x981CE469, 0x8BBE7C1E, + 0xF1D3B55B, 0xE2712D2C, 0xD69685B5, 0xC5341DC2, + 0x224D173F, 0x31EF8F48, 0x050827D1, 0x16AABFA6, + 0x6CC776E3, 0x7F65EE94, 0x4B82460D, 0x5820DE7A, + 0xFBC3FAF9, 0xE861628E, 0xDC86CA17, 0xCF245260, + 0xB5499B25, 0xA6EB0352, 0x920CABCB, 0x81AE33BC, + 0x66D73941, 0x7575A136, 0x419209AF, 0x523091D8, + 0x285D589D, 0x3BFFC0EA, 0x0F186873, 0x1CBAF004, + 0xC4060B78, 0xD7A4930F, 0xE3433B96, 0xF0E1A3E1, + 0x8A8C6AA4, 0x992EF2D3, 0xADC95A4A, 0xBE6BC23D, + 0x5912C8C0, 0x4AB050B7, 0x7E57F82E, 0x6DF56059, + 0x1798A91C, 0x043A316B, 0x30DD99F2, 0x237F0185, + 0x844819FB, 0x97EA818C, 0xA30D2915, 0xB0AFB162, + 0xCAC27827, 0xD960E050, 0xED8748C9, 0xFE25D0BE, + 
0x195CDA43, 0x0AFE4234, 0x3E19EAAD, 0x2DBB72DA, + 0x57D6BB9F, 0x447423E8, 0x70938B71, 0x63311306, + 0xBB8DE87A, 0xA82F700D, 0x9CC8D894, 0x8F6A40E3, + 0xF50789A6, 0xE6A511D1, 0xD242B948, 0xC1E0213F, + 0x26992BC2, 0x353BB3B5, 0x01DC1B2C, 0x127E835B, + 0x68134A1E, 0x7BB1D269, 0x4F567AF0, 0x5CF4E287, + 0x04D43CFD, 0x1776A48A, 0x23910C13, 0x30339464, + 0x4A5E5D21, 0x59FCC556, 0x6D1B6DCF, 0x7EB9F5B8, + 0x99C0FF45, 0x8A626732, 0xBE85CFAB, 0xAD2757DC, + 0xD74A9E99, 0xC4E806EE, 0xF00FAE77, 0xE3AD3600, + 0x3B11CD7C, 0x28B3550B, 0x1C54FD92, 0x0FF665E5, + 0x759BACA0, 0x663934D7, 0x52DE9C4E, 0x417C0439, + 0xA6050EC4, 0xB5A796B3, 0x81403E2A, 0x92E2A65D, + 0xE88F6F18, 0xFB2DF76F, 0xCFCA5FF6, 0xDC68C781, + 0x7B5FDFFF, 0x68FD4788, 0x5C1AEF11, 0x4FB87766, + 0x35D5BE23, 0x26772654, 0x12908ECD, 0x013216BA, + 0xE64B1C47, 0xF5E98430, 0xC10E2CA9, 0xD2ACB4DE, + 0xA8C17D9B, 0xBB63E5EC, 0x8F844D75, 0x9C26D502, + 0x449A2E7E, 0x5738B609, 0x63DF1E90, 0x707D86E7, + 0x0A104FA2, 0x19B2D7D5, 0x2D557F4C, 0x3EF7E73B, + 0xD98EEDC6, 0xCA2C75B1, 0xFECBDD28, 0xED69455F, + 0x97048C1A, 0x84A6146D, 0xB041BCF4, 0xA3E32483, + /* T8_2 */ + 0x00000000, 0xA541927E, 0x4F6F520D, 0xEA2EC073, + 0x9EDEA41A, 0x3B9F3664, 0xD1B1F617, 0x74F06469, + 0x38513EC5, 0x9D10ACBB, 0x773E6CC8, 0xD27FFEB6, + 0xA68F9ADF, 0x03CE08A1, 0xE9E0C8D2, 0x4CA15AAC, + 0x70A27D8A, 0xD5E3EFF4, 0x3FCD2F87, 0x9A8CBDF9, + 0xEE7CD990, 0x4B3D4BEE, 0xA1138B9D, 0x045219E3, + 0x48F3434F, 0xEDB2D131, 0x079C1142, 0xA2DD833C, + 0xD62DE755, 0x736C752B, 0x9942B558, 0x3C032726, + 0xE144FB14, 0x4405696A, 0xAE2BA919, 0x0B6A3B67, + 0x7F9A5F0E, 0xDADBCD70, 0x30F50D03, 0x95B49F7D, + 0xD915C5D1, 0x7C5457AF, 0x967A97DC, 0x333B05A2, + 0x47CB61CB, 0xE28AF3B5, 0x08A433C6, 0xADE5A1B8, + 0x91E6869E, 0x34A714E0, 0xDE89D493, 0x7BC846ED, + 0x0F382284, 0xAA79B0FA, 0x40577089, 0xE516E2F7, + 0xA9B7B85B, 0x0CF62A25, 0xE6D8EA56, 0x43997828, + 0x37691C41, 0x92288E3F, 0x78064E4C, 0xDD47DC32, + 0xC76580D9, 0x622412A7, 0x880AD2D4, 0x2D4B40AA, + 0x59BB24C3, 0xFCFAB6BD, 0x16D476CE, 
0xB395E4B0, + 0xFF34BE1C, 0x5A752C62, 0xB05BEC11, 0x151A7E6F, + 0x61EA1A06, 0xC4AB8878, 0x2E85480B, 0x8BC4DA75, + 0xB7C7FD53, 0x12866F2D, 0xF8A8AF5E, 0x5DE93D20, + 0x29195949, 0x8C58CB37, 0x66760B44, 0xC337993A, + 0x8F96C396, 0x2AD751E8, 0xC0F9919B, 0x65B803E5, + 0x1148678C, 0xB409F5F2, 0x5E273581, 0xFB66A7FF, + 0x26217BCD, 0x8360E9B3, 0x694E29C0, 0xCC0FBBBE, + 0xB8FFDFD7, 0x1DBE4DA9, 0xF7908DDA, 0x52D11FA4, + 0x1E704508, 0xBB31D776, 0x511F1705, 0xF45E857B, + 0x80AEE112, 0x25EF736C, 0xCFC1B31F, 0x6A802161, + 0x56830647, 0xF3C29439, 0x19EC544A, 0xBCADC634, + 0xC85DA25D, 0x6D1C3023, 0x8732F050, 0x2273622E, + 0x6ED23882, 0xCB93AAFC, 0x21BD6A8F, 0x84FCF8F1, + 0xF00C9C98, 0x554D0EE6, 0xBF63CE95, 0x1A225CEB, + 0x8B277743, 0x2E66E53D, 0xC448254E, 0x6109B730, + 0x15F9D359, 0xB0B84127, 0x5A968154, 0xFFD7132A, + 0xB3764986, 0x1637DBF8, 0xFC191B8B, 0x595889F5, + 0x2DA8ED9C, 0x88E97FE2, 0x62C7BF91, 0xC7862DEF, + 0xFB850AC9, 0x5EC498B7, 0xB4EA58C4, 0x11ABCABA, + 0x655BAED3, 0xC01A3CAD, 0x2A34FCDE, 0x8F756EA0, + 0xC3D4340C, 0x6695A672, 0x8CBB6601, 0x29FAF47F, + 0x5D0A9016, 0xF84B0268, 0x1265C21B, 0xB7245065, + 0x6A638C57, 0xCF221E29, 0x250CDE5A, 0x804D4C24, + 0xF4BD284D, 0x51FCBA33, 0xBBD27A40, 0x1E93E83E, + 0x5232B292, 0xF77320EC, 0x1D5DE09F, 0xB81C72E1, + 0xCCEC1688, 0x69AD84F6, 0x83834485, 0x26C2D6FB, + 0x1AC1F1DD, 0xBF8063A3, 0x55AEA3D0, 0xF0EF31AE, + 0x841F55C7, 0x215EC7B9, 0xCB7007CA, 0x6E3195B4, + 0x2290CF18, 0x87D15D66, 0x6DFF9D15, 0xC8BE0F6B, + 0xBC4E6B02, 0x190FF97C, 0xF321390F, 0x5660AB71, + 0x4C42F79A, 0xE90365E4, 0x032DA597, 0xA66C37E9, + 0xD29C5380, 0x77DDC1FE, 0x9DF3018D, 0x38B293F3, + 0x7413C95F, 0xD1525B21, 0x3B7C9B52, 0x9E3D092C, + 0xEACD6D45, 0x4F8CFF3B, 0xA5A23F48, 0x00E3AD36, + 0x3CE08A10, 0x99A1186E, 0x738FD81D, 0xD6CE4A63, + 0xA23E2E0A, 0x077FBC74, 0xED517C07, 0x4810EE79, + 0x04B1B4D5, 0xA1F026AB, 0x4BDEE6D8, 0xEE9F74A6, + 0x9A6F10CF, 0x3F2E82B1, 0xD50042C2, 0x7041D0BC, + 0xAD060C8E, 0x08479EF0, 0xE2695E83, 0x4728CCFD, + 0x33D8A894, 0x96993AEA, 0x7CB7FA99, 
0xD9F668E7, + 0x9557324B, 0x3016A035, 0xDA386046, 0x7F79F238, + 0x0B899651, 0xAEC8042F, 0x44E6C45C, 0xE1A75622, + 0xDDA47104, 0x78E5E37A, 0x92CB2309, 0x378AB177, + 0x437AD51E, 0xE63B4760, 0x0C158713, 0xA954156D, + 0xE5F54FC1, 0x40B4DDBF, 0xAA9A1DCC, 0x0FDB8FB2, + 0x7B2BEBDB, 0xDE6A79A5, 0x3444B9D6, 0x91052BA8, + /* T8_3 */ + 0x00000000, 0xDD45AAB8, 0xBF672381, 0x62228939, + 0x7B2231F3, 0xA6679B4B, 0xC4451272, 0x1900B8CA, + 0xF64463E6, 0x2B01C95E, 0x49234067, 0x9466EADF, + 0x8D665215, 0x5023F8AD, 0x32017194, 0xEF44DB2C, + 0xE964B13D, 0x34211B85, 0x560392BC, 0x8B463804, + 0x924680CE, 0x4F032A76, 0x2D21A34F, 0xF06409F7, + 0x1F20D2DB, 0xC2657863, 0xA047F15A, 0x7D025BE2, + 0x6402E328, 0xB9474990, 0xDB65C0A9, 0x06206A11, + 0xD725148B, 0x0A60BE33, 0x6842370A, 0xB5079DB2, + 0xAC072578, 0x71428FC0, 0x136006F9, 0xCE25AC41, + 0x2161776D, 0xFC24DDD5, 0x9E0654EC, 0x4343FE54, + 0x5A43469E, 0x8706EC26, 0xE524651F, 0x3861CFA7, + 0x3E41A5B6, 0xE3040F0E, 0x81268637, 0x5C632C8F, + 0x45639445, 0x98263EFD, 0xFA04B7C4, 0x27411D7C, + 0xC805C650, 0x15406CE8, 0x7762E5D1, 0xAA274F69, + 0xB327F7A3, 0x6E625D1B, 0x0C40D422, 0xD1057E9A, + 0xABA65FE7, 0x76E3F55F, 0x14C17C66, 0xC984D6DE, + 0xD0846E14, 0x0DC1C4AC, 0x6FE34D95, 0xB2A6E72D, + 0x5DE23C01, 0x80A796B9, 0xE2851F80, 0x3FC0B538, + 0x26C00DF2, 0xFB85A74A, 0x99A72E73, 0x44E284CB, + 0x42C2EEDA, 0x9F874462, 0xFDA5CD5B, 0x20E067E3, + 0x39E0DF29, 0xE4A57591, 0x8687FCA8, 0x5BC25610, + 0xB4868D3C, 0x69C32784, 0x0BE1AEBD, 0xD6A40405, + 0xCFA4BCCF, 0x12E11677, 0x70C39F4E, 0xAD8635F6, + 0x7C834B6C, 0xA1C6E1D4, 0xC3E468ED, 0x1EA1C255, + 0x07A17A9F, 0xDAE4D027, 0xB8C6591E, 0x6583F3A6, + 0x8AC7288A, 0x57828232, 0x35A00B0B, 0xE8E5A1B3, + 0xF1E51979, 0x2CA0B3C1, 0x4E823AF8, 0x93C79040, + 0x95E7FA51, 0x48A250E9, 0x2A80D9D0, 0xF7C57368, + 0xEEC5CBA2, 0x3380611A, 0x51A2E823, 0x8CE7429B, + 0x63A399B7, 0xBEE6330F, 0xDCC4BA36, 0x0181108E, + 0x1881A844, 0xC5C402FC, 0xA7E68BC5, 0x7AA3217D, + 0x52A0C93F, 0x8FE56387, 0xEDC7EABE, 0x30824006, + 0x2982F8CC, 
0xF4C75274, 0x96E5DB4D, 0x4BA071F5, + 0xA4E4AAD9, 0x79A10061, 0x1B838958, 0xC6C623E0, + 0xDFC69B2A, 0x02833192, 0x60A1B8AB, 0xBDE41213, + 0xBBC47802, 0x6681D2BA, 0x04A35B83, 0xD9E6F13B, + 0xC0E649F1, 0x1DA3E349, 0x7F816A70, 0xA2C4C0C8, + 0x4D801BE4, 0x90C5B15C, 0xF2E73865, 0x2FA292DD, + 0x36A22A17, 0xEBE780AF, 0x89C50996, 0x5480A32E, + 0x8585DDB4, 0x58C0770C, 0x3AE2FE35, 0xE7A7548D, + 0xFEA7EC47, 0x23E246FF, 0x41C0CFC6, 0x9C85657E, + 0x73C1BE52, 0xAE8414EA, 0xCCA69DD3, 0x11E3376B, + 0x08E38FA1, 0xD5A62519, 0xB784AC20, 0x6AC10698, + 0x6CE16C89, 0xB1A4C631, 0xD3864F08, 0x0EC3E5B0, + 0x17C35D7A, 0xCA86F7C2, 0xA8A47EFB, 0x75E1D443, + 0x9AA50F6F, 0x47E0A5D7, 0x25C22CEE, 0xF8878656, + 0xE1873E9C, 0x3CC29424, 0x5EE01D1D, 0x83A5B7A5, + 0xF90696D8, 0x24433C60, 0x4661B559, 0x9B241FE1, + 0x8224A72B, 0x5F610D93, 0x3D4384AA, 0xE0062E12, + 0x0F42F53E, 0xD2075F86, 0xB025D6BF, 0x6D607C07, + 0x7460C4CD, 0xA9256E75, 0xCB07E74C, 0x16424DF4, + 0x106227E5, 0xCD278D5D, 0xAF050464, 0x7240AEDC, + 0x6B401616, 0xB605BCAE, 0xD4273597, 0x09629F2F, + 0xE6264403, 0x3B63EEBB, 0x59416782, 0x8404CD3A, + 0x9D0475F0, 0x4041DF48, 0x22635671, 0xFF26FCC9, + 0x2E238253, 0xF36628EB, 0x9144A1D2, 0x4C010B6A, + 0x5501B3A0, 0x88441918, 0xEA669021, 0x37233A99, + 0xD867E1B5, 0x05224B0D, 0x6700C234, 0xBA45688C, + 0xA345D046, 0x7E007AFE, 0x1C22F3C7, 0xC167597F, + 0xC747336E, 0x1A0299D6, 0x782010EF, 0xA565BA57, + 0xBC65029D, 0x6120A825, 0x0302211C, 0xDE478BA4, + 0x31035088, 0xEC46FA30, 0x8E647309, 0x5321D9B1, + 0x4A21617B, 0x9764CBC3, 0xF54642FA, 0x2803E842, + /* T8_4 */ + 0x00000000, 0x38116FAC, 0x7022DF58, 0x4833B0F4, + 0xE045BEB0, 0xD854D11C, 0x906761E8, 0xA8760E44, + 0xC5670B91, 0xFD76643D, 0xB545D4C9, 0x8D54BB65, + 0x2522B521, 0x1D33DA8D, 0x55006A79, 0x6D1105D5, + 0x8F2261D3, 0xB7330E7F, 0xFF00BE8B, 0xC711D127, + 0x6F67DF63, 0x5776B0CF, 0x1F45003B, 0x27546F97, + 0x4A456A42, 0x725405EE, 0x3A67B51A, 0x0276DAB6, + 0xAA00D4F2, 0x9211BB5E, 0xDA220BAA, 0xE2336406, + 0x1BA8B557, 0x23B9DAFB, 0x6B8A6A0F, 0x539B05A3, 
+ 0xFBED0BE7, 0xC3FC644B, 0x8BCFD4BF, 0xB3DEBB13, + 0xDECFBEC6, 0xE6DED16A, 0xAEED619E, 0x96FC0E32, + 0x3E8A0076, 0x069B6FDA, 0x4EA8DF2E, 0x76B9B082, + 0x948AD484, 0xAC9BBB28, 0xE4A80BDC, 0xDCB96470, + 0x74CF6A34, 0x4CDE0598, 0x04EDB56C, 0x3CFCDAC0, + 0x51EDDF15, 0x69FCB0B9, 0x21CF004D, 0x19DE6FE1, + 0xB1A861A5, 0x89B90E09, 0xC18ABEFD, 0xF99BD151, + 0x37516AAE, 0x0F400502, 0x4773B5F6, 0x7F62DA5A, + 0xD714D41E, 0xEF05BBB2, 0xA7360B46, 0x9F2764EA, + 0xF236613F, 0xCA270E93, 0x8214BE67, 0xBA05D1CB, + 0x1273DF8F, 0x2A62B023, 0x625100D7, 0x5A406F7B, + 0xB8730B7D, 0x806264D1, 0xC851D425, 0xF040BB89, + 0x5836B5CD, 0x6027DA61, 0x28146A95, 0x10050539, + 0x7D1400EC, 0x45056F40, 0x0D36DFB4, 0x3527B018, + 0x9D51BE5C, 0xA540D1F0, 0xED736104, 0xD5620EA8, + 0x2CF9DFF9, 0x14E8B055, 0x5CDB00A1, 0x64CA6F0D, + 0xCCBC6149, 0xF4AD0EE5, 0xBC9EBE11, 0x848FD1BD, + 0xE99ED468, 0xD18FBBC4, 0x99BC0B30, 0xA1AD649C, + 0x09DB6AD8, 0x31CA0574, 0x79F9B580, 0x41E8DA2C, + 0xA3DBBE2A, 0x9BCAD186, 0xD3F96172, 0xEBE80EDE, + 0x439E009A, 0x7B8F6F36, 0x33BCDFC2, 0x0BADB06E, + 0x66BCB5BB, 0x5EADDA17, 0x169E6AE3, 0x2E8F054F, + 0x86F90B0B, 0xBEE864A7, 0xF6DBD453, 0xCECABBFF, + 0x6EA2D55C, 0x56B3BAF0, 0x1E800A04, 0x269165A8, + 0x8EE76BEC, 0xB6F60440, 0xFEC5B4B4, 0xC6D4DB18, + 0xABC5DECD, 0x93D4B161, 0xDBE70195, 0xE3F66E39, + 0x4B80607D, 0x73910FD1, 0x3BA2BF25, 0x03B3D089, + 0xE180B48F, 0xD991DB23, 0x91A26BD7, 0xA9B3047B, + 0x01C50A3F, 0x39D46593, 0x71E7D567, 0x49F6BACB, + 0x24E7BF1E, 0x1CF6D0B2, 0x54C56046, 0x6CD40FEA, + 0xC4A201AE, 0xFCB36E02, 0xB480DEF6, 0x8C91B15A, + 0x750A600B, 0x4D1B0FA7, 0x0528BF53, 0x3D39D0FF, + 0x954FDEBB, 0xAD5EB117, 0xE56D01E3, 0xDD7C6E4F, + 0xB06D6B9A, 0x887C0436, 0xC04FB4C2, 0xF85EDB6E, + 0x5028D52A, 0x6839BA86, 0x200A0A72, 0x181B65DE, + 0xFA2801D8, 0xC2396E74, 0x8A0ADE80, 0xB21BB12C, + 0x1A6DBF68, 0x227CD0C4, 0x6A4F6030, 0x525E0F9C, + 0x3F4F0A49, 0x075E65E5, 0x4F6DD511, 0x777CBABD, + 0xDF0AB4F9, 0xE71BDB55, 0xAF286BA1, 0x9739040D, + 0x59F3BFF2, 0x61E2D05E, 0x29D160AA, 0x11C00F06, 
+ 0xB9B60142, 0x81A76EEE, 0xC994DE1A, 0xF185B1B6, + 0x9C94B463, 0xA485DBCF, 0xECB66B3B, 0xD4A70497, + 0x7CD10AD3, 0x44C0657F, 0x0CF3D58B, 0x34E2BA27, + 0xD6D1DE21, 0xEEC0B18D, 0xA6F30179, 0x9EE26ED5, + 0x36946091, 0x0E850F3D, 0x46B6BFC9, 0x7EA7D065, + 0x13B6D5B0, 0x2BA7BA1C, 0x63940AE8, 0x5B856544, + 0xF3F36B00, 0xCBE204AC, 0x83D1B458, 0xBBC0DBF4, + 0x425B0AA5, 0x7A4A6509, 0x3279D5FD, 0x0A68BA51, + 0xA21EB415, 0x9A0FDBB9, 0xD23C6B4D, 0xEA2D04E1, + 0x873C0134, 0xBF2D6E98, 0xF71EDE6C, 0xCF0FB1C0, + 0x6779BF84, 0x5F68D028, 0x175B60DC, 0x2F4A0F70, + 0xCD796B76, 0xF56804DA, 0xBD5BB42E, 0x854ADB82, + 0x2D3CD5C6, 0x152DBA6A, 0x5D1E0A9E, 0x650F6532, + 0x081E60E7, 0x300F0F4B, 0x783CBFBF, 0x402DD013, + 0xE85BDE57, 0xD04AB1FB, 0x9879010F, 0xA0686EA3, + /* T8_5 */ + 0x00000000, 0xEF306B19, 0xDB8CA0C3, 0x34BCCBDA, + 0xB2F53777, 0x5DC55C6E, 0x697997B4, 0x8649FCAD, + 0x6006181F, 0x8F367306, 0xBB8AB8DC, 0x54BAD3C5, + 0xD2F32F68, 0x3DC34471, 0x097F8FAB, 0xE64FE4B2, + 0xC00C303E, 0x2F3C5B27, 0x1B8090FD, 0xF4B0FBE4, + 0x72F90749, 0x9DC96C50, 0xA975A78A, 0x4645CC93, + 0xA00A2821, 0x4F3A4338, 0x7B8688E2, 0x94B6E3FB, + 0x12FF1F56, 0xFDCF744F, 0xC973BF95, 0x2643D48C, + 0x85F4168D, 0x6AC47D94, 0x5E78B64E, 0xB148DD57, + 0x370121FA, 0xD8314AE3, 0xEC8D8139, 0x03BDEA20, + 0xE5F20E92, 0x0AC2658B, 0x3E7EAE51, 0xD14EC548, + 0x570739E5, 0xB83752FC, 0x8C8B9926, 0x63BBF23F, + 0x45F826B3, 0xAAC84DAA, 0x9E748670, 0x7144ED69, + 0xF70D11C4, 0x183D7ADD, 0x2C81B107, 0xC3B1DA1E, + 0x25FE3EAC, 0xCACE55B5, 0xFE729E6F, 0x1142F576, + 0x970B09DB, 0x783B62C2, 0x4C87A918, 0xA3B7C201, + 0x0E045BEB, 0xE13430F2, 0xD588FB28, 0x3AB89031, + 0xBCF16C9C, 0x53C10785, 0x677DCC5F, 0x884DA746, + 0x6E0243F4, 0x813228ED, 0xB58EE337, 0x5ABE882E, + 0xDCF77483, 0x33C71F9A, 0x077BD440, 0xE84BBF59, + 0xCE086BD5, 0x213800CC, 0x1584CB16, 0xFAB4A00F, + 0x7CFD5CA2, 0x93CD37BB, 0xA771FC61, 0x48419778, + 0xAE0E73CA, 0x413E18D3, 0x7582D309, 0x9AB2B810, + 0x1CFB44BD, 0xF3CB2FA4, 0xC777E47E, 0x28478F67, + 0x8BF04D66, 0x64C0267F, 
0x507CEDA5, 0xBF4C86BC, + 0x39057A11, 0xD6351108, 0xE289DAD2, 0x0DB9B1CB, + 0xEBF65579, 0x04C63E60, 0x307AF5BA, 0xDF4A9EA3, + 0x5903620E, 0xB6330917, 0x828FC2CD, 0x6DBFA9D4, + 0x4BFC7D58, 0xA4CC1641, 0x9070DD9B, 0x7F40B682, + 0xF9094A2F, 0x16392136, 0x2285EAEC, 0xCDB581F5, + 0x2BFA6547, 0xC4CA0E5E, 0xF076C584, 0x1F46AE9D, + 0x990F5230, 0x763F3929, 0x4283F2F3, 0xADB399EA, + 0x1C08B7D6, 0xF338DCCF, 0xC7841715, 0x28B47C0C, + 0xAEFD80A1, 0x41CDEBB8, 0x75712062, 0x9A414B7B, + 0x7C0EAFC9, 0x933EC4D0, 0xA7820F0A, 0x48B26413, + 0xCEFB98BE, 0x21CBF3A7, 0x1577387D, 0xFA475364, + 0xDC0487E8, 0x3334ECF1, 0x0788272B, 0xE8B84C32, + 0x6EF1B09F, 0x81C1DB86, 0xB57D105C, 0x5A4D7B45, + 0xBC029FF7, 0x5332F4EE, 0x678E3F34, 0x88BE542D, + 0x0EF7A880, 0xE1C7C399, 0xD57B0843, 0x3A4B635A, + 0x99FCA15B, 0x76CCCA42, 0x42700198, 0xAD406A81, + 0x2B09962C, 0xC439FD35, 0xF08536EF, 0x1FB55DF6, + 0xF9FAB944, 0x16CAD25D, 0x22761987, 0xCD46729E, + 0x4B0F8E33, 0xA43FE52A, 0x90832EF0, 0x7FB345E9, + 0x59F09165, 0xB6C0FA7C, 0x827C31A6, 0x6D4C5ABF, + 0xEB05A612, 0x0435CD0B, 0x308906D1, 0xDFB96DC8, + 0x39F6897A, 0xD6C6E263, 0xE27A29B9, 0x0D4A42A0, + 0x8B03BE0D, 0x6433D514, 0x508F1ECE, 0xBFBF75D7, + 0x120CEC3D, 0xFD3C8724, 0xC9804CFE, 0x26B027E7, + 0xA0F9DB4A, 0x4FC9B053, 0x7B757B89, 0x94451090, + 0x720AF422, 0x9D3A9F3B, 0xA98654E1, 0x46B63FF8, + 0xC0FFC355, 0x2FCFA84C, 0x1B736396, 0xF443088F, + 0xD200DC03, 0x3D30B71A, 0x098C7CC0, 0xE6BC17D9, + 0x60F5EB74, 0x8FC5806D, 0xBB794BB7, 0x544920AE, + 0xB206C41C, 0x5D36AF05, 0x698A64DF, 0x86BA0FC6, + 0x00F3F36B, 0xEFC39872, 0xDB7F53A8, 0x344F38B1, + 0x97F8FAB0, 0x78C891A9, 0x4C745A73, 0xA344316A, + 0x250DCDC7, 0xCA3DA6DE, 0xFE816D04, 0x11B1061D, + 0xF7FEE2AF, 0x18CE89B6, 0x2C72426C, 0xC3422975, + 0x450BD5D8, 0xAA3BBEC1, 0x9E87751B, 0x71B71E02, + 0x57F4CA8E, 0xB8C4A197, 0x8C786A4D, 0x63480154, + 0xE501FDF9, 0x0A3196E0, 0x3E8D5D3A, 0xD1BD3623, + 0x37F2D291, 0xD8C2B988, 0xEC7E7252, 0x034E194B, + 0x8507E5E6, 0x6A378EFF, 0x5E8B4525, 0xB1BB2E3C, + /* T8_6 */ + 
0x00000000, 0x68032CC8, 0xD0065990, 0xB8057558, + 0xA5E0C5D1, 0xCDE3E919, 0x75E69C41, 0x1DE5B089, + 0x4E2DFD53, 0x262ED19B, 0x9E2BA4C3, 0xF628880B, + 0xEBCD3882, 0x83CE144A, 0x3BCB6112, 0x53C84DDA, + 0x9C5BFAA6, 0xF458D66E, 0x4C5DA336, 0x245E8FFE, + 0x39BB3F77, 0x51B813BF, 0xE9BD66E7, 0x81BE4A2F, + 0xD27607F5, 0xBA752B3D, 0x02705E65, 0x6A7372AD, + 0x7796C224, 0x1F95EEEC, 0xA7909BB4, 0xCF93B77C, + 0x3D5B83BD, 0x5558AF75, 0xED5DDA2D, 0x855EF6E5, + 0x98BB466C, 0xF0B86AA4, 0x48BD1FFC, 0x20BE3334, + 0x73767EEE, 0x1B755226, 0xA370277E, 0xCB730BB6, + 0xD696BB3F, 0xBE9597F7, 0x0690E2AF, 0x6E93CE67, + 0xA100791B, 0xC90355D3, 0x7106208B, 0x19050C43, + 0x04E0BCCA, 0x6CE39002, 0xD4E6E55A, 0xBCE5C992, + 0xEF2D8448, 0x872EA880, 0x3F2BDDD8, 0x5728F110, + 0x4ACD4199, 0x22CE6D51, 0x9ACB1809, 0xF2C834C1, + 0x7AB7077A, 0x12B42BB2, 0xAAB15EEA, 0xC2B27222, + 0xDF57C2AB, 0xB754EE63, 0x0F519B3B, 0x6752B7F3, + 0x349AFA29, 0x5C99D6E1, 0xE49CA3B9, 0x8C9F8F71, + 0x917A3FF8, 0xF9791330, 0x417C6668, 0x297F4AA0, + 0xE6ECFDDC, 0x8EEFD114, 0x36EAA44C, 0x5EE98884, + 0x430C380D, 0x2B0F14C5, 0x930A619D, 0xFB094D55, + 0xA8C1008F, 0xC0C22C47, 0x78C7591F, 0x10C475D7, + 0x0D21C55E, 0x6522E996, 0xDD279CCE, 0xB524B006, + 0x47EC84C7, 0x2FEFA80F, 0x97EADD57, 0xFFE9F19F, + 0xE20C4116, 0x8A0F6DDE, 0x320A1886, 0x5A09344E, + 0x09C17994, 0x61C2555C, 0xD9C72004, 0xB1C40CCC, + 0xAC21BC45, 0xC422908D, 0x7C27E5D5, 0x1424C91D, + 0xDBB77E61, 0xB3B452A9, 0x0BB127F1, 0x63B20B39, + 0x7E57BBB0, 0x16549778, 0xAE51E220, 0xC652CEE8, + 0x959A8332, 0xFD99AFFA, 0x459CDAA2, 0x2D9FF66A, + 0x307A46E3, 0x58796A2B, 0xE07C1F73, 0x887F33BB, + 0xF56E0EF4, 0x9D6D223C, 0x25685764, 0x4D6B7BAC, + 0x508ECB25, 0x388DE7ED, 0x808892B5, 0xE88BBE7D, + 0xBB43F3A7, 0xD340DF6F, 0x6B45AA37, 0x034686FF, + 0x1EA33676, 0x76A01ABE, 0xCEA56FE6, 0xA6A6432E, + 0x6935F452, 0x0136D89A, 0xB933ADC2, 0xD130810A, + 0xCCD53183, 0xA4D61D4B, 0x1CD36813, 0x74D044DB, + 0x27180901, 0x4F1B25C9, 0xF71E5091, 0x9F1D7C59, + 0x82F8CCD0, 0xEAFBE018, 0x52FE9540, 0x3AFDB988, + 
0xC8358D49, 0xA036A181, 0x1833D4D9, 0x7030F811, + 0x6DD54898, 0x05D66450, 0xBDD31108, 0xD5D03DC0, + 0x8618701A, 0xEE1B5CD2, 0x561E298A, 0x3E1D0542, + 0x23F8B5CB, 0x4BFB9903, 0xF3FEEC5B, 0x9BFDC093, + 0x546E77EF, 0x3C6D5B27, 0x84682E7F, 0xEC6B02B7, + 0xF18EB23E, 0x998D9EF6, 0x2188EBAE, 0x498BC766, + 0x1A438ABC, 0x7240A674, 0xCA45D32C, 0xA246FFE4, + 0xBFA34F6D, 0xD7A063A5, 0x6FA516FD, 0x07A63A35, + 0x8FD9098E, 0xE7DA2546, 0x5FDF501E, 0x37DC7CD6, + 0x2A39CC5F, 0x423AE097, 0xFA3F95CF, 0x923CB907, + 0xC1F4F4DD, 0xA9F7D815, 0x11F2AD4D, 0x79F18185, + 0x6414310C, 0x0C171DC4, 0xB412689C, 0xDC114454, + 0x1382F328, 0x7B81DFE0, 0xC384AAB8, 0xAB878670, + 0xB66236F9, 0xDE611A31, 0x66646F69, 0x0E6743A1, + 0x5DAF0E7B, 0x35AC22B3, 0x8DA957EB, 0xE5AA7B23, + 0xF84FCBAA, 0x904CE762, 0x2849923A, 0x404ABEF2, + 0xB2828A33, 0xDA81A6FB, 0x6284D3A3, 0x0A87FF6B, + 0x17624FE2, 0x7F61632A, 0xC7641672, 0xAF673ABA, + 0xFCAF7760, 0x94AC5BA8, 0x2CA92EF0, 0x44AA0238, + 0x594FB2B1, 0x314C9E79, 0x8949EB21, 0xE14AC7E9, + 0x2ED97095, 0x46DA5C5D, 0xFEDF2905, 0x96DC05CD, + 0x8B39B544, 0xE33A998C, 0x5B3FECD4, 0x333CC01C, + 0x60F48DC6, 0x08F7A10E, 0xB0F2D456, 0xD8F1F89E, + 0xC5144817, 0xAD1764DF, 0x15121187, 0x7D113D4F, + /* T8_7 */ + 0x00000000, 0x493C7D27, 0x9278FA4E, 0xDB448769, + 0x211D826D, 0x6821FF4A, 0xB3657823, 0xFA590504, + 0x423B04DA, 0x0B0779FD, 0xD043FE94, 0x997F83B3, + 0x632686B7, 0x2A1AFB90, 0xF15E7CF9, 0xB86201DE, + 0x847609B4, 0xCD4A7493, 0x160EF3FA, 0x5F328EDD, + 0xA56B8BD9, 0xEC57F6FE, 0x37137197, 0x7E2F0CB0, + 0xC64D0D6E, 0x8F717049, 0x5435F720, 0x1D098A07, + 0xE7508F03, 0xAE6CF224, 0x7528754D, 0x3C14086A, + 0x0D006599, 0x443C18BE, 0x9F789FD7, 0xD644E2F0, + 0x2C1DE7F4, 0x65219AD3, 0xBE651DBA, 0xF759609D, + 0x4F3B6143, 0x06071C64, 0xDD439B0D, 0x947FE62A, + 0x6E26E32E, 0x271A9E09, 0xFC5E1960, 0xB5626447, + 0x89766C2D, 0xC04A110A, 0x1B0E9663, 0x5232EB44, + 0xA86BEE40, 0xE1579367, 0x3A13140E, 0x732F6929, + 0xCB4D68F7, 0x827115D0, 0x593592B9, 0x1009EF9E, + 0xEA50EA9A, 0xA36C97BD, 0x782810D4, 
0x31146DF3, + 0x1A00CB32, 0x533CB615, 0x8878317C, 0xC1444C5B, + 0x3B1D495F, 0x72213478, 0xA965B311, 0xE059CE36, + 0x583BCFE8, 0x1107B2CF, 0xCA4335A6, 0x837F4881, + 0x79264D85, 0x301A30A2, 0xEB5EB7CB, 0xA262CAEC, + 0x9E76C286, 0xD74ABFA1, 0x0C0E38C8, 0x453245EF, + 0xBF6B40EB, 0xF6573DCC, 0x2D13BAA5, 0x642FC782, + 0xDC4DC65C, 0x9571BB7B, 0x4E353C12, 0x07094135, + 0xFD504431, 0xB46C3916, 0x6F28BE7F, 0x2614C358, + 0x1700AEAB, 0x5E3CD38C, 0x857854E5, 0xCC4429C2, + 0x361D2CC6, 0x7F2151E1, 0xA465D688, 0xED59ABAF, + 0x553BAA71, 0x1C07D756, 0xC743503F, 0x8E7F2D18, + 0x7426281C, 0x3D1A553B, 0xE65ED252, 0xAF62AF75, + 0x9376A71F, 0xDA4ADA38, 0x010E5D51, 0x48322076, + 0xB26B2572, 0xFB575855, 0x2013DF3C, 0x692FA21B, + 0xD14DA3C5, 0x9871DEE2, 0x4335598B, 0x0A0924AC, + 0xF05021A8, 0xB96C5C8F, 0x6228DBE6, 0x2B14A6C1, + 0x34019664, 0x7D3DEB43, 0xA6796C2A, 0xEF45110D, + 0x151C1409, 0x5C20692E, 0x8764EE47, 0xCE589360, + 0x763A92BE, 0x3F06EF99, 0xE44268F0, 0xAD7E15D7, + 0x572710D3, 0x1E1B6DF4, 0xC55FEA9D, 0x8C6397BA, + 0xB0779FD0, 0xF94BE2F7, 0x220F659E, 0x6B3318B9, + 0x916A1DBD, 0xD856609A, 0x0312E7F3, 0x4A2E9AD4, + 0xF24C9B0A, 0xBB70E62D, 0x60346144, 0x29081C63, + 0xD3511967, 0x9A6D6440, 0x4129E329, 0x08159E0E, + 0x3901F3FD, 0x703D8EDA, 0xAB7909B3, 0xE2457494, + 0x181C7190, 0x51200CB7, 0x8A648BDE, 0xC358F6F9, + 0x7B3AF727, 0x32068A00, 0xE9420D69, 0xA07E704E, + 0x5A27754A, 0x131B086D, 0xC85F8F04, 0x8163F223, + 0xBD77FA49, 0xF44B876E, 0x2F0F0007, 0x66337D20, + 0x9C6A7824, 0xD5560503, 0x0E12826A, 0x472EFF4D, + 0xFF4CFE93, 0xB67083B4, 0x6D3404DD, 0x240879FA, + 0xDE517CFE, 0x976D01D9, 0x4C2986B0, 0x0515FB97, + 0x2E015D56, 0x673D2071, 0xBC79A718, 0xF545DA3F, + 0x0F1CDF3B, 0x4620A21C, 0x9D642575, 0xD4585852, + 0x6C3A598C, 0x250624AB, 0xFE42A3C2, 0xB77EDEE5, + 0x4D27DBE1, 0x041BA6C6, 0xDF5F21AF, 0x96635C88, + 0xAA7754E2, 0xE34B29C5, 0x380FAEAC, 0x7133D38B, + 0x8B6AD68F, 0xC256ABA8, 0x19122CC1, 0x502E51E6, + 0xE84C5038, 0xA1702D1F, 0x7A34AA76, 0x3308D751, + 0xC951D255, 0x806DAF72, 0x5B29281B, 
0x1215553C, + 0x230138CF, 0x6A3D45E8, 0xB179C281, 0xF845BFA6, + 0x021CBAA2, 0x4B20C785, 0x906440EC, 0xD9583DCB, + 0x613A3C15, 0x28064132, 0xF342C65B, 0xBA7EBB7C, + 0x4027BE78, 0x091BC35F, 0xD25F4436, 0x9B633911, + 0xA777317B, 0xEE4B4C5C, 0x350FCB35, 0x7C33B612, + 0x866AB316, 0xCF56CE31, 0x14124958, 0x5D2E347F, + 0xE54C35A1, 0xAC704886, 0x7734CFEF, 0x3E08B2C8, + 0xC451B7CC, 0x8D6DCAEB, 0x56294D82, 0x1F1530A5 + }; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java new file mode 100644 index 000000000..46c218868 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/SnappyCompressorInputStream.java @@ -0,0 +1,423 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.snappy; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.compressors.CompressorInputStream; + +/** + * CompressorInputStream for the raw Snappy format. + * + *

    This implementation uses an internal buffer in order to handle + * the back-references that are at the heart of the LZ77 algorithm. + * The size of the buffer must be at least as big as the biggest + * offset used in the compressed stream. The current version of the + * Snappy algorithm as defined by Google works on 32k blocks and + * doesn't contain offsets bigger than 32k, which is the default block + * size used by this class.

    + * + * @see Snappy compressed format description + * @since 1.7 + */ +public class SnappyCompressorInputStream extends CompressorInputStream { + + /** Mask used to determine the type of "tag" is being processed */ + private static final int TAG_MASK = 0x03; + + /** Default block size */ + public static final int DEFAULT_BLOCK_SIZE = 32768; + + /** Buffer to write decompressed bytes to for back-references */ + private final byte[] decompressBuf; + + /** One behind the index of the last byte in the buffer that was written */ + private int writeIndex; + + /** Index of the next byte to be read. */ + private int readIndex; + + /** The actual block size specified */ + private final int blockSize; + + /** The underlying stream to read compressed data from */ + private final InputStream in; + + /** The size of the uncompressed data */ + private final int size; + + /** Number of uncompressed bytes still to be read. */ + private int uncompressedBytesRemaining; + + // used in no-arg read method + private final byte[] oneByte = new byte[1]; + + private boolean endReached = false; + + /** + * Constructor using the default buffer size of 32k. + * + * @param is + * An InputStream to read compressed data from + * + * @throws IOException + */ + public SnappyCompressorInputStream(final InputStream is) throws IOException { + this(is, DEFAULT_BLOCK_SIZE); + } + + /** + * Constructor using a configurable buffer size. + * + * @param is + * An InputStream to read compressed data from + * @param blockSize + * The block size used in compression + * + * @throws IOException + */ + public SnappyCompressorInputStream(final InputStream is, final int blockSize) + throws IOException { + this.in = is; + this.blockSize = blockSize; + this.decompressBuf = new byte[blockSize * 3]; + this.writeIndex = readIndex = 0; + uncompressedBytesRemaining = size = (int) readSize(); + } + + /** {@inheritDoc} */ + @Override + public int read() throws IOException { + return read(oneByte, 0, 1) == -1 ? 
-1 : oneByte[0] & 0xFF; + } + + /** {@inheritDoc} */ + @Override + public void close() throws IOException { + in.close(); + } + + /** {@inheritDoc} */ + @Override + public int available() { + return writeIndex - readIndex; + } + + /** + * {@inheritDoc} + */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (endReached) { + return -1; + } + final int avail = available(); + if (len > avail) { + fill(len - avail); + } + + int readable = Math.min(len, available()); + System.arraycopy(decompressBuf, readIndex, b, off, readable); + readIndex += readable; + if (readIndex > blockSize) { + slideBuffer(); + } + return readable; + } + + /** + * Try to fill the buffer with enough bytes to satisfy the current + * read request. + * + * @param len the number of uncompressed bytes to read + */ + private void fill(int len) throws IOException { + if (uncompressedBytesRemaining == 0) { + endReached = true; + } + int readNow = Math.min(len, uncompressedBytesRemaining); + + while (readNow > 0) { + final int b = readOneByte(); + int length = 0; + long offset = 0; + + switch (b & TAG_MASK) { + + case 0x00: + + length = readLiteralLength(b); + + if (expandLiteral(length)) { + return; + } + break; + + case 0x01: + + /* + * These elements can encode lengths between [4..11] bytes and + * offsets between [0..2047] bytes. (len-4) occupies three bits + * and is stored in bits [2..4] of the tag byte. The offset + * occupies 11 bits, of which the upper three are stored in the + * upper three bits ([5..7]) of the tag byte, and the lower + * eight are stored in a byte following the tag byte. + */ + + length = 4 + ((b >> 2) & 0x07); + offset = (b & 0xE0) << 3; + offset |= readOneByte(); + + if (expandCopy(offset, length)) { + return; + } + break; + + case 0x02: + + /* + * These elements can encode lengths between [1..64] and offsets + * from [0..65535]. (len-1) occupies six bits and is stored in + * the upper six bits ([2..7]) of the tag byte. 
The offset is + * stored as a little-endian 16-bit integer in the two bytes + * following the tag byte. + */ + + length = (b >> 2) + 1; + + offset = readOneByte(); + offset |= readOneByte() << 8; + + if (expandCopy(offset, length)) { + return; + } + break; + + case 0x03: + + /* + * These are like the copies with 2-byte offsets (see previous + * subsection), except that the offset is stored as a 32-bit + * integer instead of a 16-bit integer (and thus will occupy + * four bytes). + */ + + length = (b >> 2) + 1; + + offset = readOneByte(); + offset |= readOneByte() << 8; + offset |= readOneByte() << 16; + offset |= ((long) readOneByte()) << 24; + + if (expandCopy(offset, length)) { + return; + } + break; + } + + readNow -= length; + uncompressedBytesRemaining -= length; + } + } + + /** + * Slide buffer. + * + *

    Move all bytes of the buffer after the first block down to + * the beginning of the buffer.

    + */ + private void slideBuffer() { + System.arraycopy(decompressBuf, blockSize, decompressBuf, 0, + blockSize * 2); + writeIndex -= blockSize; + readIndex -= blockSize; + } + + + /* + * For literals up to and including 60 bytes in length, the + * upper six bits of the tag byte contain (len-1). The literal + * follows immediately thereafter in the bytestream. - For + * longer literals, the (len-1) value is stored after the tag + * byte, little-endian. The upper six bits of the tag byte + * describe how many bytes are used for the length; 60, 61, 62 + * or 63 for 1-4 bytes, respectively. The literal itself follows + * after the length. + */ + private int readLiteralLength(int b) throws IOException { + int length; + switch (b >> 2) { + case 60: + length = readOneByte(); + break; + case 61: + length = readOneByte(); + length |= readOneByte() << 8; + break; + case 62: + length = readOneByte(); + length |= readOneByte() << 8; + length |= readOneByte() << 16; + break; + case 63: + length = readOneByte(); + length |= readOneByte() << 8; + length |= readOneByte() << 16; + length |= (((long) readOneByte()) << 24); + break; + default: + length = b >> 2; + break; + } + + return length + 1; + } + + /** + * Literals are uncompressed data stored directly in the byte stream. + * + * @param length + * The number of bytes to read from the underlying stream + * + * @throws IOException + * If the first byte cannot be read for any reason other than + * end of file, or if the input stream has been closed, or if + * some other I/O error occurs. 
+ * @return True if the decompressed data should be flushed + */ + private boolean expandLiteral(final int length) throws IOException { + int bytesRead = in.read(decompressBuf, writeIndex, length); + count(bytesRead); + if (length != bytesRead) { + throw new IOException("Premature end of stream"); + } + + writeIndex += length; + return writeIndex >= 2 * this.blockSize; + } + + /** + * Copies are references back into previous decompressed data, telling the + * decompressor to reuse data it has previously decoded. They encode two + * values: The offset, saying how many bytes back from the current position + * to read, and the length, how many bytes to copy. Offsets of zero can be + * encoded, but are not legal; similarly, it is possible to encode + * backreferences that would go past the end of the block (offset > current + * decompressed position), which is also nonsensical and thus not allowed. + * + * @param off + * The offset from the backward from the end of expanded stream + * @param length + * The number of bytes to copy + * + * @throws IOException + * An the offset expands past the front of the decompression + * buffer + * @return True if the decompressed data should be flushed + */ + private boolean expandCopy(final long off, int length) throws IOException { + if (off > blockSize) { + throw new IOException("Offset is larger than block size"); + } + int offset = (int) off; + + if (offset == 1) { + byte lastChar = decompressBuf[writeIndex - 1]; + for (int i = 0; i < length; i++) { + decompressBuf[writeIndex++] = lastChar; + } + } else if (length < offset) { + System.arraycopy(decompressBuf, writeIndex - offset, + decompressBuf, writeIndex, length); + writeIndex += length; + } else { + int fullRotations = length / offset; + int pad = length - (offset * fullRotations); + + while (fullRotations-- != 0) { + System.arraycopy(decompressBuf, writeIndex - offset, + decompressBuf, writeIndex, offset); + writeIndex += offset; + } + + if (pad > 0) { + 
System.arraycopy(decompressBuf, writeIndex - offset, + decompressBuf, writeIndex, pad); + + writeIndex += pad; + } + } + return writeIndex >= 2 * this.blockSize; + } + + /** + * This helper method reads the next byte of data from the input stream. The + * value byte is returned as an int in the range 0 + * to 255. If no byte is available because the end of the + * stream has been reached, an Exception is thrown. + * + * @return The next byte of data + * @throws IOException + * EOF is reached or error reading the stream + */ + private int readOneByte() throws IOException { + int b = in.read(); + if (b == -1) { + throw new IOException("Premature end of stream"); + } + count(1); + return b & 0xFF; + } + + /** + * The stream starts with the uncompressed length (up to a maximum of 2^32 - + * 1), stored as a little-endian varint. Varints consist of a series of + * bytes, where the lower 7 bits are data and the upper bit is set iff there + * are more bytes to be read. In other words, an uncompressed length of 64 + * would be stored as 0x40, and an uncompressed length of 2097150 (0x1FFFFE) + * would be stored as 0xFE 0xFF 0x7F. + * + * @return The size of the uncompressed data + * + * @throws IOException + * Could not read a byte + */ + private long readSize() throws IOException { + int index = 0; + long sz = 0; + int b = 0; + + do { + b = readOneByte(); + sz |= (b & 0x7f) << (index++ * 7); + } while (0 != (b & 0x80)); + return sz; + } + + /** + * Get the uncompressed size of the stream + * + * @return the uncompressed size + */ + public int getSize() { + return size; + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/package.html new file mode 100644 index 000000000..a0d61de05 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/snappy/package.html @@ -0,0 +1,38 @@ + + + +

    Provides stream classes for decompressing streams using the + Snappy + algorithm.

    + +

    The raw Snappy format which only contains the compressed data + is supported by the SnappyCompressorInputStream + class while the so called "framing format" is implemented + by FramedSnappyCompressorInputStream. Note there + have been different versions of the framing format specification, + the implementation in Commons Compress is based on the + specification "Last revised: 2013-10-25".

    + +

    Only the "framing format" can be auto-detected; this means you + have to specify the format explicitly if you want to read a + "raw" Snappy stream + via CompressorStreamFactory.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java new file mode 100644 index 000000000..789448dac --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/ZCompressorInputStream.java @@ -0,0 +1,152 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.z; + +import java.io.IOException; +import java.io.InputStream; +import org.apache.commons.compress.compressors.z._internal_.InternalLZWInputStream; + +/** + * Input stream that decompresses .Z files. 
+ * @NotThreadSafe + * @since 1.7 + */ +public class ZCompressorInputStream extends InternalLZWInputStream { + private static final int MAGIC_1 = 0x1f; + private static final int MAGIC_2 = 0x9d; + private static final int BLOCK_MODE_MASK = 0x80; + private static final int MAX_CODE_SIZE_MASK = 0x1f; + private final boolean blockMode; + private final int maxCodeSize; + private long totalCodesRead = 0; + + public ZCompressorInputStream(InputStream inputStream) throws IOException { + super(inputStream); + int firstByte = in.read(); + int secondByte = in.read(); + int thirdByte = in.read(); + if (firstByte != MAGIC_1 || secondByte != MAGIC_2 || thirdByte < 0) { + throw new IOException("Input is not in .Z format"); + } + blockMode = (thirdByte & BLOCK_MODE_MASK) != 0; + maxCodeSize = thirdByte & MAX_CODE_SIZE_MASK; + if (blockMode) { + setClearCode(codeSize); + } + initializeTables(maxCodeSize); + clearEntries(); + } + + private void clearEntries() { + tableSize = 1 << 8; + if (blockMode) { + tableSize++; + } + } + + /** + * {@inheritDoc} + *

    This method is only protected for technical reasons + * and is not part of Commons Compress' published API. It may + * change or disappear without warning.

    + */ + @Override + protected int readNextCode() throws IOException { + int code = super.readNextCode(); + if (code >= 0) { + ++totalCodesRead; + } + return code; + } + + private void reAlignReading() throws IOException { + // "compress" works in multiples of 8 symbols, each codeBits bits long. + // When codeBits changes, the remaining unused symbols in the current + // group of 8 are still written out, in the old codeSize, + // as garbage values (usually zeroes) that need to be skipped. + long codeReadsToThrowAway = 8 - (totalCodesRead % 8); + if (codeReadsToThrowAway == 8) { + codeReadsToThrowAway = 0; + } + for (long i = 0; i < codeReadsToThrowAway; i++) { + readNextCode(); + } + bitsCached = 0; + bitsCachedSize = 0; + } + + /** + * {@inheritDoc} + *

    This method is only protected for technical reasons + * and is not part of Commons Compress' published API. It may + * change or disappear without warning.

    + */ + @Override + protected int addEntry(int previousCode, byte character) throws IOException { + final int maxTableSize = 1 << codeSize; + int r = addEntry(previousCode, character, maxTableSize); + if (tableSize == maxTableSize && codeSize < maxCodeSize) { + reAlignReading(); + codeSize++; + } + return r; + } + + /** + * {@inheritDoc} + *

    This method is only protected for technical reasons + * and is not part of Commons Compress' published API. It may + * change or disappear without warning.

    + */ + @Override + protected int decompressNextSymbol() throws IOException { + // + // table entry table entry + // _____________ _____ + // table entry / \ / \ + // ____________/ \ \ + // / / \ / \ \ + // +---+---+---+---+---+---+---+---+---+---+ + // | . | . | . | . | . | . | . | . | . | . | + // +---+---+---+---+---+---+---+---+---+---+ + // |<--------->|<------------->|<----->|<->| + // symbol symbol symbol symbol + // + final int code = readNextCode(); + if (code < 0) { + return -1; + } else if (blockMode && code == clearCode) { + clearEntries(); + reAlignReading(); + codeSize = 9; + previousCode = -1; + return 0; + } else { + boolean addedUnfinishedEntry = false; + if (code == tableSize) { + addRepeatOfPreviousCode(); + addedUnfinishedEntry = true; + } else if (code > tableSize) { + throw new IOException(String.format("Invalid %d bit code 0x%x", Integer.valueOf(codeSize), Integer.valueOf(code))); + } + return expandCodeToOutputStack(code, addedUnfinishedEntry); + } + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.java new file mode 100644 index 000000000..a19e54b42 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/InternalLZWInputStream.java @@ -0,0 +1,197 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.compressors.z._internal_; + +import java.io.IOException; +import java.io.InputStream; + +import org.apache.commons.compress.compressors.CompressorInputStream; + +/** + * This class is only public for technical reasons and is not + * part of Commons Compress' published API - it may change or + * disappear without warning. + * + *

    Base-class for traditional Unix ".Z" compression and the + * Unshrinking method of ZIP archive.

    + * + * @NotThreadSafe + * @since 1.7 + */ +public abstract class InternalLZWInputStream extends CompressorInputStream { + private final byte[] oneByte = new byte[1]; + + protected final InputStream in; + protected int clearCode = -1; + protected int codeSize = 9; + protected int bitsCached = 0; + protected int bitsCachedSize = 0; + protected int previousCode = -1; + protected int tableSize = 0; + protected int[] prefixes; + protected byte[] characters; + private byte[] outputStack; + private int outputStackLocation; + + protected InternalLZWInputStream(InputStream inputStream) throws IOException { + this.in = inputStream; + } + + @Override + public void close() throws IOException { + in.close(); + } + + @Override + public int read() throws IOException { + int ret = read(oneByte); + if (ret < 0) { + return ret; + } + return 0xff & oneByte[0]; + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + int bytesRead = readFromStack(b, off, len); + while (len - bytesRead > 0) { + int result = decompressNextSymbol(); + if (result < 0) { + if (bytesRead > 0) { + count(bytesRead); + return bytesRead; + } + return result; + } + bytesRead += readFromStack(b, off + bytesRead, len - bytesRead); + } + count(bytesRead); + return bytesRead; + } + + /** + * Read the next code and expand it. + */ + protected abstract int decompressNextSymbol() throws IOException; + + /** + * Add a new entry to the dictionary. + */ + protected abstract int addEntry(int previousCode, byte character) + throws IOException; + + /** + * Sets the clear code based on the code size. + */ + protected void setClearCode(int codeSize) { + clearCode = (1 << (codeSize - 1)); + } + + /** + * Initializes the arrays based on the maximum code size. 
+ */ + protected void initializeTables(int maxCodeSize) { + final int maxTableSize = 1 << maxCodeSize; + prefixes = new int[maxTableSize]; + characters = new byte[maxTableSize]; + outputStack = new byte[maxTableSize]; + outputStackLocation = maxTableSize; + final int max = 1 << 8; + for (int i = 0; i < max; i++) { + prefixes[i] = -1; + characters[i] = (byte) i; + } + } + + /** + * Reads the next code from the stream. + */ + protected int readNextCode() throws IOException { + while (bitsCachedSize < codeSize) { + final int nextByte = in.read(); + if (nextByte < 0) { + return nextByte; + } + bitsCached |= (nextByte << bitsCachedSize); + bitsCachedSize += 8; + } + final int mask = (1 << codeSize) - 1; + final int code = (bitsCached & mask); + bitsCached >>>= codeSize; + bitsCachedSize -= codeSize; + return code; + } + + /** + * Adds a new entry if the maximum table size hasn't been exceeded + * and returns the new index. + */ + protected int addEntry(int previousCode, byte character, int maxTableSize) { + if (tableSize < maxTableSize) { + final int index = tableSize; + prefixes[tableSize] = previousCode; + characters[tableSize] = character; + tableSize++; + return index; + } + return -1; + } + + /** + * Add entry for repeat of previousCode we haven't added, yet. 
+ */ + protected int addRepeatOfPreviousCode() throws IOException { + if (previousCode == -1) { + // can't have a repeat for the very first code + throw new IOException("The first code can't be a reference to its preceding code"); + } + byte firstCharacter = 0; + for (int last = previousCode; last >= 0; last = prefixes[last]) { + firstCharacter = characters[last]; + } + return addEntry(previousCode, firstCharacter); + } + + /** + * Expands the entry with index code to the output stack and may + * create a new entry + */ + protected int expandCodeToOutputStack(int code, boolean addedUnfinishedEntry) + throws IOException { + for (int entry = code; entry >= 0; entry = prefixes[entry]) { + outputStack[--outputStackLocation] = characters[entry]; + } + if (previousCode != -1 && !addedUnfinishedEntry) { + addEntry(previousCode, outputStack[outputStackLocation]); + } + previousCode = code; + return outputStackLocation; + } + + private int readFromStack(byte[] b, int off, int len) { + int remainingInStack = outputStack.length - outputStackLocation; + if (remainingInStack > 0) { + int maxLength = Math.min(remainingInStack, len); + System.arraycopy(outputStack, outputStackLocation, b, off, maxLength); + outputStackLocation += maxLength; + return maxLength; + } + return 0; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/package.html new file mode 100644 index 000000000..b0f1525ec --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/_internal_/package.html @@ -0,0 +1,25 @@ + + + +

    This package is not part of Commons Compress' published + API. It may change without warning. Contains classes + used by Commons Compress internally.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/package.html new file mode 100644 index 000000000..ca9924b78 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/compressors/z/package.html @@ -0,0 +1,24 @@ + + + +

    Provides stream classes for decompressing + streams using the "compress" algorithm used to write .Z files.

    + + diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/ArchiveUtils.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/ArchiveUtils.java new file mode 100644 index 000000000..16beed2e0 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/ArchiveUtils.java @@ -0,0 +1,253 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.utils; + +import java.io.UnsupportedEncodingException; + +import org.apache.commons.compress.archivers.ArchiveEntry; + +/** + * Generic Archive utilities + */ +public class ArchiveUtils { + + /** Private constructor to prevent instantiation of this utility class. */ + private ArchiveUtils(){ + } + + /** + * Generates a string containing the name, isDirectory setting and size of an entry. + *

    + * For example: + *

    +     * -    2000 main.c
    +     * d     100 testfiles
    +     * 
    + * + * @return the representation of the entry + */ + public static String toString(ArchiveEntry entry){ + StringBuilder sb = new StringBuilder(); + sb.append(entry.isDirectory()? 'd' : '-');// c.f. "ls -l" output + String size = Long.toString(entry.getSize()); + sb.append(' '); + // Pad output to 7 places, leading spaces + for(int i=7; i > size.length(); i--){ + sb.append(' '); + } + sb.append(size); + sb.append(' ').append(entry.getName()); + return sb.toString(); + } + + /** + * Check if buffer contents matches Ascii String. + * + * @param expected + * @param buffer + * @param offset + * @param length + * @return {@code true} if buffer is the same as the expected string + */ + public static boolean matchAsciiBuffer( + String expected, byte[] buffer, int offset, int length){ + byte[] buffer1; + try { + buffer1 = expected.getBytes(CharsetNames.US_ASCII); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // Should not happen + } + return isEqual(buffer1, 0, buffer1.length, buffer, offset, length, false); + } + + /** + * Check if buffer contents matches Ascii String. + * + * @param expected + * @param buffer + * @return {@code true} if buffer is the same as the expected string + */ + public static boolean matchAsciiBuffer(String expected, byte[] buffer){ + return matchAsciiBuffer(expected, buffer, 0, buffer.length); + } + + /** + * Convert a string to Ascii bytes. + * Used for comparing "magic" strings which need to be independent of the default Locale. + * + * @param inputString + * @return the bytes + */ + public static byte[] toAsciiBytes(String inputString){ + try { + return inputString.getBytes(CharsetNames.US_ASCII); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // Should never happen + } + } + + /** + * Convert an input byte array to a String using the ASCII character set. 
+ * + * @param inputBytes + * @return the bytes, interpreted as an Ascii string + */ + public static String toAsciiString(final byte[] inputBytes){ + try { + return new String(inputBytes, CharsetNames.US_ASCII); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // Should never happen + } + } + + /** + * Convert an input byte array to a String using the ASCII character set. + * + * @param inputBytes input byte array + * @param offset offset within array + * @param length length of array + * @return the bytes, interpreted as an Ascii string + */ + public static String toAsciiString(final byte[] inputBytes, int offset, int length){ + try { + return new String(inputBytes, offset, length, CharsetNames.US_ASCII); + } catch (UnsupportedEncodingException e) { + throw new RuntimeException(e); // Should never happen + } + } + + /** + * Compare byte buffers, optionally ignoring trailing nulls + * + * @param buffer1 + * @param offset1 + * @param length1 + * @param buffer2 + * @param offset2 + * @param length2 + * @param ignoreTrailingNulls + * @return {@code true} if buffer1 and buffer2 have same contents, having regard to trailing nulls + */ + public static boolean isEqual( + final byte[] buffer1, final int offset1, final int length1, + final byte[] buffer2, final int offset2, final int length2, + boolean ignoreTrailingNulls){ + int minLen=length1 < length2 ? 
length1 : length2; + for (int i=0; i < minLen; i++){ + if (buffer1[offset1+i] != buffer2[offset2+i]){ + return false; + } + } + if (length1 == length2){ + return true; + } + if (ignoreTrailingNulls){ + if (length1 > length2){ + for(int i = length2; i < length1; i++){ + if (buffer1[offset1+i] != 0){ + return false; + } + } + } else { + for(int i = length1; i < length2; i++){ + if (buffer2[offset2+i] != 0){ + return false; + } + } + } + return true; + } + return false; + } + + /** + * Compare byte buffers + * + * @param buffer1 + * @param offset1 + * @param length1 + * @param buffer2 + * @param offset2 + * @param length2 + * @return {@code true} if buffer1 and buffer2 have same contents + */ + public static boolean isEqual( + final byte[] buffer1, final int offset1, final int length1, + final byte[] buffer2, final int offset2, final int length2){ + return isEqual(buffer1, offset1, length1, buffer2, offset2, length2, false); + } + + /** + * Compare byte buffers + * + * @param buffer1 + * @param buffer2 + * @return {@code true} if buffer1 and buffer2 have same contents + */ + public static boolean isEqual(final byte[] buffer1, final byte[] buffer2 ){ + return isEqual(buffer1, 0, buffer1.length, buffer2, 0, buffer2.length, false); + } + + /** + * Compare byte buffers, optionally ignoring trailing nulls + * + * @param buffer1 + * @param buffer2 + * @param ignoreTrailingNulls + * @return {@code true} if buffer1 and buffer2 have same contents + */ + public static boolean isEqual(final byte[] buffer1, final byte[] buffer2, boolean ignoreTrailingNulls){ + return isEqual(buffer1, 0, buffer1.length, buffer2, 0, buffer2.length, ignoreTrailingNulls); + } + + /** + * Compare byte buffers, ignoring trailing nulls + * + * @param buffer1 + * @param offset1 + * @param length1 + * @param buffer2 + * @param offset2 + * @param length2 + * @return {@code true} if buffer1 and buffer2 have same contents, having regard to trailing nulls + */ + public static boolean isEqualWithNull( + final 
byte[] buffer1, final int offset1, final int length1, + final byte[] buffer2, final int offset2, final int length2){ + return isEqual(buffer1, offset1, length1, buffer2, offset2, length2, true); + } + + /** + * Returns true if the first N bytes of an array are all zero + * + * @param a + * The array to check + * @param size + * The number of characters to check (not the size of the array) + * @return true if the first N bytes are zero + */ + public static boolean isArrayZero(byte[] a, int size) { + for (int i = 0; i < size; i++) { + if (a[i] != 0) { + return false; + } + } + return true; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/BoundedInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/BoundedInputStream.java new file mode 100644 index 000000000..791f08cf7 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/BoundedInputStream.java @@ -0,0 +1,74 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.utils; + +import java.io.IOException; +import java.io.InputStream; + +/** + * A stream that limits reading from a wrapped stream to a given number of bytes. 
+ * @NotThreadSafe + * @since 1.6 + */ +public class BoundedInputStream extends InputStream { + private final InputStream in; + private long bytesRemaining; + + /** + * Creates the stream that will at most read the given amount of + * bytes from the given stream. + * @param in the stream to read from + * @param size the maximum amount of bytes to read + */ + public BoundedInputStream(final InputStream in, final long size) { + this.in = in; + bytesRemaining = size; + } + + @Override + public int read() throws IOException { + if (bytesRemaining > 0) { + --bytesRemaining; + return in.read(); + } else { + return -1; + } + } + + @Override + public int read(byte[] b, int off, int len) throws IOException { + if (bytesRemaining == 0) { + return -1; + } + int bytesToRead = len; + if (bytesToRead > bytesRemaining) { + bytesToRead = (int) bytesRemaining; + } + final int bytesRead = in.read(b, off, bytesToRead); + if (bytesRead >= 0) { + bytesRemaining -= bytesRead; + } + return bytesRead; + } + + @Override + public void close() { + // there isn't anything to close in this stream and the nested + // stream is controlled externally + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java new file mode 100644 index 000000000..0dc5b9882 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CRC32VerifyingInputStream.java @@ -0,0 +1,42 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.utils; + +import java.io.InputStream; +import java.util.zip.CRC32; + +/** + * A stream that verifies the CRC of the data read once the stream is + * exhausted. + * @NotThreadSafe + * @since 1.6 + */ +public class CRC32VerifyingInputStream extends ChecksumVerifyingInputStream { + + public CRC32VerifyingInputStream(final InputStream in, final long size, final int expectedCrc32) { + this(in, size, expectedCrc32 & 0xFFFFffffl); + } + + /** + * @since 1.7 + */ + public CRC32VerifyingInputStream(final InputStream in, final long size, final long expectedCrc32) { + super(new CRC32(), in, size, expectedCrc32); + } + +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/CharsetNames.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CharsetNames.java new file mode 100644 index 000000000..6acaeb299 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CharsetNames.java @@ -0,0 +1,126 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.utils; + +/** + * Character encoding names required of every implementation of the Java platform. + * + * From the Java documentation Standard + * charsets: + *

    + * Every implementation of the Java platform is required to support the following character encodings. Consult the + * release documentation for your implementation to see if any other encodings are supported. Consult the release + * documentation for your implementation to see if any other encodings are supported. + *

    + * + *
    + *
    US-ASCII
    + *
    Seven-bit ASCII, a.k.a. ISO646-US, a.k.a. the Basic Latin block of the Unicode character set.
    + *
    ISO-8859-1
    + *
    ISO Latin Alphabet No. 1, a.k.a. ISO-LATIN-1.
    + *
    UTF-8
    + *
    Eight-bit Unicode Transformation Format.
    + *
    UTF-16BE
    + *
    Sixteen-bit Unicode Transformation Format, big-endian byte order.
    + *
    UTF-16LE
    + *
    Sixteen-bit Unicode Transformation Format, little-endian byte order.
    + *
    UTF-16
    + *
    Sixteen-bit Unicode Transformation Format, byte order specified by a mandatory initial byte-order mark (either order + * accepted on input, big-endian used on output.)
    + *
    + * + *

    This perhaps would best belong in the [lang] project. Even if a similar interface is defined in [lang], it is not + * foreseen that [compress] would be made to depend on [lang].

    + * + * @see Standard charsets + * @since 1.4 + * @version $Id: CharsetNames.java 1552970 2013-12-22 07:03:43Z bodewig $ + */ +public class CharsetNames { + /** + * CharEncodingISO Latin Alphabet No. 1, a.k.a. ISO-LATIN-1. + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String ISO_8859_1 = "ISO-8859-1"; + + /** + *

    + * Seven-bit ASCII, also known as ISO646-US, also known as the Basic Latin block of the Unicode character set. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String US_ASCII = "US-ASCII"; + + /** + *

    + * Sixteen-bit Unicode Transformation Format, The byte order specified by a mandatory initial byte-order mark + * (either order accepted on input, big-endian used on output) + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String UTF_16 = "UTF-16"; + + /** + *

    + * Sixteen-bit Unicode Transformation Format, big-endian byte order. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String UTF_16BE = "UTF-16BE"; + + /** + *

    + * Sixteen-bit Unicode Transformation Format, little-endian byte order. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String UTF_16LE = "UTF-16LE"; + + /** + *

    + * Eight-bit Unicode Transformation Format. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final String UTF_8 = "UTF-8"; +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/Charsets.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/Charsets.java new file mode 100644 index 000000000..fb5ded363 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/Charsets.java @@ -0,0 +1,160 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ + +package org.apache.commons.compress.utils; + +import java.nio.charset.Charset; + +/** + * Charsets required of every implementation of the Java platform. + * + * From the Java documentation Standard + * charsets: + *

    + * Every implementation of the Java platform is required to support the following character encodings. Consult the + * release documentation for your implementation to see if any other encodings are supported. Consult the release + * documentation for your implementation to see if any other encodings are supported. + *

    + * + *
    + *
    US-ASCII
    + *
    Seven-bit ASCII, a.k.a. ISO646-US, a.k.a. the Basic Latin block of the Unicode character set.
    + *
    ISO-8859-1
    + *
    ISO Latin Alphabet No. 1, a.k.a. ISO-LATIN-1.
    + *
    UTF-8
    + *
    Eight-bit Unicode Transformation Format.
    + *
    UTF-16BE
    + *
    Sixteen-bit Unicode Transformation Format, big-endian byte order.
    + *
    UTF-16LE
    + *
    Sixteen-bit Unicode Transformation Format, little-endian byte order.
    + *
    UTF-16
    + *
    Sixteen-bit Unicode Transformation Format, byte order specified by a mandatory initial byte-order mark (either order + * accepted on input, big-endian used on output.)
    + *
    + * + *

    This class best belongs in the Commons Lang or IO project. Even if a similar class is defined in another Commons + * component, it is not foreseen that Commons Compress would be made to depend on another Commons component.

    + * + * @see Standard charsets + * @since 1.4 + * @version $Id: Charsets.java 1552970 2013-12-22 07:03:43Z bodewig $ + */ +public class Charsets { + + // + // This class should only contain Charset instances for required encodings. This guarantees that it will load correctly and + // without delay on all Java platforms. + // + + /** + * Returns the given Charset or the default Charset if the given Charset is null. + * + * @param charset + * A charset or null. + * @return the given Charset or the default Charset if the given Charset is null + */ + public static Charset toCharset(Charset charset) { + return charset == null ? Charset.defaultCharset() : charset; + } + + /** + * Returns a Charset for the named charset. If the name is null, return the default Charset. + * + * @param charset + * The name of the requested charset, may be null. + * @return a Charset for the named charset + * @throws java.nio.charset.UnsupportedCharsetException + * If the named charset is unavailable + * @throws java.nio.charset.IllegalCharsetNameException + * If the given charset name is illegal + */ + public static Charset toCharset(String charset) { + return charset == null ? Charset.defaultCharset() : Charset.forName(charset); + } + + /** + * CharsetNamesISO Latin Alphabet No. 1, a.k.a. ISO-LATIN-1. + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset ISO_8859_1 = Charset.forName(CharsetNames.ISO_8859_1); + + /** + *

    + * Seven-bit ASCII, also known as ISO646-US, also known as the Basic Latin block of the Unicode character set. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset US_ASCII = Charset.forName(CharsetNames.US_ASCII); + + /** + *

    + * Sixteen-bit Unicode Transformation Format, The byte order specified by a mandatory initial byte-order mark + * (either order accepted on input, big-endian used on output) + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset UTF_16 = Charset.forName(CharsetNames.UTF_16); + + /** + *

    + * Sixteen-bit Unicode Transformation Format, big-endian byte order. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset UTF_16BE = Charset.forName(CharsetNames.UTF_16BE); + + /** + *

    + * Sixteen-bit Unicode Transformation Format, little-endian byte order. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset UTF_16LE = Charset.forName(CharsetNames.UTF_16LE); + + /** + *

    + * Eight-bit Unicode Transformation Format. + *

    + *

    + * Every implementation of the Java platform is required to support this character encoding. + *

    + * + * @see Standard charsets + */ + public static final Charset UTF_8 = Charset.forName(CharsetNames.UTF_8); +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.java new file mode 100644 index 000000000..69bf03c16 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/ChecksumVerifyingInputStream.java @@ -0,0 +1,110 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one or more + * contributor license agreements. See the NOTICE file distributed with + * this work for additional information regarding copyright ownership. + * The ASF licenses this file to You under the Apache License, Version 2.0 + * (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + * + */ +package org.apache.commons.compress.utils; + +import java.io.IOException; +import java.io.InputStream; +import java.util.zip.Checksum; + +/** + * A stream that verifies the checksum of the data read once the stream is + * exhausted. 
+ * @NotThreadSafe + * @since 1.7 + */ +public class ChecksumVerifyingInputStream extends InputStream { + private final InputStream in; + private long bytesRemaining; + private final long expectedChecksum; + private final Checksum checksum; + + public ChecksumVerifyingInputStream(final Checksum checksum, final InputStream in, + final long size, final long expectedChecksum) { + this.checksum = checksum; + this.in = in; + this.expectedChecksum = expectedChecksum; + this.bytesRemaining = size; + } + + /** + * Reads a single byte from the stream + * @throws IOException if the underlying stream throws or the + * stream is exhausted and the Checksum doesn't match the expected + * value + */ + @Override + public int read() throws IOException { + if (bytesRemaining <= 0) { + return -1; + } + int ret = in.read(); + if (ret >= 0) { + checksum.update(ret); + --bytesRemaining; + } + if (bytesRemaining == 0 && expectedChecksum != checksum.getValue()) { + throw new IOException("Checksum verification failed"); + } + return ret; + } + + /** + * Reads a byte array from the stream + * @throws IOException if the underlying stream throws or the + * stream is exhausted and the Checksum doesn't match the expected + * value + */ + @Override + public int read(byte[] b) throws IOException { + return read(b, 0, b.length); + } + + /** + * Reads from the stream into a byte array. 
+ * @throws IOException if the underlying stream throws or the + * stream is exhausted and the Checksum doesn't match the expected + * value + */ + @Override + public int read(byte[] b, int off, int len) throws IOException { + int ret = in.read(b, off, len); + if (ret >= 0) { + checksum.update(b, off, ret); + bytesRemaining -= ret; + } + if (bytesRemaining <= 0 && expectedChecksum != checksum.getValue()) { + throw new IOException("Checksum verification failed"); + } + return ret; + } + + @Override + public long skip(long n) throws IOException { + // Can't really skip, we have to hash everything to verify the checksum + if (read() >= 0) { + return 1; + } else { + return 0; + } + } + + @Override + public void close() throws IOException { + in.close(); + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingInputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingInputStream.java new file mode 100644 index 000000000..ab26d2d61 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingInputStream.java @@ -0,0 +1,76 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. 
+ */ +package org.apache.commons.compress.utils; + +import java.io.FilterInputStream; +import java.io.IOException; +import java.io.InputStream; + +/** + * Stream that tracks the number of bytes read. + * @since 1.3 + * @NotThreadSafe + */ +public class CountingInputStream extends FilterInputStream { + private long bytesRead; + + public CountingInputStream(final InputStream in) { + super(in); + } + + @Override + public int read() throws IOException { + int r = in.read(); + if (r >= 0) { + count(1); + } + return r; + } + @Override + public int read(byte[] b) throws IOException { + return read(b, 0, b.length); + } + @Override + public int read(byte[] b, int off, int len) throws IOException { + int r = in.read(b, off, len); + if (r >= 0) { + count(r); + } + return r; + } + /** + * Increments the counter of already read bytes. + * Doesn't increment if the EOF has been hit (read == -1) + * + * @param read the number of bytes read + */ + protected final void count(long read) { + if (read != -1) { + bytesRead += read; + } + } + + /** + * Returns the current number of bytes read from this stream. + * @return the number of read bytes + */ + public long getBytesRead() { + return bytesRead; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingOutputStream.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingOutputStream.java new file mode 100644 index 000000000..3e62fdec0 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/CountingOutputStream.java @@ -0,0 +1,71 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.utils; + +import java.io.FilterOutputStream; +import java.io.IOException; +import java.io.OutputStream; + +/** + * Stream that tracks the number of bytes read. + * @since 1.3 + * @NotThreadSafe + */ +public class CountingOutputStream extends FilterOutputStream { + private long bytesWritten = 0; + + public CountingOutputStream(final OutputStream out) { + super(out); + } + + @Override + public void write(int b) throws IOException { + out.write(b); + count(1); + } + @Override + public void write(byte[] b) throws IOException { + write(b, 0, b.length); + } + @Override + public void write(byte[] b, int off, int len) throws IOException { + out.write(b, off, len); + count(len); + } + + /** + * Increments the counter of already written bytes. + * Doesn't increment if the EOF has been hit (written == -1) + * + * @param written the number of bytes written + */ + protected void count(long written) { + if (written != -1) { + bytesWritten += written; + } + } + + /** + * Returns the current number of bytes written to this stream. 
+ * @return the number of written bytes + */ + public long getBytesWritten() { + return bytesWritten; + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/IOUtils.java b/Tools/Cache Editor/src/org/apache/commons/compress/utils/IOUtils.java new file mode 100644 index 000000000..8ce49d293 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/IOUtils.java @@ -0,0 +1,184 @@ +/* + * Licensed to the Apache Software Foundation (ASF) under one + * or more contributor license agreements. See the NOTICE file + * distributed with this work for additional information + * regarding copyright ownership. The ASF licenses this file + * to you under the Apache License, Version 2.0 (the + * "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an + * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY + * KIND, either express or implied. See the License for the + * specific language governing permissions and limitations + * under the License. + */ +package org.apache.commons.compress.utils; + +import java.io.ByteArrayOutputStream; +import java.io.Closeable; +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; + +/** + * Utility functions + * @Immutable + */ +public final class IOUtils { + + /** Private constructor to prevent instantiation of this utility class. */ + private IOUtils(){ + } + + /** + * Copies the content of a InputStream into an OutputStream. + * Uses a default buffer size of 8024 bytes. 
+ * + * @param input + * the InputStream to copy + * @param output + * the target Stream + * @throws IOException + * if an error occurs + */ + public static long copy(final InputStream input, final OutputStream output) throws IOException { + return copy(input, output, 8024); + } + + /** + * Copies the content of a InputStream into an OutputStream + * + * @param input + * the InputStream to copy + * @param output + * the target Stream + * @param buffersize + * the buffer size to use + * @throws IOException + * if an error occurs + */ + public static long copy(final InputStream input, final OutputStream output, int buffersize) throws IOException { + final byte[] buffer = new byte[buffersize]; + int n = 0; + long count=0; + while (-1 != (n = input.read(buffer))) { + output.write(buffer, 0, n); + count += n; + } + return count; + } + + /** + * Skips the given number of bytes by repeatedly invoking skip on + * the given input stream if necessary. + * + *

    This method will only skip less than the requested number of + * bytes if the end of the input stream has been reached.

    + * + * @param input stream to skip bytes in + * @param numToSkip the number of bytes to skip + * @return the number of bytes actually skipped + * @throws IOException + */ + public static long skip(InputStream input, long numToSkip) throws IOException { + long available = numToSkip; + while (numToSkip > 0) { + long skipped = input.skip(numToSkip); + if (skipped == 0) { + break; + } + numToSkip -= skipped; + } + return available - numToSkip; + } + + /** + * Reads as much from input as possible to fill the given array. + * + *

    This method may invoke read repeatedly to fill the array and + * only read less bytes than the length of the array if the end of + * the stream has been reached.

    + * + * @param input stream to read from + * @param b buffer to fill + * @return the number of bytes actually read + * @throws IOException + */ + public static int readFully(InputStream input, byte[] b) throws IOException { + return readFully(input, b, 0, b.length); + } + + /** + * Reads as much from input as possible to fill the given array + * with the given amount of bytes. + * + *

    This method may invoke read repeatedly to read the bytes and + * only read less bytes than the requested length if the end of + * the stream has been reached.

    + * + * @param input stream to read from + * @param b buffer to fill + * @param offset offset into the buffer to start filling at + * @param len of bytes to read + * @return the number of bytes actually read + * @throws IOException + * if an I/O error has occurred + */ + public static int readFully(InputStream input, byte[] b, int offset, int len) + throws IOException { + if (len < 0 || offset < 0 || len + offset > b.length) { + throw new IndexOutOfBoundsException(); + } + int count = 0, x = 0; + while (count != len) { + x = input.read(b, offset + count, len - count); + if (x == -1) { + break; + } + count += x; + } + return count; + } + + // toByteArray(InputStream) copied from: + // commons/proper/io/trunk/src/main/java/org/apache/commons/io/IOUtils.java?revision=1428941 + // January 8th, 2013 + // + // Assuming our copy() works just as well as theirs! :-) + + /** + * Gets the contents of an InputStream as a byte[]. + *

    + * This method buffers the input internally, so there is no need to use a + * BufferedInputStream. + * + * @param input the InputStream to read from + * @return the requested byte array + * @throws NullPointerException if the input is null + * @throws IOException if an I/O error occurs + * @since 1.5 + */ + public static byte[] toByteArray(final InputStream input) throws IOException { + final ByteArrayOutputStream output = new ByteArrayOutputStream(); + copy(input, output); + return output.toByteArray(); + } + + /** + * Closes the given Closeable and swallows any IOException that may occur. + * @param c Closeable to close, can be null + * @since 1.7 + */ + public static void closeQuietly(Closeable c) { + if (c != null) { + try { + c.close(); + } catch (IOException ignored) { // NOPMD + } + } + } +} diff --git a/Tools/Cache Editor/src/org/apache/commons/compress/utils/package.html b/Tools/Cache Editor/src/org/apache/commons/compress/utils/package.html new file mode 100644 index 000000000..0409d1267 --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/commons/compress/utils/package.html @@ -0,0 +1,23 @@ + + + +

    Contains utilities used internally by the compress library.

    + + diff --git a/Tools/Cache Editor/src/org/apache/tools/bzip2/BZip2Constants.java b/Tools/Cache Editor/src/org/apache/tools/bzip2/BZip2Constants.java new file mode 100644 index 000000000..4f832d67d --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/tools/bzip2/BZip2Constants.java @@ -0,0 +1,136 @@ +/* + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2001 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowlegement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowlegement may appear in the software itself, + * if and wherever such third-party acknowlegements normally appear. + * + * 4. The names "Ant" and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Group. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. 
IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ + +/* + * This package is based on the work done by Keiron Liddle, Aftex Software + * to whom the Ant project is very grateful for his + * great code. + */ + +package org.apache.tools.bzip2; + +/** + * Base class for both the compress and decompress classes. + * Holds common arrays, and static data. 
+ * + * @author Keiron Liddle + */ +public interface BZip2Constants { + + int baseBlockSize = 100000; + int MAX_ALPHA_SIZE = 258; + int MAX_CODE_LEN = 23; + int RUNA = 0; + int RUNB = 1; + int N_GROUPS = 6; + int G_SIZE = 50; + int N_ITERS = 4; + int MAX_SELECTORS = (2 + (900000 / G_SIZE)); + int NUM_OVERSHOOT_BYTES = 20; + + int[] rNums = { + 619, 720, 127, 481, 931, 816, 813, 233, 566, 247, + 985, 724, 205, 454, 863, 491, 741, 242, 949, 214, + 733, 859, 335, 708, 621, 574, 73, 654, 730, 472, + 419, 436, 278, 496, 867, 210, 399, 680, 480, 51, + 878, 465, 811, 169, 869, 675, 611, 697, 867, 561, + 862, 687, 507, 283, 482, 129, 807, 591, 733, 623, + 150, 238, 59, 379, 684, 877, 625, 169, 643, 105, + 170, 607, 520, 932, 727, 476, 693, 425, 174, 647, + 73, 122, 335, 530, 442, 853, 695, 249, 445, 515, + 909, 545, 703, 919, 874, 474, 882, 500, 594, 612, + 641, 801, 220, 162, 819, 984, 589, 513, 495, 799, + 161, 604, 958, 533, 221, 400, 386, 867, 600, 782, + 382, 596, 414, 171, 516, 375, 682, 485, 911, 276, + 98, 553, 163, 354, 666, 933, 424, 341, 533, 870, + 227, 730, 475, 186, 263, 647, 537, 686, 600, 224, + 469, 68, 770, 919, 190, 373, 294, 822, 808, 206, + 184, 943, 795, 384, 383, 461, 404, 758, 839, 887, + 715, 67, 618, 276, 204, 918, 873, 777, 604, 560, + 951, 160, 578, 722, 79, 804, 96, 409, 713, 940, + 652, 934, 970, 447, 318, 353, 859, 672, 112, 785, + 645, 863, 803, 350, 139, 93, 354, 99, 820, 908, + 609, 772, 154, 274, 580, 184, 79, 626, 630, 742, + 653, 282, 762, 623, 680, 81, 927, 626, 789, 125, + 411, 521, 938, 300, 821, 78, 343, 175, 128, 250, + 170, 774, 972, 275, 999, 639, 495, 78, 352, 126, + 857, 956, 358, 619, 580, 124, 737, 594, 701, 612, + 669, 112, 134, 694, 363, 992, 809, 743, 168, 974, + 944, 375, 748, 52, 600, 747, 642, 182, 862, 81, + 344, 805, 988, 739, 511, 655, 814, 334, 249, 515, + 897, 955, 664, 981, 649, 113, 974, 459, 893, 228, + 433, 837, 553, 268, 926, 240, 102, 654, 459, 51, + 686, 754, 806, 760, 493, 403, 415, 394, 687, 700, + 946, 
670, 656, 610, 738, 392, 760, 799, 887, 653, + 978, 321, 576, 617, 626, 502, 894, 679, 243, 440, + 680, 879, 194, 572, 640, 724, 926, 56, 204, 700, + 707, 151, 457, 449, 797, 195, 791, 558, 945, 679, + 297, 59, 87, 824, 713, 663, 412, 693, 342, 606, + 134, 108, 571, 364, 631, 212, 174, 643, 304, 329, + 343, 97, 430, 751, 497, 314, 983, 374, 822, 928, + 140, 206, 73, 263, 980, 736, 876, 478, 430, 305, + 170, 514, 364, 692, 829, 82, 855, 953, 676, 246, + 369, 970, 294, 750, 807, 827, 150, 790, 288, 923, + 804, 378, 215, 828, 592, 281, 565, 555, 710, 82, + 896, 831, 547, 261, 524, 462, 293, 465, 502, 56, + 661, 821, 976, 991, 658, 869, 905, 758, 745, 193, + 768, 550, 608, 933, 378, 286, 215, 979, 792, 961, + 61, 688, 793, 644, 986, 403, 106, 366, 905, 644, + 372, 567, 466, 434, 645, 210, 389, 550, 919, 135, + 780, 773, 635, 389, 707, 100, 626, 958, 165, 504, + 920, 176, 193, 713, 857, 265, 203, 50, 668, 108, + 645, 990, 626, 197, 510, 357, 358, 850, 858, 364, + 936, 638 + }; +} diff --git a/Tools/Cache Editor/src/org/apache/tools/bzip2/CBZip2OutputStream.java b/Tools/Cache Editor/src/org/apache/tools/bzip2/CBZip2OutputStream.java new file mode 100644 index 000000000..5bdbe7eae --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/tools/bzip2/CBZip2OutputStream.java @@ -0,0 +1,1674 @@ +/* + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2001-2003 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. 
The end-user documentation included with the redistribution, if + * any, must include the following acknowlegement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowlegement may appear in the software itself, + * if and wherever such third-party acknowlegements normally appear. + * + * 4. The names "Ant" and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Group. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. + * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . 
+ */ + +/* + * This package is based on the work done by Keiron Liddle, Aftex Software + * to whom the Ant project is very grateful for his + * great code. + */ + +package org.apache.tools.bzip2; + +import java.io.IOException; +import java.io.OutputStream; + +/** + * An output stream that compresses into the BZip2 format (without the file + * header chars) into another stream. + * + * @author Keiron Liddle + * + * TODO: Update to BZip2 1.0.1 + */ +public class CBZip2OutputStream extends OutputStream implements BZip2Constants { + protected static final int SETMASK = (1 << 21); + protected static final int CLEARMASK = (~SETMASK); + protected static final int GREATER_ICOST = 15; + protected static final int LESSER_ICOST = 0; + protected static final int SMALL_THRESH = 20; + protected static final int DEPTH_THRESH = 10; + + /* + If you are ever unlucky/improbable enough + to get a stack overflow whilst sorting, + increase the following constant and try + again. In practice I have never seen the + stack go above 27 elems, so the following + limit seems very generous. + */ + protected static final int QSORT_STACK_SIZE = 1000; + + private static void panic() { + System.out.println("panic"); + //throw new CError(); + } + + private void makeMaps() { + int i; + nInUse = 0; + for (i = 0; i < 256; i++) { + if (inUse[i]) { + seqToUnseq[nInUse] = (char) i; + unseqToSeq[i] = (char) nInUse; + nInUse++; + } + } + } + + protected static void hbMakeCodeLengths(char[] len, int[] freq, + int alphaSize, int maxLen) { + /* + Nodes and heap entries run from 1. Entry 0 + for both the heap and nodes is a sentinel. + */ + int nNodes, nHeap, n1, n2, i, j, k; + boolean tooLong; + + int[] heap = new int[MAX_ALPHA_SIZE + 2]; + int[] weight = new int[MAX_ALPHA_SIZE * 2]; + int[] parent = new int[MAX_ALPHA_SIZE * 2]; + + for (i = 0; i < alphaSize; i++) { + weight[i + 1] = (freq[i] == 0 ? 
1 : freq[i]) << 8; + } + + while (true) { + nNodes = alphaSize; + nHeap = 0; + + heap[0] = 0; + weight[0] = 0; + parent[0] = -2; + + for (i = 1; i <= alphaSize; i++) { + parent[i] = -1; + nHeap++; + heap[nHeap] = i; + { + int zz, tmp; + zz = nHeap; + tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + } + if (!(nHeap < (MAX_ALPHA_SIZE + 2))) { + panic(); + } + + while (nHeap > 1) { + n1 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + { + int zz = 0, yy = 0, tmp = 0; + zz = 1; + tmp = heap[zz]; + while (true) { + yy = zz << 1; + if (yy > nHeap) { + break; + } + if (yy < nHeap + && weight[heap[yy + 1]] < weight[heap[yy]]) { + yy++; + } + if (weight[tmp] < weight[heap[yy]]) { + break; + } + heap[zz] = heap[yy]; + zz = yy; + } + heap[zz] = tmp; + } + n2 = heap[1]; + heap[1] = heap[nHeap]; + nHeap--; + { + int zz = 0, yy = 0, tmp = 0; + zz = 1; + tmp = heap[zz]; + while (true) { + yy = zz << 1; + if (yy > nHeap) { + break; + } + if (yy < nHeap + && weight[heap[yy + 1]] < weight[heap[yy]]) { + yy++; + } + if (weight[tmp] < weight[heap[yy]]) { + break; + } + heap[zz] = heap[yy]; + zz = yy; + } + heap[zz] = tmp; + } + nNodes++; + parent[n1] = parent[n2] = nNodes; + + weight[nNodes] = ((weight[n1] & 0xffffff00) + + (weight[n2] & 0xffffff00)) + | (1 + (((weight[n1] & 0x000000ff) > + (weight[n2] & 0x000000ff)) ? 
+ (weight[n1] & 0x000000ff) : + (weight[n2] & 0x000000ff))); + + parent[nNodes] = -1; + nHeap++; + heap[nHeap] = nNodes; + { + int zz = 0, tmp = 0; + zz = nHeap; + tmp = heap[zz]; + while (weight[tmp] < weight[heap[zz >> 1]]) { + heap[zz] = heap[zz >> 1]; + zz >>= 1; + } + heap[zz] = tmp; + } + } + if (!(nNodes < (MAX_ALPHA_SIZE * 2))) { + panic(); + } + + tooLong = false; + for (i = 1; i <= alphaSize; i++) { + j = 0; + k = i; + while (parent[k] >= 0) { + k = parent[k]; + j++; + } + len[i - 1] = (char) j; + if (j > maxLen) { + tooLong = true; + } + } + + if (!tooLong) { + break; + } + + for (i = 1; i < alphaSize; i++) { + j = weight[i] >> 8; + j = 1 + (j / 2); + weight[i] = j << 8; + } + } + } + + /* + index of the last char in the block, so + the block size == last + 1. + */ + int last; + + /* + index in zptr[] of original string after sorting. + */ + int origPtr; + + /* + always: in the range 0 .. 9. + The current block size is 100000 * this number. + */ + int blockSize100k; + + boolean blockRandomised; + + int bytesOut; + int bsBuff; + int bsLive; + CRC mCrc = new CRC(); + + private boolean[] inUse = new boolean[256]; + private int nInUse; + + private char[] seqToUnseq = new char[256]; + private char[] unseqToSeq = new char[256]; + + private char[] selector = new char[MAX_SELECTORS]; + private char[] selectorMtf = new char[MAX_SELECTORS]; + + private char[] block; + private int[] quadrant; + private int[] zptr; + private short[] szptr; + private int[] ftab; + + private int nMTF; + + private int[] mtfFreq = new int[MAX_ALPHA_SIZE]; + + /* + * Used when sorting. If too many long comparisons + * happen, we stop sorting, randomise the block + * slightly, and try again. 
+ */ + private int workFactor; + private int workDone; + private int workLimit; + private boolean firstAttempt; + @SuppressWarnings("unused") + private int nBlocksRandomised; + + private int currentChar = -1; + private int runLength = 0; + + public CBZip2OutputStream(OutputStream inStream) throws IOException { + this(inStream, 9); + } + + public CBZip2OutputStream(OutputStream inStream, int inBlockSize) + throws IOException { + block = null; + quadrant = null; + zptr = null; + ftab = null; + + bsSetStream(inStream); + + workFactor = 50; + if (inBlockSize > 9) { + inBlockSize = 9; + } + if (inBlockSize < 1) { + inBlockSize = 1; + } + blockSize100k = inBlockSize; + allocateCompressStructures(); + initialize(); + initBlock(); + } + + /** + * + * modified by Oliver Merkel, 010128 + * + */ + public void write(int bv) throws IOException { + int b = (256 + bv) % 256; + if (currentChar != -1) { + if (currentChar == b) { + runLength++; + if (runLength > 254) { + writeRun(); + currentChar = -1; + runLength = 0; + } + } else { + writeRun(); + runLength = 1; + currentChar = b; + } + } else { + currentChar = b; + runLength++; + } + } + + private void writeRun() throws IOException { + if (last < allowableBlockSize) { + inUse[currentChar] = true; + for (int i = 0; i < runLength; i++) { + mCrc.updateCRC((char) currentChar); + } + switch (runLength) { + case 1: + last++; + block[last + 1] = (char) currentChar; + break; + case 2: + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + break; + case 3: + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + break; + default: + inUse[runLength - 4] = true; + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) currentChar; + last++; + block[last + 1] = (char) 
(runLength - 4); + break; + } + } else { + endBlock(); + initBlock(); + writeRun(); + } + } + + boolean closed = false; + + protected void finalize() throws Throwable { + close(); + super.finalize(); + } + + public void close() throws IOException { + if (closed) { + return; + } + + if (runLength > 0) { + writeRun(); + } + currentChar = -1; + endBlock(); + endCompression(); + closed = true; + super.close(); + bsStream.close(); + } + + public void flush() throws IOException { + super.flush(); + bsStream.flush(); + } + + private int blockCRC, combinedCRC; + + private void initialize() throws IOException { + bytesOut = 0; + nBlocksRandomised = 0; + + /* Write `magic' bytes h indicating file-format == huffmanised, + followed by a digit indicating blockSize100k. + */ + bsPutUChar('h'); + bsPutUChar('0' + blockSize100k); + + combinedCRC = 0; + } + + private int allowableBlockSize; + + private void initBlock() { + // blockNo++; + mCrc.initialiseCRC(); + last = -1; + // ch = 0; + + for (int i = 0; i < 256; i++) { + inUse[i] = false; + } + + /* 20 is just a paranoia constant */ + allowableBlockSize = baseBlockSize * blockSize100k - 20; + } + + private void endBlock() throws IOException { + blockCRC = mCrc.getFinalCRC(); + combinedCRC = (combinedCRC << 1) | (combinedCRC >>> 31); + combinedCRC ^= blockCRC; + + /* sort the block and establish posn of original string */ + doReversibleTransformation(); + + /* + A 6-byte block header, the value chosen arbitrarily + as 0x314159265359 :-). A 32 bit value does not really + give a strong enough guarantee that the value will not + appear by chance in the compressed datastream. Worst-case + probability of this event, for a 900k block, is about + 2.0e-3 for 32 bits, 1.0e-5 for 40 bits and 4.0e-8 for 48 bits. + For a compressed file of size 100Gb -- about 100000 blocks -- + only a 48-bit marker will do. NB: normal compression/ + decompression do *not* rely on these statistical properties. 
+ They are only important when trying to recover blocks from + damaged files. + */ + bsPutUChar(0x31); + bsPutUChar(0x41); + bsPutUChar(0x59); + bsPutUChar(0x26); + bsPutUChar(0x53); + bsPutUChar(0x59); + + /* Now the block's CRC, so it is in a known place. */ + bsPutint(blockCRC); + + /* Now a single bit indicating randomisation. */ + if (blockRandomised) { + bsW(1, 1); + nBlocksRandomised++; + } else { + bsW(1, 0); + } + + /* Finally, block's contents proper. */ + moveToFrontCodeAndSend(); + } + + private void endCompression() throws IOException { + /* + Now another magic 48-bit number, 0x177245385090, to + indicate the end of the last block. (sqrt(pi), if + you want to know. I did want to use e, but it contains + too much repetition -- 27 18 28 18 28 46 -- for me + to feel statistically comfortable. Call me paranoid.) + */ + bsPutUChar(0x17); + bsPutUChar(0x72); + bsPutUChar(0x45); + bsPutUChar(0x38); + bsPutUChar(0x50); + bsPutUChar(0x90); + + bsPutint(combinedCRC); + + bsFinishedWithStream(); + } + + private void hbAssignCodes (int[] code, char[] length, int minLen, + int maxLen, int alphaSize) { + int n, vec, i; + + vec = 0; + for (n = minLen; n <= maxLen; n++) { + for (i = 0; i < alphaSize; i++) { + if (length[i] == n) { + code[i] = vec; + vec++; + } + }; + vec <<= 1; + } + } + + private void bsSetStream(OutputStream f) { + bsStream = f; + bsLive = 0; + bsBuff = 0; + bytesOut = 0; + } + + private void bsFinishedWithStream() throws IOException { + while (bsLive > 0) { + int ch = (bsBuff >> 24); + try { + bsStream.write(ch); // write 8-bit + } catch (IOException e) { + throw e; + } + bsBuff <<= 8; + bsLive -= 8; + bytesOut++; + } + } + + private void bsW(int n, int v) throws IOException { + while (bsLive >= 8) { + int ch = (bsBuff >> 24); + try { + bsStream.write(ch); // write 8-bit + } catch (IOException e) { + throw e; + } + bsBuff <<= 8; + bsLive -= 8; + bytesOut++; + } + bsBuff |= (v << (32 - bsLive - n)); + bsLive += n; + } + + private void bsPutUChar(int 
c) throws IOException { + bsW(8, c); + } + + private void bsPutint(int u) throws IOException { + bsW(8, (u >> 24) & 0xff); + bsW(8, (u >> 16) & 0xff); + bsW(8, (u >> 8) & 0xff); + bsW(8, u & 0xff); + } + + private void bsPutIntVS(int numBits, int c) throws IOException { + bsW(numBits, c); + } + + private void sendMTFValues() throws IOException { + char len[][] = new char[N_GROUPS][MAX_ALPHA_SIZE]; + + @SuppressWarnings("unused") + int v, t, i, j, gs, ge, totc, bt, bc, iter; + int nSelectors = 0, alphaSize, minLen, maxLen, selCtr; + @SuppressWarnings("unused") + int nGroups, nBytes; + + alphaSize = nInUse + 2; + for (t = 0; t < N_GROUPS; t++) { + for (v = 0; v < alphaSize; v++) { + len[t][v] = (char) GREATER_ICOST; + } + } + + /* Decide how many coding tables to use */ + if (nMTF <= 0) { + panic(); + } + + if (nMTF < 200) { + nGroups = 2; + } else if (nMTF < 600) { + nGroups = 3; + } else if (nMTF < 1200) { + nGroups = 4; + } else if (nMTF < 2400) { + nGroups = 5; + } else { + nGroups = 6; + } + + /* Generate an initial set of coding tables */ { + int nPart, remF, tFreq, aFreq; + + nPart = nGroups; + remF = nMTF; + gs = 0; + while (nPart > 0) { + tFreq = remF / nPart; + ge = gs - 1; + aFreq = 0; + while (aFreq < tFreq && ge < alphaSize - 1) { + ge++; + aFreq += mtfFreq[ge]; + } + + if (ge > gs && nPart != nGroups && nPart != 1 + && ((nGroups - nPart) % 2 == 1)) { + aFreq -= mtfFreq[ge]; + ge--; + } + + for (v = 0; v < alphaSize; v++) { + if (v >= gs && v <= ge) { + len[nPart - 1][v] = (char) LESSER_ICOST; + } else { + len[nPart - 1][v] = (char) GREATER_ICOST; + } + } + + nPart--; + gs = ge + 1; + remF -= aFreq; + } + } + + int[][] rfreq = new int[N_GROUPS][MAX_ALPHA_SIZE]; + int[] fave = new int[N_GROUPS]; + short[] cost = new short[N_GROUPS]; + /* + Iterate up to N_ITERS times to improve the tables. 
+ */ + for (iter = 0; iter < N_ITERS; iter++) { + for (t = 0; t < nGroups; t++) { + fave[t] = 0; + } + + for (t = 0; t < nGroups; t++) { + for (v = 0; v < alphaSize; v++) { + rfreq[t][v] = 0; + } + } + + nSelectors = 0; + totc = 0; + gs = 0; + while (true) { + + /* Set group start & end marks. */ + if (gs >= nMTF) { + break; + } + ge = gs + G_SIZE - 1; + if (ge >= nMTF) { + ge = nMTF - 1; + } + + /* + Calculate the cost of this group as coded + by each of the coding tables. + */ + for (t = 0; t < nGroups; t++) { + cost[t] = 0; + } + + if (nGroups == 6) { + short cost0, cost1, cost2, cost3, cost4, cost5; + cost0 = cost1 = cost2 = cost3 = cost4 = cost5 = 0; + for (i = gs; i <= ge; i++) { + short icv = szptr[i]; + cost0 += len[0][icv]; + cost1 += len[1][icv]; + cost2 += len[2][icv]; + cost3 += len[3][icv]; + cost4 += len[4][icv]; + cost5 += len[5][icv]; + } + cost[0] = cost0; + cost[1] = cost1; + cost[2] = cost2; + cost[3] = cost3; + cost[4] = cost4; + cost[5] = cost5; + } else { + for (i = gs; i <= ge; i++) { + short icv = szptr[i]; + for (t = 0; t < nGroups; t++) { + cost[t] += len[t][icv]; + } + } + } + + /* + Find the coding table which is best for this group, + and record its identity in the selector table. + */ + bc = 999999999; + bt = -1; + for (t = 0; t < nGroups; t++) { + if (cost[t] < bc) { + bc = cost[t]; + bt = t; + } + }; + totc += bc; + fave[bt]++; + selector[nSelectors] = (char) bt; + nSelectors++; + + /* + Increment the symbol frequencies for the selected table. + */ + for (i = gs; i <= ge; i++) { + rfreq[bt][szptr[i]]++; + } + + gs = ge + 1; + } + + /* + Recompute the tables based on the accumulated frequencies. + */ + for (t = 0; t < nGroups; t++) { + hbMakeCodeLengths(len[t], rfreq[t], alphaSize, 20); + } + } + + rfreq = null; + fave = null; + cost = null; + + if (!(nGroups < 8)) { + panic(); + } + if (!(nSelectors < 32768 && nSelectors <= (2 + (900000 / G_SIZE)))) { + panic(); + } + + + /* Compute MTF values for the selectors. 
*/ + { + char[] pos = new char[N_GROUPS]; + char ll_i, tmp2, tmp; + for (i = 0; i < nGroups; i++) { + pos[i] = (char) i; + } + for (i = 0; i < nSelectors; i++) { + ll_i = selector[i]; + j = 0; + tmp = pos[j]; + while (ll_i != tmp) { + j++; + tmp2 = tmp; + tmp = pos[j]; + pos[j] = tmp2; + } + pos[0] = tmp; + selectorMtf[i] = (char) j; + } + } + + int[][] code = new int[N_GROUPS][MAX_ALPHA_SIZE]; + + /* Assign actual codes for the tables. */ + for (t = 0; t < nGroups; t++) { + minLen = 32; + maxLen = 0; + for (i = 0; i < alphaSize; i++) { + if (len[t][i] > maxLen) { + maxLen = len[t][i]; + } + if (len[t][i] < minLen) { + minLen = len[t][i]; + } + } + if (maxLen > 20) { + panic(); + } + if (minLen < 1) { + panic(); + } + hbAssignCodes(code[t], len[t], minLen, maxLen, alphaSize); + } + + /* Transmit the mapping table. */ + { + boolean[] inUse16 = new boolean[16]; + for (i = 0; i < 16; i++) { + inUse16[i] = false; + for (j = 0; j < 16; j++) { + if (inUse[i * 16 + j]) { + inUse16[i] = true; + } + } + } + + nBytes = bytesOut; + for (i = 0; i < 16; i++) { + if (inUse16[i]) { + bsW(1, 1); + } else { + bsW(1, 0); + } + } + + for (i = 0; i < 16; i++) { + if (inUse16[i]) { + for (j = 0; j < 16; j++) { + if (inUse[i * 16 + j]) { + bsW(1, 1); + } else { + bsW(1, 0); + } + } + } + } + + } + + /* Now the selectors. */ + nBytes = bytesOut; + bsW (3, nGroups); + bsW (15, nSelectors); + for (i = 0; i < nSelectors; i++) { + for (j = 0; j < selectorMtf[i]; j++) { + bsW(1, 1); + } + bsW(1, 0); + } + + /* Now the coding tables. 
*/ + nBytes = bytesOut; + + for (t = 0; t < nGroups; t++) { + int curr = len[t][0]; + bsW(5, curr); + for (i = 0; i < alphaSize; i++) { + while (curr < len[t][i]) { + bsW(2, 2); + curr++; /* 10 */ + } + while (curr > len[t][i]) { + bsW(2, 3); + curr--; /* 11 */ + } + bsW (1, 0); + } + } + + /* And finally, the block data proper */ + nBytes = bytesOut; + selCtr = 0; + gs = 0; + while (true) { + if (gs >= nMTF) { + break; + } + ge = gs + G_SIZE - 1; + if (ge >= nMTF) { + ge = nMTF - 1; + } + for (i = gs; i <= ge; i++) { + bsW(len[selector[selCtr]][szptr[i]], + code[selector[selCtr]][szptr[i]]); + } + + gs = ge + 1; + selCtr++; + } + if (!(selCtr == nSelectors)) { + panic(); + } + } + + private void moveToFrontCodeAndSend () throws IOException { + bsPutIntVS(24, origPtr); + generateMTFValues(); + sendMTFValues(); + } + + private OutputStream bsStream; + + private void simpleSort(int lo, int hi, int d) { + int i, j, h, bigN, hp; + int v; + + bigN = hi - lo + 1; + if (bigN < 2) { + return; + } + + hp = 0; + while (incs[hp] < bigN) { + hp++; + } + hp--; + + for (; hp >= 0; hp--) { + h = incs[hp]; + + i = lo + h; + while (true) { + /* copy 1 */ + if (i > hi) { + break; + } + v = zptr[i]; + j = i; + while (fullGtU(zptr[j - h] + d, v + d)) { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) { + break; + } + } + zptr[j] = v; + i++; + + /* copy 2 */ + if (i > hi) { + break; + } + v = zptr[i]; + j = i; + while (fullGtU(zptr[j - h] + d, v + d)) { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) { + break; + } + } + zptr[j] = v; + i++; + + /* copy 3 */ + if (i > hi) { + break; + } + v = zptr[i]; + j = i; + while (fullGtU(zptr[j - h] + d, v + d)) { + zptr[j] = zptr[j - h]; + j = j - h; + if (j <= (lo + h - 1)) { + break; + } + } + zptr[j] = v; + i++; + + if (workDone > workLimit && firstAttempt) { + return; + } + } + } + } + + private void vswap(int p1, int p2, int n) { + int temp = 0; + while (n > 0) { + temp = zptr[p1]; + zptr[p1] = zptr[p2]; + 
zptr[p2] = temp; + p1++; + p2++; + n--; + } + } + + private char med3(char a, char b, char c) { + char t; + if (a > b) { + t = a; + a = b; + b = t; + } + if (b > c) { + t = b; + b = c; + c = t; + } + if (a > b) { + b = a; + } + return b; + } + + private static class StackElem { + int ll; + int hh; + int dd; + } + + private void qSort3(int loSt, int hiSt, int dSt) { + int unLo, unHi, ltLo, gtHi, med, n, m; + int sp, lo, hi, d; + StackElem[] stack = new StackElem[QSORT_STACK_SIZE]; + for (int count = 0; count < QSORT_STACK_SIZE; count++) { + stack[count] = new StackElem(); + } + + sp = 0; + + stack[sp].ll = loSt; + stack[sp].hh = hiSt; + stack[sp].dd = dSt; + sp++; + + while (sp > 0) { + if (sp >= QSORT_STACK_SIZE) { + panic(); + } + + sp--; + lo = stack[sp].ll; + hi = stack[sp].hh; + d = stack[sp].dd; + + if (hi - lo < SMALL_THRESH || d > DEPTH_THRESH) { + simpleSort(lo, hi, d); + if (workDone > workLimit && firstAttempt) { + return; + } + continue; + } + + med = med3(block[zptr[lo] + d + 1], + block[zptr[hi ] + d + 1], + block[zptr[(lo + hi) >> 1] + d + 1]); + + unLo = ltLo = lo; + unHi = gtHi = hi; + + while (true) { + while (true) { + if (unLo > unHi) { + break; + } + n = ((int) block[zptr[unLo] + d + 1]) - med; + if (n == 0) { + int temp = 0; + temp = zptr[unLo]; + zptr[unLo] = zptr[ltLo]; + zptr[ltLo] = temp; + ltLo++; + unLo++; + continue; + }; + if (n > 0) { + break; + } + unLo++; + } + while (true) { + if (unLo > unHi) { + break; + } + n = ((int) block[zptr[unHi] + d + 1]) - med; + if (n == 0) { + int temp = 0; + temp = zptr[unHi]; + zptr[unHi] = zptr[gtHi]; + zptr[gtHi] = temp; + gtHi--; + unHi--; + continue; + }; + if (n < 0) { + break; + } + unHi--; + } + if (unLo > unHi) { + break; + } + int temp = 0; + temp = zptr[unLo]; + zptr[unLo] = zptr[unHi]; + zptr[unHi] = temp; + unLo++; + unHi--; + } + + if (gtHi < ltLo) { + stack[sp].ll = lo; + stack[sp].hh = hi; + stack[sp].dd = d + 1; + sp++; + continue; + } + + n = ((ltLo - lo) < (unLo - ltLo)) ? 
(ltLo - lo) : (unLo - ltLo); + vswap(lo, unLo - n, n); + m = ((hi - gtHi) < (gtHi - unHi)) ? (hi - gtHi) : (gtHi - unHi); + vswap(unLo, hi - m + 1, m); + + n = lo + unLo - ltLo - 1; + m = hi - (gtHi - unHi) + 1; + + stack[sp].ll = lo; + stack[sp].hh = n; + stack[sp].dd = d; + sp++; + + stack[sp].ll = n + 1; + stack[sp].hh = m - 1; + stack[sp].dd = d + 1; + sp++; + + stack[sp].ll = m; + stack[sp].hh = hi; + stack[sp].dd = d; + sp++; + } + } + + private void mainSort() { + int i, j, ss, sb; + int[] runningOrder = new int[256]; + int[] copy = new int[256]; + boolean[] bigDone = new boolean[256]; + int c1, c2; + @SuppressWarnings("unused") + int numQSorted; + + /* + In the various block-sized structures, live data runs + from 0 to last+NUM_OVERSHOOT_BYTES inclusive. First, + set up the overshoot area for block. + */ + + // if (verbosity >= 4) fprintf ( stderr, " sort initialise ...\n" ); + for (i = 0; i < NUM_OVERSHOOT_BYTES; i++) { + block[last + i + 2] = block[(i % (last + 1)) + 1]; + } + for (i = 0; i <= last + NUM_OVERSHOOT_BYTES; i++) { + quadrant[i] = 0; + } + + block[0] = (char) (block[last + 1]); + + if (last < 4000) { + /* + Use simpleSort(), since the full sorting mechanism + has quite a large constant overhead. + */ + for (i = 0; i <= last; i++) { + zptr[i] = i; + } + firstAttempt = false; + workDone = workLimit = 0; + simpleSort(0, last, 0); + } else { + numQSorted = 0; + for (i = 0; i <= 255; i++) { + bigDone[i] = false; + } + + for (i = 0; i <= 65536; i++) { + ftab[i] = 0; + } + + c1 = block[0]; + for (i = 0; i <= last; i++) { + c2 = block[i + 1]; + ftab[(c1 << 8) + c2]++; + c1 = c2; + } + + for (i = 1; i <= 65536; i++) { + ftab[i] += ftab[i - 1]; + } + + c1 = block[1]; + for (i = 0; i < last; i++) { + c2 = block[i + 2]; + j = (c1 << 8) + c2; + c1 = c2; + ftab[j]--; + zptr[ftab[j]] = i; + } + + j = ((block[last + 1]) << 8) + (block[1]); + ftab[j]--; + zptr[ftab[j]] = last; + + /* + Now ftab contains the first loc of every small bucket. 
+ Calculate the running order, from smallest to largest + big bucket. + */ + + for (i = 0; i <= 255; i++) { + runningOrder[i] = i; + } + + { + int vv; + int h = 1; + do { + h = 3 * h + 1; + } + while (h <= 256); + do { + h = h / 3; + for (i = h; i <= 255; i++) { + vv = runningOrder[i]; + j = i; + while ((ftab[((runningOrder[j - h]) + 1) << 8] + - ftab[(runningOrder[j - h]) << 8]) > + (ftab[((vv) + 1) << 8] - ftab[(vv) << 8])) { + runningOrder[j] = runningOrder[j - h]; + j = j - h; + if (j <= (h - 1)) { + break; + } + } + runningOrder[j] = vv; + } + } while (h != 1); + } + + /* + The main sorting loop. + */ + for (i = 0; i <= 255; i++) { + + /* + Process big buckets, starting with the least full. + */ + ss = runningOrder[i]; + + /* + Complete the big bucket [ss] by quicksorting + any unsorted small buckets [ss, j]. Hopefully + previous pointer-scanning phases have already + completed many of the small buckets [ss, j], so + we don't have to sort them at all. + */ + for (j = 0; j <= 255; j++) { + sb = (ss << 8) + j; + if (!((ftab[sb] & SETMASK) == SETMASK)) { + int lo = ftab[sb] & CLEARMASK; + int hi = (ftab[sb + 1] & CLEARMASK) - 1; + if (hi > lo) { + qSort3(lo, hi, 2); + numQSorted += (hi - lo + 1); + if (workDone > workLimit && firstAttempt) { + return; + } + } + ftab[sb] |= SETMASK; + } + } + + /* + The ss big bucket is now done. Record this fact, + and update the quadrant descriptors. Remember to + update quadrants in the overshoot area too, if + necessary. The "if (i < 255)" test merely skips + this updating for the last bucket processed, since + updating for the last bucket is pointless. 
+ */ + bigDone[ss] = true; + + if (i < 255) { + int bbStart = ftab[ss << 8] & CLEARMASK; + int bbSize = (ftab[(ss + 1) << 8] & CLEARMASK) - bbStart; + int shifts = 0; + + while ((bbSize >> shifts) > 65534) { + shifts++; + } + + for (j = 0; j < bbSize; j++) { + int a2update = zptr[bbStart + j]; + int qVal = (j >> shifts); + quadrant[a2update] = qVal; + if (a2update < NUM_OVERSHOOT_BYTES) { + quadrant[a2update + last + 1] = qVal; + } + } + + if (!(((bbSize - 1) >> shifts) <= 65535)) { + panic(); + } + } + + /* + Now scan this big bucket so as to synthesise the + sorted order for small buckets [t, ss] for all t != ss. + */ + for (j = 0; j <= 255; j++) { + copy[j] = ftab[(j << 8) + ss] & CLEARMASK; + } + + for (j = ftab[ss << 8] & CLEARMASK; + j < (ftab[(ss + 1) << 8] & CLEARMASK); j++) { + c1 = block[zptr[j]]; + if (!bigDone[c1]) { + zptr[copy[c1]] = zptr[j] == 0 ? last : zptr[j] - 1; + copy[c1]++; + } + } + + for (j = 0; j <= 255; j++) { + ftab[(j << 8) + ss] |= SETMASK; + } + } + } + } + + private void randomiseBlock() { + int i; + int rNToGo = 0; + int rTPos = 0; + for (i = 0; i < 256; i++) { + inUse[i] = false; + } + + for (i = 0; i <= last; i++) { + if (rNToGo == 0) { + rNToGo = (char) rNums[rTPos]; + rTPos++; + if (rTPos == 512) { + rTPos = 0; + } + } + rNToGo--; + block[i + 1] ^= ((rNToGo == 1) ? 
1 : 0); + // handle 16 bit signed numbers + block[i + 1] &= 0xFF; + + inUse[block[i + 1]] = true; + } + } + + private void doReversibleTransformation() { + int i; + + workLimit = workFactor * last; + workDone = 0; + blockRandomised = false; + firstAttempt = true; + + mainSort(); + + if (workDone > workLimit && firstAttempt) { + randomiseBlock(); + workLimit = workDone = 0; + blockRandomised = true; + firstAttempt = false; + mainSort(); + } + + origPtr = -1; + for (i = 0; i <= last; i++) { + if (zptr[i] == 0) { + origPtr = i; + break; + } + }; + + if (origPtr == -1) { + panic(); + } + } + + private boolean fullGtU(int i1, int i2) { + int k; + char c1, c2; + int s1, s2; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + i1++; + i2++; + + k = last + 1; + + do { + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) { + return (s1 > s2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) { + return (s1 > s2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + s1 = quadrant[i1]; + s2 = quadrant[i2]; + if (s1 != s2) { + return (s1 > s2); + } + i1++; + i2++; + + c1 = block[i1 + 1]; + c2 = block[i2 + 1]; + if (c1 != c2) { + return (c1 > c2); + } + s1 = quadrant[i1]; 
+ s2 = quadrant[i2]; + if (s1 != s2) { + return (s1 > s2); + } + i1++; + i2++; + + if (i1 > last) { + i1 -= last; + i1--; + }; + if (i2 > last) { + i2 -= last; + i2--; + }; + + k -= 4; + workDone++; + } while (k >= 0); + + return false; + } + + /* + Knuth's increments seem to work better + than Incerpi-Sedgewick here. Possibly + because the number of elems to sort is + usually small, typically <= 20. + */ + private int[] incs = { 1, 4, 13, 40, 121, 364, 1093, 3280, + 9841, 29524, 88573, 265720, + 797161, 2391484 }; + + private void allocateCompressStructures () { + int n = baseBlockSize * blockSize100k; + block = new char[(n + 1 + NUM_OVERSHOOT_BYTES)]; + quadrant = new int[(n + NUM_OVERSHOOT_BYTES)]; + zptr = new int[n]; + ftab = new int[65537]; + + if (block == null || quadrant == null || zptr == null + || ftab == null) { + //int totalDraw = (n + 1 + NUM_OVERSHOOT_BYTES) + (n + NUM_OVERSHOOT_BYTES) + n + 65537; + //compressOutOfMemory ( totalDraw, n ); + } + + /* + The back end needs a place to store the MTF values + whilst it calculates the coding tables. We could + put them in the zptr array. However, these values + will fit in a short, so we overlay szptr at the + start of zptr, in the hope of reducing the number + of cache misses induced by the multiple traversals + of the MTF values when calculating coding tables. + Seems to improve compression speed by about 1%. 
+ */ + // szptr = zptr; + + + szptr = new short[2 * n]; + } + + private void generateMTFValues() { + char[] yy = new char[256]; + int i, j; + char tmp; + char tmp2; + int zPend; + int wr; + int EOB; + + makeMaps(); + EOB = nInUse + 1; + + for (i = 0; i <= EOB; i++) { + mtfFreq[i] = 0; + } + + wr = 0; + zPend = 0; + for (i = 0; i < nInUse; i++) { + yy[i] = (char) i; + } + + + for (i = 0; i <= last; i++) { + char ll_i; + + ll_i = unseqToSeq[block[zptr[i]]]; + + j = 0; + tmp = yy[j]; + while (ll_i != tmp) { + j++; + tmp2 = tmp; + tmp = yy[j]; + yy[j] = tmp2; + }; + yy[0] = tmp; + + if (j == 0) { + zPend++; + } else { + if (zPend > 0) { + zPend--; + while (true) { + switch (zPend % 2) { + case 0: + szptr[wr] = (short) RUNA; + wr++; + mtfFreq[RUNA]++; + break; + case 1: + szptr[wr] = (short) RUNB; + wr++; + mtfFreq[RUNB]++; + break; + }; + if (zPend < 2) { + break; + } + zPend = (zPend - 2) / 2; + }; + zPend = 0; + } + szptr[wr] = (short) (j + 1); + wr++; + mtfFreq[j + 1]++; + } + } + + if (zPend > 0) { + zPend--; + while (true) { + switch (zPend % 2) { + case 0: + szptr[wr] = (short) RUNA; + wr++; + mtfFreq[RUNA]++; + break; + case 1: + szptr[wr] = (short) RUNB; + wr++; + mtfFreq[RUNB]++; + break; + } + if (zPend < 2) { + break; + } + zPend = (zPend - 2) / 2; + } + } + + szptr[wr] = (short) EOB; + wr++; + mtfFreq[EOB]++; + + nMTF = wr; + } +} + + diff --git a/Tools/Cache Editor/src/org/apache/tools/bzip2/CRC.java b/Tools/Cache Editor/src/org/apache/tools/bzip2/CRC.java new file mode 100644 index 000000000..bc8bc644b --- /dev/null +++ b/Tools/Cache Editor/src/org/apache/tools/bzip2/CRC.java @@ -0,0 +1,167 @@ +/* + * The Apache Software License, Version 1.1 + * + * Copyright (c) 2001-2002 The Apache Software Foundation. All rights + * reserved. + * + * Redistribution and use in source and binary forms, with or without + * modification, are permitted provided that the following conditions + * are met: + * + * 1. 
Redistributions of source code must retain the above copyright + * notice, this list of conditions and the following disclaimer. + * + * 2. Redistributions in binary form must reproduce the above copyright + * notice, this list of conditions and the following disclaimer in + * the documentation and/or other materials provided with the + * distribution. + * + * 3. The end-user documentation included with the redistribution, if + * any, must include the following acknowlegement: + * "This product includes software developed by the + * Apache Software Foundation (http://www.apache.org/)." + * Alternately, this acknowlegement may appear in the software itself, + * if and wherever such third-party acknowlegements normally appear. + * + * 4. The names "Ant" and "Apache Software + * Foundation" must not be used to endorse or promote products derived + * from this software without prior written permission. For written + * permission, please contact apache@apache.org. + * + * 5. Products derived from this software may not be called "Apache" + * nor may "Apache" appear in their names without prior written + * permission of the Apache Group. + * + * THIS SOFTWARE IS PROVIDED ``AS IS'' AND ANY EXPRESSED OR IMPLIED + * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES + * OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE + * DISCLAIMED. IN NO EVENT SHALL THE APACHE SOFTWARE FOUNDATION OR + * ITS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, + * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT + * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF + * USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND + * ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, + * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT + * OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF + * SUCH DAMAGE. 
+ * ==================================================================== + * + * This software consists of voluntary contributions made by many + * individuals on behalf of the Apache Software Foundation. For more + * information on the Apache Software Foundation, please see + * . + */ + +/* + * This package is based on the work done by Keiron Liddle, Aftex Software + * to whom the Ant project is very grateful for his + * great code. + */ + +package org.apache.tools.bzip2; + +/** + * A simple class the hold and calculate the CRC for sanity checking + * of the data. + * + * @author Keiron Liddle + */ +class CRC { + public static int crc32Table[] = { + 0x00000000, 0x04c11db7, 0x09823b6e, 0x0d4326d9, + 0x130476dc, 0x17c56b6b, 0x1a864db2, 0x1e475005, + 0x2608edb8, 0x22c9f00f, 0x2f8ad6d6, 0x2b4bcb61, + 0x350c9b64, 0x31cd86d3, 0x3c8ea00a, 0x384fbdbd, + 0x4c11db70, 0x48d0c6c7, 0x4593e01e, 0x4152fda9, + 0x5f15adac, 0x5bd4b01b, 0x569796c2, 0x52568b75, + 0x6a1936c8, 0x6ed82b7f, 0x639b0da6, 0x675a1011, + 0x791d4014, 0x7ddc5da3, 0x709f7b7a, 0x745e66cd, + 0x9823b6e0, 0x9ce2ab57, 0x91a18d8e, 0x95609039, + 0x8b27c03c, 0x8fe6dd8b, 0x82a5fb52, 0x8664e6e5, + 0xbe2b5b58, 0xbaea46ef, 0xb7a96036, 0xb3687d81, + 0xad2f2d84, 0xa9ee3033, 0xa4ad16ea, 0xa06c0b5d, + 0xd4326d90, 0xd0f37027, 0xddb056fe, 0xd9714b49, + 0xc7361b4c, 0xc3f706fb, 0xceb42022, 0xca753d95, + 0xf23a8028, 0xf6fb9d9f, 0xfbb8bb46, 0xff79a6f1, + 0xe13ef6f4, 0xe5ffeb43, 0xe8bccd9a, 0xec7dd02d, + 0x34867077, 0x30476dc0, 0x3d044b19, 0x39c556ae, + 0x278206ab, 0x23431b1c, 0x2e003dc5, 0x2ac12072, + 0x128e9dcf, 0x164f8078, 0x1b0ca6a1, 0x1fcdbb16, + 0x018aeb13, 0x054bf6a4, 0x0808d07d, 0x0cc9cdca, + 0x7897ab07, 0x7c56b6b0, 0x71159069, 0x75d48dde, + 0x6b93dddb, 0x6f52c06c, 0x6211e6b5, 0x66d0fb02, + 0x5e9f46bf, 0x5a5e5b08, 0x571d7dd1, 0x53dc6066, + 0x4d9b3063, 0x495a2dd4, 0x44190b0d, 0x40d816ba, + 0xaca5c697, 0xa864db20, 0xa527fdf9, 0xa1e6e04e, + 0xbfa1b04b, 0xbb60adfc, 0xb6238b25, 0xb2e29692, + 0x8aad2b2f, 0x8e6c3698, 0x832f1041, 
0x87ee0df6, + 0x99a95df3, 0x9d684044, 0x902b669d, 0x94ea7b2a, + 0xe0b41de7, 0xe4750050, 0xe9362689, 0xedf73b3e, + 0xf3b06b3b, 0xf771768c, 0xfa325055, 0xfef34de2, + 0xc6bcf05f, 0xc27dede8, 0xcf3ecb31, 0xcbffd686, + 0xd5b88683, 0xd1799b34, 0xdc3abded, 0xd8fba05a, + 0x690ce0ee, 0x6dcdfd59, 0x608edb80, 0x644fc637, + 0x7a089632, 0x7ec98b85, 0x738aad5c, 0x774bb0eb, + 0x4f040d56, 0x4bc510e1, 0x46863638, 0x42472b8f, + 0x5c007b8a, 0x58c1663d, 0x558240e4, 0x51435d53, + 0x251d3b9e, 0x21dc2629, 0x2c9f00f0, 0x285e1d47, + 0x36194d42, 0x32d850f5, 0x3f9b762c, 0x3b5a6b9b, + 0x0315d626, 0x07d4cb91, 0x0a97ed48, 0x0e56f0ff, + 0x1011a0fa, 0x14d0bd4d, 0x19939b94, 0x1d528623, + 0xf12f560e, 0xf5ee4bb9, 0xf8ad6d60, 0xfc6c70d7, + 0xe22b20d2, 0xe6ea3d65, 0xeba91bbc, 0xef68060b, + 0xd727bbb6, 0xd3e6a601, 0xdea580d8, 0xda649d6f, + 0xc423cd6a, 0xc0e2d0dd, 0xcda1f604, 0xc960ebb3, + 0xbd3e8d7e, 0xb9ff90c9, 0xb4bcb610, 0xb07daba7, + 0xae3afba2, 0xaafbe615, 0xa7b8c0cc, 0xa379dd7b, + 0x9b3660c6, 0x9ff77d71, 0x92b45ba8, 0x9675461f, + 0x8832161a, 0x8cf30bad, 0x81b02d74, 0x857130c3, + 0x5d8a9099, 0x594b8d2e, 0x5408abf7, 0x50c9b640, + 0x4e8ee645, 0x4a4ffbf2, 0x470cdd2b, 0x43cdc09c, + 0x7b827d21, 0x7f436096, 0x7200464f, 0x76c15bf8, + 0x68860bfd, 0x6c47164a, 0x61043093, 0x65c52d24, + 0x119b4be9, 0x155a565e, 0x18197087, 0x1cd86d30, + 0x029f3d35, 0x065e2082, 0x0b1d065b, 0x0fdc1bec, + 0x3793a651, 0x3352bbe6, 0x3e119d3f, 0x3ad08088, + 0x2497d08d, 0x2056cd3a, 0x2d15ebe3, 0x29d4f654, + 0xc5a92679, 0xc1683bce, 0xcc2b1d17, 0xc8ea00a0, + 0xd6ad50a5, 0xd26c4d12, 0xdf2f6bcb, 0xdbee767c, + 0xe3a1cbc1, 0xe760d676, 0xea23f0af, 0xeee2ed18, + 0xf0a5bd1d, 0xf464a0aa, 0xf9278673, 0xfde69bc4, + 0x89b8fd09, 0x8d79e0be, 0x803ac667, 0x84fbdbd0, + 0x9abc8bd5, 0x9e7d9662, 0x933eb0bb, 0x97ffad0c, + 0xafb010b1, 0xab710d06, 0xa6322bdf, 0xa2f33668, + 0xbcb4666d, 0xb8757bda, 0xb5365d03, 0xb1f740b4 + }; + + public CRC() { + initialiseCRC(); + } + + void initialiseCRC() { + globalCrc = 0xffffffff; + } + + int getFinalCRC() { + return 
~globalCrc; + } + + int getGlobalCRC() { + return globalCrc; + } + + void setGlobalCRC(int newCrc) { + globalCrc = newCrc; + } + + void updateCRC(int inCh) { + int temp = (globalCrc >> 24) ^ inCh; + if (temp < 0) { + temp = 256 + temp; + } + globalCrc = (globalCrc << 8) ^ CRC.crc32Table[temp]; + } + + int globalCrc; +} + diff --git a/Tools/Cache Editor/src/org/apollo/fs/FileDescriptor.java b/Tools/Cache Editor/src/org/apollo/fs/FileDescriptor.java new file mode 100644 index 000000000..212703588 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/FileDescriptor.java @@ -0,0 +1,45 @@ +package org.apollo.fs; + +/** + * A class which points to a file in the cache. + * @author Graham + */ +public final class FileDescriptor { + + /** + * The file type. + */ + private final int type; + + /** + * The file id. + */ + private final int file; + + /** + * Creates the file descriptor. + * @param type The file type. + * @param file The file id. + */ + public FileDescriptor(int type, int file) { + this.type = type; + this.file = file; + } + + /** + * Gets the file type. + * @return The file type. + */ + public int getType() { + return type; + } + + /** + * Gets the file id. + * @return The file id. + */ + public int getFile() { + return file; + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/FileSystemConstants.java b/Tools/Cache Editor/src/org/apollo/fs/FileSystemConstants.java new file mode 100644 index 000000000..2c308162b --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/FileSystemConstants.java @@ -0,0 +1,46 @@ +package org.apollo.fs; + +/** + * Holds file system related constants. + * @author Graham + */ +public final class FileSystemConstants { + + /** + * The number of caches. + */ + public static final int CACHE_COUNT = 5; + + /** + * The number of archives in cache 0. + */ + public static final int ARCHIVE_COUNT = 9; + + /** + * The size of an index. + */ + public static final int INDEX_SIZE = 6; + + /** + * The size of a header. 
+ */ + public static final int HEADER_SIZE = 8; + + /** + * The size of a chunk. + */ + public static final int CHUNK_SIZE = 512; + + /** + * The size of a block. + */ + public static final int BLOCK_SIZE = HEADER_SIZE + CHUNK_SIZE; + + /** + * Default private constructor to prevent instantiation. + */ + private FileSystemConstants() { + + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/Index.java b/Tools/Cache Editor/src/org/apollo/fs/Index.java new file mode 100644 index 000000000..505034f16 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/Index.java @@ -0,0 +1,62 @@ +package org.apollo.fs; + +/** + * An {@link Index} points to a file in the {@code main_file_cache.dat} file. + * @author Graham + */ +public final class Index { + + /** + * Decodes a buffer into an index. + * @param buffer The buffer. + * @return The decoded {@link Index}. + * @throws IllegalArgumentException if the buffer length is invalid. + */ + public static Index decode(byte[] buffer) { + if (buffer.length != FileSystemConstants.INDEX_SIZE) { + throw new IllegalArgumentException("Incorrect buffer length."); + } + + int size = ((buffer[0] & 0xFF) << 16) | ((buffer[1] & 0xFF) << 8) | (buffer[2] & 0xFF); + int block = ((buffer[3] & 0xFF) << 16) | ((buffer[4] & 0xFF) << 8) | (buffer[5] & 0xFF); + + return new Index(size, block); + } + + /** + * The size of the file. + */ + private final int size; + + /** + * The first block of the file. + */ + private final int block; + + /** + * Creates the index. + * @param size The size of the file. + * @param block The first block of the file. + */ + public Index(int size, int block) { + this.size = size; + this.block = block; + } + + /** + * Gets the size of the file. + * @return The size of the file. + */ + public int getSize() { + return size; + } + + /** + * Gets the first block of the file. + * @return The first block of the file. 
+ */ + public int getBlock() { + return block; + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/IndexedFileSystem.java b/Tools/Cache Editor/src/org/apollo/fs/IndexedFileSystem.java new file mode 100644 index 000000000..c8110e050 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/IndexedFileSystem.java @@ -0,0 +1,291 @@ +package org.apollo.fs; + +import java.io.Closeable; +import java.io.File; +import java.io.FileNotFoundException; +import java.io.IOException; +import java.io.RandomAccessFile; +import java.nio.ByteBuffer; +import java.util.zip.CRC32; + +/** + * A file system based on top of the operating system's file system. It + * consists of a data file and index files. Index files point to blocks in the + * data file, which contains the actual data. + * @author Graham + */ +public final class IndexedFileSystem implements Closeable { + + /** + * Read only flag. + */ + private final boolean readOnly; + + /** + * The index files. + */ + private RandomAccessFile[] indices = new RandomAccessFile[256]; + + /** + * The data file. + */ + private RandomAccessFile data; + + /** + * The cached CRC table. + */ + private ByteBuffer crcTable; + + /** + * Creates the file system with the specified base directory. + * @param base The base directory. + * @param readOnly A flag indicating if the file system will be read only. + * @throws Exception if the file system is invalid. + */ + public IndexedFileSystem(File base, boolean readOnly) throws Exception { + this.readOnly = readOnly; + detectLayout(base); + } + + /** + * Checks if this {@link IndexedFileSystem} is read only. + * @return {@code true} if so, {@code false} if not. + */ + public boolean isReadOnly() { + return readOnly; + } + + /** + * Automatically detect the layout of the specified directory. + * @param base The base directory. + * @throws Exception if the file system is invalid. 
+ */ + private void detectLayout(File base) throws Exception { + int indexCount = 0; + for (int index = 0; index < indices.length; index++) { + File f = new File(base.getAbsolutePath() + "/main_file_cache.idx" + index); + if (f.exists() && !f.isDirectory()) { + indexCount++; + indices[index] = new RandomAccessFile(f, readOnly ? "r" : "rw"); + } + } + if (indexCount <= 0) { + throw new Exception("No index file(s) present"); + } + + File oldEngineData = new File(base.getAbsolutePath() + "/main_file_cache.dat"); + File newEngineData = new File(base.getAbsolutePath() + "/main_file_cache.dat2"); + if (oldEngineData.exists() && !oldEngineData.isDirectory()) { + data = new RandomAccessFile(oldEngineData, readOnly ? "r" : "rw"); + } else if (newEngineData.exists() && !oldEngineData.isDirectory()) { + data = new RandomAccessFile(newEngineData, readOnly ? "r" : "rw"); + } else { + throw new Exception("No data file present"); + } + } + + /** + * Gets the index of a file. + * @param fd The {@link FileDescriptor} which points to the file. + * @return The {@link Index}. + * @throws IOException if an I/O error occurs. + */ + private Index getIndex(FileDescriptor fd) throws IOException { + int index = fd.getType(); + if (index < 0 || index >= indices.length) { + throw new IndexOutOfBoundsException(); + } + + byte[] buffer = new byte[FileSystemConstants.INDEX_SIZE]; + RandomAccessFile indexFile = indices[index]; + synchronized (indexFile) { + long ptr = (long) fd.getFile() * (long) FileSystemConstants.INDEX_SIZE; + if (ptr >= 0 && indexFile.length() >= (ptr + FileSystemConstants.INDEX_SIZE)) { + indexFile.seek(ptr); + indexFile.readFully(buffer); + } else { + throw new FileNotFoundException(); + } + } + + return Index.decode(buffer); + } + + /** + * Gets the number of files with the specified type. + * @param type The type. + * @return The number of files. + * @throws IOException if an I/O error occurs. 
+ */ + private int getFileCount(int type) throws IOException { + if (type < 0 || type >= indices.length) { + throw new IndexOutOfBoundsException(); + } + + RandomAccessFile indexFile = indices[type]; + synchronized (indexFile) { + return (int) (indexFile.length() / FileSystemConstants.INDEX_SIZE); + } + } + + /** + * Gets the CRC table. + * @return The CRC table. + * @throws IOException if an I/O erorr occurs. + */ + public ByteBuffer getCrcTable() throws IOException { + if (readOnly) { + synchronized (this) { + if (crcTable != null) { + return crcTable.duplicate(); + } + } + + // the number of archives + int archives = getFileCount(0); + + // the hash + int hash = 1234; + + // the CRCs + int[] crcs = new int[archives]; + + // calculate the CRCs + CRC32 crc32 = new CRC32(); + for (int i = 1; i < crcs.length; i++) { + crc32.reset(); + + ByteBuffer bb = getFile(0, i); + byte[] bytes = new byte[bb.remaining()]; + bb.get(bytes, 0, bytes.length); + crc32.update(bytes, 0, bytes.length); + + crcs[i] = (int) crc32.getValue(); + } + + // hash the CRCs and place them in the buffer + ByteBuffer buf = ByteBuffer.allocate(crcs.length * 4 + 4); + for (int i = 0; i < crcs.length; i++) { + hash = (hash << 1) + crcs[i]; + buf.putInt(crcs[i]); + } + + // place the hash into the buffer + buf.putInt(hash); + buf.flip(); + + synchronized (this) { + crcTable = buf.asReadOnlyBuffer(); + return crcTable.duplicate(); + } + } else { + throw new IOException("cannot get CRC table from a writable file system"); + } + } + + /** + * Gets a file. + * @param type The file type. + * @param file The file id. + * @return A {@link ByteBuffer} which contains the contents of the file. + * @throws IOException if an I/O error occurs. + */ + public ByteBuffer getFile(int type, int file) throws IOException { + return getFile(new FileDescriptor(type, file)); + } + + /** + * Gets a file. + * @param fd The {@link FileDescriptor} which points to the file. 
+ * @return A {@link ByteBuffer} which contains the contents of the file. + * @throws IOException if an I/O error occurs. + */ + public ByteBuffer getFile(FileDescriptor fd) throws IOException { + Index index = getIndex(fd); + ByteBuffer buffer = ByteBuffer.allocate(index.getSize()); + + // calculate some initial values + long ptr = (long) index.getBlock() * (long) FileSystemConstants.BLOCK_SIZE; + int read = 0; + int size = index.getSize(); + int blocks = size / FileSystemConstants.CHUNK_SIZE; + if (size % FileSystemConstants.CHUNK_SIZE != 0) { + blocks++; + } + + for (int i = 0; i < blocks; i++) { + + // read header + byte[] header = new byte[FileSystemConstants.HEADER_SIZE]; + synchronized (data) { + data.seek(ptr); + data.readFully(header); + } + + // increment pointers + ptr += FileSystemConstants.HEADER_SIZE; + + // parse header + int nextFile = ((header[0] & 0xFF) << 8) | (header[1] & 0xFF); + int curChunk = ((header[2] & 0xFF) << 8) | (header[3] & 0xFF); + int nextBlock = ((header[4] & 0xFF) << 16) | ((header[5] & 0xFF) << 8) | (header[6] & 0xFF); + int nextType = header[7] & 0xFF; + + // check expected chunk id is correct + if (i != curChunk) { + throw new IOException("Chunk id mismatch."); + } + + // calculate how much we can read + int chunkSize = size - read; + if (chunkSize > FileSystemConstants.CHUNK_SIZE) { + chunkSize = FileSystemConstants.CHUNK_SIZE; + } + + // read the next chunk and put it in the buffer + byte[] chunk = new byte[chunkSize]; + synchronized (data) { + data.seek(ptr); + data.readFully(chunk); + } + buffer.put(chunk); + + // increment pointers + read += chunkSize; + ptr = (long) nextBlock * (long) FileSystemConstants.BLOCK_SIZE; + + // if we still have more data to read, check the validity of the + // header + if (size > read) { + if (nextType != (fd.getType() + 1)) { + throw new IOException("File type mismatch."); + } + + if (nextFile != fd.getFile()) { + throw new IOException("File id mismatch."); + } + } + } + + buffer.flip(); + 
return buffer; + } + + @Override + public void close() throws IOException { + if (data != null) { + synchronized (data) { + data.close(); + } + } + + for (RandomAccessFile index : indices) { + if (index != null) { + synchronized (index) { + index.close(); + } + } + } + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/archive/Archive.java b/Tools/Cache Editor/src/org/apollo/fs/archive/Archive.java new file mode 100644 index 000000000..080a51bb3 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/archive/Archive.java @@ -0,0 +1,97 @@ +package org.apollo.fs.archive; + +import java.io.FileNotFoundException; +import java.io.IOException; +import java.nio.ByteBuffer; + +import org.apollo.fs.util.ByteBufferUtil; +import org.apollo.fs.util.CompressionUtil; + +/** + * Represents an archive. + * @author Graham + */ +public final class Archive { + + /** + * Decodes the archive in the specified buffer. + * @param buffer The buffer. + * @return The archive. + * @throws IOException if an I/O error occurs. 
+ */ + public static Archive decode(ByteBuffer buffer) throws IOException { + int extractedSize = ByteBufferUtil.readUnsignedTriByte(buffer); + int size = ByteBufferUtil.readUnsignedTriByte(buffer); + boolean extracted = false; + + if (size != extractedSize) { + byte[] compressed = new byte[size]; + byte[] uncompressed = new byte[extractedSize]; + buffer.get(compressed); + CompressionUtil.unbzip2(compressed, uncompressed); + buffer = ByteBuffer.wrap(uncompressed); + extracted = true; + } + + int entries = buffer.getShort() & 0xFFFF; + int[] identifiers = new int[entries]; + int[] extractedSizes = new int[entries]; + int[] sizes = new int[entries]; + + for (int i = 0; i < entries; i++) { + identifiers[i] = buffer.getInt(); + extractedSizes[i] = ByteBufferUtil.readUnsignedTriByte(buffer); + sizes[i] = ByteBufferUtil.readUnsignedTriByte(buffer); + } + + ArchiveEntry[] entry = new ArchiveEntry[entries]; + + for (int i = 0; i < entries; i++) { + ByteBuffer entryBuffer = ByteBuffer.allocate(extractedSizes[i]); + if (!extracted) { + byte[] compressed = new byte[sizes[i]]; + byte[] uncompressed = new byte[extractedSizes[i]]; + buffer.get(compressed); + CompressionUtil.unbzip2(compressed, uncompressed); + entryBuffer = ByteBuffer.wrap(uncompressed); + } + entry[i] = new ArchiveEntry(identifiers[i], entryBuffer); + } + + return new Archive(entry); + } + + /** + * The entries in this archive. + */ + private final ArchiveEntry[] entries; + + /** + * Creates a new archive. + * @param entries The entries in this archive. + */ + public Archive(ArchiveEntry[] entries) { + this.entries = entries; + } + + /** + * Gets an entry by its name. + * @param name The name. + * @return The entry. + * @throws FileNotFoundException if the file could not be found. 
+ */ + public ArchiveEntry getEntry(String name) throws FileNotFoundException { + int hash = 0; + name = name.toUpperCase(); + for (int i = 0; i < name.length(); i++) { + hash = (hash * 61 + name.charAt(i)) - 32; + } + for (ArchiveEntry entry : entries) { + if (entry.getIdentifier() == hash) { + return entry; + } + } + throw new FileNotFoundException(); + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/archive/ArchiveEntry.java b/Tools/Cache Editor/src/org/apollo/fs/archive/ArchiveEntry.java new file mode 100644 index 000000000..1688483cd --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/archive/ArchiveEntry.java @@ -0,0 +1,47 @@ +package org.apollo.fs.archive; + +import java.nio.ByteBuffer; + +/** + * Represents a single entry in an {@link Archive}. + * @author Graham + */ +public final class ArchiveEntry { + + /** + * The identifier of this entry. + */ + private final int identifier; + + /** + * The buffer of this entry. + */ + private final ByteBuffer buffer; + + /** + * Creates a new archive entry. + * @param identifier The identifier. + * @param buffer The buffer. + */ + public ArchiveEntry(int identifier, ByteBuffer buffer) { + this.identifier = identifier; + this.buffer = buffer.asReadOnlyBuffer(); + } + + /** + * Gets the identifier of this entry. + * @return The identifier of this entry. + */ + public int getIdentifier() { + return identifier; + } + + /** + * Gets the buffer of this entry. + * @return This buffer of this entry. + */ + public ByteBuffer getBuffer() { + return buffer.duplicate(); + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/archive/package-info.java b/Tools/Cache Editor/src/org/apollo/fs/archive/package-info.java new file mode 100644 index 000000000..0d21259ce --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/archive/package-info.java @@ -0,0 +1,4 @@ +/** + * Contains classes which deal with archives. 
+ */ +package org.apollo.fs.archive; diff --git a/Tools/Cache Editor/src/org/apollo/fs/package-info.java b/Tools/Cache Editor/src/org/apollo/fs/package-info.java new file mode 100644 index 000000000..14c2a3bd8 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/package-info.java @@ -0,0 +1,5 @@ +/** + * Contains classes which deal with the file system that the client uses to + * store game data files. + */ +package org.apollo.fs; diff --git a/Tools/Cache Editor/src/org/apollo/fs/parser/package-info.java b/Tools/Cache Editor/src/org/apollo/fs/parser/package-info.java new file mode 100644 index 000000000..c08852a93 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/parser/package-info.java @@ -0,0 +1,4 @@ +/** + * Contains classes which parse files within the game's cache. + */ +package org.apollo.fs.parser; diff --git a/Tools/Cache Editor/src/org/apollo/fs/util/ByteBufferUtil.java b/Tools/Cache Editor/src/org/apollo/fs/util/ByteBufferUtil.java new file mode 100644 index 000000000..68f7febad --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/util/ByteBufferUtil.java @@ -0,0 +1,41 @@ +package org.apollo.fs.util; + +import java.nio.ByteBuffer; + +/** + * A utility class which contains {@link ByteBuffer}-related methods. + * @author Graham + */ +public final class ByteBufferUtil { + + /** + * Reads an unsigned tri byte from the specified buffer. + * @param buffer The buffer. + * @return The tri byte. + */ + public static int readUnsignedTriByte(ByteBuffer buffer) { + return ((buffer.get() & 0xFF) << 16) | ((buffer.get() & 0xFF) << 8) | (buffer.get() & 0xFF); + } + + /** + * Reads a string from the specified buffer. + * @param buffer The buffer. + * @return The string. + */ + public static String readString(ByteBuffer buffer) { + StringBuilder bldr = new StringBuilder(); + char c; + while ((c = (char) buffer.get()) != 10) { + bldr.append(c); + } + return bldr.toString(); + } + + /** + * Default private constructor to prevent instantiation. 
+ */ + private ByteBufferUtil() { + + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/util/CompressionUtil.java b/Tools/Cache Editor/src/org/apollo/fs/util/CompressionUtil.java new file mode 100644 index 000000000..173ae2c3b --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/util/CompressionUtil.java @@ -0,0 +1,103 @@ +package org.apollo.fs.util; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.DataInputStream; +import java.io.IOException; +import java.util.zip.DeflaterOutputStream; +import java.util.zip.GZIPInputStream; +import java.util.zip.GZIPOutputStream; + +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorInputStream; +import org.apache.commons.compress.compressors.bzip2.BZip2CompressorOutputStream; + +/** + * A utility class for performing compression/uncompression. + * @author Graham + */ +public final class CompressionUtil { + + /** + * Ungzips the compressed array and places the results into the uncompressed array. + * @param compressed The compressed array. + * @param uncompressed The uncompressed array. + * @throws IOException if an I/O error occurs. + */ + public static void ungzip(byte[] compressed, byte[] uncompressed) throws IOException { + DataInputStream is = new DataInputStream(new GZIPInputStream(new ByteArrayInputStream(compressed))); + try { + is.readFully(uncompressed); + } finally { + is.close(); + } + } + + /** + * Unbzip2s the compressed array and places the result into the uncompressed array. + * @param compressed The compressed array. + * @param uncompressed The uncompressed array. + * @throws IOException if an I/O error occurs. 
+ */ + public static void unbzip2(byte[] compressed, byte[] uncompressed) throws IOException { + byte[] newCompressed = new byte[compressed.length + 4]; + newCompressed[0] = 'B'; + newCompressed[1] = 'Z'; + newCompressed[2] = 'h'; + newCompressed[3] = '1'; + System.arraycopy(compressed, 0, newCompressed, 4, compressed.length); + + DataInputStream is = new DataInputStream(new BZip2CompressorInputStream(new ByteArrayInputStream(newCompressed))); + try { + is.readFully(uncompressed); + } finally { + is.close(); + } + } + + /** + * Gzips the specified array. + * @param bytes The uncompressed array. + * @return The compressed array. + * @throws IOException if an I/O error occurs. + */ + public static byte[] gzip(byte[] bytes) throws IOException { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + DeflaterOutputStream os = new GZIPOutputStream(bout); + try { + os.write(bytes); + os.finish(); + return bout.toByteArray(); + } finally { + os.close(); + } + } + + /** + * Bzip2s the specified array. + * @param bytes The uncompressed array. + * @return The compressed array. + * @throws IOException if an I/O error occurs. + */ + public static byte[] bzip2(byte[] bytes) throws IOException { + ByteArrayOutputStream bout = new ByteArrayOutputStream(); + BZip2CompressorOutputStream os = new BZip2CompressorOutputStream(bout, 1); + try { + os.write(bytes); + os.finish(); + byte[] compressed = bout.toByteArray(); + byte[] newCompressed = new byte[compressed.length - 4]; + System.arraycopy(compressed, 4, newCompressed, 0, newCompressed.length); + return newCompressed; + } finally { + os.close(); + } + } + + /** + * Default private constructor to prevent instantiation. 
+ */ + private CompressionUtil() { + + } + +} diff --git a/Tools/Cache Editor/src/org/apollo/fs/util/ZipUtils.java b/Tools/Cache Editor/src/org/apollo/fs/util/ZipUtils.java new file mode 100644 index 000000000..3009ba2c3 --- /dev/null +++ b/Tools/Cache Editor/src/org/apollo/fs/util/ZipUtils.java @@ -0,0 +1,50 @@ +package org.apollo.fs.util; + +import java.io.ByteArrayInputStream; +import java.io.ByteArrayOutputStream; +import java.io.IOException; +import java.io.InputStream; +import java.nio.ByteBuffer; +import java.util.zip.GZIPInputStream; + +public class ZipUtils { + + /** + * Unzips a cache file. + * + * @param file + * The cache file. + * @return The unzipped byte buffer. + * @throws IOException + * if an I/O error occurs. + */ + public static ByteBuffer unzip(ByteBuffer buffer) throws IOException { + byte[] data = new byte[buffer.remaining()]; + buffer.get(data); + InputStream is = new GZIPInputStream(new ByteArrayInputStream(data)); + byte[] out; + try { + ByteArrayOutputStream os = new ByteArrayOutputStream(); + try { + while (true) { + byte[] buf = new byte[1024]; + int read = is.read(buf, 0, buf.length); + if (read == -1) { + break; + } + os.write(buf, 0, read); + } + } finally { + os.close(); + } + out = os.toByteArray(); + } finally { + is.close(); + } + ByteBuffer newBuf = ByteBuffer.allocate(out.length); + newBuf.put(out); + newBuf.flip(); + return newBuf; + } +} + diff --git a/Tools/Cache Editor/src/valkyrion/CachePacker.java b/Tools/Cache Editor/src/valkyrion/CachePacker.java new file mode 100644 index 000000000..568b9f7fd --- /dev/null +++ b/Tools/Cache Editor/src/valkyrion/CachePacker.java @@ -0,0 +1,132 @@ +package valkyrion; + +import java.io.File; +import java.io.FileInputStream; +import java.io.IOException; +import java.io.InputStream; + +import javax.swing.JOptionPane; + +import com.alex.store.Store; +import com.alex.utils.Constants; +import com.alex.utils.Utils; + +public class CachePacker { + FileInputStream f2; + + public static void 
replaceMidi(String cacheDir, int archiveId, int fileId, String convertedFileDir) throws IOException { + Store cache = new Store(cacheDir); + if (cache.getIndexes()[6].putFile(archiveId, fileId, getBytesFromFile(new File(convertedFileDir)))) { + JOptionPane.showMessageDialog(null, "MIDI packed successfully, if your client crashes on startup, use another midi and the backuped cache and try again."); + } else { + JOptionPane.showMessageDialog(null, "MIDI did not successfully pack!"); + } + } + public static void main(String...args) throws Throwable { + String dir = "C:/Users/v4rg/Downloads/rs music/"; + Store store = new Store("./498/"); + Store from = new Store("./666/"); + if (true) { + int index = 14; + System.out.println("To amount=" + store.getIndexes()[index].getValidArchivesCount()); + System.out.println("From amount=" + from.getIndexes()[index].getValidArchivesCount()); + int count = 0; + int fail = 0; + for (int archive = 0; archive < from.getIndexes()[index].getValidArchivesCount(); archive++) { + if (from.getIndexes()[index].archiveExists(archive)) { + byte[] data = from.getIndexes()[index].getFile(archive, 0); + if (data == null || data.length < 1) { + fail++; + // System.out.println("Invalid archive " + archive); + continue; + } + store.getIndexes()[index].putFile(archive, 0, data); + if (store.getIndexes()[index].getFile(archive, 0) != null) { + if (count++ % 100 == 0) + System.out.println("Packed music " + archive); + } else { + System.out.println("Failed to pack music " + archive); + fail++; + } + // continue; + } else { + fail++; + } + } + System.out.println("Packed " + count + "/" + (count + fail) + " music (" + 666 + ")!"); + return; + } + // for (File f : new File(dir + "out/").listFiles()) { + // int index = Integer.parseInt(f.getName().replace(".mid", "")); + // boolean b = store.getIndexes()[6].putFile(index, 0, Constants.GZIP_COMPRESSION, getBytesFromFile(f), null, true, false, -1, -1); + // System.out.println(b ? 
"Successfully packed music " + index + "!" : "Failed to pack music " + index + "!"); + // } + // store.getIndexes()[6].resetCachedFiles(); + // BufferedWriter musicList = new BufferedWriter(new FileWriter("./music-list.txt")); + // new File(dir + "out/").mkdir(); + // int index = 1; + // for (File f : new File(dir + "rs music/").listFiles()) { + // if (!f.getName().startsWith("runescape")) { + // continue; + // } + // System.out.println(f.getName()); + // try { + // convertMidi(dir + "rs music/" + f.getName(), dir + "out/" + index + ".mid"); + // musicList.append((index++) + ": " + f.getName().replace(".mid", "")); + // musicList.newLine(); + // } catch (Throwable t) { + // t.printStackTrace(); + // musicList.append((index++) + ": " + f.getName().replace(".mid", "") + " //FAILED!"); + // musicList.newLine(); + // } + // } + // musicList.flush(); + // musicList.close(); + } + + public static void convertMidi(String input, String output) throws Exception { + MusicEncoder.convertMidi(input, output); + } + + public static void addMusicFile(String cacheDir, String convertedFileDir, String musicName) throws IOException { + Store cache = new Store(cacheDir); + cache.getIndexes()[6].putFile(803, 0, Constants.GZIP_COMPRESSION, getBytesFromFile(new File(convertedFileDir)), null, true, false, Utils.getNameHash(musicName), -1); + } + + public static byte[] getBytesFromFile(File file) throws IOException { + InputStream is = new FileInputStream(file); + + // Get the size of the file + long length = file.length(); + + // You cannot create an array using a long type. + // It needs to be an int type. + // Before converting to an int type, check + // to ensure that file is not larger than Integer.MAX_VALUE. 
+ if (length > Integer.MAX_VALUE) { + // File is too large + } + + // Create the byte array to hold the data + byte[] bytes = new byte[(int)length]; + + // Read in the bytes + int offset = 0; + int numRead = 0; + while (offset < bytes.length + && (numRead=is.read(bytes, offset, bytes.length-offset)) >= 0) { + offset += numRead; + } + + // Ensure all the bytes have been read in + if (offset < bytes.length) { + is.close(); + throw new IOException("Could not completely read file "+file.getName()); + } + + // Close the input stream and return bytes + is.close(); + return bytes; + } + +} diff --git a/Tools/Cache Editor/src/valkyrion/MusicEncoder.java b/Tools/Cache Editor/src/valkyrion/MusicEncoder.java new file mode 100644 index 000000000..881e4a35e --- /dev/null +++ b/Tools/Cache Editor/src/valkyrion/MusicEncoder.java @@ -0,0 +1,487 @@ +package valkyrion; + +import java.io.DataOutputStream; +import java.io.File; +import java.io.FileOutputStream; +import java.io.IOException; + +import javax.sound.midi.MetaMessage; +import javax.sound.midi.MidiEvent; +import javax.sound.midi.MidiMessage; +import javax.sound.midi.MidiSystem; +import javax.sound.midi.Sequence; +import javax.sound.midi.ShortMessage; +import javax.sound.midi.Track; +import javax.swing.JOptionPane; + +/** + * Converts a MIDI file to the runescape format + * + * NOTE: Jagex doesn't use the default soundbank, + * they have multiple soundbanks and their own instruments located in + * idx15 that use sound effects as their notes (idx4/14) + * For this reason some midi files might sound different although most of their + * first soundbank matches the default soundbank instruments + * + * @author Vincent + * + */ +public class MusicEncoder { + + public static final int NOTE_OFF = 0x80; + public static final int NOTE_ON = 0x90; + public static final int KEY_AFTER_TOUCH = 0xA0; + public static final int CONTROL_CHANGE = 0xB0; + public static final int PROGRAM_CHANGE = 0xC0; + public static final int 
CHANNEL_AFTER_TOUCH = 0xD0; + public static final int PITCH_WHEEL_CHANGE = 0xE0; + + public static final int END_OF_TRACK = 0x2F; + public static final int SET_TEMPO = 0x51; + + public static void convertMidi(String input, String output) throws Exception { + Sequence sequence = MidiSystem.getSequence(new File(input)); + DataOutputStream dos = new DataOutputStream(new FileOutputStream(output)); + + //this could be done with a lot less loops and using multiple buffers instead + + //write event opcodes with channel + for (Track track : sequence.getTracks()) { + int prevChannel = 0; + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + int ch = (sm.getChannel() ^ prevChannel) << 4; + switch(sm.getCommand()) { + case NOTE_OFF: + dos.write(1 | ch); + prevChannel = sm.getChannel(); + break; + case NOTE_ON: + dos.write(0 | ch); + prevChannel = sm.getChannel(); + break; + case KEY_AFTER_TOUCH: + dos.write(5 | ch); + prevChannel = sm.getChannel(); + break; + case CONTROL_CHANGE: + dos.write(2 | ch); + prevChannel = sm.getChannel(); + break; + case PROGRAM_CHANGE: + dos.write(6 | ch); + prevChannel = sm.getChannel(); + break; + case CHANNEL_AFTER_TOUCH: + dos.write(4 | ch); + prevChannel = sm.getChannel(); + break; + case PITCH_WHEEL_CHANGE: + dos.write(3 | ch); + prevChannel = sm.getChannel(); + break; + } + } else if(message instanceof MetaMessage) { + MetaMessage mm = (MetaMessage) message; + switch(mm.getType()) { + case END_OF_TRACK: + dos.write(7); + break; + case SET_TEMPO: + dos.write(23); + break; + default: + //OTHER META EVENTS ARE IGNORED + break; + } + } else { + //SYSEX MESSAGES ARE IGNORED + } + } + + } + + //write event timestamp for used opcodes + for (Track track : sequence.getTracks()) { + int lastTick = 0; + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = 
event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + switch(sm.getCommand()) { + case NOTE_OFF: + case NOTE_ON: + case KEY_AFTER_TOUCH: + case CONTROL_CHANGE: + case PROGRAM_CHANGE: + case CHANNEL_AFTER_TOUCH: + case PITCH_WHEEL_CHANGE: + putVariableInt(dos, (int)event.getTick() - lastTick); + lastTick = (int) event.getTick(); + break; + } + } else if(message instanceof MetaMessage) { + MetaMessage mm = (MetaMessage) message; + switch(mm.getType()) { + case END_OF_TRACK: + case SET_TEMPO: + putVariableInt(dos, (int)event.getTick() - lastTick); + lastTick = (int) event.getTick(); + break; + } + } + } + } + + //jagex works with offset from the last one because this is usually 0 and gives better compression rates + int lastController = 0; + int lastNote = 0; + int lastNoteOnVelocity = 0; + int lastNoteOffVelocity = 0; + int lastWheelChangeT = 0; + int lastWheelChangeB = 0; + int lastChannelAfterTouch = 0; + int lastKeyAfterTouchVelocity = 0; + + //write controller number changes + int[] lastControllerValue = new int[128]; + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE) { + dos.write(sm.getData1() - lastController); + lastController = sm.getData1(); + } + } + } + } + + //controller 64 65 120 121 123 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && (sm.getData1() == 64 || sm.getData1() == 65 || sm.getData1() == 120 || sm.getData1() == 121 || sm.getData1() == 123)) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + 
lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //key after touch velocity changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == KEY_AFTER_TOUCH) { + dos.write(sm.getData2() - lastKeyAfterTouchVelocity); + lastKeyAfterTouchVelocity = sm.getData2(); + } + } + } + } + //channel after touch channel changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CHANNEL_AFTER_TOUCH) { + dos.write(sm.getData1() - lastChannelAfterTouch); + lastChannelAfterTouch = sm.getData1(); + } + } + } + } + //pitch bend top values + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == PITCH_WHEEL_CHANGE) { + dos.write(sm.getData2() - lastWheelChangeT); + lastWheelChangeT = sm.getData2(); + } + } + } + } + //controller 1 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 1) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 7 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + 
MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 7) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 10 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 10) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //note changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == NOTE_OFF || sm.getCommand() == NOTE_ON || sm.getCommand() == KEY_AFTER_TOUCH) { + dos.write(sm.getData1() - lastNote); + lastNote = sm.getData1(); + } + } + } + } + //note on velocity changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == NOTE_ON) { + dos.write(sm.getData2() - lastNoteOnVelocity); + lastNoteOnVelocity = sm.getData2(); + } + } + } + } + //all unlisted controller changes (controllers are probably grouped like this because it gives an even better compression) + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { 
+ ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && !(sm.getData1() == 64 || sm.getData1() == 65 || sm.getData1() == 120 || sm.getData1() == 121 || sm.getData1() == 123 || sm.getData1() == 0 || sm.getData1() == 32 || sm.getData1() == 1 || sm.getData1() == 33 || sm.getData1() == 7 || sm.getData1() == 39 || sm.getData1() == 10 || sm.getData1() == 42 || sm.getData1() == 99 || sm.getData1() == 98 || sm.getData1() == 101 || sm.getData1() == 100)) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //note off velocity changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == NOTE_OFF) { + dos.write(sm.getData2() - lastNoteOffVelocity); + lastNoteOffVelocity = sm.getData2(); + } + } + } + } + //controller 33 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 33) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 39 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 39) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + 
//controller 42 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 42) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 0, 32 and program changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && (sm.getData1() == 0 || sm.getData1() == 32)) { + JOptionPane.showMessageDialog(null, "WARNING SONG USES SOUND BANKS BYTE: "+sm.getData1()+" VALUE: "+sm.getData2()+" "); + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } else if(sm.getCommand() == PROGRAM_CHANGE) { + dos.write(sm.getData1()); + } + } + } + } + //pitch bend bottom changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == PITCH_WHEEL_CHANGE) { + dos.write(sm.getData1() - lastWheelChangeB); + lastWheelChangeB = sm.getData1(); + } + } + } + } + //controller 99 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 99) { + dos.write(sm.getData2() - 
lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 98 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 98) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 101 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 101) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //controller 100 changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof ShortMessage) { + ShortMessage sm = (ShortMessage) message; + if(sm.getCommand() == CONTROL_CHANGE && sm.getData1() == 100) { + dos.write(sm.getData2() - lastControllerValue[sm.getData1()]); + lastControllerValue[sm.getData1()] = sm.getData2(); + } + } + } + } + //tempo changes + for (Track track : sequence.getTracks()) { + for (int i=0; i < track.size(); i++) { + MidiEvent event = track.get(i); + MidiMessage message = event.getMessage(); + if (message instanceof MetaMessage) { + MetaMessage mm = (MetaMessage) message; + if(mm.getType() == SET_TEMPO) { + dos.write(mm.getData()); + } + } + } + } + //write footer + dos.write(sequence.getTracks().length); + dos.writeShort(sequence.getResolution()); + + dos.flush(); + 
dos.close(); + + } + + static final void putVariableInt(DataOutputStream dos, int value) throws IOException { + if ((value & ~0x7f) != 0) { + if ((value & ~0x3fff) != 0) { + if ((~0x1fffff & value) != 0) { + if ((~0xfffffff & value) != 0) { + dos.write(value >>> 28 | 0x80); + } + dos.write(value >>> 21 | 0x80); + } + dos.write(value >>> 14 | 0x80); + } + dos.write(value >>> 7 | 0x80); + } + dos.write(0x7f & value); + } + +} \ No newline at end of file diff --git a/Tools/Cache Editor/src/valkyrion/PackerGUI.java b/Tools/Cache Editor/src/valkyrion/PackerGUI.java new file mode 100644 index 000000000..bfe37aeab --- /dev/null +++ b/Tools/Cache Editor/src/valkyrion/PackerGUI.java @@ -0,0 +1,115 @@ +package valkyrion; + +import java.awt.EventQueue; +import java.awt.event.ActionEvent; +import java.awt.event.ActionListener; + +import javax.swing.JButton; +import javax.swing.JDialog; +import javax.swing.JFileChooser; +import javax.swing.JFrame; +import javax.swing.JLabel; +import javax.swing.JPanel; +import javax.swing.JTextField; +import javax.swing.UIManager; +import javax.swing.UnsupportedLookAndFeelException; +import javax.swing.border.EmptyBorder; +import javax.swing.filechooser.FileNameExtensionFilter; + +import org.jvnet.substance.skin.SubstanceRavenGraphiteLookAndFeel; + +@SuppressWarnings("serial") +public class PackerGUI extends JFrame { + + public JPanel contentPane; + public JTextField input = new JTextField(); + public JTextField cacheDir = new JTextField(); + public JTextField musicId = new JTextField(); + public JLabel lblInput = new JLabel("MIDI location:"); + public JLabel lblCacheDir = new JLabel("Cache location:"); + public JLabel lblMusicId = new JLabel("Music Id (has to be an int, e.g 0 is login music):"); + public JFileChooser filePicker = new JFileChooser(); + public JFileChooser filePicker2 = new JFileChooser(); + public JButton btnFilePick, btnFilePick2, btnPack; + + public PackerGUI() { + setTitle("Music Packer/Replacer"); + setSize(450, 400); + 
setLocationRelativeTo(null); + setDefaultCloseOperation(JFrame.EXIT_ON_CLOSE); + contentPane = new JPanel(); + contentPane.setBorder(new EmptyBorder(5, 5, 5, 5)); + setContentPane(contentPane); + contentPane.setLayout(null); + + btnFilePick = new JButton("Browse"); + btnFilePick.setBounds(330, 50, 90, 25); + btnFilePick2 = new JButton("Browse"); + btnFilePick2.setBounds(330, 120, 90, 25); + btnPack = new JButton("Pack to Cache"); + btnPack.setBounds(140, 240, 120, 60); + input.setBounds(25, 50, 300, 30); + cacheDir.setBounds(25, 120, 300, 30); + musicId.setBounds(25, 190, 300, 30); + lblCacheDir.setBounds(25, 90, 300, 30); + lblInput.setBounds(25, 15, 300, 30); + lblMusicId.setBounds(25, 160, 300, 30); + contentPane.add(lblCacheDir); + contentPane.add(cacheDir); + contentPane.add(lblMusicId); + contentPane.add(lblInput); + contentPane.add(btnFilePick); + contentPane.add(btnFilePick2); + contentPane.add(musicId); + contentPane.add(btnPack); + contentPane.add(input); + filePicker.setFileFilter(new FileNameExtensionFilter("MIDI Files", "mid")); + + btnFilePick.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + //filePicker.addChoosableFileFilter(); + filePicker.showOpenDialog(null); + input.setText(filePicker.getSelectedFile().getAbsolutePath()); + } + }); + btnFilePick2.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + filePicker2.setFileSelectionMode(JFileChooser.DIRECTORIES_ONLY); + filePicker2.showOpenDialog(null); + cacheDir.setText(filePicker2.getSelectedFile().getAbsolutePath()); + } + }); + + btnPack.addActionListener(new ActionListener() { + public void actionPerformed(ActionEvent e) { + try { + CachePacker.convertMidi(input.getText().toString(), System.getProperty("user.home") + "/tempout"); + CachePacker.replaceMidi(cacheDir.getText().toString() + "/", Integer.parseInt(musicId.getText().toString()), 0, System.getProperty("user.home") + "/tempout"); + } catch (Exception e1) { + // 
TODO Auto-generated catch block + e1.printStackTrace(); + } + } + }); + } + + public static void main(String[] args) { + JFrame.setDefaultLookAndFeelDecorated(true); + JDialog.setDefaultLookAndFeelDecorated(true); + + EventQueue.invokeLater(new Runnable() { + + public void run() { + try { + UIManager.setLookAndFeel(new SubstanceRavenGraphiteLookAndFeel()); + } catch (UnsupportedLookAndFeelException e) { + // TODO Auto-generated catch block + e.printStackTrace(); + } + new PackerGUI().setVisible(true); + } + + }); + } + +} diff --git a/Tools/Cache Editor/substance-5.3.jar b/Tools/Cache Editor/substance-5.3.jar new file mode 100644 index 000000000..3c6359aa2 Binary files /dev/null and b/Tools/Cache Editor/substance-5.3.jar differ