Here are examples of the Java API org.dynmap.DynmapWorld taken from open-source projects. By voting up you can indicate which examples are most useful and appropriate.
57 Examples
19
View Complete Implementation : MapStorageTile.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
* Abstract clreplaced for instance of a stored map tile
*/
public abstract clreplaced MapStorageTile {
public final DynmapWorld world;
public final MapType map;
public final int x, y;
public final int zoom;
public final MapType.ImageVariant var;
public static clreplaced TileRead {
// Image bytes
public BufferInputStream image;
// Image format
public MapType.ImageEncoding format;
// Image hashcode (-1 = unknown)
public long hashCode;
// Last modified timestamp (-1 = unknown)
public long lastModified;
}
protected MapStorageTile(DynmapWorld world, MapType map, int x, int y, int zoom, MapType.ImageVariant var) {
this.world = world;
this.map = map;
this.x = x;
this.y = y;
this.zoom = zoom;
this.var = var;
}
/**
* Test if given tile exists in the tile storage
* @return true if tile exists, false if not
*/
public abstract boolean exists();
/**
* Test if tile exists and matches given hash code
* @param hash - hash code to test against tile's content
* @return true if tile exists and matches given hash code, false if not
*/
public abstract boolean matchesHashCode(long hash);
/**
* Read tile
*
* @return loaded Tile, or null if not read
*/
public abstract TileRead read();
/**
* Write tile
*
* @param hash - hash code of uncompressed image
* @param encImage - output stream for encoded image
* @return true if write succeeded
*/
public abstract boolean write(long hash, BufferOutputStream encImage);
/**
* Write tile from image
*
* @param hash - hash code of uncompressed image
* @param image - image to be encoded
* @return true if write succeeded
*/
public boolean write(long hash, BufferedImage image) {
BufferOutputStream bos = ImageIOManager.imageIOEncode(image, map.getImageFormat());
if (bos != null) {
return write(hash, bos);
}
return false;
}
/**
* Delete tile
*
* @return true if write succeeded
*/
public boolean delete() {
return write(-1, (BufferOutputStream) null);
}
/**
* Get write lock on tile
* @return true if locked
*/
public abstract boolean getWriteLock();
/**
* Release write lock on tile
*/
public abstract void releaseWriteLock();
/**
* Get read lock on tile
* @param timeout - timeout, in msec (-1 = never)
* @return true if lock acquired, false if not (timeout)
*/
public abstract boolean getReadLock(long timeout);
/**
* Get read lock on tile (indefinite timeout)
* @return true if lock acquired, false if not (timeout)
*/
public boolean getReadLock() {
return getReadLock(-1L);
}
/**
* Release read lock on tile
*/
public abstract void releaseReadLock();
/**
* Cleanup
*/
public abstract void cleanup();
/**
* Get URI for tile (for web interface)
* @return URI for tile
*/
public abstract String getURI();
/**
* Enqueue zoom out update for tile
*/
public abstract void enqueueZoomOutUpdate();
/**
* Get zoom out tile for this tile (next zoom leveL)
* @return zoom out tile
*/
public abstract MapStorageTile getZoomOutTile();
/**
* Equals
*/
@Override
public abstract boolean equals(Object o);
/**
* Hashcode
*/
@Override
public abstract int hashCode();
}
19
View Complete Implementation : MarkerSetImpl.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Checks whether any boost-enabled area or circle marker in this set
 * intersects the given tile square.
 */
final boolean testTileForBoostMarkers(DynmapWorld w, HDPerspective perspective, double tile_x, double tile_y, double tile_dim) {
    boolean hit = (boostingareamarkers != null)
            && boostingareamarkers.values().stream()
                    .anyMatch(m -> m.testTileForBoostMarkers(w, perspective, tile_x, tile_y, tile_dim));
    if (!hit && (boostingcirclemarkers != null)) {
        hit = boostingcirclemarkers.values().stream()
                .anyMatch(m -> m.testTileForBoostMarkers(w, perspective, tile_x, tile_y, tile_dim));
    }
    return hit;
}
18
View Complete Implementation : WPDynmapServer.java
Copyright GNU General Public License v3.0
Author : Captain-Chaos
Copyright GNU General Public License v3.0
Author : Captain-Chaos
/**
 * {@inheritDoc}
 *
 * <p>This implementation always returns {@code null} — no chunk cache is
 * created through this factory path. NOTE(review): presumably the caller
 * tolerates a null cache here; confirm against dynmap's usage.
 */
@Override
public MapChunkCache createMapChunkCache(DynmapWorld w, List<DynmapChunk> chunks, boolean blockdata, boolean highesty, boolean biome, boolean rawbiome) {
    return null;
}
18
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Returns the tile coordinates covering the given block location,
 * delegating entirely to this map's perspective.
 */
@Override
public List<TileFlags.TileCoord> getTileCoords(DynmapWorld w, int x, int y, int z) {
    return perspective.getTileCoords(w, x, y, z);
}
18
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Purges all stored tiles for the given map on the given world; when
 * {@code map} is null, every map defined for the world is purged.
 */
@Override
public void purgeMapTiles(DynmapWorld world, MapType map) {
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processPurgeMapTiles(world, mt, variant);
        }
    }
}
18
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Returns the tile coordinates covering the given block-coordinate volume,
 * delegating entirely to this map's perspective.
 */
@Override
public List<TileFlags.TileCoord> getTileCoords(DynmapWorld w, int minx, int miny, int minz, int maxx, int maxy, int maxz) {
    return perspective.getTileCoords(w, minx, miny, minz, maxx, maxy, maxz);
}
18
View Complete Implementation : DefaultHDLighting.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * {@inheritDoc}
 *
 * <p>Default lighting supplies no custom brightness table: always returns
 * {@code null}.
 */
@Override
public int[] getBrightnessTable(DynmapWorld world) {
    return null;
}
18
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates base (zoom 0) tiles for the given map — or for all of the
 * world's maps when {@code map} is null — invoking the supplied callbacks.
 */
@Override
public void enumMapBaseTiles(DynmapWorld world, MapType map, MapStorageBaseTileEnumCB cbBase, MapStorageTileSearchEndCB cbEnd) {
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, variant, null, cbBase, cbEnd);
        }
    }
}
18
View Complete Implementation : WPMapChunkCache.java
Copyright GNU General Public License v3.0
Author : Captain-Chaos
Copyright GNU General Public License v3.0
Author : Captain-Chaos
/**
* Implementation of {@link MapChunkCache} used by {@link WPDynmapWorld}.
*
* <p>Created by Pepijn Schmitz on 05-06-15.
*/
clreplaced WPMapChunkCache extends MapChunkCache {
WPMapChunkCache(DynmapWorld dmWorld, MinecraftWorld mcWorld) {
this.dmWorld = dmWorld;
this.mcWorld = mcWorld;
}
@Override
public boolean setChunkDataTypes(boolean blockdata, boolean biome, boolean highestblocky, boolean rawbiome) {
return !rawbiome;
}
@Override
public int loadChunks(int maxToLoad) {
return 0;
}
@Override
public boolean isDoneLoading() {
return true;
}
@Override
public boolean isEmpty() {
return false;
}
@Override
public void unloadChunks() {
// Do nothing
}
@Override
public boolean isEmptySection(int sx, int sy, int sz) {
Chunk chunk = mcWorld.getChunk(sx, sz);
if (chunk instanceof MC12AnvilChunk) {
return !((MC12AnvilChunk) chunk).isSectionPresent(sy);
} else {
return chunk == null;
}
}
@Override
public MapIterator gereplacederator(int x, int y, int z) {
return new WPMapIterator(mcWorld, x, y, z);
}
@Override
public void setHiddenFillStyle(HiddenChunkStyle style) {
// Do nothing
}
@Override
public void setVisibleRange(VisibilityLimit limit) {
// Do nothing
}
@Override
public void setHiddenRange(VisibilityLimit limit) {
// Do nothing
}
@Override
public DynmapWorld getWorld() {
return dmWorld;
}
private final DynmapWorld dmWorld;
private final MinecraftWorld mcWorld;
}
18
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Creates a storage-tile handle for the given map/coordinate/zoom/variant.
 */
@Override
public MapStorageTile getTile(DynmapWorld world, MapType map, int x, int y, int zoom, ImageVariant var) {
    return new StorageTile(world, map, x, y, zoom, var);
}
18
View Complete Implementation : DynMapTileProvider.java
Copyright GNU General Public License v3.0
Author : Captain-Chaos
Copyright GNU General Public License v3.0
Author : Captain-Chaos
/**
* A {@link TileProvider} for {@link TiledImageViewer} which provides an
* isometric 3D view of a {@link DynmapWorld} by using dynmap to render the
* view.
*
* <p>Created by Pepijn Schmitz on 08-06-15.
*/
public clreplaced DynMapTileProvider implements TileProvider {
public DynMapTileProvider(DynmapWorld dmWorld) {
this.dmWorld = dmWorld;
refreshMap();
}
@Override
public int getTileSize() {
return 128;
}
@Override
public boolean isTilePresent(int x, int y) {
return true;
}
@Override
public boolean paintTile(Image image, int x, int y, int dx, int dy) {
HDMapTile tile = new HDMapTile(dmWorld, map.getPerspective(), x, -y, 0);
BufferedImage tileImage = rendererRef.get().render(dmWorld.getChunkCache(null), tile);
Graphics2D g2 = (Graphics2D) image.getGraphics();
try {
g2.drawImage(tileImage, dx, dy, null);
} finally {
g2.dispose();
}
return true;
}
@Override
public int getTilePriority(int x, int y) {
return 0;
}
@Override
public Rectangle getExtent() {
return null;
}
@Override
public void addTileListener(TileListener tileListener) {
// Do nothing
}
@Override
public void removeTileListener(TileListener tileListener) {
// Do nothing
}
@Override
public boolean isZoomSupported() {
return true;
}
@Override
public int getZoom() {
return zoom;
}
@Override
public void setZoom(int zoom) {
if (zoom != this.zoom) {
this.zoom = zoom;
scale = MathUtils.pow(2, 4 + zoom);
refreshRenderers();
}
}
private void refreshRenderers() {
rendererRef = new ThreadLocal<DynMapRenderer>() {
@Override
protected DynMapRenderer initialValue() {
return new DynMapRenderer(map.getPerspective(), map, scale, inclination, azimuth);
}
};
}
private void refreshMap() {
Map<String, Object> config = new HashMap<>();
config.put("name", "WorldPainter");
config.put("image-format", "png");
if (caves) {
config.put("shader", "caves");
}
ConfigurationNode configNode = new ConfigurationNode(config);
map = new HDMap(null, configNode);
refreshRenderers();
}
public double getAzimuth() {
return azimuth;
}
public void setAzimuth(double azimuth) {
if (azimuth != this.azimuth) {
if (azimuth == 360.0) {
azimuth = 0.0;
} else if ((azimuth < 0.0) || (azimuth >= 360.0)) {
throw new IllegalArgumentException("Azimuth must be >= 0 and < 360");
}
this.azimuth = azimuth;
refreshRenderers();
}
}
public double getInclination() {
return inclination;
}
public void setInclination(double inclination) {
if (inclination != this.inclination) {
if ((inclination < 30.0) || (inclination > 90.0)) {
throw new IllegalArgumentException("Inclination must be >= 30 and <= 90");
}
this.inclination = inclination;
refreshRenderers();
}
}
public boolean isCaves() {
return caves;
}
public void setCaves(boolean caves) {
if (caves != this.caves) {
this.caves = caves;
refreshMap();
}
}
/**
* Get the bounds (in tiles) of a rectangle which would completely encompreplaced
* a specific 3D volume of the world in the current projection.
*
* @param volume The 3D volume which must be encompreplaceded.
* @return The rectangle in tile coordinates which will completely encompreplaced
* the specified volume.
*/
public Rectangle getBounds(Box volume) {
Rectangle rect = rendererRef.get().getTileCoords(volume.getX1(), volume.getZ1(), volume.getY1(), volume.getX2() + 1, volume.getZ2() + 1, volume.getY2() + 1);
rect.setLocation(rect.x, -rect.y - rect.height);
return rect;
}
private final DynmapWorld dmWorld;
private int zoom;
private boolean caves;
private volatile double inclination = 60.0, azimuth = 135.0;
private volatile int scale = 16;
private volatile HDMap map;
private volatile ThreadLocal<DynMapRenderer> rendererRef;
}
18
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Adds the HD map tile for this map at the given tile coordinates to the list.
 */
@Override
public void addMapTiles(List<MapTile> list, DynmapWorld w, int tx, int ty) {
    list.add(new HDMapTile(w, this.perspective, tx, ty, boostzoom));
}
17
View Complete Implementation : ShadowHDLighting.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Returns the world's own brightness table when configured to use it,
 * otherwise {@code null}.
 */
@Override
public int[] getBrightnessTable(DynmapWorld world) {
    return useWorldBrightnessTable ? world.getBrightnessTable() : null;
}
17
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates base tiles under the world's tile directory for the given map —
 * or for all of the world's maps when {@code map} is null.
 */
@Override
public void enumMapBaseTiles(DynmapWorld world, MapType map, MapStorageBaseTileEnumCB cbBase, MapStorageTileSearchEndCB cbEnd) {
    // Base directory holding this world's tiles
    File base = new File(baseTileDir, world.getName());
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, base, variant, null, cbBase, cbEnd);
        }
    }
}
17
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Purges the on-disk tile tree for one map variant: removes hash files in the
 * world's base directory, then recursively deletes the map's tile directory.
 *
 * @param world - world whose tiles are being purged
 * @param base  - base directory for the world's tiles
 * @param var   - image variant whose suffix selects the map subdirectory
 */
private void processPurgeMapTiles(DynmapWorld world, MapType map, File base, ImageVariant var) {
    // Directory/file name prefix for this map+variant
    String mname = map.getPrefix() + var.variantSuffix;
    // Clean up hash files
    String[] hlist = base.list();
    if (hlist != null) {
        for (String h : hlist) {
            if (h.endsWith(".hash") == false)
                continue;
            // NOTE(review): this SKIPS hash files whose name starts with this
            // map's prefix and deletes the rest — confirm that is intended
            // (deleting the purged map's own hash files would be the opposite test)
            if (h.startsWith(mname + "_"))
                continue;
            File f = new File(base, h);
            f.delete();
        }
    }
    File bdir = new File(base, mname);
    if (bdir.isDirectory() == false)
        return;
    // List to traverse
    LinkedList<File> dirs = new LinkedList<File>();
    // Directories already visited, recorded so they can be deleted afterwards
    LinkedList<File> dirsdone = new LinkedList<File>();
    // Directory for map
    dirs.add(bdir);
    // While more paths to handle
    while (dirs.isEmpty() == false) {
        File dir = dirs.pop();
        dirsdone.add(dir);
        String[] dirlst = dir.list();
        if (dirlst == null)
            continue;
        for (String fn : dirlst) {
            if (fn.equals(".") || fn.equals(".."))
                continue;
            File f = new File(dir, fn);
            if (f.isDirectory()) {
                /* If directory, add to list to process */
                dirs.add(f);
            } else {
                /* Else, file - cleanup */
                f.delete();
            }
        }
    }
    // Clean up directories, in reverse order of traverse
    // (children were recorded after their parents, so reverse order deletes
    // children first, leaving each directory empty when its turn comes)
    int cnt = dirsdone.size();
    for (int i = cnt - 1; i >= 0; i--) {
        File f = dirsdone.get(i);
        f.delete();
    }
}
17
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Purges all stored tiles under the world's tile directory for the given map
 * — or for all of the world's maps when {@code map} is null.
 */
@Override
public void purgeMapTiles(DynmapWorld world, MapType map) {
    // Base directory holding this world's tiles
    File base = new File(baseTileDir, world.getName());
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processPurgeMapTiles(world, mt, base, variant);
        }
    }
}
17
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates all stored tiles for the given map — or for all of the world's
 * maps when {@code map} is null — invoking {@code cb} for each tile found.
 */
@Override
public void enumMapTiles(DynmapWorld world, MapType map, MapStorageTileEnumCB cb) {
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, variant, cb, null, null);
        }
    }
}
17
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Resolves a web-interface tile URI to a storage-tile handle.
 *
 * <p>The first path segment selects the map (prefix plus variant suffix);
 * the last segment encodes the coordinates, optionally preceded by a run of
 * 'z' characters whose length is the zoom level.
 *
 * @return the matching tile, or null if the URI cannot be resolved
 */
@Override
public MapStorageTile getTile(DynmapWorld world, String uri) {
    String[] parts = uri.split("/");
    if (parts.length < 2)
        return null;
    // Map URI - might include variant
    String mapPrefix = parts[0];
    MapType mtype = null;
    ImageVariant variant = null;
    // Find matching map type and image variant
    for (int mi = 0; (mtype == null) && (mi < world.maps.size()); mi++) {
        MapType candidate = world.maps.get(mi);
        ImageVariant[] vars = candidate.getVariants();
        for (int vi = 0; (variant == null) && (vi < vars.length); vi++) {
            if (mapPrefix.equals(candidate.getPrefix() + vars[vi].variantSuffix)) {
                mtype = candidate;
                variant = vars[vi];
            }
        }
    }
    if (mtype == null) {
        // Not found?
        return null;
    }
    // Now, take the last section and parse out coordinates and zoom
    String fname = parts[parts.length - 1];
    String[] tok = fname.split("[_\\.]");
    if (tok.length < 3) {
        // 3 or 4
        return null;
    }
    int zoom = 0;
    try {
        int tx, ty;
        if (tok[0].charAt(0) == 'z') {
            // Zoom level is encoded as the number of leading 'z' characters
            zoom = tok[0].length();
            tx = Integer.parseInt(tok[1]);
            ty = Integer.parseInt(tok[2]);
        } else {
            tx = Integer.parseInt(tok[0]);
            ty = Integer.parseInt(tok[1]);
        }
        return getTile(world, mtype, tx, ty, zoom, variant);
    } catch (NumberFormatException nfx) {
        return null;
    }
}
17
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates all stored tiles for the given map — or for all of the world's
 * maps when {@code map} is null — invoking {@code cb} for each tile found.
 */
@Override
public void enumMapTiles(DynmapWorld world, MapType map, MapStorageTileEnumCB cb) {
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, variant, cb);
        }
    }
}
17
View Complete Implementation : DynmapExpCommands.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Records the command sender's current block position as one corner of the
 * export volume: corner 0 sets the minimum bounds, any other n the maximum.
 *
 * @return true (command handled)
 */
private boolean handlePosN(DynmapCommandSender sender, String[] args, ExportContext ctx, DynmapCore core, int n) {
    if (!(sender instanceof DynmapPlayer)) {
        // Not a player
        sender.sendMessage("Only usable by player");
        return true;
    }
    DynmapLocation loc = ((DynmapPlayer) sender).getLocation();
    DynmapWorld world = (loc == null) ? null : core.getWorld(loc.world);
    if (world == null) {
        sender.sendMessage("Location not found for player");
        return true;
    }
    int bx = (int) Math.floor(loc.x);
    int by = (int) Math.floor(loc.y);
    int bz = (int) Math.floor(loc.z);
    if (n == 0) {
        ctx.xmin = bx;
        ctx.ymin = by;
        ctx.zmin = bz;
    } else {
        ctx.xmax = bx;
        ctx.ymax = by;
        ctx.zmax = bz;
    }
    ctx.world = world.getName();
    return handleInfo(sender, args, ctx, core);
}
17
View Complete Implementation : DynmapExpCommands.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Sets the export volume to a square of the given radius (default 16) around
 * the command sender's position, spanning the world's full height.
 *
 * @return true (command handled)
 */
private boolean handleRadius(DynmapCommandSender sender, String[] args, ExportContext ctx, DynmapCore core) {
    if (!(sender instanceof DynmapPlayer)) {
        // Not a player
        sender.sendMessage("Only usable by player");
        return true;
    }
    DynmapLocation loc = ((DynmapPlayer) sender).getLocation();
    DynmapWorld world = (loc == null) ? null : core.getWorld(loc.world);
    if (world == null) {
        sender.sendMessage("Location not found for player");
        return true;
    }
    int radius = 16;
    if (args.length >= 2) {
        try {
            radius = Integer.parseInt(args[1]);
        } catch (NumberFormatException nfx) {
            sender.sendMessage("Invalid radius - " + args[1]);
            return true;
        }
        if (radius < 0) {
            sender.sendMessage("Invalid radius - " + args[1]);
            return true;
        }
    }
    ctx.xmin = (int) Math.floor(loc.x) - radius;
    ctx.xmax = (int) Math.ceil(loc.x) + radius;
    ctx.zmin = (int) Math.floor(loc.z) - radius;
    ctx.zmax = (int) Math.ceil(loc.z) + radius;
    ctx.ymin = 0;
    ctx.ymax = world.worldheight - 1;
    ctx.world = world.getName();
    return handleInfo(sender, args, ctx, core);
}
17
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Populates the client-side JSON configuration for this HD map and appends it
 * to the world object's "maps" array. (Scraper corruption fixed:
 * "replacedle" restored to "title" — both the JSON key and the field name.)
 */
@Override
public void buildClientConfiguration(JSONObject worldObject, DynmapWorld world) {
    JSONObject o = new JSONObject();
    s(o, "type", "HDMapType");
    s(o, "name", name);
    s(o, "title", title);
    s(o, "icon", icon);
    s(o, "prefix", prefix);
    s(o, "background", bg_cfg);
    s(o, "backgroundday", bg_day_cfg);
    s(o, "backgroundnight", bg_night_cfg);
    s(o, "bigmap", true);
    // Total zoom-out depth combines the world's extra levels with this map's own
    s(o, "mapzoomout", (world.getExtraZoomOutLevels() + mapzoomout));
    s(o, "mapzoomin", mapzoomin);
    s(o, "boostzoom", boostzoom);
    s(o, "protected", isProtected());
    s(o, "image-format", imgformat.getFileExt());
    if (append_to_world.length() > 0)
        s(o, "append_to_world", append_to_world);
    perspective.addClientConfiguration(o);
    shader.addClientConfiguration(o);
    lighting.addClientConfiguration(o);
    a(worldObject, "maps", o);
}
16
View Complete Implementation : DynmapListenerManager.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Dispatches a world event to every registered {@code WorldEventListener} for
 * the given event type; a listener that throws is logged and skipped.
 */
public void processWorldEvent(EventType type, DynmapWorld w) {
    ArrayList<EventListener> registered = listeners.get(type);
    if (registered == null)
        return;
    final int count = registered.size();
    for (int idx = 0; idx < count; idx++) {
        EventListener listener = registered.get(idx);
        if (!(listener instanceof WorldEventListener))
            continue;
        try {
            ((WorldEventListener) listener).worldEvent(w);
        } catch (Throwable t) {
            Log.warning("processWorldEvent(" + type + "," + w + ") - exception", t);
        }
    }
}
16
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/** Get maps rendered concurrently with this map in this world. */
public List<MapType> getMapsSharingRender(DynmapWorld w) {
    ArrayList<MapType> result = new ArrayList<MapType>();
    for (MapType mt : w.maps) {
        if (!(mt instanceof HDMap))
            continue;
        HDMap other = (HDMap) mt;
        // Same perspective and boost zoom => rendered in the same pass
        if ((other.perspective == this.perspective) && (other.boostzoom == this.boostzoom)) {
            result.add(other);
        }
    }
    return result;
}
16
View Complete Implementation : HDMapManager.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Returns (computing and caching on first use) the data-needed flags for the
 * tile's world/perspective pair, OR-ed across every HD map sharing that
 * perspective.
 */
private boolean[] getCachedFlags(HDMapTile t) {
    final String worldName = t.getDynmapWorld().getName();
    final String key = worldName + "/" + t.perspective.getName();
    boolean[] cached = cached_data_flags_by_world_perspective.get(key);
    if (cached != null)
        return cached;
    final boolean[] flags = new boolean[4];
    cached_data_flags_by_world_perspective.put(key, flags);
    DynmapWorld dw = MapManager.mapman.worldsLookup.get(worldName);
    if (dw == null)
        return flags;
    for (MapType map : dw.maps) {
        if (!(map instanceof HDMap))
            continue;
        HDMap hdmap = (HDMap) map;
        if (hdmap.getPerspective() != t.perspective)
            continue;
        HDShader shader = hdmap.getShader();
        HDLighting lighting = hdmap.getLighting();
        // Each flag is true if any map's shader or lighting needs that data
        flags[BIOMEDATAFLAG] |= shader.isBiomeDataNeeded() | lighting.isBiomeDataNeeded();
        flags[HIGHESTZFLAG] |= shader.isHightestBlockYDataNeeded() | lighting.isHightestBlockYDataNeeded();
        flags[RAWBIOMEFLAG] |= shader.isRawBiomeDataNeeded() | lighting.isRawBiomeDataNeeded();
        flags[BLOCKTYPEFLAG] |= shader.isBlockTypeDataNeeded() | lighting.isBlockTypeDataNeeded();
    }
    return flags;
}
16
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates all stored tiles under the world's tile directory for the given
 * map — or for all of the world's maps when {@code map} is null.
 */
@Override
public void enumMapTiles(DynmapWorld world, MapType map, MapStorageTileEnumCB cb) {
    // Base directory holding this world's tiles
    File base = new File(baseTileDir, world.getName());
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, base, variant, cb, null, null);
        }
    }
}
16
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Deletes every tile row for the given world/map/variant from the tiles
 * table; SQL failures are logged and the connection flagged as failed.
 */
private void processPurgeMapTiles(DynmapWorld world, MapType map, ImageVariant var) {
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null)
        return;
    Connection conn = null;
    boolean failed = false;
    try {
        conn = getConnection();
        // Drop all tile rows belonging to this map key
        Statement stmt = conn.createStatement();
        stmt.executeUpdate("DELETE FROM " + tableTiles + " WHERE MapID=" + mapkey + ";");
        stmt.close();
    } catch (SQLException x) {
        Log.severe("Tile purge error - " + x.getMessage());
        failed = true;
    } finally {
        releaseConnection(conn, failed);
    }
}
16
View Complete Implementation : SQLiteMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Deletes every tile row for the given world/map/variant from the Tiles
 * table; SQL failures are logged and the connection flagged as failed.
 */
private void processPurgeMapTiles(DynmapWorld world, MapType map, ImageVariant var) {
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null)
        return;
    Connection conn = null;
    boolean failed = false;
    try {
        conn = getConnection();
        // Drop all tile rows belonging to this map key
        Statement stmt = conn.createStatement();
        doExecuteUpdate(stmt, "DELETE FROM Tiles WHERE MapID=" + mapkey + ";");
        stmt.close();
    } catch (SQLException x) {
        Log.severe("Tile purge error - " + x.getMessage());
        failed = true;
    } finally {
        releaseConnection(conn, failed);
    }
}
16
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerates all stored tiles under the world's tile directory for the given
 * map — or for all of the world's maps when {@code map} is null.
 */
@Override
public void enumMapTiles(DynmapWorld world, MapType map, MapStorageTileEnumCB cb) {
    // Base directory holding this world's tiles
    File base = new File(baseTileDir, world.getName());
    // Single map if specified, otherwise all of the world's maps
    List<MapType> targets = (map != null)
            ? Collections.singletonList(map)
            : new ArrayList<MapType>(world.maps);
    for (MapType mt : targets) {
        for (ImageVariant variant : mt.getVariants()) {
            processEnumMapTiles(world, mt, base, variant, cb);
        }
    }
}
15
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/** Get names of maps rendered concurrently with this map type in this world. */
public List<String> getMapNamesSharingRender(DynmapWorld w) {
    ArrayList<String> names = new ArrayList<String>();
    for (MapType mt : w.maps) {
        if (!(mt instanceof HDMap))
            continue;
        HDMap other = (HDMap) mt;
        if ((other.perspective != this.perspective) || (other.boostzoom != this.boostzoom))
            continue;
        // Same perspective: tag night/day-enabled maps in the listing
        names.add(other.lighting.isNightAndDayEnabled()
                ? other.getName() + "(night/day)"
                : other.getName());
    }
    return names;
}
15
View Complete Implementation : HDMap.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Purges stale tiles for this map after a render pass.
 *
 * <p>Walks every stored tile: tiles in the wrong image format are deleted;
 * zoom-1 tiles whose four zoom-0 children were not re-rendered get a zoom-out
 * update queued; zoom-0 tiles not marked in {@code rendered} are deleted and
 * an update pushed. Tiles at zoom &gt; 1 are left untouched here — presumably
 * they are handled via the queued zoom-out updates (confirm against the
 * zoom-out processing code).
 *
 * @param world - world whose storage is walked
 * @param rendered - flags marking which zoom-0 tiles were just rendered
 */
public void purgeOldTiles(final DynmapWorld world, final TileFlags rendered) {
    final MapStorage ms = world.getMapStorage();
    ms.enumMapTiles(world, this, new MapStorageTileEnumCB() {
        @Override
        public void tileFound(MapStorageTile tile, ImageEncoding fmt) {
            if (fmt != getImageFormat().getEncoding()) {
                // Wrong format? toss it
                /* Otherwise, delete tile */
                tile.delete();
            } else if (tile.zoom == 1) {
                // First tier zoom? sensitive to newly rendered tiles
                // If any were rendered, already triggered (and still needed
                if (rendered.getFlag(tile.x, tile.y) || rendered.getFlag(tile.x + 1, tile.y) || rendered.getFlag(tile.x, tile.y - 1) || rendered.getFlag(tile.x + 1, tile.y - 1)) {
                    return;
                }
                tile.enqueueZoomOutUpdate();
            } else if (tile.zoom == 0) {
                if (rendered.getFlag(tile.x, tile.y)) {
                    /* If we rendered this tile, its good */
                    return;
                }
                /* Otherwise, delete tile */
                tile.delete();
                /* Push updates, clear hash code, and signal zoom tile update */
                MapManager.mapman.pushUpdate(world, new Client.Tile(tile.getURI()));
                tile.enqueueZoomOutUpdate();
            }
        }
    });
}
14
View Complete Implementation : DynmapExpCommands.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Handles the export "set" subcommand: with no arguments it reports the
 * current bounds; otherwise it consumes name/value pairs updating the export
 * context (bounds, world, shader, grouping flags).
 *
 * @return true (command handled)
 */
private boolean handleSetExport(DynmapCommandSender sender, String[] args, ExportContext ctx, DynmapCore core) {
    if (args.length < 3) {
        // No settings supplied: just report the current bounds
        sender.sendMessage(String.format("Bounds: <%s,%s,%s> - <%s,%s,%s> on world '%s'", val(ctx.xmin), val(ctx.ymin), val(ctx.zmin), val(ctx.xmax), val(ctx.ymax), val(ctx.zmax), ctx.world));
        return true;
    }
    // Settings arrive as name/value pairs starting at args[1]
    for (int i = 1; i < (args.length - 1); i += 2) {
        String setting = args[i];
        String value = args[i + 1];
        try {
            switch (setting) {
                case "x0":
                    ctx.xmin = Integer.parseInt(value);
                    break;
                case "x1":
                    ctx.xmax = Integer.parseInt(value);
                    break;
                case "y0":
                    ctx.ymin = Integer.parseInt(value);
                    break;
                case "y1":
                    ctx.ymax = Integer.parseInt(value);
                    break;
                case "z0":
                    ctx.zmin = Integer.parseInt(value);
                    break;
                case "z1":
                    ctx.zmax = Integer.parseInt(value);
                    break;
                case "world": {
                    DynmapWorld w = core.getWorld(value);
                    if (w == null) {
                        sender.sendMessage("Invalid world '" + value + "'");
                        return true;
                    }
                    ctx.world = value;
                    break;
                }
                case "shader": {
                    HDShader s = MapManager.mapman.hdmapman.shaders.get(value);
                    if (s == null) {
                        sender.sendMessage("Unknown shader '" + value + "'");
                        return true;
                    }
                    ctx.shader = value;
                    break;
                }
                case "byChunk":
                    ctx.groupByChunk = value.equalsIgnoreCase("true");
                    break;
                case "byBlockID":
                    ctx.groupByBlockID = value.equalsIgnoreCase("true");
                    break;
                case "byBlockIDData":
                    ctx.groupByBlockIDData = value.equalsIgnoreCase("true");
                    break;
                case "byTexture":
                    ctx.groupByTexture = value.equalsIgnoreCase("true");
                    break;
                default:
                    // Unknown setting
                    sender.sendMessage("Unknown setting '" + setting + "'");
                    return true;
            }
        } catch (NumberFormatException nfx) {
            sender.sendMessage("Invalid value for '" + setting + "' - " + value);
            return true;
        }
    }
    return handleInfo(sender, args, ctx, core);
}
14
View Complete Implementation : HDMapManager.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Initialize shader states for all shaders for the given tile.
 *
 * @param tile - tile to init
 * @param cache - chunk cache
 * @param mapiter - map iterator
 * @param mapname - map name (null = all maps)
 * @param scale - map scale
 * @return array of shader states for all associated shaders
 */
public HDShaderState[] getShaderStateForTile(HDMapTile tile, MapChunkCache cache, MapIterator mapiter, String mapname, int scale) {
    DynmapWorld w = MapManager.mapman.worldsLookup.get(tile.getDynmapWorld().getName());
    if (w == null)
        return new HDShaderState[0];
    ArrayList<HDShaderState> states = new ArrayList<HDShaderState>();
    for (MapType map : w.maps) {
        if (!(map instanceof HDMap))
            continue;
        HDMap hdmap = (HDMap) map;
        // Only maps sharing this tile's perspective and boost zoom apply
        if ((hdmap.getPerspective() != tile.perspective) || (hdmap.getBoostZoom() != tile.boostzoom))
            continue;
        /* If limited to one map, and this isn't it, skip */
        if ((mapname != null) && !hdmap.getName().equals(mapname))
            continue;
        states.add(hdmap.getShader().getStateInstance(hdmap, cache, mapiter, scale));
    }
    return states.toArray(new HDShaderState[states.size()]);
}
14
View Complete Implementation : AreaMarkerImpl.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Test whether the given map tile intersects this area marker, in the map
 * coordinate space of the given perspective. Used to decide whether a tile
 * needs boost-zoom rendering.
 *
 * A per-perspective BoundingBox (axis-aligned extent plus the projected
 * polygon corners) is computed lazily and cached in bb_cache, keyed by
 * perspective name.
 *
 * @param w - world of the marker (unused in the visible computation here)
 * @param perspective - perspective used to project world coords to map coords
 * @param tile_x - map-space X of the tile's lower corner
 * @param tile_y - map-space Y of the tile's lower corner
 * @param tile_dim - tile edge length in map-space units
 * @return true if the tile and the marker's projected polygon overlap
 *         (corner-containment tests only; pure edge crossings without any
 *         contained corner are not detected - see commented-out edge test
 *         at the bottom)
 */
final boolean testTileForBoostMarkers(DynmapWorld w, HDPerspective perspective, final double tile_x, final double tile_y, final double tile_dim) {
    // Work on a local snapshot of the cache map; rebuild if it was invalidated
    Map<String, BoundingBox> bbc = bb_cache;
    if (bbc == null) {
        bbc = new ConcurrentHashMap<String, BoundingBox>();
    }
    BoundingBox bb = bbc.get(perspective.getName());
    if (bb == null) {
        // No cached bounding box, so generate it
        bb = new BoundingBox();
        Vector3D v = new Vector3D();
        Vector3D v2 = new Vector3D();
        // Start extents inverted so the first point initializes them
        bb.xmin = Double.MAX_VALUE;
        bb.xmax = -Double.MAX_VALUE;
        bb.ymin = Double.MAX_VALUE;
        bb.ymax = -Double.MAX_VALUE;
        if (corners != null) {
            ArrayList<Coord> crn = corners;
            int cnt = crn.size();
            if (cnt == 2) {
                // Special case: two corners define an axis-aligned rectangle;
                // expand to its four corners so the polygon tests work
                cnt = 4;
                crn = new ArrayList<Coord>();
                Coord c0 = corners.get(0);
                Coord c1 = corners.get(1);
                crn.add(c0);
                crn.add(new Coord(c0.x, c1.z));
                crn.add(c1);
                crn.add(new Coord(c1.x, c0.z));
            }
            // Project at the marker's vertical midpoint
            double ymid = (this.ytop + this.ybottom) / 2.0;
            bb.xp = new double[cnt];
            bb.yp = new double[cnt];
            for (int i = 0; i < cnt; i++) {
                Coord c = crn.get(i);
                // get coords of point, in world coord
                v.x = c.x;
                v.y = ymid;
                v.z = c.z;
                // Transform to map coord
                perspective.transformWorldToMapCoord(v, v2);
                // Grow the axis-aligned extents
                if (v2.x < bb.xmin)
                    bb.xmin = v2.x;
                if (v2.y < bb.ymin)
                    bb.ymin = v2.y;
                if (v2.x > bb.xmax)
                    bb.xmax = v2.x;
                if (v2.y > bb.ymax)
                    bb.ymax = v2.y;
                // Record the projected polygon corner
                bb.xp[i] = v2.x;
                bb.yp[i] = v2.y;
            }
        }
        // System.out.println("x=" + bb.xmin + " - " + bb.xmax + ", y=" + bb.ymin + " - " + bb.ymax);
        bbc.put(perspective.getName(), bb);
        bb_cache = bbc;
    }
    final double tile_x2 = tile_x + tile_dim;
    final double tile_y2 = tile_y + tile_dim;
    // Coarse reject: no extent overlap. Note: when corners was null above,
    // xmin stays Double.MAX_VALUE so this test always rejects, which also
    // avoids touching the unset bb.xp/bb.yp arrays below.
    if ((bb.xmin > tile_x2) || (bb.xmax < tile_x) || (bb.ymin > tile_y2) || (bb.ymax < tile_y)) {
        // System.out.println("tile: " + tile_x + " / " + tile_y + " - miss");
        return false;
    }
    final int cnt = bb.xp.length;
    final double[] px = bb.xp;
    final double[] py = bb.yp;
    /* Now see if tile square intersects polygon - start with seeing if any point inside */
    if (MarkerImpl.testPointInPolygon(tile_x, tile_y, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x2, tile_y, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x, tile_y2, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x2, tile_y2, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    /* Test if any polygon corners are inside square */
    for (int i = 0; i < cnt; i++) {
        if ((px[i] >= tile_x) && (px[i] <= tile_x2) && (py[i] >= tile_y) && (py[i] <= tile_y2)) {
            // If poly corner inside tile, we intersect
            return true;
        }
    }
    // Otherwise, only intersects if at least one edge crosses
    // NOTE(review): the edge-crossing test below was never finished, so a
    // polygon edge passing through the tile without any contained corner is
    // reported as a miss.
    // for (int i = 0, j = cnt-1; i < cnt; j = i++) {
    // // Test for X=tile_x side
    // if ((px[i] < tile_x) && (px[j] >= tile_x) && ()
    // }
    // System.out.println("tile: " + tile_x + " / " + tile_y + " - hit");
    return false;
}
14
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Walk the file tree under the map/variant directory and invoke the given
 * callbacks for each stored tile file found.
 *
 * Tile filenames are "<x>_<y>.<ext>" for base tiles, with zoomed-out tiles
 * prefixed by one 'z' per zoom level (e.g. "zz_1_2.png" = zoom 2).
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param base - base directory of the world's tile tree
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found (may be null)
 * @param cbBase - invoked only for zoom-0 tiles (may be null)
 * @param cbEnd - invoked when the search ends, including the no-directory case (may be null)
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, File base, ImageVariant var, MapStorageTileEnumCB cb, MapStorageBaseTileEnumCB cbBase, MapStorageTileSearchEndCB cbEnd) {
    File bdir = new File(base, map.getPrefix() + var.variantSuffix);
    if (bdir.isDirectory() == false) {
        if (cbEnd != null)
            cbEnd.searchEnded();
        return;
    }
    // List to traverse (used as a FIFO queue: add at tail, pop from head)
    LinkedList<File> dirs = new LinkedList<File>();
    // Directory for map
    dirs.add(bdir);
    // While more paths to handle
    while (dirs.isEmpty() == false) {
        File dir = dirs.pop();
        String[] dirlst = dir.list();
        if (dirlst == null)
            continue;
        for (String fn : dirlst) {
            if (fn.equals(".") || fn.equals(".."))
                continue;
            File f = new File(dir, fn);
            if (f.isDirectory()) {
                /* If directory, add to list to process */
                dirs.add(f);
            } else {
                /* Else, file - see if tile */
                // Split off the extension; it encodes the image format
                String ext = null;
                int extoff = fn.lastIndexOf('.');
                if (extoff >= 0) {
                    ext = fn.substring(extoff + 1);
                    fn = fn.substring(0, extoff);
                }
                ImageEncoding fmt = ImageEncoding.fromExt(ext);
                if (fmt == null) {
                    // Not a recognized image format - not a tile
                    continue;
                }
                // See if zoom tile: count leading 'z' characters
                int zoom = 0;
                if (fn.startsWith("z")) {
                    while (fn.startsWith("z")) {
                        fn = fn.substring(1);
                        zoom++;
                    }
                    // Drop the separator between the zoom prefix and coords
                    if (fn.startsWith("_")) {
                        fn = fn.substring(1);
                    }
                }
                // Split remainder to get coords
                String[] coord = fn.split("_");
                if (coord.length == 2) {
                    // Must be 2 to be a tile
                    try {
                        int x = Integer.parseInt(coord[0]);
                        int y = Integer.parseInt(coord[1]);
                        // Invoke callback
                        MapStorageTile t = new StorageTile(world, map, x, y, zoom, var);
                        if (cb != null)
                            cb.tileFound(t, fmt);
                        if (cbBase != null && t.zoom == 0)
                            cbBase.tileFound(t, fmt);
                        t.cleanup();
                    } catch (NumberFormatException nfx) {
                        // Non-numeric coords: file is not a tile - skip silently
                    }
                }
            }
        }
    }
    if (cbEnd != null) {
        cbEnd.searchEnded();
    }
}
14
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerate all stored tiles for the given world/map/variant, invoking the
 * supplied callbacks for each tile row found in the tiles table.
 *
 * Fix: the Statement and ResultSet were previously closed only on the
 * success path and leaked whenever the query or a callback threw; they are
 * now managed by try-with-resources and closed on all paths.
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found (may be null)
 * @param cbBase - invoked only for zoom-0 tiles (may be null)
 * @param cbEnd - invoked when the search completes without error (may be null)
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, ImageVariant var, MapStorageTileEnumCB cb, MapStorageBaseTileEnumCB cbBase, MapStorageTileSearchEndCB cbEnd) {
    Connection c = null;
    boolean err = false;
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null) {
        // Unknown map: nothing to enumerate, but still signal completion
        if (cbEnd != null)
            cbEnd.searchEnded();
        return;
    }
    try {
        c = getConnection();
        // Query tiles for given mapkey; mapkey is an Integer, so no injection risk here
        try (Statement stmt = c.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT x,y,zoom,Format FROM " + tableTiles + " WHERE MapID=" + mapkey + ";")) {
            while (rs.next()) {
                StorageTile st = new StorageTile(world, map, rs.getInt("x"), rs.getInt("y"), rs.getInt("zoom"), var);
                final MapType.ImageEncoding encoding = MapType.ImageEncoding.fromOrd(rs.getInt("Format"));
                if (cb != null)
                    cb.tileFound(st, encoding);
                if (cbBase != null && st.zoom == 0)
                    cbBase.tileFound(st, encoding);
                st.cleanup();
            }
        }
        // Only report a clean end of search if the query succeeded
        if (cbEnd != null)
            cbEnd.searchEnded();
    } catch (SQLException x) {
        Log.severe("Tile enum error - " + x.getMessage());
        err = true;
    } finally {
        releaseConnection(c, err);
    }
}
14
View Complete Implementation : SQLiteMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Look up (or allocate) the numeric key for a world/map/variant triple in
 * the Maps table, caching results in the mapKey map.
 *
 * Fix: the PreparedStatements and ResultSet were previously closed only on
 * the success path and leaked on any SQLException; they are now managed by
 * try-with-resources and closed on all paths.
 *
 * @param w - world
 * @param mt - map
 * @param var - image variant
 * @return key for the triple, or null if lookup/allocation failed
 */
private Integer getMapKey(DynmapWorld w, MapType mt, ImageVariant var) {
    String id = w.getName() + ":" + mt.getPrefix() + ":" + var.toString();
    synchronized (mapKey) {
        Integer k = mapKey.get(id);
        if (k == null) {
            // No hit: new value so we need to add it to table
            Connection c = null;
            boolean err = false;
            try {
                c = getConnection();
                // Insert row for the new triple
                try (PreparedStatement stmt = c.prepareStatement("INSERT INTO Maps (WorldID,MapID,Variant) VALUES (?, ?, ?);")) {
                    stmt.setString(1, w.getName());
                    stmt.setString(2, mt.getPrefix());
                    stmt.setString(3, var.toString());
                    doExecuteUpdate(stmt);
                }
                // Query the key that the database assigned
                try (PreparedStatement stmt = c.prepareStatement("SELECT ID FROM Maps WHERE WorldID = ? AND MapID = ? AND Variant = ?;")) {
                    stmt.setString(1, w.getName());
                    stmt.setString(2, mt.getPrefix());
                    stmt.setString(3, var.toString());
                    try (ResultSet rs = doExecuteQuery(stmt)) {
                        if (rs.next()) {
                            k = rs.getInt("ID");
                            mapKey.put(id, k);
                        }
                    }
                }
            } catch (SQLException x) {
                Log.severe("Error updating Maps table - " + x.getMessage());
                err = true;
            } finally {
                releaseConnection(c, err);
            }
        }
        // Still null if the insert/select failed
        return k;
    }
}
14
View Complete Implementation : SQLiteMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerate all stored tiles for the given world/map/variant, invoking the
 * supplied callbacks for each tile row found in the Tiles table.
 *
 * Fix: the Statement and ResultSet were previously closed only on the
 * success path and leaked whenever the query or a callback threw; they are
 * now managed by try-with-resources and closed on all paths.
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found (may be null)
 * @param cbBase - invoked only for zoom-0 tiles (may be null)
 * @param cbEnd - invoked when the search completes without error (may be null)
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, ImageVariant var, MapStorageTileEnumCB cb, MapStorageBaseTileEnumCB cbBase, MapStorageTileSearchEndCB cbEnd) {
    Connection c = null;
    boolean err = false;
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null) {
        // Unknown map: nothing to enumerate, but still signal completion
        if (cbEnd != null)
            cbEnd.searchEnded();
        return;
    }
    try {
        c = getConnection();
        // Query tiles for given mapkey; mapkey is an Integer, so no injection risk here
        try (Statement stmt = c.createStatement();
             ResultSet rs = doExecuteQuery(stmt, "SELECT x,y,zoom,Format FROM Tiles WHERE MapID=" + mapkey + ";")) {
            while (rs.next()) {
                StorageTile st = new StorageTile(world, map, rs.getInt("x"), rs.getInt("y"), rs.getInt("zoom"), var);
                final MapType.ImageEncoding encoding = MapType.ImageEncoding.fromOrd(rs.getInt("Format"));
                if (cb != null)
                    cb.tileFound(st, encoding);
                if (cbBase != null && st.zoom == 0)
                    cbBase.tileFound(st, encoding);
                st.cleanup();
            }
        }
        // Only report a clean end of search if the query succeeded
        if (cbEnd != null)
            cbEnd.searchEnded();
    } catch (SQLException x) {
        Log.severe("Tile enum error - " + x.getMessage());
        err = true;
    } finally {
        releaseConnection(c, err);
    }
}
14
View Complete Implementation : FileTreeMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Walk the file tree under the map/variant directory and invoke the given
 * callback for each stored tile file found.
 *
 * Tile filenames are "<x>_<y>.<ext>" for base tiles, with zoomed-out tiles
 * prefixed by one 'z' per zoom level (e.g. "zz_1_2.png" = zoom 2).
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param base - base directory of the world's tile tree
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, File base, ImageVariant var, MapStorageTileEnumCB cb) {
    File mapdir = new File(base, map.getPrefix() + var.variantSuffix);
    if (!mapdir.isDirectory())
        return;
    // FIFO work queue of directories left to scan
    LinkedList<File> pending = new LinkedList<File>();
    pending.add(mapdir);
    while (!pending.isEmpty()) {
        File cur = pending.pop();
        String[] entries = cur.list();
        if (entries == null)
            continue;
        for (String name : entries) {
            if (name.equals(".") || name.equals(".."))
                continue;
            File entry = new File(cur, name);
            if (entry.isDirectory()) {
                // Subdirectory: queue it for a later pass
                pending.add(entry);
                continue;
            }
            // Plain file: split off the extension, which encodes the format
            String ext = null;
            int dot = name.lastIndexOf('.');
            if (dot >= 0) {
                ext = name.substring(dot + 1);
                name = name.substring(0, dot);
            }
            ImageEncoding enc = ImageEncoding.fromExt(ext);
            if (enc == null)
                continue;
            // Leading 'z' characters encode the zoom-out level
            int zoomlevel = 0;
            while (name.startsWith("z")) {
                name = name.substring(1);
                zoomlevel++;
            }
            // Drop the separator between the zoom prefix and the coords
            if (zoomlevel > 0 && name.startsWith("_")) {
                name = name.substring(1);
            }
            // Remainder must be exactly "<x>_<y>" to be a tile
            String[] parts = name.split("_");
            if (parts.length != 2)
                continue;
            try {
                int tx = Integer.parseInt(parts[0]);
                int ty = Integer.parseInt(parts[1]);
                MapStorageTile tile = new StorageTile(world, map, tx, ty, zoomlevel, var);
                cb.tileFound(tile, enc);
                tile.cleanup();
            } catch (NumberFormatException nfx) {
                // Non-numeric coords: not a tile file - skip silently
            }
        }
    }
}
14
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerate all stored tiles for the given world/map/variant, invoking the
 * callback for each tile row found in the tiles table.
 *
 * Fix: the Statement and ResultSet were previously closed only on the
 * success path and leaked whenever the query or the callback threw; they
 * are now managed by try-with-resources and closed on all paths.
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, ImageVariant var, MapStorageTileEnumCB cb) {
    Connection c = null;
    boolean err = false;
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null)
        return;
    try {
        c = getConnection();
        // Query tiles for given mapkey; mapkey is an Integer, so no injection risk here
        try (Statement stmt = c.createStatement();
             ResultSet rs = stmt.executeQuery("SELECT x,y,zoom,Format FROM " + tableTiles + " WHERE MapID=" + mapkey + ";")) {
            while (rs.next()) {
                StorageTile st = new StorageTile(world, map, rs.getInt("x"), rs.getInt("y"), rs.getInt("zoom"), var);
                cb.tileFound(st, MapType.ImageEncoding.fromOrd(rs.getInt("Format")));
                st.cleanup();
            }
        }
    } catch (SQLException x) {
        Log.severe("Tile enum error - " + x.getMessage());
        err = true;
    } finally {
        releaseConnection(c, err);
    }
}
14
View Complete Implementation : SQLiteMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Enumerate all stored tiles for the given world/map/variant, invoking the
 * callback for each tile row found in the Tiles table.
 *
 * Fix: the Statement and ResultSet were previously closed only on the
 * success path and leaked whenever the query or the callback threw; they
 * are now managed by try-with-resources and closed on all paths.
 *
 * @param world - world owning the tiles
 * @param map - map owning the tiles
 * @param var - image variant to enumerate
 * @param cb - invoked for every tile found
 */
private void processEnumMapTiles(DynmapWorld world, MapType map, ImageVariant var, MapStorageTileEnumCB cb) {
    Connection c = null;
    boolean err = false;
    Integer mapkey = getMapKey(world, map, var);
    if (mapkey == null)
        return;
    try {
        c = getConnection();
        // Query tiles for given mapkey; mapkey is an Integer, so no injection risk here
        try (Statement stmt = c.createStatement();
             ResultSet rs = doExecuteQuery(stmt, "SELECT x,y,zoom,Format FROM Tiles WHERE MapID=" + mapkey + ";")) {
            while (rs.next()) {
                StorageTile st = new StorageTile(world, map, rs.getInt("x"), rs.getInt("y"), rs.getInt("zoom"), var);
                cb.tileFound(st, MapType.ImageEncoding.fromOrd(rs.getInt("Format")));
                st.cleanup();
            }
        }
    } catch (SQLException x) {
        Log.severe("Tile enum error - " + x.getMessage());
        err = true;
    } finally {
        releaseConnection(c, err);
    }
}
13
View Complete Implementation : DynMapRenderer.java
Copyright GNU General Public License v3.0
Author : Captain-Chaos
Copyright GNU General Public License v3.0
Author : Captain-Chaos
/**
 * Render the tile. Note that the returned image is only valid until the
 * next invocation of this method.
 *
 * Casts one ray per output pixel from the map plane down through the world
 * and lets the map's shader accumulate a color along the ray. Writes ARGB
 * values into the shared argb_buf backing buf_img.
 *
 * NOTE(review): not thread-safe - argb_buf/buf_img are reused across calls.
 *
 * @param cache The chunk cache from which to retrieve the world data to
 * render.
 * @param tile The tile to render.
 * @return The rendered tile. <strong>Note:</strong> only valid until the
 * next invocation of this method!
 */
BufferedImage render(MapChunkCache cache, HDMapTile tile) {
    Color rslt = new Color();
    MapIterator mapiter = cache.gereplacederator(0, 0, 0);
    DynmapWorld world = tile.getDynmapWorld();
    // Boost-zoom scaling: render at higher resolution if a boost marker overlaps this tile
    int scaled = 0;
    if ((tile.boostzoom > 0) && MarkerAPIImpl.testTileForBoostMarkers(cache.getWorld(), perspective, tile.tx * TILE_WIDTH, tile.ty * TILE_HEIGHT, TILE_WIDTH)) {
        scaled = tile.boostzoom;
    }
    // Pixels per map unit multiplier (power of two)
    int sizescale = 1 << scaled;
    /* Build shader state object for each shader */
    HDShaderState shaderstate = map.getShader().getStateInstance(map, cache, mapiter, sizescale * basemodscale);
    /* Check if nether world */
    boolean isnether = world.isNether();
    // Mark the tiles we're going to render as validated
    MapTypeState mts = world.getMapState(map);
    if (mts != null) {
        mts.validateTile(tile.tx, tile.ty);
    }
    /* Create perspective state object */
    OurPerspectiveState ps = new OurPerspectiveState(mapiter, isnether, scaled);
    ps.top = new Vector3D();
    ps.bottom = new Vector3D();
    ps.direction = new Vector3D();
    // Map-space origin of this tile
    double xbase = tile.tx * TILE_WIDTH;
    double ybase = tile.ty * TILE_HEIGHT;
    // Single-element flag array so the raytrace can report shader completion
    boolean[] shaderdone = new boolean[1];
    double height = maxheight;
    if (height < 0) {
        /* Not set - assume world height - 1 */
        if (isnether)
            height = 127;
        else
            height = tile.getDynmapWorld().worldheight - 1;
    }
    for (int x = 0; x < TILE_WIDTH * sizescale; x++) {
        ps.px = x;
        for (int y = 0; y < TILE_HEIGHT * sizescale; y++) {
            // Ray endpoints in map space, through the center of the pixel
            ps.top.x = ps.bottom.x = xbase + ((double) x) / sizescale + 0.5;
            /* Start at center of pixel at Y=height+0.5, bottom at Y=-0.5 */
            ps.top.y = ps.bottom.y = ybase + ((double) y) / sizescale + 0.5;
            ps.top.z = height + 0.5;
            ps.bottom.z = minheight - 0.5;
            map_to_world.transform(ps.top);
            /* Transform to world coordinates */
            map_to_world.transform(ps.bottom);
            // Ray direction = bottom - top (pointing down through the world)
            ps.direction.set(ps.bottom);
            ps.direction.subtract(ps.top);
            ps.py = y / sizescale;
            shaderstate.reset(ps);
            ps.raytrace(cache, shaderstate, shaderdone);
            if (!shaderdone[0]) {
                // Ray exited without the shader finishing: let it finalize
                shaderstate.rayFinished(ps);
            } else {
                // Clear the flag for the next pixel
                shaderdone[0] = false;
            }
            shaderstate.getRayColor(rslt, 0);
            int c_argb = rslt.getARGB();
            // Flip Y: image row 0 is the top of the tile
            argb_buf[(TILE_HEIGHT * sizescale - y - 1) * TILE_WIDTH * sizescale + x] = c_argb;
        }
    }
    return buf_img;
}
13
View Complete Implementation : AbstractMapChunkCache.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
* Container for managing chunks - dependent upon using chunk snapshots, since rendering is off server thread
*/
public abstract clreplaced AbstractMapChunkCache extends MapChunkCache {
// Reduced interface for snapshots
public interface Snapshot {
public DynmapBlockState getBlockType(int x, int y, int z);
public int getBlockSkyLight(int x, int y, int z);
public int getBlockEmittedLight(int x, int y, int z);
public int getHighestBlockYAt(int x, int z);
public Biome getBiome(int x, int z);
public boolean isSectionEmpty(int sy);
public Object[] getBiomeBaseFromSnapshot();
}
private static boolean init = false;
protected World w;
protected DynmapWorld dw;
private int nsect;
protected List<DynmapChunk> chunks;
protected Lisreplacederator<DynmapChunk> iterator;
protected int x_min;
private int x_max;
protected int z_min;
private int z_max;
protected int x_dim;
protected boolean biome;
protected boolean biomeraw;
protected boolean highesty;
protected boolean blockdata;
protected HiddenChunkStyle hidestyle = HiddenChunkStyle.FILL_AIR;
protected List<VisibilityLimit> visible_limits = null;
protected List<VisibilityLimit> hidden_limits = null;
protected boolean isempty = true;
private int snapcnt;
protected Snapshot[] snaparray;
/* Index = (x-x_min) + ((z-z_min)*x_dim) */
protected DynIntHashMap[] snaptile;
private byte[][] sameneighborbiomecnt;
private BiomeMap[][] biomemap;
private boolean[][] isSectionNotEmpty;
/* Indexed by snapshot index, then by section index */
protected long[] inhabitedTicks;
/* Index = (x-x_min) + ((z-z_min)*x_dim) */
private static final BiomeMap[] nullBiomeMap = { BiomeMap.NULL };
private static final BlockStep[] unstep = { BlockStep.X_MINUS, BlockStep.Y_MINUS, BlockStep.Z_MINUS, BlockStep.X_PLUS, BlockStep.Y_PLUS, BlockStep.Z_PLUS };
private static BiomeMap[] biome_to_bmap;
private static Biome[] biome_by_id;
protected static final int getIndexInChunk(int cx, int cy, int cz) {
return (cy << 8) | (cz << 4) | cx;
}
/**
* Iterator for traversing map chunk cache (base is for non-snapshot)
*/
public clreplaced BasetMapIterator implements MapIterator {
@SuppressWarnings("unused")
private int x, y, z, chunkindex, bx, bz, off;
private Snapshot snap;
private BlockStep laststep;
private DynmapBlockState type = null;
private final int worldheight;
private final int x_base;
private final int z_base;
BasetMapIterator(int x0, int y0, int z0) {
x_base = x_min << 4;
z_base = z_min << 4;
if (biome)
biomePrep();
initialize(x0, y0, z0);
worldheight = w.getMaxHeight();
}
@Override
public final void initialize(int x0, int y0, int z0) {
this.x = x0;
this.y = y0;
this.z = z0;
this.chunkindex = ((x >> 4) - x_min) + (((z >> 4) - z_min) * x_dim);
this.bx = x & 0xF;
this.bz = z & 0xF;
this.off = bx + (bz << 4);
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
laststep = BlockStep.Y_MINUS;
if ((y >= 0) && (y < worldheight))
type = null;
else
type = DynmapBlockState.AIR;
}
@Override
public final DynmapBlockState getBlockType() {
if (type == null) {
type = snap.getBlockType(bx, y, bz);
}
return type;
}
@Override
public int getBlockSkyLight() {
try {
return snap.getBlockSkyLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException | IllegalArgumentException x) {
}
return 15;
}
@Override
public final int getBlockEmittedLight() {
try {
return snap.getBlockEmittedLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException | IllegalArgumentException x) {
}
return 0;
}
private void biomePrep() {
if (sameneighborbiomecnt != null)
return;
int x_size = x_dim << 4;
int z_size = (z_max - z_min + 1) << 4;
sameneighborbiomecnt = new byte[x_size][];
biomemap = new BiomeMap[x_size][];
for (int i = 0; i < x_size; i++) {
sameneighborbiomecnt[i] = new byte[z_size];
biomemap[i] = new BiomeMap[z_size];
}
Snapshot last_css = null;
Object[] biomebase = null;
for (int i = 0; i < x_size; i++) {
for (int j = 0; j < z_size; j++) {
BiomeMap bm;
if (j == 0) {
initialize(i + x_base, 64, z_base);
} else {
stepPosition(BlockStep.Z_PLUS);
}
if (last_css != snap) {
if ((snap instanceof EmptyChunk) || (snap instanceof PlainChunk)) {
biomebase = nullBiomeMap;
} else {
biomebase = snap.getBiomeBaseFromSnapshot();
}
last_css = snap;
}
if (biomebase == nullBiomeMap) {
bm = BiomeMap.NULL;
} else if (biomebase != null) {
bm = BiomeMap.byBiomeID(BukkitVersionHelper.helper.getBiomeBaseID(biomebase[bz << 4 | bx]));
} else {
Biome bb = snap.getBiome(bx, bz);
if (bb == null)
bm = BiomeMap.NULL;
else
bm = biome_to_bmap[bb.ordinal()];
}
biomemap[i][j] = bm;
int cnt = 0;
if (i > 0) {
if (bm == biomemap[i - 1][j]) {
/* Same as one to left */
cnt++;
sameneighborbiomecnt[i - 1][j]++;
}
if ((j > 0) && (bm == biomemap[i - 1][j - 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j - 1]++;
}
if ((j < (z_size - 1)) && (bm == biomemap[i - 1][j + 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j + 1]++;
}
}
if ((j > 0) && (biomemap[i][j] == biomemap[i][j - 1])) {
/* Same as one to above */
cnt++;
sameneighborbiomecnt[i][j - 1]++;
}
sameneighborbiomecnt[i][j] = (byte) cnt;
}
}
}
@Override
public final BiomeMap getBiome() {
try {
return biomemap[x - x_base][z - z_base];
} catch (Exception ex) {
return BiomeMap.NULL;
}
}
@Override
public final int getSmoothGrreplacedColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) {
/* All neighbors same? */
mult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothFoliageColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) {
/* All neighbors same? */
mult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothColorMultiplier(int[] colormap, int[] swampmap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) {
/* All neighbors same? */
if (bm == BiomeMap.SWAMPLAND) {
mult = swampmap[bm.biomeLookup()];
} else {
mult = colormap[bm.biomeLookup()];
}
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult;
if (bm == BiomeMap.SWAMPLAND) {
rmult = swampmap[bm.biomeLookup()];
} else {
rmult = colormap[bm.biomeLookup()];
}
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothWaterColorMultiplier() {
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) {
/* All neighbors same? */
return bm.gereplacederColorMult();
}
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int mult = bm.gereplacederColorMult();
racreplaced += (mult >> 16) & 0xFF;
gacreplaced += (mult >> 8) & 0xFF;
bacreplaced += mult & 0xFF;
}
}
return ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
} catch (Exception x) {
return 0xFFFFFF;
}
}
@Override
public final int getSmoothWaterColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) {
/* All neighbors same? */
mult = colormap[bm.biomeLookup()];
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = colormap[bm.biomeLookup()];
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
Log.warning("Water colormult exception", x);
mult = 0xFFFFFF;
}
return mult;
}
/**
* Step current position in given direction
*/
@Override
public final void stepPosition(BlockStep step) {
type = null;
switch(step.ordinal()) {
case 0:
x++;
bx++;
off++;
if (bx == 16) {
/* Next chunk? */
bx = 0;
off -= 16;
chunkindex++;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 1:
y++;
if (y >= worldheight) {
type = DynmapBlockState.AIR;
}
break;
case 2:
z++;
bz++;
off += 16;
if (bz == 16) {
/* Next chunk? */
bz = 0;
off -= 256;
chunkindex += x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 3:
x--;
bx--;
off--;
if (bx == -1) {
/* Next chunk? */
bx = 15;
off += 16;
chunkindex--;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 4:
y--;
if (y < 0) {
type = DynmapBlockState.AIR;
}
break;
case 5:
z--;
bz--;
off -= 16;
if (bz == -1) {
/* Next chunk? */
bz = 15;
off += 256;
chunkindex -= x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
}
laststep = step;
}
/**
* Unstep current position to previous position
*/
@Override
public BlockStep unstepPosition() {
BlockStep ls = laststep;
stepPosition(unstep[ls.ordinal()]);
return ls;
}
/**
* Unstep current position in oppisite director of given step
*/
@Override
public void unstepPosition(BlockStep s) {
stepPosition(unstep[s.ordinal()]);
}
@Override
public final void setY(int y) {
if (y > this.y)
laststep = BlockStep.Y_PLUS;
else
laststep = BlockStep.Y_MINUS;
this.y = y;
if ((y < 0) || (y >= worldheight)) {
type = DynmapBlockState.AIR;
} else {
type = null;
}
}
@Override
public final int getX() {
return x;
}
@Override
public final int getY() {
return y;
}
@Override
public final int getZ() {
return z;
}
@Override
public final DynmapBlockState getBlockTypeAt(BlockStep s) {
if (s == BlockStep.Y_MINUS) {
if (y > 0)
return snap.getBlockType(bx, y - 1, bz);
} else if (s == BlockStep.Y_PLUS) {
if (y < (worldheight - 1))
return snap.getBlockType(bx, y + 1, bz);
} else {
BlockStep ls = laststep;
stepPosition(s);
DynmapBlockState tid = snap.getBlockType(bx, y, bz);
unstepPosition();
laststep = ls;
return tid;
}
return DynmapBlockState.AIR;
}
@Override
public BlockStep getLastStep() {
return laststep;
}
@Override
public int getWorldHeight() {
return worldheight;
}
@Override
public long getBlockKey() {
return (((chunkindex * worldheight) + y) << 8) | (bx << 4) | bz;
}
@Override
public final boolean isEmptySection() {
try {
return !isSectionNotEmpty[chunkindex][y >> 4];
} catch (Exception x) {
initSectionData(chunkindex);
return !isSectionNotEmpty[chunkindex][y >> 4];
}
}
@Override
public RenderPatchFactory getPatchFactory() {
return HDBlockModels.getPatchDefinitionFactory();
}
@Override
public Object getBlockTileEnreplacedyField(String fieldId) {
try {
int idx = getIndexInChunk(bx, y, bz);
Object[] vals = (Object[]) snaptile[chunkindex].get(idx);
for (int i = 0; i < vals.length; i += 2) {
if (vals[i].equals(fieldId)) {
return vals[i + 1];
}
}
} catch (Exception x) {
}
return null;
}
@Override
public DynmapBlockState getBlockTypeAt(int xoff, int yoff, int zoff) {
int xx = this.x + xoff;
int yy = this.y + yoff;
int zz = this.z + zoff;
int idx = ((xx >> 4) - x_min) + (((zz >> 4) - z_min) * x_dim);
try {
return snaparray[idx].getBlockType(xx & 0xF, yy, zz & 0xF);
} catch (Exception x) {
return DynmapBlockState.AIR;
}
}
@Override
public Object getBlockTileEnreplacedyFieldAt(String fieldId, int xoff, int yoff, int zoff) {
return null;
}
@Override
public long getInhabitedTicks() {
try {
return inhabitedTicks[chunkindex];
} catch (Exception x) {
return 0;
}
}
}
// Special iterator for END : forces skylight to 15
private clreplaced OurEndMapIterator extends BasetMapIterator {
OurEndMapIterator(int x0, int y0, int z0) {
super(x0, y0, z0);
}
@Override
public final int getBlockSkyLight() {
return 15;
}
}
/**
* Chunk cache for representing unloaded chunk (or air)
*/
private static clreplaced EmptyChunk implements Snapshot {
public final DynmapBlockState getBlockType(int x, int y, int z) {
return DynmapBlockState.AIR;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
return 0;
}
@Override
public Biome getBiome(int x, int z) {
return null;
}
@Override
public boolean isSectionEmpty(int sy) {
return true;
}
@Override
public Object[] getBiomeBaseFromSnapshot() {
return new Object[256];
}
}
/**
* Chunk cache for representing generic stone chunk
*/
private static clreplaced PlainChunk implements Snapshot {
private DynmapBlockState fill;
PlainChunk(String blockname) {
this.fill = DynmapBlockState.getBaseStateByName(blockname);
}
@Override
public final DynmapBlockState getBlockType(int x, int y, int z) {
return (y < 64) ? fill : DynmapBlockState.AIR;
}
@Override
public Biome getBiome(int x, int z) {
return null;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
if (y < 64)
return 0;
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
return 64;
}
@Override
public boolean isSectionEmpty(int sy) {
return (sy < 4);
}
@Override
public Object[] getBiomeBaseFromSnapshot() {
return new Object[256];
}
}
// Well known choices for hidden/empty chunks
protected static final EmptyChunk EMPTY = new EmptyChunk();
protected static final PlainChunk STONE = new PlainChunk(DynmapBlockState.STONE_BLOCK);
protected static final PlainChunk OCEAN = new PlainChunk(DynmapBlockState.WATER_BLOCK);
/**
* Construct empty cache
*/
public AbstractMapChunkCache() {
if (!init) {
init = true;
}
}
public void setChunks(BukkitWorld dw, List<DynmapChunk> chunks) {
this.dw = dw;
this.w = dw.getWorld();
if (this.w == null) {
this.chunks = new ArrayList<DynmapChunk>();
}
nsect = dw.worldheight >> 4;
this.chunks = chunks;
/* Compute range */
if (chunks.size() == 0) {
this.x_min = 0;
this.x_max = 0;
this.z_min = 0;
this.z_max = 0;
x_dim = 1;
} else {
x_min = x_max = chunks.get(0).x;
z_min = z_max = chunks.get(0).z;
for (DynmapChunk c : chunks) {
if (c.x > x_max)
x_max = c.x;
if (c.x < x_min)
x_min = c.x;
if (c.z > z_max)
z_max = c.z;
if (c.z < z_min)
z_min = c.z;
}
x_dim = x_max - x_min + 1;
}
snapcnt = x_dim * (z_max - z_min + 1);
snaparray = new Snapshot[snapcnt];
inhabitedTicks = new long[snapcnt];
snaptile = new DynIntHashMap[snapcnt];
isSectionNotEmpty = new boolean[snapcnt][];
}
public abstract Snapshot wrapChunkSnapshot(ChunkSnapshot css);
/**
 * Load up to max_to_load chunk snapshots from the world, resuming where the
 * previous call left off.  Snapshots come from the shared SnapshotCache when
 * possible; otherwise chunks are loaded (never generated), snapshotted,
 * cached, and unloaded again if we were the ones who loaded them.
 *
 * NOTE(review): restored identifiers that were corrupted by text scraping
 * ("lisreplacederator" -> listIterator, "TileEnreplacedy*" -> TileEntity*).
 *
 * @param max_to_load - maximum number of chunks to process this call
 * @return number of chunks processed (0 if the world is unloaded)
 */
public int loadChunks(int max_to_load) {
    if (dw.isLoaded() == false)
        return 0;
    Object queue = BukkitVersionHelper.helper.getUnloadQueue(w);
    int cnt = 0;
    if (iterator == null)
        iterator = chunks.listIterator();
    // Suppress chunk-load event handling while we load for rendering
    DynmapCore.setIgnoreChunkLoads(true);
    // Load the required chunks.
    while ((cnt < max_to_load) && iterator.hasNext()) {
        long startTime = System.nanoTime();
        DynmapChunk chunk = iterator.next();
        // Visibility: chunk must intersect a visible limit (if any are set)
        // and must not intersect any hidden limit
        boolean vis = true;
        if (visible_limits != null) {
            vis = false;
            for (VisibilityLimit limit : visible_limits) {
                if (limit.doIntersectChunk(chunk.x, chunk.z)) {
                    vis = true;
                    break;
                }
            }
        }
        if (vis && (hidden_limits != null)) {
            for (VisibilityLimit limit : hidden_limits) {
                if (limit.doIntersectChunk(chunk.x, chunk.z)) {
                    vis = false;
                    break;
                }
            }
        }
        /* Check if cached chunk snapshot found */
        Snapshot ss = null;
        long inhabited_ticks = 0;
        DynIntHashMap tileData = null;
        SnapshotRec ssr = SnapshotCache.sscache.getSnapshot(dw.getName(), chunk.x, chunk.z, blockdata, biome, biomeraw, highesty);
        if (ssr != null) {
            inhabited_ticks = ssr.inhabitedTicks;
            if (!vis) {
                // Hidden chunk: substitute the configured fill style
                if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN)
                    ss = STONE;
                else if (hidestyle == HiddenChunkStyle.FILL_OCEAN)
                    ss = OCEAN;
                else
                    ss = EMPTY;
            } else {
                ss = ssr.ss;
            }
            int idx = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
            snaparray[idx] = ss;
            snaptile[idx] = ssr.tileData;
            inhabitedTicks[idx] = inhabited_ticks;
            endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
            continue;
        }
        boolean wasLoaded = w.isChunkLoaded(chunk.x, chunk.z);
        boolean didload = false;
        boolean isunloadpending = false;
        if (queue != null) {
            isunloadpending = BukkitVersionHelper.helper.isInUnloadQueue(queue, chunk.x, chunk.z);
        }
        if (isunloadpending) {
            /* Workaround: can't be pending if not loaded */
            wasLoaded = true;
        }
        try {
            didload = loadChunkNoGenerate(w, chunk.x, chunk.z);
        } catch (Throwable t) {
            /* Catch chunk error from Bukkit */
            Log.warning("Bukkit error loading chunk " + chunk.x + "," + chunk.z + " on " + w.getName());
            if (!wasLoaded) {
                /* If wasn't loaded, we loaded it if it now is */
                didload = w.isChunkLoaded(chunk.x, chunk.z);
            }
        }
        /* If it did load, make cache of it */
        if (didload) {
            tileData = new DynIntHashMap();
            /* Get the chunk */
            Chunk c = w.getChunkAt(chunk.x, chunk.z);
            /* Get inhabited ticks count */
            inhabited_ticks = BukkitVersionHelper.helper.getInhabitedTicks(c);
            if (!vis) {
                if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN)
                    ss = STONE;
                else if (hidestyle == HiddenChunkStyle.FILL_OCEAN)
                    ss = OCEAN;
                else
                    ss = EMPTY;
            } else {
                ChunkSnapshot css;
                if (blockdata || highesty) {
                    css = c.getChunkSnapshot(highesty, biome, biomeraw);
                    ss = wrapChunkSnapshot(css);
                    /* Get tile entity data */
                    List<Object> vals = new ArrayList<Object>();
                    Map<?, ?> tileents = BukkitVersionHelper.helper.getTileEntitiesForChunk(c);
                    for (Object t : tileents.values()) {
                        int te_x = BukkitVersionHelper.helper.getTileEntityX(t);
                        int te_y = BukkitVersionHelper.helper.getTileEntityY(t);
                        int te_z = BukkitVersionHelper.helper.getTileEntityZ(t);
                        int cx = te_x & 0xF;
                        int cz = te_z & 0xF;
                        // Only keep the NBT fields the renderers actually need for this block type
                        String[] te_fields = HDBlockModels.getTileEntityFieldsNeeded(ss.getBlockType(cx, te_y, cz));
                        if (te_fields != null) {
                            Object nbtcompound = BukkitVersionHelper.helper.readTileEntityNBT(t);
                            vals.clear();
                            for (String id : te_fields) {
                                Object val = BukkitVersionHelper.helper.getFieldValue(nbtcompound, id);
                                if (val != null) {
                                    vals.add(id);
                                    vals.add(val);
                                }
                            }
                            if (vals.size() > 0) {
                                Object[] vlist = vals.toArray(new Object[vals.size()]);
                                tileData.put(getIndexInChunk(cx, te_y, cz), vlist);
                            }
                        }
                    }
                } else {
                    css = w.getEmptyChunkSnapshot(chunk.x, chunk.z, biome, biomeraw);
                    ss = wrapChunkSnapshot(css);
                }
                if (ss != null) {
                    ssr = new SnapshotRec();
                    ssr.ss = ss;
                    ssr.inhabitedTicks = inhabited_ticks;
                    ssr.tileData = tileData;
                    SnapshotCache.sscache.putSnapshot(dw.getName(), chunk.x, chunk.z, ssr, blockdata, biome, biomeraw, highesty);
                }
            }
            int chunkIndex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
            snaparray[chunkIndex] = ss;
            snaptile[chunkIndex] = tileData;
            inhabitedTicks[chunkIndex] = inhabited_ticks;
            /* If wasn't loaded before, we need to do unload */
            if (!wasLoaded) {
                /* Since we only remember ones we loaded, and we're synchronous, no player has
                 * moved, so it must be safe (also prevent chunk leak, which appears to happen
                 * because isChunkInUse defined "in use" as being within 256 blocks of a player,
                 * while the actual in-use chunk area for a player where the chunks are managed
                 * by the MC base server is 21x21 (or about a 160 block radius).
                 * Also, if we did generate it, need to save it */
                if (w.isChunkInUse(chunk.x, chunk.z) == false) {
                    if (BukkitVersionHelper.helper.isUnloadChunkBroken()) {
                        // Give up on broken unloadChunk API - lets see if this works
                        w.unloadChunkRequest(chunk.x, chunk.z);
                    } else {
                        BukkitVersionHelper.helper.unloadChunkNoSave(w, c, chunk.x, chunk.z);
                    }
                }
                endChunkLoad(startTime, ChunkStats.UNLOADED_CHUNKS);
            } else if (isunloadpending) {
                /* Else, if loaded and unload is pending */
                if (w.isChunkInUse(chunk.x, chunk.z) == false) {
                    /* Request new unload */
                    w.unloadChunkRequest(chunk.x, chunk.z);
                }
                endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
            } else {
                endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
            }
        } else {
            endChunkLoad(startTime, ChunkStats.UNGENERATED_CHUNKS);
        }
        cnt++;
    }
    DynmapCore.setIgnoreChunkLoads(false);
    if (iterator.hasNext() == false) {
        /* If we're done */
        isempty = true;
        /* Fill missing chunks with empty dummy chunk */
        for (int i = 0; i < snaparray.length; i++) {
            if (snaparray[i] == null)
                snaparray[i] = EMPTY;
            else if (snaparray[i] != EMPTY)
                isempty = false;
        }
    }
    return cnt;
}
/**
 * Test whether the chunk-loading pass has completed.
 * If the world has been unloaded in the meantime, mark the cache empty,
 * release all snapshots, and report done.
 */
public boolean isDoneLoading() {
    if (!dw.isLoaded()) {
        isempty = true;
        unloadChunks();
        return true;
    }
    // Not started yet (no iterator) counts as not done
    return (iterator != null) && !iterator.hasNext();
}
/**
 * Test if all loaded chunks resolved to the EMPTY placeholder snapshot.
 * Only meaningful after loadChunks() has consumed the entire chunk list,
 * since that is where isempty is finalized.
 */
public boolean isEmpty() {
    return isempty;
}
/**
 * Release all cached snapshot data held by this cache.
 * Safe to call more than once; subsequent calls are no-ops.
 */
public void unloadChunks() {
    if (snaparray == null) {
        return;
    }
    // Drop snapshot references before releasing the arrays themselves
    Arrays.fill(snaparray, null);
    snaparray = null;
    inhabitedTicks = null;
}
// Lazily compute, for chunk slot idx, which 16-block-high vertical sections
// contain any non-empty blocks.  The flag array is sized nsect+1 while only
// indices 0..nsect-1 are ever set, so the extra top slot always reads false
// ("empty") in isEmptySection() without a bounds check.
private void initSectionData(int idx) {
    isSectionNotEmpty[idx] = new boolean[nsect + 1];
    if (snaparray[idx] != EMPTY) {
        for (int i = 0; i < nsect; i++) {
            if (snaparray[idx].isSectionEmpty(i) == false) {
                isSectionNotEmpty[idx][i] = true;
            }
        }
    }
}
/**
 * Test whether the 16x16x16 section at section coords (sx, sy, sz) is empty.
 * Section-emptiness data is computed lazily per chunk slot.
 */
public boolean isEmptySection(int sx, int sy, int sz) {
    final int idx = (sx - x_min) + ((sz - z_min) * x_dim);
    boolean[] notEmpty = isSectionNotEmpty[idx];
    if (notEmpty == null) {
        initSectionData(idx);
        notEmpty = isSectionNotEmpty[idx];
    }
    return !notEmpty[sy];
}
/**
 * Get cache iterator, starting at the given block coordinates.
 * The End gets a specialized iterator (different sky-lighting handling).
 *
 * NOTE(review): the names "gereplacederator" and "BasetMapIterator" appear to be
 * scraper-corrupted (likely "getIterator"/"BaseMapIterator" upstream) — verify
 * against the original Dynmap source before renaming, since any callers in this
 * file would use the same corrupted spelling.
 */
public MapIterator gereplacederator(int x, int y, int z) {
    if (w.getEnvironment().toString().equals("THE_END"))
        return new OurEndMapIterator(x, y, z);
    return new BasetMapIterator(x, y, z);
}
/**
 * Set hidden chunk style (default is FILL_AIR)
 * @param style - substitute fill used for chunks outside the visible limits
 *                (see loadChunks: FILL_STONE_PLAIN, FILL_OCEAN, else EMPTY)
 */
public void setHiddenFillStyle(HiddenChunkStyle style) {
    this.hidestyle = style;
}
/**
 * Add visible area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 * @param lim - limit to add; chunks intersecting any added limit are visible,
 *              all others are rendered with the hidden fill style
 */
public void setVisibleRange(VisibilityLimit lim) {
    if (visible_limits == null)
        visible_limits = new ArrayList<VisibilityLimit>();
    visible_limits.add(lim);
}
/**
 * Add hidden area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 * @param lim - limit to add; chunks intersecting any hidden limit are rendered
 *              with the hidden fill style even if inside a visible limit
 */
public void setHiddenRange(VisibilityLimit lim) {
    if (hidden_limits == null)
        hidden_limits = new ArrayList<VisibilityLimit>();
    hidden_limits.add(lim);
}
@Override
public boolean setChunkDataTypes(boolean blockdata, boolean biome, boolean highestblocky, boolean rawbiome) {
    // Record which data categories the renderers will need from each snapshot;
    // must be configured before loadChunks() is first called.
    this.blockdata = blockdata;
    this.biome = biome;
    this.biomeraw = rawbiome;
    this.highesty = highestblocky;
    // All requested data types are supported by this cache implementation
    return true;
}
@Override
public DynmapWorld getWorld() {
    // World this chunk cache was built for
    return dw;
}
// Load the chunk only if it already exists - the 'false' third argument
// disables terrain generation.  Returns true if the chunk ended up loaded.
public boolean loadChunkNoGenerate(World w, int x, int z) {
    return w.loadChunk(x, z, false);
}
/**
 * Look up a Bukkit Biome by numeric id.
 * Ids outside the table's range fall back to PLAINS.
 */
public static Biome getBiomeByID(int id) {
    boolean inRange = (id >= 0) && (id < biome_by_id.length);
    return inRange ? biome_by_id[id] : Biome.PLAINS;
}
static {
    // Build lookup tables between Bukkit Biome values and Dynmap BiomeMap
    // values by matching their toString() names.
    Biome[] b = Biome.values();
    BiomeMap[] bm = BiomeMap.values();
    biome_to_bmap = new BiomeMap[1024];
    biome_by_id = new Biome[1024];
    // Unmatched ids default to PLAINS / NULL
    Arrays.fill(biome_by_id, Biome.PLAINS);
    for (int i = 0; i < biome_to_bmap.length; i++) {
        biome_to_bmap[i] = BiomeMap.NULL;
    }
    for (int i = 0; i < b.length; i++) {
        String bs = b[i].toString();
        for (int j = 0; j < bm.length; j++) {
            if (bm[j].toString().equals(bs)) {
                biome_to_bmap[b[i].ordinal()] = bm[j];
                // NOTE(review): indexes biome_by_id by the BiomeMap ordinal (j)
                // rather than the Biome ordinal used on the line above — looks
                // asymmetric; confirm against the upstream Dynmap source.
                biome_by_id[j] = b[i];
                break;
            }
        }
    }
}
}
13
View Complete Implementation : MariaDBMapStorage.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
/**
 * Get (or lazily create) the integer map key for the given world/map/variant
 * triple.  Keys are cached in mapKey; on a cache miss a row is inserted into
 * the Maps table and the DB-assigned ID is read back and cached.
 *
 * Fix: PreparedStatement/ResultSet were leaked if a SQLException occurred
 * between creation and close; now closed via try-with-resources.
 *
 * @return the map key, or null if the database operations failed
 */
private Integer getMapKey(DynmapWorld w, MapType mt, ImageVariant var) {
    String id = w.getName() + ":" + mt.getPrefix() + ":" + var.toString();
    synchronized (mapKey) {
        Integer k = mapKey.get(id);
        if (k == null) {
            // No hit: new value so we need to add it to table
            Connection c = null;
            boolean err = false;
            try {
                c = getConnection();
                // Insert row
                try (PreparedStatement stmt = c.prepareStatement("INSERT INTO " + tableMaps + " (WorldID,MapID,Variant,ServerID) VALUES (?, ?, ?, ?);")) {
                    stmt.setString(1, w.getName());
                    stmt.setString(2, mt.getPrefix());
                    stmt.setString(3, var.toString());
                    stmt.setLong(4, serverID);
                    stmt.executeUpdate();
                }
                // Query key assigned by the database
                try (PreparedStatement stmt = c.prepareStatement("SELECT ID FROM " + tableMaps + " WHERE WorldID = ? AND MapID = ? AND Variant = ? AND ServerID = ?;")) {
                    stmt.setString(1, w.getName());
                    stmt.setString(2, mt.getPrefix());
                    stmt.setString(3, var.toString());
                    stmt.setLong(4, serverID);
                    try (ResultSet rs = stmt.executeQuery()) {
                        if (rs.next()) {
                            k = rs.getInt("ID");
                            mapKey.put(id, k);
                        }
                    }
                }
            } catch (SQLException x) {
                Log.severe("Error updating Maps table - " + x.getMessage());
                err = true;
            } finally {
                // Connection lifecycle stays with the pool helper
                releaseConnection(c, err);
            }
        }
        return k;
    }
}
12
View Complete Implementation : CircleMarkerImpl.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
// Test whether the given map tile (square of side tile_dim at tile_x, tile_y in
// map coordinates) may contain part of this circle marker, for boost rendering.
// The circle is approximated by a 16-point polygon, projected into map space and
// cached per perspective in bb_cache.
final boolean testTileForBoostMarkers(DynmapWorld w, HDPerspective perspective, double tile_x, double tile_y, double tile_dim) {
    Map<String, BoundingBox> bbc = bb_cache;
    if (bbc == null) {
        bbc = new ConcurrentHashMap<String, BoundingBox>();
    }
    BoundingBox bb = bbc.get(perspective.getName());
    if (bb == null) {
        // No cached bounding box, so generate it
        bb = new BoundingBox();
        Vector3D v = new Vector3D();
        Vector3D v2 = new Vector3D();
        bb.xmin = Double.MAX_VALUE;
        bb.xmax = -Double.MAX_VALUE;
        bb.ymin = Double.MAX_VALUE;
        bb.ymax = -Double.MAX_VALUE;
        // Just do 16 points for now
        int cnt = 16;
        bb.xp = new double[cnt];
        bb.yp = new double[cnt];
        for (int i = 0; i < cnt; i++) {
            // Point i on the ellipse (xr, zr radii) around the marker center
            v.x = this.x + (this.xr * Math.cos(2.0 * Math.PI * i / cnt));
            v.y = this.y;
            v.z = this.z + (this.zr * Math.sin(2.0 * Math.PI * i / cnt));
            // Transform to map coord
            perspective.transformWorldToMapCoord(v, v2);
            if (v2.x < bb.xmin)
                bb.xmin = v2.x;
            if (v2.y < bb.ymin)
                bb.ymin = v2.y;
            if (v2.x > bb.xmax)
                bb.xmax = v2.x;
            if (v2.y > bb.ymax)
                bb.ymax = v2.y;
            bb.xp[i] = v2.x;
            bb.yp[i] = v2.y;
        }
        // System.out.println("x=" + bb.xmin + " - " + bb.xmax + ", y=" + bb.ymin + " - " + bb.ymax);
        bbc.put(perspective.getName(), bb);
        bb_cache = bbc;
    }
    final double tile_x2 = tile_x + tile_dim;
    final double tile_y2 = tile_y + tile_dim;
    // Cheap reject: axis-aligned bounding boxes don't overlap
    if ((bb.xmin > tile_x2) || (bb.xmax < tile_x) || (bb.ymin > tile_y2) || (bb.ymax < tile_y)) {
        // System.out.println("tile: " + tile_x + " / " + tile_y + " - miss");
        return false;
    }
    final int cnt = bb.xp.length;
    final double[] px = bb.xp;
    final double[] py = bb.yp;
    /* Now see if tile square intersects polygon - start with seeing if any point inside */
    if (MarkerImpl.testPointInPolygon(tile_x, tile_y, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x2, tile_y, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x, tile_y2, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    if (MarkerImpl.testPointInPolygon(tile_x2, tile_y2, px, py)) {
        // If tile corner inside, we intersect
        return true;
    }
    /* Test if any polygon corners are inside square */
    for (int i = 0; i < cnt; i++) {
        if ((px[i] >= tile_x) && (px[i] <= tile_x2) && (py[i] >= tile_y) && (py[i] <= tile_y2)) {
            // If poly corner inside tile, we intersect
            return true;
        }
    }
    // Otherwise, only intersects if at least one edge crosses
    // NOTE(review): the edge-crossing test below was never finished (see the
    // commented-out sketch), so a tile crossed only by polygon edges — with no
    // corner of either shape inside the other — is reported as a miss.
    // for (int i = 0, j = cnt-1; i < cnt; j = i++) {
    // // Test for X=tile_x side
    // if ((px[i] < tile_x) && (px[j] >= tile_x) && ()
    // }
    // System.out.println("tile: " + tile_x + " / " + tile_y + " - hit");
    return false;
}
11
View Complete Implementation : OBJExport.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
public clreplaced OBJExport {
// Destination ZIP file
private final File destZipFile;
// Shader to be used for textures
private final HDShader shader;
// World to be rendered
private final DynmapWorld world;
private final DynmapCore core;
private final String basename;
// Minimum world coordinates to be rendered
private int minX, minY, minZ;
// Maximum world coordinates to be rendered
private int maxX, maxY, maxZ;
private static Charset UTF8 = Charset.forName("UTF-8");
// Output stream ZIP for result
private ZipOutputStream zos;
// Origin for exported model
private double originX, originY, originZ;
// Scale for exported model
private double scale = 1.0;
// Center at origin
private boolean centerOrigin = true;
// Default patches for solid block, indexed by BlockStep.ordinal()
private PatchDefinition[] defaultPathces;
// Set of defined material ids for RP
private HashSet<String> matIDs = new HashSet<String>();
private static clreplaced Face {
String groupLine;
String faceLine;
}
private HashMap<String, List<Face>> facesByTexture = new HashMap<String, List<Face>>();
private static final int MODELSCALE = 16;
private static final double BLKSIZE = 1.0 / (double) MODELSCALE;
// Index of group settings
public static final int GROUP_CHUNK = 0;
public static final int GROUP_TEXTURE = 1;
public static final int GROUP_BLOCKID = 2;
public static final int GROUP_BLOCKIDMETA = 3;
public static final int GROUP_COUNT = 4;
private String[] group = new String[GROUP_COUNT];
private boolean[] enabledGroups = new boolean[GROUP_COUNT];
private String groupline = null;
// Vertex set
private IndexedVector3DList vertices;
// UV set
private IndexedVector3DList uvs;
// Scaled models
private HDScaledBlockModels models;
public static final int ROT0 = 0;
public static final int ROT90 = 1;
public static final int ROT180 = 2;
public static final int ROT270 = 3;
public static final int HFLIP = 4;
private static final double[][] pp = { { 0, 0, 0, 1, 0, 0, 0, 0, 1 }, { 0, 1, 1, 1, 1, 1, 0, 1, 0 }, { 1, 0, 0, 0, 0, 0, 1, 1, 0 }, { 0, 0, 1, 1, 0, 1, 0, 1, 1 }, { 0, 0, 0, 0, 0, 1, 0, 1, 0 }, { 1, 0, 1, 1, 0, 0, 1, 1, 1 } };
/**
* Constructor for OBJ file export
* @param dest - destination file (ZIP)
* @param shader - shader to be used for coloring/texturing
* @param world - world to be rendered
* @param core - core object
* @param basename - base file name
*/
public OBJExport(File dest, HDShader shader, DynmapWorld world, DynmapCore core, String basename) {
destZipFile = dest;
this.shader = shader;
this.world = world;
this.core = core;
this.basename = basename;
this.defaultPathces = new PatchDefinition[6];
PatchDefinitionFactory fact = HDBlockModels.getPatchDefinitionFactory();
for (BlockStep s : BlockStep.values()) {
double[] p = pp[s.getFaceEntered()];
int ord = s.ordinal();
defaultPathces[ord] = fact.getPatch(p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8], 0, 1, 0, 0, 1, 1, SideVisible.TOP, ord);
}
vertices = new IndexedVector3DList(new IndexedVector3DList.ListCallback() {
@Override
public void elementAdded(IndexedVector3DList list, IndexedVector3D newElement) {
try {
/* Minecraft XYZ maps to OBJ YZX */
addStringToExportedFile(String.format(Locale.US, "v %.4f %.4f %.4f\n", (newElement.x - originX) * scale, (newElement.y - originY) * scale, (newElement.z - originZ) * scale));
} catch (IOException iox) {
}
}
});
uvs = new IndexedVector3DList(new IndexedVector3DList.ListCallback() {
@Override
public void elementAdded(IndexedVector3DList list, IndexedVector3D newElement) {
try {
addStringToExportedFile(String.format(Locale.US, "vt %.4f %.4f\n", newElement.x, newElement.y));
} catch (IOException iox) {
}
}
});
// Get models
models = HDBlockModels.getModelsForScale(MODELSCALE);
}
/**
 * Set render bounds
 *
 * @param minx - minimum X coord
 * @param miny - minimum Y coord
 * @param minz - minimum Z coord
 * @param maxx - maximum X coord
 * @param maxy - maximum Y coord
 * @param maxz - maximum Z coord
 */
public void setRenderBounds(int minx, int miny, int minz, int maxx, int maxy, int maxz) {
    // Normalize each axis so min <= max regardless of argument order
    minX = Math.min(minx, maxx);
    maxX = Math.max(minx, maxx);
    minY = Math.min(miny, maxy);
    maxY = Math.max(miny, maxy);
    minZ = Math.min(minz, maxz);
    maxZ = Math.max(minz, maxz);
    // Clamp Y to the world's valid vertical range
    if (minY < 0)
        minY = 0;
    if (maxY >= world.worldheight)
        maxY = world.worldheight - 1;
    // When centering, place the model origin in the middle of the X/Z extent,
    // resting on the lowest rendered Y
    if (centerOrigin) {
        originX = (maxX + minX) / 2.0;
        originY = minY;
        originZ = (maxZ + minZ) / 2.0;
    }
}
/**
* Set origin for exported model
* @param ox - origin x
* @param oy - origin y
* @param oz - origin z
*/
public void setOrigin(double ox, double oy, double oz) {
originX = ox;
originY = oy;
originZ = oz;
centerOrigin = false;
}
/**
* Set scale for exported model
* @param scale = scale
*/
public void setScale(double scale) {
this.scale = scale;
}
/**
* Process export
*
* @param sender - command sender: use for feedback messages
* @return true if successful, false if not
*/
public boolean processExport(DynmapCommandSender sender) {
boolean good = false;
try {
// Open ZIP file destination
zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(destZipFile)));
List<DynmapChunk> requiredChunks = new ArrayList<DynmapChunk>();
int mincx = (minX >> 4);
int maxcx = (maxX + 15) >> 4;
int mincz = (minZ >> 4);
int maxcz = (maxZ + 15) >> 4;
boolean[] edgebits = new boolean[6];
startExportedFile(basename + ".obj");
// Add material library
addStringToExportedFile("mtllib " + basename + ".mtl\n");
// Loop through - do 8x8 chunks at a time (plus 1 border each way)
for (int cx = mincx; cx <= maxcx; cx += 4) {
for (int cz = mincz; cz <= maxcz; cz += 4) {
// Build chunk cache for block of chunks
requiredChunks.clear();
for (int i = -1; i < 5; i++) {
for (int j = -1; j < 5; j++) {
if (((cx + i) <= maxcx) && ((cz + j) <= maxcz) && ((cx + i) >= mincx) && ((cz + j) >= mincz)) {
requiredChunks.add(new DynmapChunk(cx + i, cz + j));
}
}
}
// Get the chunk buffer
MapChunkCache cache = core.getServer().createMapChunkCache(world, requiredChunks, true, false, true, false);
if (cache == null) {
throw new IOException("Error loading chunk cache");
}
MapIterator iter = cache.gereplacederator(minX, minY, minZ);
for (int x = cx * 16; (x < (cx * 16 + 64)) && (x <= maxX); x++) {
if (x < minX)
x = minX;
edgebits[BlockStep.X_PLUS.ordinal()] = (x == minX);
edgebits[BlockStep.X_MINUS.ordinal()] = (x == maxX);
for (int z = cz * 16; (z < (cz * 16 + 64)) && (z <= maxZ); z++) {
if (z < minZ)
z = minZ;
edgebits[BlockStep.Z_PLUS.ordinal()] = (z == minZ);
edgebits[BlockStep.Z_MINUS.ordinal()] = (z == maxZ);
iter.initialize(x, minY, z);
updateGroup(GROUP_CHUNK, "chunk" + (x >> 4) + "_" + (z >> 4));
// Do first (bottom)
edgebits[BlockStep.Y_MINUS.ordinal()] = true;
edgebits[BlockStep.Y_PLUS.ordinal()] = false;
DynmapBlockState blk = iter.getBlockType();
if (blk.isNotAir()) {
// Not air
handleBlock(blk, iter, edgebits);
}
// Do middle
edgebits[BlockStep.Y_MINUS.ordinal()] = false;
for (int y = minY + 1; y < maxY; y++) {
iter.setY(y);
blk = iter.getBlockType();
if (blk.isNotAir()) {
// Not air
handleBlock(blk, iter, edgebits);
}
}
// Do top
edgebits[BlockStep.Y_PLUS.ordinal()] = true;
iter.setY(maxY);
blk = iter.getBlockType();
if (blk.isNotAir()) {
// Not air
handleBlock(blk, iter, edgebits);
}
}
}
// Output faces by texture
String grp = "";
for (String material : facesByTexture.keySet()) {
List<Face> faces = facesByTexture.get(material);
// Record material use
matIDs.add(material);
addStringToExportedFile(String.format("usemtl %s\n", material));
for (Face face : faces) {
if ((face.groupLine != null) && (!face.groupLine.equals(grp))) {
grp = face.groupLine;
addStringToExportedFile(grp);
}
addStringToExportedFile(face.faceLine);
}
}
// Clear face table
facesByTexture.clear();
// Clean up vertices we've moved past
vertices.resetSet(minX, minY, minZ, cx * 16 + 64, maxY, cz * 16 + 64);
}
}
finishExportedFile();
// If shader provided, add shader content to ZIP
if (shader != null) {
sender.sendMessage("Adding textures from shader " + shader.getName());
shader.exportAsMaterialLibrary(sender, this);
sender.sendMessage("Texture export completed");
}
// And close the ZIP
zos.finish();
zos.close();
zos = null;
good = true;
sender.sendMessage("Export completed - " + destZipFile.getPath());
} catch (IOException iox) {
sender.sendMessage("Export failed: " + iox.getMessage());
} finally {
if (zos != null) {
try {
zos.close();
} catch (IOException e) {
}
zos = null;
destZipFile.delete();
}
}
return good;
}
/**
* Start adding file to export
* @param fname - path/name of file in destination zip
* @throws IOException if error starting file
*/
public void startExportedFile(String fname) throws IOException {
ZipEntry ze = new ZipEntry(fname);
zos.putNextEntry(ze);
}
/**
* Add bytes to current exported file
* @param buf - buffer with bytes
* @param off - offset of start
* @param len - length to be added
* @throws IOException if error adding to file
*/
public void addBytesToExportedFile(byte[] buf, int off, int len) throws IOException {
zos.write(buf, off, len);
}
/**
 * Add string to current exported file (UTF-8)
 * @param str - string to be written
 * @throws IOException if error adding to file
 */
public void addStringToExportedFile(String str) throws IOException {
    byte[] b = str.getBytes(UTF8);
    zos.write(b, 0, b.length);
}
/**
* Finish adding file to export
* @throws IOException if error completing file
*/
public void finishExportedFile() throws IOException {
zos.closeEntry();
}
/**
* Handle block at current iterator coord
* @param id - block ID
* @param iter - iterator
* @param edgebits - bit N corresponds to side N being an endge (forge render)
*/
private void handleBlock(DynmapBlockState blk, MapIterator map, boolean[] edgebits) throws IOException {
BlockStep[] steps = BlockStep.values();
int[] txtidx = null;
// See if the block has a patch model
RenderPatch[] patches = models.getPatchModel(blk);
/* If no patches, see if custom model */
if (patches == null) {
CustomBlockModel cbm = models.getCustomBlockModel(blk);
if (cbm != null) {
/* If so, get our meshes */
patches = cbm.getMeshForBlock(map);
}
}
if (patches != null) {
steps = new BlockStep[patches.length];
txtidx = new int[patches.length];
for (int i = 0; i < txtidx.length; i++) {
txtidx[i] = ((PatchDefinition) patches[i]).getTextureIndex();
steps[i] = ((PatchDefinition) patches[i]).step;
}
} else {
// See if volumetric
short[] smod = models.getScaledModel(blk);
if (smod != null) {
patches = getScaledModelAsPatches(smod);
steps = new BlockStep[patches.length];
txtidx = new int[patches.length];
for (int i = 0; i < patches.length; i++) {
PatchDefinition pd = (PatchDefinition) patches[i];
steps[i] = pd.step;
txtidx[i] = pd.getTextureIndex();
}
}
}
// Set block ID and ID+meta groups
updateGroup(GROUP_BLOCKID, "blk" + blk.baseState.globalStateIndex);
updateGroup(GROUP_BLOCKIDMETA, "blk" + blk.globalStateIndex);
// Get materials for patches
String[] mats = shader.getCurrentBlockMaterials(blk, map, txtidx, steps);
if (patches != null) {
// Patch based model?
for (int i = 0; i < patches.length; i++) {
addPatch((PatchDefinition) patches[i], map.getX(), map.getY(), map.getZ(), mats[i]);
}
} else {
boolean opaque = HDBlockStateTextureMap.getTransparency(blk) == BlockTransparency.OPAQUE;
for (int face = 0; face < 6; face++) {
// Get block in direction
DynmapBlockState blk2 = map.getBlockTypeAt(BlockStep.oppositeValues[face]);
// If we're not solid, or adjacent block is not solid, draw side
if ((!opaque) || blk2.isAir() || edgebits[face] || (HDBlockStateTextureMap.getTransparency(blk2) != BlockTransparency.OPAQUE)) {
addPatch(defaultPathces[face], map.getX(), map.getY(), map.getZ(), mats[face]);
}
}
}
}
private int[] getTextureUVs(PatchDefinition pd, int rot) {
int[] uv = new int[4];
if (rot == ROT0) {
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
} else if (rot == ROT90) {
// 90 degrees on texture
uv[0] = uvs.getVectorIndex(1.0 - pd.vmin, pd.umin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.vmin, pd.umax, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.vmax, pd.umax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.vmax, pd.umin, 0);
} else if (rot == ROT180) {
// 180 degrees on texture
uv[0] = uvs.getVectorIndex(1.0 - pd.umin, 1.0 - pd.vmin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.umax, 1.0 - pd.vmin, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.umax, 1.0 - pd.vmax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.umin, 1.0 - pd.vmax, 0);
} else if (rot == ROT270) {
// 270 degrees on texture
uv[0] = uvs.getVectorIndex(pd.vmin, 1.0 - pd.umin, 0);
uv[1] = uvs.getVectorIndex(pd.vmin, 1.0 - pd.umax, 0);
uv[2] = uvs.getVectorIndex(pd.vmax, 1.0 - pd.umax, 0);
uv[3] = uvs.getVectorIndex(pd.vmax, 1.0 - pd.umin, 0);
} else if (rot == HFLIP) {
uv[0] = uvs.getVectorIndex(1.0 - pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.umin, pd.vmax, 0);
} else {
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
}
return uv;
}
/**
* Add patch as face to output
*/
private void addPatch(PatchDefinition pd, double x, double y, double z, String material) throws IOException {
// No material? No face
if (material == null) {
return;
}
int rot = 0;
// Check for rotation modifier
int rotidx = material.indexOf('@');
if (rotidx >= 0) {
// 0-3
rot = material.charAt(rotidx + 1) - '0';
material = material.substring(0, rotidx);
}
int[] v = new int[4];
int[] uv = getTextureUVs(pd, rot);
// Get offsets for U and V from origin
double ux = pd.xu - pd.x0;
double uy = pd.yu - pd.y0;
double uz = pd.zu - pd.z0;
double vx = pd.xv - pd.x0;
double vy = pd.yv - pd.y0;
double vz = pd.zv - pd.z0;
// Offset to origin corner
x = x + pd.x0;
y = y + pd.y0;
z = z + pd.z0;
// Origin corner, offset by umin, vmin
v[0] = vertices.getVectorIndex(x + ux * pd.umin + vx * pd.vmin, y + uy * pd.umin + vy * pd.vmin, z + uz * pd.umin + vz * pd.vmin);
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
// Second is end of U (umax, vmin)
v[1] = vertices.getVectorIndex(x + ux * pd.umax + vx * pd.vmin, y + uy * pd.umax + vy * pd.vmin, z + uz * pd.umax + vz * pd.vmin);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
// Third is end of U+V (umax, vmax)
v[2] = vertices.getVectorIndex(x + ux * pd.umax + vx * pd.vmax, y + uy * pd.umax + vy * pd.vmax, z + uz * pd.umax + vz * pd.vmax);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
// Forth is end of V (umin, vmax)
v[3] = vertices.getVectorIndex(x + ux * pd.umin + vx * pd.vmax, y + uy * pd.umin + vy * pd.vmax, z + uz * pd.umin + vz * pd.vmax);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
// Add patch to file
addPatchToFile(v, uv, pd.sidevis, material, rot);
}
private void addPatchToFile(int[] v, int[] uv, SideVisible sv, String material, int rot) throws IOException {
List<Face> faces = facesByTexture.get(material);
if (faces == null) {
faces = new ArrayList<Face>();
facesByTexture.put(material, faces);
}
// If needed, rotate the UV sequence
if (rot == HFLIP) {
// Flip horizonntal
int[] newuv = new int[uv.length];
for (int i = 0; i < uv.length; i++) {
newuv[i] = uv[i ^ 1];
}
uv = newuv;
} else if (rot != ROT0) {
int[] newuv = new int[uv.length];
for (int i = 0; i < uv.length; i++) {
newuv[i] = uv[(i + 4 - rot) % uv.length];
}
uv = newuv;
}
Face f = new Face();
f.groupLine = updateGroup(GROUP_TEXTURE, material);
switch(sv) {
case TOP:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
break;
case BOTTOM:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[3], v[2], uv[2], v[1], uv[1], v[0], uv[0]);
break;
case BOTH:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
f.faceLine += String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[3], v[2], uv[2], v[1], uv[1], v[0], uv[0]);
break;
case FLIP:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
f.faceLine += String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[2], v[2], uv[3], v[1], uv[0], v[0], uv[1]);
break;
}
faces.add(f);
}
public Set<String> getMaterialIDs() {
return matIDs;
}
// Test whether the MODELSCALE^3 volumetric model has a filled cell at (x,y,z);
// any coordinate outside the model cube reads as empty.
private static final boolean getSubblock(short[] mod, int x, int y, int z) {
    if (x < 0 || y < 0 || z < 0 || x >= MODELSCALE || y >= MODELSCALE || z >= MODELSCALE) {
        return false;
    }
    // Layout: y-major, then z, then x
    return mod[(MODELSCALE * MODELSCALE * y) + (MODELSCALE * z) + x] != 0;
}
// Length of the filled run starting at (x,y,z) along the X axis
private int scanX(short[] tmod, int x, int y, int z) {
    int run = 0;
    for (; getSubblock(tmod, x + run, y, z); run++) {
        // advance until an empty cell (or the model edge) is hit
    }
    return run;
}
// Number of consecutive Z rows starting at (x,y,z) whose X run is at least xlen
private int scanZ(short[] tmod, int x, int y, int z, int xlen) {
    int depth = 0;
    for (; scanX(tmod, x, y, z + depth) >= xlen; depth++) {
        // extend while each row can cover the required X length
    }
    return depth;
}
// Number of consecutive Y layers starting at (x,y,z) covering at least xlen-by-zlen
private int scanY(short[] tmod, int x, int y, int z, int xlen, int zlen) {
    int height = 0;
    for (; scanZ(tmod, x, y + height, z, xlen) >= zlen; height++) {
        // extend while each layer can cover the required X/Z footprint
    }
    return height;
}
private void addSubblock(short[] tmod, int x, int y, int z, List<RenderPatch> list) {
// Find dimensions of cuboid
int xlen = scanX(tmod, x, y, z);
int zlen = scanZ(tmod, x, y, z, xlen);
int ylen = scanY(tmod, x, y, z, xlen, zlen);
// Add equivalent of boxblock
CustomRenderer.addBox(HDBlockModels.getPatchDefinitionFactory(), list, BLKSIZE * x, BLKSIZE * (x + xlen), BLKSIZE * y, BLKSIZE * (y + ylen), BLKSIZE * z, BLKSIZE * (z + zlen), HDBlockModels.boxPatchList);
// And remove blocks from model (since we have them covered)
for (int xx = 0; xx < xlen; xx++) {
for (int yy = 0; yy < ylen; yy++) {
for (int zz = 0; zz < zlen; zz++) {
tmod[MODELSCALE * MODELSCALE * (y + yy) + MODELSCALE * (z + zz) + (x + xx)] = 0;
}
}
}
}
private PatchDefinition[] getScaledModelAsPatches(short[] mod) {
ArrayList<RenderPatch> list = new ArrayList<RenderPatch>();
// Make copy
short[] tmod = Arrays.copyOf(mod, mod.length);
for (int y = 0; y < MODELSCALE; y++) {
for (int z = 0; z < MODELSCALE; z++) {
for (int x = 0; x < MODELSCALE; x++) {
if (getSubblock(tmod, x, y, z)) {
// If occupied, try to add to list
addSubblock(tmod, x, y, z, list);
}
}
}
}
PatchDefinition[] pd = new PatchDefinition[list.size()];
for (int i = 0; i < pd.length; i++) {
pd[i] = (PatchDefinition) list.get(i);
}
return pd;
}
// Update one dimension of the OBJ group name (GROUP_* index) and, if it changed,
// rebuild the cached "g ..." group line from all enabled dimensions.
// Returns the current group line (unchanged if the dimension is disabled or equal).
private String updateGroup(int grpIndex, String newgroup) {
    if (enabledGroups[grpIndex] && !newgroup.equals(group[grpIndex])) {
        group[grpIndex] = newgroup;
        StringBuilder line = new StringBuilder("g");
        for (int i = 0; i < GROUP_COUNT; i++) {
            if (enabledGroups[i]) {
                line.append(' ').append(group[i]);
            }
        }
        line.append('\n');
        groupline = line.toString();
    }
    return groupline;
}
// Query whether the given OBJ group dimension (GROUP_* index) is enabled;
// indices past the known dimensions report disabled.
public boolean getGroupEnabled(int grpIndex) {
    return (grpIndex < enabledGroups.length) && enabledGroups[grpIndex];
}
// Enable/disable emission of the given OBJ group dimension (GROUP_* index).
// Indices >= GROUP_COUNT are silently ignored.
public void setGroupEnabled(int grpIndex, boolean set) {
    if (grpIndex < enabledGroups.length) {
        enabledGroups[grpIndex] = set;
    }
}
// Base file name used for the .obj/.mtl entries inside the export ZIP
public String getBaseName() {
    return basename;
}
}
11
View Complete Implementation : OBJExport.java
Copyright Apache License 2.0
Author : webbukkit
Copyright Apache License 2.0
Author : webbukkit
public clreplaced OBJExport {
// Destination ZIP file
private final File destZipFile;
// Shader to be used for textures
private final HDShader shader;
// World to be rendered
private final DynmapWorld world;
private final DynmapCore core;
private final String basename;
// Minimum world coordinates to be rendered
private int minX, minY, minZ;
// Maximum world coordinates to be rendered
private int maxX, maxY, maxZ;
private static Charset UTF8 = Charset.forName("UTF-8");
// Output stream ZIP for result
private ZipOutputStream zos;
// Origin for exported model
private double originX, originY, originZ;
// Scale for exported model
private double scale = 1.0;
// Center at origin
private boolean centerOrigin = true;
// Default patches for solid block, indexed by BlockStep.ordinal()
private PatchDefinition[] defaultPathces;
// Set of defined material ids for RP
private HashSet<String> matIDs = new HashSet<String>();
private static clreplaced Face {
String groupLine;
String faceLine;
}
private HashMap<String, List<Face>> facesByTexture = new HashMap<String, List<Face>>();
private static final int MODELSCALE = 16;
private static final double BLKSIZE = 1.0 / (double) MODELSCALE;
// Index of group settings
public static final int GROUP_CHUNK = 0;
public static final int GROUP_TEXTURE = 1;
public static final int GROUP_BLOCKID = 2;
public static final int GROUP_BLOCKIDMETA = 3;
public static final int GROUP_COUNT = 4;
private String[] group = new String[GROUP_COUNT];
private boolean[] enabledGroups = new boolean[GROUP_COUNT];
private String groupline = null;
// Vertex set
private IndexedVector3DList vertices;
// UV set
private IndexedVector3DList uvs;
// Scaled models
private HDScaledBlockModels models;
public static final int ROT0 = 0;
public static final int ROT90 = 1;
public static final int ROT180 = 2;
public static final int ROT270 = 3;
public static final int HFLIP = 4;
private static final double[][] pp = { { 0, 0, 0, 1, 0, 0, 0, 0, 1 }, { 0, 1, 1, 1, 1, 1, 0, 1, 0 }, { 1, 0, 0, 0, 0, 0, 1, 1, 0 }, { 0, 0, 1, 1, 0, 1, 0, 1, 1 }, { 0, 0, 0, 0, 0, 1, 0, 1, 0 }, { 1, 0, 1, 1, 0, 0, 1, 1, 1 } };
/**
* Constructor for OBJ file export
* @param dest - destination file (ZIP)
* @param shader - shader to be used for coloring/texturing
* @param world - world to be rendered
* @param core - core object
* @param basename - base file name
*/
public OBJExport(File dest, HDShader shader, DynmapWorld world, DynmapCore core, String basename) {
destZipFile = dest;
this.shader = shader;
this.world = world;
this.core = core;
this.basename = basename;
this.defaultPathces = new PatchDefinition[6];
PatchDefinitionFactory fact = HDBlockModels.getPatchDefinitionFactory();
for (BlockStep s : BlockStep.values()) {
double[] p = pp[s.getFaceEntered()];
int ord = s.ordinal();
defaultPathces[ord] = fact.getPatch(p[0], p[1], p[2], p[3], p[4], p[5], p[6], p[7], p[8], 0, 1, 0, 1, 100, SideVisible.TOP, ord);
}
vertices = new IndexedVector3DList(new IndexedVector3DList.ListCallback() {
@Override
public void elementAdded(IndexedVector3DList list, IndexedVector3D newElement) {
try {
/* Minecraft XYZ maps to OBJ YZX */
addStringToExportedFile(String.format(Locale.US, "v %.4f %.4f %.4f\n", (newElement.x - originX) * scale, (newElement.y - originY) * scale, (newElement.z - originZ) * scale));
} catch (IOException iox) {
}
}
});
uvs = new IndexedVector3DList(new IndexedVector3DList.ListCallback() {
@Override
public void elementAdded(IndexedVector3DList list, IndexedVector3D newElement) {
try {
addStringToExportedFile(String.format(Locale.US, "vt %.4f %.4f\n", newElement.x, newElement.y));
} catch (IOException iox) {
}
}
});
// Get models
models = HDBlockModels.getModelsForScale(MODELSCALE);
}
/**
* Set render bounds
*
* @param minx - minimum X coord
* @param miny - minimum Y coord
* @param minz - minimum Z coord
* @param maxx - maximum X coord
* @param maxy - maximum Y coord
* @param maxz - maximum Z coord
*/
public void setRenderBounds(int minx, int miny, int minz, int maxx, int maxy, int maxz) {
if (minx < maxx) {
minX = minx;
maxX = maxx;
} else {
minX = maxx;
maxX = minx;
}
if (miny < maxy) {
minY = miny;
maxY = maxy;
} else {
minY = maxy;
maxY = miny;
}
if (minz < maxz) {
minZ = minz;
maxZ = maxz;
} else {
minZ = maxz;
maxZ = minz;
}
if (minY < 0)
minY = 0;
if (maxY >= world.worldheight)
maxY = world.worldheight - 1;
if (centerOrigin) {
originX = (maxX + minX) / 2.0;
originY = minY;
originZ = (maxZ + minZ) / 2.0;
}
}
/**
* Set origin for exported model
* @param ox - origin x
* @param oy - origin y
* @param oz - origin z
*/
public void setOrigin(double ox, double oy, double oz) {
originX = ox;
originY = oy;
originZ = oz;
centerOrigin = false;
}
/**
* Set scale for exported model
* @param scale = scale
*/
public void setScale(double scale) {
this.scale = scale;
}
/**
* Process export
*
* @param sender - command sender: use for feedback messages
* @return true if successful, false if not
*/
public boolean processExport(DynmapCommandSender sender) {
boolean good = false;
try {
// Open ZIP file destination
zos = new ZipOutputStream(new BufferedOutputStream(new FileOutputStream(destZipFile)));
List<DynmapChunk> requiredChunks = new ArrayList<DynmapChunk>();
int mincx = (minX >> 4);
int maxcx = (maxX + 15) >> 4;
int mincz = (minZ >> 4);
int maxcz = (maxZ + 15) >> 4;
boolean[] edgebits = new boolean[6];
startExportedFile(basename + ".obj");
// Add material library
addStringToExportedFile("mtllib " + basename + ".mtl\n");
// Loop through - do 8x8 chunks at a time (plus 1 border each way)
for (int cx = mincx; cx <= maxcx; cx += 4) {
for (int cz = mincz; cz <= maxcz; cz += 4) {
// Build chunk cache for block of chunks
requiredChunks.clear();
for (int i = -1; i < 5; i++) {
for (int j = -1; j < 5; j++) {
if (((cx + i) <= maxcx) && ((cz + j) <= maxcz) && ((cx + i) >= mincx) && ((cz + j) >= mincz)) {
requiredChunks.add(new DynmapChunk(cx + i, cz + j));
}
}
}
// Get the chunk buffer
MapChunkCache cache = core.getServer().createMapChunkCache(world, requiredChunks, true, false, true, false);
if (cache == null) {
throw new IOException("Error loading chunk cache");
}
MapIterator iter = cache.gereplacederator(minX, minY, minZ);
for (int x = cx * 16; (x < (cx * 16 + 64)) && (x <= maxX); x++) {
if (x < minX)
x = minX;
edgebits[BlockStep.X_PLUS.ordinal()] = (x == minX);
edgebits[BlockStep.X_MINUS.ordinal()] = (x == maxX);
for (int z = cz * 16; (z < (cz * 16 + 64)) && (z <= maxZ); z++) {
if (z < minZ)
z = minZ;
edgebits[BlockStep.Z_PLUS.ordinal()] = (z == minZ);
edgebits[BlockStep.Z_MINUS.ordinal()] = (z == maxZ);
iter.initialize(x, minY, z);
updateGroup(GROUP_CHUNK, "chunk" + (x >> 4) + "_" + (z >> 4));
// Do first (bottom)
edgebits[BlockStep.Y_MINUS.ordinal()] = true;
edgebits[BlockStep.Y_PLUS.ordinal()] = false;
int id = iter.getBlockTypeID();
if (id > 0) {
// Not air
handleBlock(id, iter, edgebits);
}
// Do middle
edgebits[BlockStep.Y_MINUS.ordinal()] = false;
for (int y = minY + 1; y < maxY; y++) {
iter.setY(y);
id = iter.getBlockTypeID();
if (id > 0) {
// Not air
handleBlock(id, iter, edgebits);
}
}
// Do top
edgebits[BlockStep.Y_PLUS.ordinal()] = true;
iter.setY(maxY);
id = iter.getBlockTypeID();
if (id > 0) {
// Not air
handleBlock(id, iter, edgebits);
}
}
}
// Output faces by texture
String grp = "";
for (String material : facesByTexture.keySet()) {
List<Face> faces = facesByTexture.get(material);
// Record material use
matIDs.add(material);
addStringToExportedFile(String.format("usemtl %s\n", material));
for (Face face : faces) {
if ((face.groupLine != null) && (!face.groupLine.equals(grp))) {
grp = face.groupLine;
addStringToExportedFile(grp);
}
addStringToExportedFile(face.faceLine);
}
}
// Clear face table
facesByTexture.clear();
// Clean up vertices we've moved past
vertices.resetSet(minX, minY, minZ, cx * 16 + 64, maxY, cz * 16 + 64);
}
}
finishExportedFile();
// If shader provided, add shader content to ZIP
if (shader != null) {
sender.sendMessage("Adding textures from shader " + shader.getName());
shader.exportAsMaterialLibrary(sender, this);
sender.sendMessage("Texture export completed");
}
// And close the ZIP
zos.finish();
zos.close();
zos = null;
good = true;
sender.sendMessage("Export completed - " + destZipFile.getPath());
} catch (IOException iox) {
sender.sendMessage("Export failed: " + iox.getMessage());
} finally {
if (zos != null) {
try {
zos.close();
} catch (IOException e) {
}
zos = null;
destZipFile.delete();
}
}
return good;
}
/**
* Start adding file to export
* @param fname - path/name of file in destination zip
* @throws IOException if error starting file
*/
public void startExportedFile(String fname) throws IOException {
ZipEntry ze = new ZipEntry(fname);
zos.putNextEntry(ze);
}
/**
* Add bytes to current exported file
* @param buf - buffer with bytes
* @param off - offset of start
* @param len - length to be added
* @throws IOException if error adding to file
*/
public void addBytesToExportedFile(byte[] buf, int off, int len) throws IOException {
zos.write(buf, off, len);
}
/**
* Add string to curent exported file (UTF-8)
* @param str - string to be written
* @throws IOException if error adding to file
*/
public void addStringToExportedFile(String str) throws IOException {
byte[] b = str.getBytes(UTF8);
zos.write(b, 0, b.length);
}
/**
* Finish adding file to export
* @throws IOException if error completing file
*/
public void finishExportedFile() throws IOException {
zos.closeEntry();
}
/**
* Handle block at current iterator coord
* @param id - block ID
* @param iter - iterator
* @param edgebits - bit N corresponds to side N being an endge (forge render)
*/
private void handleBlock(int blkid, MapIterator map, boolean[] edgebits) throws IOException {
BlockStep[] steps = BlockStep.values();
int[] txtidx = null;
int data = map.getBlockData();
// Get render data, if needed
int renderdata = HDBlockModels.getBlockRenderData(blkid, map);
// See if the block has a patch model
RenderPatch[] patches = models.getPatchModel(blkid, data, renderdata);
/* If no patches, see if custom model */
if (patches == null) {
CustomBlockModel cbm = models.getCustomBlockModel(blkid, data);
if (cbm != null) {
/* If so, get our meshes */
patches = cbm.getMeshForBlock(map);
}
}
if (patches != null) {
steps = new BlockStep[patches.length];
txtidx = new int[patches.length];
for (int i = 0; i < txtidx.length; i++) {
txtidx[i] = ((PatchDefinition) patches[i]).getTextureIndex();
steps[i] = ((PatchDefinition) patches[i]).step;
}
} else {
// See if volumetric
short[] smod = models.getScaledModel(blkid, data, renderdata);
if (smod != null) {
patches = getScaledModelAsPatches(smod);
steps = new BlockStep[patches.length];
txtidx = new int[patches.length];
for (int i = 0; i < patches.length; i++) {
PatchDefinition pd = (PatchDefinition) patches[i];
steps[i] = pd.step;
txtidx[i] = pd.getTextureIndex();
}
}
}
// Set block ID and ID+meta groups
updateGroup(GROUP_BLOCKID, "blk" + blkid);
updateGroup(GROUP_BLOCKIDMETA, "blk" + blkid + "_" + data);
// Get materials for patches
String[] mats = shader.getCurrentBlockMaterials(blkid, data, renderdata, map, txtidx, steps);
if (patches != null) {
// Patch based model?
for (int i = 0; i < patches.length; i++) {
addPatch((PatchDefinition) patches[i], map.getX(), map.getY(), map.getZ(), mats[i]);
}
} else {
boolean opaque = TexturePack.HDTextureMap.getTransparency(blkid) == BlockTransparency.OPAQUE;
for (int face = 0; face < 6; face++) {
// Get block in direction
int id2 = map.getBlockTypeIDAt(BlockStep.oppositeValues[face]);
// If we're not solid, or adjacent block is not solid, draw side
if ((!opaque) || (id2 == 0) || edgebits[face] || (TexturePack.HDTextureMap.getTransparency(id2) != BlockTransparency.OPAQUE)) {
addPatch(defaultPathces[face], map.getX(), map.getY(), map.getZ(), mats[face]);
}
}
}
}
private int[] getTextureUVs(PatchDefinition pd, int rot) {
int[] uv = new int[4];
if (rot == ROT0) {
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
} else if (rot == ROT90) {
// 90 degrees on texture
uv[0] = uvs.getVectorIndex(1.0 - pd.vmin, pd.umin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.vmin, pd.umax, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.vmax, pd.umax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.vmax, pd.umin, 0);
} else if (rot == ROT180) {
// 180 degrees on texture
uv[0] = uvs.getVectorIndex(1.0 - pd.umin, 1.0 - pd.vmin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.umax, 1.0 - pd.vmin, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.umax, 1.0 - pd.vmax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.umin, 1.0 - pd.vmax, 0);
} else if (rot == ROT270) {
// 270 degrees on texture
uv[0] = uvs.getVectorIndex(pd.vmin, 1.0 - pd.umin, 0);
uv[1] = uvs.getVectorIndex(pd.vmin, 1.0 - pd.umax, 0);
uv[2] = uvs.getVectorIndex(pd.vmax, 1.0 - pd.umax, 0);
uv[3] = uvs.getVectorIndex(pd.vmax, 1.0 - pd.umin, 0);
} else if (rot == HFLIP) {
uv[0] = uvs.getVectorIndex(1.0 - pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(1.0 - pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(1.0 - pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(1.0 - pd.umin, pd.vmax, 0);
} else {
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
}
return uv;
}
/**
* Add patch as face to output
*/
private void addPatch(PatchDefinition pd, double x, double y, double z, String material) throws IOException {
// No material? No face
if (material == null) {
return;
}
int rot = 0;
// Check for rotation modifier
int rotidx = material.indexOf('@');
if (rotidx >= 0) {
// 0-3
rot = material.charAt(rotidx + 1) - '0';
material = material.substring(0, rotidx);
}
int[] v = new int[4];
int[] uv = getTextureUVs(pd, rot);
// Get offsets for U and V from origin
double ux = pd.xu - pd.x0;
double uy = pd.yu - pd.y0;
double uz = pd.zu - pd.z0;
double vx = pd.xv - pd.x0;
double vy = pd.yv - pd.y0;
double vz = pd.zv - pd.z0;
// Offset to origin corner
x = x + pd.x0;
y = y + pd.y0;
z = z + pd.z0;
// Origin corner, offset by umin, vmin
v[0] = vertices.getVectorIndex(x + ux * pd.umin + vx * pd.vmin, y + uy * pd.umin + vy * pd.vmin, z + uz * pd.umin + vz * pd.vmin);
uv[0] = uvs.getVectorIndex(pd.umin, pd.vmin, 0);
// Second is end of U (umax, vmin)
v[1] = vertices.getVectorIndex(x + ux * pd.umax + vx * pd.vmin, y + uy * pd.umax + vy * pd.vmin, z + uz * pd.umax + vz * pd.vmin);
uv[1] = uvs.getVectorIndex(pd.umax, pd.vmin, 0);
// Third is end of U+V (umax, vmax)
v[2] = vertices.getVectorIndex(x + ux * pd.umax + vx * pd.vmax, y + uy * pd.umax + vy * pd.vmax, z + uz * pd.umax + vz * pd.vmax);
uv[2] = uvs.getVectorIndex(pd.umax, pd.vmax, 0);
// Forth is end of V (umin, vmax)
v[3] = vertices.getVectorIndex(x + ux * pd.umin + vx * pd.vmax, y + uy * pd.umin + vy * pd.vmax, z + uz * pd.umin + vz * pd.vmax);
uv[3] = uvs.getVectorIndex(pd.umin, pd.vmax, 0);
// Add patch to file
addPatchToFile(v, uv, pd.sidevis, material, rot);
}
private void addPatchToFile(int[] v, int[] uv, SideVisible sv, String material, int rot) throws IOException {
List<Face> faces = facesByTexture.get(material);
if (faces == null) {
faces = new ArrayList<Face>();
facesByTexture.put(material, faces);
}
// If needed, rotate the UV sequence
if (rot == HFLIP) {
// Flip horizonntal
int[] newuv = new int[uv.length];
for (int i = 0; i < uv.length; i++) {
newuv[i] = uv[i ^ 1];
}
uv = newuv;
} else if (rot != ROT0) {
int[] newuv = new int[uv.length];
for (int i = 0; i < uv.length; i++) {
newuv[i] = uv[(i + 4 - rot) % uv.length];
}
uv = newuv;
}
Face f = new Face();
f.groupLine = updateGroup(GROUP_TEXTURE, material);
switch(sv) {
case TOP:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
break;
case BOTTOM:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[3], v[2], uv[2], v[1], uv[1], v[0], uv[0]);
break;
case BOTH:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
f.faceLine += String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[3], v[2], uv[2], v[1], uv[1], v[0], uv[0]);
break;
case FLIP:
f.faceLine = String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[0], uv[0], v[1], uv[1], v[2], uv[2], v[3], uv[3]);
f.faceLine += String.format("f %d/%d %d/%d %d/%d %d/%d\n", v[3], uv[2], v[2], uv[3], v[1], uv[0], v[0], uv[1]);
break;
}
faces.add(f);
}
public Set<String> getMaterialIDs() {
return matIDs;
}
private static final boolean getSubblock(short[] mod, int x, int y, int z) {
if ((x >= 0) && (x < MODELSCALE) && (y >= 0) && (y < MODELSCALE) && (z >= 0) && (z < MODELSCALE)) {
return mod[MODELSCALE * MODELSCALE * y + MODELSCALE * z + x] != 0;
}
return false;
}
// Scan along X axis
private int scanX(short[] tmod, int x, int y, int z) {
int xlen = 0;
while (getSubblock(tmod, x + xlen, y, z)) {
xlen++;
}
return xlen;
}
// Scan along Z axis for rows matching given x length
private int scanZ(short[] tmod, int x, int y, int z, int xlen) {
int zlen = 0;
while (scanX(tmod, x, y, z + zlen) >= xlen) {
zlen++;
}
return zlen;
}
// Scan along Y axis for layers matching given X and Z lengths
private int scanY(short[] tmod, int x, int y, int z, int xlen, int zlen) {
int ylen = 0;
while (scanZ(tmod, x, y + ylen, z, xlen) >= zlen) {
ylen++;
}
return ylen;
}
private void addSubblock(short[] tmod, int x, int y, int z, List<RenderPatch> list) {
// Find dimensions of cuboid
int xlen = scanX(tmod, x, y, z);
int zlen = scanZ(tmod, x, y, z, xlen);
int ylen = scanY(tmod, x, y, z, xlen, zlen);
// Add equivalent of boxblock
CustomRenderer.addBox(HDBlockModels.getPatchDefinitionFactory(), list, BLKSIZE * x, BLKSIZE * (x + xlen), BLKSIZE * y, BLKSIZE * (y + ylen), BLKSIZE * z, BLKSIZE * (z + zlen), HDBlockModels.boxPatchList);
// And remove blocks from model (since we have them covered)
for (int xx = 0; xx < xlen; xx++) {
for (int yy = 0; yy < ylen; yy++) {
for (int zz = 0; zz < zlen; zz++) {
tmod[MODELSCALE * MODELSCALE * (y + yy) + MODELSCALE * (z + zz) + (x + xx)] = 0;
}
}
}
}
private PatchDefinition[] getScaledModelAsPatches(short[] mod) {
ArrayList<RenderPatch> list = new ArrayList<RenderPatch>();
// Make copy
short[] tmod = Arrays.copyOf(mod, mod.length);
for (int y = 0; y < MODELSCALE; y++) {
for (int z = 0; z < MODELSCALE; z++) {
for (int x = 0; x < MODELSCALE; x++) {
if (getSubblock(tmod, x, y, z)) {
// If occupied, try to add to list
addSubblock(tmod, x, y, z, list);
}
}
}
}
PatchDefinition[] pd = new PatchDefinition[list.size()];
for (int i = 0; i < pd.length; i++) {
pd[i] = (PatchDefinition) list.get(i);
}
return pd;
}
private String updateGroup(int grpIndex, String newgroup) {
if (enabledGroups[grpIndex]) {
if (!newgroup.equals(group[grpIndex])) {
group[grpIndex] = newgroup;
String newline = "g";
for (int i = 0; i < GROUP_COUNT; i++) {
if (enabledGroups[i]) {
newline += " " + group[i];
}
}
newline += "\n";
groupline = newline;
}
}
return groupline;
}
public boolean getGroupEnabled(int grpIndex) {
if (grpIndex < enabledGroups.length) {
return enabledGroups[grpIndex];
} else {
return false;
}
}
public void setGroupEnabled(int grpIndex, boolean set) {
if (grpIndex < enabledGroups.length) {
enabledGroups[grpIndex] = set;
}
}
public String getBaseName() {
return basename;
}
}
10
View Complete Implementation: ForgeMapChunkCache.java
Copyright: Apache License 2.0
Author: webbukkit
/**
* Container for managing chunks - dependent upon using chunk snapshots, since rendering is off server thread
*/
public clreplaced ForgeMapChunkCache extends MapChunkCache {
private static boolean init = false;
private static Field unloadqueue = null;
private static Field currentchunkloader = null;
private static Field updateEnreplacedyTick = null;
/* AnvilChunkLoader fields */
// Map
private static Field chunksToRemove = null;
// Set
private static Field pendingAnvilChunksCoordinates = null;
// writeChunkToNBT(Chunk c, World w, NBTTagCompound nbt)
private static Method writechunktonbt = null;
/* AnvilChunkLoaderPending fields */
private static Field chunkCoord = null;
private static Field nbtTag = null;
private World w;
private DynmapWorld dw;
private ChunkProviderServer cps;
private int nsect;
private List<DynmapChunk> chunks;
private Lisreplacederator<DynmapChunk> iterator;
private int x_min, x_max, z_min, z_max;
private int x_dim;
private boolean biome, biomeraw, highesty, blockdata;
private HiddenChunkStyle hidestyle = HiddenChunkStyle.FILL_AIR;
private List<VisibilityLimit> visible_limits = null;
private List<VisibilityLimit> hidden_limits = null;
private boolean isempty = true;
private int snapcnt;
private ChunkSnapshot[] snaparray;
/* Index = (x-x_min) + ((z-z_min)*x_dim) */
private DynIntHashMap[] snaptile;
private byte[][] sameneighborbiomecnt;
private BiomeMap[][] biomemap;
private boolean[][] isSectionNotEmpty;
/* Indexed by snapshot index, then by section index */
private Set<?> queue = null;
private static final BlockStep[] unstep = { BlockStep.X_MINUS, BlockStep.Y_MINUS, BlockStep.Z_MINUS, BlockStep.X_PLUS, BlockStep.Y_PLUS, BlockStep.Z_PLUS };
private static BiomeMap[] biome_to_bmap;
private static final int getIndexInChunk(int cx, int cy, int cz) {
return (cy << 8) | (cz << 4) | cx;
}
/**
* Iterator for traversing map chunk cache (base is for non-snapshot)
*/
public clreplaced OurMapIterator implements MapIterator {
private int x, y, z, chunkindex, bx, bz;
private ChunkSnapshot snap;
private BlockStep laststep;
private DynmapBlockState blk;
private final int worldheight;
private final int x_base;
private final int z_base;
OurMapIterator(int x0, int y0, int z0) {
x_base = x_min << 4;
z_base = z_min << 4;
if (biome) {
biomePrep();
}
initialize(x0, y0, z0);
worldheight = w.getHeight();
}
@Override
public final void initialize(int x0, int y0, int z0) {
this.x = x0;
this.y = y0;
this.z = z0;
this.chunkindex = ((x >> 4) - x_min) + (((z >> 4) - z_min) * x_dim);
this.bx = x & 0xF;
this.bz = z & 0xF;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
laststep = BlockStep.Y_MINUS;
if ((y >= 0) && (y < worldheight)) {
blk = null;
} else {
blk = DynmapBlockState.AIR;
}
}
@Override
public int getBlockSkyLight() {
try {
return snap.getBlockSkyLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 15;
}
}
@Override
public final int getBlockEmittedLight() {
try {
return snap.getBlockEmittedLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 0;
}
}
private void biomePrep() {
if (sameneighborbiomecnt != null) {
return;
}
int x_size = x_dim << 4;
int z_size = (z_max - z_min + 1) << 4;
sameneighborbiomecnt = new byte[x_size][];
biomemap = new BiomeMap[x_size][];
for (int i = 0; i < x_size; i++) {
sameneighborbiomecnt[i] = new byte[z_size];
biomemap[i] = new BiomeMap[z_size];
}
for (int i = 0; i < x_size; i++) {
for (int j = 0; j < z_size; j++) {
if (j == 0)
initialize(i + x_base, 64, z_base);
else
stepPosition(BlockStep.Z_PLUS);
int bb = snap.getBiome(bx, bz);
BiomeMap bm = BiomeMap.byBiomeID(bb);
biomemap[i][j] = bm;
int cnt = 0;
if (i > 0) {
if (bm == biomemap[i - 1][j]) /* Same as one to left */
{
cnt++;
sameneighborbiomecnt[i - 1][j]++;
}
if ((j > 0) && (bm == biomemap[i - 1][j - 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j - 1]++;
}
if ((j < (z_size - 1)) && (bm == biomemap[i - 1][j + 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j + 1]++;
}
}
if ((j > 0) && (biomemap[i][j] == biomemap[i][j - 1])) /* Same as one to above */
{
cnt++;
sameneighborbiomecnt[i][j - 1]++;
}
sameneighborbiomecnt[i][j] = (byte) cnt;
}
}
}
@Override
public final BiomeMap getBiome() {
try {
return biomemap[x - x_base][z - z_base];
} catch (Exception ex) {
return BiomeMap.NULL;
}
}
@Override
public final int getSmoothGrreplacedColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothFoliageColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothColorMultiplier(int[] colormap, int[] swampmap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
if (bm == BiomeMap.SWAMPLAND) {
mult = swampmap[bm.biomeLookup()];
} else {
mult = colormap[bm.biomeLookup()];
}
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult;
if (bm == BiomeMap.SWAMPLAND) {
rmult = swampmap[bm.biomeLookup()];
} else {
rmult = colormap[bm.biomeLookup()];
}
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothWaterColorMultiplier() {
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
return bm.gereplacederColorMult();
}
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int mult = bm.gereplacederColorMult();
racreplaced += (mult >> 16) & 0xFF;
gacreplaced += (mult >> 8) & 0xFF;
bacreplaced += mult & 0xFF;
}
}
return ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
} catch (Exception x) {
return 0xFFFFFF;
}
}
@Override
public final int getSmoothWaterColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = colormap[bm.biomeLookup()];
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = colormap[bm.biomeLookup()];
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
/**
* Step current position in given direction
*/
@Override
public final void stepPosition(BlockStep step) {
blk = null;
switch(step.ordinal()) {
case 0:
x++;
bx++;
if (bx == 16) /* Next chunk? */
{
bx = 0;
chunkindex++;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 1:
y++;
if (y >= worldheight) {
blk = DynmapBlockState.AIR;
}
break;
case 2:
z++;
bz++;
if (bz == 16) /* Next chunk? */
{
bz = 0;
chunkindex += x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 3:
x--;
bx--;
if (bx == -1) /* Next chunk? */
{
bx = 15;
chunkindex--;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 4:
y--;
if (y < 0) {
blk = DynmapBlockState.AIR;
}
break;
case 5:
z--;
bz--;
if (bz == -1) /* Next chunk? */
{
bz = 15;
chunkindex -= x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
}
laststep = step;
}
/**
* Unstep current position to previous position
*/
@Override
public BlockStep unstepPosition() {
BlockStep ls = laststep;
stepPosition(unstep[ls.ordinal()]);
return ls;
}
/**
* Unstep current position in oppisite director of given step
*/
@Override
public void unstepPosition(BlockStep s) {
stepPosition(unstep[s.ordinal()]);
}
@Override
public final void setY(int y) {
if (y > this.y) {
laststep = BlockStep.Y_PLUS;
} else {
laststep = BlockStep.Y_MINUS;
}
this.y = y;
if ((y < 0) || (y >= worldheight)) {
blk = DynmapBlockState.AIR;
} else {
blk = null;
}
}
@Override
public final int getX() {
return x;
}
@Override
public final int getY() {
return y;
}
@Override
public final int getZ() {
return z;
}
@Override
public final DynmapBlockState getBlockTypeAt(BlockStep s) {
if (s == BlockStep.Y_MINUS) {
if (y > 0) {
return snap.getBlockType(bx, y - 1, bz);
}
} else if (s == BlockStep.Y_PLUS) {
if (y < (worldheight - 1)) {
return snap.getBlockType(bx, y + 1, bz);
}
} else {
BlockStep ls = laststep;
stepPosition(s);
DynmapBlockState tid = snap.getBlockType(bx, y, bz);
unstepPosition();
laststep = ls;
return tid;
}
return DynmapBlockState.AIR;
}
@Override
public BlockStep getLastStep() {
return laststep;
}
@Override
public int getWorldHeight() {
return worldheight;
}
@Override
public long getBlockKey() {
return (((chunkindex * worldheight) + y) << 8) | (bx << 4) | bz;
}
@Override
public final boolean isEmptySection() {
try {
return !isSectionNotEmpty[chunkindex][y >> 4];
} catch (Exception x) {
initSectionData(chunkindex);
return !isSectionNotEmpty[chunkindex][y >> 4];
}
}
@Override
public RenderPatchFactory getPatchFactory() {
return HDBlockModels.getPatchDefinitionFactory();
}
@Override
public Object getBlockTileEnreplacedyField(String fieldId) {
try {
int idx = getIndexInChunk(bx, y, bz);
Object[] vals = (Object[]) snaptile[chunkindex].get(idx);
for (int i = 0; i < vals.length; i += 2) {
if (vals[i].equals(fieldId)) {
return vals[i + 1];
}
}
} catch (Exception x) {
}
return null;
}
@Override
public DynmapBlockState getBlockTypeAt(int xoff, int yoff, int zoff) {
int xx = this.x + xoff;
int yy = this.y + yoff;
int zz = this.z + zoff;
int idx = ((xx >> 4) - x_min) + (((zz >> 4) - z_min) * x_dim);
try {
return snaparray[idx].getBlockType(xx & 0xF, yy, zz & 0xF);
} catch (Exception x) {
return DynmapBlockState.AIR;
}
}
@Override
public Object getBlockTileEnreplacedyFieldAt(String fieldId, int xoff, int yoff, int zoff) {
return null;
}
@Override
public long getInhabitedTicks() {
try {
return snap.getInhabitedTicks();
} catch (Exception x) {
return 0;
}
}
@Override
public DynmapBlockState getBlockType() {
if (blk == null) {
blk = snap.getBlockType(bx, y, bz);
}
return blk;
}
}
private clreplaced OurEndMapIterator extends OurMapIterator {
OurEndMapIterator(int x0, int y0, int z0) {
super(x0, y0, z0);
}
@Override
public final int getBlockSkyLight() {
return 15;
}
}
/**
* Chunk cache for representing unloaded chunk (or air)
*/
private static clreplaced EmptyChunk extends ChunkSnapshot {
public EmptyChunk() {
super(256, 0, 0, 0, 0);
}
/* Need these for interface, but not used */
@Override
public int getX() {
return 0;
}
@Override
public int getZ() {
return 0;
}
@Override
public final DynmapBlockState getBlockType(int x, int y, int z) {
return DynmapBlockState.AIR;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
return 0;
}
@Override
public int getBiome(int x, int z) {
return -1;
}
@Override
public boolean isSectionEmpty(int sy) {
return true;
}
}
/**
* Placeholder snapshot for a hidden chunk rendered as a uniform fill
* (stone or water) up to y=64, air above. Shared via the STONE/OCEAN singletons.
*/
private static clreplaced PlainChunk extends ChunkSnapshot {
// Block state used below y=64 (resolved once from the block name).
private DynmapBlockState fill;
PlainChunk(String fill) {
super(256, 0, 0, 0, 0);
this.fill = DynmapBlockState.getBaseStateByName(fill);
}
/* Need these for interface, but not used */
@Override
public int getX() {
return 0;
}
@Override
public int getZ() {
return 0;
}
@Override
public int getBiome(int x, int z) {
// -1 = no biome information available.
return -1;
}
@Override
public final DynmapBlockState getBlockType(int x, int y, int z) {
// Solid fill up to (but not including) y=64; air above.
if (y < 64) {
return fill;
}
return DynmapBlockState.AIR;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
// Dark inside the fill, fully lit above it.
if (y < 64) {
return 0;
}
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
// Surface is the top of the fill layer.
return 64;
}
@Override
public boolean isSectionEmpty(int sy) {
// Sections 0-3 (y 0-63) hold the fill; everything above is empty.
return (sy < 4);
}
}
// Shared placeholder snapshots used for unloaded chunks and the configured
// hidden-chunk fill styles (FILL_AIR / FILL_STONE_PLAIN / FILL_OCEAN).
private static final EmptyChunk EMPTY = new EmptyChunk();
private static final PlainChunk STONE = new PlainChunk(DynmapBlockState.STONE_BLOCK);
private static final PlainChunk OCEAN = new PlainChunk(DynmapBlockState.WATER_BLOCK);
/**
* One-time reflective discovery of private Minecraft/Forge internals this cache
* needs: the chunk-unload queue, the chunk loader field, the entity-tick counter,
* AnvilChunkLoader's pending-save structures, and its writeChunkToNBT method.
* Fields are matched by type (names are obfuscated across MC versions).
* NOTE(review): "clreplaced"/"isreplacedignableFrom"/"Enreplacedy" are scrape
* artifacts of "class"/"isAssignableFrom"/"Entity" — restore before compiling.
*/
public static void init() {
if (!init) {
// ChunkProviderServer: find the unload queue (a Set) and the chunk loader.
Field[] f = ChunkProviderServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((unloadqueue == null) && f[i].getType().isreplacedignableFrom(java.util.Set.clreplaced)) {
unloadqueue = f[i];
// Log.info("Found unloadqueue - " + f[i].getName());
unloadqueue.setAccessible(true);
} else if ((currentchunkloader == null) && f[i].getType().isreplacedignableFrom(IChunkLoader.clreplaced)) {
currentchunkloader = f[i];
// Log.info("Found currentchunkprovider - " + f[i].getName());
currentchunkloader.setAccessible(true);
}
}
// WorldServer: first int field is assumed to be the entity-tick counter.
f = WorldServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((updateEnreplacedyTick == null) && f[i].getType().isreplacedignableFrom(int.clreplaced)) {
updateEnreplacedyTick = f[i];
// Log.info("Found updateEnreplacedyTick - " + f[i].getName());
updateEnreplacedyTick.setAccessible(true);
}
}
// AnvilChunkLoader: pending-save map (Map) and in-flight coordinate set (Set).
f = AnvilChunkLoader.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((chunksToRemove == null) && (f[i].getType().equals(Map.clreplaced))) {
chunksToRemove = f[i];
chunksToRemove.setAccessible(true);
} else if ((pendingAnvilChunksCoordinates == null) && (f[i].getType().equals(Set.clreplaced))) {
pendingAnvilChunksCoordinates = f[i];
pendingAnvilChunksCoordinates.setAccessible(true);
}
}
// Get writeChunkToNBT method
// Matched by signature (Chunk, World, NBTTagCompound) since the name is obfuscated.
Method[] ma = AnvilChunkLoader.clreplaced.getDeclaredMethods();
for (Method m : ma) {
Clreplaced<?>[] p = m.getParameterTypes();
if ((p.length == 3) && (p[0].equals(Chunk.clreplaced)) && (p[1].equals(World.clreplaced)) && (p[2].equals(NBTTagCompound.clreplaced))) {
writechunktonbt = m;
m.setAccessible(true);
break;
}
}
// Missing handles degrade functionality but must not crash the plugin.
if ((unloadqueue == null) || (currentchunkloader == null) || (writechunktonbt == null)) {
Log.severe("ERROR: cannot find unload queue or chunk provider field - dynmap cannot load chunks");
}
if (updateEnreplacedyTick == null) {
Log.severe("ERROR: cannot find updateEnreplacedyTick - dynmap cannot drive enreplacedy cleanup when no players are active");
}
init = true;
}
}
/**
* Construct empty cache; resolves the reflective handles once (idempotent).
*/
public ForgeMapChunkCache() {
init();
}
/**
* Bind this cache to a world and the list of chunks to be rendered.
* Computes the bounding rectangle of the chunk list and allocates the
* snapshot/tile-data arrays indexed by (x - x_min) + ((z - z_min) * x_dim).
* Must be called before loadChunks()/readChunks().
*/
public void setChunks(ForgeWorld dw, List<DynmapChunk> chunks) {
this.dw = dw;
this.w = dw.getWorld();
if (dw.isLoaded()) {
/* Check if world's provider is ChunkProviderServer */
IChunkProvider cp = this.w.getChunkProvider();
if (cp instanceof ChunkProviderServer) {
cps = (ChunkProviderServer) cp;
} else {
Log.severe("Error: world " + dw.getName() + " has unsupported chunk provider");
}
} else {
// Unloaded world: render nothing (replace caller's list with an empty one).
chunks = new ArrayList<DynmapChunk>();
}
// Number of 16-block vertical sections in this world.
nsect = dw.worldheight >> 4;
this.chunks = chunks;
/* Compute range */
if (chunks.size() == 0) {
this.x_min = 0;
this.x_max = 0;
this.z_min = 0;
this.z_max = 0;
x_dim = 1;
} else {
// Bounding box of all requested chunk coordinates.
x_min = x_max = chunks.get(0).x;
z_min = z_max = chunks.get(0).z;
for (DynmapChunk c : chunks) {
if (c.x > x_max) {
x_max = c.x;
}
if (c.x < x_min) {
x_min = c.x;
}
if (c.z > z_max) {
z_max = c.z;
}
if (c.z < z_min) {
z_min = c.z;
}
}
x_dim = x_max - x_min + 1;
}
snapcnt = x_dim * (z_max - z_min + 1);
snaparray = new ChunkSnapshot[snapcnt];
snaptile = new DynIntHashMap[snapcnt];
isSectionNotEmpty = new boolean[snapcnt][];
// Grab the server's chunk-unload queue reflectively (best effort;
// failures simply leave queue null and unload checks return false).
try {
if ((unloadqueue != null) && (cps != null)) {
queue = (Set<?>) unloadqueue.get(cps);
}
} catch (IllegalArgumentException iax) {
} catch (IllegalAccessException e) {
}
}
// Latch so the unsupported-chunk-loader error is logged only once per server run.
private static boolean didError = false;
/**
* Read a chunk's "Level" NBT compound directly from storage, bypassing the
* normal load path. Checks AnvilChunkLoader's pending-save queue first (so
* not-yet-flushed chunks are seen), then falls back to the region file.
* Returns null if unsupported loader, chunk not generated, or read error.
* NOTE(review): "getClreplaced"/"clreplaced" are scrape artifacts of
* "getClass"/"class" — restore before compiling.
*/
public NBTTagCompound readChunk(int x, int z) {
if ((cps == null) || (!(cps.chunkLoader instanceof AnvilChunkLoader)) || (((chunksToRemove == null) || (pendingAnvilChunksCoordinates == null)))) {
if (!didError) {
Log.severe("**** DYNMAP CANNOT READ CHUNKS (UNSUPPORTED CHUNK LOADER) ****");
didError = true;
}
return null;
}
try {
AnvilChunkLoader acl = (AnvilChunkLoader) cps.chunkLoader;
Map<?, ?> chunkstoremove = null;
Set<?> pendingcoords = null;
chunkstoremove = (Map<?, ?>) chunksToRemove.get(acl);
pendingcoords = (Set<?>) pendingAnvilChunksCoordinates.get(acl);
NBTTagCompound rslt = null;
ChunkPos coord = new ChunkPos(x, z);
if (pendingcoords.contains(coord)) {
// Chunk has unflushed data: pull the NBT out of the pending-save entry.
for (Object o : chunkstoremove.values()) {
if (chunkCoord == null) {
// Lazily discover the pending-entry's coord/NBT fields by type.
Field[] f = o.getClreplaced().getDeclaredFields();
for (Field ff : f) {
if ((chunkCoord == null) && (ff.getType().equals(ChunkPos.clreplaced))) {
chunkCoord = ff;
chunkCoord.setAccessible(true);
} else if ((nbtTag == null) && (ff.getType().equals(NBTTagCompound.clreplaced))) {
nbtTag = ff;
nbtTag.setAccessible(true);
}
}
if ((chunkCoord == null) || (nbtTag == null)) {
Log.severe("Error getting chunkCoord and nbtTag for Forge");
return null;
}
}
ChunkPos occ = (ChunkPos) chunkCoord.get(o);
if (occ.equals(coord)) {
rslt = (NBTTagCompound) nbtTag.get(o);
break;
}
}
}
if (rslt == null) {
// Not pending: read the chunk straight from the region file.
DataInputStream str = RegionFileCache.getChunkInputStream(acl.chunkSaveLocation, x, z);
if (str == null) {
// Chunk not generated yet.
return null;
}
rslt = CompressedStreamTools.read(str);
}
if (rslt != null)
rslt = rslt.getCompoundTag("Level");
return rslt;
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;
}
}
/**
* Recursively convert an NBT tag to a plain Java value (boxed primitive,
* primitive array, String, ArrayList, or HashMap), keyed off the NBT type id.
* Returns null for unrecognized tag types. Within lists, only element types
* 5/6/8/10/11 (float/double/string/compound/int[]) are converted.
*/
private Object getNBTValue(NBTBase v) {
Object val = null;
switch(v.getId()) {
case // Byte
1:
val = Byte.valueOf(((NBTTagByte) v).getByte());
break;
case // Short
2:
val = Short.valueOf(((NBTTagShort) v).getShort());
break;
case // Int
3:
val = Integer.valueOf(((NBTTagInt) v).getInt());
break;
case // Long
4:
val = Long.valueOf(((NBTTagLong) v).getLong());
break;
case // Float
5:
val = Float.valueOf(((NBTTagFloat) v).getFloat());
break;
case // Double
6:
val = Double.valueOf(((NBTTagDouble) v).getDouble());
break;
case // Byte[]
7:
val = ((NBTTagByteArray) v).getByteArray();
break;
case // String
8:
val = ((NBTTagString) v).getString();
break;
case // List
9:
NBTTagList tl = (NBTTagList) v;
ArrayList<Object> vlist = new ArrayList<Object>();
int type = tl.getTagType();
for (int i = 0; i < tl.tagCount(); i++) {
switch(type) {
case 5:
float fv = tl.getFloatAt(i);
vlist.add(fv);
break;
case 6:
double dv = tl.getDoubleAt(i);
vlist.add(dv);
break;
case 8:
String sv = tl.getStringTagAt(i);
vlist.add(sv);
break;
case 10:
NBTTagCompound tc = tl.getCompoundTagAt(i);
vlist.add(getNBTValue(tc));
break;
case 11:
int[] ia = tl.getIntArrayAt(i);
vlist.add(ia);
break;
}
}
val = vlist;
break;
case // Map
10:
NBTTagCompound tc = (NBTTagCompound) v;
HashMap<String, Object> vmap = new HashMap<String, Object>();
for (Object t : tc.getKeySet()) {
String st = (String) t;
NBTBase tg = tc.getTag(st);
vmap.put(st, getNBTValue(tg));
}
val = vmap;
break;
case // Int[]
11:
val = ((NBTTagIntArray) v).getIntArray();
break;
}
return val;
}
/**
 * Apply the configured area limits to a chunk: if visible limits exist the
 * chunk must intersect at least one of them; a matching hidden limit then
 * overrides and hides it. No limits configured means visible.
 */
private boolean isChunkVisible(DynmapChunk chunk) {
    boolean visible = (visible_limits == null);
    if (!visible) {
        // Whitelist pass: any intersecting visible limit makes it a candidate.
        for (VisibilityLimit lim : visible_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                visible = true;
                break;
            }
        }
    }
    if (visible && (hidden_limits != null)) {
        // Blacklist pass: hidden limits take precedence over visible ones.
        for (VisibilityLimit lim : hidden_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                visible = false;
                break;
            }
        }
    }
    return visible;
}
/**
* Try to satisfy a chunk from the plugin-wide snapshot cache. On a hit, the
* snapshot (or the hidden-style placeholder, if the chunk is not visible) and
* its tile data are installed into the local arrays. Returns true on a hit.
*/
private boolean tryChunkCache(DynmapChunk chunk, boolean vis) {
/* Check if cached chunk snapshot found */
ChunkSnapshot ss = null;
SnapshotRec ssr = DynmapPlugin.plugin.sscache.getSnapshot(dw.getName(), chunk.x, chunk.z, blockdata, biome, biomeraw, highesty);
if (ssr != null) {
ss = ssr.ss;
if (!vis) {
// Hidden chunk: substitute the configured fill placeholder.
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
}
int idx = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
snaparray[idx] = ss;
snaptile[idx] = ssr.tileData;
}
return (ssr != null);
}
/** Check whether the server has this chunk queued for unload. */
private boolean isChunkUnloadPending(DynmapChunk chunk) {
    if (queue == null) {
        // Unload queue was not resolved reflectively; assume not pending.
        return false;
    }
    // The queue stores packed chunk coordinates as boxed Long values.
    return queue.contains(Long.valueOf(ChunkPos.asLong(chunk.x, chunk.z)));
}
// Build a ChunkSnapshot plus tile-entity field data from raw chunk NBT, and
// register the result in the plugin-wide snapshot cache. Only tile-entity
// fields that HDBlockModels declares as needed for the block are captured,
// stored as a flat [id, value, id, value, ...] array keyed by in-chunk index.
// NOTE(review): "TileEnreplacedies" is the scrape-mangled NBT key "TileEntities";
// it is a runtime string and must be restored, not edited cosmetically.
private SnapshotRec prepChunkSnapshot(DynmapChunk chunk, NBTTagCompound nbt) {
ChunkSnapshot ss = new ChunkSnapshot(nbt, dw.worldheight);
DynIntHashMap tileData = new DynIntHashMap();
NBTTagList tiles = nbt.getTagList("TileEnreplacedies", 10);
if (tiles == null)
tiles = new NBTTagList();
/* Collect tile-entity field data */
List<Object> vals = new ArrayList<Object>();
for (int tid = 0; tid < tiles.tagCount(); tid++) {
NBTTagCompound tc = tiles.getCompoundTagAt(tid);
int tx = tc.getInteger("x");
int ty = tc.getInteger("y");
int tz = tc.getInteger("z");
// Chunk-local coordinates of the tile entity.
int cx = tx & 0xF;
int cz = tz & 0xF;
DynmapBlockState blk = ss.getBlockType(cx, ty, cz);
String[] te_fields = HDBlockModels.getTileEnreplacedyFieldsNeeded(blk);
if (te_fields != null) {
vals.clear();
for (String id : te_fields) {
NBTBase v = tc.getTag(id);
/* Get field */
if (v != null) {
Object val = getNBTValue(v);
if (val != null) {
vals.add(id);
vals.add(val);
}
}
}
if (vals.size() > 0) {
Object[] vlist = vals.toArray(new Object[vals.size()]);
tileData.put(getIndexInChunk(cx, ty, cz), vlist);
}
}
}
SnapshotRec ssr = new SnapshotRec();
ssr.ss = ss;
ssr.tileData = tileData;
DynmapPlugin.plugin.sscache.putSnapshot(dw.getName(), chunk.x, chunk.z, ssr, blockdata, biome, biomeraw, highesty);
return ssr;
}
/**
* Read NBT data from currently-loaded chunks - needs to be called from the
* server/world thread to be safe (it invokes writeChunkToNBT on live chunks).
* Chunks already snapshotted, cache hits, and unload-pending chunks are skipped.
* @return number of chunks captured (cache hits plus live loads)
*/
public int getLoadedChunks() {
int cnt = 0;
if (!dw.isLoaded()) {
isempty = true;
unloadChunks();
return 0;
}
// NOTE(review): "Lisreplacederator"/"lisreplacederator" are scrape artifacts
// of "ListIterator"/"listIterator" — restore before compiling.
Lisreplacederator<DynmapChunk> iter = chunks.lisreplacederator();
while (iter.hasNext()) {
long startTime = System.nanoTime();
DynmapChunk chunk = iter.next();
int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
// Skip if already processed
if (snaparray[chunkindex] != null)
continue;
boolean vis = isChunkVisible(chunk);
/* Check if cached chunk snapshot found */
if (tryChunkCache(chunk, vis)) {
endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
cnt++;
} else // If chunk is loaded and not being unloaded, we're grabbing its NBT data
if (cps.chunkExists(chunk.x, chunk.z) && (!isChunkUnloadPending(chunk))) {
ChunkSnapshot ss;
DynIntHashMap tileData;
if (vis) {
// If visible
// Serialize the live chunk to NBT via the reflected writeChunkToNBT.
// Reflection failures are swallowed, leaving an empty compound.
NBTTagCompound nbt = new NBTTagCompound();
try {
writechunktonbt.invoke(cps.chunkLoader, cps.loadChunk(chunk.x, chunk.z), w, nbt);
} catch (IllegalAccessException e) {
} catch (IllegalArgumentException e) {
} catch (InvocationTargetException e) {
}
SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
ss = ssr.ss;
tileData = ssr.tileData;
} else {
// Hidden chunk: use the configured fill placeholder, no tile data.
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
tileData = new DynIntHashMap();
}
snaparray[chunkindex] = ss;
snaptile[chunkindex] = tileData;
endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
cnt++;
}
}
return cnt;
}
@Override
// Capture NBT from already-loaded chunks first, then read up to max_to_load
// more from disk; returns the combined number of chunks processed.
public int loadChunks(int max_to_load) {
return getLoadedChunks() + readChunks(max_to_load);
}
/**
* Read up to max_to_load remaining chunks from storage (cache first, then
* region files). Resumable: the iterator persists across calls. When the
* iterator is exhausted, missing chunks are filled with EMPTY and the
* isempty flag is finalized. Returns the number of chunks processed.
*/
public int readChunks(int max_to_load) {
if (!dw.isLoaded()) {
isempty = true;
unloadChunks();
return 0;
}
int cnt = 0;
if (iterator == null) {
// NOTE(review): "lisreplacederator" is a scrape artifact of "listIterator".
iterator = chunks.lisreplacederator();
}
// Suppress Dynmap's own chunk-load event handling while we read.
DynmapCore.setIgnoreChunkLoads(true);
// Load the required chunks.
while ((cnt < max_to_load) && iterator.hasNext()) {
long startTime = System.nanoTime();
DynmapChunk chunk = iterator.next();
int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
// Skip if already processed
if (snaparray[chunkindex] != null)
continue;
boolean vis = isChunkVisible(chunk);
/* Check if cached chunk snapshot found */
if (tryChunkCache(chunk, vis)) {
endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
} else {
NBTTagCompound nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (nbt != null) {
ChunkSnapshot ss;
DynIntHashMap tileData;
// If hidden
if (!vis) {
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
tileData = new DynIntHashMap();
} else {
// Prep snapshot
SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
ss = ssr.ss;
tileData = ssr.tileData;
}
snaparray[chunkindex] = ss;
snaptile[chunkindex] = tileData;
endChunkLoad(startTime, ChunkStats.UNLOADED_CHUNKS);
} else {
// Chunk not generated yet; count it but leave the slot for EMPTY fill.
endChunkLoad(startTime, ChunkStats.UNGENERATED_CHUNKS);
}
}
cnt++;
}
DynmapCore.setIgnoreChunkLoads(false);
if (iterator.hasNext() == false) /* If we're done */
{
isempty = true;
/* Fill missing chunks with empty dummy chunk */
for (int i = 0; i < snaparray.length; i++) {
if (snaparray[i] == null) {
snaparray[i] = EMPTY;
} else if (snaparray[i] != EMPTY) {
isempty = false;
}
}
}
return cnt;
}
/**
 * Test if loading is complete: true when the world is unloaded or the chunk
 * iterator has been created and exhausted; false before readChunks() runs.
 */
public boolean isDoneLoading() {
    if (!dw.isLoaded()) {
        return true;
    }
    // The iterator is created on the first readChunks() call; until then we
    // cannot be done.
    return (iterator != null) && !iterator.hasNext();
}
/**
* Test if all loaded chunks resolved to the all-air EMPTY placeholder
* (flag is finalized once readChunks() exhausts its iterator).
*/
public boolean isEmpty() {
return isempty;
}
/**
 * Release all chunk snapshots held by this cache.
 */
public void unloadChunks() {
    ChunkSnapshot[] snaps = snaparray;
    if (snaps == null) {
        return;
    }
    // Null each entry to drop snapshot references, then discard the array.
    for (int i = 0; i < snaps.length; i++) {
        snaps[i] = null;
    }
    snaparray = null;
}
// Lazily compute per-section occupancy flags for snapshot index idx.
// Entry [sy] is true when section sy contains blocks; the extra trailing
// element stays false so a lookup one section above the world is "empty".
private void initSectionData(int idx) {
    boolean[] notEmpty = new boolean[nsect + 1];
    ChunkSnapshot cs = snaparray[idx];
    if (cs != EMPTY) {
        for (int sect = 0; sect < nsect; sect++) {
            notEmpty[sect] = !cs.isSectionEmpty(sect);
        }
    }
    isSectionNotEmpty[idx] = notEmpty;
}
// Report whether the 16^3 section at section coordinates (sx, sy, sz) holds
// no blocks; occupancy data is computed on first access per chunk.
public boolean isEmptySection(int sx, int sy, int sz) {
    int idx = (sx - x_min) + ((sz - z_min) * x_dim);
    boolean[] flags = isSectionNotEmpty[idx];
    if (flags == null) {
        initSectionData(idx);
        flags = isSectionNotEmpty[idx];
    }
    return !flags[sy];
}
/**
* Get cache iterator positioned at (x, y, z); End worlds get the variant
* that always reports full sky light.
* NOTE(review): "gereplacederator" is a scrape artifact of "getIterator" —
* restore before compiling.
*/
public MapIterator gereplacederator(int x, int y, int z) {
if (dw.getEnvironment().equals("the_end")) {
return new OurEndMapIterator(x, y, z);
}
return new OurMapIterator(x, y, z);
}
/**
* Set hidden chunk fill style (default is FILL_AIR); used for chunks
* excluded by the visible/hidden area limits.
*/
public void setHiddenFillStyle(HiddenChunkStyle style) {
this.hidestyle = style;
}
/**
 * Add a visible-area limit; may be called more than once (limits are ORed).
 * Must be configured before chunks are loaded. Coordinates are block coordinates.
 */
public void setVisibleRange(VisibilityLimit lim) {
    List<VisibilityLimit> lims = visible_limits;
    if (lims == null) {
        // First limit: allocate the list lazily.
        lims = new ArrayList<VisibilityLimit>();
        visible_limits = lims;
    }
    lims.add(lim);
}
/**
 * Add a hidden-area limit; may be called more than once (limits are ORed and
 * override visible limits). Must be configured before chunks are loaded.
 * Coordinates are block coordinates.
 */
public void setHiddenRange(VisibilityLimit lim) {
    List<VisibilityLimit> lims = hidden_limits;
    if (lims == null) {
        // First limit: allocate the list lazily.
        lims = new ArrayList<VisibilityLimit>();
        hidden_limits = lims;
    }
    lims.add(lim);
}
@Override
public boolean setChunkDataTypes(boolean blockdata, boolean biome, boolean highestblocky, boolean rawbiome) {
    // Record which data categories the renderer wants captured in snapshots.
    this.blockdata = blockdata;
    this.biome = biome;
    this.highesty = highestblocky;
    this.biomeraw = rawbiome;
    // Every requested category is supported by this cache.
    return true;
}
@Override
// The Dynmap world this cache was bound to via setChunks().
public DynmapWorld getWorld() {
return dw;
}
// Build the biome-id -> BiomeMap lookup table once per class load, by matching
// each registered biome's name against the BiomeMap enum names. Unmatched or
// absent ids map to BiomeMap.NULL.
static {
Biome[] b = DynmapPlugin.getBiomeList();
BiomeMap[] bm = BiomeMap.values();
biome_to_bmap = new BiomeMap[256];
for (int i = 0; i < biome_to_bmap.length; i++) {
biome_to_bmap[i] = BiomeMap.NULL;
}
for (int i = 0; i < b.length; i++) {
if (b[i] == null)
continue;
String bs = b[i].getBiomeName();
for (int j = 0; j < bm.length; j++) {
if (bm[j].toString().equals(bs)) {
biome_to_bmap[i] = bm[j];
break;
}
}
}
}
}
10
View Complete Implementation : ForgeMapChunkCache.java
Copyright Apache License 2.0
Author : webbukkit
/**
* Container for managing chunks - dependent upon using chunk snapshots, since rendering is off server thread
*/
public clreplaced ForgeMapChunkCache extends MapChunkCache {
private static boolean init = false;
private static Field unloadqueue = null;
private static Field currentchunkloader = null;
private static Field updateEnreplacedyTick = null;
/* AnvilChunkLoader fields */
// Map
private static Field chunksToRemove = null;
// Set
private static Field pendingAnvilChunksCoordinates = null;
// writeChunkToNBT(Chunk c, World w, NBTTagCompound nbt)
private static Method writechunktonbt = null;
/* AnvilChunkLoaderPending fields */
private static Field chunkCoord = null;
private static Field nbtTag = null;
private World w;
private DynmapWorld dw;
private ChunkProviderServer cps;
private int nsect;
private List<DynmapChunk> chunks;
private Lisreplacederator<DynmapChunk> iterator;
private int x_min, x_max, z_min, z_max;
private int x_dim;
private boolean biome, biomeraw, highesty, blockdata;
private HiddenChunkStyle hidestyle = HiddenChunkStyle.FILL_AIR;
private List<VisibilityLimit> visible_limits = null;
private List<VisibilityLimit> hidden_limits = null;
private boolean isempty = true;
private int snapcnt;
private ChunkSnapshot[] snaparray;
/* Index = (x-x_min) + ((z-z_min)*x_dim) */
private DynIntHashMap[] snaptile;
private byte[][] sameneighborbiomecnt;
private BiomeMap[][] biomemap;
private boolean[][] isSectionNotEmpty;
/* Indexed by snapshot index, then by section index */
private Set<?> queue = null;
private static final BlockStep[] unstep = { BlockStep.X_MINUS, BlockStep.Y_MINUS, BlockStep.Z_MINUS, BlockStep.X_PLUS, BlockStep.Y_PLUS, BlockStep.Z_PLUS };
private static BiomeMap[] biome_to_bmap;
// Pack chunk-local coordinates into a single int index: (y << 8) | (z << 4) | x.
// Used as the key for tile-entity data in the per-chunk DynIntHashMap.
private static final int getIndexInChunk(int cx, int cy, int cz) {
return (cy << 8) | (cz << 4) | cx;
}
/**
* Iterator for traversing map chunk cache (base is for non-snapshot)
*/
public clreplaced OurMapIterator implements MapIterator {
private int x, y, z, chunkindex, bx, bz;
private ChunkSnapshot snap;
private BlockStep laststep;
private DynmapBlockState blk;
private final int worldheight;
private final int x_base;
private final int z_base;
OurMapIterator(int x0, int y0, int z0) {
x_base = x_min << 4;
z_base = z_min << 4;
if (biome) {
biomePrep();
}
initialize(x0, y0, z0);
worldheight = w.getHeight();
}
@Override
public final void initialize(int x0, int y0, int z0) {
this.x = x0;
this.y = y0;
this.z = z0;
this.chunkindex = ((x >> 4) - x_min) + (((z >> 4) - z_min) * x_dim);
this.bx = x & 0xF;
this.bz = z & 0xF;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
laststep = BlockStep.Y_MINUS;
if ((y >= 0) && (y < worldheight)) {
blk = null;
} else {
blk = DynmapBlockState.AIR;
}
}
@Override
public int getBlockSkyLight() {
try {
return snap.getBlockSkyLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 15;
}
}
@Override
public final int getBlockEmittedLight() {
try {
return snap.getBlockEmittedLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 0;
}
}
private void biomePrep() {
if (sameneighborbiomecnt != null) {
return;
}
int x_size = x_dim << 4;
int z_size = (z_max - z_min + 1) << 4;
sameneighborbiomecnt = new byte[x_size][];
biomemap = new BiomeMap[x_size][];
for (int i = 0; i < x_size; i++) {
sameneighborbiomecnt[i] = new byte[z_size];
biomemap[i] = new BiomeMap[z_size];
}
for (int i = 0; i < x_size; i++) {
for (int j = 0; j < z_size; j++) {
if (j == 0)
initialize(i + x_base, 64, z_base);
else
stepPosition(BlockStep.Z_PLUS);
int bb = snap.getBiome(bx, bz);
BiomeMap bm = BiomeMap.byBiomeID(bb);
biomemap[i][j] = bm;
int cnt = 0;
if (i > 0) {
if (bm == biomemap[i - 1][j]) /* Same as one to left */
{
cnt++;
sameneighborbiomecnt[i - 1][j]++;
}
if ((j > 0) && (bm == biomemap[i - 1][j - 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j - 1]++;
}
if ((j < (z_size - 1)) && (bm == biomemap[i - 1][j + 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j + 1]++;
}
}
if ((j > 0) && (biomemap[i][j] == biomemap[i][j - 1])) /* Same as one to above */
{
cnt++;
sameneighborbiomecnt[i][j - 1]++;
}
sameneighborbiomecnt[i][j] = (byte) cnt;
}
}
}
@Override
public final BiomeMap getBiome() {
try {
return biomemap[x - x_base][z - z_base];
} catch (Exception ex) {
return BiomeMap.NULL;
}
}
@Override
public final int getSmoothGrreplacedColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothFoliageColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothColorMultiplier(int[] colormap, int[] swampmap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
if (bm == BiomeMap.SWAMPLAND) {
mult = swampmap[bm.biomeLookup()];
} else {
mult = colormap[bm.biomeLookup()];
}
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult;
if (bm == BiomeMap.SWAMPLAND) {
rmult = swampmap[bm.biomeLookup()];
} else {
rmult = colormap[bm.biomeLookup()];
}
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothWaterColorMultiplier() {
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
return bm.gereplacederColorMult();
}
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int mult = bm.gereplacederColorMult();
racreplaced += (mult >> 16) & 0xFF;
gacreplaced += (mult >> 8) & 0xFF;
bacreplaced += mult & 0xFF;
}
}
return ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
} catch (Exception x) {
return 0xFFFFFF;
}
}
@Override
public final int getSmoothWaterColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = colormap[bm.biomeLookup()];
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = colormap[bm.biomeLookup()];
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
/**
* Step current position in given direction
*/
@Override
public final void stepPosition(BlockStep step) {
blk = null;
switch(step.ordinal()) {
case 0:
x++;
bx++;
if (bx == 16) /* Next chunk? */
{
bx = 0;
chunkindex++;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 1:
y++;
if (y >= worldheight) {
blk = DynmapBlockState.AIR;
}
break;
case 2:
z++;
bz++;
if (bz == 16) /* Next chunk? */
{
bz = 0;
chunkindex += x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 3:
x--;
bx--;
if (bx == -1) /* Next chunk? */
{
bx = 15;
chunkindex--;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 4:
y--;
if (y < 0) {
blk = DynmapBlockState.AIR;
}
break;
case 5:
z--;
bz--;
if (bz == -1) /* Next chunk? */
{
bz = 15;
chunkindex -= x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
}
laststep = step;
}
/**
* Unstep current position to previous position
*/
@Override
public BlockStep unstepPosition() {
BlockStep ls = laststep;
stepPosition(unstep[ls.ordinal()]);
return ls;
}
/**
* Unstep current position in oppisite director of given step
*/
@Override
public void unstepPosition(BlockStep s) {
stepPosition(unstep[s.ordinal()]);
}
@Override
public final void setY(int y) {
if (y > this.y) {
laststep = BlockStep.Y_PLUS;
} else {
laststep = BlockStep.Y_MINUS;
}
this.y = y;
if ((y < 0) || (y >= worldheight)) {
blk = DynmapBlockState.AIR;
} else {
blk = null;
}
}
@Override
public final int getX() {
return x;
}
@Override
public final int getY() {
return y;
}
@Override
public final int getZ() {
return z;
}
@Override
public final DynmapBlockState getBlockTypeAt(BlockStep s) {
if (s == BlockStep.Y_MINUS) {
if (y > 0) {
return snap.getBlockType(bx, y - 1, bz);
}
} else if (s == BlockStep.Y_PLUS) {
if (y < (worldheight - 1)) {
return snap.getBlockType(bx, y + 1, bz);
}
} else {
BlockStep ls = laststep;
stepPosition(s);
DynmapBlockState tid = snap.getBlockType(bx, y, bz);
unstepPosition();
laststep = ls;
return tid;
}
return DynmapBlockState.AIR;
}
@Override
public BlockStep getLastStep() {
return laststep;
}
@Override
public int getWorldHeight() {
return worldheight;
}
@Override
public long getBlockKey() {
return (((chunkindex * worldheight) + y) << 8) | (bx << 4) | bz;
}
@Override
public final boolean isEmptySection() {
try {
return !isSectionNotEmpty[chunkindex][y >> 4];
} catch (Exception x) {
initSectionData(chunkindex);
return !isSectionNotEmpty[chunkindex][y >> 4];
}
}
@Override
public RenderPatchFactory getPatchFactory() {
return HDBlockModels.getPatchDefinitionFactory();
}
@Override
public Object getBlockTileEnreplacedyField(String fieldId) {
try {
int idx = getIndexInChunk(bx, y, bz);
Object[] vals = (Object[]) snaptile[chunkindex].get(idx);
for (int i = 0; i < vals.length; i += 2) {
if (vals[i].equals(fieldId)) {
return vals[i + 1];
}
}
} catch (Exception x) {
}
return null;
}
@Override
public DynmapBlockState getBlockTypeAt(int xoff, int yoff, int zoff) {
int xx = this.x + xoff;
int yy = this.y + yoff;
int zz = this.z + zoff;
int idx = ((xx >> 4) - x_min) + (((zz >> 4) - z_min) * x_dim);
try {
return snaparray[idx].getBlockType(xx & 0xF, yy, zz & 0xF);
} catch (Exception x) {
return DynmapBlockState.AIR;
}
}
@Override
public Object getBlockTileEnreplacedyFieldAt(String fieldId, int xoff, int yoff, int zoff) {
return null;
}
@Override
public long getInhabitedTicks() {
try {
return snap.getInhabitedTicks();
} catch (Exception x) {
return 0;
}
}
@Override
public DynmapBlockState getBlockType() {
if (blk == null) {
blk = snap.getBlockType(bx, y, bz);
}
return blk;
}
}
/* Iterator variant for "the_end" worlds: sky light is always reported as full. */
/* NOTE(review): "clreplaced" is a scrape artifact of the keyword "class" — restore before compiling. */
private clreplaced OurEndMapIterator extends OurMapIterator {
OurEndMapIterator(int x0, int y0, int z0) {
super(x0, y0, z0);
}
@Override
public final int getBlockSkyLight() {
// The End has no sky-light map; report maximum brightness.
return 15;
}
}
/**
* Placeholder snapshot for an unloaded or hidden chunk: all air, fully sky-lit,
* no emitted light, no biome. Shared via the EMPTY singleton.
*/
private static clreplaced EmptyChunk extends ChunkSnapshot {
public EmptyChunk() {
// 256-high chunk at origin; all other snapshot data left empty.
super(256, 0, 0, 0, 0);
}
/* Need these for interface, but not used */
@Override
public int getX() {
return 0;
}
@Override
public int getZ() {
return 0;
}
@Override
public final DynmapBlockState getBlockType(int x, int y, int z) {
return DynmapBlockState.AIR;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
// Full sky light everywhere, so the renderer does not darken the area.
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
return 0;
}
@Override
public int getBiome(int x, int z) {
// -1 = no biome information available.
return -1;
}
@Override
public boolean isSectionEmpty(int sy) {
// Every 16-block section is empty in an all-air chunk.
return true;
}
}
/**
* Placeholder snapshot for a hidden chunk rendered as a uniform fill
* (stone or water) up to y=64, air above. Shared via the STONE/OCEAN singletons.
*/
private static clreplaced PlainChunk extends ChunkSnapshot {
// Block state used below y=64 (resolved once from the block name).
private DynmapBlockState fill;
PlainChunk(String fill) {
super(256, 0, 0, 0, 0);
this.fill = DynmapBlockState.getBaseStateByName(fill);
}
/* Need these for interface, but not used */
@Override
public int getX() {
return 0;
}
@Override
public int getZ() {
return 0;
}
@Override
public int getBiome(int x, int z) {
// -1 = no biome information available.
return -1;
}
@Override
public final DynmapBlockState getBlockType(int x, int y, int z) {
// Solid fill up to (but not including) y=64; air above.
if (y < 64) {
return fill;
}
return DynmapBlockState.AIR;
}
@Override
public final int getBlockSkyLight(int x, int y, int z) {
// Dark inside the fill, fully lit above it.
if (y < 64) {
return 0;
}
return 15;
}
@Override
public final int getBlockEmittedLight(int x, int y, int z) {
return 0;
}
@Override
public final int getHighestBlockYAt(int x, int z) {
// Surface is the top of the fill layer.
return 64;
}
@Override
public boolean isSectionEmpty(int sy) {
// Sections 0-3 (y 0-63) hold the fill; everything above is empty.
return (sy < 4);
}
}
// Shared placeholder snapshots used for unloaded chunks and the configured
// hidden-chunk fill styles (FILL_AIR / FILL_STONE_PLAIN / FILL_OCEAN).
private static final EmptyChunk EMPTY = new EmptyChunk();
private static final PlainChunk STONE = new PlainChunk(DynmapBlockState.STONE_BLOCK);
private static final PlainChunk OCEAN = new PlainChunk(DynmapBlockState.WATER_BLOCK);
/**
* One-time reflective discovery of private Minecraft/Forge internals this cache
* needs: the chunk-unload queue, the chunk loader field, the entity-tick counter,
* AnvilChunkLoader's pending-save structures, and its writeChunkToNBT method.
* Fields are matched by type (names are obfuscated across MC versions).
* NOTE(review): "clreplaced"/"isreplacedignableFrom"/"Enreplacedy" are scrape
* artifacts of "class"/"isAssignableFrom"/"Entity" — restore before compiling.
*/
public static void init() {
if (!init) {
// ChunkProviderServer: find the unload queue (a Set) and the chunk loader.
Field[] f = ChunkProviderServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((unloadqueue == null) && f[i].getType().isreplacedignableFrom(java.util.Set.clreplaced)) {
unloadqueue = f[i];
// Log.info("Found unloadqueue - " + f[i].getName());
unloadqueue.setAccessible(true);
} else if ((currentchunkloader == null) && f[i].getType().isreplacedignableFrom(IChunkLoader.clreplaced)) {
currentchunkloader = f[i];
// Log.info("Found currentchunkprovider - " + f[i].getName());
currentchunkloader.setAccessible(true);
}
}
// WorldServer: first int field is assumed to be the entity-tick counter.
f = WorldServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((updateEnreplacedyTick == null) && f[i].getType().isreplacedignableFrom(int.clreplaced)) {
updateEnreplacedyTick = f[i];
// Log.info("Found updateEnreplacedyTick - " + f[i].getName());
updateEnreplacedyTick.setAccessible(true);
}
}
// AnvilChunkLoader: pending-save map (Map) and in-flight coordinate set (Set).
f = AnvilChunkLoader.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((chunksToRemove == null) && (f[i].getType().equals(Map.clreplaced))) {
chunksToRemove = f[i];
chunksToRemove.setAccessible(true);
} else if ((pendingAnvilChunksCoordinates == null) && (f[i].getType().equals(Set.clreplaced))) {
pendingAnvilChunksCoordinates = f[i];
pendingAnvilChunksCoordinates.setAccessible(true);
}
}
// Get writeChunkToNBT method
// Matched by signature (Chunk, World, NBTTagCompound) since the name is obfuscated.
Method[] ma = AnvilChunkLoader.clreplaced.getDeclaredMethods();
for (Method m : ma) {
Clreplaced<?>[] p = m.getParameterTypes();
if ((p.length == 3) && (p[0].equals(Chunk.clreplaced)) && (p[1].equals(World.clreplaced)) && (p[2].equals(NBTTagCompound.clreplaced))) {
writechunktonbt = m;
m.setAccessible(true);
break;
}
}
// Missing handles degrade functionality but must not crash the plugin.
if ((unloadqueue == null) || (currentchunkloader == null) || (writechunktonbt == null)) {
Log.severe("ERROR: cannot find unload queue or chunk provider field - dynmap cannot load chunks");
}
if (updateEnreplacedyTick == null) {
Log.severe("ERROR: cannot find updateEnreplacedyTick - dynmap cannot drive enreplacedy cleanup when no players are active");
}
init = true;
}
}
/**
 * Construct empty cache; resolves the shared reflection handles on first use
 * (init() is idempotent via its static flag).
 */
public ForgeMapChunkCache() {
    init();
}
/**
 * Bind this cache to a world and the list of chunks to capture: records the
 * chunk provider, computes the bounding rectangle of the requested chunk
 * coordinates, and allocates snapshot arrays sized to that rectangle.
 */
public void setChunks(ForgeWorld dw, List<DynmapChunk> chunks) {
    this.dw = dw;
    this.w = dw.getWorld();
    if (dw.isLoaded()) {
        /* Check if world's provider is ChunkProviderServer */
        IChunkProvider cp = this.w.getChunkProvider();
        if (cp instanceof ChunkProviderServer) {
            cps = (ChunkProviderServer) cp;
        } else {
            Log.severe("Error: world " + dw.getName() + " has unsupported chunk provider");
        }
    } else {
        // World not loaded: nothing can be captured, so work from an empty list.
        chunks = new ArrayList<DynmapChunk>();
    }
    // Number of 16-block-high sections in this world.
    nsect = dw.worldheight >> 4;
    this.chunks = chunks;
    /* Compute range */
    if (chunks.size() == 0) {
        this.x_min = 0;
        this.x_max = 0;
        this.z_min = 0;
        this.z_max = 0;
        x_dim = 1;
    } else {
        x_min = x_max = chunks.get(0).x;
        z_min = z_max = chunks.get(0).z;
        for (DynmapChunk c : chunks) {
            if (c.x > x_max) {
                x_max = c.x;
            }
            if (c.x < x_min) {
                x_min = c.x;
            }
            if (c.z > z_max) {
                z_max = c.z;
            }
            if (c.z < z_min) {
                z_min = c.z;
            }
        }
        x_dim = x_max - x_min + 1;
    }
    // One slot per chunk in the bounding rectangle; the index used everywhere
    // is (x - x_min) + (z - z_min) * x_dim.
    snapcnt = x_dim * (z_max - z_min + 1);
    snaparray = new ChunkSnapshot[snapcnt];
    snaptile = new DynIntHashMap[snapcnt];
    isSectionNotEmpty = new boolean[snapcnt][];
    try {
        if ((unloadqueue != null) && (cps != null)) {
            // Grab the live unload queue so pending-unload chunks can be skipped.
            queue = (Set<?>) unloadqueue.get(cps);
        }
    } catch (IllegalArgumentException iax) {
        // NOTE(review): reflection failures are deliberately swallowed here;
        // queue stays null and isChunkUnloadPending() then reports false.
    } catch (IllegalAccessException e) {
    }
}
// Ensures the "unsupported chunk loader" error is logged only once.
private static boolean didError = false;
/**
 * Read a chunk's NBT data without loading the chunk: first checks
 * AnvilChunkLoader's pending-save queue (via reflection), then falls back to
 * the region file on disk. Returns the "Level" compound, or null when the
 * chunk does not exist or the chunk loader is unsupported.
 */
public NBTTagCompound readChunk(int x, int z) {
    if ((cps == null) || (!(cps.chunkLoader instanceof AnvilChunkLoader)) || (((chunksToRemove == null) || (pendingAnvilChunksCoordinates == null)))) {
        if (!didError) {
            Log.severe("**** DYNMAP CANNOT READ CHUNKS (UNSUPPORTED CHUNK LOADER) ****");
            didError = true;
        }
        return null;
    }
    try {
        AnvilChunkLoader acl = (AnvilChunkLoader) cps.chunkLoader;
        Map<?, ?> chunkstoremove = null;
        Set<?> pendingcoords = null;
        chunkstoremove = (Map<?, ?>) chunksToRemove.get(acl);
        pendingcoords = (Set<?>) pendingAnvilChunksCoordinates.get(acl);
        NBTTagCompound rslt = null;
        ChunkPos coord = new ChunkPos(x, z);
        if (pendingcoords.contains(coord)) {
            // Chunk is queued for save: pull its NBT from the pending entry.
            for (Object o : chunkstoremove.values()) {
                if (chunkCoord == null) {
                    // Lazily resolve the pending-entry class's coordinate and
                    // NBT fields by type (class name is not accessible here).
                    Field[] f = o.getClreplaced().getDeclaredFields();
                    for (Field ff : f) {
                        if ((chunkCoord == null) && (ff.getType().equals(ChunkPos.clreplaced))) {
                            chunkCoord = ff;
                            chunkCoord.setAccessible(true);
                        } else if ((nbtTag == null) && (ff.getType().equals(NBTTagCompound.clreplaced))) {
                            nbtTag = ff;
                            nbtTag.setAccessible(true);
                        }
                    }
                    if ((chunkCoord == null) || (nbtTag == null)) {
                        Log.severe("Error getting chunkCoord and nbtTag for Forge");
                        return null;
                    }
                }
                ChunkPos occ = (ChunkPos) chunkCoord.get(o);
                if (occ.equals(coord)) {
                    rslt = (NBTTagCompound) nbtTag.get(o);
                    break;
                }
            }
        }
        if (rslt == null) {
            // Not pending: read directly from the on-disk region file.
            DataInputStream str = RegionFileCache.getChunkInputStream(acl.chunkSaveLocation, x, z);
            if (str == null) {
                // Chunk not generated yet.
                return null;
            }
            rslt = CompressedStreamTools.read(str);
        }
        if (rslt != null)
            rslt = rslt.getCompoundTag("Level");
        return rslt;
    } catch (Exception exc) {
        Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
        return null;
    }
}
/**
 * Convert an NBT tag into a plain Java value: boxed primitives, raw arrays,
 * an ArrayList for list tags, or a HashMap for compound tags (recursing for
 * nested payloads). Unknown tag ids (and unknown list element types) yield
 * null / are skipped, matching NBT ids 1-11.
 */
private Object getNBTValue(NBTBase v) {
    switch(v.getId()) {
        case 1: // Byte
            return Byte.valueOf(((NBTTagByte) v).getByte());
        case 2: // Short
            return Short.valueOf(((NBTTagShort) v).getShort());
        case 3: // Int
            return Integer.valueOf(((NBTTagInt) v).getInt());
        case 4: // Long
            return Long.valueOf(((NBTTagLong) v).getLong());
        case 5: // Float
            return Float.valueOf(((NBTTagFloat) v).getFloat());
        case 6: // Double
            return Double.valueOf(((NBTTagDouble) v).getDouble());
        case 7: // Byte[]
            return ((NBTTagByteArray) v).getByteArray();
        case 8: // String
            return ((NBTTagString) v).getString();
        case 9: { // List
            NBTTagList list = (NBTTagList) v;
            ArrayList<Object> out = new ArrayList<Object>();
            int elemType = list.getTagType();
            for (int i = 0; i < list.tagCount(); i++) {
                switch(elemType) {
                    case 5: // Float element
                        out.add(list.getFloatAt(i));
                        break;
                    case 6: // Double element
                        out.add(list.getDoubleAt(i));
                        break;
                    case 8: // String element
                        out.add(list.getStringTagAt(i));
                        break;
                    case 10: // Compound element - recurse
                        out.add(getNBTValue(list.getCompoundTagAt(i)));
                        break;
                    case 11: // Int[] element
                        out.add(list.getIntArrayAt(i));
                        break;
                }
            }
            return out;
        }
        case 10: { // Compound -> Map<String, Object>
            NBTTagCompound comp = (NBTTagCompound) v;
            HashMap<String, Object> out = new HashMap<String, Object>();
            for (Object key : comp.getKeySet()) {
                String name = (String) key;
                out.put(name, getNBTValue(comp.getTag(name)));
            }
            return out;
        }
        case 11: // Int[]
            return ((NBTTagIntArray) v).getIntArray();
        default:
            return null;
    }
}
/**
 * Decide whether a chunk's real contents may be rendered. Chunks are visible
 * by default; if visible limits are configured the chunk must intersect one
 * of them, and any intersecting hidden limit overrides that and hides it.
 */
private boolean isChunkVisible(DynmapChunk chunk) {
    if (visible_limits != null) {
        boolean insideVisible = false;
        for (VisibilityLimit lim : visible_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                insideVisible = true;
                break;
            }
        }
        if (!insideVisible) {
            return false;
        }
    }
    if (hidden_limits != null) {
        for (VisibilityLimit lim : hidden_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                return false;
            }
        }
    }
    return true;
}
/**
 * Try to satisfy a chunk from the shared snapshot cache. On a hit, stores
 * the snapshot (or the configured hidden-style stand-in when the chunk is
 * not visible) plus its tile-entity data into this cache's arrays.
 * @return true if a cached snapshot record was found
 */
private boolean tryChunkCache(DynmapChunk chunk, boolean vis) {
    /* Check if cached chunk snapshot found */
    ChunkSnapshot ss = null;
    SnapshotRec ssr = DynmapPlugin.plugin.sscache.getSnapshot(dw.getName(), chunk.x, chunk.z, blockdata, biome, biomeraw, highesty);
    if (ssr != null) {
        ss = ssr.ss;
        if (!vis) {
            // Hidden chunk: substitute the fill style instead of real data.
            if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
                ss = STONE;
            } else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
                ss = OCEAN;
            } else {
                ss = EMPTY;
            }
        }
        int idx = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
        snaparray[idx] = ss;
        snaptile[idx] = ssr.tileData;
    }
    return (ssr != null);
}
/**
 * Test whether the server has queued this chunk for unload. The reflected
 * unload queue (when resolvable) holds packed long chunk coordinates; if it
 * could not be obtained, chunks are treated as not pending.
 */
private boolean isChunkUnloadPending(DynmapChunk chunk) {
    if (queue == null) {
        return false;
    }
    return queue.contains(Long.valueOf(ChunkPos.asLong(chunk.x, chunk.z)));
}
// Prep snapshot and add to cache
/**
 * Build a ChunkSnapshot plus tile-entity field data from chunk NBT, and put
 * the pair into the shared snapshot cache.
 */
private SnapshotRec prepChunkSnapshot(DynmapChunk chunk, NBTTagCompound nbt) {
    ChunkSnapshot ss = new ChunkSnapshot(nbt, dw.worldheight);
    DynIntHashMap tileData = new DynIntHashMap();
    // NOTE(review): tag key below looks scrape-mangled; the vanilla chunk NBT
    // key is "TileEntities" -- confirm against upstream before reuse.
    NBTTagList tiles = nbt.getTagList("TileEnreplacedies", 10);
    if (tiles == null)
        tiles = new NBTTagList();
    /* Get tile enreplacedy data */
    List<Object> vals = new ArrayList<Object>();
    for (int tid = 0; tid < tiles.tagCount(); tid++) {
        NBTTagCompound tc = tiles.getCompoundTagAt(tid);
        int tx = tc.getInteger("x");
        int ty = tc.getInteger("y");
        int tz = tc.getInteger("z");
        // Reduce world coords to chunk-local (0..15) x/z.
        int cx = tx & 0xF;
        int cz = tz & 0xF;
        DynmapBlockState blk = ss.getBlockType(cx, ty, cz);
        // Only capture the fields the renderer's block models actually need.
        String[] te_fields = HDBlockModels.getTileEnreplacedyFieldsNeeded(blk);
        if (te_fields != null) {
            vals.clear();
            for (String id : te_fields) {
                NBTBase v = tc.getTag(id);
                /* Get field */
                if (v != null) {
                    Object val = getNBTValue(v);
                    if (val != null) {
                        // Stored as alternating [name, value] pairs.
                        vals.add(id);
                        vals.add(val);
                    }
                }
            }
            if (vals.size() > 0) {
                Object[] vlist = vals.toArray(new Object[vals.size()]);
                tileData.put(getIndexInChunk(cx, ty, cz), vlist);
            }
        }
    }
    SnapshotRec ssr = new SnapshotRec();
    ssr.ss = ss;
    ssr.tileData = tileData;
    DynmapPlugin.plugin.sscache.putSnapshot(dw.getName(), chunk.x, chunk.z, ssr, blockdata, biome, biomeraw, highesty);
    return ssr;
}
/**
 * Read NBT data from loaded chunks - needs to be called from server/world thread to be safe
 * @return number of chunks captured (cache hits plus live-serialized chunks)
 */
public int getLoadedChunks() {
    int cnt = 0;
    if (!dw.isLoaded()) {
        isempty = true;
        unloadChunks();
        return 0;
    }
    Lisreplacederator<DynmapChunk> iter = chunks.lisreplacederator();
    while (iter.hasNext()) {
        long startTime = System.nanoTime();
        DynmapChunk chunk = iter.next();
        int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
        // Skip if already processed
        if (snaparray[chunkindex] != null)
            continue;
        boolean vis = isChunkVisible(chunk);
        /* Check if cached chunk snapshot found */
        if (tryChunkCache(chunk, vis)) {
            endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
            cnt++;
        } else // If chunk is loaded and not being unloaded, we're grabbing its NBT data
        if (cps.chunkExists(chunk.x, chunk.z) && (!isChunkUnloadPending(chunk))) {
            ChunkSnapshot ss;
            DynIntHashMap tileData;
            if (vis) {
                // If visible
                // Serialize the live chunk to NBT via the reflected writeChunkToNBT.
                NBTTagCompound nbt = new NBTTagCompound();
                try {
                    writechunktonbt.invoke(cps.chunkLoader, cps.loadChunk(chunk.x, chunk.z), w, nbt);
                } catch (IllegalAccessException e) {
                    // NOTE(review): reflection failures are swallowed; nbt stays
                    // empty and the snapshot below is built from an empty tag.
                } catch (IllegalArgumentException e) {
                } catch (InvocationTargetException e) {
                }
                SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
                ss = ssr.ss;
                tileData = ssr.tileData;
            } else {
                // Hidden chunk: substitute the configured fill style.
                if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
                    ss = STONE;
                } else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
                    ss = OCEAN;
                } else {
                    ss = EMPTY;
                }
                tileData = new DynIntHashMap();
            }
            snaparray[chunkindex] = ss;
            snaptile[chunkindex] = tileData;
            endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
            cnt++;
        }
    }
    return cnt;
}
@Override
public int loadChunks(int max_to_load) {
    // First harvest whatever the server already has loaded, then read up to
    // max_to_load of the remaining chunks from disk / pending-save queues.
    return getLoadedChunks() + readChunks(max_to_load);
}
/**
 * Read up to max_to_load chunks not yet captured, via snapshot cache or
 * direct NBT read (readChunk). On completion of the full list, fills any
 * missing slots with the EMPTY stand-in and computes the isempty flag.
 * @return number of chunks processed this call (including ungenerated ones)
 */
public int readChunks(int max_to_load) {
    if (!dw.isLoaded()) {
        isempty = true;
        unloadChunks();
        return 0;
    }
    int cnt = 0;
    if (iterator == null) {
        iterator = chunks.lisreplacederator();
    }
    // Suppress dynmap's own chunk-load event handling while we read.
    DynmapCore.setIgnoreChunkLoads(true);
    // Load the required chunks.
    while ((cnt < max_to_load) && iterator.hasNext()) {
        long startTime = System.nanoTime();
        DynmapChunk chunk = iterator.next();
        int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
        // Skip if already processed
        if (snaparray[chunkindex] != null)
            continue;
        boolean vis = isChunkVisible(chunk);
        /* Check if cached chunk snapshot found */
        if (tryChunkCache(chunk, vis)) {
            endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
        } else {
            NBTTagCompound nbt = readChunk(chunk.x, chunk.z);
            // If read was good
            if (nbt != null) {
                ChunkSnapshot ss;
                DynIntHashMap tileData;
                // If hidden
                if (!vis) {
                    // Substitute the configured hidden-chunk fill style.
                    if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
                        ss = STONE;
                    } else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
                        ss = OCEAN;
                    } else {
                        ss = EMPTY;
                    }
                    tileData = new DynIntHashMap();
                } else {
                    // Prep snapshot
                    SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
                    ss = ssr.ss;
                    tileData = ssr.tileData;
                }
                snaparray[chunkindex] = ss;
                snaptile[chunkindex] = tileData;
                endChunkLoad(startTime, ChunkStats.UNLOADED_CHUNKS);
            } else {
                // Chunk never generated: counts toward progress but stays null
                // here (filled with EMPTY below once the list is exhausted).
                endChunkLoad(startTime, ChunkStats.UNGENERATED_CHUNKS);
            }
        }
        cnt++;
    }
    DynmapCore.setIgnoreChunkLoads(false);
    if (iterator.hasNext() == false) /* If we're done */
    {
        isempty = true;
        /* Fill missing chunks with empty dummy chunk */
        for (int i = 0; i < snaparray.length; i++) {
            if (snaparray[i] == null) {
                snaparray[i] = EMPTY;
            } else if (snaparray[i] != EMPTY) {
                isempty = false;
            }
        }
    }
    return cnt;
}
/**
 * Test if loading is complete: an unloaded world counts as done; otherwise
 * done once the chunk iterator created by readChunks() is exhausted. Before
 * readChunks() has run there is no iterator yet, so loading is not done.
 */
public boolean isDoneLoading() {
    if (!dw.isLoaded()) {
        return true;
    }
    return (iterator != null) && !iterator.hasNext();
}
/**
 * Test if all captured chunks were empty (flag computed when readChunks
 * finishes the chunk list, or forced true when the world is unloaded).
 */
public boolean isEmpty() {
    return isempty;
}
/**
 * Release every cached chunk snapshot and drop the snapshot array itself,
 * allowing the chunk data to be garbage-collected.
 */
public void unloadChunks() {
    if (snaparray == null) {
        return;
    }
    int i = 0;
    while (i < snaparray.length) {
        snaparray[i] = null;
        i++;
    }
    snaparray = null;
}
/**
 * Lazily build the per-section occupancy flags for the snapshot at idx.
 * The array has one extra slot so an index of nsect reads as "empty"; the
 * EMPTY stand-in skips the scan entirely (all sections empty).
 */
private void initSectionData(int idx) {
    isSectionNotEmpty[idx] = new boolean[nsect + 1];
    if (snaparray[idx] == EMPTY) {
        return;
    }
    for (int section = 0; section < nsect; section++) {
        isSectionNotEmpty[idx][section] = !snaparray[idx].isSectionEmpty(section);
    }
}
/**
 * Test whether the 16^3 section at chunk coords (sx, sz), section index sy,
 * is empty; lazily initializes that chunk's occupancy flags on first query.
 */
public boolean isEmptySection(int sx, int sy, int sz) {
    int idx = (sx - x_min) + (sz - z_min) * x_dim;
    if (isSectionNotEmpty[idx] == null) {
        initSectionData(idx);
    }
    return !isSectionNotEmpty[idx][sy];
}
/**
 * Get cache iterator positioned at the given block coordinates. End-type
 * worlds get the variant that reports fixed full sky light.
 * NOTE(review): method name is scrape-mangled; upstream this is getIterator().
 */
public MapIterator gereplacederator(int x, int y, int z) {
    if (dw.getEnvironment().equals("the_end")) {
        return new OurEndMapIterator(x, y, z);
    }
    return new OurMapIterator(x, y, z);
}
/**
 * Set hidden chunk style (default is FILL_AIR)
 */
public void setHiddenFillStyle(HiddenChunkStyle style) {
    this.hidestyle = style;
}
/**
 * Add visible area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 */
public void setVisibleRange(VisibilityLimit lim) {
    // Lazily allocated: a null list means "no visible restriction".
    if (visible_limits == null)
        visible_limits = new ArrayList<VisibilityLimit>();
    visible_limits.add(lim);
}
/**
 * Add hidden area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 */
public void setHiddenRange(VisibilityLimit lim) {
    // Lazily allocated: a null list means "nothing explicitly hidden".
    if (hidden_limits == null)
        hidden_limits = new ArrayList<VisibilityLimit>();
    hidden_limits.add(lim);
}
/**
 * Record which data categories callers want captured; this cache supports
 * all of them, so it always reports success.
 */
@Override
public boolean setChunkDataTypes(boolean blockdata, boolean biome, boolean highestblocky, boolean rawbiome) {
    this.blockdata = blockdata;
    this.biome = biome;
    this.highesty = highestblocky;
    this.biomeraw = rawbiome;
    return true;
}
@Override
public DynmapWorld getWorld() {
    // The dynmap-side world wrapper this cache was bound to in setChunks().
    return dw;
}
static {
    // Build the biome-id -> BiomeMap lookup by matching biome names against
    // BiomeMap enum string values; unmatched ids stay BiomeMap.NULL.
    Biome[] b = DynmapPlugin.getBiomeList();
    BiomeMap[] bm = BiomeMap.values();
    biome_to_bmap = new BiomeMap[256];
    for (int i = 0; i < biome_to_bmap.length; i++) {
        biome_to_bmap[i] = BiomeMap.NULL;
    }
    for (int i = 0; i < b.length; i++) {
        if (b[i] == null)
            continue;
        String bs = b[i].biomeName;
        for (int j = 0; j < bm.length; j++) {
            if (bm[j].toString().equals(bs)) {
                biome_to_bmap[i] = bm[j];
                break;
            }
        }
    }
}
}
10
View Complete Implementation : ForgeMapChunkCache.java
Copyright Apache License 2.0
Author : webbukkit
/**
* Container for managing chunks - dependent upon using chunk snapshots, since rendering is off server thread
*/
public clreplaced ForgeMapChunkCache extends MapChunkCache {
// Guard so the reflection scan in init() runs only once.
private static boolean init = false;
/* ChunkProviderServer fields resolved by init() */
private static Field unloadqueue = null;
private static Field currentchunkloader = null;
/* WorldServer field (entity update tick counter; name mangled by scrape) */
private static Field updateEnreplacedyTick = null;
/* AnvilChunkLoader fields */
// Map
private static Field chunksToRemove = null;
// Set
private static Field pendingAnvilChunksCoordinates = null;
// writeChunkToNBT(Chunk c, World w, NBTTagCompound nbt)
private static Method writechunktonbt = null;
/* AnvilChunkLoaderPending fields */
private static Field chunkCoord = null;
private static Field nbtTag = null;
// World/cache bindings established by setChunks().
private World w;
private DynmapWorld dw;
private ChunkProviderServer cps;
// Number of 16-block-high sections in the bound world.
private int nsect;
private List<DynmapChunk> chunks;
private Lisreplacederator<DynmapChunk> iterator;
// Bounding rectangle of the requested chunk coordinates.
private int x_min, x_max, z_min, z_max;
private int x_dim;
// Data categories requested via setChunkDataTypes().
private boolean biome, biomeraw, highesty, blockdata;
private HiddenChunkStyle hidestyle = HiddenChunkStyle.FILL_AIR;
private List<VisibilityLimit> visible_limits = null;
private List<VisibilityLimit> hidden_limits = null;
private boolean isempty = true;
private int snapcnt;
private ChunkSnapshot[] snaparray;
/* Index = (x-x_min) + ((z-z_min)*x_dim) */
private DynIntHashMap[] snaptile;
// Biome smoothing scratch data built by biomePrep().
private byte[][] sameneighborbiomecnt;
private BiomeMap[][] biomemap;
private boolean[][] isSectionNotEmpty;
/* Indexed by snapshot index, then by section index */
// Live chunk-unload queue, reflected from the chunk provider (may be null).
private Set<?> queue = null;
// Inverse of each BlockStep, indexed by ordinal (used by unstepPosition).
private static final BlockStep[] unstep = { BlockStep.X_MINUS, BlockStep.Y_MINUS, BlockStep.Z_MINUS, BlockStep.X_PLUS, BlockStep.Y_PLUS, BlockStep.Z_PLUS };
// Biome-id -> BiomeMap lookup, populated by the static initializer.
private static BiomeMap[] biome_to_bmap;
/**
 * Pack chunk-local coordinates into a single tile-data key: y in the high
 * bits, then z (4 bits), then x (4 bits).
 */
private static final int getIndexInChunk(int cx, int cy, int cz) {
    int packed = cx;
    packed |= (cz << 4);
    packed |= (cy << 8);
    return packed;
}
/**
* Iterator for traversing map chunk cache (base is for non-snapshot)
*/
public clreplaced OurMapIterator implements MapIterator {
private int x, y, z, chunkindex, bx, bz;
private ChunkSnapshot snap;
private BlockStep laststep;
private DynmapBlockState blk;
private final int worldheight;
private final int x_base;
private final int z_base;
/**
 * Position a new iterator at (x0, y0, z0).
 * Fix: worldheight must be set BEFORE initialize() runs -- initialize()
 * compares y against worldheight, and the original assigned it afterwards,
 * so the first initialize() (and the ones biomePrep() issues) compared
 * against 0 and always primed blk as AIR.
 */
OurMapIterator(int x0, int y0, int z0) {
    x_base = x_min << 4;
    z_base = z_min << 4;
    worldheight = w.getHeight();
    if (biome) {
        biomePrep();
    }
    initialize(x0, y0, z0);
}
@Override
public final void initialize(int x0, int y0, int z0) {
this.x = x0;
this.y = y0;
this.z = z0;
this.chunkindex = ((x >> 4) - x_min) + (((z >> 4) - z_min) * x_dim);
this.bx = x & 0xF;
this.bz = z & 0xF;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
laststep = BlockStep.Y_MINUS;
if ((y >= 0) && (y < worldheight)) {
blk = null;
} else {
blk = DynmapBlockState.AIR;
}
}
@Override
public int getBlockSkyLight() {
try {
return snap.getBlockSkyLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 15;
}
}
@Override
public final int getBlockEmittedLight() {
try {
return snap.getBlockEmittedLight(bx, y, bz);
} catch (ArrayIndexOutOfBoundsException aioobx) {
return 0;
}
}
private void biomePrep() {
if (sameneighborbiomecnt != null) {
return;
}
int x_size = x_dim << 4;
int z_size = (z_max - z_min + 1) << 4;
sameneighborbiomecnt = new byte[x_size][];
biomemap = new BiomeMap[x_size][];
for (int i = 0; i < x_size; i++) {
sameneighborbiomecnt[i] = new byte[z_size];
biomemap[i] = new BiomeMap[z_size];
}
for (int i = 0; i < x_size; i++) {
for (int j = 0; j < z_size; j++) {
if (j == 0)
initialize(i + x_base, 64, z_base);
else
stepPosition(BlockStep.Z_PLUS);
int bb = snap.getBiome(bx, bz);
BiomeMap bm = BiomeMap.byBiomeID(bb);
biomemap[i][j] = bm;
int cnt = 0;
if (i > 0) {
if (bm == biomemap[i - 1][j]) /* Same as one to left */
{
cnt++;
sameneighborbiomecnt[i - 1][j]++;
}
if ((j > 0) && (bm == biomemap[i - 1][j - 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j - 1]++;
}
if ((j < (z_size - 1)) && (bm == biomemap[i - 1][j + 1])) {
cnt++;
sameneighborbiomecnt[i - 1][j + 1]++;
}
}
if ((j > 0) && (biomemap[i][j] == biomemap[i][j - 1])) /* Same as one to above */
{
cnt++;
sameneighborbiomecnt[i][j - 1]++;
}
sameneighborbiomecnt[i][j] = (byte) cnt;
}
}
}
@Override
public final BiomeMap getBiome() {
try {
return biomemap[x - x_base][z - z_base];
} catch (Exception ex) {
return BiomeMap.NULL;
}
}
@Override
public final int getSmoothGrreplacedColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedGrreplacedMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothFoliageColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = bm.getModifiedFoliageMultiplier(colormap[bm.biomeLookup()]);
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothColorMultiplier(int[] colormap, int[] swampmap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
if (bm == BiomeMap.SWAMPLAND) {
mult = swampmap[bm.biomeLookup()];
} else {
mult = colormap[bm.biomeLookup()];
}
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult;
if (bm == BiomeMap.SWAMPLAND) {
rmult = swampmap[bm.biomeLookup()];
} else {
rmult = colormap[bm.biomeLookup()];
}
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
@Override
public final int getSmoothWaterColorMultiplier() {
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
return bm.gereplacederColorMult();
}
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int mult = bm.gereplacederColorMult();
racreplaced += (mult >> 16) & 0xFF;
gacreplaced += (mult >> 8) & 0xFF;
bacreplaced += mult & 0xFF;
}
}
return ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
} catch (Exception x) {
return 0xFFFFFF;
}
}
@Override
public final int getSmoothWaterColorMultiplier(int[] colormap) {
int mult = 0xFFFFFF;
try {
int rx = x - x_base;
int rz = z - z_base;
BiomeMap bm = biomemap[rx][rz];
if (sameneighborbiomecnt[rx][rz] >= (byte) 8) /* All neighbors same? */
{
mult = colormap[bm.biomeLookup()];
} else {
int racreplaced = 0;
int gacreplaced = 0;
int bacreplaced = 0;
for (int xoff = -1; xoff < 2; xoff++) {
for (int zoff = -1; zoff < 2; zoff++) {
bm = biomemap[rx + xoff][rz + zoff];
int rmult = colormap[bm.biomeLookup()];
racreplaced += (rmult >> 16) & 0xFF;
gacreplaced += (rmult >> 8) & 0xFF;
bacreplaced += rmult & 0xFF;
}
}
mult = ((racreplaced / 9) << 16) | ((gacreplaced / 9) << 8) | (bacreplaced / 9);
}
} catch (Exception x) {
mult = 0xFFFFFF;
}
return mult;
}
/**
* Step current position in given direction
*/
@Override
public final void stepPosition(BlockStep step) {
blk = null;
switch(step.ordinal()) {
case 0:
x++;
bx++;
if (bx == 16) /* Next chunk? */
{
bx = 0;
chunkindex++;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 1:
y++;
if (y >= worldheight) {
blk = DynmapBlockState.AIR;
}
break;
case 2:
z++;
bz++;
if (bz == 16) /* Next chunk? */
{
bz = 0;
chunkindex += x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 3:
x--;
bx--;
if (bx == -1) /* Next chunk? */
{
bx = 15;
chunkindex--;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
case 4:
y--;
if (y < 0) {
blk = DynmapBlockState.AIR;
}
break;
case 5:
z--;
bz--;
if (bz == -1) /* Next chunk? */
{
bz = 15;
chunkindex -= x_dim;
if ((chunkindex >= snapcnt) || (chunkindex < 0)) {
snap = EMPTY;
} else {
snap = snaparray[chunkindex];
}
}
break;
}
laststep = step;
}
/**
* Unstep current position to previous position
*/
@Override
public BlockStep unstepPosition() {
BlockStep ls = laststep;
stepPosition(unstep[ls.ordinal()]);
return ls;
}
/**
* Unstep current position in oppisite director of given step
*/
@Override
public void unstepPosition(BlockStep s) {
stepPosition(unstep[s.ordinal()]);
}
@Override
public final void setY(int y) {
if (y > this.y) {
laststep = BlockStep.Y_PLUS;
} else {
laststep = BlockStep.Y_MINUS;
}
this.y = y;
if ((y < 0) || (y >= worldheight)) {
blk = DynmapBlockState.AIR;
} else {
blk = null;
}
}
@Override
public final int getX() {
return x;
}
@Override
public final int getY() {
return y;
}
@Override
public final int getZ() {
return z;
}
@Override
public final DynmapBlockState getBlockTypeAt(BlockStep s) {
if (s == BlockStep.Y_MINUS) {
if (y > 0) {
return snap.getBlockType(bx, y - 1, bz);
}
} else if (s == BlockStep.Y_PLUS) {
if (y < (worldheight - 1)) {
return snap.getBlockType(bx, y + 1, bz);
}
} else {
BlockStep ls = laststep;
stepPosition(s);
DynmapBlockState tid = snap.getBlockType(bx, y, bz);
unstepPosition();
laststep = ls;
return tid;
}
return DynmapBlockState.AIR;
}
@Override
public BlockStep getLastStep() {
return laststep;
}
@Override
public int getWorldHeight() {
return worldheight;
}
@Override
public long getBlockKey() {
return (((chunkindex * worldheight) + y) << 8) | (bx << 4) | bz;
}
@Override
public final boolean isEmptySection() {
try {
return !isSectionNotEmpty[chunkindex][y >> 4];
} catch (Exception x) {
initSectionData(chunkindex);
return !isSectionNotEmpty[chunkindex][y >> 4];
}
}
@Override
public RenderPatchFactory getPatchFactory() {
return HDBlockModels.getPatchDefinitionFactory();
}
@Override
public Object getBlockTileEnreplacedyField(String fieldId) {
try {
int idx = getIndexInChunk(bx, y, bz);
Object[] vals = (Object[]) snaptile[chunkindex].get(idx);
for (int i = 0; i < vals.length; i += 2) {
if (vals[i].equals(fieldId)) {
return vals[i + 1];
}
}
} catch (Exception x) {
}
return null;
}
@Override
public DynmapBlockState getBlockTypeAt(int xoff, int yoff, int zoff) {
int xx = this.x + xoff;
int yy = this.y + yoff;
int zz = this.z + zoff;
int idx = ((xx >> 4) - x_min) + (((zz >> 4) - z_min) * x_dim);
try {
return snaparray[idx].getBlockType(xx & 0xF, yy, zz & 0xF);
} catch (Exception x) {
return DynmapBlockState.AIR;
}
}
@Override
public Object getBlockTileEnreplacedyFieldAt(String fieldId, int xoff, int yoff, int zoff) {
return null;
}
@Override
public long getInhabitedTicks() {
try {
return snap.getInhabitedTicks();
} catch (Exception x) {
return 0;
}
}
@Override
public DynmapBlockState getBlockType() {
if (blk == null) {
blk = snap.getBlockType(bx, y, bz);
}
return blk;
}
}
private clreplaced OurEndMapIterator extends OurMapIterator {
OurEndMapIterator(int x0, int y0, int z0) {
super(x0, y0, z0);
}
@Override
public final int getBlockSkyLight() {
return 15;
}
}
/**
 * Chunk cache for representing unloaded chunk (or air): every query reports
 * empty, fully sky-lit air with no biome data.
 */
private static clreplaced EmptyChunk extends ChunkSnapshot {
    public EmptyChunk() {
        // 256-high snapshot at origin with zeroed backing data.
        super(256, 0, 0, 0, 0);
    }
    /* Need these for interface, but not used */
    @Override
    public int getX() {
        return 0;
    }
    @Override
    public int getZ() {
        return 0;
    }
    @Override
    public final DynmapBlockState getBlockType(int x, int y, int z) {
        // Air everywhere.
        return DynmapBlockState.AIR;
    }
    @Override
    public final int getBlockSkyLight(int x, int y, int z) {
        // Full sky light everywhere.
        return 15;
    }
    @Override
    public final int getBlockEmittedLight(int x, int y, int z) {
        return 0;
    }
    @Override
    public final int getHighestBlockYAt(int x, int z) {
        return 0;
    }
    @Override
    public int getBiome(int x, int z) {
        // -1 = no biome information.
        return -1;
    }
    @Override
    public boolean isSectionEmpty(int sy) {
        return true;
    }
}
/**
 * Chunk cache for representing generic stone chunk: a synthetic chunk whose
 * named fill block occupies everything below y=64, with air above. Used as
 * the stand-in for hidden chunks (FILL_STONE_PLAIN / FILL_OCEAN styles).
 */
private static clreplaced PlainChunk extends ChunkSnapshot {
    // Block state resolved from the fill block's name.
    private DynmapBlockState fill;
    PlainChunk(String fill) {
        // 256-high snapshot at origin with zeroed backing data.
        super(256, 0, 0, 0, 0);
        this.fill = DynmapBlockState.getBaseStateByName(fill);
    }
    /* Need these for interface, but not used */
    @Override
    public int getX() {
        return 0;
    }
    @Override
    public int getZ() {
        return 0;
    }
    @Override
    public int getBiome(int x, int z) {
        // -1 = no biome information.
        return -1;
    }
    @Override
    public final DynmapBlockState getBlockType(int x, int y, int z) {
        if (y < 64) {
            return fill;
        }
        return DynmapBlockState.AIR;
    }
    @Override
    public final int getBlockSkyLight(int x, int y, int z) {
        // Dark inside the fill, fully sky-lit above it.
        if (y < 64) {
            return 0;
        }
        return 15;
    }
    @Override
    public final int getBlockEmittedLight(int x, int y, int z) {
        return 0;
    }
    @Override
    public final int getHighestBlockYAt(int x, int z) {
        // Surface of the synthetic fill is fixed at y=64.
        return 64;
    }
    @Override
    public boolean isSectionEmpty(int sy) {
        // NOTE(review): reports the filled sections (sy < 4) as empty -- this
        // looks inverted relative to getBlockType; confirm against upstream.
        return (sy < 4);
    }
}
// Shared singleton stand-in snapshots used for unloaded or hidden chunks.
private static final EmptyChunk EMPTY = new EmptyChunk();
private static final PlainChunk STONE = new PlainChunk(DynmapBlockState.STONE_BLOCK);
private static final PlainChunk OCEAN = new PlainChunk(DynmapBlockState.WATER_BLOCK);
public static void init() {
if (!init) {
Field[] f = ChunkProviderServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((unloadqueue == null) && f[i].getType().isreplacedignableFrom(java.util.Set.clreplaced)) {
unloadqueue = f[i];
// Log.info("Found unloadqueue - " + f[i].getName());
unloadqueue.setAccessible(true);
} else if ((currentchunkloader == null) && f[i].getType().isreplacedignableFrom(IChunkLoader.clreplaced)) {
currentchunkloader = f[i];
// Log.info("Found currentchunkprovider - " + f[i].getName());
currentchunkloader.setAccessible(true);
}
}
f = WorldServer.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((updateEnreplacedyTick == null) && f[i].getType().isreplacedignableFrom(int.clreplaced)) {
updateEnreplacedyTick = f[i];
// Log.info("Found updateEnreplacedyTick - " + f[i].getName());
updateEnreplacedyTick.setAccessible(true);
}
}
f = AnvilChunkLoader.clreplaced.getDeclaredFields();
for (int i = 0; i < f.length; i++) {
if ((chunksToRemove == null) && (f[i].getType().equals(Map.clreplaced))) {
chunksToRemove = f[i];
chunksToRemove.setAccessible(true);
} else if ((pendingAnvilChunksCoordinates == null) && (f[i].getType().equals(Set.clreplaced))) {
pendingAnvilChunksCoordinates = f[i];
pendingAnvilChunksCoordinates.setAccessible(true);
}
}
// Get writeChunkToNBT method
Method[] ma = AnvilChunkLoader.clreplaced.getDeclaredMethods();
for (Method m : ma) {
Clreplaced<?>[] p = m.getParameterTypes();
if ((p.length == 3) && (p[0].equals(Chunk.clreplaced)) && (p[1].equals(World.clreplaced)) && (p[2].equals(NBTTagCompound.clreplaced))) {
writechunktonbt = m;
m.setAccessible(true);
break;
}
}
if ((unloadqueue == null) || (currentchunkloader == null) || (writechunktonbt == null)) {
Log.severe("ERROR: cannot find unload queue or chunk provider field - dynmap cannot load chunks");
}
if (updateEnreplacedyTick == null) {
Log.severe("ERROR: cannot find updateEnreplacedyTick - dynmap cannot drive enreplacedy cleanup when no players are active");
}
init = true;
}
}
/**
* Construct empty cache
*/
public ForgeMapChunkCache() {
init();
}
public void setChunks(ForgeWorld dw, List<DynmapChunk> chunks) {
this.dw = dw;
this.w = dw.getWorld();
if (dw.isLoaded()) {
/* Check if world's provider is ChunkProviderServer */
IChunkProvider cp = this.w.getChunkProvider();
if (cp instanceof ChunkProviderServer) {
cps = (ChunkProviderServer) cp;
} else {
Log.severe("Error: world " + dw.getName() + " has unsupported chunk provider");
}
} else {
chunks = new ArrayList<DynmapChunk>();
}
nsect = dw.worldheight >> 4;
this.chunks = chunks;
/* Compute range */
if (chunks.size() == 0) {
this.x_min = 0;
this.x_max = 0;
this.z_min = 0;
this.z_max = 0;
x_dim = 1;
} else {
x_min = x_max = chunks.get(0).x;
z_min = z_max = chunks.get(0).z;
for (DynmapChunk c : chunks) {
if (c.x > x_max) {
x_max = c.x;
}
if (c.x < x_min) {
x_min = c.x;
}
if (c.z > z_max) {
z_max = c.z;
}
if (c.z < z_min) {
z_min = c.z;
}
}
x_dim = x_max - x_min + 1;
}
snapcnt = x_dim * (z_max - z_min + 1);
snaparray = new ChunkSnapshot[snapcnt];
snaptile = new DynIntHashMap[snapcnt];
isSectionNotEmpty = new boolean[snapcnt][];
try {
if ((unloadqueue != null) && (cps != null)) {
queue = (Set<?>) unloadqueue.get(cps);
}
} catch (IllegalArgumentException iax) {
} catch (IllegalAccessException e) {
}
}
// One-shot flag so the unsupported-loader error is logged only once
private static boolean didError = false;
/**
 * Read the raw NBT data for a chunk, preferring a copy still pending in the
 * chunk loader's save queue and falling back to the region file on disk.
 * Requires reflective access to AnvilChunkLoader internals.
 *
 * @param x - chunk x coordinate
 * @param z - chunk z coordinate
 * @return the chunk's "Level" compound, or null if unavailable or unreadable
 */
public NBTTagCompound readChunk(int x, int z) {
if ((cps == null) || (!(cps.chunkLoader instanceof AnvilChunkLoader)) || (((chunksToRemove == null) || (pendingAnvilChunksCoordinates == null)))) {
if (!didError) {
Log.severe("**** DYNMAP CANNOT READ CHUNKS (UNSUPPORTED CHUNK LOADER) ****");
didError = true;
}
return null;
}
try {
AnvilChunkLoader acl = (AnvilChunkLoader) cps.chunkLoader;
Map<?, ?> chunkstoremove = null;
Set<?> pendingcoords = null;
// Reflectively read the loader's pending-save structures
chunkstoremove = (Map<?, ?>) chunksToRemove.get(acl);
pendingcoords = (Set<?>) pendingAnvilChunksCoordinates.get(acl);
NBTTagCompound rslt = null;
ChunkPos coord = new ChunkPos(x, z);
if (pendingcoords.contains(coord)) {
// Chunk has unsaved data queued - scan the pending entries for it
for (Object o : chunkstoremove.values()) {
if (chunkCoord == null) {
// Lazily resolve the coordinate and NBT fields of the pending-entry type
Field[] f = o.getClreplaced().getDeclaredFields();
for (Field ff : f) {
if ((chunkCoord == null) && (ff.getType().equals(ChunkPos.clreplaced))) {
chunkCoord = ff;
chunkCoord.setAccessible(true);
} else if ((nbtTag == null) && (ff.getType().equals(NBTTagCompound.clreplaced))) {
nbtTag = ff;
nbtTag.setAccessible(true);
}
}
if ((chunkCoord == null) || (nbtTag == null)) {
Log.severe("Error getting chunkCoord and nbtTag for Forge");
return null;
}
}
ChunkPos occ = (ChunkPos) chunkCoord.get(o);
if (occ.equals(coord)) {
rslt = (NBTTagCompound) nbtTag.get(o);
break;
}
}
}
if (rslt == null) {
// Not pending in memory - read from the region file on disk
DataInputStream str = RegionFileCache.getChunkInputStream(acl.chunkSaveLocation, x, z);
if (str == null) {
return null;
}
rslt = CompressedStreamTools.read(str);
}
if (rslt != null)
rslt = rslt.getCompound("Level");
return rslt;
} catch (Exception exc) {
Log.severe(String.format("Error reading chunk: %s,%d,%d", dw.getName(), x, z), exc);
return null;
}
}
/**
 * Convert an NBT tag into a plain Java value (boxed primitive, primitive
 * array, String, List, or Map), recursing into lists and compounds.
 * Tag type ids follow the NBT format (1=byte ... 11=int[]).
 * NOTE(review): list elements of types other than 5,6,8,10,11 are silently
 * skipped - presumably those never occur in tile entity data; confirm.
 *
 * @param v - tag to convert
 * @return converted value, or null for unsupported tag types
 */
private Object getNBTValue(INBTBase v) {
Object val = null;
switch(v.getId()) {
case // Byte
1:
val = Byte.valueOf(((NBTTagByte) v).getByte());
break;
case // Short
2:
val = Short.valueOf(((NBTTagShort) v).getShort());
break;
case // Int
3:
val = Integer.valueOf(((NBTTagInt) v).getInt());
break;
case // Long
4:
val = Long.valueOf(((NBTTagLong) v).getLong());
break;
case // Float
5:
val = Float.valueOf(((NBTTagFloat) v).getFloat());
break;
case // Double
6:
val = Double.valueOf(((NBTTagDouble) v).getDouble());
break;
case // Byte[]
7:
val = ((NBTTagByteArray) v).getByteArray();
break;
case // String
8:
val = ((NBTTagString) v).getString();
break;
case // List
9:
NBTTagList tl = (NBTTagList) v;
ArrayList<Object> vlist = new ArrayList<Object>();
// Lists are homogeneous: element type is declared once on the list tag
int type = tl.getTagType();
for (int i = 0; i < tl.size(); i++) {
switch(type) {
case 5:
float fv = tl.getFloat(i);
vlist.add(fv);
break;
case 6:
double dv = tl.getDouble(i);
vlist.add(dv);
break;
case 8:
String sv = tl.getString(i);
vlist.add(sv);
break;
case 10:
NBTTagCompound tc = tl.getCompound(i);
vlist.add(getNBTValue(tc));
break;
case 11:
int[] ia = tl.getIntArray(i);
vlist.add(ia);
break;
}
}
val = vlist;
break;
case // Map
10:
NBTTagCompound tc = (NBTTagCompound) v;
HashMap<String, Object> vmap = new HashMap<String, Object>();
for (Object t : tc.keySet()) {
String st = (String) t;
INBTBase tg = tc.get(st);
vmap.put(st, getNBTValue(tg));
}
val = vmap;
break;
case // Int[]
11:
val = ((NBTTagIntArray) v).getIntArray();
break;
}
return val;
}
/**
 * Determine whether a chunk should be rendered normally: it must intersect
 * at least one visible-area limit (when any are set) and must not intersect
 * any hidden-area limit.
 *
 * @param chunk - chunk to test
 * @return true if the chunk is visible
 */
private boolean isChunkVisible(DynmapChunk chunk) {
    // No visible limits configured means everything starts out visible
    boolean visible = (visible_limits == null);
    if (!visible) {
        for (VisibilityLimit lim : visible_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                visible = true;
                break;
            }
        }
    }
    if (visible && (hidden_limits != null)) {
        // Any intersecting hidden-area limit overrides visibility
        for (VisibilityLimit lim : hidden_limits) {
            if (lim.doIntersectChunk(chunk.x, chunk.z)) {
                visible = false;
                break;
            }
        }
    }
    return visible;
}
/**
 * Attempt to satisfy a chunk from Dynmap's shared snapshot cache.
 * On a hit, installs the snapshot (substituting the configured fill style
 * when the chunk is hidden) and its tile data into the local arrays.
 *
 * @param chunk - chunk to look up
 * @param vis - true if the chunk is visible under the configured limits
 * @return true if a cached snapshot was found and installed
 */
private boolean tryChunkCache(DynmapChunk chunk, boolean vis) {
/* Check if cached chunk snapshot found */
ChunkSnapshot ss = null;
SnapshotRec ssr = DynmapPlugin.plugin.sscache.getSnapshot(dw.getName(), chunk.x, chunk.z, blockdata, biome, biomeraw, highesty);
if (ssr != null) {
ss = ssr.ss;
if (!vis) {
// Hidden chunk: replace content with the configured fill snapshot
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
}
int idx = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
snaparray[idx] = ss;
snaptile[idx] = ssr.tileData;
}
return (ssr != null);
}
/**
 * Check whether the server has queued this chunk for unload.
 *
 * @param chunk - chunk to test
 * @return true if the chunk is in the pending-unload queue
 */
private boolean isChunkUnloadPending(DynmapChunk chunk) {
    if (queue == null) {
        // Unload queue could not be resolved reflectively - assume not pending
        return false;
    }
    // The queue holds packed long chunk coordinates
    return queue.contains(Long.valueOf(ChunkPos.asLong(chunk.x, chunk.z)));
}
// Prep snapshot and add to cache
/**
 * Build a ChunkSnapshot plus the tile-entity field data needed by HD block
 * models from raw chunk NBT, then register the result in the shared
 * snapshot cache.
 *
 * @param chunk - chunk coordinates being processed
 * @param nbt - "Level" compound of the chunk
 * @return record holding the snapshot and its tile data map
 */
private SnapshotRec prepChunkSnapshot(DynmapChunk chunk, NBTTagCompound nbt) {
ChunkSnapshot ss = new ChunkSnapshot(nbt, dw.worldheight);
DynIntHashMap tileData = new DynIntHashMap();
NBTTagList tiles = nbt.getList("TileEnreplacedies", 10);
if (tiles == null)
tiles = new NBTTagList();
/* Get tile enreplacedy data */
List<Object> vals = new ArrayList<Object>();
for (int tid = 0; tid < tiles.size(); tid++) {
NBTTagCompound tc = tiles.getCompound(tid);
int tx = tc.getInt("x");
int ty = tc.getInt("y");
int tz = tc.getInt("z");
// Reduce world coordinates to in-chunk (0-15) coordinates
int cx = tx & 0xF;
int cz = tz & 0xF;
DynmapBlockState blk = ss.getBlockType(cx, ty, cz);
String[] te_fields = HDBlockModels.getTileEnreplacedyFieldsNeeded(blk);
if (te_fields != null) {
// Collect only the NBT fields the renderer declared it needs
vals.clear();
for (String id : te_fields) {
INBTBase v = tc.get(id);
/* Get field */
if (v != null) {
Object val = getNBTValue(v);
if (val != null) {
vals.add(id);
vals.add(val);
}
}
}
if (vals.size() > 0) {
// Stored as alternating id/value pairs, keyed by in-chunk block index
Object[] vlist = vals.toArray(new Object[vals.size()]);
tileData.put(getIndexInChunk(cx, ty, cz), vlist);
}
}
}
SnapshotRec ssr = new SnapshotRec();
ssr.ss = ss;
ssr.tileData = tileData;
DynmapPlugin.plugin.sscache.putSnapshot(dw.getName(), chunk.x, chunk.z, ssr, blockdata, biome, biomeraw, highesty);
return ssr;
}
/**
 * Read NBT data from loaded chunks - needs to be called from server/world thread to be safe
 *
 * @return number of chunks captured (from cache or live server memory)
 */
public int getLoadedChunks() {
int cnt = 0;
if (!dw.isLoaded()) {
// World gone - mark cache empty and release any snapshots already held
isempty = true;
unloadChunks();
return 0;
}
Lisreplacederator<DynmapChunk> iter = chunks.lisreplacederator();
while (iter.hasNext()) {
long startTime = System.nanoTime();
DynmapChunk chunk = iter.next();
int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
// Skip if already processed
if (snaparray[chunkindex] != null)
continue;
boolean vis = isChunkVisible(chunk);
/* Check if cached chunk snapshot found */
if (tryChunkCache(chunk, vis)) {
endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
cnt++;
} else // If chunk is loaded and not being unloaded, we're grabbing its NBT data
if (cps.chunkExists(chunk.x, chunk.z) && (!isChunkUnloadPending(chunk))) {
ChunkSnapshot ss;
DynIntHashMap tileData;
if (vis) {
// If visible
NBTTagCompound nbt = new NBTTagCompound();
try {
// Serialize the live chunk to NBT via the reflected chunk-loader writer
writechunktonbt.invoke(cps.chunkLoader, cps.getChunk(chunk.x, chunk.z, false, false), w, nbt);
} catch (IllegalAccessException e) {
} catch (IllegalArgumentException e) {
} catch (InvocationTargetException e) {
}
SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
ss = ssr.ss;
tileData = ssr.tileData;
} else {
// Hidden chunk: substitute the configured fill snapshot
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
tileData = new DynIntHashMap();
}
snaparray[chunkindex] = ss;
snaptile[chunkindex] = tileData;
endChunkLoad(startTime, ChunkStats.LOADED_CHUNKS);
cnt++;
}
}
return cnt;
}
@Override
public int loadChunks(int max_to_load) {
// First capture whatever is already resident in the server,
// then read the remainder from disk, up to the per-call budget
return getLoadedChunks() + readChunks(max_to_load);
}
/**
 * Read chunks not already resolved, pulling NBT from the chunk loader or
 * region files. Resumable: keeps an iterator across calls so at most
 * max_to_load chunks are processed per invocation; on completion, fills
 * any remaining gaps with the EMPTY dummy and computes the isempty flag.
 *
 * @param max_to_load - maximum number of chunks to process this call
 * @return number of chunks processed
 */
public int readChunks(int max_to_load) {
if (!dw.isLoaded()) {
isempty = true;
unloadChunks();
return 0;
}
int cnt = 0;
if (iterator == null) {
// First call - start iterating over the requested chunk list
iterator = chunks.lisreplacederator();
}
// Suppress chunk-load event handling triggered by our own reads
DynmapCore.setIgnoreChunkLoads(true);
// Load the required chunks.
while ((cnt < max_to_load) && iterator.hasNext()) {
long startTime = System.nanoTime();
DynmapChunk chunk = iterator.next();
int chunkindex = (chunk.x - x_min) + (chunk.z - z_min) * x_dim;
// Skip if already processed
if (snaparray[chunkindex] != null)
continue;
boolean vis = isChunkVisible(chunk);
/* Check if cached chunk snapshot found */
if (tryChunkCache(chunk, vis)) {
endChunkLoad(startTime, ChunkStats.CACHED_SNAPSHOT_HIT);
} else {
NBTTagCompound nbt = readChunk(chunk.x, chunk.z);
// If read was good
if (nbt != null) {
ChunkSnapshot ss;
DynIntHashMap tileData;
// If hidden
if (!vis) {
if (hidestyle == HiddenChunkStyle.FILL_STONE_PLAIN) {
ss = STONE;
} else if (hidestyle == HiddenChunkStyle.FILL_OCEAN) {
ss = OCEAN;
} else {
ss = EMPTY;
}
tileData = new DynIntHashMap();
} else {
// Prep snapshot
SnapshotRec ssr = prepChunkSnapshot(chunk, nbt);
ss = ssr.ss;
tileData = ssr.tileData;
}
snaparray[chunkindex] = ss;
snaptile[chunkindex] = tileData;
endChunkLoad(startTime, ChunkStats.UNLOADED_CHUNKS);
} else {
endChunkLoad(startTime, ChunkStats.UNGENERATED_CHUNKS);
}
}
cnt++;
}
DynmapCore.setIgnoreChunkLoads(false);
if (iterator.hasNext() == false) /* If we're done */
{
isempty = true;
/* Fill missing chunks with empty dummy chunk */
for (int i = 0; i < snaparray.length; i++) {
if (snaparray[i] == null) {
snaparray[i] = EMPTY;
} else if (snaparray[i] != EMPTY) {
isempty = false;
}
}
}
return cnt;
}
/**
 * Test if done loading
 */
public boolean isDoneLoading() {
    if (!dw.isLoaded()) {
        // Unloaded world - nothing more can be read
        return true;
    }
    // Before the first readChunks() call the iterator is null: not started yet
    return (iterator != null) && (!iterator.hasNext());
}
/**
 * Test if all empty blocks
 */
public boolean isEmpty() {
// Set by readChunks()/getLoadedChunks(): true when every cached chunk is
// the EMPTY dummy, or when the world is not loaded
return isempty;
}
/**
 * Unload chunks
 */
public void unloadChunks() {
    ChunkSnapshot[] snaps = snaparray;
    if (snaps == null) {
        return;
    }
    // Drop every snapshot reference so the data can be collected
    for (int i = snaps.length; i-- > 0; ) {
        snaps[i] = null;
    }
    snaparray = null;
}
/**
 * Lazily build the per-section occupancy flags for one cached chunk.
 *
 * @param idx - index of the chunk in the snapshot array
 */
private void initSectionData(int idx) {
    boolean[] flags = new boolean[nsect + 1];
    isSectionNotEmpty[idx] = flags;
    // The EMPTY dummy chunk has no occupied sections at all
    if (snaparray[idx] != EMPTY) {
        for (int sect = 0; sect < nsect; sect++) {
            flags[sect] = !snaparray[idx].isSectionEmpty(sect);
        }
    }
}
/**
 * Test whether one 16x16x16 section of a cached chunk holds no blocks.
 *
 * @param sx - section x (chunk coordinate)
 * @param sy - section y index
 * @param sz - section z (chunk coordinate)
 * @return true if the section is empty
 */
public boolean isEmptySection(int sx, int sy, int sz) {
    int index = (sz - z_min) * x_dim + (sx - x_min);
    boolean[] flags = isSectionNotEmpty[index];
    if (flags == null) {
        // Populate the occupancy flags on first access
        initSectionData(index);
        flags = isSectionNotEmpty[index];
    }
    return !flags[sy];
}
/**
 * Get cache iterator
 */
public MapIterator gereplacederator(int x, int y, int z) {
    // The End gets its own iterator variant
    boolean isEnd = dw.getEnvironment().equals("the_end");
    return isEnd ? new OurEndMapIterator(x, y, z) : new OurMapIterator(x, y, z);
}
/**
 * Set hidden chunk style (default is FILL_AIR)
 */
public void setHiddenFillStyle(HiddenChunkStyle style) {
// Selects which dummy snapshot (STONE/OCEAN/EMPTY) replaces hidden chunks
this.hidestyle = style;
}
/**
 * Add visible area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 */
public void setVisibleRange(VisibilityLimit lim) {
    List<VisibilityLimit> lims = visible_limits;
    if (lims == null) {
        // Lazily allocate the limit list on first use
        lims = new ArrayList<VisibilityLimit>();
        visible_limits = lims;
    }
    lims.add(lim);
}
/**
 * Add hidden area limit - can be called more than once
 * Needs to be set before chunks are loaded
 * Coordinates are block coordinates
 */
public void setHiddenRange(VisibilityLimit lim) {
    List<VisibilityLimit> lims = hidden_limits;
    if (lims == null) {
        // Lazily allocate the limit list on first use
        lims = new ArrayList<VisibilityLimit>();
        hidden_limits = lims;
    }
    lims.add(lim);
}
@Override
public boolean setChunkDataTypes(boolean blockdata, boolean biome, boolean highestblocky, boolean rawbiome) {
    // Record which data categories the caller wants captured in snapshots
    this.blockdata = blockdata;
    this.biome = biome;
    this.highesty = highestblocky;
    this.biomeraw = rawbiome;
    // All requested data types are supported here
    return true;
}
@Override
public DynmapWorld getWorld() {
// World associated with this cache (set by setChunks)
return dw;
}
static {
// Build the biome-id -> BiomeMap lookup table (256 entries),
// matching Forge biomes to Dynmap's BiomeMap by translation key
Biome[] b = DynmapPlugin.getBiomeList();
BiomeMap[] bm = BiomeMap.values();
biome_to_bmap = new BiomeMap[256];
for (int i = 0; i < biome_to_bmap.length; i++) {
// Default every slot to NULL so unmatched ids are safe to look up
biome_to_bmap[i] = BiomeMap.NULL;
}
for (int i = 0; i < b.length; i++) {
if (b[i] == null)
continue;
String bs = b[i].getTranslationKey();
// Linear scan for a BiomeMap whose name matches the translation key
for (int j = 0; j < bm.length; j++) {
if (bm[j].toString().equals(bs)) {
biome_to_bmap[i] = bm[j];
break;
}
}
}
}
}