diff --git a/config.yml b/config.yml
index bf0c11d3..62f22402 100644
--- a/config.yml
+++ b/config.yml
@@ -5,6 +5,7 @@ realistic_biomes:
database_user: "tekkit"
database_password: ""
database_prefix: "rb"
+ database_poolsize: 10
cache_entire_database: true
persistence_enabled: true
diff --git a/pom.xml b/pom.xml
index 89f7a26c..0f7e0259 100644
--- a/pom.xml
+++ b/pom.xml
@@ -6,7 +6,7 @@
com.untamedears
RealisticBiomes
jar
- 1.3.1
+ 1.3.2
RealisticBiomes
https://github.com/Civcraft/RealisticBiomes
@@ -29,6 +29,31 @@
true
+
+
+ org.apache.maven.plugins
+ maven-shade-plugin
+ 2.3
+
+
+
+ com.zaxxer:HikariCP
+
+ **
+
+
+
+
+
+
+ package
+
+ shade
+
+
+
+
+
@@ -38,6 +63,12 @@
1.10.2-R0.1-SNAPSHOT
provided
+
+ com.zaxxer
+ HikariCP
+ 2.6.2
+ compile
+
diff --git a/src/com/untamedears/realisticbiomes/PersistConfig.java b/src/com/untamedears/realisticbiomes/PersistConfig.java
index e8039f22..92f88da2 100644
--- a/src/com/untamedears/realisticbiomes/PersistConfig.java
+++ b/src/com/untamedears/realisticbiomes/PersistConfig.java
@@ -9,6 +9,8 @@ public class PersistConfig {
public String user;
public String password;
public String prefix;
+
+ public int poolSize;
public boolean enabled;
diff --git a/src/com/untamedears/realisticbiomes/RealisticBiomes.java b/src/com/untamedears/realisticbiomes/RealisticBiomes.java
index ed634150..e099e997 100644
--- a/src/com/untamedears/realisticbiomes/RealisticBiomes.java
+++ b/src/com/untamedears/realisticbiomes/RealisticBiomes.java
@@ -1,5 +1,6 @@
package com.untamedears.realisticbiomes;
+import java.sql.SQLException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
@@ -128,6 +129,21 @@ public static void doLog(Level level, String message) {
}
}
}
+
+
+ public static void doLog(Level level, String message, Throwable wrong) {
+
+ if (RealisticBiomes.LOG != null) {
+
+ // here we make sure that we only log messages that are loggable with the given Level
+ // so if it's set to INFO (800) and we try to log a FINER message (400), then it won't work
+ // However if its ALL, then its set to Integer.MIN_VALUE, so everything will get logged. etc etc
+ if (level.intValue() >= RealisticBiomes.minLogLevel.intValue() ) {
+ RealisticBiomes.LOG.log(level, "[" + level.toString() + "] " + message, wrong);
+
+ }
+ }
+ }
private void loadPersistConfig(ConfigurationSection config) {
persistConfig = new PersistConfig();
@@ -138,6 +154,7 @@ private void loadPersistConfig(ConfigurationSection config) {
persistConfig.user = config.getString("database_user");
persistConfig.password = config.getString("database_password");
persistConfig.prefix = config.getString("database_prefix");
+ persistConfig.poolSize = config.getInt("database_poolsize", 10);
persistConfig.enabled = config.getBoolean("persistence_enabled");
persistConfig.unloadBatchPeriod = config.getInt("unload_batch_period");
@@ -464,4 +481,5 @@ public void growPlant(Plant plant, Block block, GrowthConfig growthConfig, Block
public PlantManager getPlantManager() {
return plantManager;
}
+
}
diff --git a/src/com/untamedears/realisticbiomes/persist/ChunkWriter.java b/src/com/untamedears/realisticbiomes/persist/ChunkWriter.java
deleted file mode 100644
index 466b701d..00000000
--- a/src/com/untamedears/realisticbiomes/persist/ChunkWriter.java
+++ /dev/null
@@ -1,61 +0,0 @@
-package com.untamedears.realisticbiomes.persist;
-
-import java.sql.Connection;
-import java.sql.PreparedStatement;
-import java.sql.SQLException;
-
-
-import com.avaje.ebeaninternal.server.lib.sql.DataSourceException;
-import com.untamedears.realisticbiomes.PersistConfig;
-import com.untamedears.realisticbiomes.RealisticBiomes;
-
-/**
- * basically a container class that holds most of the prepared statements that we will be using often
- * use the static methods to get the prepared statement you want, which makes sure that the prepared statement has its
- * params cleared
- *
- * @author Mark
- *
- */
-public class ChunkWriter {
- public static PreparedStatement deleteOldDataStmt = null;
- public static PreparedStatement deleteChunkStmt = null;
- public static PreparedStatement addChunkStmt = null;
- public static PreparedStatement updatePlantStmt = null;
- public static PreparedStatement getLastChunkIdStmt = null;
- public static PreparedStatement addPlantStmt = null;
- public static PreparedStatement deleteOldPlantsStmt = null;
- public static PersistConfig curConfig;
-
- public static PreparedStatement loadPlantsStmt = null;
-
- public static void init(Connection writeConn, Connection readConn, PersistConfig config) {
- try {
- curConfig = config;
-
- initWrite(writeConn);
- initRead(readConn);
- } catch (SQLException e) {
- throw new DataSourceException("Failed to create the prepared statements in ChunkWriter", e);
- }
- }
-
- public static void initWrite(Connection writeConnection) throws SQLException{
- deleteOldDataStmt = writeConnection.prepareStatement(String.format("DELETE FROM %s_plant WHERE chunkid = ?", curConfig.prefix));
-
- addChunkStmt = writeConnection.prepareStatement(String.format("INSERT INTO %s_chunk (w, x, z) VALUES (?, ?, ?)", curConfig.prefix));
- getLastChunkIdStmt = writeConnection.prepareStatement("SELECT LAST_INSERT_ID()");
-
- addPlantStmt = writeConnection.prepareStatement(String.format("INSERT INTO %s_plant (chunkid, w, x, y, z, date, growth, fruitGrowth) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", curConfig.prefix));
- // don't need for now,...maybe later?
- //updatePlantStmt = writeConnection.prepareStatement(String.format("UPDATE %s_plant SET date = ?, growth = ? where chunkid = ?", curConfig.prefix));
- deleteOldPlantsStmt = writeConnection.prepareStatement(String.format("DELETE FROM %s_plant WHERE chunkid = ?", curConfig.prefix));
- }
-
- public static void initRead(Connection readConnection) throws SQLException{
- loadPlantsStmt = readConnection.prepareStatement(String
- .format("SELECT w, x, y, z, date, growth, fruitGrowth FROM %s_plant WHERE chunkid = ?",
- curConfig.prefix));
- }
-
-}
diff --git a/src/com/untamedears/realisticbiomes/persist/Database.java b/src/com/untamedears/realisticbiomes/persist/Database.java
new file mode 100644
index 00000000..c71d4f62
--- /dev/null
+++ b/src/com/untamedears/realisticbiomes/persist/Database.java
@@ -0,0 +1,156 @@
+package com.untamedears.realisticbiomes.persist;
+
+import com.zaxxer.hikari.HikariDataSource;
+
+import java.sql.Connection;
+import java.sql.PreparedStatement;
+import java.sql.SQLException;
+import java.util.logging.Level;
+
+import com.untamedears.realisticbiomes.PersistConfig;
+import com.untamedears.realisticbiomes.RealisticBiomes;
+import com.zaxxer.hikari.HikariConfig;
+
+/**
+ * Wrapper for Connection Pool, and holder for instance-static strings.
+ *
+ * @author ProgrammerDan
+ */
+public class Database {
+
+ private HikariDataSource datasource;
+
+ public Database(PersistConfig baseconfig) {
+
+ initStatements(baseconfig);
+
+ if (baseconfig.user != null && baseconfig.host != null &&
+ baseconfig.port != null && baseconfig.databaseName != null) {
+ HikariConfig config = new HikariConfig();
+ config.setJdbcUrl("jdbc:mysql://" + baseconfig.host + ":" + baseconfig.port + "/" + baseconfig.databaseName);
+ // TODO make these config'd
+ config.setConnectionTimeout(1000l);
+ config.setIdleTimeout(600000l);
+ config.setMaxLifetime(7200000l);
+ // END TODO
+ config.setMaximumPoolSize(baseconfig.poolSize);
+ config.setUsername(baseconfig.user);
+ if (baseconfig.password != null) {
+ config.setPassword(baseconfig.password);
+ }
+ this.datasource = new HikariDataSource(config);
+
+ RealisticBiomes.doLog(Level.FINER, "creating chunk table (if necessary) with prepared statement:" + Database.makeTableChunk);
+
+ try (Connection connection = getConnection();
+ PreparedStatement statement = connection.prepareStatement(Database.makeTableChunk);) {
+ statement.execute();
+ statement.close();
+ } catch (SQLException se) {
+ RealisticBiomes.doLog(Level.SEVERE, "Unable to initialize chunk table in Database!", se);
+ this.datasource = null;
+ return;
+ }
+
+ try (Connection connection = getConnection();
+ PreparedStatement statement = connection.prepareStatement(Database.makeTablePlant);) {
+ statement.execute();
+ } catch (SQLException se) {
+ RealisticBiomes.doLog(Level.SEVERE, "Unable to initialize plant table in Database!", se);
+ this.datasource = null;
+ return;
+ }
+
+ /* MIGRATIONS */
+
+ // update database schema: try and catch
+ try (Connection connection = getConnection();
+ PreparedStatement upgradeTablePlant = connection.prepareStatement(Database.migration0001);){
+ upgradeTablePlant.execute();
+ } catch (SQLException e) {
+ RealisticBiomes.LOG.info("Could not update table - ignore if already updated. Error code: " + e.getErrorCode() + ", error message: " + e.getMessage());
+ }
+
+ } else {
+ this.datasource = null;
+ RealisticBiomes.doLog(Level.SEVERE, "Database not configured and is unavailable");
+ }
+ }
+
+ /**
+ * Gets a single connection from the pool for use. Checks for null database first.
+ *
+ * @return A new Connection
+ * @throws SQLException
+ */
+ public Connection getConnection() throws SQLException {
+ available();
+ return this.datasource.getConnection();
+ }
+
+ /**
+ * Closes all connections and this connection pool.
+ * @throws SQLException
+ */
+ public void close() throws SQLException {
+ available();
+ this.datasource.close();
+ }
+
+ /**
+ * Quick test; either ends or throws an exception if data source isn't configured.
+ * @throws SQLException
+ */
+ public void available() throws SQLException {
+ if (this.datasource == null) {
+ throw new SQLException("No Datasource Available");
+ }
+ }
+
+ public static String deleteOldDataStmt = null;
+ public static String deleteChunkStmt = null;
+ public static String addChunkStmt = null;
+ public static String updatePlantStmt = null;
+ public static String addPlantStmt = null;
+ public static String deleteOldPlantsStmt = null;
+ public static String loadPlantsStmt = null;
+ public static String makeTableChunk = null;
+ public static String makeTablePlant = null;
+ public static String selectAllFromChunk = null;
+
+ // MIGRATIONS -- TODO: convert to civmodcore migrations
+ public static String migration0001 = null;
+
+
+ private void initStatements(PersistConfig config) {
+
+ deleteOldDataStmt = String.format("DELETE FROM %s_plant WHERE chunkid = ?", config.prefix);
+
+ addChunkStmt = String.format("INSERT INTO %s_chunk (w, x, z) VALUES (?, ?, ?)", config.prefix);
+
+ addPlantStmt = String.format("INSERT INTO %s_plant (chunkid, w, x, y, z, date, growth, fruitGrowth) VALUES (?, ?, ?, ?, ?, ?, ?, ?)", config.prefix);
+
+ // don't need for now,...maybe later?
+ //updatePlantStmt = String.format("UPDATE %s_plant SET date = ?, growth = ? where chunkid = ?", config.prefix);
+ deleteOldPlantsStmt = String.format("DELETE FROM %s_plant WHERE chunkid = ?", config.prefix);
+
+ loadPlantsStmt = String.format("SELECT w, x, y, z, date, growth, fruitGrowth FROM %s_plant WHERE chunkid = ?", config.prefix);
+
+ makeTableChunk = String.format("CREATE TABLE IF NOT EXISTS %s_chunk " +
+ "(id BIGINT PRIMARY KEY AUTO_INCREMENT, " +
+ "w INTEGER, x INTEGER, z INTEGER," +
+ "INDEX chunk_coords_idx (w, x, z)) " +
+ "ENGINE INNODB", config.prefix);
+
+ // we need InnoDB storage engine or else we can't do foreign keys!
+ makeTablePlant = String.format("CREATE TABLE IF NOT EXISTS %s_plant" +
+ "(chunkId BIGINT, w INTEGER, x INTEGER, y INTEGER, z INTEGER, date INTEGER UNSIGNED, growth REAL, fruitGrowth REAL, " +
+ "INDEX plant_chunk_idx (chunkId), " +
+ "CONSTRAINT chunkIdConstraint FOREIGN KEY (chunkId) REFERENCES %s_chunk (id))" +
+ "ENGINE INNODB", config.prefix, config.prefix);
+
+ selectAllFromChunk = String.format("SELECT id, w, x, z FROM %s_chunk", config.prefix);
+
+ migration0001 = String.format("ALTER TABLE %s_plant ADD fruitGrowth REAL AFTER growth", config.prefix);
+ }
+}
diff --git a/src/com/untamedears/realisticbiomes/persist/PlantChunk.java b/src/com/untamedears/realisticbiomes/persist/PlantChunk.java
index 7f372a7d..8a4224a7 100644
--- a/src/com/untamedears/realisticbiomes/persist/PlantChunk.java
+++ b/src/com/untamedears/realisticbiomes/persist/PlantChunk.java
@@ -1,8 +1,10 @@
package com.untamedears.realisticbiomes.persist;
import java.sql.Connection;
+import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
+import java.sql.Statement;
import java.util.HashMap;
import java.util.Set;
import java.util.logging.Level;
@@ -131,9 +133,8 @@ public synchronized boolean load() {
RealisticBiomes.LOG.log(Level.WARNING, "Looks like DB has gone away: ", dse);
}
- // if we are here, had failure.
+ // if we are here, had failure -- one retry, then bail
try {
- plugin.getPlantManager().reconnect();
return innerLoad();
} catch(DataSourceException dse) {
RealisticBiomes.LOG.log(Level.WARNING, "DB really has gone away: ", dse);
@@ -163,48 +164,45 @@ private boolean innerLoad() {
DropGrouper dropGrouper = new DropGrouper(world);
// execute the load plant statement
- try {
-
- ChunkWriter.loadPlantsStmt.setLong(1, index);
- RealisticBiomes.doLog(Level.FINER,
- "PlantChunk.load() executing sql query: "
- + ChunkWriter.loadPlantsStmt.toString());
- ChunkWriter.loadPlantsStmt.execute();
-
- ResultSet rs = ChunkWriter.loadPlantsStmt.getResultSet();
- while (rs.next()) {
- int w = rs.getInt("w");
- int x = rs.getInt("x");
- int y = rs.getInt("y");
- int z = rs.getInt("z");
- long date = rs.getLong(5);
- float growth = rs.getFloat(6);
- float fruitGrowth = rs.getFloat(7);
-
- RealisticBiomes.doLog(Level.FINEST, String
- .format("PlantChunk.load(): got result: w:%s x:%s y:%s z:%s date:%s growth:%s",
- w, x, y, z, date, growth));
-
- // if the plant does not correspond to an actual crop, don't
- // load it
- if (MaterialAliases.getConfig(plugin.materialGrowth, world.getBlockAt(x, y, z)) == null) {
- RealisticBiomes.doLog(Level.FINER, "Plantchunk.load(): plant we got from db doesn't correspond to an actual crop, not loading");
- continue;
- }
-
- Plant plant = new Plant(date, growth, fruitGrowth);
-
- Block block = world.getBlockAt(x, y, z);
- GrowthConfig growthConfig = MaterialAliases.getConfig(plugin.materialGrowth, block);
- if (growthConfig.isPersistent()) {
- plugin.growPlant(plant, block, growthConfig, null, dropGrouper);
- }
-
- // if the plant isn't finished growing, add it to the
- // plants
- if (!plant.isFullyGrown()) {
- plants.put(new Coords(w, x, y, z), plant);
- RealisticBiomes.doLog(Level.FINER, "PlantChunk.load(): plant not finished growing, adding to plants list");
+ try (Connection connection = plugin.getPlantManager().getDb().getConnection();
+ PreparedStatement loadPlantsStmt = connection.prepareStatement(Database.loadPlantsStmt);){
+
+ loadPlantsStmt.setLong(1, index);
+ RealisticBiomes.doLog(Level.FINER, "PlantChunk.load() executing sql query: " + Database.loadPlantsStmt);
+
+ try (ResultSet rs = loadPlantsStmt.executeQuery()) {
+ while (rs.next()) {
+ int w = rs.getInt("w");
+ int x = rs.getInt("x");
+ int y = rs.getInt("y");
+ int z = rs.getInt("z");
+ long date = rs.getLong(5);
+ float growth = rs.getFloat(6);
+ float fruitGrowth = rs.getFloat(7);
+
+ RealisticBiomes.doLog(Level.FINEST, String
+ .format("PlantChunk.load(): got result: w:%s x:%s y:%s z:%s date:%s growth:%s",
+ w, x, y, z, date, growth));
+
+ // if the plant does not correspond to an actual crop, don't load it
+ if (MaterialAliases.getConfig(plugin.materialGrowth, world.getBlockAt(x, y, z)) == null) {
+ RealisticBiomes.doLog(Level.FINER, "Plantchunk.load(): plant we got from db doesn't correspond to an actual crop, not loading");
+ continue;
+ }
+
+ Plant plant = new Plant(date, growth, fruitGrowth);
+
+ Block block = world.getBlockAt(x, y, z);
+ GrowthConfig growthConfig = MaterialAliases.getConfig(plugin.materialGrowth, block);
+ if (growthConfig.isPersistent()) {
+ plugin.growPlant(plant, block, growthConfig, null, dropGrouper);
+ }
+
+ // if the plant isn't finished growing, add it to the plants
+ if (!plant.isFullyGrown()) {
+ plants.put(new Coords(w, x, y, z), plant);
+ RealisticBiomes.doLog(Level.FINER, "PlantChunk.load(): plant not finished growing, adding to plants list");
+ }
}
}
} catch (SQLException e) {
@@ -231,9 +229,8 @@ public synchronized void unload() {
RealisticBiomes.LOG.log(Level.WARNING, "Looks like DB has gone away: ", dse);
}
- // if we are here, had failure.
+ // if we are here, had failure -- do one retry
try {
- plugin.getPlantManager().reconnect();
innerUnload();
} catch(DataSourceException dse) {
RealisticBiomes.LOG.log(Level.WARNING, "DB really has gone away: ", dse);
@@ -244,68 +241,46 @@ public synchronized void unload() {
/**
* unloads the plant chunk, and saves it to the database.
- *
- * Note that this is called by PlantManager.saveAllAndStop(), so that method
- * takes care of setting autocommit to false/true and actually committing to
- * the database
-
- * @param writeStmts
*/
private void innerUnload() {
- RealisticBiomes.doLog(Level.FINEST,"PlantChunk.unload(): called with coords "
+ RealisticBiomes.doLog(Level.FINEST,"PlantChunk.innerUnload(): called with coords "
+ coords + "plantchunk object: " + this);
if (!loaded) {
- RealisticBiomes.doLog(Level.FINEST, "Plantchunk.unload(): not loaded so returning");
+ RealisticBiomes.doLog(Level.FINEST, "Plantchunk.innerUnload(): not loaded so returning");
return;
}
try {
// if this chunk was not in the database, then add it to the
// database
- RealisticBiomes.doLog(Level.FINEST,"PlantChunk.unload(): is inDatabase?: "
- + inDatabase);
+ RealisticBiomes.doLog(Level.FINEST,"PlantChunk.innerUnload(): is inDatabase?: " + inDatabase);
if (!inDatabase) {
-
- RealisticBiomes.doLog(Level.FINEST, "not in database, adding new chunk");
- ChunkWriter.addChunkStmt.setInt(1, coords.w);
- ChunkWriter.addChunkStmt.setInt(2, coords.x);
- ChunkWriter.addChunkStmt.setInt(3, coords.z);
- ChunkWriter.addChunkStmt.execute();
- ChunkWriter.getLastChunkIdStmt.execute();
- ResultSet rs = ChunkWriter.getLastChunkIdStmt.getResultSet();
-
- // need to call rs.next() to get the first result, and make sure
- // we get the index, and throw an exception
- // if we don't
- if (rs.next()) {
- index = rs.getLong(1);
- RealisticBiomes.doLog(Level.FINEST, "plantchunk.unload(): got new autoincrement index, it is now "
- + index);
- } else {
- throw new DataSourceException(
- "Trying to add the chunk to the database, but was unable to get "
- + "the last inserted statement to get the index");
- }
-
- // make sure to commit this newly added chunk, or else when we
- // add plants to the
- // database later in this method we dont get a Constraint
- // Failure exception
-
- try {
- RealisticBiomes.doLog(Level.FINEST, "plantchunk.unload(): committing new plantchunk with index "
- + this.index);
- plugin.getPlantManager().getWriteConnection().commit();
- } catch(SQLException e) {
- RealisticBiomes.LOG.warning("Can't commit?" + e);
+ RealisticBiomes.doLog(Level.FINEST, " not in database, adding new chunk");
+ try (Connection connection = plugin.getPlantManager().getDb().getConnection();
+ PreparedStatement addChunkStmt = connection.prepareStatement(Database.addChunkStmt, Statement.RETURN_GENERATED_KEYS);) {
+ addChunkStmt.setInt(1, coords.w);
+ addChunkStmt.setInt(2, coords.x);
+ addChunkStmt.setInt(3, coords.z);
+ addChunkStmt.execute();
+ try (ResultSet rs = addChunkStmt.getGeneratedKeys()) {
+ // need to call rs.next() to get the first result, and make sure
+ // we get the index, and throw an exception if we don't
+ if (rs.next()) {
+ index = rs.getLong(1);
+ RealisticBiomes.doLog(Level.FINEST, "plantchunk.innerUnload(): got new autoincrement index, it is now "
+ + index);
+ } else {
+ throw new DataSourceException(
+ "Trying to add the chunk to the database, but was unable to get "
+ + "the last inserted statement to get the index");
+ }
+ }
}
inDatabase = true;
}
-
} catch (SQLException e) {
-
throw new DataSourceException(
String.format(
"Failed to unload the chunk (In PlantChunk, adding chunk to db if needed), index %s, coords %s, PlantChunk obj: %s",
@@ -317,84 +292,69 @@ private void innerUnload() {
// if we are already unloaded then don't do anything
if (loaded) {
if (!plants.isEmpty()) {
- try {
- plugin.getPlantManager().getWriteConnection().setAutoCommit(false);
- } catch (SQLException e) {
- RealisticBiomes.LOG.severe("Can't set autocommit?" + e);
- }
-
- // delete plants in the database for this chunk and re-add them
- // this is OK because rb_plant does not have a autoincrement index
- // so it won't explode. However, does this have a negative performance impact?
- // TODO: add listener for block break event, and if its a plant, we remove it
- // from the correct plantchunk? Right now if a plant gets destroyed before
- // it is fully grown then it won't get remove from the database
- ChunkWriter.deleteOldPlantsStmt.setLong(1, index);
- ChunkWriter.deleteOldPlantsStmt.execute();
-
- int coordCounter = 0;
- boolean needToExec = false;
-
- RealisticBiomes.doLog(Level.FINEST, "PlantChunk.unload(): Unloading plantchunk with index: " + this.index);
- for (Coords coords : plants.keySet()) {
- if (!needToExec) {
- needToExec = true;
+ try (Connection connection = plugin.getPlantManager().getDb().getConnection();){
+ connection.setAutoCommit(false);
+
+ // delete plants in the database for this chunk and re-add them
+ // this is OK because rb_plant does not have an autoincrement index
+ // so it won't explode. However, does this have a negative performance impact?
+ // TODO: add listener for block break event, and if its a plant, we remove it
+ // from the correct plantchunk? Right now if a plant gets destroyed before
+ // it is fully grown then it won't get removed from the database
+ try (PreparedStatement deleteOldPlantsStmt = connection.prepareStatement(Database.deleteOldPlantsStmt);) {
+ deleteOldPlantsStmt.setLong(1, index);
+ deleteOldPlantsStmt.execute();
}
+
+ int coordCounter = 0;
+ boolean needToExec = false;
- Plant plant = plants.get(coords);
-
- ChunkWriter.addPlantStmt.clearParameters();
- ChunkWriter.addPlantStmt.setLong(1, index);
- ChunkWriter.addPlantStmt.setInt(2, coords.w);
- ChunkWriter.addPlantStmt.setInt(3, coords.x);
- ChunkWriter.addPlantStmt.setInt(4, coords.y);
- ChunkWriter.addPlantStmt.setInt(5, coords.z);
- ChunkWriter.addPlantStmt.setLong(6,
- plant.getUpdateTime());
- ChunkWriter.addPlantStmt.setFloat(7,
- plant.getGrowth());
- ChunkWriter.addPlantStmt.setFloat(8,
- plant.getFruitGrowth());
-
- ChunkWriter.addPlantStmt.addBatch();
-
- // execute the statement if we hit 1000 batches
- if ((coordCounter + 1) % 1000 == 0) {
+ try (PreparedStatement addPlantStmt = connection.prepareStatement(Database.addPlantStmt);) {
+ RealisticBiomes.doLog(Level.FINEST, "PlantChunk.unload(): Unloading plantchunk with index: " + this.index);
+ for (Coords coords : plants.keySet()) {
+ if (!needToExec) {
+ needToExec = true;
+ }
+
+ Plant plant = plants.get(coords);
+
+ addPlantStmt.clearParameters();
+ addPlantStmt.setLong(1, index);
+ addPlantStmt.setInt(2, coords.w);
+ addPlantStmt.setInt(3, coords.x);
+ addPlantStmt.setInt(4, coords.y);
+ addPlantStmt.setInt(5, coords.z);
+ addPlantStmt.setLong(6, plant.getUpdateTime());
+ addPlantStmt.setFloat(7, plant.getGrowth());
+ addPlantStmt.setFloat(8, plant.getFruitGrowth());
+
+ addPlantStmt.addBatch();
+
+ // execute the statement if we hit 100 batches -- most connections limit to 100 in a batch
+ if ((++coordCounter) % 100 == 0) {
+
+ addPlantStmt.executeBatch();
+ coordCounter = 0;
+ needToExec = false;
+ }
+
+ } // end for
- ChunkWriter.addPlantStmt.executeBatch();
- coordCounter = 0;
- needToExec = false;
- try {
- plugin.getPlantManager().getWriteConnection().commit();
- } catch (SQLException e) {
- RealisticBiomes.LOG.warning("Autocommit is probably still on..." + e);
+ // if we have left over statements afterwards, execute them
+ if (needToExec) {
+ addPlantStmt.executeBatch();
}
}
-
- } // end for
-
- // if we have left over statements afterwards, execute them
- if (needToExec) {
- ChunkWriter.addPlantStmt.executeBatch();
- try {
- plugin.getPlantManager().getWriteConnection().commit();
- } catch (SQLException e) {
- RealisticBiomes.LOG.warning("Autocommit is probably still on..." + e);
- }
- }
- try {
- plugin.getPlantManager().getWriteConnection().setAutoCommit(true);
- } catch (SQLException e) {
- RealisticBiomes.LOG.severe("Can't set autocommit?" + e);
+
+ connection.commit();
+ connection.setAutoCommit(true);
}
-
- }
+ }
}
} catch (SQLException e) {
throw new DataSourceException(
- String.format(
- "Failed to unload the chunk (In PlantChunk, "
- + "replacing with new data/deleting), index %s, coords %s, PlantChunk obj: %s",
+ String.format("Failed to unload the chunk (In PlantChunk, "
+ + "replacing with new data/deleting), index %s, coords %s, PlantChunk obj: %s",
index, coords, this), e);
}
diff --git a/src/com/untamedears/realisticbiomes/persist/PlantManager.java b/src/com/untamedears/realisticbiomes/persist/PlantManager.java
index d8b05031..136fa90d 100644
--- a/src/com/untamedears/realisticbiomes/persist/PlantManager.java
+++ b/src/com/untamedears/realisticbiomes/persist/PlantManager.java
@@ -34,8 +34,7 @@ public class PlantManager {
private final PersistConfig config;
// database connection
- private Connection writeConn;
- private Connection readConn;
+ private Database db;
public class PlantChunks {
private final HashMap map;
@@ -73,10 +72,6 @@ Set getCoordsSet() {
// prepared statements
- PreparedStatement makeTableChunk;
- PreparedStatement makeTablePlant;
- PreparedStatement selectAllFromChunk;
-
private Logger log;
////================================================================================= ////
@@ -89,75 +84,22 @@ public PlantManager(RealisticBiomes plugin, PersistConfig config) {
chunks = new PlantChunks();
- // open the database
- String sDriverName = "com.mysql.jdbc.Driver";
- try {
- Class.forName(sDriverName);
- } catch (ClassNotFoundException e) {
- throw new DataSourceException("Failed to initalize the " + sDriverName + " driver class!", e);
- }
+ db = new Database(config);
- this.connect();
-
- // KeepAlives. TODO: Replace with actual connection handling.
- new BukkitRunnable() {
- private long failCount = 0l;
- @Override
- public void run() {
- if (failCount > 50) {
- RealisticBiomes.doLog(Level.WARNING, "Keepalive has failed too many times, cancelling");
- this.cancel();
- return;
- }
- try {
- Statement writeAlive = RealisticBiomes.plugin.getPlantManager().getWriteConnection().createStatement();
- Statement readAlive = RealisticBiomes.plugin.getPlantManager().getReadConnection().createStatement();
-
- writeAlive.execute("SELECT 1;");
- readAlive.execute("SELECT 1;");
-
- RealisticBiomes.doLog(Level.FINER, "Keepalive Sent for read and write connections");
- } catch(SQLException e) {
- RealisticBiomes.doLog(Level.WARNING, "Keepalive has failed");
- failCount++;
- }
- }
-
- }.runTaskTimer(plugin, 180000l, 180000l); // every 3 minutes
-
- setupStatements();
-
- // run the prepared statements that create the tables if they do not exist in the database
try {
-
- RealisticBiomes.doLog(Level.FINER, "creating chunk table (if necessary) with prepared statement:" + this.makeTableChunk.toString());
-
- this.makeTableChunk.execute();
- this.makeTablePlant.execute();
-
- } catch (SQLException e) {
-
- throw new DataSourceException("PlantManager constructor: Caught exception when trying to run the " +
- "'create xx_chunk and xx_plant' tables if they don't exist!", e);
+ db.available();
+ } catch (SQLException se) {
+ throw new DataSourceException("Failed to connect to the database! ", se);
}
- try {
- // update database schema: try and catch
- PreparedStatement upgradeTablePlant = writeConn.prepareStatement(String.format("ALTER TABLE %s_plant " +
- "ADD fruitGrowth REAL AFTER growth", config.prefix));
- upgradeTablePlant.execute();
- } catch (SQLException e) {
- RealisticBiomes.LOG.info("Could not update table - ignore if already updated. Error code: " + e.getErrorCode() + ", error message: " + e.getMessage());
- }
-
// load all chunks
RealisticBiomes.LOG.info("loading PlantChunks");
long startTime = System.nanoTime()/1000000/*ns/ms*/;
- try {
- ResultSet rs = this.selectAllFromChunk.executeQuery();
-
+ try (Connection connection = db.getConnection();
+ PreparedStatement selectAllFromChunk = connection.prepareStatement(Database.selectAllFromChunk);
+ ResultSet rs = selectAllFromChunk.executeQuery();) {
while (rs.next()) {
long id = rs.getLong("id");
int w = rs.getInt("w");
@@ -193,100 +135,11 @@ public void run() {
log = plugin.getLogger();
}
-
- public void reconnect() {
- RealisticBiomes.LOG.info("Triggering reconnection for write and read channels.");
- try {
- if (writeConn != null) {
- writeConn.close();
- }
- } catch (SQLException e){
- RealisticBiomes.LOG.log(Level.WARNING, "Can't close prior write connection, may already be closed", e);
- }
-
- try {
- if (readConn != null) {
- readConn.close();
- }
- } catch (SQLException e){
- RealisticBiomes.LOG.log(Level.WARNING, "Can't close prior read connection, may already be closed", e);
- }
- try {
- connect();
- setupStatements();
- } catch (DataSourceException dse) {
- RealisticBiomes.LOG.log(Level.SEVERE, "Unable to reconnect to RealisticBiomes database", dse);
- }
- }
-
- public void connect() {
- String jdbcUrl = "jdbc:mysql://" + config.host + ":" + config.port + "/" + config.databaseName + "?user=" + config.user + "&password=" + config.password;
- int iTimeout = 30;
-
- // Try and connect to the database
- try {
- RealisticBiomes.LOG.info("Connecting write and read channels.");
- writeConn = DriverManager.getConnection(jdbcUrl);
- readConn = DriverManager.getConnection(jdbcUrl);
- //Statement stmt = readConn.createStatement(); // TODO: wtf is this supposed to do
- //stmt.setQueryTimeout(iTimeout);
-
- } catch (SQLException e) {
- throw new DataSourceException("Failed to connect to the database with the jdbcUrl: " + jdbcUrl, e);
- }
- }
- public void setupStatements() {
- // Create the prepared statements
- try {
- // we need InnoDB storage engine or else we can't do foreign keys!
- this.makeTableChunk = writeConn.prepareStatement(String.format("CREATE TABLE IF NOT EXISTS %s_chunk " +
- "(id BIGINT PRIMARY KEY AUTO_INCREMENT, " +
- "w INTEGER, x INTEGER, z INTEGER," +
- "INDEX chunk_coords_idx (w, x, z)) " +
- "ENGINE INNODB", config.prefix));
-
- // we need InnoDB storage engine or else we can't do foreign keys!
- this.makeTablePlant = writeConn.prepareStatement(String.format("CREATE TABLE IF NOT EXISTS %s_plant" +
- "(chunkId BIGINT, w INTEGER, x INTEGER, y INTEGER, z INTEGER, date INTEGER UNSIGNED, growth REAL, fruitGrowth REAL, " +
- "INDEX plant_chunk_idx (chunkId), " +
- "CONSTRAINT chunkIdConstraint FOREIGN KEY (chunkId) REFERENCES %s_chunk (id))" +
- "ENGINE INNODB", config.prefix, config.prefix));
-
- this.selectAllFromChunk = readConn.prepareStatement(String.format("SELECT id, w, x, z FROM %s_chunk", config.prefix));
-
- // create chunk writer
- ChunkWriter.init(writeConn, readConn, config);
-
- } catch (SQLException e) {
- throw new DataSourceException("PlantManager constructor: Failed to create the prepared statements! (for table creation)", e);
- }
- }
-
- public Connection getWriteConnection() {
- return writeConn;
- }
-
- public Connection getReadConnection() {
- return readConn;
+ public Database getDb() {
+ return this.db;
}
- public void testOrReconnect() {
- try {
- Statement writeAlive = writeConn.createStatement();
- Statement readAlive = readConn.createStatement();
-
- writeAlive.execute("SELECT 1;");
- readAlive.execute("SELECT 1;");
-
- } catch(SQLException e) {
- RealisticBiomes.LOG.log(Level.WARNING, "Connection has died.", e);
-
- reconnect();
- }
- }
-
-
/**
* call this to load all the plants from all our plant chunks
* this should only be called if persistConfig.cacheEntireDatabase is true
@@ -427,17 +280,16 @@ public void saveAllAndStop() {
public void run() {
try {
- log.info("Starting runnable in saveAllAndStop()");
- testOrReconnect();
-
- for (ChunkCoords coords : chunks.getCoordsSet()) {
-
- PlantChunk pChunk = chunks.get(coords);
-
- pChunk.unload();
- }
-
- log.info("finished runnable in saveAllAndStop()");
+ log.info("Starting runnable in saveAllAndStop()");
+
+ for (ChunkCoords coords : chunks.getCoordsSet()) {
+
+ PlantChunk pChunk = chunks.get(coords);
+
+ pChunk.unload();
+ }
+
+ log.info("finished runnable in saveAllAndStop()");
} catch (Exception e) {
log.log(Level.SEVERE, "error in run() when shutting down!", e);
@@ -458,6 +310,13 @@ public void run() {
// Keep trying to shut down
}
}
+
+ try {
+ db.close();
+ } catch (SQLException e) {
+ log.log(Level.SEVERE, "error in saveAllAndStop, closing db", e);
+ }
+
log.info("write service finished");
}
@@ -482,7 +341,6 @@ private int unloadChunk(ChunkCoords coords) {
return 0;
// finally, actually unload this thing
- testOrReconnect();
int tmpCount = pChunk.getPlantCount();
pChunk.unload();
return tmpCount;
@@ -544,7 +402,6 @@ public PlantChunk loadChunk(ChunkCoords coords) {
}
// finally, just load this thing!
- testOrReconnect();
long start = System.nanoTime()/1000000/*ns/ms*/;
boolean loaded = pChunk.load();
long end = System.nanoTime()/1000000/*ns/ms*/;
@@ -606,7 +463,6 @@ public Plant getPlantFromBlock(Block block) {
// load the plant data if it is not yet loaded
if (pChunk.isLoaded() == false) {
- testOrReconnect();
pChunk.load();
}