Fix/merge conflict (#87)

* Version 1.3.0 (#85)

* Feature/async chunkmaster (#81)

* Change generation to be asynchronous

At this stage it will most likely crash the server at some point.
Pausing and resuming is not stable yet, and saving the progress is not stable either.
Chunks are being unloaded in the main thread by an unloader class.
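
Roughly, the unloader idea looks like the sketch below; class and method names are illustrative, not the plugin's actual ChunkUnloader implementation. Generation code running off the main thread only enqueues chunks, and a repeating task on the main server thread does the unloading:

    import org.bukkit.Chunk
    import org.bukkit.plugin.Plugin
    import org.bukkit.scheduler.BukkitRunnable
    import java.util.concurrent.ConcurrentLinkedQueue

    // Illustrative sketch, not the plugin's actual ChunkUnloader.
    class ChunkUnloaderSketch(private val plugin: Plugin) : BukkitRunnable() {
        // The generation thread only enqueues chunks here.
        private val pending = ConcurrentLinkedQueue<Chunk>()

        fun enqueue(chunk: Chunk) {
            pending.add(chunk)
        }

        // Executed on the main server thread by the Bukkit scheduler.
        override fun run() {
            var chunk = pending.poll()
            while (chunk != null) {
                chunk.unload(true) // saving on unload is a guess at the intended behaviour
                chunk = pending.poll()
            }
        }

        fun start() {
            runTaskTimer(plugin, 20L, 20L) // once per second; the real interval may differ
        }
    }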

* Switch to native threads

- Use a thread instead of async tasks
- Store pending Paper chunks in the database
- Interrupt the thread when it should be stopped
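
The pattern described above, reduced to a sketch (not the actual GenerationTask code):

    // Illustrative sketch of the native-thread pattern, not the plugin's GenerationTask.
    class GenerationTaskSketch : Runnable {
        @Volatile
        var isRunning = false
            private set

        override fun run() {
            isRunning = true
            try {
                while (!Thread.currentThread().isInterrupted) {
                    // request/generate the next chunk here and periodically store
                    // still-pending chunks in the database so a restart can resume
                    Thread.sleep(1) // placeholder for real work; sleep throws if interrupted
                }
            } catch (e: InterruptedException) {
                // stop was requested while blocked; fall through to cleanup
            } finally {
                isRunning = false
            }
        }
    }

The task is then started on a plain Thread and stopped by interrupting that thread, as the RunningTaskEntry class further down shows.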

* Fix insertion of pending chunks

Fix an error that is thrown when the SQL statement for inserting pending chunks has no chunks to insert.
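
A minimal sketch of the guard (the real PendingChunks DAO may build its statement differently); it relies on the pending_chunks table and the executeStatement helper shown in the diff below:

    // Sketch of the empty-batch guard; not the actual PendingChunks DAO code.
    fun insertPendingChunks(sqliteManager: SqliteManager, taskId: Int, chunks: List<Pair<Int, Int>>) {
        if (chunks.isEmpty()) {
            return // an "INSERT ... VALUES" statement with zero value tuples is invalid SQL
        }
        val placeholders = chunks.joinToString(", ") { "(?, ?, ?)" }
        val sql = "INSERT INTO pending_chunks (task_id, chunk_x, chunk_z) VALUES $placeholders"
        val values = HashMap<Int, Any>()
        var index = 1
        for ((x, z) in chunks) {
            values[index] = taskId
            values[index + 1] = x
            values[index + 2] = z
            index += 3
        }
        sqliteManager.executeStatement(sql, values, null)
    }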

* Add task states

Add states to differentiate between generating and validating tasks,
as well as a field in the database to store this information.
A task first generates the world until the required radius or the
world border is reached, then it validates that each chunk has been generated.
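
A minimal sketch of the two states; the generation_tasks table stores them as text (column "state", default 'GENERATING'), and the actual plugin may use additional states:

    // Sketch only; the real state handling may define more values.
    enum class TaskState {
        GENERATING, // expand outwards until the radius or the world border is reached
        VALIDATING  // afterwards, verify that every chunk inside the radius was generated
    }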

* Add object representation of world_properties table

* Add DAO for pending_chunks table

* Add DAO for generation_tasks table

* Add state updating to periodic save

* Fix loading of world properties

* Add states to tasks and fix completion handling

* Fix progress report and spiral shape

* Modify the Paper generation task so it works with Spigot

This change is needed because normal chunk generation doesn't allow
chunks to be requested from a different thread. PaperLib solves this
problem.
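
A sketch of how a chunk can be requested through PaperLib (illustrative, not the plugin's exact call). PaperLib.getChunkAtAsync loads or generates the chunk asynchronously on Paper and falls back to a synchronous load on Spigot:

    import io.papermc.lib.PaperLib
    import org.bukkit.World

    // Illustrative request; getChunkAtAsync returns a CompletableFuture<Chunk>.
    fun requestChunk(world: World, chunkX: Int, chunkZ: Int) {
        PaperLib.getChunkAtAsync(world, chunkX, chunkZ, true).thenAccept { chunk ->
            // the chunk is now loaded (and generated if it did not exist before)
        }
    }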

* Add workarounds for Spigot problems

* Fix some blocking issues and update README

* Add locking to ChunkUnloader class

* Add total chunk count to list command (closes #79) (#82)

* Fix shape being stuck (#83)

* Add autostart config parameter (closes #78) (#84)
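
On startup the flag is consulted roughly like this; the config path "generation.autostart" is a guess, only the NO_AUTOSTART message is taken from the language file below:

    // Sketch only; the config key name is assumed, not confirmed by the source.
    fun applyAutostart(chunkmaster: Chunkmaster) {
        val autostart = chunkmaster.config.getBoolean("generation.autostart", true)
        if (!autostart) {
            chunkmaster.logger.info(chunkmaster.langManager.getLocalized("NO_AUTOSTART"))
            // keep loaded tasks paused instead of starting their generation threads
        }
    }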

* Add CircleCI badge to README

* Add CodeFactor badge

* Update Translation to 1.3.0 (#86)

* Update Translation to 1.3.0

* Tweak SQL_ERROR word order

Co-authored-by: NPBeta <shanhang007@gmail.com>

@@ -1,189 +1,189 @@
package net.trivernis.chunkmaster.lib.database

import net.trivernis.chunkmaster.Chunkmaster
import org.apache.commons.lang.exception.ExceptionUtils
import java.sql.Connection
import java.sql.DriverManager
import java.sql.ResultSet

class SqliteManager(private val chunkmaster: Chunkmaster) {
    private val tables = listOf(
        Pair(
            "generation_tasks",
            listOf(
                Pair("id", "integer PRIMARY KEY AUTOINCREMENT"),
                Pair("center_x", "integer NOT NULL DEFAULT 0"),
                Pair("center_z", "integer NOT NULL DEFAULT 0"),
                Pair("last_x", "integer NOT NULL DEFAULT 0"),
                Pair("last_z", "integer NOT NULL DEFAULT 0"),
                Pair("world", "text UNIQUE NOT NULL DEFAULT 'world'"),
                Pair("radius", "integer DEFAULT -1"),
                Pair("shape", "text NOT NULL DEFAULT 'square'"),
                Pair("state", "text NOT NULL DEFAULT 'GENERATING'")
            )
        ),
        Pair(
            "world_properties",
            listOf(
                Pair("name", "text PRIMARY KEY"),
                Pair("center_x", "integer NOT NULL DEFAULT 0"),
                Pair("center_z", "integer NOT NULL DEFAULT 0")
            )
        ),
        Pair(
            "pending_chunks",
            listOf(
                Pair("id", "integer PRIMARY KEY AUTOINCREMENT"),
                Pair("task_id", "integer NOT NULL"),
                Pair("chunk_x", "integer NOT NULL"),
                Pair("chunk_z", "integer NOT NULL")
            )
        )
    )
    private val needUpdate = HashSet<Pair<String, Pair<String, String>>>()
    private val needCreation = HashSet<String>()
    private var connection: Connection? = null
    private var activeTasks = 0

    val worldProperties = WorldProperties(this)
    val pendingChunks = PendingChunks(this)
    val generationTasks = GenerationTasks(this)

    /**
     * Returns the connection to the database
     */
    fun getConnection(): Connection? {
        if (this.connection != null) {
            return this.connection
        }
        try {
            Class.forName("org.sqlite.JDBC")
            this.connection = DriverManager.getConnection(
                "jdbc:sqlite:${chunkmaster.dataFolder.absolutePath}/" +
                        "${chunkmaster.config.getString("database.filename")}"
            )
            return this.connection
        } catch (e: Exception) {
            chunkmaster.logger.severe(chunkmaster.langManager.getLocalized("DATABASE_CONNECTION_ERROR"))
            chunkmaster.logger.severe(e.message)
        }
        return null
    }

    /**
     * Checks for and performs an update
     */
    fun init() {
        this.checkUpdate()
        this.performUpdate()
    }

    /**
     * Checks which tables need an update or creation.
     */
    private fun checkUpdate() {
        val meta = getConnection()!!.metaData
        for (table in tables) {
            val resTables = meta.getTables(null, null, table.first, null)
            if (resTables.next()) { // table exists
                for (column in table.second) {
                    val resColumn = meta.getColumns(null, null, table.first, column.first)
                    if (!resColumn.next()) {
                        needUpdate.add(Pair(table.first, column))
                    }
                    resColumn.close()
                }
            } else {
                needCreation.add(table.first)
            }
            resTables.close()
        }
    }

    /**
     * Executes a sql statement on the database.
     */
    fun executeStatement(sql: String, values: HashMap<Int, Any>, callback: ((ResultSet?) -> Unit)?) {
        val connection = getConnection()
        activeTasks++
        if (connection != null) {
            try {
                //println("'$sql' with values $values")
                val statement = connection.prepareStatement(sql)
                for (parameterValue in values) {
                    statement.setObject(parameterValue.key, parameterValue.value)
                }
                statement.execute()
                val res: ResultSet? = statement.resultSet
                if (callback != null) {
                    callback(res)
                }
                statement.close()
            } catch (e: Exception) {
                chunkmaster.logger.severe(chunkmaster.langManager.getLocalized("SQL_ERROR", e.toString()))
                chunkmaster.logger.info(ExceptionUtils.getStackTrace(e))
            } finally {
                activeTasks--
                if (activeTasks == 0) {
                    connection.close()
                    this.connection = null
                }
            }
        } else {
            chunkmaster.logger.severe(chunkmaster.langManager.getLocalized("NO_DATABASE_CONNECTION"))
        }
    }

    /**
     * Creates or updates tables that needed an update.
     */
    private fun performUpdate() {
        for (table in needCreation) {
            try {
                var tableDef = "CREATE TABLE IF NOT EXISTS $table ("
                for (column in tables.find { it.first == table }!!.second) {
                    tableDef += "${column.first} ${column.second},"
                }
                tableDef = tableDef.substringBeforeLast(",") + ");"
                chunkmaster.logger.finest(
                    chunkmaster.langManager.getLocalized(
                        "CREATE_TABLE_DEFINITION",
                        table,
                        tableDef
                    )
                )
                executeStatement(tableDef, HashMap(), null)
            } catch (e: Exception) {
                chunkmaster.logger.severe(chunkmaster.langManager.getLocalized("TABLE_CREATE_ERROR", table))
                chunkmaster.logger.severe(e.message)
                chunkmaster.logger.info(ExceptionUtils.getStackTrace(e))
            }
        }
        for (table in needUpdate) {
            val updateSql = "ALTER TABLE ${table.first} ADD COLUMN ${table.second.first} ${table.second.second}"
            try {
                executeStatement(updateSql, HashMap(), null)
                chunkmaster.logger.finest(
                    chunkmaster.langManager.getLocalized(
                        "UPDATE_TABLE_DEFINITION",
                        table.first,
                        updateSql
                    )
                )
            } catch (e: Exception) {
                chunkmaster.logger.severe(
                    chunkmaster.langManager.getLocalized(
                        "UPDATE_TABLE_FAILED",
                        table.first,
                        updateSql
                    )
                )
                chunkmaster.logger.severe(e.message)
                chunkmaster.logger.info(ExceptionUtils.getStackTrace(e))
            }
        }
    }
}
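
For context, a call into executeStatement from one of the DAOs could look roughly like this (the query is illustrative; the column names come from the table definitions above):

    // Illustrative call site, not actual DAO code from the plugin.
    fun loadTasks(sqliteManager: SqliteManager) {
        val sql = "SELECT id, world, radius, shape, state FROM generation_tasks"
        sqliteManager.executeStatement(sql, HashMap()) { result ->
            while (result?.next() == true) {
                val id = result.getInt("id")
                val world = result.getString("world")
                val state = result.getString("state")
                // rebuild a task entry for this row (id, world, state, ...)
            }
        }
    }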

@@ -1,69 +1,69 @@
package net.trivernis.chunkmaster.lib.generation.taskentry

import net.trivernis.chunkmaster.lib.generation.GenerationTask

class RunningTaskEntry(
    override val id: Int,
    override val generationTask: GenerationTask
) : TaskEntry {

    private var lastProgress: Pair<Long, Double>? = null
    private var lastChunkCount: Pair<Long, Int>? = null
    private var thread = Thread(generationTask)

    /**
     * Returns the generation Speed
     */
    val generationSpeed: Pair<Double?, Double?>
        get() {
            var generationSpeed: Double? = null
            var chunkGenerationSpeed: Double? = null
            if (lastProgress != null) {
                val progressDiff = generationTask.shape.progress() - lastProgress!!.second
                val timeDiff = (System.currentTimeMillis() - lastProgress!!.first).toDouble() / 1000
                generationSpeed = progressDiff / timeDiff
            }
            if (lastChunkCount != null) {
                val chunkDiff = generationTask.count - lastChunkCount!!.second
                val timeDiff = (System.currentTimeMillis() - lastChunkCount!!.first).toDouble() / 1000
                chunkGenerationSpeed = chunkDiff / timeDiff
            }
            lastProgress = Pair(System.currentTimeMillis(), generationTask.shape.progress())
            lastChunkCount = Pair(System.currentTimeMillis(), generationTask.count)
            return Pair(generationSpeed, chunkGenerationSpeed)
        }

    init {
        lastProgress = Pair(System.currentTimeMillis(), generationTask.shape.progress())
        lastChunkCount = Pair(System.currentTimeMillis(), generationTask.count)
    }

    fun start() {
        thread.start()
    }

    fun cancel(timeout: Long): Boolean {
        if (generationTask.isRunning) {
            generationTask.cancel()
            thread.interrupt()
        }
        return try {
            joinThread(timeout)
        } catch (e: InterruptedException) {
            true
        }
    }

    private fun joinThread(timeout: Long): Boolean {
        var threadStopped = false
        for (i in 0..100) {
            if (!thread.isAlive || !generationTask.isRunning) {
                threadStopped = true
                break
            }
            Thread.sleep(timeout / 100)
        }
        return threadStopped
    }
}
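
Illustrative usage of RunningTaskEntry; constructing the GenerationTask itself is plugin-specific and not shown:

    // Sketch of how the entry is used; taskId and generationTask are assumed inputs.
    fun runAndStop(taskId: Int, generationTask: GenerationTask) {
        val entry = RunningTaskEntry(taskId, generationTask)
        entry.start() // spawns the dedicated generation thread

        // Progress/s and chunks/s measured since the last query:
        val (progressSpeed, chunkSpeed) = entry.generationSpeed

        // Stop the task; cancel() returns false if the thread did not stop in time.
        if (!entry.cancel(10000L)) {
            // report the timeout (compare the CANCEL_FAIL message below)
        }
    }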

@@ -1,11 +1,12 @@
RESUME_FOR_WORLD = 正在恢复执行 '%s' 世界的区块生成任务...
TASK_FINISHED = 任务 #%d 在生成 %d 个区块后完成.
-TASK_CANCELED = 已取消任务 #%s.
+TASK_CANCELLED = 已取消任务 #%s.
TASK_LOAD_FAILED = §c加载任务 #%d 失败.
TASK_LOAD_SUCCESS = %d 个已保存的任务加载完成.
TASK_NOT_FOUND = §c任务 %s 未找到!
CREATE_DELAYED_LOAD = 正在创建延迟执行的区块生成任务...
-TASK_PERIODIC_REPORT = 任务 #%d 正在 '%s' 世界执行. 进度: %d 区块 %s %s, 速度: %.1f 区块 / 秒, 最新生成的区块: %d, %d
+TASK_PERIODIC_REPORT = 任务 #%d 正在 '%s' 世界执行. 状态: %s. 进度: %d 区块 %s %s, 速度: %.1f 区块 / 秒, 最新生成的区块: %d, %d
+TASK_PERIODIC_REPORT_CORRECTING = 任务 #%d 正在为世界 '%s' 生成缺失的区块. 进度: %d 区块 %s
TASK_SAVE_FAILED = §c保存任务时发生错误: %s
WORLD_NAME_REQUIRED = §c你需要提供世界名称!
@@ -18,7 +19,7 @@ TASK_ID_REQUIRED = §c你需要提供任务 ID!
INVALID_ARGUMENT = §c在 %s: %s 存在无效的变量!
PAUSED_TASKS_HEADER = 当前暂停的区块生成任务
-TASKS_ENTRY = - §9#%d§r - §2%s§r - §2%d 区块 %s§r
+TASKS_ENTRY = - §9#%d§r - §2%s§r - §2%s§r - §2%s 区块 %s§r
RUNNING_TASKS_HEADER = 当前运行的区块生成任务
NO_GENERATION_TASKS = 无区块生成任务.
@@ -44,7 +45,7 @@ DB_INIT_FINISHED = 数据库初始化完成.
DB_INIT_EROR = 初始化数据库时发生错误: %s.
DATABASE_CONNECTION_ERROR = §c连接数据库失败!
-SQL_ERROR = §cSQL %s 发生错误!
+SQL_ERROR = §cSQL 发生错误: %s !
NO_DATABASE_CONNECTION = §c无法执行 SQL 语句: 无数据库连接.
CREATE_TABLE_DEFINITION = 已创建表 %s ,定义 %s.
TABLE_CREATE_ERROR = §c创建表 %s 失败.
@@ -73,3 +74,7 @@ STATS_WORLD_NAME = §l%s§r
STATS_ENTITY_COUNT = - §2%d§r 实体
STATS_LOADED_CHUNKS = - §2%d§r 已载入区块
STATS_PLUGIN_LOADED_CHUNKS = - §2%d§r 被 Chunk Master 载入的区块
+SAVING_CHUNKS = 正在保存 %d 已载入的区块...
+CANCEL_FAIL = 取消任务 #%d 操作超时!
+NO_AUTOSTART = 自动启动被设置为 §2关闭§r. 正在暂停...
