Importing 100,000 records from JSON - slow hit to AT_ENTITY

There are roughly 100,000 records in a JSON file, and I am trying to write all of them to the mantle.product.Product entity.
The import starts fine, but at around 35,000 records it begins to degrade and logs the warning 'Slow hit to AT_ENTITY:create:mantle.product.Product'. Then it stops completely with a 'java.lang.OutOfMemoryError: GC overhead limit exceeded' error. This is the behavior on my machine.
Any hints are welcome.
Here is the code:
```groovy
import groovy.json.JsonParserType
import groovy.json.JsonSlurper

import org.moqui.context.TransactionFacade

void processJson2(String filePath) {
    // def json = new JsonSlurper().parseText(new BufferedReader(new InputStreamReader(this.getFileIO().openStream(), "UTF-8")))

    // will initialize the class manually
    def docReadReference = this.executionContext.resource.getLocationReference(filePath)

    if (docReadReference.isFile()) {
        // open the file as an input stream
        InputStream inputFile = docReadReference.openStream()
        TransactionFacade trxFacade = this.executionContext.getTransaction()

        // turn off tarpit, entity ECA rules, and authz for the bulk load
        this.executionContext.artifactExecution.disableTarpit()
        this.executionContext.artifactExecution.disableEntityEca()
        this.executionContext.artifactExecution.disableAuthz()

        trxFacade.runRequireNew(50000, "Error loading entity JSON data", {
            try {
                logMachine.info("Opening file ${docReadReference.isFile()}")
                JsonSlurper slurper = new JsonSlurper().setType(JsonParserType.CHARACTER_SOURCE)
                def json = slurper.parse(new BufferedReader(new InputStreamReader(inputFile, "UTF-8")))

                // write one Product per JSON record
                Long counter = 1
                json.each {
                    this.executionContext.service.sync().name("create", "mantle.product.Product")
                            .parameters([productId: it.sourceFileReference]).call()

                    // log progress every thousand records
                    if (counter % 1000 == 0) {
                        logMachine.info("JSON rows processed ${counter} > ${it.sourceFileReference}")
                    }

                    counter += 1
                }

                logMachine.info("File processed.")
            } catch (Throwable t) {
                trxFacade.rollback("Error while processing JSON", t)

                // log as warning
                logMachine.warn("Incorrectly handled JSON parsing ${t.message}.")
            } finally {
                if (trxFacade.isTransactionInPlace()) trxFacade.commit()
                inputFile.close()

                // restore tarpit, entity ECAs, and authz
                this.executionContext.artifactExecution.enableTarpit()
                this.executionContext.artifactExecution.enableEntityEca()
                this.executionContext.artifactExecution.enableAuthz()
            }
        })
    }
}
```
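
For reference, this is the direction I was planning to try next (a minimal sketch, untested; it assumes the same `trxFacade`, `logMachine`, and parsed `json` list as in the code above): instead of wrapping the whole file in one runRequireNew, commit in batches of 1,000 so that no single transaction has to hold all 100,000 pending writes.

```groovy
// Sketch only, not a confirmed fix. Groovy's collate() splits the parsed list
// into sublists of 1,000; each sublist gets its own transaction, so pending
// writes are flushed and released between batches instead of accumulating.
json.collate(1000).eachWithIndex { List chunk, int chunkIndex ->
    trxFacade.runRequireNew(300, "Error loading JSON batch ${chunkIndex}", {
        chunk.each { record ->
            this.executionContext.service.sync()
                    .name("create", "mantle.product.Product")
                    .parameters([productId: record.sourceFileReference])
                    .call()
        }
    })
    // log progress per committed batch rather than per record
    logMachine.info("Committed batch ${chunkIndex + 1}")
}
```

I am not sure this helps with the JsonSlurper side, though: even with JsonParserType.CHARACTER_SOURCE I do not know how much of the parsed document stays live in memory while iterating over all the records.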