java "OutOfMemoryError" in a Jena application

I am trying to read a large dataset (over 1 GB) with Jena's read method, but I get an out-of-memory error. I tried raising the Tomcat heap size to 2048 MB (the -Xmx parameter) and set the same parameter in the eclipse.ini file, but I could not get a working solution. Since I parse the dataset into HashMaps and display the contents on a web page, I am open to any suggestions on how to handle large datasets.
The console error is:
Exception in thread "http-bio-8080-AsyncTimeout" java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.concurrent.ConcurrentLinkedQueue.iterator(ConcurrentLinkedQueue.java:667)
at org.apache.tomcat.util.net.JIoEndpoint$AsyncTimeout.run(JIoEndpoint.java:157)
at java.lang.Thread.run(Thread.java:745)
Exception in thread "http-bio-8080-exec-6" java.lang.OutOfMemoryError: GC overhead limit exceeded
at java.util.concurrent.CopyOnWriteArrayList.iterator(CopyOnWriteArrayList.java:959)
at com.hp.hpl.jena.graph.impl.SimpleEventManager.notifyAddTriple(SimpleEventManager.java:91)
at com.hp.hpl.jena.graph.impl.GraphBase.notifyAdd(GraphBase.java:124)
at com.hp.hpl.jena.graph.impl.GraphBase.add(GraphBase.java:203)
at org.apache.jena.riot.system.StreamRDFLib$ParserOutputGraph.triple(StreamRDFLib.java:165)
at org.apache.jena.riot.lang.LangNTriples.runParser(LangNTriples.java:56)
at org.apache.jena.riot.lang.LangBase.parse(LangBase.java:42)
at org.apache.jena.riot.RDFParserRegistry$ReaderRIOTLang.read(RDFParserRegistry.java:182)
at org.apache.jena.riot.RDFDataMgr.process(RDFDataMgr.java:906)
at org.apache.jena.riot.RDFDataMgr.read(RDFDataMgr.java:257)
at org.apache.jena.riot.RDFDataMgr.read(RDFDataMgr.java:243)
at org.apache.jena.riot.adapters.RDFReaderRIOT_Web.read(RDFReaderRIOT_Web.java:96)
at com.hp.hpl.jena.rdf.model.impl.ModelCom.read(ModelCom.java:235)
at com.packages.rdf.FileAnalyse.GetFileComponents(FileAnalyse.java:77)
at com.packages.servlets.CreatePatternServlet.GetStatements(CreatePatternServlet.java:96)
at com.packages.servlets.CreatePatternServlet.doPost(CreatePatternServlet.java:68)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:646)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:303)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:220)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:122)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:501)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:171)
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103)
at org.apache.catalina.valves.AccessLogValve.invoke(AccessLogValve.java:950)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:116)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:408)
at org.apache.coyote.http11.AbstractHttp11Processor.process(AbstractHttp11Processor.java:1070)
Exception in thread "ContainerBackgroundProcessor[StandardEngine[Catalina]]" java.lang.OutOfMemoryError: GC overhead limit exceeded
at org.apache.naming.resources.FileDirContext.file(FileDirContext.java:765)
at org.apache.naming.resources.FileDirContext.doGetAttributes(FileDirContext.java:398)
at org.apache.naming.resources.BaseDirContext.getAttributes(BaseDirContext.java:1137)
at org.apache.naming.resources.BaseDirContext.getAttributes(BaseDirContext.java:1090)
at org.apache.naming.resources.ProxyDirContext.getAttributes(ProxyDirContext.java:882)
at org.apache.catalina.loader.WebappClassLoader.modified(WebappClassLoader.java:1026)
at org.apache.catalina.loader.WebappLoader.modified(WebappLoader.java:500)
at org.apache.catalina.loader.WebappLoader.backgroundProcess(WebappLoader.java:420)
at org.apache.catalina.core.ContainerBase.backgroundProcess(ContainerBase.java:1345)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1546)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1556)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.processChildren(ContainerBase.java:1556)
at org.apache.catalina.core.ContainerBase$ContainerBackgroundProcessor.run(ContainerBase.java:1524)
at java.lang.Thread.run(Thread.java:745)
Exception in thread "http-bio-8080-exec-6" java.lang.OutOfMemoryError: GC overhead limit exceeded
at org.apache.jena.riot.tokens.TokenizerText.parseToken(TokenizerText.java:170)
at org.apache.jena.riot.tokens.TokenizerText.hasNext(TokenizerText.java:86)
at org.apache.jena.atlas.iterator.PeekIterator.fill(PeekIterator.java:50)
at org.apache.jena.atlas.iterator.PeekIterator.next(PeekIterator.java:92)
at org.apache.jena.riot.lang.LangEngine.nextToken(LangEngine.java:99)
at org.apache.jena.riot.lang.LangNTriples.parseOne(LangNTriples.java:71)
at org.apache.jena.riot.lang.LangNTriples.runParser(LangNTriples.java:54)
at org.apache.jena.riot.lang.LangBase.parse(LangBase.java:42)
at org.apache.jena.riot.RDFParserRegistry$ReaderRIOTLang.read(RDFParserRegistry.java:182)
at org.apache.jena.riot.RDFDataMgr.process(RDFDataMgr.java:906)
at org.apache.jena.riot.RDFDataMgr.read(RDFDataMgr.java:257)
at org.apache.jena.riot.RDFDataMgr.read(RDFDataMgr.java:243)
at org.apache.jena.riot.adapters.RDFReaderRIOT_Web.read(RDFReaderRIOT_Web.java:96)
at com.hp.hpl.jena.rdf.model.impl.ModelCom.read(ModelCom.java:235)
at com.packages.rdf.FileAnalyse.GetFileComponents(FileAnalyse.java:77)
at com.packages.servlets.CreatePatternServlet.GetStatements(CreatePatternServlet.java:96)
at com.packages.servlets.CreatePatternServlet.doPost(CreatePatternServlet.java:68)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:646)
at javax.servlet.http.HttpServlet.service(HttpServlet.java:727)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:303)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
at org.apache.tomcat.websocket.server.WsFilter.doFilter(WsFilter.java:52)
at org.apache.catalina.core.ApplicationFilterChain.internalDoFilter(ApplicationFilterChain.java:241)
at org.apache.catalina.core.ApplicationFilterChain.doFilter(ApplicationFilterChain.java:208)
at org.apache.catalina.core.StandardWrapperValve.invoke(StandardWrapperValve.java:220)
at org.apache.catalina.core.StandardContextValve.invoke(StandardContextValve.java:122)
at org.apache.catalina.authenticator.AuthenticatorBase.invoke(AuthenticatorBase.java:501)
at org.apache.catalina.core.StandardHostValve.invoke(StandardHostValve.java:171)
at org.apache.catalina.valves.ErrorReportValve.invoke(ErrorReportValve.java:103)
at org.apache.catalina.valves.AccessLogValve.invoke(AccessLogValve.java:950)
at org.apache.catalina.core.StandardEngineValve.invoke(StandardEngineValve.java:116)
at org.apache.catalina.connector.CoyoteAdapter.service(CoyoteAdapter.java:408)
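For context, the trace shows the dataset (N-Triples, judging by LangNTriples) being materialised as an in-memory Model via Model.read and then, per the question, copied into HashMaps. The actual FileAnalyse code is not shown, so the following is only a hypothetical reconstruction of that pattern; it illustrates why the heap ends up holding the data at least twice:

    import java.util.HashMap;
    import java.util.Map;

    import com.hp.hpl.jena.rdf.model.Model;
    import com.hp.hpl.jena.rdf.model.ModelFactory;
    import com.hp.hpl.jena.rdf.model.Statement;
    import com.hp.hpl.jena.rdf.model.StmtIterator;

    public class FileAnalyseSketch {

        // Hypothetical reconstruction of the loading pattern implied by the trace:
        // the whole dataset is materialised as an in-memory Model, then copied
        // again into a HashMap, so memory use is a multiple of the file size.
        public static Map<String, String> getFileComponents(String fileUrl) {
            Model model = ModelFactory.createDefaultModel();
            model.read(fileUrl, "N-TRIPLE");      // pulls every triple onto the heap

            Map<String, String> subjectToObject = new HashMap<String, String>();
            StmtIterator it = model.listStatements();
            while (it.hasNext()) {
                Statement st = it.nextStatement();
                // getURI() is null for blank-node subjects; ignored here for brevity
                subjectToObject.put(String.valueOf(st.getSubject().getURI()),
                                    st.getObject().toString());
            }
            return subjectToObject;               // a second full copy of the data
        }
    }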
Just because your file is 1 GB does not mean it will only take 1 GB in memory; for various reasons it is usually much more. Could you try raising the heap to a higher value, say 8 or 16 GB? (By the way, you don't have to write '2048M'; you can just write '2G' and the amount is the same.) – childofsoong 2015-03-18 22:11:50
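If raising the heap is the route taken, note that an -Xmx in eclipse.ini only affects the Eclipse JVM; a Tomcat launched from Eclipse takes its VM arguments from the server's launch configuration, and a stand-alone Tomcat typically takes them from CATALINA_OPTS. A minimal sketch, assuming a stand-alone Tomcat on Linux/macOS and a bin/setenv.sh file (the 8g value is illustrative):

    # $CATALINA_HOME/bin/setenv.sh  (create it if missing; picked up by catalina.sh)
    # Give the Tomcat JVM an 8 GB maximum heap; adjust to the machine's RAM.
    export CATALINA_OPTS="$CATALINA_OPTS -Xmx8g"

On Windows the equivalent is a setenv.bat with a matching "set CATALINA_OPTS=..." line.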
I tried 4g, but for this application I may have to analyse files of up to 10 GB, so I don't know whether that will be enough. – emrahozkan 2015-03-18 22:13:54
Then you may have to find a library that doesn't try to load the whole file into memory at once. I'm afraid I don't know of one off the top of my head. However, could your analysis be run on several smaller files and the results put together afterwards? If so, you could break the giant file into smaller pieces. – childofsoong 2015-03-18 22:19:05
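For what it's worth, Jena's RIOT layer can already parse without building an in-memory Model: RDFDataMgr.parse feeds a StreamRDF sink one triple at a time, so only whatever summary the application keeps ends up on the heap. A minimal sketch against the Jena 2.x packages visible in the trace (the predicate-counting logic and the data.nt filename are purely illustrative; in Jena 3 the com.hp.hpl.jena classes moved to org.apache.jena):

    import java.util.HashMap;
    import java.util.Map;

    import org.apache.jena.riot.RDFDataMgr;
    import org.apache.jena.riot.system.StreamRDF;
    import org.apache.jena.riot.system.StreamRDFBase;

    import com.hp.hpl.jena.graph.Triple;

    public class StreamingAnalyse {

        public static void main(String[] args) {
            // Keep only the (small) summary you actually need on the heap,
            // not the whole graph.
            final Map<String, Long> triplesPerPredicate = new HashMap<String, Long>();

            StreamRDF sink = new StreamRDFBase() {
                @Override
                public void triple(Triple triple) {
                    // Called once per parsed triple; the parser retains nothing.
                    String predicate = triple.getPredicate().getURI();
                    Long count = triplesPerPredicate.get(predicate);
                    triplesPerPredicate.put(predicate, count == null ? 1L : count + 1L);
                }
            };

            // Streams the N-Triples file through the sink without creating a Model.
            RDFDataMgr.parse(sink, "data.nt");

            System.out.println("Distinct predicates: " + triplesPerPredicate.size());
        }
    }

If the web page really needs access to the whole dataset rather than a summary of it, loading the file once into a persistent Jena TDB store and answering each request with a SPARQL query is another way to keep the data out of the servlet's heap.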