2015-10-29 328 views
3

我現在面臨的一些問題與此代碼:語法錯誤,插入「...... VariableDeclaratorId」完成FormalParameterList

import edu.uci.ics.crawler4j.crawler.CrawlConfig; 
import edu.uci.ics.crawler4j.crawler.CrawlController; 
import edu.uci.ics.crawler4j.fetcher.PageFetcher; 
import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig; 
import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer; 

public class Controller { 

    String crawlStorageFolder = "/data/crawl/root"; 
    int numberOfCrawlers = 7; 

    CrawlConfig config = new CrawlConfig(); 
    config.setCrawlStorageFolder(crawlStorageFolder); 
    /* 
     * Instantiate the controller for this crawl. 
     */ 
    PageFetcher pageFetcher = new PageFetcher(config); 
    RobotstxtConfig robotstxtConfig = new RobotstxtConfig(); 
    RobotstxtServer robotstxtServer = new RobotstxtServer(robotstxtConfig, pageFetcher); 
    CrawlController controller = new CrawlController(config, pageFetcher, robotstxtServer); 

    /* 
     * For each crawl, you need to add some seed urls. These are the first 
     * URLs that are fetched and then the crawler starts following links 
     * which are found in these pages 
     */ 
    controller.addSeed("http://www.ics.uci.edu/~lopes/"); 
    controller.addSeed("http://www.ics.uci.edu/~welling/"); 
    controller.addSeed("http://www.ics.uci.edu/"); 
    /* 
     * Start the crawl. This is a blocking operation, meaning that your code 
     * will reach the line after this only when crawling is finished. 
     */ 
    controller.start(MyCrawler.class, numberOfCrawlers); 
} 

我收到以下錯誤:

"Syntax error, insert "... VariableDeclaratorId" to complete FormalParameterList" on config.setCrawlStrorageFolder(crawlStorageFolder)

+0

得到錯誤「語法錯誤,插入 ...... VariableDeclaratorId 完成 FormalParameterList」,發生在 config.setCrawlStrorageFolder(crawlStorageFolder) –

+0

你可以讓我們看看完整的錯誤堆棧跟蹤,以及錯誤發生在哪一行嗎? –

回答

3

你不能在類體中直接使用任意代碼。它必須位於方法(或構造函數或初始化塊)中。

0

你的代碼是在類的主體。把它放在一個主要的方法來運行。

import edu.uci.ics.crawler4j.crawler.CrawlConfig; 
    import edu.uci.ics.crawler4j.crawler.CrawlController; 
    import edu.uci.ics.crawler4j.fetcher.PageFetcher; 
    import edu.uci.ics.crawler4j.robotstxt.RobotstxtConfig; 
    import edu.uci.ics.crawler4j.robotstxt.RobotstxtServer; 

    public class Controller { 
    /** Sets up a crawler4j crawl, registers seed URLs, and runs it. */ 
    public static void main(String[] args){ 

     // Where crawler4j keeps its intermediate crawl data. 
     String storageDir = "/data/crawl/root"; 
     // How many crawler threads to run concurrently. 
     int crawlerCount = 7; 

     CrawlConfig cfg = new CrawlConfig(); 
     cfg.setCrawlStorageFolder(storageDir); 

     // Build the fetcher and robots.txt handling, then the controller 
     // that coordinates the whole crawl. 
     PageFetcher fetcher = new PageFetcher(cfg); 
     RobotstxtConfig robotsCfg = new RobotstxtConfig(); 
     RobotstxtServer robotsServer = new RobotstxtServer(robotsCfg, fetcher); 
     CrawlController crawlController = new CrawlController(cfg, fetcher, robotsServer); 

     // Seed URLs: the crawl starts from these pages and follows the 
     // links it discovers in them. 
     crawlController.addSeed("http://www.ics.uci.edu/~lopes/"); 
     crawlController.addSeed("http://www.ics.uci.edu/~welling/"); 
     crawlController.addSeed("http://www.ics.uci.edu/"); 

     // Blocking call: returns only once the crawl has finished. 
     crawlController.start(MyCrawler.class, crawlerCount); 
    } 
    } 
相關問題