How can I re-implement this using a concurrent executor (i.e. a ThreadPoolExecutor), or just in a much better way? Basically I want the crawler to crawl the given URLs and, later, follow the URLs it finds to other websites, and so on.
package Mainpackge;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import org.jsoup.Jsoup;
import org.jsoup.nodes.Document;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;
public class main {
    public static void main(String[] args) {
        // List of URLs to collect data from
        String[] urls = new String[]{
                "http://www.answers.com/",
                "http://www.britannica.com/",
                "https://ie.yahoo.com/?p=us",
                "https://en.wikipedia.org/wiki/Main_Page",
                "http://ww w.worldbook.com/",
                "http://www.computerlanguage.com/",
                "http://www.howstuffworks.com/",
                "http://www.dmoz.org/Computers/Computer_Science/"
                };
        // Create and start workers
        List<Worker> workers = new ArrayList<>(urls.length);
        for (String url : urls) {
            Worker w = new Worker(url);
            workers.add(w);
            new Thread(w).start();
        }
        // Retrieve results
        for (Worker w : workers) {
            Elements results = w.waitForResults();
            if (results != null) {
                for (Element result : results) {
                    System.out.println(w.getName() + ": " + result.absUrl("href"));
                }
            } else {
                System.err.println(w.getName() + " had some error!");
            }
        }
    }
}
class Worker implements Runnable {
    private final String url;
    private Elements results;
    private boolean finished = false;
    private final String name;
    private static int number = 0;
    private final Object lock = new Object();
    public Worker(String url) {
        this.url = url;
        this.name = "Worker-" + (number++);
    }
    public String getName() {
        return name;
    }
    @Override
    public void run() {
        try {
            Document doc = Jsoup.connect(this.url).get();
            // Only anchors that actually carry an href attribute
            Elements links = doc.select("a[href]");
            synchronized (lock) {
                this.results = links;
            }
        } catch (IOException e) {
            // You should implement better error handling here..
            System.err.println("Error while fetching: " + this.url);
            e.printStackTrace();
        } finally {
            // Signal completion even on failure, so waitForResults() cannot block forever
            synchronized (lock) {
                this.finished = true;
                lock.notifyAll();
            }
        }
    }
    public Elements waitForResults() {
        synchronized (lock) {
            try {
                while (!finished) {
                    lock.wait();
                }
                // null here means the fetch failed
                return this.results;
            } catch (InterruptedException e) {
                // Again, better error handling is needed; at least restore the interrupt flag
                Thread.currentThread().interrupt();
                return null;
            }
        }
    }
}
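For the fixed list of seed URLs, one way to replace the hand-rolled wait/notify Worker is to give an ExecutorService one Callable<Elements> per URL and read the results back through Futures. This is only a sketch, assuming you keep Jsoup for fetching; the class name ExecutorCrawler and the pool size of 4 are my own choices, not anything from your code:

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.Callable;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;
import org.jsoup.select.Elements;

public class ExecutorCrawler {
    public static void main(String[] args) throws InterruptedException {
        String[] urls = {
                "http://www.answers.com/",
                "http://www.britannica.com/",
                "https://en.wikipedia.org/wiki/Main_Page"
        };

        // Size the pool to however many pages you want in flight at once
        ExecutorService pool = Executors.newFixedThreadPool(4);

        // One Callable per URL; whatever the task returns comes back through a Future
        List<Callable<Elements>> tasks = new ArrayList<>();
        for (String url : urls) {
            tasks.add(() -> Jsoup.connect(url).get().select("a[href]"));
        }

        // invokeAll blocks until every task is done; the Futures come back in input order
        List<Future<Elements>> futures = pool.invokeAll(tasks);
        for (int i = 0; i < futures.size(); i++) {
            try {
                for (Element link : futures.get(i).get()) {
                    System.out.println(urls[i] + ": " + link.absUrl("href"));
                }
            } catch (ExecutionException e) {
                // The IOException thrown inside the Callable surfaces here as the cause
                System.err.println(urls[i] + " had some error: " + e.getCause());
            }
        }
        pool.shutdown();
    }
}

invokeAll waits for every task to finish; if you would rather process results as they complete, submit the tasks individually and drain them through an ExecutorCompletionService instead.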
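To also follow the links you find, you can crawl level by level with the same pool: fetch the current frontier in parallel, collect the outgoing links, and use them as the next frontier until a depth limit is hit. Again just a rough sketch; Crawler, MAX_DEPTH, MAX_PAGES and the "http" filter are my assumptions, and a real crawler would also need robots.txt handling, per-host rate limiting and URL canonicalization:

import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Set;
import java.util.concurrent.Callable;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;

import org.jsoup.Jsoup;
import org.jsoup.nodes.Element;

public class Crawler {
    private static final int MAX_DEPTH = 2;    // how many hops away from the seeds to follow
    private static final int MAX_PAGES = 100;  // hard cap so the crawl cannot run away

    public static void main(String[] args) throws InterruptedException {
        ExecutorService pool = Executors.newFixedThreadPool(8);
        Set<String> visited = ConcurrentHashMap.newKeySet();
        List<String> frontier = Arrays.asList("https://en.wikipedia.org/wiki/Main_Page");

        for (int depth = 0; depth <= MAX_DEPTH && !frontier.isEmpty(); depth++) {
            // One task per not-yet-visited URL in the current level
            List<Callable<List<String>>> tasks = new ArrayList<>();
            for (String url : frontier) {
                if (visited.size() >= MAX_PAGES || !visited.add(url)) {
                    continue;
                }
                tasks.add(() -> {
                    List<String> found = new ArrayList<>();
                    for (Element link : Jsoup.connect(url).get().select("a[href]")) {
                        String abs = link.absUrl("href");
                        if (abs.startsWith("http")) {   // skip mailto:, javascript:, empty hrefs, ...
                            found.add(abs);
                        }
                    }
                    System.out.println(url + " -> " + found.size() + " links");
                    return found;
                });
            }

            // Everything found at this depth becomes the frontier for the next depth
            List<String> next = new ArrayList<>();
            for (Future<List<String>> f : pool.invokeAll(tasks)) {
                try {
                    next.addAll(f.get());
                } catch (ExecutionException e) {
                    System.err.println("Fetch failed: " + e.getCause());
                }
            }
            frontier = next;
        }
        pool.shutdown();
    }
}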