Is there a free tool that can read a given webpage and take a screenshot of it?
I had the best results with Selenium WebDriver using a virtual framebuffer (Xvfb) and a Firefox binary. This is tested under Ubuntu; you need to have Xvfb and Firefox installed.
First install Firefox and the virtual framebuffer:
aptitude install xvfb firefox
Compile and run this class, then open /tmp/screenshot.png afterwards:
import java.io.File;
import java.io.IOException;

import org.apache.commons.io.FileUtils;
import org.openqa.selenium.OutputType;
import org.openqa.selenium.TakesScreenshot;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.firefox.FirefoxBinary;
import org.openqa.selenium.firefox.FirefoxDriver;

public class CaptureScreenshotTest
{
    private static final int DISPLAY_NUMBER = 99;
    private static final String XVFB = "/usr/bin/Xvfb";
    private static final String XVFB_COMMAND = XVFB + " :" + DISPLAY_NUMBER;
    private static final String URL = "http://www.google.com/";
    private static final String RESULT_FILENAME = "/tmp/screenshot.png";

    public static void main(String[] args) throws IOException
    {
        // Start the virtual framebuffer on display :99 so Firefox can run without a real X server.
        Process xvfb = Runtime.getRuntime().exec(XVFB_COMMAND);

        // Point Firefox at the virtual display.
        FirefoxBinary firefox = new FirefoxBinary();
        firefox.setEnvironmentProperty("DISPLAY", ":" + DISPLAY_NUMBER);
        WebDriver driver = new FirefoxDriver(firefox, null);

        // Load the page, take the screenshot and copy it to the result file.
        driver.get(URL);
        File scrFile = ((TakesScreenshot) driver).getScreenshotAs(OutputType.FILE);
        FileUtils.copyFile(scrFile, new File(RESULT_FILENAME));

        // Shut down the browser and the framebuffer.
        driver.quit();
        xvfb.destroy();
    }
}
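To build and run it you need the selenium-java and commons-io jars on the classpath. Assuming you downloaded them into a lib/ directory (exact file names and versions depend on what you fetched), something like this should work:
javac -cp "lib/*" CaptureScreenshotTest.java
java -cp ".:lib/*" CaptureScreenshotTest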