Capturing all links on a page, filtering them, and visiting the filtered link.

While testing an application, we often face a situation where we need to visit one specific link on a web page. The following script shows how to handle it.

In the following example, I am going to visit 'www.tata.com', capture all the links available on the page, filter out the required link, and visit that link's page.

import java.util.List;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;

public class Links {
 public static void main(String[] args) throws InterruptedException {

  WebDriver driver = new FirefoxDriver();
  driver.get("http://www.tata.com/");
  driver.manage().window().maximize();

  // Extract all links from the web page: every link is an anchor (<a>) tag
  List<WebElement> allLinks = driver.findElements(By.tagName("a"));

  // Print the total number of links on the web page
  System.out.println("Total number of links on the page: " + allLinks.size());

  // Filter the links by their visible text and visit the first match
  for (WebElement link : allLinks) {
   if (link.getText().toLowerCase().contains("contact")) {
    String href = link.getAttribute("href");
    if (href != null) {
     System.out.println("Matching link found: " + href);
     driver.navigate().to(href);
     Thread.sleep(3000); // crude pause so the loaded page is visible
     break;
    }
   }
  }
  driver.quit();
 }
}
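
If only one specific link is needed, there is no need to loop over every anchor: Selenium can locate a link directly by its visible text. The sketch below is a minimal alternative assuming Selenium 4 (where WebDriverWait accepts a Duration) and assuming the target link's visible text contains "Contact", as on the example page above; adjust the text for your own page. Note that By.partialLinkText is case-sensitive, unlike the toLowerCase() comparison in the loop version.

import java.time.Duration;
import org.openqa.selenium.By;
import org.openqa.selenium.WebDriver;
import org.openqa.selenium.WebElement;
import org.openqa.selenium.firefox.FirefoxDriver;
import org.openqa.selenium.support.ui.ExpectedConditions;
import org.openqa.selenium.support.ui.WebDriverWait;

public class ContactLink {
 public static void main(String[] args) {

  WebDriver driver = new FirefoxDriver();
  driver.get("http://www.tata.com/");
  driver.manage().window().maximize();

  // Wait up to 10 seconds for a link whose visible text contains "Contact"
  // (an explicit wait is more reliable than a fixed Thread.sleep)
  WebDriverWait wait = new WebDriverWait(driver, Duration.ofSeconds(10));
  WebElement contactLink = wait.until(
    ExpectedConditions.elementToBeClickable(By.partialLinkText("Contact")));

  // Click the link instead of reading its href and navigating manually
  contactLink.click();

  driver.quit();
 }
}

Clicking the element also exercises the page the way a real user would, whereas navigate().to(href) bypasses any JavaScript attached to the link.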