Hello everyone im pretty new to this forum, Im a perl newbie and I wanted to find out what Im doing wrong, my goal is to connect to remote host grab html code and dump it to local file and then I want it to filter out all URLs and dump em to another file and here's my code. all help appreciated thanks in advance!
use IO::Socket;
$get = "GET // \n"
$file = $#ARGV[2];
open (FILE, $file);
$file = @lines ;
close (FILE) ;
if ($line =~ /(<a\s+href[^>]+>[^<]+<\/a> ;)/i) {
print $1;
}
$connect = IO::Socket::INET ->new (Proto=>"tcp", PeerAddr=> "$ARGV[0]",
PeerPort=>"80"); unless ($connect) { die "Cannot connect to host $ARGV[0]" }
system('clear ');
print $connect "$get" ($1 > file.html);
if ($#connect<0)
{
print "\nRemote host must be down or won\'t allow connection \n";
exit;
Code:
#!/usr/bin/perl -w use IO::Socket;
$get = "GET // \n"
$file = $#ARGV[2];
open (FILE, $file);
$file = @lines ;
close (FILE) ;
if ($line =~ /(<a\s+href[^>]+>[^<]+<\/a> ;)/i) {
print $1;
}
$connect = IO::Socket::INET ->new (Proto=>"tcp", PeerAddr=> "$ARGV[0]",
PeerPort=>"80"); unless ($connect) { die "Cannot connect to host $ARGV[0]" }
system('clear ');
print $connect "$get" ($1 > file.html);
if ($#connect<0)
{
print "\nRemote host must be down or won\'t allow connection \n";
exit;