loadHTML($html);

/*
 * NOTE(review): this excerpt starts inside an outer block opened earlier in
 * the file (the lone "}" at the bottom closes it) and completes a statement
 * begun there ($dom->loadHTML above). It relies on variables defined
 * earlier: $html, $dom, $ipsl, $ip and the colour codes
 * ($blue, $green, $fgreen, $red, $lblue, $bold, $cln).
 */

//----------------------------------------------------------//
// Parameter finder + naive SQL error probe
//----------------------------------------------------------//

// Walk every anchor on the fetched page; for each URL that carries a query
// string, append a single quote and scan the response body for known DB
// error signatures (a classic error-based SQLi probe).
$links = $dom->getElementsByTagName('a');
$vlnk  = 0; // number of URLs that carried parameters

foreach ($links as $link) {
    $href = $link->getAttribute('href');
    if (strpos($href, '?') === false) {
        continue; // no query string -> nothing to probe
    }

    echo "\n$blue [#] " . $fgreen . $href . "\n$cln";
    echo $blue . " [-] Searching For SQL Errors: ";

    // sqlerrors.ini is a comma-separated list of DB error fragments.
    $signatures = explode(',', file_get_contents('sqlerrors.ini'));

    // Absolute links are probed as-is; relative ones are rooted at the target.
    if (strpos($href, '://') !== false) {
        $probeUrl = $href . "'";
    } else {
        $probeUrl = $ipsl . $ip . "/" . $href . "'";
    }

    $body    = file_get_contents($probeUrl);
    $verdict = "$red Not Found";
    foreach ($signatures as $signature) {
        if (strpos($body, $signature) !== false) {
            $verdict = "$green Found!";
        }
    }
    echo $verdict;
    echo "\n$cln";
    echo "\n";
    $vlnk++;
}

echo "\n\n$blue [+] URL(s) With Parameter(s):" . $green . $vlnk;
echo "\n\n";

//----------------------------------------------------------//
// Crawler
//----------------------------------------------------------//

/**
 * Probe every path from a comma-separated wordlist against the target and
 * report what was found. Replaces six copy-pasted curl loops.
 *
 * @param string $listFile wordlist path (e.g. ".crawl/admin.ini")
 * @param string $banner   message printed when the wordlist exists
 * @param string $base     scheme + host prefix (e.g. "http://1.2.3.4")
 * @param string $mode     non-404, non-200 reporting style:
 *                         'verbose' -> print the HTTP code,
 *                         'dot'     -> print "." as a progress marker,
 *                         'silent'  -> print nothing
 * @return void
 */
function crawl_wordlist($listFile, $banner, $base, $mode)
{
    if (!file_exists($listFile)) {
        echo "\n File Not Found, Aborting Crawl ....\n";
        return;
    }

    echo $banner;
    $paths = explode(',', file_get_contents($listFile));
    echo "\nURLs Loaded: " . count($paths) . "\n\n";

    foreach ($paths as $path) {
        $url    = $base . "/" . $path;
        $handle = curl_init($url);
        curl_setopt($handle, CURLOPT_RETURNTRANSFER, true);
        curl_exec($handle);
        $httpCode = curl_getinfo($handle, CURLINFO_HTTP_CODE);

        if ($httpCode == 200) {
            echo "\n\n [ • ] $url : ";
            echo "Found!";
        } elseif ($httpCode != 404) {
            // 404 is expected noise and stays silent in every mode.
            if ($mode == 'verbose') {
                echo "\n\n [ • ] $url : ";
                echo "HTTP Response: " . $httpCode;
            } elseif ($mode == 'dot') {
                echo ".";
            }
        }
        curl_close($handle);
    }
}

echo "\n\n$bold" . $lblue . "C R A W L E R \n";
echo "=============";
echo "\n\n";
echo "\nCrawling Types & Descriptions:$cln";
echo "\n\n$bold" . "69:$cln This is the lite version of the crawler, This will show you the files which returns the http code '200'. This is time efficient and less messy.\n";
echo "\n$bold" . "420:$cln This is a little advance one it will show you all the list of files with their http code other than the badboy 404. This is a little messier but informative \n\n";

// Re-prompt until a recognised crawl type is entered (replaces the old goto).
do {
    echo "Select Crawler Type (69/420): ";
    $ctype = trim(fgets(STDIN, 1024));
} while ($ctype != "69" && $ctype != "420");

$base = $ipsl . $ip;

if ($ctype == "420") {
    echo "\n\t -[ A D V A N C E  C R A W L I N G ]-\n";
    echo "\n\n";
    echo "\n Loading Crawler File ....\n";
    // Advanced mode: report every non-404 response code.
    crawl_wordlist(".crawl/admin.ini", "\n[-] Admin Crawler File Found! Scanning For Admin Panel [-]\n", $base, 'verbose');
    crawl_wordlist(".crawl/backup.ini", "\n[-] Backup Crawler File Found! Scanning For Site Backups [-]\n", $base, 'verbose');
    crawl_wordlist(".crawl/others.ini", "\n[-] General Crawler File Found! Crawling The Site [-]\n", $base, 'verbose');
} else {
    echo "\n\t -[ B A S I C  C R A W L I N G ]-\n";
    echo "\n\n";
    echo "\n Loading Crawler File ....\n";
    // Basic mode: hits only. The admin pass kept its "." progress marker
    // from the original; the other two passes were silent on odd codes.
    crawl_wordlist(".crawl/admin.ini", "\n[-] Admin Crawler File Found! Scanning For Admin Panel [-]\n", $base, 'dot');
    crawl_wordlist(".crawl/backup.ini", "\n[-] Backup Crawler File Found! Scanning For Site Backups [-]\n", $base, 'silent');
    crawl_wordlist(".crawl/others.ini", "\n[-] General Crawler File Found! Crawling The Site [-]\n", $base, 'silent');
}
} // closes the outer block opened before this excerpt
?>