I wrote this code to check if the files are available on a site i've referenced. It works but I'm not happy the foreach inside a foreach as I think it's slowing it but not sure. Can anyone please give advice on how to optimize this?
<?php
//previous page on my site
$url = $_SERVER['HTTP_REFERER'];
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_HEADER, false);
curl_setopt($ch, CURLOPT_RETURNTRANSFER,1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT,60); // tries for 60 seconds
$html = curl_exec($ch);
curl_close($ch);
$html = strtolower($html);
$regex="/example\.com\/uploads\/([a-zA-Z0-9._-]+)/";
preg_match_all("$regex", $html, $matches);
foreach($matches[0] as $url)
{
$url = 'http://' . $url;
$url = explode("\n", $url);
foreach ($url as $urls){
$upurls = @file_get_contents($urls);
if (preg_match("/File Exists/", $upurls)){
echo 'Valid';
} else {
echo 'Dead';
}
}
}
?>