Infinite loop stops after some time

This is my code. It runs an infinite loop that sleeps for 60 seconds and then downloads content from other websites again. It works fine on my local machine under WAMP, but after I upload it to the server it just stops after a certain time. I am unable to find the reason. Why does it stop?

<?php
include('scrapconnection.php');
include_once('simple_html_dom.php');

for (;;) {
    sleep(60);
    $keyword = "laptop";
    $result = mysql_query("select * from website");

    while ($row = mysql_fetch_array($result)) {
        $searchLink         = $row['searchLink'];
        $rootElement        = $row['rootElement'];
        $productTitle       = $row['productTitle'];
        $productLink        = $row['productLink'];
        $productPrice       = $row['productPrice'];
        $productImage       = $row['productImage'];
        $productDescription = $row['productDescription'];

        $url  = str_replace("__search_keyword__", $keyword, $searchLink);
        $html = file_get_html($url);

        // Patterns that strip leading, trailing, and repeated whitespace.
        $pat[0] = "/^\s+/";
        $pat[1] = "/\s{2,}/";
        $pat[2] = "/\s+\$/";
        $rep[0] = "";
        $rep[1] = " ";
        $rep[2] = "";

        foreach ($html->find($rootElement) as $heading) {
            $item['productTitle']       = preg_replace($pat, $rep, $heading->find($productTitle, 0)->plaintext);
            $item['productLink']        = preg_replace($pat, $rep, $heading->find($productLink, 0)->href);
            $item['productImage']       = preg_replace($pat, $rep, $heading->find($productImage, 0)->src);
            $item['productPrice']       = preg_replace($pat, $rep, $heading->find($productPrice, 0)->innertext);
            $item['productDescription'] = preg_replace($pat, $rep, $heading->find($productDescription, 0)->plaintext);
            preg_match('@^(?:http://www.)?([^/]+)@i', $item['productLink'], $matches);
            $item['domainName'] = $matches[1];
            $articles[] = $item;
        }
    }

    // NB: unlink() and file_put_contents() do not work on http:// URLs;
    // these need a local filesystem path on the server.
    unlink('http://xab.com/files/'.$keyword.'.json');
    $deal = json_encode($articles);
    file_put_contents('http://xab.com/files/'.$keyword.'.json', $deal);
    unset($articles);
}
?>

This happens because of a timeout on the web server. I believe no shared host will permit such a script to keep running, not even via cron. Try to get a dedicated server (or VPS) that allows this kind of long-running execution.
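If you cannot move to a dedicated machine right away, PHP's own request limits can at least be relaxed. Here is a minimal sketch (not the asker's code) of the relevant calls; note that a shared host's process manager can still kill the process regardless of what PHP itself is configured to allow:

<?php
// Disable PHP's own execution-time limit (max_execution_time).
set_time_limit(0);

// Keep running even if the HTTP client that started the request disconnects.
ignore_user_abort(true);

while (true) {
    // ... do the scraping work here ...
    sleep(60);
}

A more robust option is to start the script from the command line (e.g. `nohup php scraper.php &`, where `scraper.php` is just a placeholder name), since CLI PHP has no execution-time limit by default and does not depend on a web request staying open.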

Try adding this at the top of your script:

ini_set("memory_limit", "-1");


Also check out:
What happens when the server is in an infinite loop and the client stops?