Using cURL for crawling through a server proxy IP

Sometimes your IP gets blocked, so you can route requests through proxy IPs to test your scraping.

Here is some code found online:

function curl_string($url, $user_agent, $proxy) {
    $ch = curl_init();
    curl_setopt($ch, CURLOPT_PROXY, $proxy);            // proxy server to route the request through
    curl_setopt($ch, CURLOPT_URL, $url);
    curl_setopt($ch, CURLOPT_USERAGENT, $user_agent);
    curl_setopt($ch, CURLOPT_COOKIEJAR, "cookie.txt");  // file to store cookies in (adjust the path as needed)
    curl_setopt($ch, CURLOPT_HEADER, 1);                // include response headers in the output
    curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);        // return the response instead of printing it
    curl_setopt($ch, CURLOPT_FOLLOWLOCATION, 1);        // follow redirects
    curl_setopt($ch, CURLOPT_TIMEOUT, 120);             // give up after 120 seconds
    $result = curl_exec($ch);
    curl_close($ch);
    return $result;
}

$url_page = "http://www.google.com";
$user_agent = "Mozilla/4.0";
$proxy = "http://192.11.222.124:8000";
$string = curl_string($url_page,$user_agent,$proxy);
echo $string;
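
Free proxies are often dead or slow, so it helps to check for errors instead of trusting whatever comes back. Below is a minimal sketch (not from the original post) that uses the same proxy options plus curl_errno()/curl_error(); the URL and proxy address are just placeholders.

<?php
// Sketch: same kind of proxied request, but report cURL errors instead of
// silently echoing false. The proxy address is a placeholder, not a working proxy.
$ch = curl_init("http://www.google.com");
curl_setopt($ch, CURLOPT_PROXY, "http://192.11.222.124:8000");
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 10);   // fail fast if the proxy is unreachable
curl_setopt($ch, CURLOPT_TIMEOUT, 30);
$result = curl_exec($ch);
if ($result === false) {
    echo "cURL error (" . curl_errno($ch) . "): " . curl_error($ch) . "\n";
} else {
    echo $result;
}
curl_close($ch);
?>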


Or:
$proxy = "80.25.198.25";
$proxyport = "8080";
$ch = curl_init("http://sfbay.craigslist.org/");
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_PROXY, $proxy);
curl_setopt($ch, CURLOPT_PROXYPORT, $proxyport);
curl_setopt($ch, CURLOPT_TIMEOUT, 120);
$result = curl_exec($ch);
echo $result;
curl_close($ch);

And another one used for fetching:

<?php
$requestUrl = 'ip138.com';                            // a site that echoes back your visible IP
$ch = curl_init();
$timeout = 5;
curl_setopt($ch, CURLOPT_URL, $requestUrl);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, $timeout);
curl_setopt($ch, CURLOPT_PROXYAUTH, CURLAUTH_BASIC);  // proxy authentication scheme
curl_setopt($ch, CURLOPT_PROXY, "112.65.219.72");     // proxy server address
curl_setopt($ch, CURLOPT_PROXYPORT, 80);              // proxy server port
//curl_setopt($ch, CURLOPT_PROXYUSERPWD, ":");        // proxy credentials, username:password format
curl_setopt($ch, CURLOPT_PROXYTYPE, CURLPROXY_HTTP);  // use an HTTP proxy
$file_contents = curl_exec($ch);
curl_close($ch);
echo $file_contents;
?>
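
If the proxy requires credentials or speaks SOCKS5 rather than HTTP, the same family of options covers it. A minimal sketch with placeholder host, port, and credentials (not a real proxy, and not from the original post):

<?php
// Sketch: authenticated SOCKS5 proxy. Host, port, and credentials are placeholders.
$ch = curl_init('http://ip138.com');
curl_setopt($ch, CURLOPT_RETURNTRANSFER, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 5);
curl_setopt($ch, CURLOPT_PROXY, "127.0.0.1");             // proxy host (placeholder)
curl_setopt($ch, CURLOPT_PROXYPORT, 1080);                // common SOCKS5 port
curl_setopt($ch, CURLOPT_PROXYTYPE, CURLPROXY_SOCKS5);    // SOCKS5 instead of HTTP proxy
curl_setopt($ch, CURLOPT_PROXYUSERPWD, "user:password");  // credentials, username:password
echo curl_exec($ch);
curl_close($ch);
?>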
Original article: https://www.cnblogs.com/matengfei123/p/7998096.html