diff --git a/includes/HttpFunctions.php b/includes/HttpFunctions.php
index 1f812d85e1..a87e6f6095 100644
--- a/includes/HttpFunctions.php
+++ b/includes/HttpFunctions.php
@@ -11,7 +11,8 @@ class Http {
 	const ASYNC_DOWNLOAD = 2; // asynchronous upload we should spawn out another process and monitor progress if possible)
 
 	var $body = '';
-	public static function request($method, $url, $opts = Array() ){
+
+	public static function request( $method, $url, $opts = array() ){
 		$opts['method'] = ( strtoupper( $method ) == 'GET' || strtoupper( $method ) == 'POST' ) ? strtoupper( $method ) : null;
 		$req = new HttpRequest( $url, $opts );
 		$status = $req->doRequest();
@@ -22,11 +23,12 @@ class Http {
 			return false;
 		}
 	}
+
 	/**
 	 * Simple wrapper for Http::request( 'GET' )
 	 */
-	public static function get( $url, $timeout = false) {
-		$opts = Array();
+	public static function get( $url, $timeout = false ) {
+		$opts = array();
 		if( $timeout )
 			$opts['timeout'] = $timeout;
 		return Http::request( 'GET', $url, $opts );
@@ -39,16 +41,21 @@ class Http {
 		return Http::request( 'POST', $url, $opts );
 	}
 
-	public static function doDownload( $url, $target_file_path , $dl_mode = self::SYNC_DOWNLOAD , $redirectCount = 0 ){
+	public static function doDownload( $url, $target_file_path, $dl_mode = self::SYNC_DOWNLOAD, $redirectCount = 0 ){
 		global $wgPhpCli, $wgMaxUploadSize, $wgMaxRedirects;
 		// do a quick check to HEAD to insure the file size is not > $wgMaxUploadSize
-		$head = get_headers( $url, 1 );
+		$headRequest = new HttpRequest( $url, array( 'headers_only' => true ) );
+		$headResponse = $headRequest->doRequest();
+		if( !$headResponse->isOK() ){
+			return $headResponse;
+		}
+		$head = $headResponse->value;
 
 		// check for redirects:
 		if( isset( $head['Location'] ) && strrpos( $head[0], '302' ) !== false ){
 			if( $redirectCount < $wgMaxRedirects ){
 				if( UploadFromUrl::isValidURI( $head['Location'] ) ){
-					return self::doDownload( $head['Location'], $target_file_path , $dl_mode, $redirectCount++ );
+					return self::doDownload( $head['Location'], $target_file_path, $dl_mode, $redirectCount++ );
 				} else {
 					return Status::newFatal( 'upload-proto-error' );
 				}
@@ -88,8 +95,8 @@ class Http {
 	 * should write to a file location and give updates
 	 *
 	 */
-	private function initBackgroundDownload( $url, $target_file_path, $content_length = null ){
-		global $wgMaxUploadSize, $IP, $wgPhpCli;
+	private static function initBackgroundDownload( $url, $target_file_path, $content_length = null ){
+		global $wgMaxUploadSize, $IP, $wgPhpCli, $wgServer;
 		$status = Status::newGood();
 
 		// generate a session id with all the details for the download (pid, target_file_path )
@@ -99,6 +106,8 @@ class Http {
 		// store the url and target path:
 		$_SESSION['wsDownload'][$upload_session_key]['url'] = $url;
 		$_SESSION['wsDownload'][$upload_session_key]['target_file_path'] = $target_file_path;
+		// since we request from the cmd line we lose the original host name pass in the session:
+		$_SESSION['wsDownload'][$upload_session_key]['orgServer'] = $wgServer;
 
 		if( $content_length )
 			$_SESSION['wsDownload'][$upload_session_key]['content_length'] = $content_length;
@@ -108,7 +117,7 @@ class Http {
 
 		// run the background download request:
 		$cmd = $wgPhpCli . ' ' . $IP . "/maintenance/http_session_download.php --sid {$session_id} --usk {$upload_session_key}";
-		$pid = wfShellBackgroundExec( $cmd, $retval );
+		$pid = wfShellBackgroundExec( $cmd );
 		// the pid is not of much use since we won't be visiting this same apache any-time soon.
 		if( !$pid )
 			return Status::newFatal( 'could not run background shell exec' );
@@ -120,7 +129,7 @@ class Http {
 		return $status;
 	}
 
-	function getUploadSessionKey(){
+	static function getUploadSessionKey(){
 		$key = mt_rand( 0, 0x7fffffff );
 		$_SESSION['wsUploadData'][$key] = array();
 		return $key;
@@ -129,22 +138,23 @@ class Http {
 	/**
 	 * used to run a session based download. Is initiated via the shell.
 	 *
-	 * @param $session_id String: the session id to grab download details from
+	 * @param $session_id String: the session id to grab download details from
 	 * @param $upload_session_key String: the key of the given upload session
 	 *  (a given client could have started a few http uploads at once)
 	 */
 	public static function doSessionIdDownload( $session_id, $upload_session_key ){
-		global $wgUser, $wgEnableWriteAPI, $wgAsyncHTTPTimeout;
-		wfDebug( __METHOD__ . "\n\ndoSessionIdDownload\n\n" );
+		global $wgUser, $wgEnableWriteAPI, $wgAsyncHTTPTimeout, $wgServer;
+		wfDebug( __METHOD__ . "\n\ndoSessionIdDownload:\n\n" );
+
 		// set session to the provided key:
 		session_id( $session_id );
 		// start the session
 		if( session_start() === false ){
 			wfDebug( __METHOD__ . ' could not start session' );
 		}
-		//get all the vars we need from session_id
-		if(!isset($_SESSION[ 'wsDownload' ][$upload_session_key])){
-			wfDebug( __METHOD__ .' Error:could not find upload session');
+		// get all the vars we need from session_id
+		if( !isset( $_SESSION[ 'wsDownload' ][$upload_session_key] ) ){
+			wfDebug( __METHOD__ . ' Error:could not find upload session');
 			exit();
 		}
 		// setup the global user from the session key we just inherited
@@ -152,6 +162,11 @@ class Http {
 
 		// grab the session data to setup the request:
 		$sd =& $_SESSION['wsDownload'][$upload_session_key];
+
+		// update the wgServer var ( since cmd line thinks we are localhost when we are really orgServer)
+		if( isset( $sd['orgServer'] ) && $sd['orgServer'] ){
+			$wgServer = $sd['orgServer'];
+		}
 		// close down the session so we can other http queries can get session updates:
 		session_write_close();
 
@@ -161,7 +176,7 @@ class Http {
 				'timeout' => $wgAsyncHTTPTimeout ) );
 		// run the actual request .. (this can take some time)
-		wfDebug( __METHOD__ . "do Request: " . $sd['url'] . ' tf: ' . $sd['target_file_path'] );
+		wfDebug( __METHOD__ . 'do Request: ' . $sd['url'] . ' tf: ' . $sd['target_file_path'] );
 		$status = $req->doRequest();
 		//wfDebug("done with req status is: ". $status->isOK(). ' '.$status->getWikiText(). "\n");
@@ -180,11 +195,17 @@ class Http {
 		// if status okay process upload using fauxReq to api:
 		if( $status->isOK() ){
 			// setup the FauxRequest
-			$fauxReqData = $sd['mParams'];
+			$fauxReqData = $sd['mParams'];
+
+			// Fix boolean parameters
+			foreach( $fauxReqData as $k => $v ) {
+				if( $v === false )
+					unset( $fauxReqData[$k] );
+			}
+
 			$fauxReqData['action'] = 'upload';
 			$fauxReqData['format'] = 'json';
-			$fauxReqData['internalhttpsession'] = $upload_session_key;
-
+			$fauxReqData['internalhttpsession'] = $upload_session_key;
 			// evil but no other clean way about it:
 			$faxReq = new FauxRequest( $fauxReqData, true );
 			$processor = new ApiMain( $faxReq, $wgEnableWriteAPI );
@@ -248,18 +269,22 @@ class Http {
 			return "MediaWiki/$wgVersion";
 		}
 	}
-class HttpRequest{
+
+class HttpRequest {
 	var $target_file_path;
 	var $upload_session_key;
 
 	function __construct( $url, $opt ){
 		global $wgSyncHTTPTimeout;
+		// double check that it's a valid url:
 		$this->url = $url;
+
 		// set the timeout to default sync timeout (unless the timeout option is provided)
 		$this->timeout = ( isset( $opt['timeout'] ) ) ? $opt['timeout'] : $wgSyncHTTPTimeout;
 		$this->method = ( isset( $opt['method'] ) ) ? $opt['method'] : 'GET';
 		$this->target_file_path = ( isset( $opt['target_file_path'] ) ) ? $opt['target_file_path'] : false;
 		$this->upload_session_key = ( isset( $opt['upload_session_key'] ) ) ? $opt['upload_session_key'] : false;
+		$this->headers_only = ( isset( $opt['headers_only'] ) ) ? $opt['headers_only'] : false;
 	}
 
 	/**
@@ -271,6 +296,10 @@ class HttpRequest{
 	 *  'adapter' => 'curl', 'soket'
 	 */
 	public function doRequest() {
+		# Make sure we have a valid url
+		if( !UploadFromUrl::isValidURI( $this->url ) )
+			return Status::newFatal('bad-url');
+
 		# Use curl if available
 		if ( function_exists( 'curl_init' ) ) {
 			return $this->doCurlReq();
@@ -295,7 +324,10 @@ class HttpRequest{
 		curl_setopt( $c, CURLOPT_TIMEOUT, $this->timeout );
 		curl_setopt( $c, CURLOPT_USERAGENT, Http::userAgent() );
 
-		if ( $this->method == 'POST' ) {
+		if ( $this->headers_only ) {
+			curl_setopt( $c, CURLOPT_NOBODY, true );
+			curl_setopt( $c, CURLOPT_HEADER, true );
+		} elseif ( $this->method == 'POST' ) {
			curl_setopt( $c, CURLOPT_POST, true );
 			curl_setopt( $c, CURLOPT_POSTFIELDS, '' );
 		} else {
@@ -317,6 +349,7 @@ class HttpRequest{
 			if( !$cwrite->status->isOK() ){
 				wfDebug( __METHOD__ . "ERROR in setting up simpleFileWriter\n" );
 				$status = $cwrite->status;
+				return $status;
 			}
 			curl_setopt( $c, CURLOPT_WRITEFUNCTION, array( $cwrite, 'callbackWriteBody' ) );
 		}
@@ -348,19 +381,37 @@ class HttpRequest{
 		}
 	}
 
-		# Don't return the text of error messages, return false on error
-		$retcode = curl_getinfo( $c, CURLINFO_HTTP_CODE );
-		if ( $retcode != 200 ) {
-			wfDebug( __METHOD__ . ": HTTP return code $retcode\n" );
-			$status = Status::newFatal( "HTTP return code $retcode\n" );
-		}
-		# Don't return truncated output
-		$errno = curl_errno( $c );
-		if ( $errno != CURLE_OK ) {
-			$errstr = curl_error( $c );
-			wfDebug( __METHOD__ . ": CURL error code $errno: $errstr\n" );
+		if ( $this->headers_only ) {
+			$headers = explode( "\n", $status->value );
+			$headerArray = array();
+			foreach ( $headers as $header ) {
+				if ( !strlen( trim( $header ) ) )
+					continue;
+				$headerParts = explode( ':', $header, 2 );
+				if ( count( $headerParts ) == 1 ) {
+					$headerArray[] = trim( $header );
+				} else {
+					list( $key, $val ) = $headerParts;
+					$headerArray[trim( $key )] = trim( $val );
+				}
+			}
+			$status->value = $headerArray;
+		} else {
+			# Don't return the text of error messages, return false on error
+			$retcode = curl_getinfo( $c, CURLINFO_HTTP_CODE );
+			if ( $retcode != 200 ) {
+				wfDebug( __METHOD__ . ": HTTP return code $retcode\n" );
+				$status = Status::newFatal( "HTTP return code $retcode\n" );
+			}
+			# Don't return truncated output
+			$errno = curl_errno( $c );
+			if ( $errno != CURLE_OK ) {
+				$errstr = curl_error( $c );
+				wfDebug( __METHOD__ . ": CURL error code $errno: $errstr\n" );
 			$status = Status::newFatal( " CURL error code $errno: $errstr\n" );
+		}
 	}
+
 	curl_close( $c );
 
 	// return the result obj
@@ -368,24 +419,70 @@ class HttpRequest{
 	}
 
 	public function doPhpReq(){
-		#$use file_get_contents...
-		# This doesn't have local fetch capabilities...
+		global $wgTitle, $wgHTTPProxy;
+
+		# Check for php.ini allow_url_fopen
+		if( !ini_get( 'allow_url_fopen' ) ){
+			return Status::newFatal( 'allow_url_fopen needs to be enabled for http copy to work' );
+		}
 
-		$headers = array( "User-Agent: " . Http :: userAgent() );
-		if( strcasecmp( $method, 'post' ) == 0 ) {
+		// start with good status:
+		$status = Status::newGood();
+
+		if ( $this->headers_only ) {
+			$status->value = get_headers( $this->url, 1 );
+			return $status;
+		}
+
+		// setup the headers
+		$headers = array( "User-Agent: " . Http::userAgent() );
+		if ( is_object( $wgTitle ) ) {
+			$headers[] = "Referer: ". $wgTitle->getFullURL();
+		}
+
+		if( strcasecmp( $this->method, 'post' ) == 0 ) {
 			// Required for HTTP 1.0 POSTs
 			$headers[] = "Content-Length: 0";
 		}
-		$opts = array(
+
+		$fcontext = stream_context_create ( array(
 			'http' => array(
-				'method' => $method,
+				'method' => $this->method,
 				'header' => implode( "\r\n", $headers ),
-				'timeout' => $timeout ) );
-		$ctx = stream_context_create( $opts );
+				'timeout' => $this->timeout )
+			)
+		);
+
+		$fh = fopen( $this->url, "r", false, $fcontext);
 
-		$status = new Status;
-		$status->value = file_get_contents( $url, false, $ctx );
-		if( !$status->value ){
+		// set the write back function (if we are writing to a file)
+		if( $this->target_file_path ){
+			$cwrite = new simpleFileWriter( $this->target_file_path, $this->upload_session_key );
+			if( !$cwrite->status->isOK() ){
+				wfDebug( __METHOD__ . "ERROR in setting up simpleFileWriter\n" );
+				$status = $cwrite->status;
+				return $status;
+			}
+
+			// read $fh into the simpleFileWriter (grab in 64K chunks since its likely a media file)
+			while ( !feof( $fh ) ) {
+				$contents = fread( $fh, 65536 );
+				$cwrite->callbackWriteBody( $fh, $contents );
+			}
+
+			$cwrite->close();
+			// check for simpleFileWriter error:
+			if( !$cwrite->status->isOK() ){
+				return $cwrite->status;
+			}
+		} else {
+			// read $fh into status->value
+			$status->value = @stream_get_contents( $fh );
+		}
+		//close the url file wrapper
+		fclose( $fh );
+
+		// check for "false"
+		if( $status->value === false ){
 			$status->error( 'file_get_contents-failed' );
 		}
 		return $status;
@@ -415,7 +512,7 @@ class simpleFileWriter {
 		$this->prevTime = time();
 	}
 
-	public function callbackWriteBody($ch, $data_packet){
+	public function callbackWriteBody( $ch, $data_packet ){
 		global $wgMaxUploadSize;
 
 		// write out the content
@@ -458,7 +555,7 @@ class simpleFileWriter {
 		}
 		$sd =& $_SESSION['wsDownload'][$this->upload_session_key];
 		// check if the user canceled the request:
-		if( $sd['user_cancel'] == true ){
+		if( isset( $sd['user_cancel'] ) && $sd['user_cancel'] == true ){
 			// kill the download
 			return Status::newFatal( 'user-canceled-request' );
 		}
@@ -479,4 +576,4 @@ class simpleFileWriter {
 		}
 	}
 
-}
\ No newline at end of file
+}