Merge "Provide command to adjust phpunit.xml for code coverage"
[lhc/web/wiklou.git] / includes / import / ImportStreamSource.php
1 <?php
2 /**
3 * MediaWiki page data importer.
4 *
5 * Copyright © 2003,2005 Brion Vibber <brion@pobox.com>
6 * https://www.mediawiki.org/
7 *
8 * This program is free software; you can redistribute it and/or modify
9 * it under the terms of the GNU General Public License as published by
10 * the Free Software Foundation; either version 2 of the License, or
11 * (at your option) any later version.
12 *
13 * This program is distributed in the hope that it will be useful,
14 * but WITHOUT ANY WARRANTY; without even the implied warranty of
15 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
16 * GNU General Public License for more details.
17 *
18 * You should have received a copy of the GNU General Public License along
19 * with this program; if not, write to the Free Software Foundation, Inc.,
20 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
21 * http://www.gnu.org/copyleft/gpl.html
22 *
23 * @file
24 * @ingroup SpecialPage
25 */
26 use MediaWiki\MediaWikiServices;
27
28 /**
29  * Imports an XML dump from a file (either from file upload, files on disk, or HTTP)
30 * @ingroup SpecialPage
31 */
class ImportStreamSource implements ImportSource {
	/** @var resource Open read handle for the XML dump stream */
	private $mHandle;

	/**
	 * @param resource $handle Open stream handle positioned at the start of the dump
	 */
	public function __construct( $handle ) {
		$this->mHandle = $handle;
	}

	/**
	 * @return bool True once the underlying stream has reached end-of-file
	 */
	public function atEnd() {
		return feof( $this->mHandle );
	}

	/**
	 * Read the next chunk (up to 32 KiB) of the dump.
	 * @return string|bool Chunk data, or false on read failure
	 */
	public function readChunk() {
		return fread( $this->mHandle, 32768 );
	}

	/**
	 * Open a local file as an import source.
	 * @param string $filename
	 * @return Status Good status wrapping an ImportStreamSource, or fatal 'importcantopen'
	 */
	public static function newFromFile( $filename ) {
		// fopen() emits a warning on failure; suppress it and report via Status instead.
		Wikimedia\suppressWarnings();
		$file = fopen( $filename, 'rt' );
		Wikimedia\restoreWarnings();
		if ( !$file ) {
			return Status::newFatal( "importcantopen" );
		}
		return Status::newGood( new ImportStreamSource( $file ) );
	}

	/**
	 * Create an import source from a file uploaded via an HTML form.
	 * @param string $fieldname Name of the $_FILES field holding the upload
	 * @return Status
	 */
	public static function newFromUpload( $fieldname = "xmlimport" ) {
		// Plain read with ?? rather than "=& $_FILES[$fieldname]": a reference
		// assignment would silently create the missing key (as null) inside the
		// $_FILES superglobal as a side effect.
		$upload = $_FILES[$fieldname] ?? null;

		if ( $upload === null || !$upload['name'] ) {
			return Status::newFatal( 'importnofile' );
		}
		if ( !empty( $upload['error'] ) ) {
			switch ( $upload['error'] ) {
				case UPLOAD_ERR_INI_SIZE:
					// The uploaded file exceeds the upload_max_filesize directive in php.ini.
					return Status::newFatal( 'importuploaderrorsize' );
				case UPLOAD_ERR_FORM_SIZE:
					// The uploaded file exceeds the MAX_FILE_SIZE directive that
					// was specified in the HTML form.
					// FIXME This is probably never used since that directive was removed in 8e91c520?
					return Status::newFatal( 'importuploaderrorsize' );
				case UPLOAD_ERR_PARTIAL:
					// The uploaded file was only partially uploaded
					return Status::newFatal( 'importuploaderrorpartial' );
				case UPLOAD_ERR_NO_TMP_DIR:
					// Missing a temporary folder.
					return Status::newFatal( 'importuploaderrortemp' );
				// Other error codes get the generic 'importnofile' error message below
			}
		}
		$fname = $upload['tmp_name'];
		// Guard against forged uploads: only accept files PHP itself received.
		if ( is_uploaded_file( $fname ) ) {
			return self::newFromFile( $fname );
		} else {
			return Status::newFatal( 'importnofile' );
		}
	}

	/**
	 * Fetch a dump over HTTP(S) and buffer it into a temporary file.
	 * @param string $url
	 * @param string $method HTTP method to use ('GET' or 'POST')
	 * @return Status
	 */
	public static function newFromURL( $url, $method = 'GET' ) {
		global $wgHTTPImportTimeout;
		wfDebug( __METHOD__ . ": opening $url\n" );
		# Use the standard HTTP fetch function; it times out
		# quicker and sorts out user-agent problems which might
		# otherwise prevent importing from large sites, such
		# as the Wikimedia cluster, etc.
		$data = MediaWikiServices::getInstance()->getHttpRequestFactory()->request(
			$method,
			$url,
			[
				'followRedirects' => true,
				'timeout' => $wgHTTPImportTimeout
			],
			__METHOD__
		);
		// HttpRequestFactory::request() signals failure with null (older HTTP
		// helpers used false); is_string() rejects both, so a failed fetch is
		// never written out as an empty dump.
		if ( is_string( $data ) ) {
			$file = tmpfile();
			fwrite( $file, $data );
			fflush( $file );
			fseek( $file, 0 );
			return Status::newGood( new ImportStreamSource( $file ) );
		} else {
			return Status::newFatal( 'importcantopen' );
		}
	}

	/**
	 * Build an import source that fetches a page (via Special:Export) from a
	 * foreign wiki identified by an interwiki prefix.
	 * @param string $interwiki Interwiki prefix (may be a chain like "a:b")
	 * @param string $page Title of the page to export on the foreign wiki
	 * @param bool $history Whether to request the full revision history
	 * @param bool $templates Whether to include used templates
	 * @param int $pageLinkDepth Depth of linked pages to include (0 = none)
	 * @return Status
	 */
	public static function newFromInterwiki( $interwiki, $page, $history = false,
		$templates = false, $pageLinkDepth = 0
	) {
		if ( $page == '' ) {
			return Status::newFatal( 'import-noarticle' );
		}

		# Look up the first interwiki prefix, and let the foreign site handle
		# subsequent interwiki prefixes
		$firstIwPrefix = strtok( $interwiki, ':' );
		$interwikiLookup = MediaWikiServices::getInstance()->getInterwikiLookup();
		$firstIw = $interwikiLookup->fetch( $firstIwPrefix );
		if ( !$firstIw ) {
			return Status::newFatal( 'importbadinterwiki' );
		}

		// Remainder of the prefix chain (strtok resumes the previous string);
		// re-append the ':' separator so the foreign wiki can resolve it.
		$additionalIwPrefixes = strtok( '' );
		if ( $additionalIwPrefixes ) {
			$additionalIwPrefixes .= ':';
		}
		# Have to do a DB-key replacement ourselves; otherwise spaces get
		# URL-encoded to +, which is wrong in this case. Similar to logic in
		# Title::getLocalURL
		// {$var} interpolation instead of deprecated ${var} form (PHP 8.2).
		$link = $firstIw->getURL( strtr( "{$additionalIwPrefixes}Special:Export/$page",
			' ', '_' ) );

		$params = [];
		if ( $history ) {
			$params['history'] = 1;
		}
		if ( $templates ) {
			$params['templates'] = 1;
		}
		if ( $pageLinkDepth ) {
			$params['pagelink-depth'] = $pageLinkDepth;
		}

		$url = wfAppendQuery( $link, $params );
		# For interwikis, use POST to avoid redirects.
		return self::newFromURL( $url, "POST" );
	}
}