<?php
# Copyright (C) 2003-2008 Brion Vibber <brion@pobox.com>
# http://www.mediawiki.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html
/**
 * @file
 * @ingroup SpecialPage
 */
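
/*
 * Quick sketch of the request parameters handled by this special page. The
 * names are taken from the getCheck()/getText()/getVal() calls in execute()
 * below; the exact behaviour is defined by that code.
 *
 *   pages          - newline-separated list of page titles (defaults to the
 *                    subpage parameter $par on GET requests)
 *   curonly        - export only the current revision of each page
 *   history        - request full page history instead of the current revision
 *   offset, limit, dir - optional history range controls on POSTed requests
 *   templates      - also export templates used by the listed pages
 *   pagelink-depth - follow page links this many levels (see validateLinkDepth())
 *   catname/addcat - add all pages of a category to the list
 *   nsindex/addns  - add all pages of a namespace (if $wgExportFromNamespaces)
 *   listauthors    - include a distinct contributor list (current-only exports,
 *                    if $wgExportAllowListContributors)
 *   wpDownload     - send the XML as a file attachment
 */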

class SpecialExport extends SpecialPage {

	private $curonly, $doExport, $pageLinkDepth, $templates;
	private $images;

	public function __construct() {
		parent::__construct( 'Export' );
	}

	public function execute( $par ) {
		global $wgOut, $wgRequest, $wgSitename, $wgExportAllowListContributors;
		global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
		global $wgExportFromNamespaces;

		$this->setHeaders();
		$this->outputHeader();

		// Set some variables
		$this->curonly = true;
		$this->doExport = false;
		$this->templates = $wgRequest->getCheck( 'templates' );
		$this->images = $wgRequest->getCheck( 'images' ); // Doesn't do anything yet
		$this->pageLinkDepth = $this->validateLinkDepth(
			$wgRequest->getIntOrNull( 'pagelink-depth' ) );
		$nsindex = '';

		if ( $wgRequest->getCheck( 'addcat' ) ) {
			$page = $wgRequest->getText( 'pages' );
			$catname = $wgRequest->getText( 'catname' );

			if ( $catname !== '' && $catname !== null && $catname !== false ) {
				$t = Title::makeTitleSafe( NS_MAIN, $catname );
				if ( $t ) {
					/**
					 * @todo Fixme: this can lead to hitting memory limit for very large
					 * categories. Ideally we would do the lookup synchronously
					 * during the export in a single query.
					 */
					$catpages = $this->getPagesFromCategory( $t );
					if ( $catpages ) $page .= "\n" . implode( "\n", $catpages );
				}
			}
		}
		else if( $wgRequest->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
			$page = $wgRequest->getText( 'pages' );
			$nsindex = $wgRequest->getText( 'nsindex', '' );

			if ( strval( $nsindex ) !== '' ) {
				/**
				 * Same implementation as above, so same @todo
				 */
				$nspages = $this->getPagesFromNamespace( $nsindex );
				if ( $nspages ) $page .= "\n" . implode( "\n", $nspages );
			}
		}
		else if( $wgRequest->wasPosted() && $par == '' ) {
			$page = $wgRequest->getText( 'pages' );
			$this->curonly = $wgRequest->getCheck( 'curonly' );
			$rawOffset = $wgRequest->getVal( 'offset' );
			if( $rawOffset ) {
				$offset = wfTimestamp( TS_MW, $rawOffset );
			} else {
				$offset = null;
			}
			$limit = $wgRequest->getInt( 'limit' );
			$dir = $wgRequest->getVal( 'dir' );
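			// $history is either one of the WikiExporter::CURRENT/FULL constants
			// or, for a bounded history export, an array with 'dir', 'offset' and
			// 'limit' keys as assembled below.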
			$history = array(
				'dir' => 'asc',
				'offset' => false,
				'limit' => $wgExportMaxHistory,
			);
			$historyCheck = $wgRequest->getCheck( 'history' );
			if ( $this->curonly ) {
				$history = WikiExporter::CURRENT;
			} elseif ( !$historyCheck ) {
				if ( $limit > 0 && ( $wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
					$history['limit'] = $limit;
				}
				if ( !is_null( $offset ) ) {
					$history['offset'] = $offset;
				}
				if ( strtolower( $dir ) == 'desc' ) {
					$history['dir'] = 'desc';
				}
			}

			if( $page != '' ) $this->doExport = true;
		} else {
			// Default to current-only for GET requests
			$page = $wgRequest->getText( 'pages', $par );
			$historyCheck = $wgRequest->getCheck( 'history' );
			if( $historyCheck ) {
				$history = WikiExporter::FULL;
			} else {
				$history = WikiExporter::CURRENT;
			}

			if( $page != '' ) $this->doExport = true;
		}

		if( !$wgExportAllowHistory ) {
			// Override
			$history = WikiExporter::CURRENT;
		}

		$list_authors = $wgRequest->getCheck( 'listauthors' );
		if ( !$this->curonly || !$wgExportAllowListContributors ) $list_authors = false;

		if ( $this->doExport ) {
			$wgOut->disable();
			// Cancel output buffering and gzipping if set
			// This should provide safer streaming for pages with history
			wfResetOutputBuffers();
			$wgRequest->response()->header( "Content-type: application/xml; charset=utf-8" );
			if( $wgRequest->getCheck( 'wpDownload' ) ) {
				// Provide a sane filename suggestion
				$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
				$wgRequest->response()->header( "Content-disposition: attachment;filename={$filename}" );
			}
			$this->doExport( $page, $history, $list_authors );
			return;
		}

		$wgOut->addWikiMsg( 'exporttext' );

		$form = Xml::openElement( 'form', array( 'method' => 'post',
			'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
		$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ), 'catname', 'catname', 40 ) . '&nbsp;';
		$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';

		if ( $wgExportFromNamespaces ) {
			$form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&nbsp;';
			$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
		}

		$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
		$form .= '<br />';

		if( $wgExportAllowHistory ) {
			$form .= Xml::checkLabel( wfMsg( 'exportcuronly' ), 'curonly', 'curonly', true ) . '<br />';
		} else {
			$wgOut->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
		}
		$form .= Xml::checkLabel( wfMsg( 'export-templates' ), 'templates', 'wpExportTemplates', false ) . '<br />';
		if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
			$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
		}
		// Enable this when we can do something useful exporting/importing image information. :)
		//$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
		$form .= Xml::checkLabel( wfMsg( 'export-download' ), 'wpDownload', 'wpDownload', true ) . '<br />';

		$form .= Xml::submitButton( wfMsg( 'export-submit' ), array( 'accesskey' => 's' ) );
		$form .= Xml::closeElement( 'form' );
		$wgOut->addHTML( $form );
	}

	private function userCanOverrideExportDepth() {
		global $wgUser;

		return $wgUser->isAllowed( 'override-export-depth' );
	}

	/**
	 * Do the actual page exporting
	 *
	 * @param $page String: user input on what page(s) to export
	 * @param $history Mixed: one of the WikiExporter history export constants
	 * @param $list_authors Boolean: Whether to add distinct author list (when
	 *                      not returning full history)
	 */
	private function doExport( $page, $history, $list_authors ) {
		global $wgExportMaxHistory;

		$pageSet = array(); // Inverted index of all pages to look up

		// Split up and normalize input
		foreach( explode( "\n", $page ) as $pageName ) {
			$pageName = trim( $pageName );
			$title = Title::newFromText( $pageName );
			if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
				// Only record each page once!
				$pageSet[$title->getPrefixedText()] = true;
			}
		}

		// Set of original pages to pass on to further manipulation...
		$inputPages = array_keys( $pageSet );

		// Look up any linked pages if asked...
		if( $this->templates ) {
			$pageSet = $this->getTemplates( $inputPages, $pageSet );
		}

		if( $linkDepth = $this->pageLinkDepth ) {
			$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
		}

		/*
		// Enable this when we can do something useful exporting/importing image information. :)
		if( $this->images ) {
			$pageSet = $this->getImages( $inputPages, $pageSet );
		}
		*/

		$pages = array_keys( $pageSet );

		// Normalize titles to the same format and remove dupes, see bug 17374
		foreach( $pages as $k => $v ) {
			$pages[$k] = str_replace( " ", "_", $v );
		}
		$pages = array_unique( $pages );

		/* Ok, let's get to it... */
		if( $history == WikiExporter::CURRENT ) {
			$lb = false;
			$db = wfGetDB( DB_SLAVE );
			$buffer = WikiExporter::BUFFER;
		} else {
			// Use an unbuffered query; histories may be very long!
			$lb = wfGetLBFactory()->newMainLB();
			$db = $lb->getConnection( DB_SLAVE );
			$buffer = WikiExporter::STREAM;

			// This might take a while... :D
			wfSuppressWarnings();
			set_time_limit(0);
			wfRestoreWarnings();
		}
		$exporter = new WikiExporter( $db, $history, $buffer );
		$exporter->list_authors = $list_authors;
		$exporter->openStream();
		foreach( $pages as $page ) {
			/*
			if( $wgExportMaxHistory && !$this->curonly ) {
				$title = Title::newFromText( $page );
				if( $title ) {
					$count = Revision::countByTitle( $db, $title );
					if( $count > $wgExportMaxHistory ) {
						wfDebug( __FUNCTION__ .
							": Skipped $page, $count revisions too big\n" );
						continue;
					}
				}
			}*/
			# Bug 8824: Only export pages the user can read
			$title = Title::newFromText( $page );
			if( is_null( $title ) ) continue; # TODO: perhaps output an <error> tag or something.
			if( !$title->userCanRead() ) continue; # TODO: perhaps output an <error> tag or something.

			$exporter->pageByTitle( $title );
		}

		$exporter->closeStream();
		if( $lb ) {
			$lb->closeAll();
		}
	}

	private function getPagesFromCategory( $title ) {
		global $wgContLang;

		$name = $title->getDBkey();

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( array( 'page', 'categorylinks' ),
			array( 'page_namespace', 'page_title' ),
			array( 'cl_from=page_id', 'cl_to' => $name ),
			__METHOD__, array( 'LIMIT' => '5000' ) );

		$pages = array();
		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		$dbr->freeResult( $res );

		return $pages;
	}

	private function getPagesFromNamespace( $nsindex ) {
		global $wgContLang;

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( 'page', array( 'page_namespace', 'page_title' ),
			array( 'page_namespace' => $nsindex ),
			__METHOD__, array( 'LIMIT' => '5000' ) );

		$pages = array();
		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		$dbr->freeResult( $res );

		return $pages;
	}

	/**
	 * Expand a list of pages to include templates used in those pages.
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getTemplates( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'templatelinks',
			array( 'tl_namespace AS namespace', 'tl_title AS title' ),
			array( 'page_id=tl_from' ) );
	}

	/**
	 * Validate link depth setting, if available.
	 */
	private function validateLinkDepth( $depth ) {
		global $wgExportMaxLinkDepth, $wgExportMaxLinkDepthLimit;
		if( $depth < 0 ) {
			return 0;
		}
		if ( !$this->userCanOverrideExportDepth() ) {
			if( $depth > $wgExportMaxLinkDepth ) {
				return $wgExportMaxLinkDepth;
			}
		}
		/*
		 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
		 * crazy-big export from being done by someone setting the depth
		 * number too high. In other words, last resort safety net.
		 */
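		// For example (assuming $wgExportMaxLinkDepth = 3): a request of 10 from an
		// ordinary user is clamped to 3 above, while a user with the
		// 'override-export-depth' right requesting 10 still only gets 5 here.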
		return intval( min( $depth, 5 ) );
	}

	/** Expand a list of pages to include pages linked to from those pages. */
	private function getPageLinks( $inputPages, $pageSet, $depth ) {
		for( ; $depth > 0; --$depth ) {
			$pageSet = $this->getLinks( $inputPages, $pageSet, 'pagelinks',
				array( 'pl_namespace AS namespace', 'pl_title AS title' ),
				array( 'page_id=pl_from' ) );
			$inputPages = array_keys( $pageSet );
		}
		return $pageSet;
	}

	/**
	 * Expand a list of pages to include images used in those pages.
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getImages( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'imagelinks',
			array( NS_FILE . ' AS namespace', 'il_to AS title' ),
			array( 'page_id=il_from' ) );
	}

	/**
	 * Expand a list of pages to include items used in those pages.
	 * @private
	 */
	private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
		$dbr = wfGetDB( DB_SLAVE );
		foreach( $inputPages as $page ) {
			$title = Title::newFromText( $page );
			if( $title ) {
				$pageSet[$title->getPrefixedText()] = true;
				/// @todo Fixme: May or may not be more efficient to batch these
				/// by namespace when given multiple input pages.
				$result = $dbr->select(
					array( 'page', $table ),
					$fields,
					array_merge( $join,
						array(
							'page_namespace' => $title->getNamespace(),
							'page_title' => $title->getDBkey() ) ),
					__METHOD__ );
				foreach( $result as $row ) {
					$template = Title::makeTitle( $row->namespace, $row->title );
					$pageSet[$template->getPrefixedText()] = true;
				}
			}
		}
		return $pageSet;
	}
}
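
/*
 * Rough usage sketch (parameter names as handled in execute() above; the URL
 * forms assume a standard index.php entry point):
 *
 *   Special:Export/Some_Page
 *       GET: exports the current revision of "Some_Page"; the subpage is used
 *       as the default value of the 'pages' parameter.
 *
 *   POST to Special:Export with pages="Foo\nBar", templates=1, curonly=1
 *       exports the current revisions of Foo and Bar plus the templates they use.
 */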