Allow a new checkbox on Special:Export to export all pages. See bug 10574.
[lhc/web/wiklou.git] / includes / specials / SpecialExport.php
1 <?php
2 /**
3 * Implements Special:Export
4 *
5 * Copyright © 2003-2008 Brion Vibber <brion@pobox.com>
6 *
7 * This program is free software; you can redistribute it and/or modify
8 * it under the terms of the GNU General Public License as published by
9 * the Free Software Foundation; either version 2 of the License, or
10 * (at your option) any later version.
11 *
12 * This program is distributed in the hope that it will be useful,
13 * but WITHOUT ANY WARRANTY; without even the implied warranty of
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15 * GNU General Public License for more details.
16 *
17 * You should have received a copy of the GNU General Public License along
18 * with this program; if not, write to the Free Software Foundation, Inc.,
19 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
20 * http://www.gnu.org/copyleft/gpl.html
21 *
22 * @file
23 * @ingroup SpecialPage
24 */
25
26 /**
27 * A special page that allows users to export pages in a XML file
28 *
29 * @ingroup SpecialPage
30 */
31 class SpecialExport extends SpecialPage {
32
// Per-request state, populated in execute():
// $curonly       - export only the latest revision of each page
// $doExport      - whether a valid export was requested (stream XML instead of the form)
// $pageLinkDepth - validated recursion depth for following page links
// $templates     - whether to also export transcluded templates
private $curonly, $doExport, $pageLinkDepth, $templates;
// Whether to include used images; read from the request but not acted upon yet.
private $images;

/**
 * Register this special page under the canonical name 'Export'.
 */
public function __construct() {
	parent::__construct( 'Export' );
}
39
/**
 * Main entry point: either stream the export XML (when a valid export was
 * requested) or render the Special:Export input form.
 *
 * @param $par String: optional page name passed as the subpage part of the URL
 */
public function execute( $par ) {
	global $wgSitename, $wgExportAllowListContributors, $wgExportFromNamespaces;
	global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
	global $wgExportAllowAll;

	$this->setHeaders();
	$this->outputHeader();

	// Set some variables
	$this->curonly = true;
	$this->doExport = false;
	$request = $this->getRequest();
	$this->templates = $request->getCheck( 'templates' );
	$this->images = $request->getCheck( 'images' ); // Doesn't do anything yet
	$this->pageLinkDepth = $this->validateLinkDepth(
		$request->getIntOrNull( 'pagelink-depth' )
	);
	$nsindex = '';

	// Safe defaults: not every branch below assigns these, but they are read
	// unconditionally further down ($page in the textarea, $exportall and
	// $history at the doExport() call). Without these, the exportall branch
	// used to read undefined variables.
	$page = '';
	$exportall = false;
	$history = WikiExporter::CURRENT;

	if ( $request->getCheck( 'addcat' ) ) {
		// "Add pages from category" button: append category members to the list
		$page = $request->getText( 'pages' );
		$catname = $request->getText( 'catname' );

		if ( $catname !== '' && $catname !== null && $catname !== false ) {
			$t = Title::makeTitleSafe( NS_MAIN, $catname );
			if ( $t ) {
				/**
				 * @todo FIXME: This can lead to hitting memory limit for very large
				 * categories. Ideally we would do the lookup synchronously
				 * during the export in a single query.
				 */
				$catpages = $this->getPagesFromCategory( $t );
				if ( $catpages ) {
					$page .= "\n" . implode( "\n", $catpages );
				}
			}
		}
	} elseif ( $request->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
		// "Add pages from namespace" button; same memory caveat as above
		$page = $request->getText( 'pages' );
		$nsindex = $request->getText( 'nsindex', '' );

		if ( strval( $nsindex ) !== '' ) {
			$nspages = $this->getPagesFromNamespace( $nsindex );
			if ( $nspages ) {
				$page .= "\n" . implode( "\n", $nspages );
			}
		}
	} elseif ( $request->getCheck( 'exportall' ) && $wgExportAllowAll ) {
		$this->doExport = true;
		$exportall = true;
		// doExport() forces full history in the exportall case; mirror that
		// here so $history is always well-defined at the call below.
		$history = WikiExporter::FULL;
	} elseif ( $request->wasPosted() && $par == '' ) {
		$page = $request->getText( 'pages' );
		$this->curonly = $request->getCheck( 'curonly' );
		$rawOffset = $request->getVal( 'offset' );

		if ( $rawOffset ) {
			$offset = wfTimestamp( TS_MW, $rawOffset );
		} else {
			$offset = null;
		}

		$limit = $request->getInt( 'limit' );
		$dir = $request->getVal( 'dir' );
		// History selection parameters for WikiExporter (dir/offset/limit)
		$history = array(
			'dir' => 'asc',
			'offset' => false,
			'limit' => $wgExportMaxHistory,
		);
		$historyCheck = $request->getCheck( 'history' );

		if ( $this->curonly ) {
			$history = WikiExporter::CURRENT;
		} elseif ( !$historyCheck ) {
			// Caller-supplied paging, clamped to the site-wide maximum
			if ( $limit > 0 && ( $wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
				$history['limit'] = $limit;
			}
			if ( !is_null( $offset ) ) {
				$history['offset'] = $offset;
			}
			if ( strtolower( $dir ) == 'desc' ) {
				$history['dir'] = 'desc';
			}
		}

		if ( $page != '' ) {
			$this->doExport = true;
		}
	} else {
		// Default to current-only for GET requests.
		$page = $request->getText( 'pages', $par );
		$historyCheck = $request->getCheck( 'history' );

		if ( $historyCheck ) {
			$history = WikiExporter::FULL;
		} else {
			$history = WikiExporter::CURRENT;
		}

		if ( $page != '' ) {
			$this->doExport = true;
		}
	}

	if ( !$wgExportAllowHistory ) {
		// Site configuration forbids history export: override whatever was chosen
		$history = WikiExporter::CURRENT;
	}

	$list_authors = $request->getCheck( 'listauthors' );
	if ( !$this->curonly || !$wgExportAllowListContributors ) {
		$list_authors = false;
	}

	if ( $this->doExport ) {
		$this->getOutput()->disable();

		// Cancel output buffering and gzipping if set
		// This should provide safer streaming for pages with history
		wfResetOutputBuffers();
		$request->response()->header( "Content-type: application/xml; charset=utf-8" );

		if ( $request->getCheck( 'wpDownload' ) ) {
			// Provide a sane filename suggestion
			$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
			$request->response()->header( "Content-disposition: attachment;filename={$filename}" );
		}

		$this->doExport( $page, $history, $list_authors, $exportall );

		return;
	}

	// No export requested: render the input form.
	$out = $this->getOutput();
	$out->addWikiMsg( 'exporttext' );

	$form = Xml::openElement( 'form', array( 'method' => 'post',
		'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
	$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ), 'catname', 'catname', 40 ) . '&#160;';
	$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';

	if ( $wgExportFromNamespaces ) {
		$form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
		$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
	}

	if ( $wgExportAllowAll ) {
		$form .= Xml::checkLabel(
			wfMsg( 'exportall' ),
			'exportall',
			'exportall',
			$request->wasPosted() ? $request->getCheck( 'exportall' ) : false
		) . '<br />';
	}

	$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
	$form .= '<br />';

	if ( $wgExportAllowHistory ) {
		$form .= Xml::checkLabel(
			wfMsg( 'exportcuronly' ),
			'curonly',
			'curonly',
			$request->wasPosted() ? $request->getCheck( 'curonly' ) : true
		) . '<br />';
	} else {
		$out->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
	}

	$form .= Xml::checkLabel(
		wfMsg( 'export-templates' ),
		'templates',
		'wpExportTemplates',
		$request->wasPosted() ? $request->getCheck( 'templates' ) : false
	) . '<br />';

	if ( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
		$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
	}
	// Enable this when we can do something useful exporting/importing image information. :)
	//$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
	$form .= Xml::checkLabel(
		wfMsg( 'export-download' ),
		'wpDownload',
		'wpDownload',
		$request->wasPosted() ? $request->getCheck( 'wpDownload' ) : true
	) . '<br />';

	if ( $wgExportAllowListContributors ) {
		$form .= Xml::checkLabel(
			wfMsg( 'exportlistauthors' ),
			'listauthors',
			'listauthors',
			$request->wasPosted() ? $request->getCheck( 'listauthors' ) : false
		) . '<br />';
	}

	$form .= Xml::submitButton( wfMsg( 'export-submit' ), Linker::tooltipAndAccesskeyAttribs( 'export' ) );
	$form .= Xml::closeElement( 'form' );

	$out->addHTML( $form );
}
247
/**
 * Whether the current user may exceed the configured page-link depth limit.
 *
 * @return bool
 */
private function userCanOverrideExportDepth() {
	$user = $this->getUser();

	return $user->isAllowed( 'override-export-depth' );
}
254
/**
 * Do the actual page exporting
 *
 * @param $page String: user input on what page(s) to export
 * @param $history Mixed: one of the WikiExporter history export constants,
 *        or an associative array of history parameters (dir/offset/limit)
 * @param $list_authors Boolean: Whether to add distinct author list (when
 *        not returning full history)
 * @param $exportall Boolean: Whether to export everything (ignores $page)
 */
private function doExport( $page, $history, $list_authors, $exportall ) {

	// If we are grabbing everything, enable full history and ignore the rest
	if ($exportall) {
		$history = WikiExporter::FULL;
	} else {

		$pageSet = array(); // Inverted index of all pages to look up

		// Split up and normalize input
		foreach( explode( "\n", $page ) as $pageName ) {
			$pageName = trim( $pageName );
			$title = Title::newFromText( $pageName );
			// Skip invalid, interwiki, and empty titles
			if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
				// Only record each page once!
				$pageSet[$title->getPrefixedText()] = true;
			}
		}

		// Set of original pages to pass on to further manipulation...
		$inputPages = array_keys( $pageSet );

		// Look up any linked pages if asked...
		if( $this->templates ) {
			$pageSet = $this->getTemplates( $inputPages, $pageSet );
		}
		$linkDepth = $this->pageLinkDepth;
		if( $linkDepth ) {
			$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
		}

		/*
		// Enable this when we can do something useful exporting/importing image information. :)
		if( $this->images ) ) {
			$pageSet = $this->getImages( $inputPages, $pageSet );
		}
		*/

		$pages = array_keys( $pageSet );

		// Normalize titles to the same format and remove dupes, see bug 17374
		foreach( $pages as $k => $v ) {
			$pages[$k] = str_replace( " ", "_", $v );
		}

		$pages = array_unique( $pages );
	}

	/* Ok, let's get to it... */
	if( $history == WikiExporter::CURRENT && ! $exportall ) {
		// Latest-revision-only export: a buffered query on an ordinary slave is fine
		$lb = false;
		$db = wfGetDB( DB_SLAVE );
		$buffer = WikiExporter::BUFFER;
	} else {
		// Use an unbuffered query; histories may be very long!
		// A dedicated load balancer gives us our own connection to stream from.
		$lb = wfGetLBFactory()->newMainLB();
		$db = $lb->getConnection( DB_SLAVE );
		$buffer = WikiExporter::STREAM;

		// This might take a while... :D
		wfSuppressWarnings();
		set_time_limit(0);
		wfRestoreWarnings();
	}

	$exporter = new WikiExporter( $db, $history, $buffer );
	$exporter->list_authors = $list_authors;
	$exporter->openStream();

	if ( $exportall ) {
		$exporter->allPages();
	} else {
		foreach( $pages as $page ) {
			/*
			if( $wgExportMaxHistory && !$this->curonly ) {
				$title = Title::newFromText( $page );
				if( $title ) {
					$count = Revision::countByTitle( $db, $title );
					if( $count > $wgExportMaxHistory ) {
						wfDebug( __FUNCTION__ .
							": Skipped $page, $count revisions too big\n" );
						continue;
					}
				}
			}*/
			#Bug 8824: Only export pages the user can read
			$title = Title::newFromText( $page );
			if( is_null( $title ) ) {
				continue; #TODO: perhaps output an <error> tag or something.
			}
			if( !$title->userCan( 'read', $this->getUser() ) ) {
				continue; #TODO: perhaps output an <error> tag or something.
			}

			$exporter->pageByTitle( $title );
		}
	}

	$exporter->closeStream();

	// Return the dedicated streaming connection (if any) to the pool
	if( $lb ) {
		$lb->closeAll();
	}
}
368
/**
 * List up to 5000 member pages of the given category, as prefixed titles.
 *
 * @param $title Title category page title (NS_MAIN-constructed; DB key is used)
 * @return array of prefixed page names
 */
private function getPagesFromCategory( $title ) {
	global $wgContLang;

	$categoryKey = $title->getDBkey();

	$dbr = wfGetDB( DB_SLAVE );
	$res = $dbr->select(
		array( 'page', 'categorylinks' ),
		array( 'page_namespace', 'page_title' ),
		array( 'cl_from=page_id', 'cl_to' => $categoryKey ),
		__METHOD__,
		array( 'LIMIT' => '5000' )
	);

	$pages = array();
	foreach ( $res as $row ) {
		$name = $row->page_title;
		// Titles outside the main namespace need their namespace prefix
		if ( $row->page_namespace ) {
			$name = $wgContLang->getNsText( $row->page_namespace ) . ':' . $name;
		}
		$pages[] = $name;
	}

	return $pages;
}
400
/**
 * List up to 5000 pages in the given namespace, as prefixed titles.
 *
 * @param $nsindex int namespace index
 * @return array of prefixed page names
 */
private function getPagesFromNamespace( $nsindex ) {
	global $wgContLang;

	$dbr = wfGetDB( DB_SLAVE );
	$res = $dbr->select(
		'page',
		array( 'page_namespace', 'page_title' ),
		array( 'page_namespace' => $nsindex ),
		__METHOD__,
		array( 'LIMIT' => '5000' )
	);

	$pages = array();
	foreach ( $res as $row ) {
		// Prefix with the namespace text unless it is the main namespace
		$pages[] = $row->page_namespace
			? $wgContLang->getNsText( $row->page_namespace ) . ':' . $row->page_title
			: $row->page_title;
	}

	return $pages;
}
431
/**
 * Expand a list of pages to include templates used in those pages.
 *
 * @param $inputPages array, list of titles to look up
 * @param $pageSet array, associative array indexed by titles for output
 * @return array associative array indexed by titles
 */
private function getTemplates( $inputPages, $pageSet ) {
	$fields = array( 'tl_namespace AS namespace', 'tl_title AS title' );
	$joinCond = array( 'page_id=tl_from' );

	return $this->getLinks( $inputPages, $pageSet, 'templatelinks', $fields, $joinCond );
}
445
/**
 * Validate the requested link-recursion depth against site configuration.
 *
 * @param $depth int requested depth (may be null when not supplied)
 * @return int sanitized depth
 */
private function validateLinkDepth( $depth ) {
	global $wgExportMaxLinkDepth;

	if ( $depth < 0 ) {
		return 0;
	}

	// Ordinary users are clamped to the configured maximum
	if ( !$this->userCanOverrideExportDepth() && $depth > $wgExportMaxLinkDepth ) {
		return $wgExportMaxLinkDepth;
	}

	/*
	 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
	 * crazy-big export from being done by someone setting the depth
	 * number too high. In other words, last resort safety net.
	 */
	return intval( min( $depth, 5 ) );
}
471
/**
 * Expand a list of pages to include pages linked to from those pages,
 * repeating the lookup $depth times.
 *
 * @param $inputPages array
 * @param $pageSet array
 * @param $depth int
 * @return array
 */
private function getPageLinks( $inputPages, $pageSet, $depth ) {
	while ( $depth > 0 ) {
		$pageSet = $this->getLinks(
			$inputPages, $pageSet, 'pagelinks',
			array( 'pl_namespace AS namespace', 'pl_title AS title' ),
			array( 'page_id=pl_from' )
		);
		// Each pass starts from everything found so far
		$inputPages = array_keys( $pageSet );
		$depth--;
	}

	return $pageSet;
}
491
/**
 * Expand a list of pages to include images used in those pages.
 *
 * @param $inputPages array, list of titles to look up
 * @param $pageSet array, associative array indexed by titles for output
 * @return array associative array indexed by titles
 */
private function getImages( $inputPages, $pageSet ) {
	// imagelinks rows carry no namespace column; files are always NS_FILE
	$fields = array( NS_FILE . ' AS namespace', 'il_to AS title' );
	$joinCond = array( 'page_id=il_from' );

	return $this->getLinks( $inputPages, $pageSet, 'imagelinks', $fields, $joinCond );
}
509
/**
 * Expand a list of pages to include items used in those pages, by joining
 * the given link table against the page table.
 *
 * @param $inputPages array of page names to look up
 * @param $pageSet array, associative array indexed by titles for output
 * @param $table string link table name
 * @param $fields array SELECT fields (aliased to 'namespace' and 'title')
 * @param $join array join condition linking the table to page_id
 * @return array associative array indexed by titles
 */
private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
	$dbr = wfGetDB( DB_SLAVE );

	foreach ( $inputPages as $pageName ) {
		$title = Title::newFromText( $pageName );
		if ( !$title ) {
			continue;
		}

		$pageSet[$title->getPrefixedText()] = true;

		/// @todo FIXME: May or may not be more efficient to batch these
		/// by namespace when given multiple input pages.
		$conds = $join;
		$conds['page_namespace'] = $title->getNamespace();
		$conds['page_title'] = $title->getDBkey();

		$result = $dbr->select(
			array( 'page', $table ),
			$fields,
			$conds,
			__METHOD__
		);

		foreach ( $result as $row ) {
			$linked = Title::makeTitle( $row->namespace, $row->title );
			$pageSet[$linked->getPrefixedText()] = true;
		}
	}

	return $pageSet;
}
545
546 }