<?php
/**
 * Copyright (C) 2003-2008 Brion Vibber <brion@pobox.com>
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 */

/**
 * @file
 * @ingroup SpecialPage
 */

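/**
 * A special page that lets users export pages, optionally with their revision
 * history, as an XML dump (Special:Export).
 */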
class SpecialExport extends SpecialPage {

	private $curonly, $doExport, $pageLinkDepth, $templates;
	private $images;

	public function __construct() {
		parent::__construct( 'Export' );
	}

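	/**
	 * Entry point: read the request, stream the export XML if an export was
	 * requested, otherwise render the export form.
	 *
	 * @param $par String or null: page name passed as a subpage, e.g. "Foo"
	 *        for Special:Export/Foo
	 */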
	public function execute( $par ) {
		global $wgOut, $wgRequest, $wgSitename, $wgExportAllowListContributors;
		global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;
		global $wgExportFromNamespaces, $wgUser;

		$this->setHeaders();
		$this->outputHeader();

		// Set some variables
		$this->curonly = true;
		$this->doExport = false;
		$this->templates = $wgRequest->getCheck( 'templates' );
		$this->images = $wgRequest->getCheck( 'images' ); // Doesn't do anything yet
		$this->pageLinkDepth = $this->validateLinkDepth(
			$wgRequest->getIntOrNull( 'pagelink-depth' )
		);
		$nsindex = '';

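		// Work out what was requested: adding a category or namespace listing
		// to the page list, a posted export request, or a plain GET (where the
		// page list may come from the subpage title).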
		if ( $wgRequest->getCheck( 'addcat' ) ) {
			$page = $wgRequest->getText( 'pages' );
			$catname = $wgRequest->getText( 'catname' );

			if ( $catname !== '' && $catname !== null && $catname !== false ) {
				$t = Title::makeTitleSafe( NS_MAIN, $catname );
				if ( $t ) {
					/**
					 * @todo Fixme: this can lead to hitting the memory limit for very
					 * large categories. Ideally we would do the lookup synchronously
					 * during the export in a single query.
					 */
					$catpages = $this->getPagesFromCategory( $t );
					if ( $catpages ) $page .= "\n" . implode( "\n", $catpages );
				}
			}
		}
		else if( $wgRequest->getCheck( 'addns' ) && $wgExportFromNamespaces ) {
			$page = $wgRequest->getText( 'pages' );
			$nsindex = $wgRequest->getText( 'nsindex', '' );

			if ( strval( $nsindex ) !== '' ) {
				/**
				 * Same implementation as above, so same @todo applies.
				 */
				$nspages = $this->getPagesFromNamespace( $nsindex );
				if ( $nspages ) $page .= "\n" . implode( "\n", $nspages );
			}
		}
		else if( $wgRequest->wasPosted() && $par == '' ) {
			$page = $wgRequest->getText( 'pages' );
			$this->curonly = $wgRequest->getCheck( 'curonly' );
			$rawOffset = $wgRequest->getVal( 'offset' );

			if( $rawOffset ) {
				$offset = wfTimestamp( TS_MW, $rawOffset );
			} else {
				$offset = null;
			}

			$limit = $wgRequest->getInt( 'limit' );
			$dir = $wgRequest->getVal( 'dir' );
			$history = array(
				'dir' => 'asc',
				'offset' => false,
				'limit' => $wgExportMaxHistory,
			);
			$historyCheck = $wgRequest->getCheck( 'history' );

			if ( $this->curonly ) {
				$history = WikiExporter::CURRENT;
			} elseif ( !$historyCheck ) {
				if ( $limit > 0 && ( $wgExportMaxHistory == 0 || $limit < $wgExportMaxHistory ) ) {
					$history['limit'] = $limit;
				}
				if ( !is_null( $offset ) ) {
					$history['offset'] = $offset;
				}
				if ( strtolower( $dir ) == 'desc' ) {
					$history['dir'] = 'desc';
				}
			}

			if( $page != '' ) $this->doExport = true;
		} else {
			// Default to current-only for GET requests.
			$page = $wgRequest->getText( 'pages', $par );
			$historyCheck = $wgRequest->getCheck( 'history' );

			if( $historyCheck ) {
				$history = WikiExporter::FULL;
			} else {
				$history = WikiExporter::CURRENT;
			}

			if( $page != '' ) $this->doExport = true;
		}

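		// For export requests, $history is now either a WikiExporter constant
		// (CURRENT/FULL) or an associative array describing a bounded history
		// query; it is only used when an export was actually requested.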
		if( !$wgExportAllowHistory ) {
			// Override: this wiki does not allow full-history exports
			$history = WikiExporter::CURRENT;
		}

		$list_authors = $wgRequest->getCheck( 'listauthors' );
		if ( !$this->curonly || !$wgExportAllowListContributors ) $list_authors = false;

		if ( $this->doExport ) {
			$wgOut->disable();

			// Cancel output buffering and gzipping if set
			// This should provide safer streaming for pages with history
			wfResetOutputBuffers();
			$wgRequest->response()->header( "Content-type: application/xml; charset=utf-8" );

			if( $wgRequest->getCheck( 'wpDownload' ) ) {
				// Provide a sane filename suggestion
				$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
				$wgRequest->response()->header( "Content-disposition: attachment;filename={$filename}" );
			}

			$this->doExport( $page, $history, $list_authors );

			return;
		}

		$wgOut->addWikiMsg( 'exporttext' );

		$form = Xml::openElement( 'form', array( 'method' => 'post',
			'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
		$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ), 'catname', 'catname', 40 ) . '&#160;';
		$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';

		if ( $wgExportFromNamespaces ) {
			$form .= Xml::namespaceSelector( $nsindex, null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&#160;';
			$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';
		}

		$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
		$form .= '<br />';

		if( $wgExportAllowHistory ) {
			$form .= Xml::checkLabel( wfMsg( 'exportcuronly' ), 'curonly', 'curonly', true ) . '<br />';
		} else {
			$wgOut->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
		}

		$form .= Xml::checkLabel( wfMsg( 'export-templates' ), 'templates', 'wpExportTemplates', false ) . '<br />';

		if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
			$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
		}
		// Enable this when we can do something useful exporting/importing image information. :)
		// $form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
		$form .= Xml::checkLabel( wfMsg( 'export-download' ), 'wpDownload', 'wpDownload', true ) . '<br />';

		$form .= Xml::submitButton( wfMsg( 'export-submit' ), $wgUser->getSkin()->tooltipAndAccessKeyAttribs( 'export' ) );
		$form .= Xml::closeElement( 'form' );

		$wgOut->addHTML( $form );
	}

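	/**
	 * Whether the current user may exceed the configured maximum page link
	 * depth ($wgExportMaxLinkDepth).
	 *
	 * @return bool
	 */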
	private function userCanOverrideExportDepth() {
		global $wgUser;
		return $wgUser->isAllowed( 'override-export-depth' );
	}

	/**
	 * Do the actual page exporting.
	 *
	 * @param $page String: user input on what page(s) to export
	 * @param $history Mixed: one of the WikiExporter history export constants,
	 *        or an associative array with 'dir', 'offset' and 'limit' keys for
	 *        a bounded history export
	 * @param $list_authors Boolean: whether to add a distinct author list (when
	 *        not returning full history)
	 */
	private function doExport( $page, $history, $list_authors ) {
		$pageSet = array(); // Inverted index of all pages to look up

		// Split up and normalize input
		foreach( explode( "\n", $page ) as $pageName ) {
			$pageName = trim( $pageName );
			$title = Title::newFromText( $pageName );
			if( $title && $title->getInterwiki() == '' && $title->getText() !== '' ) {
				// Only record each page once!
				$pageSet[$title->getPrefixedText()] = true;
			}
		}

		// Set of original pages to pass on to further manipulation...
		$inputPages = array_keys( $pageSet );

		// Look up any linked pages if asked...
		if( $this->templates ) {
			$pageSet = $this->getTemplates( $inputPages, $pageSet );
		}

		if( $linkDepth = $this->pageLinkDepth ) {
			$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
		}

		/*
		// Enable this when we can do something useful exporting/importing image information. :)
		if( $this->images ) {
			$pageSet = $this->getImages( $inputPages, $pageSet );
		}
		*/

		$pages = array_keys( $pageSet );

		// Normalize titles to the same format and remove dupes, see bug 17374
		foreach( $pages as $k => $v ) {
			$pages[$k] = str_replace( " ", "_", $v );
		}

		$pages = array_unique( $pages );

		/* Ok, let's get to it... */
		if( $history == WikiExporter::CURRENT ) {
			$lb = false;
			$db = wfGetDB( DB_SLAVE );
			$buffer = WikiExporter::BUFFER;
		} else {
			// Use an unbuffered query; histories may be very long!
			$lb = wfGetLBFactory()->newMainLB();
			$db = $lb->getConnection( DB_SLAVE );
			$buffer = WikiExporter::STREAM;

			// This might take a while... :D
			wfSuppressWarnings();
			set_time_limit( 0 );
			wfRestoreWarnings();
		}

		$exporter = new WikiExporter( $db, $history, $buffer );
		$exporter->list_authors = $list_authors;
		$exporter->openStream();

		foreach( $pages as $page ) {
			/*
			if( $wgExportMaxHistory && !$this->curonly ) {
				$title = Title::newFromText( $page );
				if( $title ) {
					$count = Revision::countByTitle( $db, $title );
					if( $count > $wgExportMaxHistory ) {
						wfDebug( __FUNCTION__ .
							": Skipped $page, $count revisions too big\n" );
						continue;
					}
				}
			}
			*/
			# Bug 8824: Only export pages the user can read
			$title = Title::newFromText( $page );
			if( is_null( $title ) ) continue; # TODO: perhaps output an <error> tag or something.
			if( !$title->userCanRead() ) continue; # TODO: perhaps output an <error> tag or something.

			$exporter->pageByTitle( $title );
		}

		$exporter->closeStream();

		if( $lb ) {
			$lb->closeAll();
		}
	}

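	/**
	 * Return a list of page names (prefixed with their namespace text where
	 * applicable) belonging to the given category, capped at 5000 entries.
	 *
	 * @param $title Title: the category title
	 * @return array of String: page names
	 */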
	private function getPagesFromCategory( $title ) {
		global $wgContLang;

		$name = $title->getDBkey();

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			array( 'page', 'categorylinks' ),
			array( 'page_namespace', 'page_title' ),
			array( 'cl_from=page_id', 'cl_to' => $name ),
			__METHOD__,
			array( 'LIMIT' => '5000' )
		);

		$pages = array();

		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		return $pages;
	}

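	/**
	 * Return a list of page names (prefixed with their namespace text where
	 * applicable) in the given namespace, capped at 5000 entries.
	 *
	 * @param $nsindex Integer: namespace index
	 * @return array of String: page names
	 */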
	private function getPagesFromNamespace( $nsindex ) {
		global $wgContLang;

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select(
			'page',
			array( 'page_namespace', 'page_title' ),
			array( 'page_namespace' => $nsindex ),
			__METHOD__,
			array( 'LIMIT' => '5000' )
		);

		$pages = array();

		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;

			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		return $pages;
	}

	/**
	 * Expand a list of pages to include templates used in those pages.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getTemplates( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'templatelinks',
			array( 'tl_namespace AS namespace', 'tl_title AS title' ),
			array( 'page_id=tl_from' )
		);
	}

	/**
	 * Validate the requested page link depth: clamp it to the configured
	 * maximum ($wgExportMaxLinkDepth) unless the user may override it, and to
	 * a hard-coded upper bound in any case.
	 *
	 * @param $depth Integer: requested link depth
	 * @return Integer: validated link depth
	 */
	private function validateLinkDepth( $depth ) {
		global $wgExportMaxLinkDepth;

		if( $depth < 0 ) {
			return 0;
		}

		if ( !$this->userCanOverrideExportDepth() ) {
			if( $depth > $wgExportMaxLinkDepth ) {
				return $wgExportMaxLinkDepth;
			}
		}

		/*
		 * There is a HARD CODED limit of 5 levels of recursion here to prevent
		 * a crazy-big export from being done by someone setting the depth
		 * number too high. In other words, a last-resort safety net.
		 */
		return intval( min( $depth, 5 ) );
	}

	/** Expand a list of pages to include pages linked to from those pages, recursing up to $depth levels. */
	private function getPageLinks( $inputPages, $pageSet, $depth ) {
		for( ; $depth > 0; --$depth ) {
			$pageSet = $this->getLinks(
				$inputPages, $pageSet, 'pagelinks',
				array( 'pl_namespace AS namespace', 'pl_title AS title' ),
				array( 'page_id=pl_from' )
			);
			$inputPages = array_keys( $pageSet );
		}

		return $pageSet;
	}

	/**
	 * Expand a list of pages to include images used in those pages.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 *
	 * @return array associative array indexed by titles
	 */
	private function getImages( $inputPages, $pageSet ) {
		return $this->getLinks(
			$inputPages,
			$pageSet,
			'imagelinks',
			array( NS_FILE . ' AS namespace', 'il_to AS title' ),
			array( 'page_id=il_from' )
		);
	}

	/**
	 * Expand a list of pages to include items linked from those pages via the
	 * given link table.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @param $table String: name of the link table to query
	 * @param $fields array, fields to select, aliased as 'namespace' and 'title'
	 * @param $join array, join condition between page and the link table
	 * @return array associative array indexed by titles
	 */
	private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
		$dbr = wfGetDB( DB_SLAVE );

		foreach( $inputPages as $page ) {
			$title = Title::newFromText( $page );

			if( $title ) {
				$pageSet[$title->getPrefixedText()] = true;
				/// @todo Fixme: May or may not be more efficient to batch these
				/// by namespace when given multiple input pages.
				$result = $dbr->select(
					array( 'page', $table ),
					$fields,
					array_merge(
						$join,
						array(
							'page_namespace' => $title->getNamespace(),
							'page_title' => $title->getDBkey()
						)
					),
					__METHOD__
				);

				foreach( $result as $row ) {
					$template = Title::makeTitle( $row->namespace, $row->title );
					$pageSet[$template->getPrefixedText()] = true;
				}
			}
		}

		return $pageSet;
	}

}