<?php
# Copyright (C) 2003-2008 Brion Vibber <brion@pobox.com>
# http://www.mediawiki.org/
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# http://www.gnu.org/copyleft/gpl.html
/**
 * @file
 * @ingroup SpecialPage
 */

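/**
 * A special page that lets users export pages in XML form.
 *
 * @ingroup SpecialPage
 */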
class SpecialExport extends SpecialPage {

	private $curonly, $doExport, $pageLinkDepth, $templates;
	private $images;

	public function __construct() {
		parent::__construct( 'Export' );
	}

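	/**
	 * Entry point: read the request (or the subpage in $par, e.g.
	 * Special:Export/Some_page), then either stream the XML dump
	 * or show the export form.
	 *
	 * @param string $par Optional page name to export, from the subpage title
	 */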
	public function execute( $par ) {
		global $wgOut, $wgRequest, $wgSitename, $wgExportAllowListContributors;
		global $wgExportAllowHistory, $wgExportMaxHistory, $wgExportMaxLinkDepth;

		$this->setHeaders();
		$this->outputHeader();

		// Set some variables
		$this->curonly = true;
		$this->doExport = false;
		$this->templates = $wgRequest->getCheck( 'templates' );
		$this->images = $wgRequest->getCheck( 'images' ); // Doesn't do anything yet
		$this->pageLinkDepth = $this->validateLinkDepth(
			$wgRequest->getIntOrNull( 'pagelink-depth' ) );

		if ( $wgRequest->getCheck( 'addcat' ) ) {
			$page = $wgRequest->getText( 'pages' );
			$catname = $wgRequest->getText( 'catname' );

			if ( $catname !== '' && $catname !== null && $catname !== false ) {
				$t = Title::makeTitleSafe( NS_MAIN, $catname );
				if ( $t ) {
					/**
					 * @fixme This can lead to hitting memory limit for very large
					 * categories. Ideally we would do the lookup synchronously
					 * during the export in a single query.
					 */
					$catpages = $this->getPagesFromCategory( $t );
					if ( $catpages ) $page .= "\n" . implode( "\n", $catpages );
				}
			}
		}
		else if( $wgRequest->getCheck( 'addns' ) ) {
			$page = $wgRequest->getText( 'pages' );
			$nsindex = $wgRequest->getText( 'nsindex' );

			if ( $nsindex !== '' && $nsindex !== null && $nsindex !== false ) {
				/**
				 * Same implementation as above, so same @fixme
				 */
				$nspages = $this->getPagesFromNamespace( $nsindex );
				if ( $nspages ) $page .= "\n" . implode( "\n", $nspages );
			}
		}
		else if( $wgRequest->wasPosted() && $par == '' ) {
			$page = $wgRequest->getText( 'pages' );
			$this->curonly = $wgRequest->getCheck( 'curonly' );
			$rawOffset = $wgRequest->getVal( 'offset' );
			if( $rawOffset ) {
				$offset = wfTimestamp( TS_MW, $rawOffset );
			} else {
				$offset = null;
			}
			$limit = $wgRequest->getInt( 'limit' );
			$dir = $wgRequest->getVal( 'dir' );
			// Default to a full ascending history dump, capped at $wgExportMaxHistory revisions
			$history = array(
				'dir' => 'asc',
				'offset' => false,
				'limit' => $wgExportMaxHistory,
			);
			$historyCheck = $wgRequest->getCheck( 'history' );
			if ( $this->curonly ) {
				$history = WikiExporter::CURRENT;
			} elseif ( !$historyCheck ) {
				if ( $limit > 0 && $limit < $wgExportMaxHistory ) {
					$history['limit'] = $limit;
				}
				if ( !is_null( $offset ) ) {
					$history['offset'] = $offset;
				}
				if ( strtolower( $dir ) == 'desc' ) {
					$history['dir'] = 'desc';
				}
			}

			if( $page != '' ) $this->doExport = true;
		} else {
			// Default to current-only for GET requests
			$page = $wgRequest->getText( 'pages', $par );
			$historyCheck = $wgRequest->getCheck( 'history' );
			if( $historyCheck ) {
				$history = WikiExporter::FULL;
			} else {
				$history = WikiExporter::CURRENT;
			}

			if( $page != '' ) $this->doExport = true;
		}

		if( !$wgExportAllowHistory ) {
			// Override
			$history = WikiExporter::CURRENT;
		}

		$list_authors = $wgRequest->getCheck( 'listauthors' );
		if ( !$this->curonly || !$wgExportAllowListContributors ) $list_authors = false;

		if ( $this->doExport ) {
			$wgOut->disable();
			// Cancel output buffering and gzipping if set
			// This should provide safer streaming for pages with history
			wfResetOutputBuffers();
			header( "Content-type: application/xml; charset=utf-8" );
			if( $wgRequest->getCheck( 'wpDownload' ) ) {
				// Provide a sane filename suggestion
				$filename = urlencode( $wgSitename . '-' . wfTimestampNow() . '.xml' );
				$wgRequest->response()->header( "Content-disposition: attachment;filename={$filename}" );
			}
			$this->doExport( $page, $history, $list_authors );
			return;
		}


		$wgOut->addWikiMsg( 'exporttext' );

		$form = Xml::openElement( 'form', array( 'method' => 'post',
			'action' => $this->getTitle()->getLocalUrl( 'action=submit' ) ) );
		$form .= Xml::inputLabel( wfMsg( 'export-addcattext' ), 'catname', 'catname', 40 ) . '&nbsp;';
		$form .= Xml::submitButton( wfMsg( 'export-addcat' ), array( 'name' => 'addcat' ) ) . '<br />';

		$form .= Xml::namespaceSelector( '', null, 'nsindex', wfMsg( 'export-addnstext' ) ) . '&nbsp;';
		$form .= Xml::submitButton( wfMsg( 'export-addns' ), array( 'name' => 'addns' ) ) . '<br />';

		$form .= Xml::element( 'textarea', array( 'name' => 'pages', 'cols' => 40, 'rows' => 10 ), $page, false );
		$form .= '<br />';

		if( $wgExportAllowHistory ) {
			$form .= Xml::checkLabel( wfMsg( 'exportcuronly' ), 'curonly', 'curonly', true ) . '<br />';
		} else {
			$wgOut->addHTML( wfMsgExt( 'exportnohistory', 'parse' ) );
		}
		$form .= Xml::checkLabel( wfMsg( 'export-templates' ), 'templates', 'wpExportTemplates', false ) . '<br />';
		if( $wgExportMaxLinkDepth || $this->userCanOverrideExportDepth() ) {
			$form .= Xml::inputLabel( wfMsg( 'export-pagelinks' ), 'pagelink-depth', 'pagelink-depth', 20, 0 ) . '<br />';
		}
		// Enable this when we can do something useful exporting/importing image information. :)
		//$form .= Xml::checkLabel( wfMsg( 'export-images' ), 'images', 'wpExportImages', false ) . '<br />';
		$form .= Xml::checkLabel( wfMsg( 'export-download' ), 'wpDownload', 'wpDownload', true ) . '<br />';

		$form .= Xml::submitButton( wfMsg( 'export-submit' ), array( 'accesskey' => 's' ) );
		$form .= Xml::closeElement( 'form' );
		$wgOut->addHTML( $form );
	}

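	/**
	 * Whether the current user may exceed the configured page link
	 * depth limit (the 'override-export-depth' right).
	 *
	 * @return bool
	 */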
	private function userCanOverrideExportDepth() {
		global $wgUser;

		return $wgUser->isAllowed( 'override-export-depth' );
	}

	/**
	 * Do the actual page exporting
	 *
	 * @param string $page User input on what page(s) to export
	 * @param mixed $history One of the WikiExporter history export constants,
	 *   or an associative array with 'dir', 'offset' and 'limit' keys as built in execute()
	 * @param bool $list_authors Whether to add a list of contributors to the output
	 */
	private function doExport( $page, $history, $list_authors ) {
		global $wgExportMaxHistory;

		/* Split up the input and look up linked pages */
		$inputPages = array_filter( explode( "\n", $page ), array( $this, 'filterPage' ) );
		$pageSet = array_flip( $inputPages );

		if( $this->templates ) {
			$pageSet = $this->getTemplates( $inputPages, $pageSet );
		}

		if( $linkDepth = $this->pageLinkDepth ) {
			$pageSet = $this->getPageLinks( $inputPages, $pageSet, $linkDepth );
		}

		/*
		// Enable this when we can do something useful exporting/importing image information. :)
		if( $this->images ) {
			$pageSet = $this->getImages( $inputPages, $pageSet );
		}
		*/

		$pages = array_keys( $pageSet );

		/* Ok, let's get to it... */
		if( $history == WikiExporter::CURRENT ) {
			$lb = false;
			$db = wfGetDB( DB_SLAVE );
			$buffer = WikiExporter::BUFFER;
		} else {
			// Use an unbuffered query; histories may be very long!
			$lb = wfGetLBFactory()->newMainLB();
			$db = $lb->getConnection( DB_SLAVE );
			$buffer = WikiExporter::STREAM;

			// This might take a while... :D
			wfSuppressWarnings();
			set_time_limit( 0 );
			wfRestoreWarnings();
		}
		$exporter = new WikiExporter( $db, $history, $buffer );
		$exporter->list_authors = $list_authors;
		$exporter->openStream();
		foreach( $pages as $page ) {
			/*
			if( $wgExportMaxHistory && !$this->curonly ) {
				$title = Title::newFromText( $page );
				if( $title ) {
					$count = Revision::countByTitle( $db, $title );
					if( $count > $wgExportMaxHistory ) {
						wfDebug( __FUNCTION__ .
							": Skipped $page, $count revisions too big\n" );
						continue;
					}
				}
			}*/
			# Bug 8824: Only export pages the user can read
			$title = Title::newFromText( $page );
			if( is_null( $title ) ) continue; # TODO: perhaps output an <error> tag or something.
			if( !$title->userCanRead() ) continue; # TODO: perhaps output an <error> tag or something.

			$exporter->pageByTitle( $title );
		}

		$exporter->closeStream();
		if( $lb ) {
			$lb->closeAll();
		}
	}

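	/**
	 * Expand a category name into the list of pages it directly contains.
	 *
	 * @param Title $title Title whose DB key names the category to look up
	 * @return array of prefixed page names (at most 5000 rows)
	 */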
	private function getPagesFromCategory( $title ) {
		global $wgContLang;

		$name = $title->getDBkey();

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( array( 'page', 'categorylinks' ),
			array( 'page_namespace', 'page_title' ),
			array( 'cl_from=page_id', 'cl_to' => $name ),
			__METHOD__, array( 'LIMIT' => '5000' ) );

		$pages = array();
		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		$dbr->freeResult( $res );

		return $pages;
	}

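	/**
	 * List the pages in a given namespace.
	 *
	 * @param int $nsindex Namespace index
	 * @return array of prefixed page names (at most 5000 rows)
	 */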
	private function getPagesFromNamespace( $nsindex ) {
		global $wgContLang;

		$dbr = wfGetDB( DB_SLAVE );
		$res = $dbr->select( 'page', array( 'page_namespace', 'page_title' ),
			array( 'page_namespace' => $nsindex ),
			__METHOD__, array( 'LIMIT' => '5000' ) );

		$pages = array();
		while ( $row = $dbr->fetchObject( $res ) ) {
			$n = $row->page_title;
			if ( $row->page_namespace ) {
				$ns = $wgContLang->getNsText( $row->page_namespace );
				$n = $ns . ':' . $n;
			}

			$pages[] = $n;
		}
		$dbr->freeResult( $res );

		return $pages;
	}

	/**
	 * Expand a list of pages to include templates used in those pages.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getTemplates( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'templatelinks',
			array( 'tl_namespace AS namespace', 'tl_title AS title' ),
			array( 'page_id=tl_from' ) );
	}

	/**
	 * Validate link depth setting, if available.
	 *
	 * @param $depth int|null, requested depth from the request
	 * @return int sanitized depth
	 */
	private function validateLinkDepth( $depth ) {
		global $wgExportMaxLinkDepth, $wgExportMaxLinkDepthLimit;
		if( $depth < 0 ) {
			return 0;
		}
		if ( !$this->userCanOverrideExportDepth() ) {
			if( $depth > $wgExportMaxLinkDepth ) {
				return $wgExportMaxLinkDepth;
			}
		}
		/*
		 * There's a HARD CODED limit of 5 levels of recursion here to prevent a
		 * crazy-big export from being done by someone setting the depth
		 * number too high. In other words, last resort safety net.
		 */
		return intval( min( $depth, 5 ) );
	}

	/**
	 * Expand a list of pages to include pages linked to from those pages,
	 * following links up to $depth levels deep.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @param $depth int, number of link levels to follow
	 * @return array associative array indexed by titles
	 */
	private function getPageLinks( $inputPages, $pageSet, $depth ) {
		for( ; $depth > 0; --$depth ) {
			$pageSet = $this->getLinks( $inputPages, $pageSet, 'pagelinks',
				array( 'pl_namespace AS namespace', 'pl_title AS title' ),
				array( 'page_id=pl_from' ) );
			// Feed the newly found pages back in so the next pass follows their links too
			$inputPages = array_keys( $pageSet );
		}
		return $pageSet;
	}

	/**
	 * Expand a list of pages to include images used in those pages.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @return array associative array indexed by titles
	 */
	private function getImages( $inputPages, $pageSet ) {
		return $this->getLinks( $inputPages, $pageSet,
			'imagelinks',
			array( NS_FILE . ' AS namespace', 'il_to AS title' ),
			array( 'page_id=il_from' ) );
	}

	/**
	 * Expand a list of pages to include items linked from those pages,
	 * using an arbitrary link table.
	 *
	 * @param $inputPages array, list of titles to look up
	 * @param $pageSet array, associative array indexed by titles for output
	 * @param $table string, name of the link table to query
	 * @param $fields array, select fields aliased to 'namespace' and 'title'
	 * @param $join array, join conditions against the page table
	 * @return array associative array indexed by titles
	 */
	private function getLinks( $inputPages, $pageSet, $table, $fields, $join ) {
		$dbr = wfGetDB( DB_SLAVE );
		foreach( $inputPages as $page ) {
			$title = Title::newFromText( $page );
			if( $title ) {
				$pageSet[$title->getPrefixedText()] = true;
				/// @fixme May or may not be more efficient to batch these
				/// by namespace when given multiple input pages.
				$result = $dbr->select(
					array( 'page', $table ),
					$fields,
					array_merge( $join,
						array(
							'page_namespace' => $title->getNamespace(),
							'page_title' => $title->getDBkey() ) ),
					__METHOD__ );
				foreach( $result as $row ) {
					$template = Title::makeTitle( $row->namespace, $row->title );
					$pageSet[$template->getPrefixedText()] = true;
				}
			}
		}
		return $pageSet;
	}


	/**
	 * Callback function to remove empty strings from the pages array.
	 */
	private function filterPage( $page ) {
		return $page !== '' && $page !== null;
	}
}