Rewrite output to create Special:Export format for import
[lhc/web/wiklou.git] / maintenance / importUseModWiki.php
1 <?php
2
3 /**
4 * Import data from a UseModWiki into a MediaWiki wiki
5 * 2003-02-09 Brion VIBBER <brion@pobox.com>
6 * Based loosely on Magnus's code from 2001-2002
7 *
8 * Updated limited version to get something working temporarily
9 * 2003-10-09
10 * Be sure to run the link & index rebuilding scripts!
11 *
12 * Some more munging for charsets etc
13 * 2003-11-28
14 *
15 * Partial fix for pages starting with lowercase letters (??)
16 * and CamelCase and /Subpage link conversion
17 * 2004-11-17
18 *
19 * Rewrite output to create Special:Export format for import
20 * instead of raw SQL. Should be 'future-proof' against future
21 * schema changes.
22 * 2005-03-14
23 *
24 * @todo document
25 * @package MediaWiki
26 * @subpackage Maintenance
27 */
28
# Refuse to run through a web server: this script dies, sets ini options,
# and streams a full XML dump to stdout.
if( php_sapi_name() != 'cli' ) {
	die( "Please customize the settings and run me from the command line." );
}

/** Set these correctly! */
$wgImportEncoding = "CP1252"; /* We convert all to UTF-8 */
$wgRootDirectory = "/kalman/Projects/wiki2002/wiki/lib-http/db/wiki";

/* On a large wiki, you might run out of memory */
@ini_set( 'memory_limit', '40M' );

/* globals */
# UseModWiki serializes records with this separator byte (0xb3); a digit
# suffix marks the nesting level (see splitHash()/fetchPage()).
$wgFieldSeparator = "\xb3"; # Some wikis may use different char
$FS = $wgFieldSeparator ;
$FS1 = $FS."1" ; # top-level page record separator
$FS2 = $FS."2" ; # revision-section separator
$FS3 = $FS."3" ; # text-data separator

# Unicode sanitization tools
require_once( '../includes/normal/UtfNormal.php' );

# username => user id map for already-imported accounts; left empty here,
# so checkUserCache() attributes every revision to user id 0.
$usercache = array();

# Entry point: write a Special:Export-format XML dump to stdout.
importPages();
53
54 # ------------------------------------------------------------------------------
55
/**
 * Walk every UseModWiki page bucket and emit the whole wiki as one
 * Special:Export-format <mediawiki> document on stdout.
 */
function importPages()
{
	global $wgRootDirectory;

	# '?' and '>' are kept apart so naive tools don't mistake the XML
	# prologue for a PHP close tag.
	$gt = '>';
	echo <<<END
<?xml version="1.0" encoding="UTF-8" ?$gt
<mediawiki xmlns="http://www.mediawiki.org/xml/export-0.1/"
           xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
           xsi:schemaLocation="http://www.mediawiki.org/xml/export-0.1/
                               http://www.mediawiki.org/xml/export-0.1.xsd"
           version="0.1">
<!-- generated by importUseModWiki.php -->

END;
	# UseModWiki shards pages into one directory per initial letter,
	# plus "other" for titles that don't start with A-Z.
	$buckets = array_merge( range( 'A', 'Z' ), array( 'other' ) );
	foreach( $buckets as $bucket ) {
		$dir = "$wgRootDirectory/page/$bucket";
		if( is_dir( $dir ) ) {
			importPageDirectory( $dir );
		}
	}
	echo <<<END
</mediawiki>

END;
}
85
/**
 * Import every page database file (*.db) in one directory, recursing
 * into subdirectories (UseModWiki subpages).
 *
 * @param string $dir    absolute path of the directory to scan
 * @param string $prefix title prefix ("Parent/") applied to entries found
 *                       in a subpage directory
 */
function importPageDirectory( $dir, $prefix = "" )
{
	echo "\n<!-- Checking page directory $dir -->\n";
	$mydir = opendir( $dir );
	# Fix: compare against false explicitly -- the old truthiness test
	# would end the loop early on a file named "0".
	while( ( $entry = readdir( $mydir ) ) !== false ) {
		if( preg_match( '/^(.+)\.db$/', $entry, $m ) ) {
			# A page database file: import it and echo the resulting XML.
			echo importPage( $prefix . $m[1] );
		} else {
			if( is_dir( "$dir/$entry" ) ) {
				if( $entry != '.' && $entry != '..' ) {
					# Subpage directory; recurse with its name as prefix.
					# NOTE(review): deeper nesting would drop $prefix here;
					# UseModWiki subpages are presumably one level deep.
					importPageDirectory( "$dir/$entry", "$entry/" );
				}
			} else {
				echo "<!-- File '" . xmlCommentSafe( $entry ) . "' doesn't seem to contain an article. Skipping. -->\n";
			}
		}
	}
	# Fix: the directory handle was previously leaked.
	closedir( $mydir );
}
104
105
106 # ------------------------------------------------------------------------------
107
108 /* fetch_ functions
109 Grab a given item from the database
110 */
111
/**
 * Map a page title to its UseModWiki storage bucket: "X/Title" for
 * titles starting with a letter (uppercased), "other/Title" otherwise.
 */
function useModFilename( $title ) {
	$first = substr( $title, 0, 1 );
	if( !preg_match( '/[A-Z]/i', $first ) ) {
		return "other/$title";
	}
	return strtoupper( $first ) . "/$title";
}
119
/**
 * Load the current revision of a page from the UseModWiki database.
 * Dies if the page file is missing. Returns an object with text,
 * summary, minor, ts, username and host fields.
 */
function fetchPage( $title )
{
	global $FS,$FS1,$FS2,$FS3, $wgRootDirectory;

	$path = $wgRootDirectory . "/page/" . useModFilename( $title ) . ".db";
	if( !file_exists( $path ) ) {
		die( "Couldn't open file '$path' for page '$title'.\n" );
	}

	# Peel off the three nesting levels of the serialized record.
	$fields  = splitHash( $FS1, file_get_contents( $path ) );
	$section = splitHash( $FS2, $fields["text_default"] );
	$data    = splitHash( $FS3, $section["data"] );

	return array2object( array(
		"text"     => $data["text"],
		"summary"  => $data["summary"],
		"minor"    => $data["minor"],
		"ts"       => $section["ts"],
		"username" => $section["username"],
		"host"     => $section["host"],
	) );
}
137
/**
 * Load the archived old revisions ("kept pages") for a title.
 * Returns an array of revision objects (possibly empty); revisions
 * missing text, a minor flag, or a positive timestamp are skipped.
 */
function fetchKeptPages( $title )
{
	global $FS,$FS1,$FS2,$FS3, $wgRootDirectory, $wgTimezoneCorrection;

	$fname = $wgRootDirectory . "/keep/" . useModFilename( $title ) . ".kp";
	if( !file_exists( $fname ) ) return array();

	$chunks = explode( $FS1, file_get_contents( $fname ) );
	array_shift( $chunks ); # Drop the junk at beginning of file

	$revisions = array();
	foreach( $chunks as $chunk ) {
		$section = splitHash( $FS2, $chunk );
		$text = splitHash( $FS3, $section["data"] );
		$usable = $text["text"] && $text["minor"] != "" && ( $section["ts"]*1 > 0 );
		if( !$usable ) {
			echo "-- skipped a bad old revision\n";
			continue;
		}
		$revisions[] = array2object( array(
			"text"     => $text["text"],
			"summary"  => $text["summary"],
			"minor"    => $text["minor"],
			"ts"       => $section["ts"],
			"username" => $section["username"],
			"host"     => $section["host"],
		) );
	}
	return $revisions;
}
162
/**
 * Decode a UseModWiki serialized hash: a flat separator-delimited list
 * of key1, val1, key2, val2, ... A trailing key with no value is
 * silently dropped (matching the original behaviour).
 */
function splitHash( $sep, $str ) {
	$pieces = explode( $sep, $str );
	$hash = array();
	$n = count( $pieces );
	for( $i = 0; $i + 1 < $n; $i += 2 ) {
		$hash[ $pieces[$i] ] = $pieces[ $i + 1 ];
	}
	return $hash;
}
171
172
173 /* import_ functions
174 Take a fetched item and produce SQL
175 */
176
/**
 * Resolve a UseModWiki revision author to a (user id, display name) pair.
 * Named users get underscores converted to spaces; anonymous edits fall
 * back to the recorded host with user id 0.
 *
 * @param string $name UseModWiki username ('' / null for anonymous)
 * @param string $host originating host/IP of the edit
 * @return array array( int $userid, string $username )
 */
function checkUserCache( $name, $host )
{
	global $usercache;

	if( $name ) {
		# Fix: in_array() searched the cache's *values* (user ids), but
		# the cache is keyed by name -- lookups could never hit. Test
		# for the key instead.
		if( isset( $usercache[$name] ) ) {
			$userid = $usercache[$name];
		} else {
			# If we haven't imported user accounts
			$userid = 0;
		}
		$username = str_replace( '_', ' ', $name );
	} else {
		$userid = 0;
		$username = $host;
	}
	return array( $userid, $username );
}
195
/**
 * Convert one UseModWiki page (current text plus kept revisions) into a
 * Special:Export <page> XML fragment.
 *
 * If link munging changes the current text, the munged version is added
 * as a brand-new "Conversion script" revision so the original stays in
 * the history. Progress markers are echoed as XML comments.
 *
 * @param string $title UseModWiki page title (with subpage prefix)
 * @return string XML <page> fragment, or '' if there is nothing to emit
 */
function importPage( $title )
{
	global $usercache;

	echo "\n<!-- Importing page " . xmlCommentSafe( $title ) . " -->\n";
	$page = fetchPage( $title );

	$newtitle = xmlsafe( str_replace( '_', ' ', recodeText( $title ) ) );

	$munged = mungeFormat( $page->text );
	if( $munged != $page->text ) {
		/**
		 * Save a *new* revision with the conversion, and put the
		 * previous last version into the history.
		 */
		$next = array2object( array(
			'text' => $munged,
			'minor' => 1,
			'username' => 'Conversion script',
			'host' => '127.0.0.1',
			'ts' => time(),
			'summary' => 'link fix',
		) );
		$revisions = array( $page, $next );
	} else {
		/**
		 * Current revision:
		 */
		$revisions = array( $page );
	}
	$xml = <<<END
  <page>
    <title>$newtitle</title>

END;

	# History
	$revisions = array_merge( $revisions, fetchKeptPages( $title ) );
	if( count( $revisions ) == 0 ) {
		# Fix: this previously returned the undefined variable $sql, a
		# leftover from the raw-SQL output mode. Return an empty string
		# so the caller simply echoes nothing.
		return '';
	}

	foreach( $revisions as $rev ) {
		$text = xmlsafe( recodeText( $rev->text ) );
		$minor = ($rev->minor ? '<minor/>' : '');
		list( $userid, $username ) = checkUserCache( $rev->username, $rev->host );
		$username = xmlsafe( recodeText( $username ) );
		$timestamp = xmlsafe( timestamp2ISO8601( $rev->ts ) );
		$comment = xmlsafe( recodeText( $rev->summary ) );

		$xml .= <<<END
    <revision>
      <timestamp>$timestamp</timestamp>
      <contributor><username>$username</username></contributor>
      <comment>$comment</comment>
      $minor
      <text>$text</text>
    </revision>

END;
	}
	$xml .= "</page>\n\n";
	return $xml;
}
260
261 # Whee!
/**
 * Convert legacy page text to UTF-8: normalize CRLF line endings,
 * transcode from the configured source charset, then expand any old
 * &#1234; numeric entities.
 */
function recodeText( $string ) {
	global $wgImportEncoding;
	# For currently latin-1 wikis
	$unix = str_replace( "\r\n", "\n", $string );
	$utf8 = @iconv( $wgImportEncoding, "UTF-8", $unix );
	return wfMungeToUtf8( $utf8 ); # Any old &#1234; stuff
}
270
/**
 * Encode a Unicode code point as a UTF-8 byte sequence.
 * Code points beyond the Unicode maximum (U+10FFFF) are left as a
 * numeric character reference.
 *
 * @param int $codepoint Unicode code point
 * @return string UTF-8 bytes, or "&#N;" if out of range
 */
function wfUtf8Sequence($codepoint) {
	if($codepoint < 0x80) return chr($codepoint);
	if($codepoint < 0x800) return chr($codepoint >> 6 & 0x3f | 0xc0) .
		chr($codepoint & 0x3f | 0x80);
	if($codepoint < 0x10000) return chr($codepoint >> 12 & 0x0f | 0xe0) .
		chr($codepoint >> 6 & 0x3f | 0x80) .
		chr($codepoint & 0x3f | 0x80);
	# Fix: the old bound of 0x100000 wrongly excluded plane 16
	# (U+100000..U+10FFFF); four-byte UTF-8 covers everything up to
	# the Unicode maximum U+10FFFF (RFC 3629).
	if($codepoint < 0x110000) return chr($codepoint >> 18 & 0x07 | 0xf0) .
		chr($codepoint >> 12 & 0x3f | 0x80) .
		chr($codepoint >> 6 & 0x3f | 0x80) .
		chr($codepoint & 0x3f | 0x80);
	# Beyond Unicode: keep the entity as-is.
	return "&#$codepoint;";
}
285
/**
 * Expand decimal (&#1234;) and hexadecimal (&#x4d2;) numeric character
 * references in a string to their UTF-8 byte sequences.
 *
 * Fix: the old code used the /e (eval) regex modifier, which was
 * deprecated in PHP 5.5 and removed in PHP 7; use callbacks instead,
 * as the rest of this file already does for nowiki stashing.
 */
function wfMungeToUtf8($string) {
	$string = preg_replace_callback( '/&#([0-9]+);/',
		function( $m ) { return wfUtf8Sequence( intval( $m[1], 10 ) ); },
		$string );
	$string = preg_replace_callback( '/&#x([0-9a-f]+);/i',
		function( $m ) { return wfUtf8Sequence( hexdec( $m[1] ) ); },
		$string );
	# Should also do named entities here
	return $string;
}
292
/**
 * Format a Unix timestamp as the ISO 8601 UTC form used by
 * Special:Export, e.g. 2003-08-05T18:30:02Z.
 */
function timestamp2ISO8601( $ts ) {
	return gmdate( 'Y-m-d\TH:i:s\Z', $ts );
}
297
/**
 * Make a string safe for inclusion in XML element content.
 *
 * Legacy pages may hold unnormalized data; invalid UTF-8 sequences or
 * forbidden control characters would make the XML output invalid, so
 * they are scrubbed before markup characters are escaped.
 */
function xmlsafe( $string ) {
	$clean = UtfNormal::cleanUp( $string );
	return htmlspecialchars( $clean );
}
309
/**
 * Escape a string for use inside an XML comment: "--" may not appear
 * within a comment, so defang it after the usual XML escaping.
 */
function xmlCommentSafe( $text ) {
	$escaped = xmlsafe( $text );
	return str_replace( '--', '\\-\\-', $escaped );
}
313
314
/**
 * Convert an associative array into a stdClass object, one public
 * property per key.
 *
 * Fix: the old version seeded the object with (object)0, which left a
 * stray "scalar" property (value 0) on every object it produced; a
 * direct array cast maps exactly the given keys.
 */
function array2object( $arr ) {
	return (object) $arr;
}
322
323
/**
 * Make CamelCase and /Talk links work: wrap bare CamelCase words and
 * /Subpage references in [[...]]. Existing links, URLs and <nowiki>
 * sections are stashed first so they are not double-bracketed, then
 * restored afterwards.
 *
 * @param string $text raw UseModWiki page text
 * @return string text with link syntax converted
 */
function mungeFormat( $text ) {
	global $nowiki;
	$nowiki = array();
	# Stash spans that must not be touched (see nowikiPlaceholder()).
	$staged = preg_replace_callback(
		'/(<nowiki>.*?<\\/nowiki>|(?:http|https|ftp):\\S+|\[\[[^]\\n]+]])/s',
		'nowikiPlaceholder', $text );

	# This is probably not 100% correct, I'm just
	# glancing at the UseModWiki code.
	$upper = "[A-Z]";
	$lower = "[a-z_0-9]";
	$any = "[A-Za-z_0-9]";
	$camel = "(?:$upper+$lower+$upper+$any*)";
	$subpage = "(?:\\/$any+)";
	$substart = "(?:\\/$upper$any*)";

	$munged = preg_replace( "/(?!\\[\\[)($camel$subpage*|$substart$subpage*)\\b(?!\\]\\]|>)/",
		'[[$1]]', $staged );

	# Fix: this used the /e (eval) modifier, removed in PHP 7; restore
	# each stashed span with a callback instead. Also pass the delimiter
	# to preg_quote for correctness.
	$final = preg_replace_callback( '/' . preg_quote( placeholder(), '/' ) . '/s',
		function( $m ) {
			global $nowiki;
			return array_shift( $nowiki );
		},
		$munged );
	return $final;
}
350
351
/**
 * Marker inserted in place of stashed spans during link munging.
 *
 * NOTE: this is the LITERAL text \xffplaceholder\xff (backslash, 'x',
 * 'f', 'f'), NOT a 0xFF byte -- the original used single quotes.
 * mungeFormat() and nowikiPlaceholder() both rely on this exact value.
 */
function placeholder( $x = null ) {
	return "\\xffplaceholder\\xff";
}
355
/**
 * preg_replace_callback hook: stash the matched span on the global
 * $nowiki queue and substitute the placeholder marker; mungeFormat()
 * restores the spans in order afterwards.
 */
function nowikiPlaceholder( $m ) {
	global $nowiki;
	array_push( $nowiki, $m[1] );
	return placeholder();
}
361
362 ?>