Merge "Add a test for mixed /*@noflip*/ and /*@embed*/ CSS annotations"
includes/specials/SpecialListDuplicatedFiles.php
<?php
/**
 * Implements Special:ListDuplicatedFiles
 *
 * Copyright © 2013 Brian Wolff
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup SpecialPage
 * @author Brian Wolff
 */

/**
 * Special:ListDuplicatedFiles lists all files where the current version is
 * a duplicate of the current version of some other file.
 * @ingroup SpecialPage
 */
class ListDuplicatedFilesPage extends QueryPage {
	function __construct( $name = 'ListDuplicatedFiles' ) {
		parent::__construct( $name );
	}

	function isExpensive() {
		return true;
	}

	function isSyndicated() {
		return false;
	}

	/**
	 * Get all the duplicates by grouping on sha1s.
	 *
	 * A cheaper (but less useful) version of this query would be to
	 * ignore how many duplicates a particular file has and simply do
	 * a self-join on the image table. However, this version should be
	 * no more expensive than Special:MostLinked, which the current
	 * cached-special-page mechanism seems to handle fine. A rough SQL
	 * equivalent is sketched after this method.
	 * @return array
	 */
	function getQueryInfo() {
		return array(
			'tables' => array( 'image' ),
			'fields' => array(
				'namespace' => NS_FILE,
				'title' => 'MIN(img_name)',
				'value' => 'COUNT(*)'
			),
			'options' => array(
				'GROUP BY' => 'img_sha1',
				'HAVING' => 'COUNT(*) > 1',
			),
		);
	}
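
	// Illustrative sketch of roughly the SQL the query info above produces,
	// assuming a MySQL-style backend with no table prefix; the real statement
	// is assembled by the Database layer, and QueryPage normally appends its
	// own ORDER BY and LIMIT when running it. NS_FILE is 6, so every result
	// row is forced into the File namespace:
	//
	//   SELECT 6 AS namespace, MIN(img_name) AS title, COUNT(*) AS value
	//   FROM image
	//   GROUP BY img_sha1
	//   HAVING COUNT(*) > 1;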

	/**
	 * Pre-fill the link cache
	 *
	 * @param DatabaseBase $db
	 * @param ResultWrapper $res
	 */
	function preprocessResults( $db, $res ) {
		if ( $res->numRows() > 0 ) {
			$linkBatch = new LinkBatch();

			foreach ( $res as $row ) {
				$linkBatch->add( $row->namespace, $row->title );
			}

			$res->seek( 0 );
			$linkBatch->execute();
		}
	}
	/**
	 * @param Skin $skin
	 * @param object $result Result row
	 * @return string
	 */
	function formatResult( $skin, $result ) {
		// A future version might include a list of the first 5 duplicates,
		// perhaps separated by an "↔".
		$image1 = Title::makeTitle( $result->namespace, $result->title );
		$dupeSearch = SpecialPage::getTitleFor( 'FileDuplicateSearch', $image1->getDBKey() );

		// Message parameters: $1 = file title text, $2 = number of other
		// files sharing the same hash, $3 = FileDuplicateSearch link target.
		$msg = $this->msg( 'listduplicatedfiles-entry' )
			->params( $image1->getText() )
			->numParams( $result->value - 1 )
			->params( $dupeSearch->getPrefixedDBKey() );

		return $msg->parse();
	}

	protected function getGroupName() {
		return 'media';
	}
}
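
// Usage sketch, assuming a standard MediaWiki maintenance setup: because
// isExpensive() returns true, wikis running with $wgMiserMode serve this page
// from cached querycache rows rather than running the query live; the cache
// is typically regenerated with something like:
//
//   php maintenance/updateSpecialPages.php --only=ListDuplicatedFiles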