[lhc/web/wiklou.git] / includes / specials / SpecialListDuplicatedFiles.php
<?php
/**
 * Implements Special:ListDuplicatedFiles
 *
 * Copyright © 2013 Brian Wolff
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License as published by
 * the Free Software Foundation; either version 2 of the License, or
 * (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License along
 * with this program; if not, write to the Free Software Foundation, Inc.,
 * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
 * http://www.gnu.org/copyleft/gpl.html
 *
 * @file
 * @ingroup SpecialPage
 * @author Brian Wolff
 */

/**
 * Special:ListDuplicatedFiles lists all files where the current version is
 * a duplicate of the current version of some other file.
 * @ingroup SpecialPage
 */
class ListDuplicatedFilesPage extends QueryPage {
	public function __construct( $name = 'ListDuplicatedFiles' ) {
		parent::__construct( $name );
	}

	public function isExpensive() {
		return true;
	}

	public function isSyndicated() {
		return false;
	}

	/**
	 * Get all the duplicates by grouping on sha1s.
	 *
	 * A cheaper (but less useful) version of this query would be to ignore
	 * how many duplicates a particular file has and simply do a self-join
	 * on the image table. However, this version should be no more expensive
	 * than Special:MostLinked, which is handled fine by the existing caching
	 * of expensive special pages.
	 * @return array
	 */
	public function getQueryInfo() {
		return [
			'tables' => [ 'image' ],
			'fields' => [
				'namespace' => NS_FILE,
				'title' => 'MIN(img_name)',
				'value' => 'count(*)'
			],
			'options' => [
				'GROUP BY' => 'img_sha1',
				'HAVING' => 'count(*) > 1',
			],
		];
	}
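
	// For reference, a sketch of the SQL the query info above roughly
	// produces (6 is the value of NS_FILE; the actual statement is built by
	// the database abstraction layer and wrapped by the QueryPage caching
	// machinery, so table prefix, quoting, ordering and limits will differ):
	//
	//   SELECT 6 AS namespace, MIN(img_name) AS title, count(*) AS value
	//   FROM image
	//   GROUP BY img_sha1
	//   HAVING count(*) > 1;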

	/**
	 * Pre-fill the link cache, so formatting the results does not require
	 * one existence lookup per row.
	 *
	 * @param IDatabase $db
	 * @param ResultWrapper $res
	 */
	public function preprocessResults( $db, $res ) {
		if ( $res->numRows() > 0 ) {
			$linkBatch = new LinkBatch();

			foreach ( $res as $row ) {
				$linkBatch->add( $row->namespace, $row->title );
			}

			// Rewind the result so the formatting pass can iterate it again
			$res->seek( 0 );
			$linkBatch->execute();
		}
	}

	/**
	 * @param Skin $skin
	 * @param object $result Result row
	 * @return string
	 */
	public function formatResult( $skin, $result ) {
		// A future version might include a list of the first 5 duplicates,
		// perhaps separated by an "↔".
		$image1 = Title::makeTitle( $result->namespace, $result->title );
		$dupeSearch = SpecialPage::getTitleFor( 'FileDuplicateSearch', $image1->getDBkey() );

		$msg = $this->msg( 'listduplicatedfiles-entry' )
			->params( $image1->getText() )
			->numParams( $result->value - 1 )
			->params( $dupeSearch->getPrefixedDBkey() );

		return $msg->parse();
	}
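
	// For illustration: the entry built above hands three parameters to the
	// "listduplicatedfiles-entry" i18n message: the file's display title,
	// the number of other files sharing its sha1 (the count minus one), and
	// the Special:FileDuplicateSearch subpage to link to. The rendered text
	// depends on the wiki's translation of that message.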

	protected function getGroupName() {
		return 'media';
	}
}