Move FileContentsHasher into includes/utils/
author     Kunal Mehta <legoktm@gmail.com>
Fri, 25 Sep 2015 01:27:28 +0000 (18:27 -0700)
committer  Kunal Mehta <legoktm@gmail.com>
Fri, 25 Sep 2015 01:27:28 +0000 (18:27 -0700)
The class has only two dependencies on MediaWiki (ObjectCache &
wfGlobalCacheKey), which makes it suitable for inclusion in the utils
directory.

Change-Id: I85b4c763be2670c40f26d93e75cedcb68eaa7987

autoload.php
includes/FileContentsHasher.php [deleted file]
includes/utils/FileContentsHasher.php [new file with mode: 0644]
tests/phpunit/includes/FileContentsHasherTest.php [deleted file]
tests/phpunit/includes/utils/FileContentsHasherTest.php [new file with mode: 0644]
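
For context, a minimal usage sketch of the class in its new location follows. The call site and file paths are hypothetical examples, not part of this change; the only public entry point is the static FileContentsHasher::getFileContentsHash(), which creates the singleton and the local-server accelerator cache (ObjectCache::newAccelerator( 'hash' )) internally and assumes a bootstrapped MediaWiki environment.

<?php
// Hypothetical call site (example paths only); assumes MediaWiki is loaded.
$single = FileContentsHasher::getFileContentsHash(
    __DIR__ . '/resources/example.js',  // one path ...
    'md4'
);
$combined = FileContentsHasher::getFileContentsHash(
    array( __DIR__ . '/a.css', __DIR__ . '/b.css' ),  // ... or several paths
    'md5'
);
// Each call returns a hex digest string; false is returned when the file
// (or, for the multi-file form, every file) cannot be read.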

diff --git a/autoload.php b/autoload.php
index 9cb5b43..f1b0a6c 100644
--- a/autoload.php
+++ b/autoload.php
@@ -430,7 +430,7 @@ $wgAutoloadLocalClasses = array(
        'FileBackendStoreShardListIterator' => __DIR__ . '/includes/filebackend/FileBackendStore.php',
        'FileBasedSiteLookup' => __DIR__ . '/includes/site/FileBasedSiteLookup.php',
        'FileCacheBase' => __DIR__ . '/includes/cache/FileCacheBase.php',
-       'FileContentsHasher' => __DIR__ . '/includes/FileContentsHasher.php',
+       'FileContentsHasher' => __DIR__ . '/includes/utils/FileContentsHasher.php',
        'FileDeleteForm' => __DIR__ . '/includes/FileDeleteForm.php',
        'FileDependency' => __DIR__ . '/includes/cache/CacheDependency.php',
        'FileDuplicateSearchPage' => __DIR__ . '/includes/specials/SpecialFileDuplicateSearch.php',
diff --git a/includes/FileContentsHasher.php b/includes/FileContentsHasher.php
deleted file mode 100644
index 67eb9d2..0000000
--- a/includes/FileContentsHasher.php
+++ /dev/null
@@ -1,111 +0,0 @@
-<?php
-/**
- * Generate hash digests of file contents to help with cache invalidation.
- *
- * This program is free software; you can redistribute it and/or modify
- * it under the terms of the GNU General Public License as published by
- * the Free Software Foundation; either version 2 of the License, or
- * (at your option) any later version.
- *
- * This program is distributed in the hope that it will be useful,
- * but WITHOUT ANY WARRANTY; without even the implied warranty of
- * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
- * GNU General Public License for more details.
- *
- * You should have received a copy of the GNU General Public License along
- * with this program; if not, write to the Free Software Foundation, Inc.,
- * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
- * http://www.gnu.org/copyleft/gpl.html
- *
- * @file
- */
-class FileContentsHasher {
-
-       /** @var BagOStuff */
-       protected $cache;
-
-       /** @var FileContentsHasher */
-       private static $instance;
-
-       /**
-        * Constructor.
-        */
-       public function __construct() {
-               $this->cache = ObjectCache::newAccelerator( 'hash' );
-       }
-
-       /**
-        * Get the singleton instance of this class.
-        *
-        * @return FileContentsHasher
-        */
-       public static function singleton() {
-               if ( !self::$instance ) {
-                       self::$instance = new self;
-               }
-
-               return self::$instance;
-       }
-
-       /**
-        * Get a hash of a file's contents, either by retrieving a previously-
-        * computed hash from the cache, or by computing a hash from the file.
-        *
-        * @private
-        * @param string $filePath Full path to the file.
-        * @param string $algo Name of selected hashing algorithm.
-        * @return string|bool Hash of file contents, or false if the file could not be read.
-        */
-       public function getFileContentsHashInternal( $filePath, $algo = 'md4' ) {
-               $mtime = MediaWiki\quietCall( 'filemtime', $filePath );
-               if ( $mtime === false ) {
-                       return false;
-               }
-
-               $cacheKey = wfGlobalCacheKey( __CLASS__, $filePath, $mtime, $algo );
-               $hash = $this->cache->get( $cacheKey );
-
-               if ( $hash ) {
-                       return $hash;
-               }
-
-               $contents = MediaWiki\quietCall( 'file_get_contents', $filePath );
-               if ( $contents === false ) {
-                       return false;
-               }
-
-               $hash = hash( $algo, $contents );
-               $this->cache->set( $cacheKey, $hash, 60 * 60 * 24 );  // 24h
-
-               return $hash;
-       }
-
-       /**
-        * Get a hash of the combined contents of one or more files, either by
-        * retrieving a previously-computed hash from the cache, or by computing
-        * a hash from the files.
-        *
-        * @param string|string[] $filePaths One or more file paths.
-        * @param string $algo Name of selected hashing algorithm.
-       * @return string|bool Hash of files' contents, or false if no file could be read.
-        */
-       public static function getFileContentsHash( $filePaths, $algo = 'md4' ) {
-               $instance = self::singleton();
-
-               if ( !is_array( $filePaths ) ) {
-                       $filePaths = (array) $filePaths;
-               }
-
-               if ( count( $filePaths ) === 1 ) {
-                       return $instance->getFileContentsHashInternal( $filePaths[0], $algo );
-               }
-
-               sort( $filePaths );
-               $hashes = array_map( function ( $filePath ) use ( $instance, $algo ) {
-                       return $instance->getFileContentsHashInternal( $filePath, $algo ) ?: '';
-               }, $filePaths );
-
-               $hashes = implode( '', $hashes );
-               return $hashes ? hash( $algo, $hashes ) : false;
-       }
-}
diff --git a/includes/utils/FileContentsHasher.php b/includes/utils/FileContentsHasher.php
new file mode 100644
index 0000000..67eb9d2
--- /dev/null
+++ b/includes/utils/FileContentsHasher.php
@@ -0,0 +1,111 @@
+<?php
+/**
+ * Generate hash digests of file contents to help with cache invalidation.
+ *
+ * This program is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU General Public License as published by
+ * the Free Software Foundation; either version 2 of the License, or
+ * (at your option) any later version.
+ *
+ * This program is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU General Public License for more details.
+ *
+ * You should have received a copy of the GNU General Public License along
+ * with this program; if not, write to the Free Software Foundation, Inc.,
+ * 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
+ * http://www.gnu.org/copyleft/gpl.html
+ *
+ * @file
+ */
+class FileContentsHasher {
+
+       /** @var BagOStuff */
+       protected $cache;
+
+       /** @var FileContentsHasher */
+       private static $instance;
+
+       /**
+        * Constructor.
+        */
+       public function __construct() {
+               $this->cache = ObjectCache::newAccelerator( 'hash' );
+       }
+
+       /**
+        * Get the singleton instance of this class.
+        *
+        * @return FileContentsHasher
+        */
+       public static function singleton() {
+               if ( !self::$instance ) {
+                       self::$instance = new self;
+               }
+
+               return self::$instance;
+       }
+
+       /**
+        * Get a hash of a file's contents, either by retrieving a previously-
+        * computed hash from the cache, or by computing a hash from the file.
+        *
+        * @private
+        * @param string $filePath Full path to the file.
+        * @param string $algo Name of selected hashing algorithm.
+        * @return string|bool Hash of file contents, or false if the file could not be read.
+        */
+       public function getFileContentsHashInternal( $filePath, $algo = 'md4' ) {
+               $mtime = MediaWiki\quietCall( 'filemtime', $filePath );
+               if ( $mtime === false ) {
+                       return false;
+               }
+
+               $cacheKey = wfGlobalCacheKey( __CLASS__, $filePath, $mtime, $algo );
+               $hash = $this->cache->get( $cacheKey );
+
+               if ( $hash ) {
+                       return $hash;
+               }
+
+               $contents = MediaWiki\quietCall( 'file_get_contents', $filePath );
+               if ( $contents === false ) {
+                       return false;
+               }
+
+               $hash = hash( $algo, $contents );
+               $this->cache->set( $cacheKey, $hash, 60 * 60 * 24 );  // 24h
+
+               return $hash;
+       }
+
+       /**
+        * Get a hash of the combined contents of one or more files, either by
+        * retrieving a previously-computed hash from the cache, or by computing
+        * a hash from the files.
+        *
+        * @param string|string[] $filePaths One or more file paths.
+        * @param string $algo Name of selected hashing algorithm.
+       * @return string|bool Hash of files' contents, or false if no file could be read.
+        */
+       public static function getFileContentsHash( $filePaths, $algo = 'md4' ) {
+               $instance = self::singleton();
+
+               if ( !is_array( $filePaths ) ) {
+                       $filePaths = (array) $filePaths;
+               }
+
+               if ( count( $filePaths ) === 1 ) {
+                       return $instance->getFileContentsHashInternal( $filePaths[0], $algo );
+               }
+
+               sort( $filePaths );
+               $hashes = array_map( function ( $filePath ) use ( $instance, $algo ) {
+                       return $instance->getFileContentsHashInternal( $filePath, $algo ) ?: '';
+               }, $filePaths );
+
+               $hashes = implode( '', $hashes );
+               return $hashes ? hash( $algo, $hashes ) : false;
+       }
+}
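
The multi-file branch of getFileContentsHash() above reduces to: sort the paths, hash each file, concatenate the per-file digests, and hash that string once more. Below is a standalone sketch of just that combining step; the function name is made up for illustration, and the per-file mtime-keyed ObjectCache lookup is deliberately left out, so this is a sketch of the logic rather than a drop-in replacement.

<?php
// Standalone sketch of the combining step (no caching, no MediaWiki).
function combinedContentsHash( array $filePaths, $algo = 'md4' ) {
    sort( $filePaths );  // make the result independent of argument order
    $hashes = '';
    foreach ( $filePaths as $path ) {
        $contents = @file_get_contents( $path );  // tolerate unreadable files
        $hashes .= ( $contents === false ) ? '' : hash( $algo, $contents );
    }
    // Hash the concatenated per-file digests; false when nothing was readable.
    return $hashes === '' ? false : hash( $algo, $hashes );
}

Sorting first keeps the combined digest stable regardless of the order in which callers list the files, which matters because the digest is intended to drive cache invalidation.
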
diff --git a/tests/phpunit/includes/FileContentsHasherTest.php b/tests/phpunit/includes/FileContentsHasherTest.php
deleted file mode 100644
index eb63649..0000000
--- a/tests/phpunit/includes/FileContentsHasherTest.php
+++ /dev/null
@@ -1,55 +0,0 @@
-<?php
-
-/**
- * @covers FileContentsHasher
- */
-class FileContentsHasherTest extends MediaWikiTestCase {
-
-       public function provideSingleFile() {
-               return array_map( function ( $file ) {
-                       return array( $file, file_get_contents( $file ) );
-               }, glob( __DIR__ . '/../data/filecontentshasher/*.*' ) );
-       }
-
-       public function provideMultipleFiles() {
-               return array(
-                       array( $this->provideSingleFile() )
-               );
-       }
-
-       /**
-       * @covers FileContentsHasher::getFileContentsHash
-        * @covers FileContentsHasher::getFileContentsHashInternal
-        * @dataProvider provideSingleFile
-        */
-       public function testSingleFileHash( $fileName, $contents ) {
-               foreach ( array( 'md4', 'md5' ) as $algo ) {
-                       $expectedHash = hash( $algo, $contents );
-                       $actualHash = FileContentsHasher::getFileContentsHash( $fileName, $algo );
-                       $this->assertEquals( $expectedHash, $actualHash );
-                       $actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileName, $algo );
-                       $this->assertEquals( $expectedHash, $actualHashRepeat );
-               }
-       }
-
-       /**
-       * @covers FileContentsHasher::getFileContentsHash
-        * @covers FileContentsHasher::getFileContentsHashInternal
-        * @dataProvider provideMultipleFiles
-        */
-       public function testMultipleFileHash( $files ) {
-               $fileNames = array();
-               $hashes = array();
-               foreach ( $files as $fileInfo ) {
-                       list( $fileName, $contents ) = $fileInfo;
-                       $fileNames[] = $fileName;
-                       $hashes[] = md5( $contents );
-               }
-
-               $expectedHash = md5( implode( '', $hashes ) );
-               $actualHash = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
-               $this->assertEquals( $expectedHash, $actualHash );
-               $actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
-               $this->assertEquals( $expectedHash, $actualHashRepeat );
-       }
-}
diff --git a/tests/phpunit/includes/utils/FileContentsHasherTest.php b/tests/phpunit/includes/utils/FileContentsHasherTest.php
new file mode 100644
index 0000000..a03e1fc
--- /dev/null
+++ b/tests/phpunit/includes/utils/FileContentsHasherTest.php
@@ -0,0 +1,55 @@
+<?php
+
+/**
+ * @covers FileContentsHasher
+ */
+class FileContentsHasherTest extends MediaWikiTestCase {
+
+       public function provideSingleFile() {
+               return array_map( function ( $file ) {
+                       return array( $file, file_get_contents( $file ) );
+               }, glob( __DIR__ . '/../../data/filecontentshasher/*.*' ) );
+       }
+
+       public function provideMultipleFiles() {
+               return array(
+                       array( $this->provideSingleFile() )
+               );
+       }
+
+       /**
+       * @covers FileContentsHasher::getFileContentsHash
+        * @covers FileContentsHasher::getFileContentsHashInternal
+        * @dataProvider provideSingleFile
+        */
+       public function testSingleFileHash( $fileName, $contents ) {
+               foreach ( array( 'md4', 'md5' ) as $algo ) {
+                       $expectedHash = hash( $algo, $contents );
+                       $actualHash = FileContentsHasher::getFileContentsHash( $fileName, $algo );
+                       $this->assertEquals( $expectedHash, $actualHash );
+                       $actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileName, $algo );
+                       $this->assertEquals( $expectedHash, $actualHashRepeat );
+               }
+       }
+
+       /**
+       * @covers FileContentsHasher::getFileContentsHash
+        * @covers FileContentsHasher::getFileContentsHashInternal
+        * @dataProvider provideMultipleFiles
+        */
+       public function testMultipleFileHash( $files ) {
+               $fileNames = array();
+               $hashes = array();
+               foreach ( $files as $fileInfo ) {
+                       list( $fileName, $contents ) = $fileInfo;
+                       $fileNames[] = $fileName;
+                       $hashes[] = md5( $contents );
+               }
+
+               $expectedHash = md5( implode( '', $hashes ) );
+               $actualHash = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
+               $this->assertEquals( $expectedHash, $actualHash );
+               $actualHashRepeat = FileContentsHasher::getFileContentsHash( $fileNames, 'md5' );
+               $this->assertEquals( $expectedHash, $actualHashRepeat );
+       }
+}