diff e2gallerypro/e2upload/Backend/Assets/getid3/module.lib.data_hash.php @ 3:3f6b44aa6b35 judyates

[svn r4] added ability to buy stuff, from a Prints page, but it doesn't work well with the css, and it also has not been fitted into the perl make system.
author rlm
date Mon, 22 Feb 2010 08:02:39 -0500
parents
children
line wrap: on
line diff
     1.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     1.2 +++ b/e2gallerypro/e2upload/Backend/Assets/getid3/module.lib.data_hash.php	Mon Feb 22 08:02:39 2010 -0500
     1.3 @@ -0,0 +1,196 @@
     1.4 +<?php
     1.5 +// +----------------------------------------------------------------------+
     1.6 +// | PHP version 5                                                        |
     1.7 +// +----------------------------------------------------------------------+
     1.8 +// | Copyright (c) 2002-2006 James Heinrich, Allan Hansen                 |
     1.9 +// +----------------------------------------------------------------------+
    1.10 +// | This source file is subject to version 2 of the GPL license,         |
    1.11 +// | that is bundled with this package in the file license.txt and is     |
    1.12 +// | available through the world-wide-web at the following url:           |
    1.13 +// | http://www.gnu.org/copyleft/gpl.html                                 |
    1.14 +// +----------------------------------------------------------------------+
    1.15 +// | getID3() - http://getid3.sourceforge.net or http://www.getid3.org    |
    1.16 +// +----------------------------------------------------------------------+
    1.17 +// | Authors: James Heinrich <infoØgetid3*org>                            |
    1.18 +// |          Allan Hansen <ahØartemis*dk>                                |
    1.19 +// +----------------------------------------------------------------------+
    1.20 +// | module.lib.data-hash.php                                             |
    1.21 +// | getID3() library file.                                               |
    1.22 +// | dependencies: NONE.                                                  |
    1.23 +// +----------------------------------------------------------------------+
    1.24 +//
    1.25 +// $Id: module.lib.data_hash.php,v 1.5 2006/12/03 19:28:18 ah Exp $
    1.26 +
    1.27 +
    1.28 +
    1.29 +class getid3_lib_data_hash
    1.30 +{
    1.31 +    
    1.32 +    private $getid3;
    1.33 +    
    1.34 +    
     1.35 +    // constructor - calculate md5/sha1 data
    1.36 +    public function __construct(getID3 $getid3, $algorithm) {
    1.37 +    
    1.38 +        $this->getid3 = $getid3;
    1.39 +        
    1.40 +        // Check algorithm
    1.41 +        if (!preg_match('/^(md5|sha1)$/', $algorithm)) {
    1.42 +            throw new getid3_exception('Unsupported algorithm, "'.$algorithm.'", in GetHashdata()');
    1.43 +        }
    1.44 +        
    1.45 +        
    1.46 +        //// Handle ogg vorbis files
    1.47 +        
    1.48 +        if ((@$getid3->info['fileformat'] == 'ogg') && (@$getid3->info['audio']['dataformat'] == 'vorbis')) {
    1.49 +
    1.50 +            // We cannot get an identical md5_data value for Ogg files where the comments
    1.51 +            // span more than 1 Ogg page (compared to the same audio data with smaller
    1.52 +            // comments) using the normal getID3() method of MD5'ing the data between the
    1.53 +            // end of the comments and the end of the file (minus any trailing tags),
    1.54 +            // because the page sequence numbers of the pages that the audio data is on
    1.55 +            // do not match. Under normal circumstances, where comments are smaller than
    1.56 +            // the nominal 4-8kB page size, then this is not a problem, but if there are
    1.57 +            // very large comments, the only way around it is to strip off the comment
    1.58 +            // tags with vorbiscomment and MD5 that file.
    1.59 +            // This procedure must be applied to ALL Ogg files, not just the ones with
    1.60 +            // comments larger than 1 page, because the below method simply MD5's the
    1.61 +            // whole file with the comments stripped, not just the portion after the
     1.62 +            // comments block (which is the standard getID3() method).
    1.63 +
    1.64 +            // The above-mentioned problem of comments spanning multiple pages and changing
    1.65 +            // page sequence numbers likely happens for OggSpeex and OggFLAC as well, but
    1.66 +            // currently vorbiscomment only works on OggVorbis files.
    1.67 +
    1.68 +            if ((bool)ini_get('safe_mode')) {
    1.69 +                throw new getid3_exception('PHP running in Safe Mode - cannot make system call to vorbiscomment[.exe]  needed for '.$algorithm.'_data.');
    1.70 +            }
    1.71 +        
    1.72 +            if (!preg_match('/^Vorbiscomment /', `vorbiscomment --version 2>&1`)) {
    1.73 +                throw new getid3_exception('vorbiscomment[.exe] binary not found in path. UNIX: typically /usr/bin. Windows: typically c:\windows\system32.');
    1.74 +            }
    1.75 +        
    1.76 +            // Prevent user from aborting script
    1.77 +            $old_abort = ignore_user_abort(true);
    1.78 +
    1.79 +            // Create empty file
    1.80 +            $empty = tempnam('*', 'getID3');
    1.81 +            touch($empty);
    1.82 +
    1.83 +            // Use vorbiscomment to make temp file without comments
    1.84 +            $temp = tempnam('*', 'getID3');
    1.85 +            
    1.86 +            $command_line = 'vorbiscomment -w -c '.escapeshellarg($empty).' '.escapeshellarg(realpath($getid3->filename)).' '.escapeshellarg($temp).' 2>&1';
    1.87 +
    1.88 +            // Error from vorbiscomment
    1.89 +            if ($vorbis_comment_error = `$command_line`) {
    1.90 +                throw new getid3_exception('System call to vorbiscomment[.exe] failed.');
    1.91 +            } 
    1.92 +
    1.93 +            // Get hash of newly created file
    1.94 +            $hash_function = $algorithm . '_file';
    1.95 +            $getid3->info[$algorithm.'_data'] = $hash_function($temp);
    1.96 +
    1.97 +            // Clean up
    1.98 +            unlink($empty);
    1.99 +            unlink($temp);
   1.100 +
   1.101 +            // Reset abort setting
   1.102 +            ignore_user_abort($old_abort);
   1.103 +            
   1.104 +            // Return success
   1.105 +            return true;
   1.106 +        }
   1.107 +
   1.108 +        //// Handle other file formats
   1.109 +        
   1.110 +        // Get hash from part of file
   1.111 +        if (@$getid3->info['avdataoffset'] || (@$getid3->info['avdataend']  &&  @$getid3->info['avdataend'] < $getid3->info['filesize'])) {
   1.112 +            
   1.113 +            if ((bool)ini_get('safe_mode')) {
   1.114 +                $getid3->warning('PHP running in Safe Mode - backtick operator not available, using slower non-system-call '.$algorithm.' algorithm.');
   1.115 +                $hash_function = 'hash_file_partial_safe_mode';
   1.116 +            }
   1.117 +            else {
   1.118 +                $hash_function = 'hash_file_partial';
   1.119 +            }
   1.120 +            
   1.121 +            $getid3->info[$algorithm.'_data'] = $this->$hash_function($getid3->filename, $getid3->info['avdataoffset'], $getid3->info['avdataend'], $algorithm);
   1.122 +        } 
   1.123 +    
   1.124 +        // Get hash from whole file - use built-in md5_file() and sha1_file()
   1.125 +        else {
   1.126 +            $hash_function = $algorithm . '_file';
   1.127 +            $getid3->info[$algorithm.'_data'] = $hash_function($getid3->filename);
   1.128 +        }
   1.129 +    }
   1.130 +    
   1.131 +    
   1.132 +    
   1.133 +    // Return md5/sha1sum for a file from starting position to absolute end position
   1.134 +    // Using windows system call
   1.135 +    private function hash_file_partial($file, $offset, $end, $algorithm) {
   1.136 +        
   1.137 +        // It seems that sha1sum.exe for Windows only works on physical files, does not accept piped data
   1.138 +        // Fall back to create-temp-file method:
   1.139 +        if ($algorithm == 'sha1'  &&  strtoupper(substr(PHP_OS, 0, 3)) == 'WIN') {
   1.140 +            return $this->hash_file_partial_safe_mode($file, $offset, $end, $algorithm);
   1.141 +        }
   1.142 +        
   1.143 +        // Check for presence of binaries and revert to safe mode if not found
   1.144 +        if (!`head --version`) {
   1.145 +            return $this->hash_file_partial_safe_mode($file, $offset, $end, $algorithm);
   1.146 +        }
   1.147 +        
   1.148 +        if (!`tail --version`) {
   1.149 +            return $this->hash_file_partial_safe_mode($file, $offset, $end, $algorithm);
   1.150 +        }
   1.151 +        
   1.152 +        if (!`${algorithm}sum --version`) {
   1.153 +            return $this->hash_file_partial_safe_mode($file, $offset, $end, $algorithm);
   1.154 +        }   
   1.155 +        
   1.156 +        $size = $end - $offset;
   1.157 +        $command_line  = 'head -c'.$end.' '.escapeshellarg(realpath($file)).' | tail -c'.$size.' | '.$algorithm.'sum';
   1.158 +        return substr(`$command_line`, 0, $algorithm == 'md5' ? 32 : 40);
   1.159 +    }
   1.160 +    
   1.161 +    
   1.162 +
   1.163 +    // Return md5/sha1sum for a file from starting position to absolute end position
   1.164 +    // Using slow safe_mode temp file
   1.165 +    private function hash_file_partial_safe_mode($file, $offset, $end, $algorithm) {        
   1.166 +
   1.167 +        // Attempt to create a temporary file in the system temp directory - invalid dirname should force to system temp dir
   1.168 +        if (($data_filename = tempnam('*', 'getID3')) === false) {
   1.169 +            throw new getid3_exception('Unable to create temporary file.');
   1.170 +        }
   1.171 +
   1.172 +        // Init
   1.173 +        $result = false;
   1.174 +
   1.175 +        // Copy parts of file
   1.176 +        if ($fp = @fopen($file, 'rb')) {
   1.177 +
   1.178 +            if ($fp_data = @fopen($data_filename, 'wb')) {
   1.179 +
   1.180 +                fseek($fp, $offset, SEEK_SET);
   1.181 +                $bytes_left_to_write = $end - $offset;
   1.182 +                while (($bytes_left_to_write > 0) && ($buffer = fread($fp, getid3::FREAD_BUFFER_SIZE))) {
   1.183 +                    $bytes_written = fwrite($fp_data, $buffer, $bytes_left_to_write);
   1.184 +                    $bytes_left_to_write -= $bytes_written;
   1.185 +                }
   1.186 +                fclose($fp_data);
   1.187 +                $hash_function = $algorithm . '_file';
   1.188 +                $result = $hash_function($data_filename);
   1.189 +
   1.190 +            }
   1.191 +            fclose($fp);
   1.192 +        }
   1.193 +        unlink($data_filename);
   1.194 +        return $result;
   1.195 +    }
   1.196 +
   1.197 +}
   1.198 +
   1.199 +?>
   1.200 \ No newline at end of file