#!/bin/bash
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

# The optimization code is based on pngslim (http://goo.gl/a0XHg)
# and executes a similar pipeline to optimize the png file size.
# The steps that require pngoptimizercl/pngrewrite/deflopt are omitted,
# but this runs all other processes, including:
# 1) various color-dependent optimizations using optipng.
# 2) optimize the number of huffman blocks.
# 3) randomize the huffman table.
# 4) Further optimize using optipng and advdef (zlib stream).
# Due to step 3), each run may produce slightly different results.
#
# Note(oshima): In my experiment, advdef didn't reduce much. I'm keeping it
# for now as it does not take much time to run.

readonly ALL_DIRS="
ash/resources
ui/resources
chrome/app/theme
chrome/browser/resources
chrome/renderer/resources
webkit/glue/resources
remoting/resources
remoting/webapp
"

# Files larger than this file size (in bytes) will
# use the optimization parameters tailored for large files.
# (Marked readonly for consistency with the other constants below.)
readonly LARGE_FILE_THRESHOLD=3000

# Constants used for optimization
readonly DEFAULT_MIN_BLOCK_SIZE=128
readonly DEFAULT_LIMIT_BLOCKS=256
readonly DEFAULT_RANDOM_TRIALS=100
# Taken from the recommendation in the pngslim's readme.txt.
readonly LARGE_MIN_BLOCK_SIZE=1
readonly LARGE_LIMIT_BLOCKS=2
readonly LARGE_RANDOM_TRIALS=1

# Global variables for stats (mutated by optimize_file).
TOTAL_OLD_BYTES=0
TOTAL_NEW_BYTES=0
TOTAL_FILE=0
PROCESSED_FILE=0

# Spinner characters shown while long-running tools work; THROBBER_COUNT
# indexes into this array and advances modulo 4.
declare -ar THROBBER_STR=('-' '\\' '|' '/')
THROBBER_COUNT=0

# Advance the progress throbber: print the next spinner character and a
# backspace (so the next write lands on the same cell), then bump the
# counter modulo the spinner length.
function throbber {
  printf '%b\b' "${THROBBER_STR[$THROBBER_COUNT]}"
  THROBBER_COUNT=$(( (THROBBER_COUNT + 1) % 4 ))
}

# Usage: pngout_loop <file> <png_out_options> ...
# Run pngout on the file with the given extra options, sweeping every
# filter type (0-5); at optimization levels above 1 additionally sweep a
# set of block split thresholds.
function pngout_loop {
  local target=$1
  shift
  local extra_opts=$*
  if [ $OPTIMIZE_LEVEL == 1 ]; then
    local filter
    for filter in {0..5}; do
      throbber
      pngout -q -k1 -s1 -f$filter $extra_opts $target
    done
    return
  fi
  local block filter
  for block in 0 128 256 512; do
    for filter in {0..5}; do
      throbber
      pngout -q -k1 -s1 -b$block -f$filter $extra_opts $target
    done
  done
}

# Usage: get_color_depth_list
# Print (to stdout) the optipng/pngout color depth options to try for the
# current OPTIMIZE_LEVEL: only -d0 at level 1, all depths otherwise.
function get_color_depth_list {
  case $OPTIMIZE_LEVEL in
    1) echo "-d0" ;;
    *) echo "-d1 -d2 -d4 -d8" ;;
  esac
}

# Usage: process_grayscale <file>
# Optimize grayscale images for all color bit depths.
#
# TODO(oshima): Experiment with -d0 w/o -c0.
function process_grayscale {
  # Bind the argument explicitly; previously this ignored $1 and relied on
  # the caller's dynamically-scoped $file variable.
  local file=$1
  echo -n "|gray"
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c0 $opt
  done
}

# Usage: process_grayscale_alpha <file>
# Optimize grayscale images with alpha for all color bit depths.
function process_grayscale_alpha {
  # Bind the argument explicitly; previously this ignored $1 and relied on
  # the caller's dynamically-scoped $file variable.
  local file=$1
  echo -n "|gray-a"
  pngout_loop $file -c4
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
}

# Usage: process_rgb <file>
# Optimize rgb images with or without alpha for all color bit depths.
function process_rgb {
  # Bind the argument explicitly; previously this ignored $1 and relied on
  # the caller's dynamically-scoped $file variable.
  local file=$1
  echo -n "|rgb"
  for opt in $(get_color_depth_list); do
    pngout_loop $file -c3 $opt
  done
  pngout_loop $file -c2
  pngout_loop $file -c6
}

# Usage: huffman_blocks <file>
# Optimize the huffman blocks: try every block count from 2 up to
# size/min_block_size, capped at limit_blocks. Large files use the more
# aggressive (smaller) pngslim-recommended parameters.
function huffman_blocks {
  local file=$1
  echo -n "|huffman"
  local size=$(stat -c%s $file)
  local min_block_size=$DEFAULT_MIN_BLOCK_SIZE
  local limit_blocks=$DEFAULT_LIMIT_BLOCKS

  if [ $size -gt $LARGE_FILE_THRESHOLD ]; then
    min_block_size=$LARGE_MIN_BLOCK_SIZE
    limit_blocks=$LARGE_LIMIT_BLOCKS
  fi
  # Previously assigned with 'let' and leaked into the global scope;
  # keep it local to this function.
  local max_blocks=$(( size / min_block_size ))
  if [ $max_blocks -gt $limit_blocks ]; then
    max_blocks=$limit_blocks
  fi

  for i in $(seq 2 $max_blocks); do
    throbber
    pngout -q -k1 -ks -s1 -n$i $file
  done
}

# Usage: random_huffman_table_trial <file>
# Run a batch of pngout trials with randomized initial huffman tables,
# then recurse for another batch whenever the file shrank. Large files get
# a smaller batch to bound the runtime.
#
# TODO(oshima): Try adjusting different parameters for large files to
# reduce runtime.
function random_huffman_table_trial {
  echo -n "|random"
  local target=$1
  local size_before=$(stat -c%s $target)
  local trials=$DEFAULT_RANDOM_TRIALS

  if [ $size_before -gt $LARGE_FILE_THRESHOLD ]; then
    trials=$LARGE_RANDOM_TRIALS
  fi
  local n
  for n in $(seq 1 $trials); do
    throbber
    pngout -q -k1 -ks -s0 -r $target
  done
  local size_after=$(stat -c%s $target)
  # Keep going as long as the randomized trials make progress.
  if [ $size_after -lt $size_before ]; then
    random_huffman_table_trial $target
  fi
}

# Usage: final_compression <file>
# Further compress using optipng and advdef: at level 2, sweep optipng
# window sizes first; always finish with advdef at levels 1 through 4.
# TODO(oshima): Experiment with 256.
function final_compression {
  echo -n "|final"
  local target=$1
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    local window
    for window in 32k 16k 8k 4k 2k 1k 512; do
      throbber
      optipng -q -nb -nc -zw$window -zc1-9 -zm1-9 -zs0-3 -f0-5 $target
    done
  fi
  local level
  for level in 1 2 3 4; do
    throbber
    advdef -q -z -$level $target
  done
  echo -ne "\r"
}

# Usage: get_color_type <file>
# Returns the color type name of the png file. Here is the list of names
# for each color type codes.
# 0 : grayscale
# 2 : RGB
# 3 : colormap
# 4 : gray+alpha
# 6 : RGBA
# See http://en.wikipedia.org/wiki/Portable_Network_Graphics#Color_depth
# for details about the color type code.
function get_color_type {
  local target=$1
  # Take the third comma-separated field of file(1)'s output and print its
  # second whitespace-separated word (e.g. "8-bit/color RGBA" -> "RGBA").
  file $target | awk -F, '{ split($3, parts, " "); print parts[2] }'
}

# Usage: optimize_size <file>
# Performs png file optimization.
function optimize_size {
  # Clear to end of line so progress output overwrites cleanly.
  tput el
  local file=$1
  echo -n "$file "

  # First pass: recompress the existing zlib stream with advdef.
  advdef -q -z -4 $file

  # Probe the image's effective color type by asking pngout to force a
  # conversion: if forcing grayscale (-c0) produces an output file, run the
  # grayscale pipelines; else if grayscale+alpha (-c4) works, run that
  # pipeline; otherwise treat the image as RGB/RGBA.
  pngout -q -s4 -c0 -force $file $file.tmp.png
  if [ -f $file.tmp.png ]; then
    rm $file.tmp.png
    process_grayscale $file
    process_grayscale_alpha $file
  else
    pngout -q -s4 -c4 -force $file $file.tmp.png
    if [ -f $file.tmp.png ]; then
      rm $file.tmp.png
      process_grayscale_alpha $file
    else
      process_rgb $file
    fi
  fi

  # Sweep optipng's filter/strategy space into a temp file, then keep the
  # result only if the color type did not degrade (see below).
  echo -n "|filter"
  local old_color_type=$(get_color_type $file)
  optipng -q -zc9 -zm8 -zs0-3 -f0-5 $file -out $file.tmp.png
  local new_color_type=$(get_color_type $file.tmp.png)
  # optipng may corrupt a png file when reducing the color type
  # to grayscale/grayscale+alpha. Just skip such cases until
  # the bug is fixed. See crbug.com/174505, crbug.com/174084.
  # The issue is reported in
  # https://sourceforge.net/tracker/?func=detail&aid=3603630&group_id=151404&atid=780913
  if [[ $old_color_type == "RGBA" && $new_color_type =~ gray.* ]] ; then
    rm $file.tmp.png
    echo -n "[skip opting]"
  else
    mv $file.tmp.png $file
  fi
  pngout -q -k1 -s1 $file

  # Search for the best number of huffman blocks.
  huffman_blocks $file

  # TODO(oshima): Experiment with strategy 1.
  # Try alternative zlib strategies; level 2 tries several of them.
  echo -n "|strategy"
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    for i in 3 2 0; do
      pngout -q -k1 -ks -s$i $file
    done
  else
    pngout -q -k1 -ks -s1 $file
  fi

  # Randomized huffman tables are only worth the cost at level 2.
  if [ $OPTIMIZE_LEVEL == 2 ]; then
    random_huffman_table_trial $file
  fi

  final_compression $file
}

# Usage: process_file <file>
# Crush the file into $TMP_DIR with pngcrush, then (unless the optimize
# level is 0) run the full size-optimization pipeline on the crushed copy.
function process_file {
  local file=$1
  local name
  name=$(basename $file)
  # -rem alla removes all ancillary chunks except for tRNS
  pngcrush -d $TMP_DIR -brute -reduce -rem alla $file > /dev/null

  if [ $OPTIMIZE_LEVEL -ne 0 ]; then
    optimize_size $TMP_DIR/$name
  fi
}

# Usage: optimize_file <file>
# Optimize one png file into $TMP_DIR and, if the result is smaller,
# replace the original. Updates the global stats counters
# (TOTAL_FILE, TOTAL_OLD_BYTES, TOTAL_NEW_BYTES, PROCESSED_FILE).
function optimize_file {
  local file=$1
  if $using_cygwin ; then
    file=$(cygpath -w $file)
  fi

  local name=$(basename $file)
  # File size in bytes before optimization (GNU stat).
  local old=$(stat -c%s $file)
  # process_file writes its output as $TMP_DIR/<basename>.
  local tmp_file=$TMP_DIR/$name

  process_file $file

  local new=$(stat -c%s $tmp_file)
  let diff=$old-$new
  let percent=($diff*100)/$old
  let TOTAL_FILE+=1

  # Clear the progress line before printing the per-file result.
  tput el
  if [ $new -lt $old ]; then
    echo -ne "$file : $old => $new ($diff bytes : $percent %)\n"
    mv "$tmp_file" "$file"
    let TOTAL_OLD_BYTES+=$old
    let TOTAL_NEW_BYTES+=$new
    let PROCESSED_FILE+=1
  else
    # No improvement: discard the temp copy and keep the original file.
    if [ $OPTIMIZE_LEVEL == 0 ]; then
      echo -ne "$file : skipped\r"
    fi
    rm $tmp_file
  fi
}

# Usage: optimize_dir <dir>
# Optimize every .png file found (recursively) under the given directory.
function optimize_dir {
  local dir=$1
  if $using_cygwin ; then
    dir=$(cygpath -w $dir)
  fi

  # Iterate over a NUL-delimited find stream instead of word-splitting
  # $(find ...), so file names containing whitespace survive. The loop body
  # runs in the current shell (process substitution, not a pipe) so the
  # global stats updated by optimize_file keep accumulating.
  while IFS= read -r -d '' f; do
    optimize_file "$f"
  done < <(find "$dir" -name "*.png" -print0)
}

# Usage: install_if_not_installed <program> <package>
# If <program> is not on PATH, either tell a cygwin user which package to
# install (and exit), or offer to apt-get install <package>; declining the
# offer exits the script.
function install_if_not_installed {
  local program=$1
  local package=$2
  # 'command -v' is the portable/builtin replacement for 'which'.
  if ! command -v $program > /dev/null 2>&1 ; then
    if $using_cygwin ; then
      echo "Couldn't find $program. " \
           "Please run cygwin's setup.exe and install the $package package."
      exit 1
    else
      # -r so a backslash in the reply is taken literally.
      read -r -p "Couldn't find $program. Do you want to install? (y/n)"
      [ "$REPLY" == "y" ] && sudo apt-get install $package
      [ "$REPLY" == "y" ] || exit
    fi
  fi
}

# Usage: fail_if_not_installed <program> <url>
# Exit with an error pointing at <url> if <program> is not on PATH.
function fail_if_not_installed {
  local program=$1
  local url=$2
  # 'command -v' is the portable/builtin replacement for 'which'.
  if ! command -v $program > /dev/null 2>&1 ; then
    echo "Couldn't find $program. Please download and install it from $url ."
    exit 1
  fi
}

# Print usage information to stdout and exit with status 1.
# (Fixes typos in the user-facing text: "addtional" -> "additional",
# "perform" -> "performs".)
function show_help {
  local program=$(basename $0)
  echo \
"Usage: $program [options] dir ...

$program is a utility to reduce the size of png files by removing
unnecessary chunks and compressing the image.

Options:
  -o<optimize_level>  Specify optimization level: (default is 1)
      0  Just run pngcrush. It removes unnecessary chunks and performs basic
         optimization on the encoded data.
      1  Optimize png files using pngout/optipng and advdef. This can further
         reduce an additional 5~30%. This is the default level.
      2  Aggressively optimize the size of png files. This may produce
         an additional 1%~5% reduction.  Warning: this is *VERY*
         slow and can take hours to process all files.
  -r<revision> If this is specified, the script processes only png files
               changed since this revision. The <dir> options will be used
               to narrow down the files under specific directories.
  -h  Print this help text."
  exit 1
}

# Sanity check: the script expects to run from Chromium's src directory,
# whose parent contains the .gclient file.
if [ ! -e ../.gclient ]; then
  echo "$0 must be run in src directory"
  exit 1
fi

# Detect Cygwin (uname starts with "CYGWIN") so paths can later be
# converted with cygpath where needed.
if [ "$(expr substr $(uname -s) 1 6)" == "CYGWIN" ]; then
  using_cygwin=true
else
  using_cygwin=false
fi

OPTIMIZE_LEVEL=1
# Parse options
while getopts o:r:h opts
do
  case $opts in
    r)
      # Map the given svn revision to a git commit; bail out if the
      # lookup fails, and show help if the revision is unknown.
      COMMIT=$(git svn find-rev r$OPTARG | tail -1) || exit
      if [ -z "$COMMIT" ] ; then
        echo "Revision $OPTARG not found"
        show_help
      fi
      ;;
    o)
      # Only optimization levels 0, 1 and 2 are accepted.
      if [[ ! "$OPTARG" =~ [012] ]] ; then
        show_help
      fi
      OPTIMIZE_LEVEL=$OPTARG
      ;;
    [h?])
      show_help;;
  esac
done

# Remove options from argument list.
shift $(($OPTIND -1))

# Make sure we have all necessary commands installed.
# pngcrush is always needed; optipng/advdef/pngout only for levels >= 1.
install_if_not_installed pngcrush pngcrush
if [ $OPTIMIZE_LEVEL -ge 1 ]; then
  install_if_not_installed optipng optipng

  # advdef has no cygwin package, so cygwin users must install it manually.
  if $using_cygwin ; then
    fail_if_not_installed advdef "http://advancemame.sourceforge.net/comp-readme.html"
  else
    install_if_not_installed advdef advancecomp
  fi

  # pngout has no package on either platform; point the user at the
  # platform-appropriate download page.
  if $using_cygwin ; then
    pngout_url="http://www.advsys.net/ken/utils.htm"
  else
    pngout_url="http://www.jonof.id.au/kenutils"
  fi
  fail_if_not_installed pngout $pngout_url
fi

# Create tmp directory for crushed png file.
TMP_DIR=$(mktemp -d)
if $using_cygwin ; then
  TMP_DIR=$(cygpath -w $TMP_DIR)
fi

# Make sure we cleanup temp dir
trap "rm -rf $TMP_DIR" EXIT

# If no directories are specified, optimize all directories.
# (The ':=' expansion assigns ALL_DIRS to DIRS when DIRS is empty; 'set'
# then makes the chosen list the positional parameters.)
DIRS=$@
set ${DIRS:=$ALL_DIRS}

echo "Optimize level=$OPTIMIZE_LEVEL"
# With -r (COMMIT set), only process png files changed since that revision;
# otherwise walk every directory in DIRS.
if [ -n "$COMMIT" ] ; then
 ALL_FILES=$(git diff --name-only $COMMIT HEAD $DIRS | grep "png$")
 ALL_FILES_LIST=( $ALL_FILES )
 echo "Processing ${#ALL_FILES_LIST[*]} files"
 for f in $ALL_FILES; do
   if [ -f $f ] ; then
     optimize_file $f
   else
     # git diff also lists files deleted since COMMIT; skip those.
     echo "Skipping deleted file: $f";
   fi
 done
else
  for d in $DIRS; do
    echo "Optimizing png files in $d"
    optimize_dir $d
    echo
  done
fi

# Print the results.
# SECONDS is bash's built-in elapsed-time counter; 'date -u -d @N +%T'
# formats it as HH:MM:SS.
if [ $PROCESSED_FILE == 0 ]; then
  echo "Did not find any files (out of $TOTAL_FILE files)" \
       "that could be optimized" \
       "in $(date -u -d @$SECONDS +%T)s"
else
  let diff=$TOTAL_OLD_BYTES-$TOTAL_NEW_BYTES
  let percent=$diff*100/$TOTAL_OLD_BYTES
  echo "Processed $PROCESSED_FILE files (out of $TOTAL_FILE files)" \
       "in $(date -u -d @$SECONDS +%T)s"
  echo "Result : $TOTAL_OLD_BYTES => $TOTAL_NEW_BYTES bytes" \
       "($diff bytes : $percent %)"
fi