Imported Upstream version 1.11.33.4
diff --git a/generate.sh b/generate.sh
new file mode 100755
index 0000000..68e13b3
--- /dev/null
+++ b/generate.sh
@@ -0,0 +1,19 @@
+#!/bin/sh
+#
+# This script uses gyp to generate Makefiles for mod_pagespeed built against
+# the following system libraries:
+# apr, aprutil, apache httpd headers, icu, libjpeg_turbo, libpng, zlib.
+#
+# Besides the -D use_system_libs=1 below, you may need to set (via -D var=value)
+# paths for some of these libraries via these variables:
+# system_include_path_httpd, system_include_path_apr,
+# system_include_path_aprutil.
+#
+# for example, you might run
+# ./generate.sh -Dsystem_include_path_apr=/usr/include/apr-1 \
+# -Dsystem_include_path_httpd=/usr/include/httpd
+# to specify APR and Apache include directories.
+#
+# Also, BUILDTYPE=Release can be passed to make (the default is Debug).
+echo "Generating src/Makefile"
+src/build/gyp_chromium -D use_system_libs=1 "$@"
diff --git a/src/LASTCHANGE.in b/src/LASTCHANGE.in
new file mode 100644
index 0000000..ed534db
--- /dev/null
+++ b/src/LASTCHANGE.in
@@ -0,0 +1 @@
+LASTCHANGE=0
diff --git a/src/base/base.gyp b/src/base/base.gyp
new file mode 100644
index 0000000..f2dedb2
--- /dev/null
+++ b/src/base/base.gyp
@@ -0,0 +1,63 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# Base was branched from the chromium version to reduce the number of
+# dependencies of this package. Specifically, we would like to avoid
+# depending on the chrome directory, which contains the chrome version
+# and branding information.
+# TODO(morlovich): push this refactoring to chromium trunk.
+
+{
+ 'variables': {
+ 'chromium_code': 1,
+ 'chromium_root': '<(DEPTH)/third_party/chromium/src',
+ },
+ 'includes': [
+ 'base.gypi',
+ ],
+ 'targets': [
+ {
+ # This is the subset of files from base that should not be used with a
+ # dynamic library. Note that this library cannot depend on base because
+ # base depends on base_static.
+ 'target_name': 'base_static',
+ 'type': 'static_library',
+ 'sources': [
+ '<(chromium_root)/base/base_switches.cc',
+ '<(chromium_root)/base/base_switches.h',
+ '<(chromium_root)/base/win/pe_image.cc',
+ '<(chromium_root)/base/win/pe_image.h',
+ ],
+ 'include_dirs': [
+ '<(chromium_root)',
+ '<(DEPTH)',
+ ],
+ },
+ {
+ 'target_name': 'base_unittests',
+ 'type': 'executable',
+ 'sources': [
+ '<(chromium_root)/base/string_piece_unittest.cc',
+ '<(chromium_root)/base/win/win_util_unittest.cc',
+ ],
+ 'dependencies': [
+ 'base',
+ 'base_static',
+ '<(DEPTH)/testing/gmock.gyp:gmock',
+ '<(DEPTH)/testing/gtest.gyp:gtest',
+ '<(DEPTH)/testing/gtest.gyp:gtest_main',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)',
+ ],
+ 'conditions': [
+ ['OS != "win"', {
+ 'sources!': [
+            '<(chromium_root)/base/win/win_util_unittest.cc',
+ ],
+ }],
+ ],
+ },
+ ],
+}
diff --git a/src/base/base.gypi b/src/base/base.gypi
new file mode 100644
index 0000000..0fba793
--- /dev/null
+++ b/src/base/base.gypi
@@ -0,0 +1,298 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'target_defaults': {
+ 'variables': {
+ 'base_target': 0,
+ 'chromium_root': '<(DEPTH)/third_party/chromium/src',
+ 'conditions': [
+ # The default stack_trace_posix.cc is not compatible with NaCL newlib
+ # toolchain, so we provide a stubbed version when building for NaCL.
+ [ 'build_nacl==1', {
+ 'stack_trace_posix_cc': 'nacl_stubs/stack_trace_posix.cc',
+ }, {
+ 'stack_trace_posix_cc': '<(chromium_root)/base/debug/stack_trace_posix.cc',
+ }],
+ ],
+ },
+ 'target_conditions': [
+ # This part is shared between the targets defined below. Only files and
+ # settings relevant for building the Win64 target should be added here.
+ # All the rest should be added to the 'base' target below.
+ ['base_target==1', {
+ 'sources': [
+ '<(chromium_root)/build/build_config.h',
+ '<(chromium_root)/base/third_party/dmg_fp/dmg_fp.h',
+ '<(chromium_root)/base/third_party/dmg_fp/g_fmt.cc',
+ '<(chromium_root)/base/third_party/dmg_fp/dtoa_wrapper.cc',
+ '<(chromium_root)/base/third_party/icu/icu_utf.cc',
+ '<(chromium_root)/base/third_party/icu/icu_utf.h',
+ '<(chromium_root)/base/third_party/nspr/prtime.cc',
+ '<(chromium_root)/base/third_party/nspr/prtime.h',
+ '<(chromium_root)/base/at_exit.cc',
+ '<(chromium_root)/base/at_exit.h',
+ '<(chromium_root)/base/atomicops.h',
+ '<(chromium_root)/base/atomicops_internals_x86_gcc.cc',
+ '<(chromium_root)/base/atomicops_internals_x86_msvc.h',
+ '<(chromium_root)/base/callback.h',
+ '<(chromium_root)/base/callback_internal.h',
+ '<(chromium_root)/base/callback_internal.cc',
+ '<(chromium_root)/base/command_line.cc',
+ '<(chromium_root)/base/command_line.h',
+ '<(chromium_root)/base/cpu_patched.cc',
+ '<(chromium_root)/base/cpu.h',
+ '<(chromium_root)/base/debug/alias.cc',
+ '<(chromium_root)/base/debug/alias.h',
+ '<(chromium_root)/base/debug/debugger.cc',
+ '<(chromium_root)/base/debug/debugger.h',
+ '<(chromium_root)/base/debug/debugger_posix.cc',
+ '<(chromium_root)/base/debug/debugger_win.cc',
+ '<(chromium_root)/base/debug/profiler.cc',
+ '<(chromium_root)/base/debug/profiler.h',
+ '<(chromium_root)/base/debug/stack_trace.cc',
+ '<(chromium_root)/base/debug/stack_trace.h',
+ '<(stack_trace_posix_cc)',
+ '<(chromium_root)/base/debug/stack_trace_win.cc',
+ '<(chromium_root)/base/files/file_path.cc',
+ '<(chromium_root)/base/files/file_path.h',
+ '<(chromium_root)/base/files/file_path_constants.cc',
+ '<(chromium_root)/base/json/json_parser.cc',
+ '<(chromium_root)/base/json/json_parser.h',
+ '<(chromium_root)/base/json/json_reader.cc',
+ '<(chromium_root)/base/json/json_reader.h',
+ '<(chromium_root)/base/json/json_writer.cc',
+ '<(chromium_root)/base/json/json_writer.h',
+ '<(chromium_root)/base/json/string_escape.cc',
+ '<(chromium_root)/base/json/string_escape.h',
+ '<(chromium_root)/base/lazy_instance.cc',
+ '<(chromium_root)/base/lazy_instance.h',
+ '<(chromium_root)/base/logging.cc',
+ '<(chromium_root)/base/logging.h',
+ '<(chromium_root)/base/logging_win.cc',
+ '<(chromium_root)/base/logging_win.h',
+ '<(chromium_root)/base/location.cc',
+ '<(chromium_root)/base/memory/ref_counted.cc',
+ '<(chromium_root)/base/memory/ref_counted.h',
+ '<(chromium_root)/base/memory/singleton.cc',
+ '<(chromium_root)/base/memory/singleton.h',
+ '<(chromium_root)/base/mac/foundation_util.h',
+ 'mac/foundation_util.mm',
+ '<(chromium_root)/base/pickle.cc',
+ '<(chromium_root)/base/pickle.h',
+ '<(chromium_root)/base/process.h',
+ 'process_util.cc',
+ '<(chromium_root)/base/safe_strerror_posix.cc',
+ '<(chromium_root)/base/safe_strerror_posix.h',
+ '<(chromium_root)/base/strings/string_number_conversions.cc',
+ '<(chromium_root)/base/strings/string_number_conversions.h',
+ '<(chromium_root)/base/strings/string_piece.cc',
+ '<(chromium_root)/base/strings/string_piece.h',
+ '<(chromium_root)/base/strings/string_split.cc',
+ '<(chromium_root)/base/strings/string_split.h',
+ '<(chromium_root)/base/strings/string_util.cc',
+ '<(chromium_root)/base/strings/string_util.h',
+ '<(chromium_root)/base/strings/string_util_constants.cc',
+ '<(chromium_root)/base/strings/string_util_win.h',
+ '<(chromium_root)/base/strings/stringprintf.cc',
+ '<(chromium_root)/base/strings/stringprintf.h',
+ '<(chromium_root)/base/strings/sys_string_conversions.h',
+ '<(chromium_root)/base/strings/sys_string_conversions_mac.mm',
+ '<(chromium_root)/base/strings/sys_string_conversions_posix.cc',
+ '<(chromium_root)/base/strings/sys_string_conversions_win.cc',
+ '<(chromium_root)/base/strings/utf_string_conversion_utils.cc',
+ '<(chromium_root)/base/strings/utf_string_conversion_utils.h',
+ '<(chromium_root)/base/strings/utf_string_conversions.cc',
+ '<(chromium_root)/base/strings/utf_string_conversions.h',
+ '<(chromium_root)/base/synchronization/cancellation_flag.cc',
+ '<(chromium_root)/base/synchronization/cancellation_flag.h',
+ '<(chromium_root)/base/synchronization/condition_variable.h',
+ '<(chromium_root)/base/synchronization/condition_variable_posix.cc',
+ '<(chromium_root)/base/synchronization/condition_variable_win.cc',
+ '<(chromium_root)/base/synchronization/lock.cc',
+ '<(chromium_root)/base/synchronization/lock.h',
+ '<(chromium_root)/base/synchronization/lock_impl.h',
+ '<(chromium_root)/base/synchronization/lock_impl_posix.cc',
+ '<(chromium_root)/base/synchronization/lock_impl_win.cc',
+ '<(chromium_root)/base/synchronization/spin_wait.h',
+ '<(chromium_root)/base/synchronization/waitable_event.h',
+ '<(chromium_root)/base/synchronization/waitable_event_posix.cc',
+ '<(chromium_root)/base/synchronization/waitable_event_watcher.h',
+ '<(chromium_root)/base/synchronization/waitable_event_watcher_posix.cc',
+ '<(chromium_root)/base/synchronization/waitable_event_watcher_win.cc',
+ '<(chromium_root)/base/synchronization/waitable_event_win.cc',
+ '<(chromium_root)/base/threading/platform_thread.h',
+ '<(chromium_root)/base/threading/platform_thread_linux.cc',
+ '<(chromium_root)/base/threading/platform_thread_mac.mm',
+ '<(chromium_root)/base/threading/platform_thread_posix.cc',
+ '<(chromium_root)/base/threading/platform_thread_win.cc',
+ '<(chromium_root)/base/threading/thread_collision_warner.cc',
+ '<(chromium_root)/base/threading/thread_collision_warner.h',
+ '<(chromium_root)/base/threading/thread_id_name_manager.cc',
+ '<(chromium_root)/base/threading/thread_id_name_manager.h',
+ '<(chromium_root)/base/threading/thread_local.h',
+ '<(chromium_root)/base/threading/thread_local_posix.cc',
+ '<(chromium_root)/base/threading/thread_local_storage.cc',
+ '<(chromium_root)/base/threading/thread_local_storage.h',
+ '<(chromium_root)/base/threading/thread_local_storage_posix.cc',
+ '<(chromium_root)/base/threading/thread_local_storage_win.cc',
+ '<(chromium_root)/base/threading/thread_local_win.cc',
+ '<(chromium_root)/base/threading/thread_restrictions.cc',
+ '<(chromium_root)/base/threading/thread_restrictions.h',
+ '<(chromium_root)/base/time/time.cc',
+ '<(chromium_root)/base/time/time.h',
+ '<(chromium_root)/base/time/time_mac.cc',
+ '<(chromium_root)/base/time/time_posix.cc',
+ '<(chromium_root)/base/time/time_win.cc',
+ '<(chromium_root)/base/tracked_objects.cc',
+ '<(chromium_root)/base/tracked_objects.h',
+ '<(chromium_root)/base/values.cc',
+ '<(chromium_root)/base/values.h',
+ '<(chromium_root)/base/vlog.cc',
+ '<(chromium_root)/base/vlog.h',
+ '<(chromium_root)/base/win/registry.cc',
+ '<(chromium_root)/base/win/registry.h',
+ '<(chromium_root)/base/win/scoped_handle.cc',
+ '<(chromium_root)/base/win/scoped_handle.h',
+ '<(chromium_root)/base/win/win_util.cc',
+ '<(chromium_root)/base/win/win_util.h',
+ '<(chromium_root)/base/win/windows_version.cc',
+ '<(chromium_root)/base/profiler/tracked_time.h',
+ '<(chromium_root)/base/profiler/tracked_time.cc',
+ '<(chromium_root)/base/profiler/alternate_timer.h',
+ '<(chromium_root)/base/profiler/alternate_timer.cc',
+ '<(chromium_root)/base/win/windows_version.h',
+ ],
+ 'include_dirs': [
+ '<(chromium_root)',
+ '<(DEPTH)',
+ ],
+ # These warnings are needed for the files in third_party\dmg_fp.
+ 'msvs_disabled_warnings': [
+ 4244, 4554, 4018, 4102,
+ ],
+ 'mac_framework_dirs': [
+ '$(SDKROOT)/System/Library/Frameworks/ApplicationServices.framework/Frameworks',
+ ],
+ 'conditions': [
+ [ 'OS != "linux" and OS != "freebsd" and OS != "openbsd" and OS != "solaris"', {
+ 'sources!': [
+ '<(chromium_root)/base/atomicops_internals_x86_gcc.cc',
+ ],
+ },],
+ ['OS != "win"', {
+ 'sources/': [ ['exclude', '^win/'] ],
+ },
+ ],
+ [ 'OS == "win"', {
+ 'sources!': [
+ '<(chromium_root)/base/strings/string16.cc',
+ ],
+ },],
+ ],
+ }],
+ ],
+ },
+ 'targets': [
+ # Older assemblers don't recognize the xgetbv opcode, and require explicit
+ # bytes instead. These can be found by searching the web; example:
+ # http://lxr.free-electrons.com/source/arch/x86/include/asm/xcr.h#L31
+ {
+ 'target_name': 'cpu_patched',
+ 'type': 'none',
+ 'sources': [
+ '<(chromium_root)/base/cpu.cc',
+ '<(chromium_root)/base/cpu_patched.cc',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'Patch cpu.cc',
+ 'inputs': [
+ '<(chromium_root)/base/cpu.cc',
+ ],
+ 'outputs': [
+ '<(chromium_root)/base/cpu_patched.cc',
+ ],
+ 'action': [
+ 'bash', '-c',
+ 'sed \'s/"xgetbv"/".byte 0x0f, 0x01, 0xd0"/\' <@(_inputs) > <@(_outputs)'
+ ],
+ 'message': 'Attempting to generate patched <@(_outputs) from <@(_inputs)',
+ },
+ ],
+ },
+ {
+ 'target_name': 'base',
+ 'type': '<(component)',
+ 'variables': {
+ 'base_target': 1,
+ },
+ 'dependencies': [
+ 'base_static',
+ 'cpu_patched',
+ '<(DEPTH)/third_party/modp_b64/modp_b64.gyp:modp_b64',
+ '<(chromium_root)/base/third_party/dynamic_annotations/dynamic_annotations.gyp:dynamic_annotations',
+ ],
+ # TODO(gregoryd): direct_dependent_settings should be shared with the
+ # 64-bit target, but it doesn't work due to a bug in gyp
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(chromium_root)',
+ '<(DEPTH)',
+ ],
+ },
+ 'conditions': [
+ [ 'OS == "linux"', {
+ 'cflags': [
+ '-Wno-write-strings',
+ '-Wno-error',
+ ],
+ 'conditions': [
+ [ 'build_nacl==0', {
+ # We do not need clock_gettime() when building for NaCL newlib.
+ 'link_settings': {
+ 'libraries': [
+ # We need rt for clock_gettime().
+ '-lrt',
+ ],
+ },
+ }],
+ ],
+ }],
+ [ 'OS == "mac"', {
+ 'link_settings': {
+ 'libraries': [
+ '$(SDKROOT)/System/Library/Frameworks/AppKit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/Carbon.framework',
+ '$(SDKROOT)/System/Library/Frameworks/CoreFoundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/Foundation.framework',
+ '$(SDKROOT)/System/Library/Frameworks/IOKit.framework',
+ '$(SDKROOT)/System/Library/Frameworks/Security.framework',
+ ],
+ },
+ },],
+ [ 'build_nacl==1', {
+ 'defines': [
+ # A super-hack. prtime.cc (and possibly other sources) call
+ # timegm, which is a non-standard function that's
+ # unavailable when compiling using NaCl newlib. mktime is
+ # essentially a drop-in replacement for timegm, modulo time
+ # zone issues, however NaCL will default to UTC which is the
+ # expected behavior for timegm, so the two should behave
+ # identically.
+ 'timegm=mktime',
+ ],
+ }],
+ ],
+ 'sources': [
+ '<(chromium_root)/base/base64.cc',
+ '<(chromium_root)/base/base64.h',
+ '<(chromium_root)/base/md5.cc',
+ '<(chromium_root)/base/md5.h',
+ '<(chromium_root)/base/strings/string16.cc',
+ '<(chromium_root)/base/strings/string16.h',
+ ],
+ },
+ ],
+}
diff --git a/src/base/mac/foundation_util.mm b/src/base/mac/foundation_util.mm
new file mode 100644
index 0000000..32a88db
--- /dev/null
+++ b/src/base/mac/foundation_util.mm
@@ -0,0 +1,35 @@
+// Copyright 2013 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Chromium's foundation_util.cc pulls a lot of mac related functions into the
+// base package. We don't need them, so strip down all the code.
+
+#include "base/mac/foundation_util.h"
+
+namespace base {
+namespace mac {
+
+void* CFTypeRefToNSObjectAutorelease(CFTypeRef cf_object) {
+ // When GC is on, NSMakeCollectable marks cf_object for GC and autorelease
+ // is a no-op.
+ //
+ // In the traditional GC-less environment, NSMakeCollectable is a no-op,
+ // and cf_object is autoreleased, balancing out the caller's ownership claim.
+ //
+ // NSMakeCollectable returns nil when used on a NULL object.
+ return [NSMakeCollectable(cf_object) autorelease];
+}
+
+} // namespace mac
+} // namespace base
diff --git a/src/base/nacl_stubs/stack_trace_posix.cc b/src/base/nacl_stubs/stack_trace_posix.cc
new file mode 100644
index 0000000..e691cd8
--- /dev/null
+++ b/src/base/nacl_stubs/stack_trace_posix.cc
@@ -0,0 +1,23 @@
+// Copyright (c) 2011 The Chromium Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style license that can be
+// found in the LICENSE file.
+//
+// NaCL newlib is not compatible with the default
+// stack_trace_posix.cc. So we provide this stubbed out version for
+// use when building for NaCL.
+
+#ifndef __native_client__
+#error This file should only be used when compiling for Native Client.
+#endif
+
+#include "base/debug/stack_trace.h"
+
+namespace base {
+namespace debug {
+
+StackTrace::StackTrace() {}
+void StackTrace::PrintBacktrace() const {}
+void StackTrace::OutputToStream(std::ostream* os) const {}
+
+} // namespace debug
+} // namespace base
diff --git a/src/base/process_util.cc b/src/base/process_util.cc
new file mode 100644
index 0000000..2b36d3e
--- /dev/null
+++ b/src/base/process_util.cc
@@ -0,0 +1,29 @@
+// Copyright 2013 Google Inc. All Rights Reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Chromium's process.cc pulls a lot of file related functions into the
+// base package. We don't need them, so strip down all the code.
+
+#include "base/logging.h"
+#include "base/process/process.h"
+
+namespace base {
+
+// Returns the id of the current process.
+ProcessId GetCurrentProcId() {
+ DCHECK(false); // we don't actually expect this to be called.
+ return 0;
+}
+
+} // namespace base
diff --git a/src/build/README.pagespeed b/src/build/README.pagespeed
new file mode 100644
index 0000000..23c847d
--- /dev/null
+++ b/src/build/README.pagespeed
@@ -0,0 +1,13 @@
+The following files in this directory were copied from chromium's repository at
+revision 256281 (https://src.chromium.org/svn/trunk/src/build/?p=256281).
+
+compiler_version.py (with local bugfix described at the top)
+filename_rules.gypi
+get_landmines.py
+grit_action.gypi
+gyp_chromium (with minor local modifications described at the top)
+gyp_helper.py
+java.gypi
+landmine_utils.py
+landmines.py
+release.gypi
diff --git a/src/build/all.gyp b/src/build/all.gyp
new file mode 100644
index 0000000..2f18df8
--- /dev/null
+++ b/src/build/all.gyp
@@ -0,0 +1,71 @@
+# Copyright 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'All',
+ 'type': 'none',
+ 'xcode_create_dependents_test_runner': 1,
+ 'dependencies': [
+ 'mod_pagespeed',
+ 'test',
+ 'js_minify',
+ 'pagespeed_automatic',
+ ],},
+ {
+ 'target_name': 'mod_pagespeed',
+ 'type': 'none',
+ 'dependencies': [
+ '../net/instaweb/instaweb.gyp:instaweb_rewriter',
+ '../net/instaweb/instaweb_apr.gyp:*',
+ '../net/instaweb/mod_pagespeed.gyp:mod_pagespeed',
+ 'install.gyp:*',
+ ],
+ 'conditions': [
+ ['use_system_apache_dev==0', {
+ 'dependencies+': [
+ '../net/instaweb/mod_pagespeed.gyp:mod_pagespeed_ap24',
+ ],
+ }],
+ ]},
+ {
+ 'target_name': 'pagespeed_automatic',
+ 'type': 'none',
+ 'dependencies': [
+ '../net/instaweb/test.gyp:pagespeed_automatic_test',
+ '../net/instaweb/instaweb.gyp:automatic_util',
+ ],},
+ {
+ 'target_name': 'test',
+ 'type': 'none',
+ 'xcode_create_dependents_test_runner': 1,
+ 'dependencies': [
+ '../net/instaweb/instaweb.gyp:*',
+ '../net/instaweb/instaweb_core.gyp:*',
+ '../net/instaweb/instaweb_apr.gyp:*',
+ '../net/instaweb/test.gyp:mod_pagespeed_test',
+ '../net/instaweb/test.gyp:mod_pagespeed_speed_test',
+ 'install.gyp:*',
+ ]
+ },
+ {
+ 'target_name': 'js_minify',
+ 'type': 'none',
+ 'dependencies': [
+ '../net/instaweb/instaweb.gyp:js_minify',
+ ],
+ },
+ ],
+}
diff --git a/src/build/android/create_standalone_apk_action.gypi b/src/build/android/create_standalone_apk_action.gypi
new file mode 100644
index 0000000..d17af7c
--- /dev/null
+++ b/src/build/android/create_standalone_apk_action.gypi
@@ -0,0 +1,41 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# combines a directory of shared libraries and an incomplete APK into a
+# standalone APK.
+#
+# To use this, create a gyp action with the following form:
+# {
+# 'action_name': 'some descriptive action name',
+# 'variables': {
+# 'inputs': [ 'input_path1', 'input_path2' ],
+# 'input_apk_path': '<(unsigned_apk_path)',
+# 'output_apk_path': '<(unsigned_standalone_apk_path)',
+# 'libraries_top_dir': '<(libraries_top_dir)',
+# },
+# 'includes': [ 'relative/path/to/create_standalone_apk_action.gypi' ],
+# },
+
+{
+ 'message': 'Creating standalone APK: <(output_apk_path)',
+ 'variables': {
+ 'inputs': [],
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+ '<(input_apk_path)',
+ '>@(inputs)',
+ ],
+ 'outputs': [
+ '<(output_apk_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/create_standalone_apk.py',
+ '--libraries-top-dir=<(libraries_top_dir)',
+ '--input-apk-path=<(input_apk_path)',
+ '--output-apk-path=<(output_apk_path)',
+ ],
+}
diff --git a/src/build/android/dex_action.gypi b/src/build/android/dex_action.gypi
new file mode 100644
index 0000000..7e24d1e
--- /dev/null
+++ b/src/build/android/dex_action.gypi
@@ -0,0 +1,56 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that dexes
+# compiled java files. If proguard_enabled == "true" and CONFIGURATION_NAME ==
+# "Release", then it will dex the proguard_enabled_input_path instead of the
+# normal dex_input_paths/dex_generated_input_paths.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'action_name': 'some name for the action'
+# 'actions': [
+# 'variables': {
+# 'dex_input_paths': [ 'files to dex (when proguard is not used) and add to input paths' ],
+# 'dex_generated_input_dirs': [ 'dirs that contain generated files to dex' ],
+#
+# # For targets that use proguard:
+# 'proguard_enabled': 'true',
+# 'proguard_enabled_input_path': 'path to dex when using proguard',
+# },
+# 'includes': [ 'relative/path/to/dex_action.gypi' ],
+# ],
+# },
+#
+
+{
+ 'message': 'Creating dex file: <(output_path)',
+ 'variables': {
+ 'dex_input_paths': [],
+ 'dex_generated_input_dirs': [],
+ 'proguard_enabled%': 'false',
+ 'proguard_enabled_input_path%': '',
+ 'dex_no_locals%': 0,
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/util/md5_check.py',
+ '<(DEPTH)/build/android/gyp/dex.py',
+ '>@(dex_input_paths)',
+ ],
+ 'outputs': [
+ '<(output_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/dex.py',
+ '--dex-path=<(output_path)',
+ '--android-sdk-tools=<(android_sdk_tools)',
+ '--configuration-name=<(CONFIGURATION_NAME)',
+ '--proguard-enabled=<(proguard_enabled)',
+ '--proguard-enabled-input-path=<(proguard_enabled_input_path)',
+ '--no-locals=<(dex_no_locals)',
+ '>@(dex_input_paths)',
+ '>@(dex_generated_input_dirs)',
+ ]
+}
diff --git a/src/build/android/finalize_apk_action.gypi b/src/build/android/finalize_apk_action.gypi
new file mode 100644
index 0000000..52aaa93
--- /dev/null
+++ b/src/build/android/finalize_apk_action.gypi
@@ -0,0 +1,40 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide an action that
+# signs and zipaligns an APK.
+#
+# To use this, create a gyp action with the following form:
+# {
+# 'action_name': 'some descriptive action name',
+# 'variables': {
+# 'input_apk_path': 'relative/path/to/input.apk',
+# 'output_apk_path': 'relative/path/to/output.apk',
+# },
+# 'includes': [ '../../build/android/finalize_apk.gypi' ],
+# },
+#
+
+{
+ 'message': 'Signing/aligning <(_target_name) APK: <(input_apk_path)',
+ 'variables': {
+ 'keystore_path%': '<(DEPTH)/build/android/ant/chromium-debug.keystore',
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/finalize_apk.py',
+ '<(keystore_path)',
+ '<(input_apk_path)',
+ ],
+ 'outputs': [
+ '<(output_apk_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/finalize_apk.py',
+ '--android-sdk-root=<(android_sdk_root)',
+ '--unsigned-apk-path=<(input_apk_path)',
+ '--final-apk-path=<(output_apk_path)',
+ '--keystore-path=<(keystore_path)',
+ ],
+}
diff --git a/src/build/android/instr_action.gypi b/src/build/android/instr_action.gypi
new file mode 100644
index 0000000..fa6d062
--- /dev/null
+++ b/src/build/android/instr_action.gypi
@@ -0,0 +1,53 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule that
+# instruments either java class files, or jars.
+
+{
+ 'variables': {
+ 'instr_type%': 'jar',
+ 'input_path%': '',
+ 'output_path%': '',
+ 'stamp_path%': '',
+ 'extra_instr_args': [
+ '--coverage-file=<(_target_name).em',
+ '--sources-file=<(_target_name)_sources.txt',
+ ],
+ 'emma_jar': '<(android_sdk_root)/tools/lib/emma.jar',
+ 'conditions': [
+ ['emma_instrument != 0', {
+ 'extra_instr_args': [
+ '--sources=<(java_in_dir)/src >(additional_src_dirs) >(generated_src_dirs)',
+ '--src-root=<(DEPTH)',
+ '--emma-jar=<(emma_jar)',
+ '--filter-string=<(emma_filter)',
+ ],
+ 'conditions': [
+ ['instr_type == "jar"', {
+ 'instr_action': 'instrument_jar',
+ }, {
+ 'instr_action': 'instrument_classes',
+ }]
+ ],
+ }, {
+ 'instr_action': 'copy',
+ 'extra_instr_args': [],
+ }]
+ ]
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/emma_instr.py',
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/pylib/utils/command_option_parser.py',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/emma_instr.py',
+ '<(instr_action)',
+ '--input-path=<(input_path)',
+ '--output-path=<(output_path)',
+ '--stamp=<(stamp_path)',
+ '<@(extra_instr_args)',
+ ]
+}
diff --git a/src/build/android/lint_action.gypi b/src/build/android/lint_action.gypi
new file mode 100644
index 0000000..dd0bbd2
--- /dev/null
+++ b/src/build/android/lint_action.gypi
@@ -0,0 +1,39 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to provide a rule to
+# run lint on java/class files.
+
+{
+ 'action_name': 'lint_<(_target_name)',
+ 'message': 'Linting <(_target_name)',
+ 'variables': {
+ 'conditions': [
+ ['chromium_code != 0 and android_lint != 0 and never_lint == 0', {
+ 'is_enabled': '--enable',
+ }, {
+ 'is_enabled': '',
+ }]
+ ]
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/lint.py',
+ '<(DEPTH)/build/android/lint/suppressions.xml',
+ '<(DEPTH)/build/android/AndroidManifest.xml',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/lint.py',
+ '--lint-path=<(android_sdk_root)/tools/lint',
+ '--config-path=<(DEPTH)/build/android/lint/suppressions.xml',
+ '--processed-config-path=<(config_path)',
+ '--manifest-path=<(DEPTH)/build/android/AndroidManifest.xml',
+ '--result-path=<(result_path)',
+ '--product-dir=<(PRODUCT_DIR)',
+ '--src-dirs=>(src_dirs)',
+ '--classes-dir=<(classes_dir)',
+ '--stamp=<(stamp_path)',
+ '<(is_enabled)',
+ ],
+}
diff --git a/src/build/build_util.gyp b/src/build/build_util.gyp
new file mode 100644
index 0000000..59bbf5b
--- /dev/null
+++ b/src/build/build_util.gyp
@@ -0,0 +1,86 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'version_py_path': 'version.py',
+ 'instaweb_path': '<(DEPTH)/net/instaweb',
+ 'version_path': '<(instaweb_path)/public/VERSION',
+ 'version_h_in_path': '<(instaweb_path)/public/version.h.in',
+ 'public_path' : 'net/instaweb/public',
+ 'version_h_path': '<(SHARED_INTERMEDIATE_DIR)/<(public_path)/version.h',
+ 'lastchange_out_path': '<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
+ },
+ 'targets': [
+ {
+ 'target_name': 'lastchange',
+ 'type': 'none',
+ 'variables': {
+ 'default_lastchange_path': '../LASTCHANGE.in',
+ },
+ 'actions': [
+ {
+ 'action_name': 'lastchange',
+ 'inputs': [
+ # Note: <(default_lastchange_path) is optional,
+ # so it doesn't show up in inputs.
+ '<(DEPTH)/build/lastchange.sh',
+ ],
+ 'outputs': [
+ '<(lastchange_out_path).always',
+ '<(lastchange_out_path)',
+ ],
+ 'action': [
+ '/bin/sh', '<@(_inputs)',
+ '<(DEPTH)',
+ '-o', '<(lastchange_out_path)',
+ '-d', '<(default_lastchange_path)',
+ ],
+ 'message': 'Extracting last change to <(lastchange_out_path)',
+ 'process_outputs_as_sources': '1',
+ },
+ ],
+ },
+ {
+ 'target_name': 'mod_pagespeed_version_header',
+ 'type': 'none',
+ 'dependencies': [
+ 'lastchange',
+ ],
+ 'actions': [
+ {
+ 'action_name': 'version_header',
+ 'inputs': [
+ '<(version_path)',
+ '<(lastchange_out_path)',
+ '<(version_h_in_path)',
+ ],
+ 'outputs': [
+ '<(version_h_path)',
+ ],
+ 'action': [
+ 'python',
+ '<(version_py_path)',
+ '-f', '<(version_path)',
+ '-f', '<(lastchange_out_path)',
+ '<(version_h_in_path)',
+ '<@(_outputs)',
+ ],
+ 'message': 'Generating version header file: <@(_outputs)',
+ },
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': [
+ '<(SHARED_INTERMEDIATE_DIR)',
+ ],
+ },
+ },
+ ]
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/build/clang_version.py b/src/build/clang_version.py
new file mode 100755
index 0000000..e67c1fc
--- /dev/null
+++ b/src/build/clang_version.py
@@ -0,0 +1,57 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+
+"""Compiler version checking tool for clang.
+
+(Based on corresponding tool for gcc in Chromium build system).
+
+Prints X*100 + Y if $CXX is pointing to clang X.Y.*. Prints 0 otherwise.
+Note that this output convention is different from compiler_version.py's. This
+also never returns a failing status, since we want to run this even on systems
+without clang, and gyp will complain on a non-successful status.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+
+def GetVersion(compiler):
+ try:
+ compiler = compiler + " --version"
+ pipe = subprocess.Popen(compiler, shell=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ output, error = pipe.communicate()
+ if pipe.returncode:
+ raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+ result = re.search(r"clang version (\d+)\.?(\d+)?", output)
+ if result is None:
+ return "0"
+ minor_version = result.group(2)
+ if minor_version is None:
+ minor_version = "0"
+ return str(int(result.group(1)) * 100 + int(minor_version))
+ except Exception, e:
+ if error:
+ sys.stderr.write(error)
+ print >> sys.stderr, "clang_version.py failed to execute:", compiler
+ print >> sys.stderr, e
+ return "0"
+
+
+def main():
+ # Check if CXX environment variable exists, and if it does use that compiler.
+ cxx = os.getenv("CXX", None)
+ if cxx:
+ print GetVersion(cxx)
+ else:
+ print "0"
+
+if __name__ == "__main__":
+ main()
+ sys.exit(0)
diff --git a/src/build/common.gypi b/src/build/common.gypi
new file mode 100644
index 0000000..4aef4f2
--- /dev/null
+++ b/src/build/common.gypi
@@ -0,0 +1,182 @@
+# Copyright 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+{
+ 'variables': {
+ # This should normally be passed in by gclient's hooks
+ 'chromium_revision%': 256281,
+
+ # Make sure we link statically so everything gets linked into a
+ # single shared object.
+ 'library': 'static_library',
+
+ # We're building a shared library, so everything needs to be built
+ # with Position-Independent Code.
+ 'linux_fpic': 1,
+
+ 'instaweb_src_root': 'net/instaweb',
+
+ # Define the overridable use_system_libs variable in its own
+ # nested block, so it's available for use in the conditions block
+ # below.
+ 'variables': {
+ 'use_system_libs%': 0,
+ },
+
+ # Which versions development is usually done with. These version will
+ # get -Werror
+ 'gcc_devel_version%': '46',
+ 'gcc_devel_version2%': '48',
+
+ # We need inter-process mutexes to support POSIX shared memory, and they're
+ # unfortunately not supported on some common systems.
+ 'support_posix_shared_mem%': 0,
+
+ 'conditions': [
+ # TODO(morlovich): AIX, Solaris, FreeBSD10?
+ ['OS == "linux"', {
+ 'support_posix_shared_mem': 1
+ }],
+ ['use_system_libs==1', {
+ 'use_system_apache_dev': 1,
+ 'use_system_icu': 1,
+ 'use_system_libjpeg': 1,
+ 'use_system_libpng': 1,
+ 'use_system_opencv': 1,
+ 'use_system_openssl': 1,
+ 'use_system_zlib': 1,
+ },{
+ 'use_system_apache_dev%': 0,
+ }],
+ ],
+ },
+ 'includes': [
+ # Import base Chromium build system, and pagespeed customizations of it.
+ '../third_party/chromium/src/build/common.gypi',
+ 'pagespeed_overrides.gypi',
+ ],
+ 'target_defaults': {
+ 'conditions': [
+ ['support_posix_shared_mem == 1', {
+ 'defines': [ 'PAGESPEED_SUPPORT_POSIX_SHARED_MEM', ],
+ }],
+ ['OS == "linux"', {
+ # Disable -Werror when not using the version of gcc that development
+ # is generally done with, to avoid breaking things for users with
+ # something older or newer (which produces different warnings).
+ 'conditions': [
+ ['<(gcc_version) != <(gcc_devel_version) and '
+ '<(gcc_version) != <(gcc_devel_version2)', {
+ 'cflags!': ['-Werror']
+ }],
+ # Newer Chromium common.gypi adds -Wno-unused-but-set-variable
+ # (unconditionally). This is wrong for gcc < 4.6, since the flag
+ # was added in 4.6, but very much needed for >= 4.6 since
+ # otherwise ICU headers don't build with -Werror.
+ #
+ # At the moment, we need to support both building with gcc < 4.6
+ # and building with old Chromium --- so we remove the flag for
+ # < 4.6 gcc, and add it for newer versions.
+ # TODO(morlovich): Upstream, but how high?
+ ['<(gcc_version) < 46', {
+ 'cflags!': ['-Wno-unused-but-set-variable']
+ }, {
+ 'cflags+': ['-Wno-unused-but-set-variable']
+ }],
+ # Similarly, there is no -Wno-unused-result for gcc < 4.5
+ ['<(gcc_version) < 45', {
+ 'cflags!': ['-Wno-unused-result']
+ }],
+ ['<(gcc_version) == 46', {
+ 'cflags+': ['-Wno-sign-compare']
+ }],
+ ],
+ 'cflags': [
+ # Our dependency on OpenCV need us to turn on exceptions.
+ '-fexceptions',
+ # Now we are using exceptions. -fno-asynchronous-unwind-tables is
+ # set in libpagespeed's common.gypi. Now enable it.
+ '-fasynchronous-unwind-tables',
+ # We'd like to add '-Wtype-limits', but this does not work on
+ # earlier versions of g++ on supported operating systems.
+ ],
+ 'cflags_cc!': [
+ # Newer Chromium build adds -Wsign-compare which we have some
+ # difficulty with. Remove it for now.
+ '-Wsign-compare',
+ '-fno-rtti', # Same reason as using -frtti below.
+ ],
+ 'cflags_cc': [
+ '-frtti', # Hardy's g++ 4.2 <trl/function> uses typeid
+ ],
+ 'defines!': [
+ # testing/gtest.gyp defines GTEST_HAS_RTTI=0 for itself and all deps.
+ # This breaks when we turn rtti on, so must be removed.
+ 'GTEST_HAS_RTTI=0',
+ # third_party/protobuf/protobuf.gyp defines GOOGLE_PROTOBUF_NO_RTTI
+ # for itself and all deps. I assume this is just a ticking time bomb
+ # like GTEST_HAS_RTTI=0 was, so remove it as well.
+ 'GOOGLE_PROTOBUF_NO_RTTI',
+ ],
+ 'defines': [
+ 'GTEST_HAS_RTTI=1', # gtest requires this set to indicate RTTI on.
+ ],
+ # Disable -z,defs in linker.
+ # This causes mod_pagespeed.so to fail because it doesn't link apache
+ # libraries.
+ 'ldflags!': [
+ '-Wl,-z,defs',
+ ],
+ }],
+ ['OS == "mac"', {
+ 'xcode_settings':{
+ 'GCC_ENABLE_CPP_EXCEPTIONS': 'YES', # -fexceptions
+ 'GCC_ENABLE_CPP_RTTI': 'YES', # -frtti
+
+ # The Google CSS parser escapes from functions without
+ # returning anything. Only with flow analysis that is,
+ # evidently, beyond the scope of the g++ configuration in
+ # MacOS, do we see those paths cannot be reached.
+ 'OTHER_CFLAGS': ['-funsigned-char', '-Wno-error'],
+ },
+ }],
+ ],
+
+ 'defines': [ 'CHROMIUM_REVISION=<(chromium_revision)',
+ # See https://gcc.gnu.org/onlinedocs/libstdc++/manual/using_dual_abi.html
+ '_GLIBCXX_USE_CXX11_ABI=0',
+ '__STDC_LIMIT_MACROS',],
+
+ # We don't want -std=gnu++0x (enabled by some versions of libpagespeed)
+ # since it can cause binary compatibility problems; see issue 453.
+ 'cflags!': [
+ '-std=gnu++0x'
+ ],
+
+ # Permit building us with coverage information
+ 'configurations': {
+ 'Debug_Coverage': {
+ 'inherit_from': ['Debug'],
+ 'cflags': [
+ '-ftest-coverage',
+ '-fprofile-arcs',
+ ],
+ 'ldflags': [
+ # takes care of -lgcov for us, but can be in a build configuration
+ '-ftest-coverage -fprofile-arcs',
+ ],
+ },
+ },
+ },
+}
diff --git a/src/build/compiler_version.py b/src/build/compiler_version.py
new file mode 100755
index 0000000..cd02bad
--- /dev/null
+++ b/src/build/compiler_version.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This version contains a bugfix for the compiler returning a single digit
+# version number, as is the case for gcc 5.
+
+"""Compiler version checking tool for gcc
+
+Print gcc version as XY if you are running gcc X.Y.*.
+This is used to tweak build flags for gcc 4.4.
+"""
+
+import os
+import re
+import subprocess
+import sys
+
+def GetVersion(compiler):
+ try:
+ # Note that compiler could be something tricky like "distcc g++".
+ compiler = compiler + " -dumpversion"
+ pipe = subprocess.Popen(compiler, shell=True,
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ gcc_output, gcc_error = pipe.communicate()
+ if pipe.returncode:
+ raise subprocess.CalledProcessError(pipe.returncode, compiler)
+
+ result = re.match(r"(\d+)\.?(\d+)?", gcc_output)
+ minor_version = result.group(2)
+ if minor_version is None:
+ minor_version = "0"
+ return result.group(1) + minor_version
+ except Exception, e:
+ if gcc_error:
+ sys.stderr.write(gcc_error)
+ print >> sys.stderr, "compiler_version.py failed to execute:", compiler
+ print >> sys.stderr, e
+ return ""
+
+def main():
+ # Check if CXX environment variable exists and
+ # if it does use that compiler.
+ cxx = os.getenv("CXX", None)
+ if cxx:
+ cxxversion = GetVersion(cxx)
+ if cxxversion != "":
+ print cxxversion
+ return 0
+ else:
+ # Otherwise we check the g++ version.
+ gccversion = GetVersion("g++")
+ if gccversion != "":
+ print gccversion
+ return 0
+
+ return 1
+
+if __name__ == "__main__":
+ sys.exit(main())
diff --git a/src/build/dir_exists.py b/src/build/dir_exists.py
new file mode 100755
index 0000000..0a89bc8
--- /dev/null
+++ b/src/build/dir_exists.py
@@ -0,0 +1,15 @@
+#!/usr/bin/env python
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+"""Writes True if the argument is a directory."""
+
+import os.path
+import sys
+
+def main():
+ sys.stdout.write(str(os.path.isdir(sys.argv[1])))
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/build/features_override.gypi b/src/build/features_override.gypi
new file mode 100644
index 0000000..a655733
--- /dev/null
+++ b/src/build/features_override.gypi
@@ -0,0 +1,18 @@
+# Copyright 2009 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Chromium expects this file to be here, but for our (Page Speed) purposes, it
+# doesn't need to actually do anything.
+
+{}
diff --git a/src/build/filename_rules.gypi b/src/build/filename_rules.gypi
new file mode 100644
index 0000000..9bb76c5
--- /dev/null
+++ b/src/build/filename_rules.gypi
@@ -0,0 +1,125 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This gypi file defines the patterns used for determining whether a
+# file is excluded from the build on a given platform. It is
+# included by common.gypi for chromium_code.
+
+{
+ 'target_conditions': [
+ ['OS!="win" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_win(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)win/'],
+ ['exclude', '(^|/)win_[^/]*\\.(h|cc)$'] ],
+ }],
+ ['OS!="mac" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_(cocoa|mac)(_unittest)?\\.(h|cc|mm?)$'],
+ ['exclude', '(^|/)(cocoa|mac)/'] ],
+ }],
+ ['OS!="ios" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_ios(_unittest)?\\.(h|cc|mm?)$'],
+ ['exclude', '(^|/)ios/'] ],
+ }],
+ ['(OS!="mac" and OS!="ios") or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '\\.mm?$' ] ],
+ }],
+ # Do not exclude the linux files on *BSD since most of them can be
+ # shared at this point.
+ # In case a file is not needed, it is going to be excluded later on.
+ # TODO(evan): the above is not correct; we shouldn't build _linux
+ # files on non-linux.
+ ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+ 'sources/': [
+ ['exclude', '_linux(_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)linux/'],
+ ],
+ }],
+ ['OS!="android" or _toolset=="host"', {
+ 'sources/': [
+ ['exclude', '_android(_unittest)?\\.cc$'],
+ ['exclude', '(^|/)android/'],
+ ],
+ }],
+ ['OS=="win" and >(nacl_untrusted_build)==0', {
+ 'sources/': [
+ ['exclude', '_posix(_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)posix/'],
+ ],
+ }],
+ ['<(chromeos)!=1 or >(nacl_untrusted_build)==1', {
+ 'sources/': [
+ ['exclude', '_chromeos(_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)chromeos/'],
+ ],
+ }],
+ ['>(nacl_untrusted_build)==0', {
+ 'sources/': [
+ ['exclude', '_nacl(_unittest)?\\.(h|cc)$'],
+ ],
+ }],
+ ['OS!="linux" and OS!="openbsd" and OS!="freebsd" or >(nacl_untrusted_build)==1', {
+ 'sources/': [
+ ['exclude', '_xdg(_unittest)?\\.(h|cc)$'],
+ ],
+ }],
+ ['<(use_x11)!=1 or >(nacl_untrusted_build)==1', {
+ 'sources/': [
+ ['exclude', '_(x|x11)(_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)x11_[^/]*\\.(h|cc)$'],
+ ['exclude', '(^|/)x11/'],
+ ['exclude', '(^|/)x/'],
+ ],
+ }],
+ ['<(toolkit_uses_gtk)!=1 or >(nacl_untrusted_build)==1', {
+ 'sources/': [
+ ['exclude', '_gtk(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)gtk/'],
+ ['exclude', '(^|/)gtk_[^/]*\\.(h|cc)$'],
+ ],
+ }],
+ ['<(toolkit_views)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_views\\.(h|cc)$'] ]
+ }],
+ ['<(use_aura)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_aura(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)aura/'],
+ ]
+ }],
+ ['<(use_aura)==0 or <(use_x11)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_aurax11(_browsertest|_unittest)?\\.(h|cc)$'] ]
+ }],
+ ['<(use_aura)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_aurawin\\.(h|cc)$'] ]
+ }],
+ ['<(use_aura)==0 or OS!="linux" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_auralinux\\.(h|cc)$'] ]
+ }],
+ ['<(use_ash)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_ash(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)ash/'],
+ ]
+ }],
+ ['<(use_ash)==0 or OS!="win" or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_ashwin\\.(h|cc)$'] ]
+ }],
+ ['<(use_ozone)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_ozone(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)ozone/'],
+ ]
+ }],
+ ['<(use_ozone_evdev)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_evdev(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)evdev/'],
+ ]
+ }],
+ ['<(ozone_platform_dri)==0 or >(nacl_untrusted_build)==1', {
+ 'sources/': [ ['exclude', '_dri(_browsertest|_unittest)?\\.(h|cc)$'],
+ ['exclude', '(^|/)dri/'],
+ ]
+ }],
+ ['<(use_pango)==0', {
+ 'sources/': [ ['exclude', '(^|_)pango(_util|_browsertest|_unittest)?\\.(h|cc)$'], ],
+ }],
+ ]
+}
diff --git a/src/build/get_landmines.py b/src/build/get_landmines.py
new file mode 100755
index 0000000..ab71608
--- /dev/null
+++ b/src/build/get_landmines.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This file emits the list of reasons why a particular build needs to be clobbered
+(or a list of 'landmines').
+"""
+
+import optparse
+import sys
+
+import landmine_utils
+
+
+builder = landmine_utils.builder
+distributor = landmine_utils.distributor
+gyp_defines = landmine_utils.gyp_defines
+gyp_msvs_version = landmine_utils.gyp_msvs_version
+platform = landmine_utils.platform
+
+
+def print_landmines(target):
+ """
+ ALL LANDMINES ARE EMITTED FROM HERE.
+ target can be one of {'Release', 'Debug', 'Debug_x64', 'Release_x64'}.
+ """
+ if (distributor() == 'goma' and platform() == 'win32' and
+ builder() == 'ninja'):
+ print 'Need to clobber winja goma due to backend cwd cache fix.'
+ if platform() == 'android':
+ print 'Clobber: Autogen java file needs to be removed (issue 159173002)'
+ if platform() == 'win' and builder() == 'ninja':
+ print 'Compile on cc_unittests fails due to symbols removed in r185063.'
+ if platform() == 'linux' and builder() == 'ninja':
+ print 'Builders switching from make to ninja will clobber on this.'
+ if platform() == 'mac':
+ print 'Switching from bundle to unbundled dylib (issue 14743002).'
+ if platform() in ('win', 'mac'):
+ print ('Improper dependency for create_nmf.py broke in r240802, '
+ 'fixed in r240860.')
+ if (platform() == 'win' and builder() == 'ninja' and
+ gyp_msvs_version() == '2012' and
+ gyp_defines().get('target_arch') == 'x64' and
+ gyp_defines().get('dcheck_always_on') == '1'):
+ print "Switched win x64 trybots from VS2010 to VS2012."
+ if (platform() == 'win' and builder() == 'ninja' and
+ gyp_msvs_version().startswith('2013')):
+ print "Switched win from VS2010 to VS2013."
+ print 'Need to clobber everything due to an IDL change in r154579 (blink)'
+ if (platform() != 'ios'):
+ print 'Clobber to get rid of obselete test plugin after r248358'
+
+
+def main():
+ parser = optparse.OptionParser()
+ parser.add_option('-t', '--target',
+                    help='Target for which the landmines have to be emitted')
+
+ options, args = parser.parse_args()
+
+ if args:
+ parser.error('Unknown arguments %s' % args)
+
+ print_landmines(options.target)
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/build/grit_action.gypi b/src/build/grit_action.gypi
new file mode 100644
index 0000000..fef961f
--- /dev/null
+++ b/src/build/grit_action.gypi
@@ -0,0 +1,42 @@
+# Copyright (c) 2011 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into an action to invoke grit in a
+# consistent manner. To use this the following variables need to be
+# defined:
+# grit_grd_file: string: grd file path
+# grit_out_dir: string: the output directory path
+
+# It would be really nice to do this with a rule instead of actions, but it
+# would need to determine inputs and outputs via grit_info on a per-file
+# basis. GYP rules don't currently support that. They could be extended to
+# do this, but then every generator would need to be updated to handle this.
+
+{
+ 'variables': {
+ 'grit_cmd': ['python', '<(DEPTH)/tools/grit/grit.py'],
+ 'grit_resource_ids%': '<(DEPTH)/tools/gritsettings/resource_ids',
+ # This makes it possible to add more defines in specific targets,
+ # instead of build/common.gypi .
+ 'grit_additional_defines%': [],
+ 'grit_rc_header_format%': [],
+ },
+ 'inputs': [
+ '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+ '--inputs <(grit_grd_file) -f "<(grit_resource_ids)")',
+ ],
+ 'outputs': [
+ '<!@pymod_do_main(grit_info <@(grit_defines) <@(grit_additional_defines) '
+ '--outputs \'<(grit_out_dir)\' '
+ '<(grit_grd_file) -f "<(grit_resource_ids)")',
+ ],
+ 'action': ['<@(grit_cmd)',
+ '-i', '<(grit_grd_file)', 'build',
+ '-f', '<(grit_resource_ids)',
+ '-o', '<(grit_out_dir)',
+ '<@(grit_defines)',
+ '<@(grit_additional_defines)',
+ '<@(grit_rc_header_format)'],
+ 'message': 'Generating resources from <(grit_grd_file)',
+}
diff --git a/src/build/gyp_chromium b/src/build/gyp_chromium
new file mode 100755
index 0000000..8674013
--- /dev/null
+++ b/src/build/gyp_chromium
@@ -0,0 +1,572 @@
+#!/usr/bin/env python
+
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This script is wrapper for Chromium that adds some support for how GYP
+# is invoked by Chromium beyond what can be done in the gclient hooks.
+
+# This was copied from the chromium repository at revision 256281. The only
+# change was adding back support and making default Makefile generation, instead
+# of ninja support. This was removed as discussed in crbug.com/348686.
+
+import glob
+import gyp_helper
+import json
+import os
+import pipes
+import shlex
+import shutil
+import subprocess
+import string
+import sys
+import tempfile
+
+script_dir = os.path.dirname(os.path.realpath(__file__))
+chrome_src = os.path.abspath(os.path.join(script_dir, os.pardir))
+
+sys.path.insert(0, os.path.join(chrome_src, 'tools', 'gyp', 'pylib'))
+import gyp
+
+# Assume this file is in a one-level-deep subdirectory of the source root.
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
+
+# Add paths so that pymod_do_main(...) can import files.
+sys.path.insert(1, os.path.join(chrome_src, 'tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'generate_shim_headers'))
+sys.path.insert(1, os.path.join(chrome_src, 'tools', 'grit'))
+sys.path.insert(1, os.path.join(chrome_src, 'chrome', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'native_client_sdk', 'src',
+ 'build_tools'))
+sys.path.insert(1, os.path.join(chrome_src, 'remoting', 'tools', 'build'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'liblouis'))
+sys.path.insert(1, os.path.join(chrome_src, 'third_party', 'WebKit',
+ 'Source', 'build', 'scripts'))
+
+# On Windows, Psyco shortens warm runs of build/gyp_chromium by about
+# 20 seconds on a z600 machine with 12 GB of RAM, from 90 down to 70
+# seconds. Conversely, memory usage of build/gyp_chromium with Psyco
+# maxes out at about 158 MB vs. 132 MB without it.
+#
+# Psyco uses native libraries, so we need to load a different
+# installation depending on which OS we are running under. It has not
+# been tested whether using Psyco on our Mac and Linux builds is worth
+# it (the GYP running time is a lot shorter, so the JIT startup cost
+# may not be worth it).
+if sys.platform == 'win32':
+ try:
+ sys.path.insert(0, os.path.join(chrome_src, 'third_party', 'psyco_win32'))
+ import psyco
+ except:
+ psyco = None
+else:
+ psyco = None
+
+
+def GetSupplementalFiles():
+ """Returns a list of the supplemental files that are included in all GYP
+ sources."""
+ return glob.glob(os.path.join(chrome_src, '*', 'supplement.gypi'))
+
+
+def FormatKeyForGN(key):
+ """Returns the given GYP key reformatted for GN.
+
+ GYP dictionary keys can be almost anything, but in GN they are identifiers
+ and must follow the same rules. This reformats such keys to be valid GN
+ identifiers."""
+ return ''.join([c if c in string.ascii_letters else '_' for c in key])
+
+
+def EscapeStringForGN(s):
+ """Converts a string to a GN string literal."""
+ for old, new in [('\\', '\\\\'), ('$', '\\$'), ('"', '\\"')]:
+ s = s.replace(old, new)
+ return '"' + s + '"'
+
+
+def ProcessGypDefinesItems(items):
+ """Converts a list of strings to a list of key-value pairs."""
+ result = []
+ for item in items:
+ tokens = item.split('=', 1)
+ # Some GYP variables have hyphens, which we don't support.
+ key = FormatKeyForGN(tokens[0])
+ if len(tokens) == 2:
+ result += [(key, tokens[1])]
+ else:
+ # No value supplied, treat it as a boolean and set it. Note that we
+ # use the string '1' here so we have a consistent definition whether
+ # you do 'foo=1' or 'foo'.
+ result += [(key, '1')]
+ return result
+
+
+def GetGypVarsForGN(supplemental_files):
+ """Returns a dictionary of all GYP vars that we will be passing to GN."""
+ # Find the .gyp directory in the user's home directory.
+ home_dot_gyp = os.environ.get('GYP_CONFIG_DIR', None)
+ if home_dot_gyp:
+ home_dot_gyp = os.path.expanduser(home_dot_gyp)
+ if not home_dot_gyp:
+ home_vars = ['HOME']
+ if sys.platform in ('cygwin', 'win32'):
+ home_vars.append('USERPROFILE')
+ for home_var in home_vars:
+ home = os.getenv(home_var)
+ if home != None:
+ home_dot_gyp = os.path.join(home, '.gyp')
+ if not os.path.exists(home_dot_gyp):
+ home_dot_gyp = None
+ else:
+ break
+
+ if home_dot_gyp:
+ include_gypi = os.path.join(home_dot_gyp, "include.gypi")
+ if os.path.exists(include_gypi):
+ supplemental_files += [include_gypi]
+
+ # GYP defines from the supplemental.gypi files.
+ supp_items = []
+ for supplement in supplemental_files:
+ with open(supplement, 'r') as f:
+ try:
+ file_data = eval(f.read(), {'__builtins__': None}, None)
+ except SyntaxError, e:
+ e.filename = os.path.abspath(supplement)
+ raise
+ variables = file_data.get('variables', [])
+ for v in variables:
+ supp_items += [(FormatKeyForGN(v), str(variables[v]))]
+
+ # GYP defines from the environment.
+ env_items = ProcessGypDefinesItems(
+ shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+ # GYP defines from the command line. We can't use optparse since we want
+ # to ignore all arguments other than "-D".
+ cmdline_input_items = []
+ for i in range(len(sys.argv))[1:]:
+ if sys.argv[i].startswith('-D'):
+ if sys.argv[i] == '-D' and i + 1 < len(sys.argv):
+ cmdline_input_items += [sys.argv[i + 1]]
+ elif len(sys.argv[i]) > 2:
+ cmdline_input_items += [sys.argv[i][2:]]
+ cmdline_items = ProcessGypDefinesItems(cmdline_input_items)
+
+ vars_dict = dict(supp_items + env_items + cmdline_items)
+ # It's not possible to set a default value for cpu_arch in GN, so do it here
+ # for now (http://crbug.com/344767).
+ if vars_dict.get('OS') == 'android' and not 'target_arch' in vars_dict:
+ vars_dict['target_arch'] = 'arm'
+ return vars_dict
+
+
+def GetOutputDirectory():
+ """Returns the output directory that GYP will use."""
+ # GYP generator flags from the command line. We can't use optparse since we
+ # want to ignore all arguments other than "-G".
+ needle = '-Goutput_dir='
+ cmdline_input_items = []
+ for item in sys.argv[1:]:
+ if item.startswith(needle):
+ return item[len(needle):]
+
+ env_items = shlex.split(os.environ.get('GYP_GENERATOR_FLAGS', ''))
+ needle = 'output_dir='
+ for item in env_items:
+ if item.startswith(needle):
+ return item[len(needle):]
+
+ return "out"
+
+
+def GetArgsStringForGN(vars_dict):
+ """Returns the args to pass to GN.
+ Based on a subset of the GYP variables that have been rewritten a bit."""
+ gn_args = ''
+
+ # Note: These are the additional flags passed to various builds by builders
+ # on the main waterfall. We'll probably need to add these at some point:
+ # mac_strip_release=1 http://crbug.com/330301
+ # linux_dump_symbols=0 http://crbug.com/330300
+ # host_os=linux Probably can skip, GN knows the host OS.
+ # order_text_section=<path> http://crbug.com/330299
+ # chromium_win_pch=0 http://crbug.com/297678
+ # chromium_ios_signing=0 http://crbug.com/330302
+ # use_allocator=tcmalloc http://crbug.com/330303, 345554
+ # release_extra_flags=... http://crbug.com/330305
+
+ # These tuples of (key, value, gn_arg_string) use the gn_arg_string for
+ # gn when the key is set to the given value in the GYP arguments.
+ remap_cases = [
+ ('android_webview_build', '1', 'is_android_webview_build=true'),
+ ('branding', 'Chrome', 'is_chrome_branded=true'),
+ ('build_for_tool', 'drmemory', 'disable_iterator_debugging=true'),
+ ('build_for_tool', 'tsan', 'disable_iterator_debugging=true'),
+ ('buildtype', 'Official', 'is_official_build=true'),
+ ('component', 'shared_library', 'is_component_build=true'),
+ ('clang', '1', 'is_clang=true'),
+ ('clang_use_chrome_plugins', '0', 'clang_use_chrome_plugins=false'),
+ ('disable_glibcxx_debug', '1', 'disable_iterator_debugging=true'),
+ ('enable_mdns', '0', 'enable_mdns=false'),
+ ('enable_mdns', '1', 'enable_mdns=true'),
+ ('enable_plugins', '0', 'enable_plugins=false'),
+ ('enable_plugins', '1', 'enable_plugins=true'),
+ ('target_arch', 'ia32', 'cpu_arch="x86"'),
+ ('target_arch', 'x64', 'cpu_arch="x64" force_win64=true'),
+ ('target_arch', 'arm', 'cpu_arch="arm"'),
+ ('target_arch', 'mipsel', 'cpu_arch="mipsel"'),
+ ('fastbuild', '0', 'symbol_level=2'),
+ ('fastbuild', '1', 'symbol_level=1'),
+ ('fastbuild', '2', 'symbol_level=0'),
+ ('OS', 'ios', 'os="ios"'),
+ ('OS', 'android', 'os="android"'),
+ ('chromeos', '1', 'os="chromeos"'),
+ ('use_aura', '1', 'use_aura=true'),
+ ('use_goma', '1', 'use_goma=true'),
+ ('use_openssl', '0', 'use_openssl=false'),
+ ('use_openssl', '1', 'use_openssl=true'),
+ ('asan', '1', 'is_asan=true'),
+ ('lsan', '1', 'is_lsan=true'),
+ ('msan', '1', 'is_msan=true'),
+ ('tsan', '1', 'is_tsan=true'),
+ ]
+ for i in remap_cases:
+ if i[0] in vars_dict and vars_dict[i[0]] == i[1]:
+ gn_args += ' ' + i[2]
+
+ # These string arguments get passed directly as GN strings.
+ for v in ['android_src', 'arm_float_abi', 'ios_deployment_target',
+ 'ios_sdk_path', 'windows_sdk_path']:
+ if v in vars_dict:
+ gn_args += ' ' + v + '=' + EscapeStringForGN(vars_dict[v])
+
+ # gomadir is renamed goma_dir in the GN build.
+ if 'gomadir' in vars_dict:
+ gn_args += ' goma_dir=%s' % EscapeStringForGN(vars_dict['gomadir'])
+
+ # Set the "use_ios_simulator" flag if the ios_sdk_path is set.
+ if 'ios_sdk_path' in vars_dict:
+ if os.path.basename(vars_dict['ios_sdk_path']).lower().startswith(
+ 'iphonesimulator'):
+ gn_args += ' use_ios_simulator=true'
+ else:
+ gn_args += ' use_ios_simulator=false'
+
+ # These arguments get passed directly as integers (avoiding the quoting and
+ # escaping of the string ones above).
+ for v in ['arm_version']:
+ if v in vars_dict:
+ gn_args += ' %s=%s' % (v, vars_dict[v])
+
+ # Some other flags come from GYP environment variables.
+ gyp_msvs_version = os.environ.get('GYP_MSVS_VERSION', '')
+ if gyp_msvs_version:
+ gn_args += ' visual_studio_version=' + EscapeStringForGN(gyp_msvs_version)
+ gyp_msvs_override_path = os.environ.get('GYP_MSVS_OVERRIDE_PATH', '')
+ if gyp_msvs_override_path:
+ gn_args += ' visual_studio_path=' + \
+ EscapeStringForGN(gyp_msvs_override_path)
+
+ # Set the GYP flag so BUILD files know they're being invoked in GYP mode.
+ gn_args += ' is_gyp=true'
+
+ gyp_outdir = GetOutputDirectory()
+ gn_args += ' gyp_output_dir=\"%s\"' % gyp_outdir
+
+ return gn_args.strip()
+
+
+def additional_include_files(supplemental_files, args=[]):
+ """
+ Returns a list of additional (.gypi) files to include, without duplicating
+ ones that are already specified on the command line. The list of supplemental
+ include files is passed in as an argument.
+ """
+ # Determine the include files specified on the command line.
+ # This doesn't cover all the different option formats you can use,
+ # but it's mainly intended to avoid duplicating flags on the automatic
+ # makefile regeneration which only uses this format.
+ specified_includes = set()
+ for arg in args:
+ if arg.startswith('-I') and len(arg) > 2:
+ specified_includes.add(os.path.realpath(arg[2:]))
+
+ result = []
+ def AddInclude(path):
+ if os.path.realpath(path) not in specified_includes:
+ result.append(path)
+
+ # Always include common.gypi.
+ AddInclude(os.path.join(script_dir, 'common.gypi'))
+
+ # Optionally add supplemental .gypi files if present.
+ for supplement in supplemental_files:
+ AddInclude(supplement)
+
+ return result
+
+
+def RunGN(vars_dict):
+ """Runs GN, returning True if it succeeded, printing an error and returning
+ false if not."""
+
+ # The binaries in platform-specific subdirectories in src/tools/gn/bin.
+ gnpath = SRC_DIR + '/tools/gn/bin/'
+ if sys.platform in ('cygwin', 'win32'):
+ gnpath += 'win/gn.exe'
+ elif sys.platform.startswith('linux'):
+ # On Linux we have 32-bit and 64-bit versions.
+ if subprocess.check_output(["getconf", "LONG_BIT"]).find("64") >= 0:
+ gnpath += 'linux/gn'
+ else:
+ gnpath += 'linux/gn32'
+ elif sys.platform == 'darwin':
+ gnpath += 'mac/gn'
+ else:
+ print 'Unknown platform for GN: ', sys.platform
+ return False
+
+ print 'Generating gyp files from GN...'
+
+ # Need to pass both the source root (the bots don't run this command from
+ # within the source tree) as well as set the is_gyp value so the BUILD files
+  # know they're being run under GYP.
+ args = [gnpath, 'gyp', '-q',
+ '--root=' + chrome_src,
+ '--args=' + GetArgsStringForGN(vars_dict),
+ '--output=//' + GetOutputDirectory() + '/gn_build/']
+ return subprocess.call(args) == 0
+
+
+def GetDesiredVsToolchainHashes():
+ """Load a list of SHA1s corresponding to the toolchains that we want installed
+ to build with."""
+ sha1path = os.path.join(script_dir, 'toolchain_vs2013.hash')
+ with open(sha1path, 'rb') as f:
+ return f.read().strip().splitlines()
+
+
+def DownloadVsToolChain():
+ """Download the Visual Studio toolchain on Windows.
+
+ If on Windows, request that depot_tools install/update the automatic
+ toolchain, and then use it (unless opted-out) and return a tuple containing
+ the x64 and x86 paths. Otherwise return None.
+ """
+ vs2013_runtime_dll_dirs = None
+ depot_tools_win_toolchain = \
+ bool(int(os.environ.get('DEPOT_TOOLS_WIN_TOOLCHAIN', '1')))
+ if sys.platform in ('win32', 'cygwin') and depot_tools_win_toolchain:
+ import find_depot_tools
+ depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+ temp_handle, data_file = tempfile.mkstemp(suffix='.json')
+ os.close(temp_handle)
+ get_toolchain_args = [
+ sys.executable,
+ os.path.join(depot_tools_path,
+ 'win_toolchain',
+ 'get_toolchain_if_necessary.py'),
+ '--output-json', data_file,
+ ] + GetDesiredVsToolchainHashes()
+ subprocess.check_call(get_toolchain_args)
+
+ with open(data_file, 'r') as tempf:
+ toolchain_data = json.load(tempf)
+ os.unlink(data_file)
+
+ toolchain = toolchain_data['path']
+ version = toolchain_data['version']
+ version_is_pro = version[-1] != 'e'
+ win8sdk = toolchain_data['win8sdk']
+ wdk = toolchain_data['wdk']
+ # TODO(scottmg): The order unfortunately matters in these. They should be
+ # split into separate keys for x86 and x64. (See CopyVsRuntimeDlls call
+ # below). http://crbug.com/345992
+ vs2013_runtime_dll_dirs = toolchain_data['runtime_dirs']
+
+ os.environ['GYP_MSVS_OVERRIDE_PATH'] = toolchain
+ os.environ['GYP_MSVS_VERSION'] = version
+ # We need to make sure windows_sdk_path is set to the automated
+ # toolchain values in GYP_DEFINES, but don't want to override any
+  # other
+ # values there.
+ gyp_defines_dict = gyp.NameValueListToDict(gyp.ShlexEnv('GYP_DEFINES'))
+ gyp_defines_dict['windows_sdk_path'] = win8sdk
+ os.environ['GYP_DEFINES'] = ' '.join('%s=%s' % (k, pipes.quote(str(v)))
+ for k, v in gyp_defines_dict.iteritems())
+ os.environ['WINDOWSSDKDIR'] = win8sdk
+ os.environ['WDK_DIR'] = wdk
+ # Include the VS runtime in the PATH in case it's not machine-installed.
+ runtime_path = ';'.join(vs2013_runtime_dll_dirs)
+ os.environ['PATH'] = runtime_path + ';' + os.environ['PATH']
+ print('Using automatic toolchain in %s (%s edition).' % (
+ toolchain, 'Pro' if version_is_pro else 'Express'))
+ return vs2013_runtime_dll_dirs
+
+
+def CopyVsRuntimeDlls(output_dir, runtime_dirs):
+ """Copies the VS runtime DLLs from the given |runtime_dirs| to the output
+ directory so that even if not system-installed, built binaries are likely to
+ be able to run.
+
+ This needs to be run after gyp has been run so that the expected target
+ output directories are already created.
+ """
+ assert sys.platform.startswith(('win32', 'cygwin'))
+
+ def copy_runtime(target_dir, source_dir, dll_pattern):
+ """Copy both the msvcr and msvcp runtime DLLs, only if the target doesn't
+ exist, but the target directory does exist."""
+ for which in ('p', 'r'):
+ dll = dll_pattern % which
+ target = os.path.join(target_dir, dll)
+ source = os.path.join(source_dir, dll)
+ # If gyp generated to that output dir, and the runtime isn't already
+ # there, then copy it over.
+ if (os.path.isdir(target_dir) and
+ (not os.path.isfile(target) or
+ os.stat(target).st_mtime != os.stat(source).st_mtime)):
+ print 'Copying %s to %s...' % (source, target)
+ if os.path.exists(target):
+ os.unlink(target)
+ shutil.copy2(source, target)
+
+ x86, x64 = runtime_dirs
+ out_debug = os.path.join(output_dir, 'Debug')
+ out_debug_nacl64 = os.path.join(output_dir, 'Debug', 'x64')
+ out_release = os.path.join(output_dir, 'Release')
+ out_release_nacl64 = os.path.join(output_dir, 'Release', 'x64')
+ out_debug_x64 = os.path.join(output_dir, 'Debug_x64')
+ out_release_x64 = os.path.join(output_dir, 'Release_x64')
+
+ if os.path.exists(out_debug) and not os.path.exists(out_debug_nacl64):
+ os.makedirs(out_debug_nacl64)
+ if os.path.exists(out_release) and not os.path.exists(out_release_nacl64):
+ os.makedirs(out_release_nacl64)
+ copy_runtime(out_debug, x86, 'msvc%s120d.dll')
+ copy_runtime(out_release, x86, 'msvc%s120.dll')
+ copy_runtime(out_debug_x64, x64, 'msvc%s120d.dll')
+ copy_runtime(out_release_x64, x64, 'msvc%s120.dll')
+ copy_runtime(out_debug_nacl64, x64, 'msvc%s120d.dll')
+ copy_runtime(out_release_nacl64, x64, 'msvc%s120.dll')
+
+
+if __name__ == '__main__':
+ args = sys.argv[1:]
+
+ if int(os.environ.get('GYP_CHROMIUM_NO_ACTION', 0)):
+ print 'Skipping gyp_chromium due to GYP_CHROMIUM_NO_ACTION env var.'
+ sys.exit(0)
+
+ # Use the Psyco JIT if available.
+ if psyco:
+ psyco.profile()
+ print "Enabled Psyco JIT."
+
+ # Fall back on hermetic python if we happen to get run under cygwin.
+ # TODO(bradnelson): take this out once this issue is fixed:
+ # http://code.google.com/p/gyp/issues/detail?id=177
+ if sys.platform == 'cygwin':
+ import find_depot_tools
+ depot_tools_path = find_depot_tools.add_depot_tools_to_path()
+ python_dir = sorted(glob.glob(os.path.join(depot_tools_path,
+ 'python2*_bin')))[-1]
+ env = os.environ.copy()
+ env['PATH'] = python_dir + os.pathsep + env.get('PATH', '')
+ p = subprocess.Popen(
+ [os.path.join(python_dir, 'python.exe')] + sys.argv,
+ env=env, shell=False)
+ p.communicate()
+ sys.exit(p.returncode)
+
+ gyp_helper.apply_chromium_gyp_env()
+
+ # This could give false positives since it doesn't actually do real option
+ # parsing. Oh well.
+ gyp_file_specified = False
+ for arg in args:
+ if arg.endswith('.gyp'):
+ gyp_file_specified = True
+ break
+
+ # If we didn't get a file, check an env var, and then fall back to
+ # assuming 'all.gyp' from the same directory as the script.
+ if not gyp_file_specified:
+ gyp_file = os.environ.get('CHROMIUM_GYP_FILE')
+ if gyp_file:
+ # Note that CHROMIUM_GYP_FILE values can't have backslashes as
+ # path separators even on Windows due to the use of shlex.split().
+ args.extend(shlex.split(gyp_file))
+ else:
+ args.append(os.path.join(script_dir, 'all.gyp'))
+
+ # There shouldn't be a circular dependency relationship between .gyp files,
+ # but in Chromium's .gyp files, on non-Mac platforms, circular relationships
+ # currently exist. The check for circular dependencies is currently
+ # bypassed on other platforms, but is left enabled on the Mac, where a
+ # violation of the rule causes Xcode to misbehave badly.
+ # TODO(mark): Find and kill remaining circular dependencies, and remove this
+ # option. http://crbug.com/35878.
+ # TODO(tc): Fix circular dependencies in ChromiumOS then add linux2 to the
+ # list.
+ if sys.platform not in ('darwin',):
+ args.append('--no-circular-check')
+
+ # Default to make if no generator has
+ # explicitly been set.
+ if not os.environ.get('GYP_GENERATORS'):
+ os.environ['GYP_GENERATORS'] = 'make'
+ elif sys.platform == 'darwin' and not os.environ.get('GYP_GENERATORS') and \
+ not 'OS=ios' in os.environ.get('GYP_DEFINES', []):
+ os.environ['GYP_GENERATORS'] = 'make'
+
+ vs2013_runtime_dll_dirs = DownloadVsToolChain()
+
+ # If CHROMIUM_GYP_SYNTAX_CHECK is set to 1, it will invoke gyp with --check
+  # to enforce syntax checking.
+ syntax_check = os.environ.get('CHROMIUM_GYP_SYNTAX_CHECK')
+ if syntax_check and int(syntax_check):
+ args.append('--check')
+
+ supplemental_includes = GetSupplementalFiles()
+ gn_vars_dict = GetGypVarsForGN(supplemental_includes)
+
+ # Automatically turn on crosscompile support for platforms that need it.
+ # (The Chrome OS build sets CC_host / CC_target which implicitly enables
+ # this mode.)
+ if all(('ninja' in os.environ.get('GYP_GENERATORS', ''),
+ gn_vars_dict.get('OS') in ['android', 'ios'],
+ 'GYP_CROSSCOMPILE' not in os.environ)):
+ os.environ['GYP_CROSSCOMPILE'] = '1'
+
+ # TODO(brettw) bug 350974 either turn back on GN or delete all of this code.
+ #if not RunGN(gn_vars_dict):
+ # sys.exit(1)
+ args.extend(
+ ['-I' + i for i in additional_include_files(supplemental_includes, args)])
+
+ args.extend(['-D', 'gyp_output_dir=' + GetOutputDirectory()])
+
+ print 'Updating projects from gyp files...'
+ sys.stdout.flush()
+
+ # Off we go...
+ gyp_rc = gyp.main(args)
+
+ # Check for landmines (reasons to clobber the build). This must be run here,
+ # rather than a separate runhooks step so that any environment modifications
+ # from above are picked up.
+ print 'Running build/landmines.py...'
+ subprocess.check_call(
+ [sys.executable, os.path.join(script_dir, 'landmines.py')])
+
+ if vs2013_runtime_dll_dirs:
+ x64_runtime, x86_runtime = vs2013_runtime_dll_dirs
+ CopyVsRuntimeDlls(os.path.join(chrome_src, GetOutputDirectory()),
+ (x86_runtime, x64_runtime))
+
+ sys.exit(gyp_rc)
diff --git a/src/build/gyp_helper.py b/src/build/gyp_helper.py
new file mode 100644
index 0000000..eadc7a5
--- /dev/null
+++ b/src/build/gyp_helper.py
@@ -0,0 +1,54 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file helps gyp_chromium and landmines correctly set up the gyp
+# environment from chromium.gyp_env on disk
+
+import os
+
+SCRIPT_DIR = os.path.dirname(os.path.realpath(__file__))
+CHROME_SRC = os.path.dirname(SCRIPT_DIR)
+
+
+def apply_gyp_environment_from_file(file_path):
+ """Reads in a *.gyp_env file and applies the valid keys to os.environ."""
+ if not os.path.exists(file_path):
+ return
+ with open(file_path, 'rU') as f:
+ file_contents = f.read()
+ try:
+ file_data = eval(file_contents, {'__builtins__': None}, None)
+ except SyntaxError, e:
+ e.filename = os.path.abspath(file_path)
+ raise
+ supported_vars = (
+ 'CC',
+ 'CC_wrapper',
+ 'CHROMIUM_GYP_FILE',
+ 'CHROMIUM_GYP_SYNTAX_CHECK',
+ 'CXX',
+ 'CXX_wrapper',
+ 'GYP_DEFINES',
+ 'GYP_GENERATOR_FLAGS',
+ 'GYP_CROSSCOMPILE',
+ 'GYP_GENERATOR_OUTPUT',
+ 'GYP_GENERATORS',
+ 'GYP_MSVS_VERSION',
+ )
+ for var in supported_vars:
+ file_val = file_data.get(var)
+ if file_val:
+ if var in os.environ:
+ print 'INFO: Environment value for "%s" overrides value in %s.' % (
+ var, os.path.abspath(file_path)
+ )
+ else:
+ os.environ[var] = file_val
+
+
+def apply_chromium_gyp_env():
+ if 'SKIP_CHROMIUM_GYP_ENV' not in os.environ:
+ # Update the environment based on chromium.gyp_env
+ path = os.path.join(os.path.dirname(CHROME_SRC), 'chromium.gyp_env')
+ apply_gyp_environment_from_file(path)
diff --git a/src/build/install.gyp b/src/build/install.gyp
new file mode 100644
index 0000000..d64d02c
--- /dev/null
+++ b/src/build/install.gyp
@@ -0,0 +1,192 @@
+# Copyright (c) 2010 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'variables': {
+ 'install_path': '<(DEPTH)/install',
+ 'version_py_path': '<(DEPTH)/build/version.py',
+ 'version_path': '<(DEPTH)/net/instaweb/public/VERSION',
+ 'lastchange_path': '<(SHARED_INTERMEDIATE_DIR)/build/LASTCHANGE',
+ 'branding_dir': '<(install_path)/common',
+ },
+ 'conditions': [
+ ['OS=="linux"', {
+ 'variables': {
+ 'version' : '<!(python <(version_py_path) -f <(version_path) -t "@MAJOR@.@MINOR@.@BUILD@.@PATCH@")',
+ 'revision' : '<!(if [ -f <(DEPTH)/LASTCHANGE.in ]; then cat <(DEPTH)/LASTCHANGE.in | cut -d= -f2; else git rev-list --all --count; fi)',
+ 'packaging_files_common': [
+ '<(install_path)/common/apt.include',
+ '<(install_path)/common/mod-pagespeed/mod-pagespeed.info',
+ '<(install_path)/common/installer.include',
+ '<(install_path)/common/repo.cron',
+ '<(install_path)/common/rpm.include',
+ '<(install_path)/common/rpmrepo.cron',
+ '<(install_path)/common/updater',
+ '<(install_path)/common/variables.include',
+ '<(install_path)/common/BRANDING',
+ '<(install_path)/common/pagespeed.load.template',
+ '<(install_path)/common/pagespeed.conf.template',
+ ],
+ 'packaging_files_deb': [
+ '<(install_path)/debian/build.sh',
+ '<(install_path)/debian/changelog.template',
+ '<(install_path)/debian/conffiles',
+ '<(install_path)/debian/control.template',
+ '<(install_path)/debian/postinst',
+ '<(install_path)/debian/postrm',
+ '<(install_path)/debian/prerm',
+ ],
+ 'packaging_files_rpm': [
+ '<(install_path)/rpm/build.sh',
+ '<(install_path)/rpm/mod-pagespeed.spec.template',
+ ],
+ 'packaging_files_binaries': [
+ '<(PRODUCT_DIR)/libmod_pagespeed.so',
+ '<(PRODUCT_DIR)/libmod_pagespeed_ap24.so',
+ ],
+ 'flock_bash': ['flock', '--', '/tmp/linux_package_lock', 'bash'],
+ 'deb_build': '<(PRODUCT_DIR)/install/debian/build.sh',
+ 'rpm_build': '<(PRODUCT_DIR)/install/rpm/build.sh',
+ 'deb_cmd': ['<@(flock_bash)', '<(deb_build)', '-o' '<(PRODUCT_DIR)',
+ '-b', '<(PRODUCT_DIR)', '-a', '<(target_arch)'],
+ 'rpm_cmd': ['<@(flock_bash)', '<(rpm_build)', '-o' '<(PRODUCT_DIR)',
+ '-b', '<(PRODUCT_DIR)', '-a', '<(target_arch)'],
+ 'conditions': [
+ ['target_arch=="ia32"', {
+ 'deb_arch': 'i386',
+ 'rpm_arch': 'i386',
+ }],
+ ['target_arch=="x64"', {
+ 'deb_arch': 'amd64',
+ 'rpm_arch': 'x86_64',
+ }],
+ ],
+ },
+ 'targets': [
+ {
+ 'target_name': 'linux_installer_configs',
+ 'type': 'none',
+ # Add these files to the build output so the build archives will be
+ # "hermetic" for packaging.
+ 'copies': [
+ {
+ 'destination': '<(PRODUCT_DIR)/install/debian/',
+ 'files': [
+ '<@(packaging_files_deb)',
+ ]
+ },
+ {
+ 'destination': '<(PRODUCT_DIR)/install/rpm/',
+ 'files': [
+ '<@(packaging_files_rpm)',
+ ]
+ },
+ {
+ 'destination': '<(PRODUCT_DIR)/install/common/',
+ 'files': [
+ '<@(packaging_files_common)',
+ ]
+ },
+ ],
+ 'actions': [
+ {
+ 'action_name': 'save_build_info',
+ 'inputs': [
+ '<(branding_dir)/BRANDING',
+ '<(version_path)',
+ '<(lastchange_path)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/installer/version.txt',
+ ],
+ # Just output the default version info variables.
+ 'action': [
+ 'python', '<(version_py_path)',
+ '-f', '<(branding_dir)/BRANDING',
+ '-f', '<(version_path)',
+ '-f', '<(lastchange_path)',
+ '-o', '<@(_outputs)'
+ ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'linux_packages',
+ 'suppress_wildcard': 1,
+ 'type': 'none',
+ 'dependencies': [
+ 'linux_package_deb',
+ 'linux_package_rpm',
+ ],
+ },
+ {
+ 'target_name': 'linux_package_deb',
+ 'suppress_wildcard': 1,
+ 'type': 'none',
+ 'dependencies': [
+ 'all.gyp:All',
+ 'linux_installer_configs',
+ ],
+ 'actions': [
+ {
+ 'variables': {
+ 'channel%': 'beta',
+ },
+ 'action_name': 'deb_package_<(channel)',
+ 'process_outputs_as_sources': 1,
+ 'inputs': [
+ '<(deb_build)',
+ '<@(packaging_files_binaries)',
+ '<@(packaging_files_common)',
+ '<@(packaging_files_deb)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/mod-pagespeed-<(channel)-<(version)-r<(revision)_<(deb_arch).deb',
+ ],
+ 'action': [ '<@(deb_cmd)', '-c', '<(channel)', ],
+ },
+ ],
+ },
+ {
+ 'target_name': 'linux_package_rpm',
+ 'suppress_wildcard': 1,
+ 'type': 'none',
+ 'dependencies': [
+ 'all.gyp:All',
+ 'linux_installer_configs',
+ ],
+ 'actions': [
+ {
+ 'variables': {
+ 'channel%': 'beta',
+ },
+ 'action_name': 'rpm_package_<(channel)',
+ 'process_outputs_as_sources': 1,
+ 'inputs': [
+ '<(rpm_build)',
+ '<(PRODUCT_DIR)/install/rpm/mod-pagespeed.spec.template',
+ '<@(packaging_files_binaries)',
+ '<@(packaging_files_common)',
+ '<@(packaging_files_rpm)',
+ ],
+ 'outputs': [
+ '<(PRODUCT_DIR)/mod-pagespeed-<(channel)-<(version)-r<(revision).<(rpm_arch).rpm',
+ ],
+ 'action': [ '<@(rpm_cmd)', '-c', '<(channel)', ],
+ },
+ ],
+ },
+ ],
+ },{
+ 'targets': [
+ ],
+ }],
+ ],
+}
+
+# Local Variables:
+# tab-width:2
+# indent-tabs-mode:nil
+# End:
+# vim: set expandtab tabstop=2 shiftwidth=2:
diff --git a/src/build/java.gypi b/src/build/java.gypi
new file mode 100644
index 0000000..67ff12c
--- /dev/null
+++ b/src/build/java.gypi
@@ -0,0 +1,398 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# This file is meant to be included into a target to provide a rule
+# to build Java in a consistent manner.
+#
+# To use this, create a gyp target with the following form:
+# {
+# 'target_name': 'my-package_java',
+# 'type': 'none',
+# 'variables': {
+# 'java_in_dir': 'path/to/package/root',
+# },
+# 'includes': ['path/to/this/gypi/file'],
+# }
+#
+# Required variables:
+# java_in_dir - The top-level java directory. The src should be in
+# <java_in_dir>/src.
+# Optional/automatic variables:
+# additional_input_paths - These paths will be included in the 'inputs' list to
+# ensure that this target is rebuilt when one of these paths changes.
+# additional_src_dirs - Additional directories with .java files to be compiled
+# and included in the output of this target.
+# generated_src_dirs - Same as additional_src_dirs except used for .java files
+# that are generated at build time. This should be set automatically by a
+# target's dependencies. The .java files in these directories are not
+# included in the 'inputs' list (unlike additional_src_dirs).
+# input_jars_paths - The path to jars to be included in the classpath. This
+# should be filled automatically by depending on the appropriate targets.
+# javac_includes - A list of specific files to include. This is by default
+# empty, which leads to inclusion of all files specified. May include
+# wildcard, and supports '**/' for recursive path wildcards, ie.:
+# '**/MyFileRegardlessOfDirectory.java', '**/IncludedPrefix*.java'.
+# has_java_resources - Set to 1 if the java target contains an
+# Android-compatible resources folder named res. If 1, R_package and
+# R_package_relpath must also be set.
+# R_package - The java package in which the R class (which maps resources to
+# integer IDs) should be generated, e.g. org.chromium.content.
+# R_package_relpath - Same as R_package, but replace each '.' with '/'.
+# java_strings_grd - The name of the grd file from which to generate localized
+# strings.xml files, if any.
+# res_extra_dirs - A list of extra directories containing Android resources.
+# These directories may be generated at build time.
+# res_extra_files - A list of the files in res_extra_dirs.
+# never_lint - Set to 1 to not run lint on this target.
+
+{
+ 'dependencies': [
+ '<(DEPTH)/build/android/setup.gyp:build_output_dirs'
+ ],
+ 'variables': {
+ 'android_jar': '<(android_sdk)/android.jar',
+ 'input_jars_paths': [ '<(android_jar)' ],
+ 'additional_src_dirs': [],
+ 'javac_includes': [],
+ 'jar_name': '<(_target_name).jar',
+ 'jar_dir': '<(PRODUCT_DIR)/lib.java',
+ 'jar_path': '<(intermediate_dir)/<(jar_name)',
+ 'jar_final_path': '<(jar_dir)/<(jar_name)',
+ 'jar_excluded_classes': [ '*/R.class', '*/R##*.class' ],
+ 'instr_stamp': '<(intermediate_dir)/instr.stamp',
+ 'additional_input_paths': [],
+ 'dex_path': '<(PRODUCT_DIR)/lib.java/<(_target_name).dex.jar',
+ 'generated_src_dirs': ['>@(generated_R_dirs)'],
+ 'generated_R_dirs': [],
+ 'has_java_resources%': 0,
+ 'java_strings_grd%': '',
+ 'res_extra_dirs': [],
+ 'res_extra_files': [],
+ 'res_v14_verify_only%': 0,
+ 'resource_input_paths': ['>@(res_extra_files)'],
+ 'intermediate_dir': '<(SHARED_INTERMEDIATE_DIR)/<(_target_name)',
+ 'classes_dir': '<(intermediate_dir)/classes',
+ 'compile_stamp': '<(intermediate_dir)/compile.stamp',
+ 'lint_stamp': '<(intermediate_dir)/lint.stamp',
+ 'lint_result': '<(intermediate_dir)/lint_result.xml',
+ 'lint_config': '<(intermediate_dir)/lint_config.xml',
+ 'never_lint%': 0,
+ 'proguard_config%': '',
+ 'proguard_preprocess%': '0',
+ 'variables': {
+ 'variables': {
+ 'proguard_preprocess%': 0,
+ 'emma_never_instrument%': 0,
+ },
+ 'conditions': [
+ ['proguard_preprocess == 1', {
+ 'javac_jar_path': '<(intermediate_dir)/<(_target_name).pre.jar'
+ }, {
+ 'javac_jar_path': '<(jar_path)'
+ }],
+ ['chromium_code != 0 and emma_coverage != 0 and emma_never_instrument == 0', {
+ 'emma_instrument': 1,
+ }, {
+ 'emma_instrument': 0,
+ }],
+ ],
+ },
+ 'emma_instrument': '<(emma_instrument)',
+ 'javac_jar_path': '<(javac_jar_path)',
+ },
+ # This all_dependent_settings is used for java targets only. This will add the
+ # jar path to the classpath of dependent java targets.
+ 'all_dependent_settings': {
+ 'variables': {
+ 'input_jars_paths': ['<(jar_final_path)'],
+ 'library_dexed_jars_paths': ['<(dex_path)'],
+ },
+ },
+ 'conditions': [
+ ['has_java_resources == 1', {
+ 'variables': {
+ 'res_dir': '<(java_in_dir)/res',
+ 'res_crunched_dir': '<(intermediate_dir)/res_crunched',
+ 'res_v14_compatibility_stamp': '<(intermediate_dir)/res_v14_compatibility.stamp',
+ 'res_v14_compatibility_dir': '<(intermediate_dir)/res_v14_compatibility',
+ 'res_input_dirs': ['<(res_dir)', '<@(res_extra_dirs)'],
+ 'resource_input_paths': ['<!@(find <(res_dir) -type f)'],
+ 'R_dir': '<(intermediate_dir)/java_R',
+ 'R_text_file': '<(R_dir)/R.txt',
+ 'R_stamp': '<(intermediate_dir)/resources.stamp',
+ 'generated_src_dirs': ['<(R_dir)'],
+ 'additional_input_paths': ['<(R_stamp)',
+ '<(res_v14_compatibility_stamp)',],
+ 'additional_res_dirs': [],
+ 'dependencies_res_input_dirs': [],
+ 'dependencies_res_files': [],
+ },
+ 'all_dependent_settings': {
+ 'variables': {
+ # Dependent jars include this target's R.java file via
+ # generated_R_dirs and include its resources via
+ # dependencies_res_files.
+ 'generated_R_dirs': ['<(R_dir)'],
+ 'additional_input_paths': ['<(R_stamp)',
+ '<(res_v14_compatibility_stamp)',],
+ 'dependencies_res_files': ['<@(resource_input_paths)'],
+
+ 'dependencies_res_input_dirs': ['<@(res_input_dirs)'],
+
+ # Dependent APKs include this target's resources via
+ # additional_res_dirs, additional_res_packages, and
+ # additional_R_text_files.
+ 'additional_res_dirs': ['<(res_crunched_dir)',
+ '<(res_v14_compatibility_dir)',
+ '<@(res_input_dirs)'],
+ 'additional_res_packages': ['<(R_package)'],
+ 'additional_R_text_files': ['<(R_text_file)'],
+ },
+ },
+ 'conditions': [
+ ['java_strings_grd != ""', {
+ 'variables': {
+ 'res_grit_dir': '<(intermediate_dir)/res_grit',
+ 'res_input_dirs': ['<(res_grit_dir)'],
+ 'grit_grd_file': '<(java_in_dir)/strings/<(java_strings_grd)',
+ 'resource_input_paths': ['<!@pymod_do_main(grit_info <@(grit_defines) --outputs "<(res_grit_dir)" <(grit_grd_file))'],
+ },
+ 'actions': [
+ {
+ 'action_name': 'generate_localized_strings_xml',
+ 'variables': {
+ 'grit_additional_defines': ['-E', 'ANDROID_JAVA_TAGGED_ONLY=false'],
+ 'grit_out_dir': '<(res_grit_dir)',
+ # resource_ids is unneeded since we don't generate .h headers.
+ 'grit_resource_ids': '',
+ },
+ 'includes': ['../build/grit_action.gypi'],
+ },
+ ],
+ }],
+ ],
+ 'actions': [
+ # Generate R.java and crunch image resources.
+ {
+ 'action_name': 'process_resources',
+ 'message': 'processing resources for <(_target_name)',
+ 'variables': {
+ 'android_manifest': '<(DEPTH)/build/android/AndroidManifest.xml',
+ # Include the dependencies' res dirs so that references to
+ # resources in dependencies can be resolved.
+ 'all_res_dirs': ['<@(res_input_dirs)',
+ '>@(dependencies_res_input_dirs)',],
+ # Write the inputs list to a file, so that the action command
+ # line won't exceed the OS limits when calculating the checksum
+ # of the list.
+ 'inputs_list_file': '>|(inputs_list.<(_target_name).gypcmd >@(_inputs))'
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/process_resources.py',
+ '>@(resource_input_paths)',
+ '>@(dependencies_res_files)',
+ ],
+ 'outputs': [
+ '<(R_stamp)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/process_resources.py',
+ '--android-sdk', '<(android_sdk)',
+ '--android-sdk-tools', '<(android_sdk_tools)',
+ '--R-dir', '<(R_dir)',
+ '--res-dirs', '>(all_res_dirs)',
+ '--crunch-input-dir', '>(res_dir)',
+ '--crunch-output-dir', '<(res_crunched_dir)',
+ '--android-manifest', '<(android_manifest)',
+ '--non-constant-id',
+ '--custom-package', '<(R_package)',
+ '--stamp', '<(R_stamp)',
+
+ # Add hash of inputs to the command line, so if inputs change
+        # (e.g. if a resource is removed), the command will be re-run.
+ # TODO(newt): remove this once crbug.com/177552 is fixed in ninja.
+ '--ignore=>!(md5sum >(inputs_list_file))',
+ ],
+ },
+ # Generate API 14 resources.
+ {
+ 'action_name': 'generate_api_14_resources_<(_target_name)',
+ 'message': 'Generating Android API 14 resources <(_target_name)',
+ 'variables' : {
+ 'res_v14_additional_options': [],
+ },
+ 'conditions': [
+ ['res_v14_verify_only == 1', {
+ 'variables': {
+ 'res_v14_additional_options': ['--verify-only']
+ },
+ }],
+ ],
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+ '>@(resource_input_paths)',
+ ],
+ 'outputs': [
+ '<(res_v14_compatibility_stamp)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/generate_v14_compatible_resources.py',
+ '--res-dir=<(res_dir)',
+ '--res-v14-compatibility-dir=<(res_v14_compatibility_dir)',
+ '--stamp', '<(res_v14_compatibility_stamp)',
+ '<@(res_v14_additional_options)',
+ ]
+ },
+ ],
+ }],
+ ['proguard_preprocess == 1', {
+ 'actions': [
+ {
+ 'action_name': 'proguard_<(_target_name)',
+ 'message': 'Proguard preprocessing <(_target_name) jar',
+ 'inputs': [
+ '<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/proguard.py',
+ '<(javac_jar_path)',
+ '<(proguard_config)',
+ ],
+ 'outputs': [
+ '<(jar_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/proguard.py',
+ '--proguard-path=<(android_sdk_root)/tools/proguard/bin/proguard.sh',
+ '--input-path=<(javac_jar_path)',
+ '--output-path=<(jar_path)',
+ '--proguard-config=<(proguard_config)',
+ '--classpath=<(android_sdk_jar) >(input_jars_paths)',
+ ]
+ },
+ ],
+ }],
+ ],
+ 'actions': [
+ {
+ 'action_name': 'javac_<(_target_name)',
+ 'message': 'Compiling <(_target_name) java sources',
+ 'variables': {
+ 'all_src_dirs': [
+ '>(java_in_dir)/src',
+ '>@(additional_src_dirs)',
+ '>@(generated_src_dirs)',
+ ],
+ },
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/javac.py',
+ '>!@(find >(java_in_dir)/src >(additional_src_dirs) -name "*.java")',
+ '>@(input_jars_paths)',
+ '>@(additional_input_paths)',
+ ],
+ 'outputs': [
+ '<(compile_stamp)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/javac.py',
+ '--output-dir=<(classes_dir)',
+ '--classpath=>(input_jars_paths)',
+ '--src-dirs=>(all_src_dirs)',
+ '--javac-includes=<(javac_includes)',
+ '--chromium-code=<(chromium_code)',
+ '--stamp=<(compile_stamp)',
+
+ # TODO(newt): remove this once http://crbug.com/177552 is fixed in ninja.
+ '--ignore=>!(echo \'>(_inputs)\' | md5sum)',
+ ]
+ },
+ {
+ 'variables': {
+ 'src_dirs': [
+ '<(java_in_dir)/src',
+ '>@(additional_src_dirs)',
+ ],
+ 'stamp_path': '<(lint_stamp)',
+ 'result_path': '<(lint_result)',
+ 'config_path': '<(lint_config)',
+ },
+ 'inputs': [
+ '<(compile_stamp)',
+ ],
+ 'outputs': [
+ '<(lint_stamp)',
+ ],
+ 'includes': [ 'android/lint_action.gypi' ],
+ },
+ {
+ 'action_name': 'jar_<(_target_name)',
+ 'message': 'Creating <(_target_name) jar',
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/util/md5_check.py',
+ '<(DEPTH)/build/android/gyp/jar.py',
+ '<(compile_stamp)',
+ ],
+ 'outputs': [
+ '<(javac_jar_path)',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/jar.py',
+ '--classes-dir=<(classes_dir)',
+ '--jar-path=<(javac_jar_path)',
+ '--excluded-classes=<(jar_excluded_classes)',
+ ]
+ },
+ {
+ 'action_name': 'instr_jar_<(_target_name)',
+ 'message': 'Instrumenting <(_target_name) jar',
+ 'variables': {
+ 'input_path': '<(jar_path)',
+ 'output_path': '<(jar_final_path)',
+ 'stamp_path': '<(instr_stamp)',
+ 'instr_type': 'jar',
+ },
+ 'outputs': [
+ '<(jar_final_path)',
+ ],
+ 'inputs': [
+ '<(jar_path)',
+ ],
+ 'includes': [ 'android/instr_action.gypi' ],
+ },
+ {
+ 'action_name': 'jar_toc_<(_target_name)',
+ 'message': 'Creating <(_target_name) jar.TOC',
+ 'inputs': [
+ '<(DEPTH)/build/android/gyp/util/build_utils.py',
+ '<(DEPTH)/build/android/gyp/util/md5_check.py',
+ '<(DEPTH)/build/android/gyp/jar_toc.py',
+ '<(jar_final_path)',
+ ],
+ 'outputs': [
+ '<(jar_final_path).TOC',
+ ],
+ 'action': [
+ 'python', '<(DEPTH)/build/android/gyp/jar_toc.py',
+ '--jar-path=<(jar_final_path)',
+ '--toc-path=<(jar_final_path).TOC',
+ ]
+ },
+ {
+ 'action_name': 'dex_<(_target_name)',
+ 'variables': {
+ 'conditions': [
+ ['emma_instrument != 0', {
+ 'dex_no_locals': 1,
+ }],
+ ],
+ 'dex_input_paths': [ '<(jar_final_path)' ],
+ 'output_path': '<(dex_path)',
+ },
+ 'includes': [ 'android/dex_action.gypi' ],
+ },
+ ],
+}
diff --git a/src/build/landmine_utils.py b/src/build/landmine_utils.py
new file mode 100644
index 0000000..7737832
--- /dev/null
+++ b/src/build/landmine_utils.py
@@ -0,0 +1,114 @@
+# Copyright 2013 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+
+import functools
+import logging
+import os
+import shlex
+import sys
+
+
+def memoize(default=None):
+ """This decorator caches the return value of a parameterless pure function"""
+ def memoizer(func):
+ val = []
+ @functools.wraps(func)
+ def inner():
+ if not val:
+ ret = func()
+ val.append(ret if ret is not None else default)
+ if logging.getLogger().isEnabledFor(logging.INFO):
+ print '%s -> %r' % (func.__name__, val[0])
+ return val[0]
+ return inner
+ return memoizer
+
+
+@memoize()
+def IsWindows():
+ return sys.platform in ['win32', 'cygwin']
+
+
+@memoize()
+def IsLinux():
+ return sys.platform.startswith(('linux', 'freebsd'))
+
+
+@memoize()
+def IsMac():
+ return sys.platform == 'darwin'
+
+
+@memoize()
+def gyp_defines():
+ """Parses and returns GYP_DEFINES env var as a dictionary."""
+ return dict(arg.split('=', 1)
+ for arg in shlex.split(os.environ.get('GYP_DEFINES', '')))
+
+@memoize()
+def gyp_msvs_version():
+ return os.environ.get('GYP_MSVS_VERSION', '')
+
+@memoize()
+def distributor():
+ """
+ Returns a string which is the distributed build engine in use (if any).
+ Possible values: 'goma', 'ib', ''
+ """
+ if 'goma' in gyp_defines():
+ return 'goma'
+ elif IsWindows():
+ if 'CHROME_HEADLESS' in os.environ:
+ return 'ib' # use (win and !goma and headless) as approximation of ib
+
+
+@memoize()
+def platform():
+ """
+  Returns a string representing the platform this build is targeted for.
+ Possible values: 'win', 'mac', 'linux', 'ios', 'android'
+ """
+ if 'OS' in gyp_defines():
+ if 'android' in gyp_defines()['OS']:
+ return 'android'
+ else:
+ return gyp_defines()['OS']
+ elif IsWindows():
+ return 'win'
+ elif IsLinux():
+ return 'linux'
+ else:
+ return 'mac'
+
+
+@memoize()
+def builder():
+ """
+ Returns a string representing the build engine (not compiler) to use.
+ Possible values: 'make', 'ninja', 'xcode', 'msvs', 'scons'
+ """
+ if 'GYP_GENERATORS' in os.environ:
+ # for simplicity, only support the first explicit generator
+ generator = os.environ['GYP_GENERATORS'].split(',')[0]
+ if generator.endswith('-android'):
+ return generator.split('-')[0]
+ elif generator.endswith('-ninja'):
+ return 'ninja'
+ else:
+ return generator
+ else:
+ if platform() == 'android':
+ # Good enough for now? Do any android bots use make?
+ return 'ninja'
+ elif platform() == 'ios':
+ return 'xcode'
+ elif IsWindows():
+ return 'ninja'
+ elif IsLinux():
+ return 'ninja'
+ elif IsMac():
+ return 'ninja'
+ else:
+ assert False, 'Don\'t know what builder we\'re using!'
diff --git a/src/build/landmines.py b/src/build/landmines.py
new file mode 100755
index 0000000..857585a
--- /dev/null
+++ b/src/build/landmines.py
@@ -0,0 +1,132 @@
+#!/usr/bin/env python
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""
+This script runs every build as a hook. If it detects that the build should
+be clobbered, it will touch the file <build_dir>/.landmine_triggered. The
+various build scripts will then check for the presence of this file and clobber
+accordingly. The script will also emit the reasons for the clobber to stdout.
+
+A landmine is tripped when a builder checks out a different revision, and the
+diff between the new landmines and the old ones is non-null. At this point, the
+build is clobbered.
+"""
+
+import difflib
+import logging
+import optparse
+import os
+import sys
+import subprocess
+import time
+
+import landmine_utils
+
+
+SRC_DIR = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+
+
+def get_target_build_dir(build_tool, target, is_iphone=False):
+  """
+  Returns output directory absolute path dependent on build and targets.
+  Examples:
+    r'c:\b\build\slave\win\build\src\out\Release'
+    '/mnt/data/b/build/slave/linux/build/src/out/Debug'
+    '/b/build/slave/ios_rel_device/build/src/xcodebuild/Release-iphoneos'
+
+  Keep this function in sync with tools/build/scripts/slave/compile.py
+
+  Raises NotImplementedError for build tools with no known output layout.
+  """
+  ret = None
+  if build_tool == 'xcode':
+    ret = os.path.join(SRC_DIR, 'xcodebuild',
+                       target + ('-iphoneos' if is_iphone else ''))
+  elif build_tool in ['make', 'ninja', 'ninja-ios']:  # TODO: Remove ninja-ios.
+    ret = os.path.join(SRC_DIR, 'out', target)
+  elif build_tool in ['msvs', 'vs', 'ib']:
+    ret = os.path.join(SRC_DIR, 'build', target)
+  else:
+    raise NotImplementedError('Unexpected GYP_GENERATORS (%s)' % build_tool)
+  return os.path.abspath(ret)
+
+
+def set_up_landmines(target, new_landmines):
+  """Does the work of setting, planting, and triggering landmines."""
+  out_dir = get_target_build_dir(landmine_utils.builder(), target,
+                                 landmine_utils.platform() == 'ios')
+
+  landmines_path = os.path.join(out_dir, '.landmines')
+  if not os.path.exists(out_dir):
+    os.makedirs(out_dir)
+
+  if not os.path.exists(landmines_path):
+    # First build in this directory: record the landmines, nothing to diff.
+    with open(landmines_path, 'w') as f:
+      f.writelines(new_landmines)
+  else:
+    triggered = os.path.join(out_dir, '.landmines_triggered')
+    with open(landmines_path, 'r') as f:
+      old_landmines = f.readlines()
+    if old_landmines != new_landmines:
+      # Landmines changed: write the diff (the clobber reasons) into the
+      # trigger file the build scripts check for.
+      # NOTE(review): .landmines itself is not rewritten here; presumably
+      # the subsequent clobber wipes out_dir entirely — confirm.
+      old_date = time.ctime(os.stat(landmines_path).st_ctime)
+      diff = difflib.unified_diff(old_landmines, new_landmines,
+          fromfile='old_landmines', tofile='new_landmines',
+          fromfiledate=old_date, tofiledate=time.ctime(), n=0)
+
+      with open(triggered, 'w') as f:
+        f.writelines(diff)
+    elif os.path.exists(triggered):
+      # Remove false triggered landmines.
+      os.remove(triggered)
+
+
+def process_options():
+  """Returns a list of landmine emitting scripts.
+
+  Also configures the logging level as a side effect of parsing -v.
+  Exits via parser.error() on unexpected positional arguments.
+  """
+  parser = optparse.OptionParser()
+  parser.add_option(
+      '-s', '--landmine-scripts', action='append',
+      default=[os.path.join(SRC_DIR, 'build', 'get_landmines.py')],
+      help='Path to the script which emits landmines to stdout. The target '
+           'is passed to this script via option -t. Note that an extra '
+           'script can be specified via an env var EXTRA_LANDMINES_SCRIPT.')
+  parser.add_option('-v', '--verbose', action='store_true',
+      default=('LANDMINES_VERBOSE' in os.environ),
+      help=('Emit some extra debugging information (default off). This option '
+            'is also enabled by the presence of a LANDMINES_VERBOSE environment '
+            'variable.'))
+
+  options, args = parser.parse_args()
+
+  if args:
+    parser.error('Unknown arguments %s' % args)
+
+  logging.basicConfig(
+      level=logging.DEBUG if options.verbose else logging.ERROR)
+
+  # An env-var-provided script is appended after the explicit ones.
+  extra_script = os.environ.get('EXTRA_LANDMINES_SCRIPT')
+  if extra_script:
+    return options.landmine_scripts + [extra_script]
+  else:
+    return options.landmine_scripts
+
+
+def main():
+  """Runs every landmine script for each target and plants the results."""
+  landmine_scripts = process_options()
+
+  # The dependency-dump generator performs no build, so nothing to clobber.
+  if landmine_utils.builder() == 'dump_dependency_json':
+    return 0
+
+  for target in ('Debug', 'Release', 'Debug_x64', 'Release_x64'):
+    landmines = []
+    for s in landmine_scripts:
+      proc = subprocess.Popen([sys.executable, s, '-t', target],
+                              stdout=subprocess.PIPE)
+      output, _ = proc.communicate()
+      # Normalize: one stripped landmine per newline-terminated line.
+      landmines.extend([('%s\n' % l.strip()) for l in output.splitlines()])
+    set_up_landmines(target, landmines)
+
+  return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/build/lastchange.sh b/src/build/lastchange.sh
new file mode 100755
index 0000000..f800534
--- /dev/null
+++ b/src/build/lastchange.sh
@@ -0,0 +1,55 @@
+#!/bin/sh
+#
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: morlovich@google.com (Maksim Orlovich)
+#
+# Determine last git revision containing an actual change on a given branch
+# Usage: lastchange.sh gitpath [-d default_file] [-o out_file]
+set -e
+set -u
+
+# The path argument is accepted for interface compatibility with callers;
+# the git/svn query it once drove has been removed, so it is unused.
+SVN_PATH=$1
+shift 1
+DEFAULT_FILE=
+OUT_FILE=/dev/stdout
+
+while [ $# -ge 2 ]; do
+  case $1 in
+    -d)
+      # -d has no effect if file doesn't exist.
+      if [ -f "$2" ]; then
+        DEFAULT_FILE=$2
+      fi
+      shift 2
+      ;;
+    -o)
+      OUT_FILE=$2
+      shift 2
+      ;;
+    *)
+      echo "Usage: lastchange.sh gitpath [-d default_file] [-o out_file]" >&2
+      exit 1
+      ;;
+  esac
+done
+
+# Both original branches emitted LASTCHANGE=0 (the -z branch only set dead
+# KEY/REVISION variables first), so the revision is unconditionally 0.
+echo LASTCHANGE=0 > "$OUT_FILE"
diff --git a/src/build/lastchange_wrapper.py b/src/build/lastchange_wrapper.py
new file mode 100755
index 0000000..bb8feb1
--- /dev/null
+++ b/src/build/lastchange_wrapper.py
@@ -0,0 +1,25 @@
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: morlovich@google.com (Maksim Orlovich)
+#
+# This simply forwards to the Chromium's lastchange.py script, but runs it
+# from the mod_pagespeed repo so it gets the mod_pagespeed revision and not
+# the chromium one.
+import sys
+# NOTE(review): 'util' is resolved relative to the current working
+# directory, so this wrapper assumes it is invoked from the repo root
+# (src/) — confirm against the gyp action that calls it.
+sys.path.append('util')
+from lastchange import main
+
+if __name__ == '__main__':
+  sys.exit(main())
diff --git a/src/build/libwebp.gyp b/src/build/libwebp.gyp
new file mode 100644
index 0000000..2e013d2
--- /dev/null
+++ b/src/build/libwebp.gyp
@@ -0,0 +1,190 @@
+# Copyright (c) 2012 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+{
+ 'targets': [
+ {
+ 'target_name': 'libwebp_dec',
+ 'type': 'static_library',
+ 'dependencies' : [
+ 'libwebp_dsp',
+ 'libwebp_dsp_neon',
+ 'libwebp_utils',
+ ],
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/dec/alpha.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/buffer.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/frame.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/idec.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/io.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/quant.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/tree.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/vp8.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/vp8l.c',
+ '<(DEPTH)/third_party/libwebp/src/dec/webp.c',
+ ],
+ },
+    {
+      'target_name': 'libwebp_demux',
+      'type': 'static_library',
+      'include_dirs': ['.'],
+      'sources': [
+        # Was 'demux/demux.c', a path left over from libwebp's own source
+        # tree; from src/build/ it must be DEPTH-rooted like every other
+        # source reference in this file.
+        '<(DEPTH)/third_party/libwebp/src/demux/demux.c',
+      ],
+    },
+ {
+ 'target_name': 'libwebp_dsp',
+ 'type': 'static_library',
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/dsp/alpha_processing.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/cpu.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/dec.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/dec_clip_tables.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/dec_mips32.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/dec_sse2.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/enc.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/enc_avx2.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/enc_mips32.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/enc_sse2.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/lossless.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/lossless_mips32.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/lossless_sse2.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/upsampling.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/upsampling_sse2.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/yuv.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/yuv_mips32.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/yuv_sse2.c',
+ ],
+# 'conditions': [
+# ['OS == "android"', {
+# 'includes': [ 'android/cpufeatures.gypi' ],
+# }],
+# ['order_profiling != 0', {
+# 'target_conditions' : [
+# ['_toolset=="target"', {
+# 'cflags!': [ '-finstrument-functions' ],
+# }],
+# ],
+# }],
+# ],
+ },
+ {
+ 'target_name': 'libwebp_dsp_neon',
+ 'conditions': [
+ ['target_arch == "arm" and arm_version >= 7 and (arm_neon == 1 or arm_neon_optional == 1)', {
+ 'type': 'static_library',
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/dsp/dec_neon.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/enc_neon.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/lossless_neon.c',
+ '<(DEPTH)/third_party/libwebp/src/dsp/upsampling_neon.c',
+ ],
+ # behavior similar to *.c.neon in an Android.mk
+ 'cflags!': [ '-mfpu=vfpv3-d16' ],
+ 'cflags': [ '-mfpu=neon' ],
+ },{ # "target_arch != "arm" or arm_version < 7"
+ 'type': 'none',
+ }],
+ ['order_profiling != 0', {
+ 'target_conditions' : [
+ ['_toolset=="target"', {
+ 'cflags!': [ '-finstrument-functions' ],
+ }],
+ ],
+ }],
+ ],
+ },
+ {
+ 'target_name': 'libwebp_enc',
+ 'type': 'static_library',
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/enc/alpha.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/analysis.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/backward_references.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/config.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/cost.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/filter.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/frame.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/histogram.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/iterator.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/picture.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/picture_csp.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/picture_psnr.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/picture_rescale.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/picture_tools.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/quant.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/syntax.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/token.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/tree.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/vp8l.c',
+ '<(DEPTH)/third_party/libwebp/src/enc/webpenc.c',
+ ],
+ },
+ {
+ 'target_name': 'libwebp_utils',
+ 'type': 'static_library',
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/utils/bit_reader.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/bit_writer.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/color_cache.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/filters.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/huffman.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/huffman_encode.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/quant_levels.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/quant_levels_dec.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/random.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/rescaler.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/thread.c',
+ '<(DEPTH)/third_party/libwebp/src/utils/utils.c',
+ ],
+ },
+ {
+ 'target_name': 'libwebp_mux',
+ 'type': 'static_library',
+ 'include_dirs': ['.'],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/src/mux/muxedit.c',
+ '<(DEPTH)/third_party/libwebp/src/mux/muxinternal.c',
+ '<(DEPTH)/third_party/libwebp/src/mux/muxread.c',
+ ],
+ },
+ {
+ 'target_name': 'libwebp_enc_mux',
+ 'type': 'static_library',
+ 'dependencies': [
+ 'libwebp_mux',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/third_party/libwebp/src',
+ ],
+ 'sources': [
+ '<(DEPTH)/third_party/libwebp/examples/gif2webp_util.c',
+ ],
+ },
+ {
+ 'target_name': 'libwebp',
+ 'type': 'none',
+ 'dependencies' : [
+ 'libwebp_dec',
+ 'libwebp_demux',
+ 'libwebp_dsp',
+ 'libwebp_dsp_neon',
+ 'libwebp_enc',
+ 'libwebp_enc_mux',
+ 'libwebp_utils',
+ ],
+ 'direct_dependent_settings': {
+ 'include_dirs': ['.'],
+ },
+ 'conditions': [
+ ['OS!="win"', {'product_name': 'webp'}],
+ ],
+ },
+ ],
+}
diff --git a/src/build/linux/detect_host_arch.py b/src/build/linux/detect_host_arch.py
new file mode 100644
index 0000000..2686461
--- /dev/null
+++ b/src/build/linux/detect_host_arch.py
@@ -0,0 +1,35 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+"""Outputs host CPU architecture in format recognized by gyp."""
+
+import platform
+import re
+import sys
+
+
+def main():
+ host_arch = platform.machine()
+
+ # Convert machine type to format recognized by gyp.
+ if re.match(r'i.86', host_arch) or host_arch == 'i86pc':
+ host_arch = 'ia32'
+ elif host_arch in ['x86_64', 'amd64']:
+ host_arch = 'x64'
+ elif host_arch.startswith('arm'):
+ host_arch = 'arm'
+
+ # platform.machine is based on running kernel. It's possible to use 64-bit
+ # kernel with 32-bit userland, e.g. to give linker slightly more memory.
+ # Distinguish between different userland bitness by querying
+ # the python binary.
+ if host_arch == 'x64' and platform.architecture()[0] == '32bit':
+ host_arch = 'ia32'
+
+ print host_arch
+ return 0
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/build/mod_pagespeed.map b/src/build/mod_pagespeed.map
new file mode 100644
index 0000000..8c3a1b0
--- /dev/null
+++ b/src/build/mod_pagespeed.map
@@ -0,0 +1,5 @@
+{
+ /* Make sure we don't export anything unneeded */
+ global: pagespeed_module;
+ local: *;
+};
diff --git a/src/build/output_dll_copy.rules b/src/build/output_dll_copy.rules
new file mode 100644
index 0000000..c6e9051
--- /dev/null
+++ b/src/build/output_dll_copy.rules
@@ -0,0 +1,17 @@
+<?xml version="1.0" encoding="utf-8"?>
+<VisualStudioToolFile
+ Name="Output DLL copy"
+ Version="8.00"
+ >
+ <Rules>
+ <CustomBuildRule
+ Name="Output DLL copy"
+ CommandLine="xcopy /R /C /Y $(InputPath) $(OutDir)"
+ Outputs="$(OutDir)\$(InputFileName)"
+ FileExtensions="*.dll"
+ >
+ <Properties>
+ </Properties>
+ </CustomBuildRule>
+ </Rules>
+</VisualStudioToolFile>
diff --git a/src/build/pagespeed_overrides.gypi b/src/build/pagespeed_overrides.gypi
new file mode 100644
index 0000000..e2fb7a2
--- /dev/null
+++ b/src/build/pagespeed_overrides.gypi
@@ -0,0 +1,142 @@
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# PageSpeed overrides for Chromium's common.gypi.
+{
+ 'variables': {
+ # Putting a variables dict inside another variables dict looks
+ # kind of weird. This is done so that some variables are defined
+ # as variables within the outer variables dict here. This is
+ # necessary to get these variables defined for the conditions
+ # within this variables dict that operate on these variables.
+ 'variables': {
+ # Whether or not we are building for native client.
+ 'build_nacl%': 0,
+ },
+
+ # Copy conditionally-set variables out one scope.
+ 'build_nacl%': '<(build_nacl)',
+
+ # Conditions that operate on our variables defined above.
+ 'conditions': [
+ ['build_nacl==1', {
+ # Disable position-independent code when building under Native
+ # Client.
+ 'linux_fpic': 0,
+ }],
+ ],
+
+
+ # Override a few Chromium variables:
+
+ # Chromium uses system shared libraries on Linux by default
+ # (Chromium already has transitive dependencies on these libraries
+ # via gtk). We want to link these libraries into our binaries so
+ # we change the default behavior.
+ 'use_system_libjpeg': 0,
+ 'use_system_libpng': 0,
+ 'use_system_zlib': 0,
+
+ # We don't use google API keys in the PageSpeed build, so disable them.
+ 'use_official_google_api_keys': 0,
+
+ # Disable the chromium linting plugins since our code doesn't
+ # (yet) meet their requirements.
+ 'clang_use_chrome_plugins': 0,
+
+ # Disable use of special ld gold flags, since it isn't installed
+ # by default.
+ 'linux_use_gold_binary': 0,
+ 'linux_use_gold_flags': 0,
+ },
+ 'target_defaults': {
+ # Make sure our shadow view of chromium source is available to
+ # targets that don't explicitly declare their dependencies and
+ # assume chromium source headers are available from the root
+ # (third_party/modp_b64 is one such target).
+ 'include_dirs': [
+ '<(DEPTH)/third_party/chromium/src',
+ ],
+ 'defines': [
+ 'CHROMIUM_REVISION=<(chromium_revision)',
+ ],
+
+ # ABI-incompatible changes are trouble when you have a library, so turn off
+ # _GLIBCXX_DEBUG --- it makes various STL objects have different types and
+ # sizes.
+ 'defines!': [
+ '_GLIBCXX_DEBUG=1'
+ ],
+ },
+ 'conditions': [
+ ['build_nacl==1', {
+ 'target_defaults': {
+ 'defines': [
+ # NaCL newlib's libpthread.a provides the
+ # GetRunningOnValgrind symbol already, so we should not
+ # provide it.
+ 'DYNAMIC_ANNOTATIONS_PROVIDE_RUNNING_ON_VALGRIND=0',
+ ],
+ 'include_dirs': [
+ '<(DEPTH)/build/nacl_header_stubs',
+ ],
+ },
+ }],
+ ['os_posix==1 and OS!="mac"', {
+ 'target_defaults': {
+ 'ldflags': [
+ # Fail to link if there are any undefined symbols.
+ '-Wl,-z,defs',
+ ],
+ }
+ }],
+ ['OS=="mac"', {
+ 'target_defaults': {
+ 'xcode_settings': {
+ 'conditions': [
+ ['clang==1', {
+ # Chromium's common.gypi does not currently scope the
+ # clang binary paths relative to DEPTH, so we must
+ # override the paths here.
+ 'CC': '$(SOURCE_ROOT)/<(DEPTH)/third_party/llvm-build/Release+Asserts/bin/clang',
+ 'LDPLUSPLUS': '$(SOURCE_ROOT)/<(DEPTH)/third_party/llvm-build/Release+Asserts/bin/clang++',
+ }],
+ ]
+ },
+ },
+ }],
+ ['OS=="win"', {
+ 'target_defaults': {
+ # Remove the following defines, which are normally defined by
+ # Chromium's common.gypi.
+ 'defines!': [
+ # Chromium's common.gypi disables tr1. We need it for tr1
+ # regex so remove their define to disable it.
+ '_HAS_TR1=0',
+
+ # Chromium disables exceptions in some environments, but our
+ # use of tr1 regex requires exception support, so we have to
+ # re-enable it here.
+ '_HAS_EXCEPTIONS=0',
+ ],
+ 'msvs_settings': {
+ 'VCCLCompilerTool': {
+ 'ExceptionHandling': '1', # /EHsc
+ },
+ },
+ },
+ }]
+ ],
+}
+
diff --git a/src/build/release.gypi b/src/build/release.gypi
new file mode 100644
index 0000000..7595ef5
--- /dev/null
+++ b/src/build/release.gypi
@@ -0,0 +1,17 @@
+{
+ 'conditions': [
+ # Handle build types.
+ ['buildtype=="Dev"', {
+ 'includes': ['internal/release_impl.gypi'],
+ }],
+ ['buildtype=="Official"', {
+ 'includes': ['internal/release_impl_official.gypi'],
+ }],
+ # TODO(bradnelson): may also need:
+ # checksenabled
+ # coverage
+ # dom_stats
+ # pgo_instrument
+ # pgo_optimize
+ ],
+}
diff --git a/src/build/version.py b/src/build/version.py
new file mode 100755
index 0000000..29e8d18
--- /dev/null
+++ b/src/build/version.py
@@ -0,0 +1,201 @@
+#!/usr/bin/env python
+# Copyright 2014 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+# (See http://src.chromium.org/viewvc/chrome/trunk/src/LICENSE)
+# This file itself is from
+# http://src.chromium.org/viewvc/chrome/trunk/src/build/util/ as of
+# revision r252481
+
+"""
+version.py -- Chromium version string substitution utility.
+"""
+
+import getopt
+import os
+import sys
+
+
+class Usage(Exception):
+  """Raised on command-line usage errors; msg is shown to the user."""
+  def __init__(self, msg):
+    self.msg = msg
+
+
+def fetch_values_from_file(values_dict, file_name):
+  """
+  Fetches KEYWORD=VALUE settings from the specified file.
+
+  Everything to the left of the first '=' is the keyword,
+  everything to the right is the value. No stripping of
+  white space, so beware.
+
+  The file must exist, otherwise you get the Python exception from open().
+  """
+  # The handle is never closed explicitly; CPython's refcounting reclaims
+  # it promptly, though other runtimes would hold it until GC.
+  for line in open(file_name, 'r').readlines():
+    key, val = line.rstrip('\r\n').split('=', 1)
+    values_dict[key] = val
+
+
+def fetch_values(file_list):
+  """
+  Returns a dictionary of values to be used for substitution, populating
+  the dictionary with KEYWORD=VALUE settings from the files in 'file_list'.
+
+  Explicitly adds the following value from internal calculations:
+
+    OFFICIAL_BUILD
+  """
+  # Official build bots export CHROME_BUILD_TYPE='_official'.
+  CHROME_BUILD_TYPE = os.environ.get('CHROME_BUILD_TYPE')
+  if CHROME_BUILD_TYPE == '_official':
+    official_build = '1'
+  else:
+    official_build = '0'
+
+  values = dict(
+    OFFICIAL_BUILD = official_build,
+  )
+
+  # Later files win for duplicate keys.
+  for file_name in file_list:
+    fetch_values_from_file(values, file_name)
+
+  return values
+
+
+def subst_template(contents, values):
+  """
+  Returns the template with substituted values from the specified dictionary.
+
+  Keywords to be substituted are surrounded by '@': @KEYWORD@.
+
+  No attempt is made to avoid recursive substitution. The order
+  of evaluation is random based on the order of the keywords returned
+  by the Python dictionary. So do NOT substitute a value that
+  contains any @KEYWORD@ strings expecting them to be recursively
+  substituted, okay?
+  """
+  for key, val in values.iteritems():
+    try:
+      contents = contents.replace('@' + key + '@', val)
+    except TypeError:
+      # replace() requires strings; dump the offending pair for debugging
+      # (Python 2 print statement — this whole file targets Python 2).
+      print repr(key), repr(val)
+  return contents
+
+
+def subst_file(file_name, values):
+ """
+ Returns the contents of the specified file_name with substited
+ values from the specified dictionary.
+
+ This is like subst_template, except it operates on a file.
+ """
+ template = open(file_name, 'r').read()
+ return subst_template(template, values);
+
+
+def write_if_changed(file_name, contents):
+  """
+  Writes the specified contents to the specified file_name
+  iff the contents are different than the current contents.
+  """
+  try:
+    old_contents = open(file_name, 'r').read()
+  except EnvironmentError:
+    # Missing/unreadable file: fall through to the unconditional write.
+    pass
+  else:
+    if contents == old_contents:
+      return
+    # Unlink first so the rewrite always produces a fresh file.
+    os.unlink(file_name)
+  open(file_name, 'w').write(contents)
+
+
+def main(argv=None):
+ if argv is None:
+ argv = sys.argv
+
+ short_options = 'e:f:i:o:t:h'
+ long_options = ['eval=', 'file=', 'help']
+
+ helpstr = """\
+Usage: version.py [-h] [-f FILE] ([[-i] FILE] | -t TEMPLATE) [[-o] FILE]
+
+ -f FILE, --file=FILE Read variables from FILE.
+ -i FILE, --input=FILE Read strings to substitute from FILE.
+ -o FILE, --output=FILE Write substituted strings to FILE.
+ -t TEMPLATE, --template=TEMPLATE Use TEMPLATE as the strings to substitute.
+ -e VAR=VAL, --eval=VAR=VAL Evaluate VAL after reading variables. Can
+ be used to synthesize variables. e.g.
+ -e 'PATCH_HI=int(PATCH)/256'.
+ -h, --help Print this help and exit.
+"""
+
+ evals = {}
+ variable_files = []
+ in_file = None
+ out_file = None
+ template = None
+
+ try:
+ try:
+ opts, args = getopt.getopt(argv[1:], short_options, long_options)
+ except getopt.error, msg:
+ raise Usage(msg)
+ for o, a in opts:
+ if o in ('-e', '--eval'):
+ try:
+ evals.update(dict([a.split('=',1)]))
+ except ValueError:
+ raise Usage("-e requires VAR=VAL")
+ elif o in ('-f', '--file'):
+ variable_files.append(a)
+ elif o in ('-i', '--input'):
+ in_file = a
+ elif o in ('-o', '--output'):
+ out_file = a
+ elif o in ('-t', '--template'):
+ template = a
+ elif o in ('-h', '--help'):
+ print helpstr
+ return 0
+ while len(args) and (in_file is None or out_file is None or
+ template is None):
+ if in_file is None:
+ in_file = args.pop(0)
+ elif out_file is None:
+ out_file = args.pop(0)
+ if args:
+ msg = 'Unexpected arguments: %r' % args
+ raise Usage(msg)
+ except Usage, err:
+ sys.stderr.write(err.msg)
+ sys.stderr.write('; Use -h to get help.\n')
+ return 2
+
+ values = fetch_values(variable_files)
+ for key, val in evals.iteritems():
+ values[key] = str(eval(val, globals(), values))
+
+ if template is not None:
+ contents = subst_template(template, values)
+ elif in_file:
+ contents = subst_file(in_file, values)
+ else:
+ # Generate a default set of version information.
+ contents = """MAJOR=%(MAJOR)s
+MINOR=%(MINOR)s
+BUILD=%(BUILD)s
+PATCH=%(PATCH)s
+LASTCHANGE=%(LASTCHANGE)s
+OFFICIAL_BUILD=%(OFFICIAL_BUILD)s
+""" % values
+
+
+ if out_file:
+ write_if_changed(out_file, contents)
+ else:
+ print contents
+
+ return 0
+
+
+if __name__ == '__main__':
+ sys.exit(main())
diff --git a/src/build/wrappers/ar.sh b/src/build/wrappers/ar.sh
new file mode 100755
index 0000000..80d1f04
--- /dev/null
+++ b/src/build/wrappers/ar.sh
@@ -0,0 +1,28 @@
+#!/bin/sh
+#
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: morlovich@google.com (Maksim Orlovich)
+#
+# A very simple wrapper around system 'ar' that drops the T (thin archive)
+# option from archive commands, as gyp always put it in on Linux, while
+# our build machines don't have it.
+
+# Only the exact first-argument spelling 'crsT' is rewritten (the one form
+# gyp emits); any other invocation is passed through to /usr/bin/ar as-is.
+if [ "$1" = "crsT" ]; then
+  shift 1
+  exec /usr/bin/ar crs "$@"
+else
+  exec /usr/bin/ar "$@"
+fi
diff --git a/src/install/Makefile b/src/install/Makefile
new file mode 100644
index 0000000..376def9
--- /dev/null
+++ b/src/install/Makefile
@@ -0,0 +1,615 @@
+# This Makefile is used to help drive the installation of mod_pagespeed into
+# an Apache installation.
+#
+# Note that the location of the Apache configuration files may vary by
+# Linux distribution. For example, we have seen the following installation
+# directories for the default Apache install.
+#
+# Ubuntu /etc/apache2/mods-enabled/*.conf
+# CentOS /etc/httpd/conf.d/*.conf
+# Custom Apache build from source /usr/local/apache2/conf/extra/
+#
+# In the case of the custom Apache build, you must also edit
+# /usr/local/apache2/conf to add "Include conf/extra/pagespeed.conf"
+#
+# The goal of this Makefile is to help generate basic default
+# configuration files that can then be edited to tune the HTML
+# performance based on the Apache installation, internet-visible
+# hostnames, and the specific needs of the site.
+#
+# The usage model of this Makefile is that, as an unprivileged user, you
+# create the desired configuration files in /tmp, where you can examine
+# them before installing them. You can then do either of these:
+#
+# (a) Run "make -n install" to see the recommended installation commands,
+# and execute them by hand
+# (b) Run "sudo make install" to install them automatically.
+#
+#
+# To install mod_pagespeed properly, we need to know the locations of
+# Apache configuration scripts and binaries.  These are specified
+# as Makefile variables which can be overridden on the command line.
+# They have defaults, which will often need to be changed.
+
+
+# The location of the Apache root installation directory. This helps form
+# defaults for other variables, but each of those can be overridden.
+APACHE_ROOT = /etc/httpd
+
+# The installation directory for modules (mod*.so)
+APACHE_MODULES = $(APACHE_ROOT)/modules
+
+# The root directory Apache uses for serving files.
+APACHE_DOC_ROOT = /var/www
+# The domain Apache is serving from
+#APACHE_DOMAIN = localhost:8080 # For test-server.
+APACHE_DOMAIN = localhost
+APACHE_HTTPS_DOMAIN = localhost
+APACHE_PORT = 80
+APACHE_SECONDARY_PORT = 8084
+APACHE_TERTIARY_PORT = 8085
+RCPORT1 = 9091
+RCPORT2 = 9092
+RCPORT3 = 9093
+RCPORT4 = 9094
+RCPORT5 = 9095
+RCPORT6 = 9096
+RCPORT7 = 9097
+SSL_CERT_DIR = /etc/ssl/certs
+SSL_CERT_FILE_COMMAND =
+
+# These are set via command-line when run via 'ubuntu.sh', 'centos.sh',
+# or 'opensuse.sh'. However during development we use this Makefile
+# directly, so we set defaults from an Ubuntu version on our dev
+# boxes (whose Apache lacks 'graceful-stop').
+APACHE_CONTROL_PROGRAM = /etc/init.d/httpd
+APACHE_START = $(APACHE_CONTROL_PROGRAM) start
+APACHE_STOP_COMMAND = stop
+APACHE_PIDFILE = /var/run/apache2.pid
+APACHE_PROGRAM = /usr/sbin/apache2
+
+# For testing proxying of an external domain, this represents the domain we
+# are proxying from.
+PAGESPEED_TEST_HOST ?= modpagespeed.com
+export PAGESPEED_TEST_HOST
+
+# The installation directory for executables
+BINDIR = /usr/bin
+
+# A temp directory to stage generated configuration files. This must be
+# writable by the user, and readable by root.
+STAGING_DIR = /tmp/mod_pagespeed.install
+
+# The mod_pagespeed module is specified relative to the install directory,
+# which is src/install.
+MOD_PAGESPEED_ROOT = $(shell dirname `pwd`)
+PAGESPEED_MODULE = $(MOD_PAGESPEED_ROOT)/out/Release/libmod_pagespeed.so
+PAGESPEED_MODULE_24 = $(MOD_PAGESPEED_ROOT)/out/Release/libmod_pagespeed_ap24.so
+PAGESPEED_JS_MINIFY = $(MOD_PAGESPEED_ROOT)/out/Release/js_minify
+
+# On systems derived from the NCSA configuration files by Rob McCool,
+# you enable a module by writing its .conf file into
+# $(APACHE_ROOT)/mods-available/pagespeed.conf, and a single Load command into
+# $(APACHE_ROOT)/mods-enabled/pagespeed.conf. So if that exists, then we'll
+# try to automate that.
+MODS_ENABLED_DIR = $(shell if [ -d $(APACHE_ROOT)/mods-enabled ]; then \
+ echo $(APACHE_ROOT)/mods-enabled; fi)
+MODS_AVAILABLE_DIR = $(shell if [ -d $(APACHE_ROOT)/mods-available ]; then \
+ echo $(APACHE_ROOT)/mods-available; fi)
+
+# Determines where mod_pagespeed should put cache.
+MOD_PAGESPEED_CACHE = /var/cache/mod_pagespeed
+
+# Determines where mod_pagespeed should write various logs.
+MOD_PAGESPEED_LOG = /var/log/pagespeed
+
+# The username used to run apache. This is needed to create the directory
+# used to store mod_pagespeed files and cache data.
+APACHE_USER = www-data
+
+# Set this to 1 to enable mod_proxy and mod_rewrite
+ENABLE_PROXY = 0
+
+.PHONY : config_file echo_vars
+
+echo_vars :
+ @echo Run "restart" to add default instaweb config to apache
+ @echo Or run "stop", "staging", "install", and "start".
+ @echo These configuration variables can be reset on the make command line,
+ @echo e.g. \"make config_file\"
+ @echo ""
+ @echo " APACHE_CONF_FILE=$(APACHE_CONF_FILE)"
+ @echo " APACHE_MODULES=$(APACHE_MODULES)"
+ @echo " APACHE_ROOT=$(APACHE_ROOT)"
+ @echo " APACHE_START=$(APACHE_START)"
+ @echo " APACHE_STOP_COMMAND=$(APACHE_STOP_COMMAND)"
+ @echo " MOD_PAGESPEED_CACHE=$(MOD_PAGESPEED_CACHE)"
+ @echo " MOD_PAGESPEED_LOG=$(MOD_PAGESPEED_LOG)"
+ @echo " MODS_ENABLED_DIR=$(MODS_ENABLED_DIR)"
+ @echo " MODS_AVAILABLE_DIR=$(MODS_AVAILABLE_DIR)"
+ @echo " STAGING_DIR=$(STAGING_DIR)"
+ @echo " ENABLE_PROXY=${ENABLE_PROXY}"
+ @echo " SLURP_DIR=${SLURP_DIR}"
+ @echo " SHARED_MEM_LOCKS=${SHARED_MEM_LOCKS}"
+
+
+# In some Linux distributions, such as Ubuntu, there are two commands
+# in the default root config file:
+#   Include /etc/apache2/mods-enabled/*.load
+#   Include /etc/apache2/mods-enabled/*.conf
+# we need to write a one-line '.load' file and put that and our '.conf' file
+# into .../mods-enabled.
+#
+# In other distributions, such as CentOS, there is an 'Include DIR/*.conf',
+# but there is no implicit loading of modules, so we write our Load line
+# directly into our config file.
+
+# In either case, independent configuration files go here (this directory
+# is read by both distributions on startup after the modules load).
+APACHE_CONF_D = $(APACHE_ROOT)/conf.d
+
+ifeq ($(MODS_ENABLED_DIR),)
+
+# This is a CentOS-like installation, where there is no explicit .load
+# file, and we instead pre-pend the LoadModule command to the .conf file.
+APACHE_CONF_DIR = $(APACHE_CONF_D)
+CONF_SOURCES = $(STAGING_DIR)/pagespeed.load $(STAGING_DIR)/pagespeed.conf
+
+else
+
+# This is an Ubuntu-like installation, where the .load files are placed
+# separately into a mods-enabled directory, and the .conf file is loaded
+# independently.
+MODS_ENABLED_INSTALL_COMMANDS = \
+ rm -f $(MODS_ENABLED_DIR)/pagespeed.load ; \
+ cp -f $(STAGING_DIR)/pagespeed.load $(MODS_AVAILABLE_DIR) ; \
+ cd $(MODS_ENABLED_DIR) && ln -s ../mods-available/pagespeed.load ; \
+ rm -f $(MODS_ENABLED_DIR)/headers.load ; \
+ cd $(MODS_ENABLED_DIR) && ln -s ../mods-available/headers.load ; \
+ rm -f $(MODS_ENABLED_DIR)/deflate.load ; \
+ cd $(MODS_ENABLED_DIR) && ln -s ../mods-available/deflate.load
+
+APACHE_CONF_DIR = $(MODS_AVAILABLE_DIR)
+CONF_SOURCES = $(STAGING_DIR)/pagespeed.conf
+
+endif
+
+
+# We will generate 'proxy.conf' in the staging area
+# unconditionally, but we will load it into the
+# Apache server only if the user installs with ENABLE_PROXY=1
+ifeq ($(ENABLE_PROXY),1)
+CONF_SOURCES += $(STAGING_DIR)/proxy.conf
+endif
+
+APACHE_SLURP_READ_ONLY_COMMAND=\#ModPagespeedSlurpReadOnly on
+
+ifeq ($(SLURP_DIR),)
+ APACHE_SLURP_DIR_COMMAND = \#ModPagespeedSlurpDirectory ...
+else
+ APACHE_SLURP_DIR_COMMAND = ModPagespeedSlurpDirectory $(SLURP_DIR)
+ ifeq ($(SLURP_WRITE),1)
+ APACHE_SLURP_READ_ONLY_COMMAND=ModPagespeedSlurpReadOnly off
+ else
+ APACHE_SLURP_READ_ONLY_COMMAND=ModPagespeedSlurpReadOnly on
+ endif
+endif
+
+ifeq ($(STRESS_TEST),1)
+ # remove prefix
+ STRESS_TEST_SED_PATTERN=^\#STRESS
+else
+ # remove whole line
+ STRESS_TEST_SED_PATTERN=^\#STRESS.*\n
+endif
+
+ifeq ($(REWRITE_TEST),1)
+ # remove prefix
+ REWRITE_TEST_SED_PATTERN=^\#REWRITE
+else
+ # remove whole line
+ REWRITE_TEST_SED_PATTERN=^\#REWRITE.*\n
+endif
+
+ifeq ($(LOADTEST_TRACE_TEST),1)
+ # remove coverage prefix
+ LOADTEST_TEST_SED_PATTERN=^\#LOADTEST
+else
+ # remove coverage lines
+ LOADTEST_TEST_SED_PATTERN=^\#LOADTEST.*\n
+endif
+
+ifeq ($(PROXY_TEST),1)
+ # remove prefix
+ PROXY_TEST_SED_PATTERN=^\#PROXY
+else
+ # remove whole line
+ PROXY_TEST_SED_PATTERN=^\#PROXY.*\n
+endif
+
+ifeq ($(SLURP_TEST),1)
+ # remove prefix
+ SLURP_TEST_SED_PATTERN=^\#SLURP
+else
+ # remove whole line
+ SLURP_TEST_SED_PATTERN=^\#SLURP.*\n
+endif
+
+ifeq ($(SHARED_MEM_LOCK_TEST),1)
+ # remove prefix
+ SHARED_MEM_LOCK_TEST_SED_PATTERN=^\#SHARED_MEM_LOCKS
+else
+ # remove whole line
+ SHARED_MEM_LOCK_TEST_SED_PATTERN=^\#SHARED_MEM_LOCKS.*\n
+endif
+
+ifeq ($(MEMCACHED_TEST),1)
+ # remove prefix
+ MEMCACHED_TEST_SED_PATTERN=^\#MEMCACHED
+else
+ # remove whole line
+ MEMCACHED_TEST_SED_PATTERN=^\#MEMCACHED.*\n
+endif
+
+ifeq ($(IPRO_PRESERVE_LOADTEST_TEST),1)
+ # remove prefix
+ IPRO_PRESERVE_LOADTEST_TEST_SED_PATTERN=^\#IPRO_PRESERVE_LOADTEST
+else
+ # remove whole line
+ IPRO_PRESERVE_LOADTEST_TEST_SED_PATTERN=^\#IPRO_PRESERVE_LOADTEST.*\n
+endif
+
+ifeq ($(MEMCACHE_LOADTEST_TEST),1)
+ # remove prefix
+ MEMCACHE_LOADTEST_TEST_SED_PATTERN=^\#MEMCACHE_LOADTEST
+else
+ # remove whole line
+ MEMCACHE_LOADTEST_TEST_SED_PATTERN=^\#MEMCACHE_LOADTEST.*\n
+endif
+
+ifeq ($(PURGING_LOADTEST_TEST),1)
+ # remove prefix
+ PURGING_LOADTEST_TEST_SED_PATTERN=^\#PURGING_LOADTEST
+else
+ # remove whole line
+ PURGING_LOADTEST_TEST_SED_PATTERN=^\#PURGING_LOADTEST.*\n
+endif
+
+ifeq ($(IUR_LOADTEST_TEST),1)
+ # remove prefix
+ IUR_LOADTEST_TEST_SED_PATTERN=^\#IUR_LOADTEST
+ # remove whole explicit domain authorization line
+ DOMAIN_AUTH_SED_PATTERN=^\#DOMAIN_AUTH_LOADTEST.*\n
+else
+ # remove whole line
+ IUR_LOADTEST_TEST_SED_PATTERN=^\#IUR_LOADTEST.*\n
+ ifeq ($(LOADTEST_TRACE_TEST),1)
+ # remove prefix for explicit domain authorization line
+ DOMAIN_AUTH_SED_PATTERN=^\#DOMAIN_AUTH_LOADTEST
+ endif
+endif
+
+ifeq ($(SPELING_TEST),1)
+ # remove prefix
+ SPELING_TEST_SED_PATTERN=^\#SPELING
+else
+ # remove whole line
+ SPELING_TEST_SED_PATTERN=^\#SPELING.*\n
+endif
+
+ifeq ($(REWRITE_TEST),1)
+ # remove prefix
+ REWRITE_TEST_SED_PATTERN=^\#REWRITE
+else
+ # remove whole line
+ REWRITE_TEST_SED_PATTERN=^\#REWRITE.*\n
+endif
+
+ifeq ($(GZIP_TEST),1)
+ # remove prefix
+ GZIP_TEST_SED_PATTERN=^\#GZIP
+else
+ # remove whole line
+ GZIP_TEST_SED_PATTERN=^\#GZIP.*\n
+endif
+
+ifeq ($(EXPERIMENT_GA_TEST),1)
+ # remove prefix
+ EXPERIMENT_GA_TEST_SED_PATTERN=^\#EXPERIMENT_GA
+else
+ # remove whole line
+ EXPERIMENT_GA_TEST_SED_PATTERN=^\#EXPERIMENT_GA.*\n
+endif
+
+ifeq ($(EXPERIMENT_NO_GA_TEST),1)
+ # remove prefix
+ EXPERIMENT_NO_GA_TEST_SED_PATTERN=^\#EXPERIMENT_NO_GA
+else
+ # remove whole line
+ EXPERIMENT_NO_GA_TEST_SED_PATTERN=^\#EXPERIMENT_NO_GA.*\n
+endif
+
+ifeq ($(HTTPS_TEST),1)
+ # remove prefix
+ HTTPS_TEST_SED_PATTERN=^\#HTTPS
+else
+ # remove whole line
+ HTTPS_TEST_SED_PATTERN=^\#HTTPS.*\n
+endif
+
+ifeq ($(ALL_DIRECTIVES_TEST),1)
+ # remove prefix
+ ALL_DIRECTIVES_TEST_SED_PATTERN=^\#ALL_DIRECTIVES
+else
+ # remove whole line
+ ALL_DIRECTIVES_TEST_SED_PATTERN=^\#ALL_DIRECTIVES.*\n
+endif
+
+ifeq ($(PER_VHOST_STATS_TEST),1)
+ # remove prefix
+ PER_VHOST_STATS_TEST_SED_PATTERN=^\#PER_VHOST_STATS
+else
+ # remove whole line
+ PER_VHOST_STATS_TEST_SED_PATTERN=^\#PER_VHOST_STATS.*\n
+endif
+
+ifeq ($(NO_PER_VHOST_STATS_TEST),1)
+ # remove prefix
+ NO_PER_VHOST_STATS_TEST_SED_PATTERN=^\#NO_PER_VHOST_STATS
+else
+ # remove whole line
+ NO_PER_VHOST_STATS_TEST_SED_PATTERN=^\#NO_PER_VHOST_STATS.*\n
+endif
+
+ifeq ($(STATS_LOGGING_TEST),1)
+ # remove prefix
+ STATS_LOGGING_TEST_SED_PATTERN=^\#STATS_LOGGING
+else
+ # remove whole line
+ STATS_LOGGING_TEST_SED_PATTERN=^\#STATS_LOGGING.*\n
+endif
+
+# Note that the quoted sed replacement for APACHE_SLURP_DIR_COMMAND is because
+# that might have embedded spaces, and 'sed' is interpreted first by bash.
+
+$(STAGING_DIR)/pagespeed.conf : common/pagespeed.conf.template debug.conf.template
+ sed -e "s!@@APACHE_DOC_ROOT@@!$(APACHE_DOC_ROOT)!g" \
+ -e "s!@@APACHE_DOMAIN@@!$(APACHE_DOMAIN)!g" \
+ -e "s!@@APACHE_HTTPS_DOMAIN@@!$(APACHE_HTTPS_DOMAIN)!g" \
+ -e "s!@@APACHE_MODULES@@!$(APACHE_MODULES)!g" \
+ -e "s!@@APACHE_SECONDARY_PORT@@!$(APACHE_SECONDARY_PORT)!g" \
+ -e "s!@@APACHE_TERTIARY_PORT@@!$(APACHE_TERTIARY_PORT)!g" \
+ -e "s!@@PAGESPEED-TEST-HOST@@!$(PAGESPEED_TEST_HOST)!g" \
+ -e "s!@@MOD_PAGESPEED_CACHE@@!$(MOD_PAGESPEED_CACHE)!g" \
+ -e "s!@@MOD_PAGESPEED_LOG@@!$(MOD_PAGESPEED_LOG)!g" \
+ -e "s!@@RCPORT1@@!$(RCPORT1)!g" \
+ -e "s!@@RCPORT2@@!$(RCPORT2)!g" \
+ -e "s!@@RCPORT3@@!$(RCPORT3)!g" \
+ -e "s!@@RCPORT4@@!$(RCPORT4)!g" \
+ -e "s!@@RCPORT5@@!$(RCPORT5)!g" \
+ -e "s!@@RCPORT6@@!$(RCPORT6)!g" \
+ -e "s!@@RCPORT7@@!$(RCPORT7)!g" \
+ -e "s!@@SSL_CERT_DIR@@!$(SSL_CERT_DIR)!g" \
+ -e "s!@@SSL_CERT_FILE_COMMAND@@!$(SSL_CERT_FILE_COMMAND)!g" \
+ -e "s@# ModPagespeedSlurpDirectory ...@$(APACHE_SLURP_DIR_COMMAND)@g" \
+ -e "s@# ModPagespeedSlurpReadOnly on@$(APACHE_SLURP_READ_ONLY_COMMAND)@g" \
+ -e "s|@@TMP_SLURP_DIR@@|$(TMP_SLURP_DIR)|g" \
+ -e "s|@@MEMCACHED_PORT@@|$(MEMCACHED_PORT)|g" \
+ -e "s@$(STRESS_TEST_SED_PATTERN)@@" \
+ -e "s@$(REWRITE_TEST_SED_PATTERN)@@" \
+ -e "s@$(LOADTEST_TEST_SED_PATTERN)@@" \
+ -e "s@$(PROXY_TEST_SED_PATTERN)@@" \
+ -e "s@$(SLURP_TEST_SED_PATTERN)@@" \
+ -e "s@$(SHARED_MEM_LOCK_TEST_SED_PATTERN)@@" \
+ -e "s@$(SPELING_TEST_SED_PATTERN)@@" \
+ -e "s@$(MEMCACHED_TEST_SED_PATTERN)@@" \
+ -e "s@$(IPRO_PRESERVE_LOADTEST_TEST_SED_PATTERN)@@" \
+ -e "s@$(MEMCACHE_LOADTEST_TEST_SED_PATTERN)@@" \
+ -e "s@$(PURGING_LOADTEST_TEST_SED_PATTERN)@@" \
+ -e "s@$(IUR_LOADTEST_TEST_SED_PATTERN)@@" \
+ -e "s@$(DOMAIN_AUTH_SED_PATTERN)@@" \
+ -e "s@$(GZIP_TEST_SED_PATTERN)@@" \
+ -e "s@$(HTTPS_TEST_SED_PATTERN)@@" \
+ -e "s@$(EXPERIMENT_GA_TEST_SED_PATTERN)@@" \
+ -e "s@$(EXPERIMENT_NO_GA_TEST_SED_PATTERN)@@" \
+ -e "s@$(ALL_DIRECTIVES_TEST_SED_PATTERN)@@" \
+ -e "s@$(PER_VHOST_STATS_TEST_SED_PATTERN)@@" \
+ -e "s@$(NO_PER_VHOST_STATS_TEST_SED_PATTERN)@@" \
+ -e "s@$(STATS_LOGGING_TEST_SED_PATTERN)@@" \
+ $^ > $@
+ ! grep '@@' $@ # Make sure we don't have any remaining @@variables@@
+
+$(STAGING_DIR)/proxy.conf : proxy.conf.template
+ sed -e s@APACHE_MODULES@$(APACHE_MODULES)@g \
+ $< > $@
+
+CONF_TEMPLATES = $(STAGING_DIR)/pagespeed.conf \
+ $(STAGING_DIR)/proxy.conf
+
+setup_staging_dir :
+ rm -rf $(STAGING_DIR)
+ mkdir -p $(STAGING_DIR)
+
+LIBRARY_CONF_SOURCE = \
+ $(MOD_PAGESPEED_ROOT)/net/instaweb/genfiles/conf/pagespeed_libraries.conf
+
+# Generate a configuration file and copy it to the staging area.
+# Also copy the example tree, and the built Apache module
+staging_except_module : setup_staging_dir $(CONF_TEMPLATES)
+ cat common/pagespeed.load.template | \
+ sed s~@@APACHE_MODULEDIR@@~$(APACHE_MODULES)~ | \
+ sed s/@@COMMENT_OUT_DEFLATE@@// > $(STAGING_DIR)/pagespeed.load
+ cp -f $(LIBRARY_CONF_SOURCE) $(STAGING_DIR)/pagespeed_libraries.conf
+ $(MODS_ENABLED_STAGING_COMMANDS)
+ cp -rp mod_pagespeed_example mod_pagespeed_test $(STAGING_DIR)
+
+staging : staging_except_module
+ cp $(PAGESPEED_MODULE) $(STAGING_DIR)/mod_pagespeed.so
+ cp $(PAGESPEED_MODULE_24) $(STAGING_DIR)/mod_pagespeed_ap24.so
+ cp $(PAGESPEED_JS_MINIFY) $(STAGING_DIR)/pagespeed_js_minify
+
+install_except_module : mod_pagespeed_file_root
+ $(MODS_ENABLED_INSTALL_COMMANDS)
+ cat $(CONF_SOURCES) > $(APACHE_CONF_DIR)/pagespeed.conf
+ cp -f $(STAGING_DIR)/pagespeed_libraries.conf \
+ $(APACHE_CONF_D)/pagespeed_libraries.conf
+ rm -rf $(APACHE_DOC_ROOT)/mod_pagespeed_example \
+ $(APACHE_DOC_ROOT)/mod_pagespeed_test
+ cp -r $(STAGING_DIR)/mod_pagespeed_example \
+ $(STAGING_DIR)/mod_pagespeed_test $(APACHE_DOC_ROOT)
+ chown -R $(APACHE_USER) $(APACHE_DOC_ROOT)/mod_pagespeed_example \
+ $(APACHE_DOC_ROOT)/mod_pagespeed_test
+
+# To install the mod_pagespeed configuration into the system, you must
+# run this as root, or under sudo.
+install : install_except_module
+ cp $(STAGING_DIR)/mod_pagespeed.so $(APACHE_MODULES)
+ cp $(STAGING_DIR)/mod_pagespeed_ap24.so $(APACHE_MODULES)
+ cp $(STAGING_DIR)/pagespeed_js_minify $(BINDIR)
+
+mod_pagespeed_file_root :
+ mkdir -p $(MOD_PAGESPEED_CACHE)
+ chown -R $(APACHE_USER) $(MOD_PAGESPEED_CACHE)
+
+ mkdir -p $(MOD_PAGESPEED_CACHE)-alt
+ chown -R $(APACHE_USER) $(MOD_PAGESPEED_CACHE)-alt
+
+ mkdir -p $(MOD_PAGESPEED_LOG)
+ chown -R $(APACHE_USER) $(MOD_PAGESPEED_LOG)
+
+flush_disk_cache :
+ rm -rf $(MOD_PAGESPEED_CACHE)
+ $(MAKE) MOD_PAGESPEED_CACHE=$(MOD_PAGESPEED_CACHE) \
+ MOD_PAGESPEED_LOG=$(MOD_PAGESPEED_LOG) \
+ APACHE_USER=$(APACHE_USER) mod_pagespeed_file_root
+
+# Starts Apache server
+start :
+ sudo $(APACHE_START)
+stop :
+ sudo ./stop_apache.sh $(APACHE_CONTROL_PROGRAM) \
+ $(APACHE_PIDFILE) \
+ $(APACHE_PROGRAM) \
+ $(APACHE_STOP_COMMAND) \
+ $(APACHE_PORT)
+
+# To run a complete iteration, stopping Apache, reconfiguring
+# it, and restarting it, you can run 'make restart [args...]'
+restart : stop
+ $(MAKE) staging
+ sudo $(MAKE) install \
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ APACHE_ROOT=$(APACHE_ROOT) \
+ STAGING_DIR=$(STAGING_DIR) \
+ APACHE_CONF_FILE=$(APACHE_CONF_FILE) \
+ APACHE_MODULES=$(APACHE_MODULES) \
+ MODS_ENABLED_DIR=$(MODS_ENABLED_DIR) \
+ MODS_AVAILABLE_DIR=$(MODS_AVAILABLE_DIR) \
+ APACHE_USER=$(APACHE_USER) \
+ ENABLE_PROXY=$(ENABLE_PROXY)
+ sudo $(APACHE_START)
+
+# Tests that the installed mod_pagespeed server is working.
+test :
+ CACHE_FLUSH_TEST=on APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ ../system_test.sh localhost
+
+# Now hook in the full integration test suite. It needs to be run as root.
+# Each test is on its own line for better diff/merge support.
+apache_install_conf :
+ $(MAKE) staging_except_module \
+ $(OPT_REWRITE_TEST) \
+ $(OPT_PROXY_TEST) \
+ $(OPT_SLURP_TEST) \
+ $(OPT_MEMCACHED_TEST) \
+ $(OPT_IPRO_PRESERVE_LOADTEST_TEST) \
+ $(OPT_MEMCACHE_LOADTEST_TEST) \
+ $(OPT_PURGING_LOADTEST_TEST) \
+ $(OPT_IUR_LOADTEST_TEST) \
+ $(OPT_SPELING_TEST) \
+ $(OPT_LOADTEST_TRACE_TEST) \
+ $(OPT_STRESS_TEST) \
+ $(OPT_HTTPS_TEST) \
+ $(OPT_SHARED_MEM_LOCK_TEST) \
+ $(OPT_GZIP_TEST) \
+ $(OPT_EXPERIMENT_GA_TEST) \
+ $(OPT_EXPERIMENT_NO_GA_TEST) \
+ $(OPT_ALL_DIRECTIVES_TEST) \
+ $(OPT_PER_VHOST_STATS_TEST) \
+ $(OPT_NO_PER_VHOST_STATS_TEST) \
+ $(OPT_STATS_LOGGING_TEST)
+ $(MAKE) install_except_module \
+ $(OPT_REWRITE_TEST) \
+ $(OPT_PROXY_TEST) \
+ $(OPT_SLURP_TEST) \
+ $(OPT_MEMCACHED_TEST) \
+ $(OPT_IPRO_PRESERVE_LOADTEST_TEST) \
+ $(OPT_MEMCACHE_LOADTEST_TEST) \
+ $(OPT_PURGING_LOADTEST_TEST) \
+ $(OPT_IUR_LOADTEST_TEST) \
+ $(OPT_SPELING_TEST) \
+ $(OPT_LOADTEST_TRACE_TEST) \
+ $(OPT_STRESS_TEST) \
+ $(OPT_HTTPS_TEST) \
+ $(OPT_SHARED_MEM_LOCK_TEST) \
+ $(OPT_GZIP_TEST) \
+ $(OPT_EXPERIMENT_GA_TEST) \
+ $(OPT_EXPERIMENT_NO_GA_TEST) \
+ $(OPT_ALL_DIRECTIVES_TEST) \
+ $(OPT_PER_VHOST_STATS_TEST) \
+ $(OPT_NO_PER_VHOST_STATS_TEST) \
+ $(OPT_STATS_LOGGING_TEST)
+
+# Sets up a machine to run as a server for running mod_pagespeed tests.
+# The tests include the fetching and proxying of a variety of content,
+# including some of the /mod_pagespeed_example directory, plus some other
+# content in /do_not_modify and /no_content.
+setup_test_machine :
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ APACHE_CONF_FILE=$(APACHE_CONF_FILE) \
+ MOD_PAGESPEED_ROOT=$(MOD_PAGESPEED_ROOT) \
+ APACHE_CONF_D=$(APACHE_CONF_D) \
+ ./setup_test_machine.sh
+ sudo $(APACHE_CONTROL_PROGRAM) restart
+
+apache_debug_restart :
+ $(APACHE_CONTROL_PROGRAM) restart
+
+apache_debug_stop : stop
+
+# Enables a few ports that are needed by system tests. This is needed on
+# CentOS only to work around barriers erected by SELinux. See
+# http://linux.die.net/man/8/semanage
+# http://wiki.centos.org/HowTos/
+# SELinux#head-ad837f60830442ae77a81aedd10c20305a811388
+#
+# The port-list below must be kept in sync with debug.conf.template. 1023 is
+# used to test connection-refused handling via modpagespeed.com. We don't
+# actually create a VirtualHost on 1023.
+enable_ports_and_file_access :
+ set -x; \
+ for port in 1023 8081 8082 8084; do \
+ /usr/sbin/semanage port -a -t http_port_t -p tcp $$port || \
+ /usr/sbin/semanage port -m -t http_port_t -p tcp $$port; \
+ done
+ set -x; \
+ for dir in $(MOD_PAGESPEED_CACHE) \
+ $(MOD_PAGESPEED_CACHE)-alt \
+ $(MOD_PAGESPEED_LOG) ; do \
+ mkdir -p $$dir; \
+ chown $(APACHE_USER) $$dir; \
+ chcon -R --reference=$(APACHE_DOC_ROOT) $$dir; \
+ done
+
+# Hooks for tests we can only run in development due to needing extensive
+# configuration changes in Apache (and potentially different build flags).
+# Stubbed out here.
+apache_debug_leak_test :
+apache_debug_proxy_test :
+apache_debug_slurp_test :
+
+APACHE_HTTPS_PORT=
+APACHE_DEBUG_PAGESPEED_CONF=$(APACHE_CONF_DIR)/pagespeed.conf
+INSTALL_DATA_DIR=.
+
+include Makefile.tests
diff --git a/src/install/Makefile.tests b/src/install/Makefile.tests
new file mode 100644
index 0000000..9d0696e
--- /dev/null
+++ b/src/install/Makefile.tests
@@ -0,0 +1,478 @@
+# This makefile includes various integration tests, and is meant to be usable
+# in both development and deployment settings.
+#
+# Its interface is as follows:
+# Exports:
+# apache_system_tests
+# apache_vm_system_tests (includes tests that can be run on VMs)
+# Imports:
+# apache_install_conf (should read OPT_REWRITE_TEST, OPT_PROXY_TEST,
+# OPT_SLURP_TEST, OPT_SPELING_TEST, OPT_MEMCACHED_TEST,
+# OPT_MEMCACHE_LOADTEST_TEST, OPT_PURGING_LOADTEST_TEST,
+# OPT_IUR_LOADTEST_TEST, OPT_IPRO_PRESERVE_LOADTEST_TEST,
+# OPT_HTTPS_TEST,
+# OPT_LOADTEST_TRACE_TEST, OPT_STRESS_TEST,
+# OPT_SHARED_MEM_LOCK_TEST, OPT_GZIP_TEST,
+# OPT_EXPERIMENT_GA_TEST, OPT_EXPERIMENT_NO_GA_TEST,
+# OPT_PER_VHOST_STATS_TEST, OPT_NO_PER_VHOST_STATS_TEST,
+# OPT_ALL_DIRECTIVES_TEST)
+# stop, start (to stop and start Apache)
+# apache_debug_restart
+# apache_debug_stop
+# apache_debug_leak_test, apache_debug_proxy_test, apache_debug_slurp_test
+# APACHE_PORT
+# APACHE_HTTPS_PORT
+# APACHE_DEBUG_PAGESPEED_CONF
+# MOD_PAGESPEED_CACHE
+# MOD_PAGESPEED_ROOT
+# INSTALL_DATA_DIR
+# RCPORT1, RCPORT2, RCPORT3, RCPORT4, RCPORT5, RCPORT6, RCPORT7
+
+# We want order of dependencies honored..
+.NOTPARALLEL :
+
+# Want |& support; and /bin/sh doesn't provide it at least on Ubuntu 11.04
+SHELL=/bin/bash
+
+# Make conf, log, and cache file locations accessible to apache/system_test.sh
+export APACHE_DEBUG_PAGESPEED_CONF
+export APACHE_LOG
+export MOD_PAGESPEED_CACHE
+
+apache_vm_system_tests : setup_doc_root
+ $(MAKE) FAST_RESTART=1 apache_debug_smoke_test
+ $(MAKE) FAST_RESTART=1 apache_debug_downstream_caching_test
+ $(MAKE) FAST_RESTART=1 apache_debug_per_vhost_stats_test
+ $(MAKE) FAST_RESTART=1 apache_debug_memcached_test
+ $(MAKE) FAST_RESTART=1 apache_debug_leak_test
+ $(MAKE) FAST_RESTART=1 apache_debug_rewrite_test
+ $(MAKE) FAST_RESTART=1 apache_debug_proxy_test
+ $(MAKE) FAST_RESTART=1 apache_debug_slurp_test
+ $(MAKE) FAST_RESTART=1 apache_debug_speling_test
+ $(MAKE) FAST_RESTART=1 apache_debug_gzip_test
+ $(MAKE) FAST_RESTART=1 apache_debug_experiment_test
+ $(MAKE) FAST_RESTART=1 apache_debug_vhost_only_test
+ $(MAKE) FAST_RESTART=1 apache_debug_global_off_test
+ $(MAKE) FAST_RESTART=1 apache_debug_shared_mem_lock_sanity_test
+ $(MAKE) apache_debug_https_fetch_test
+ $(MAKE) FAST_RESTART=1 apache_debug_stats_logging_test
+ $(MAKE) FAST_RESTART=1 apache_debug_all_directives_test
+ $(MAKE) FAST_RESTART=1 apache_debug_unplugged_test
+ $(MAKE) apache_install_conf
+# 'apache_install_conf' should always be last, to leave your debug
+# Apache server in a consistent state.
+
+# apache_debug_serf_empty_header_test fails when testing on VMs for
+# release builds. This appears to be due to the complicated proxy
+# setup.
+# TODO(jmarantz): fix this.
+apache_system_tests : apache_vm_system_tests
+ $(MAKE) FAST_RESTART=1 apache_debug_serf_empty_header_test
+ $(MAKE) apache_install_conf
+ $(MAKE) FAST_RESTART=1 apache_debug_restart
+
+APACHE_HOST = localhost
+ifeq ($(APACHE_PORT),80)
+ APACHE_SERVER = $(APACHE_HOST)
+else
+ APACHE_SERVER = $(APACHE_HOST):$(APACHE_PORT)
+endif
+APACHE_SECONDARY_SERVER = $(APACHE_HOST):$(APACHE_SECONDARY_PORT)
+
+WGET = wget
+WGET_PROXY = http_proxy=$(APACHE_SERVER) $(WGET) -q -O -
+WGET_NO_PROXY = $(WGET) --no-proxy
+export WGET
+
+ifeq ($(APACHE_HTTPS_PORT),)
+ APACHE_HTTPS_SERVER =
+else ifeq ($(APACHE_HTTPS_PORT),443)
+ APACHE_HTTPS_SERVER = localhost
+else
+ APACHE_HTTPS_SERVER = localhost:$(APACHE_HTTPS_PORT)
+endif
+EXAMPLE = $(APACHE_SERVER)/mod_pagespeed_example
+EXAMPLE_IMAGE = $(EXAMPLE)/images/Puzzle.jpg.pagespeed.ce.91_WewrLtP.jpg
+EXAMPLE_BIG_CSS = $(EXAMPLE)/styles/big.css.pagespeed.ce.01O-NppLwe.css
+EXAMPLE_COMBINE_CSS = $(EXAMPLE)/combine_css.html
+TEST_ROOT = $(APACHE_SERVER)/mod_pagespeed_test
+
+APACHE_SYSTEM_TEST = $(shell \
+ if [ -f $(MOD_PAGESPEED_ROOT)/pagespeed/apache/system_test.sh ]; then \
+ echo $(MOD_PAGESPEED_ROOT)/pagespeed/apache/system_test.sh ;\
+ else \
+ echo $(MOD_PAGESPEED_ROOT)/third_party/pagespeed/apache/system_test.sh ;\
+ fi)
+
+# Installs debug configuration and runs a smoke test against it.
+# This will blow away your existing pagespeed.conf,
+# and clear the cache. It will also run with statistics off at the end,
+# restoring it at the end
+apache_debug_smoke_test : apache_install_conf apache_debug_restart
+ @echo '***' System-test with cold cache
+ $(MAKE) stop
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+ $(MAKE) start
+ CACHE_FLUSH_TEST=on \
+ RCPORT1=$(RCPORT1) \
+ RCPORT2=$(RCPORT2) \
+ RCPORT3=$(RCPORT3) \
+ RCPORT4=$(RCPORT4) \
+ RCPORT5=$(RCPORT5) \
+ RCPORT6=$(RCPORT6) \
+ RCPORT7=$(RCPORT7) \
+ APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
+ APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ FIRST_RUN=true \
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+ #
+ @echo '***' System-test with warm cache
+ CACHE_FLUSH_TEST=on \
+ RCPORT1=$(RCPORT1) \
+ RCPORT2=$(RCPORT2) \
+ RCPORT3=$(RCPORT3) \
+ RCPORT4=$(RCPORT4) \
+ RCPORT5=$(RCPORT5) \
+ RCPORT6=$(RCPORT6) \
+ RCPORT7=$(RCPORT7) \
+ APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
+ APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+ @echo '***' System-test with statistics off
+ mv $(APACHE_DEBUG_PAGESPEED_CONF) $(APACHE_DEBUG_PAGESPEED_CONF).save
+ sed -e "s/# ModPagespeedStatistics off/ModPagespeedStatistics off/" \
+ < $(APACHE_DEBUG_PAGESPEED_CONF).save \
+ > $(APACHE_DEBUG_PAGESPEED_CONF)
+ grep ModPagespeedStatistics $(APACHE_DEBUG_PAGESPEED_CONF)
+ grep ModPagespeedInheritVHostConfig $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) stop
+ $(MAKE) start
+ RCPORT1=$(RCPORT1) \
+ RCPORT2=$(RCPORT2) \
+ RCPORT3=$(RCPORT3) \
+ RCPORT4=$(RCPORT4) \
+ RCPORT5=$(RCPORT5) \
+ RCPORT6=$(RCPORT6) \
+ RCPORT7=$(RCPORT7) \
+ APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
+ APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ CACHE_FLUSH_TEST=on \
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+ #
+ # Now turn off ModPagespeedInheritVHostConfig, turn stats back on.
+ @echo '***' System-test without InheritVhostConfig, stats on.
+ sed -e "s/InheritVHostConfig on/InheritVHostConfig off/" \
+ < $(APACHE_DEBUG_PAGESPEED_CONF).save \
+ > $(APACHE_DEBUG_PAGESPEED_CONF)
+ grep ModPagespeedStatistics $(APACHE_DEBUG_PAGESPEED_CONF)
+ grep ModPagespeedInheritVHostConfig $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) stop
+ $(MAKE) start
+ CACHE_FLUSH_TEST=on \
+ NO_VHOST_MERGE=on \
+ RCPORT1=$(RCPORT1) \
+ RCPORT2=$(RCPORT2) \
+ RCPORT3=$(RCPORT3) \
+ RCPORT4=$(RCPORT4) \
+ RCPORT5=$(RCPORT5) \
+ RCPORT6=$(RCPORT6) \
+ RCPORT7=$(RCPORT7) \
+ APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
+ APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
+ APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+ #
+ # Restore config
+ mv $(APACHE_DEBUG_PAGESPEED_CONF).save $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) apache_debug_stop
+ [ -z "`grep leaked_rewrite_drivers $(APACHE_LOG)`" ]
+ @echo PASS
+
+apache_debug_rewrite_test : rewrite_test_prepare apache_install_conf \
+ apache_debug_restart
+ sleep 2
+ $(WGET_NO_PROXY) -q -O - --save-headers $(EXAMPLE_IMAGE) \
+ | head -14 | grep "Content-Type: image/jpeg"
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/mod_pagespeed_statistics \
+ | grep cache_hits
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/shortcut.html \
+ | grep "Filter Examples"
+
+rewrite_test_prepare :
+ $(eval OPT_REWRITE_TEST="REWRITE_TEST=1")
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+
+# This test checks that when mod_speling is enabled we handle the
+# resource requests properly by nulling out request->filename. If
+# we fail to do that then mod_speling rewrites the result to be a 300
+# (multiple choices).
+apache_debug_speling_test : speling_test_prepare apache_install_conf \
+ apache_debug_restart
+ @echo Testing compatibility with mod_speling:
+ $(WGET_NO_PROXY) -O /dev/null --save-headers $(EXAMPLE_IMAGE) 2>&1 \
+ | head | grep "HTTP request sent, awaiting response... 200 OK"
+
+speling_test_prepare :
+ $(eval OPT_SPELING_TEST="SPELING_TEST=1")
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+
+apache_debug_memcached_test :
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+ # run_program_with_memcached.sh exports $MEMCACHED_PORT which is
+ # needed by apache_install_conf so pagespeed.conf gets the
+ # right ModPagespeedMemcachedServers setup. MEMCACHED_PORT will
+ # be different on each run, which makes it easier for people to
+ # run system-tests and unit-tests in parallel with one another.
+ env APACHE_DOC_ROOT=$(APACHE_DOC_ROOT) \
+ $(INSTALL_DATA_DIR)/run_program_with_memcached.sh \
+ $(MAKE) apache_debug_memcached_test_run
+ $(MAKE) apache_debug_stop
+ [ -z "`grep leaked_rewrite_drivers $(APACHE_LOG)`" ]
+ @echo PASS
+
+apache_debug_memcached_test_run :
+ $(MAKE) apache_install_conf OPT_MEMCACHED_TEST="MEMCACHED_TEST=1"
+ $(MAKE) apache_debug_restart
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+ CACHE_FLUSH_TEST=on \
+ RCPORT1=$(RCPORT1) \
+ RCPORT2=$(RCPORT2) \
+ RCPORT3=$(RCPORT3) \
+ RCPORT4=$(RCPORT4) \
+ RCPORT5=$(RCPORT5) \
+ RCPORT6=$(RCPORT6) \
+ RCPORT7=$(RCPORT7) \
+ APACHE_SECONDARY_PORT=$(APACHE_SECONDARY_PORT) \
+ APACHE_TERTIARY_PORT=$(APACHE_TERTIARY_PORT) \
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+
+# This test checks that when ModPagespeedFetchWithGzip is enabled we
+# fetch resources from origin with the gzip flag. Note that big.css
+# uncompressed is 4307 bytes. As of Jan 2012 we get 339 bytes, but
+# the compression is done by mod_deflate which might change. So we
+# do a cumbersome range-check that the 4307 bytes gets compressed to
+# somewhere between 200 and 500 bytes.
+apache_debug_gzip_test : gzip_test_prepare apache_install_conf \
+ apache_debug_restart
+ @echo Testing efficacy of ModPagespeedFetchWithGzip:
+	# Note: The client request will not be served with gzip because we do not
+ # have an Accept-Encoding header, we are testing that the backend fetch
+ # uses gzip.
+ $(WGET_NO_PROXY) -O /dev/null --save-headers $(EXAMPLE_BIG_CSS) 2>&1 \
+ | head | grep "HTTP request sent, awaiting response... 200 OK"
+ # TODO(sligocki): The serf_fetch_bytes_count should be available on
+ # this vhost's mod_pagespeed_statistics page. Why isn't it?
+ bytes=`$(WGET_NO_PROXY) -q -O - \
+ $(APACHE_SERVER)/mod_pagespeed_global_statistics \
+ | sed -n 's/serf_fetch_bytes_count: *//p'`; \
+ echo Compressed big.css took $$bytes bytes; \
+ test $$bytes -gt 200 -a $$bytes -lt 500
+
+gzip_test_prepare :
+ $(eval OPT_GZIP_TEST="GZIP_TEST=1")
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+
+# Test to make sure the experiment framework is sending its headers.
+# TODO(nforman): Make this run multiple times and make sure we don't *always*
+# get the same result.
+apache_debug_experiment_test :
+ $(MAKE) apache_debug_experiment_ga_test
+ $(MAKE) apache_debug_experiment_no_ga_test
+
+apache_debug_experiment_ga_test : experiment_ga_test_prepare \
+ apache_install_conf apache_debug_restart
+ $(INSTALL_DATA_DIR)/apache_experiment_ga_test.sh $(APACHE_SERVER)
+
+apache_debug_experiment_no_ga_test : experiment_no_ga_test_prepare \
+ apache_install_conf apache_debug_restart
+ $(INSTALL_DATA_DIR)/apache_experiment_no_ga_test.sh $(APACHE_SERVER)
+
+experiment_ga_test_prepare :
+ $(eval OPT_EXPERIMENT_GA_TEST="EXPERIMENT_GA_TEST=1")
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+
+experiment_no_ga_test_prepare :
+ $(eval OPT_EXPERIMENT_NO_GA_TEST="EXPERIMENT_NO_GA_TEST=1")
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+
+apache_debug_downstream_caching_test :
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+ $(MAKE) apache_install_conf apache_debug_restart
+ $(INSTALL_DATA_DIR)/apache_downstream_caching_test.sh $(APACHE_SERVER)
+
+# Test to make sure we don't crash if we're off for global but on for vhosts.
+# We use the stress test config as a base for that, as it has the vhosts all
+# setup nicely; we just need to turn off ourselves for the global scope.
+apache_debug_vhost_only_test :
+ $(MAKE) apache_install_conf \
+ OPT_LOADTEST_TRACE_TEST=LOADTEST_TRACE_TEST=1 \
+ OPT_STRESS_TEST=STRESS_TEST=1
+ echo 'ModPagespeed off' >> $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) apache_debug_restart
+ $(WGET_NO_PROXY) -O /dev/null --save-headers $(EXAMPLE) 2>&1 \
+ | head | grep "HTTP request sent, awaiting response... 200 OK"
+
+# Regression test for serf fetching something with an empty header.
+# We use a slurp-serving server to produce that.
+EMPTY_HEADER_URL=http://www.modpagespeed.com/empty_header.html
+apache_debug_serf_empty_header_test :
+ $(MAKE) apache_install_conf \
+ OPT_LOADTEST_TRACE_TEST=LOADTEST_TRACE_TEST=1 \
+ OPT_STRESS_TEST=STRESS_TEST=1 \
+ SLURP_DIR=$(PWD)/$(INSTALL_DATA_DIR)/mod_pagespeed_test/slurp
+ $(MAKE) apache_debug_restart
+	# Make sure we can fetch a URL with empty header correctly.
+ $(WGET_PROXY) $(EMPTY_HEADER_URL) > /dev/null
+
+
+# Test to make sure we don't crash due to uninitialized statistics if we
+# are off by default but turned on in some place.
+apache_debug_global_off_test :
+ $(MAKE) apache_install_conf
+ echo 'ModPagespeed off' >> $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) apache_debug_restart
+ $(WGET_NO_PROXY) -O /dev/null --save-headers $(EXAMPLE)?PageSpeed=on 2>&1 \
+ | head | grep "HTTP request sent, awaiting response... 200 OK"
+
+# Test to make sure HTTPS fetching works.
+apache_debug_https_fetch_test :
+ $(MAKE) apache_install_conf
+ $(MAKE) apache_debug_restart
+ $(INSTALL_DATA_DIR)/apache_https_fetch_test.sh \
+ $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+
+# Sanity-check that enabling shared-memory locks doesn't cause the
+# system to crash, and a rewrite does successfully happen.
+apache_debug_shared_mem_lock_sanity_test : shared_mem_lock_test_prepare \
+ apache_install_conf apache_debug_restart
+ $(WGET_NO_PROXY) -q -O /dev/null \
+ $(EXAMPLE_COMBINE_CSS)?PageSpeedFilters=combine_css
+ sleep 1
+ $(WGET_NO_PROXY) -q -O - \
+ $(EXAMPLE_COMBINE_CSS)?PageSpeedFilters=combine_css \
+ | grep "\.pagespeed\.cc\."
+
+shared_mem_lock_test_prepare :
+ $(eval OPT_SLURP_TEST="SHARED_MEM_LOCK_TEST=1")
+ $(MAKE) stop
+ # "Directory not empty" failures were observed here, so add sleep to
+ # wait for pending cache-writes to finish.
+ sleep 2
+ rm -rf $(MOD_PAGESPEED_CACHE)/*
+ $(MAKE) start
+ sleep 2
+
+# Test that all directives are accepted by the options parser.
+apache_debug_all_directives_test :
+ $(MAKE) apache_install_conf \
+ OPT_ALL_DIRECTIVES_TEST="ALL_DIRECTIVES_TEST=1"
+ $(MAKE) apache_debug_restart
+
+# Test to make sure that per-vhost stats work.
+STATS = mod_pagespeed_statistics
+GLOBAL_STATS = mod_pagespeed_global_statistics
+TRIM_PATH = \
+ "mod_pagespeed_example/trim_urls.html?PageSpeedFilters=trim_urls"
+
+apache_debug_per_vhost_stats_test :
+ $(MAKE) apache_install_conf \
+ OPT_PER_VHOST_STATS_TEST="PER_VHOST_STATS_TEST=1" \
+ OPT_NO_PER_VHOST_STATS_TEST="NO_PER_VHOST_STATS_TEST=0"
+ $(MAKE) apache_debug_restart
+ @echo Fetch a trim URL example from VHost: 8080 -- that should bump
+ @echo the stat for that there + global but not on :8083
+ $(WGET_NO_PROXY) -q -O /dev/null $(APACHE_SERVER)/$(TRIM_PATH)
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 0
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(GLOBAL_STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(GLOBAL_STATS) \
+ | grep url_trims | grep -w 1
+ @echo Now on :8083 as well.
+ $(WGET_NO_PROXY) -q -O /dev/null $(APACHE_SECONDARY_SERVER)/$(TRIM_PATH)
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(GLOBAL_STATS) \
+ | grep url_trims | grep -w 2
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(GLOBAL_STATS) \
+ | grep url_trims | grep -w 2
+ # Without per-vhost stats.
+ $(MAKE) apache_install_conf \
+ OPT_PER_VHOST_STATS_TEST="PER_VHOST_STATS_TEST=0" \
+ OPT_NO_PER_VHOST_STATS_TEST="NO_PER_VHOST_STATS_TEST=1"
+ $(MAKE) apache_debug_restart
+ @echo Now try without global stats -- both should update at once.
+ $(WGET_NO_PROXY) -q -O /dev/null $(APACHE_SERVER)/$(TRIM_PATH)
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 1
+ $(WGET_NO_PROXY) -q -O /dev/null $(APACHE_SECONDARY_SERVER)/$(TRIM_PATH)
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 2
+ $(WGET_NO_PROXY) -q -O - $(APACHE_SECONDARY_SERVER)/$(STATS) \
+ | grep url_trims | grep -w 2
+
+# Test that statistics logging works.
+apache_debug_stats_logging_test :
+ $(MAKE) apache_install_conf \
+ OPT_STATS_LOGGING_TEST="STATS_LOGGING_TEST=1"
+ $(MAKE) apache_debug_restart
+ $(APACHE_SYSTEM_TEST) $(APACHE_SERVER) $(APACHE_HTTPS_SERVER)
+
+# Tests that setting the root configuration 'unplugged' can't be overridden
+# with a query-param, but can be overridden with an explicit vhost setting.
+EXAMPLE_INDEX = mod_pagespeed_example/index.html
+apache_debug_unplugged_test : apache_install_conf apache_debug_restart
+ $(MAKE) stop
+ mv $(APACHE_DEBUG_PAGESPEED_CONF) $(APACHE_DEBUG_PAGESPEED_CONF).save
+ # Note - there are multiple occurrences of "ModPagespeed on" in our
+ # debug pagespeed.conf. This article shows how to replace only the
+ # first one with sed: http://stackoverflow.com/questions/148451/
+ # how-to-use-sed-to-replace-only-the-first-occurrence-in-a-file
+ sed -e '0,/ModPagespeed on/{s/ModPagespeed on/ModPagespeed unplugged/}' \
+ < $(APACHE_DEBUG_PAGESPEED_CONF).save \
+ > $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) start
+ # Fetching from the root should be off, even if we try to turn it on.
+ [ -z "`$(WGET) -q --save-headers -O - \
+ $(APACHE_SERVER)/$(EXAMPLE_INDEX)?ModPagespeed=on | \
+ grep X-Mod-Pagespeed`" ]
+ # Fetching from the secondary host should be on.
+ http_proxy=$(APACHE_SECONDARY_SERVER) $(WGET) -q --save-headers -O - \
+ http://secondary.example.com/$(EXAMPLE_INDEX) | \
+ grep X-Mod-Pagespeed
+ #
+ # Restore config
+ mv $(APACHE_DEBUG_PAGESPEED_CONF).save $(APACHE_DEBUG_PAGESPEED_CONF)
+ $(MAKE) apache_debug_stop
+ @echo PASS
+
+# Target used to populate the docroot for a webserver, including the
+# read-only mod_pagespeed_example, mod_pagespeed_test, and do_not_modify,
+# plus the read/write areas for cache purging and flush tests. The
+# read-only directories are symlinked in for speed, and the read/write
+# directories are deep-copied, but they are small.
+setup_doc_root :
+ mkdir -p $(APACHE_DOC_ROOT)
+ rm -fr "$(APACHE_DOC_ROOT)/mod_pagespeed_example"
+ ln -Tsf "$(realpath $(INSTALL_DATA_DIR))/mod_pagespeed_example" \
+ $(APACHE_DOC_ROOT)/mod_pagespeed_example
+ rm -fr "$(APACHE_DOC_ROOT)/mod_pagespeed_test"
+ ln -Tsf "$(realpath $(INSTALL_DATA_DIR))/mod_pagespeed_test" \
+ $(APACHE_DOC_ROOT)/mod_pagespeed_test
+ rm -fr "$(APACHE_DOC_ROOT)/do_not_modify"
+ ln -Tsf "$(realpath $(INSTALL_DATA_DIR))/do_not_modify" \
+ $(APACHE_DOC_ROOT)/do_not_modify
+ rm -rf "$(APACHE_DOC_ROOT)/cache_flush"
+ cp -r "$(INSTALL_DATA_DIR)/mod_pagespeed_test/cache_flush" \
+ $(APACHE_DOC_ROOT)
+ rm -rf "$(APACHE_DOC_ROOT)/purge"
+ cp -r "$(INSTALL_DATA_DIR)/mod_pagespeed_test/purge" $(APACHE_DOC_ROOT)
diff --git a/src/install/apache_downstream_caching_test.sh b/src/install/apache_downstream_caching_test.sh
new file mode 100755
index 0000000..83757a3
--- /dev/null
+++ b/src/install/apache_downstream_caching_test.sh
@@ -0,0 +1,230 @@
+#!/bin/bash
+#
+# Copyright 2013 Google Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Author: anupama@google.com (Anupama Dutta)
+#
+# Runs all Apache-specific downstream caching tests.
+#
+# Environment variables that are used by this test:
+# 1) VARNISH_SERVER should be set to the varnish caching layer
+# host:port for testing to be complete. If VARNISH_SERVER is empty,
+# minimal testing of the feature is done.
+# 2) MUST_BACKUP_DEFAULT_VCL should be set to 1 or 0 depending on
+# whether we want the existing default.vcl to be backed up and restored
+# before and after the test or it can be overwritten by the updated
+# debug_conf.v3.vcl. By default, it is assumed to have the value 1.
+
+this_dir="$( dirname "${BASH_SOURCE[0]}" )"
+PAGESPEED_CODE_DIR="$this_dir/../../../third_party/pagespeed"
+if [ ! -e "$PAGESPEED_CODE_DIR" ] ; then
+ PAGESPEED_CODE_DIR="$this_dir/../pagespeed"
+fi
+SERVER_NAME=apache
+source "$PAGESPEED_CODE_DIR/automatic/system_test_helpers.sh" || exit 1
+
+DEFAULT_VCL="/etc/varnish/default.vcl"
+BACKUP_DEFAULT_VCL=$TESTTMP"/default.vcl.bak"
+DEBUG_CONF_VCL="$this_dir/debug_conf.v3.vcl"
+TMP_DEBUG_CONF_VCL=$TESTTMP"/debug_conf.v3.vcl"
+
+# Environment variables.
+# MUST_BACKUP_DEFAULT_VCL is 1 by default because we would not like to overwrite
+# the file without explicit permission.
+: ${MUST_BACKUP_DEFAULT_VCL:=1}
+
+# Helper method to print out varnish setup instructions.
+print_varnish_setup_instructions() {
+ echo "*** Please follow these instructions to install and start varnish"
+ echo "*** on your system, so that apache_downstream_caching_test.sh can"
+ echo "*** run successfully."
+ echo "*** 1) sudo apt-get install varnish"
+ echo "*** 2) sudo tee -a /etc/default/varnish <<EOF"
+ echo " DAEMON_OPTS=\"-a :8020 \ "
+ echo " -T localhost:6082 \ "
+ echo " -f /etc/varnish/default.vcl \ "
+ echo " -S /etc/varnish/secret \ "
+ echo " -s file,/var/lib/varnish/\$INSTANCE/varnish_storage.bin,1G\" "
+ echo "EOF"
+ echo "*** 3) sudo cp $DEBUG_CONF_VCL $DEFAULT_VCL"
+ echo "*** 4) sudo service varnish restart"
+ echo "*** 5) export VARNISH_SERVER=\"localhost:8020\""
+ echo "*** 6) Rerun apache_downstream_caching_tests.sh"
+}
+
+OUT_CONTENTS_FILE="$OUTDIR/gzipped.html"
+OUT_HEADERS_FILE="$OUTDIR/headers.html"
+GZIP_WGET_ARGS="-q -S --header=Accept-Encoding:gzip -o $OUT_HEADERS_FILE -O - "
+
+# Helper method that does a wget and verifies that the rewriting status matches
+# the $1 argument that is passed to this method.
+check_rewriting_status() {
+ $WGET $WGET_ARGS $CACHABLE_HTML_LOC > $OUT_CONTENTS_FILE
+ if $1; then
+ check zgrep -q "pagespeed.ic" $OUT_CONTENTS_FILE
+ else
+ check_not zgrep -q "pagespeed.ic" $OUT_CONTENTS_FILE
+ fi
+ # Reset WGET_ARGS.
+ WGET_ARGS=""
+}
+
+# Helper method that obtains a gzipped response and verifies that rewriting
+# has happened. Also takes an extra parameter that identifies extra headers
+# to be added during wget.
+check_for_rewriting() {
+ WGET_ARGS="$GZIP_WGET_ARGS $1"
+ check_rewriting_status true
+}
+
+# Helper method that obtains a gzipped response and verifies that no rewriting
+# has happened.
+check_for_no_rewriting() {
+ WGET_ARGS="$GZIP_WGET_ARGS"
+ check_rewriting_status false
+}
+
+# Helper method to check that a variable in the statistics file has the expected
+# value.
+check_statistic_value() {
+ check_from "$CURRENT_STATS" egrep -q "$1:[[:space:]]*$2"
+}
+
+check_num_downstream_cache_purge_attempts() {
+ check_statistic_value $ATTEMPTS_VAR $1
+}
+
+check_num_successful_downstream_cache_purges() {
+ check_statistic_value $SUCCESS_VAR $1
+}
+
+restore_default_vcl_from_backup() {
+ sudo mv -f $BACKUP_DEFAULT_VCL $DEFAULT_VCL
+ sudo service varnish restart
+}
+
+# Portions of the below test will be skipped if no VARNISH_SERVER is specified.
+have_varnish_downstream_cache="1"
+if [ -z ${VARNISH_SERVER:-} ]; then
+ have_varnish_downstream_cache="0"
+ echo "*** Skipping parts of the test because varnish server host:port has"
+ echo "*** not been specified. If you'd like to run all parts of this test,"
+ echo "*** please follow these instructions:"
+ print_varnish_setup_instructions
+ CACHABLE_HTML_HOST_PORT="http://${HOSTNAME}"
+else
+ CACHABLE_HTML_HOST_PORT="http://$VARNISH_SERVER"
+ # Check for the presence of $DEFAULT_VCL file to confirm that varnish is
+ # installed on the system. If varnish is not installed, print out
+ # instructions for it.
+ if [ ! -f $DEFAULT_VCL ]; then
+ print_varnish_setup_instructions
+ exit 1
+ fi
+ # Check whether the default.vcl being used by varnish is different from
+ # debug_conf.v3.vcl.
+ # a) If there are no differences, we assume that varnish has been restarted
+ # after debug_conf.v3.vcl contents were copied over to default.vcl and
+ # continue with the tests.
+ cp -f $DEBUG_CONF_VCL $TMP_DEBUG_CONF_VCL
+ if ! cmp -s $DEFAULT_VCL $TMP_DEBUG_CONF_VCL; then
+ # Copy over the permissions and ownership attributes for $DEFAULT_VCL onto
+ # $TMP_DEBUG_CONF_VCL.
+ sudo chmod --reference=$DEFAULT_VCL $TMP_DEBUG_CONF_VCL
+ sudo chown --reference=$DEFAULT_VCL $TMP_DEBUG_CONF_VCL
+ if [ "$MUST_BACKUP_DEFAULT_VCL" = "1" ]; then
+ # b) If there are differences, and MUST_BACKUP_DEFAULT_VCL is set to true,
+ # we backup the default vcl.
+ sudo mv $DEFAULT_VCL $BACKUP_DEFAULT_VCL
+ trap restore_default_vcl_from_backup 0
+ else
+ # c) If there are differences, and MUST_BACKUP_DEFAULT_VCL is set to
+ # false, we assume that the user would like to permanently copy over
+ # debug_conf.v3.vcl into default.vcl for continuous testing purposes.
+ echo "*** Overwriting /etc/varnish/default.vcl with the latest version"
+ echo "*** of debug_conf.v3.vcl and restarting varnish."
+ echo "*** You only need to do this once for every update to"
+ echo "*** debug_conf.v3.vcl, which should not be very frequent."
+ fi
+ sudo cp -fp $TMP_DEBUG_CONF_VCL $DEFAULT_VCL
+ fi
+ # Restart varnish to clear its cache.
+ sudo service varnish restart
+fi
+
+CACHABLE_HTML_LOC="$CACHABLE_HTML_HOST_PORT/mod_pagespeed_test"
+CACHABLE_HTML_LOC+="/cachable_rewritten_html/downstream_caching.html"
+
+STATS_URL="${HOSTNAME}/mod_pagespeed_statistics"
+ATTEMPTS_VAR="downstream_cache_purge_attempts"
+SUCCESS_VAR="successful_downstream_cache_purges"
+
+# Number of downstream cache purges should be 0 here.
+start_test Check that downstream cache purges are 0 initially.
+CURRENT_STATS=$($WGET_DUMP $STATS_URL)
+check_num_downstream_cache_purge_attempts 0
+check_num_successful_downstream_cache_purges 0
+
+# Output should not be rewritten and 1 successful purge should have
+# occurred here.
+start_test Check for case where rewritten cache should get purged.
+check_for_no_rewriting
+# Fetch until the purge happens.
+fetch_until $STATS_URL "grep -c $ATTEMPTS_VAR:[[:space:]]*1" 1
+if [ $have_varnish_downstream_cache = "1" ]; then
+ CURRENT_STATS=$($WGET_DUMP $STATS_URL)
+ check_num_successful_downstream_cache_purges 1
+ check egrep -q "X-Cache: MISS" $OUT_HEADERS_FILE
+ fi
+
+# Output should be fully rewritten here.
+start_test Check for case where rewritten cache should not get purged.
+check_for_rewriting "--header=X-PSA-Blocking-Rewrite:psatest"
+# Number of downstream cache purges should still be 1.
+CURRENT_STATS=$($WGET_DUMP $STATS_URL)
+check_num_downstream_cache_purge_attempts 1
+if [ $have_varnish_downstream_cache = "1" ]; then
+ check_num_successful_downstream_cache_purges 1
+ check egrep -q "X-Cache: MISS" $OUT_HEADERS_FILE
+fi
+
+# Output should be fully rewritten here and we should have a HIT.
+start_test Check for case when there should be a varnish cache hit.
+check_for_rewriting ""
+# Number of downstream cache purges should still be 1.
+CURRENT_STATS=$($WGET_DUMP $STATS_URL)
+check_num_downstream_cache_purge_attempts 1
+if [ $have_varnish_downstream_cache = "1" ]; then
+ check_num_successful_downstream_cache_purges 1
+ check egrep -q "X-Cache: HIT" $OUT_HEADERS_FILE
+fi
+
+if [ $have_varnish_downstream_cache = "1" ]; then
+ # Enable one of the beaconing dependent filters and verify interaction
+ # between beaconing and downstream caching logic, by verifying that
+ # whenever beaconing code is present in the rewritten page, the
+ # output is also marked as a cache-miss, indicating that the instrumentation
+ # was done by the backend.
+ start_test Check whether beaconing is accompanied by a MISS always.
+ WGET_ARGS="-S"
+ CACHABLE_HTML_LOC+="?ModPagespeedFilters=lazyload_images"
+ fetch_until -gzip $CACHABLE_HTML_LOC \
+ "zgrep -c \"pagespeed\.CriticalImages\.Run\"" 1
+ check fgrep -q 'X-Cache: MISS' $WGET_OUTPUT
+ check fgrep -q 'Cache-Control: no-cache, max-age=0' $WGET_OUTPUT
+fi
+
+
+check_failures_and_exit
diff --git a/src/install/apache_experiment_ga_test.sh b/src/install/apache_experiment_ga_test.sh
new file mode 100755
index 0000000..1202322
--- /dev/null
+++ b/src/install/apache_experiment_ga_test.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+# Author: jefftk@google.com (Jeff Kaufman)
+#
+# Runs all Apache-specific experiment framework tests that depend on AnalyticsID
+# being set.
+#
+# See apache_experiment_test for usage.
+#
+this_dir=$(dirname $0)
+source "$this_dir/apache_experiment_test.sh" || exit 1
+
+EXAMPLE="$1/mod_pagespeed_example"
+EXTEND_CACHE="$EXAMPLE/extend_cache.html"
+
+start_test Analytics javascript is added for the experimental group.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=2' $EXTEND_CACHE)
+check_from "$OUT" fgrep -q 'Experiment: 2'
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=7' $EXTEND_CACHE)
+check_from "$OUT" fgrep -q 'Experiment: 7'
+
+start_test Analytics javascript is not added for the no-experiment group.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=0' $EXTEND_CACHE)
+check_not_from "$OUT" fgrep -q 'Experiment:'
+
+check_failures_and_exit
diff --git a/src/install/apache_experiment_no_ga_test.sh b/src/install/apache_experiment_no_ga_test.sh
new file mode 100755
index 0000000..db2d4f9
--- /dev/null
+++ b/src/install/apache_experiment_no_ga_test.sh
@@ -0,0 +1,25 @@
+#!/bin/bash
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+# Author: jefftk@google.com (Jeff Kaufman)
+#
+# Runs all Apache-specific experiment framework tests that depend on AnalyticsID
+# being unset.
+#
+# See apache_experiment_test for usage.
+#
+this_dir=$(dirname $0)
+source "$this_dir/apache_experiment_test.sh" || exit 1
+
+EXAMPLE="$1/mod_pagespeed_example"
+EXTEND_CACHE="$EXAMPLE/extend_cache.html"
+
+start_test Analytics javascript is not added for any group.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=2' $EXTEND_CACHE)
+check_not_from "$OUT" fgrep -q 'Experiment:'
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=7' $EXTEND_CACHE)
+check_not_from "$OUT" fgrep -q 'Experiment:'
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=0' $EXTEND_CACHE)
+check_not_from "$OUT" fgrep -q 'Experiment:'
+
+check_failures_and_exit
diff --git a/src/install/apache_experiment_test.sh b/src/install/apache_experiment_test.sh
new file mode 100755
index 0000000..2805fc1
--- /dev/null
+++ b/src/install/apache_experiment_test.sh
@@ -0,0 +1,106 @@
+#!/bin/bash
+#
+# Copyright 2012 Google Inc. All Rights Reserved.
+# Author: jefftk@google.com (Jeff Kaufman)
+#
+# Runs all Apache-specific experiment framework tests that don't depend on
+# Google Analytics.
+#
+# See automatic/system_test_helpers.sh for usage.
+#
+# Not intended to be run stand-alone. Should be run only by
+# apache_experiment_no_ga_test and apache_experiment_ga_test.
+#
+
+this_dir="$( dirname "${BASH_SOURCE[0]}" )"
+PAGESPEED_CODE_DIR="$this_dir/../../../third_party/pagespeed"
+if [ ! -e "$PAGESPEED_CODE_DIR" ] ; then
+ PAGESPEED_CODE_DIR="$this_dir/../pagespeed"
+fi
+SERVER_NAME=apache
+source "$PAGESPEED_CODE_DIR/automatic/system_test_helpers.sh" || exit 1
+
+EXAMPLE="$1/mod_pagespeed_example"
+EXTEND_CACHE="$EXAMPLE/extend_cache.html"
+MPS_TEST="$1/mod_pagespeed_test"
+ARIS="$MPS_TEST/avoid_renaming_introspective_javascript__off.html"
+
+echo Testing whether or not the experiment framework is working.
+start_test PageSpeedExperiment cookie is set.
+OUT=$($WGET_DUMP $EXTEND_CACHE)
+check_from "$OUT" fgrep "PageSpeedExperiment="
+
+start_test PageSpeedFilters query param should disable experiments.
+OUT=$($WGET_DUMP "$EXTEND_CACHE?PageSpeed=on&PageSpeedFilters=rewrite_css")
+check_not_from "$OUT" fgrep 'PageSpeedExperiment='
+
+start_test ModPagespeedFilters query param should also disable experiments.
+OUT=$($WGET_DUMP \
+ "$EXTEND_CACHE?ModPagespeed=on&ModPagespeedFilters=rewrite_css")
+check_not_from "$OUT" fgrep 'PageSpeedExperiment='
+
+start_test experiment assignment can be forced
+OUT=$($WGET_DUMP \
+ "$EXTEND_CACHE?PageSpeedEnrollExperiment=2")
+check_from "$OUT" fgrep 'PageSpeedExperiment=2'
+
+start_test experiment assignment can be forced to a 0% experiment
+OUT=$($WGET_DUMP \
+ "$EXTEND_CACHE?PageSpeedEnrollExperiment=3")
+check_from "$OUT" fgrep 'PageSpeedExperiment=3'
+
+start_test experiment assignment can be forced even if already assigned
+OUT=$($WGET_DUMP --header Cookie:PageSpeedExperiment=7 \
+ "$EXTEND_CACHE?PageSpeedEnrollExperiment=2")
+check_from "$OUT" fgrep 'PageSpeedExperiment=2'
+
+start_test If the user is already assigned, no need to assign them again.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=2' $EXTEND_CACHE)
+check_not_from "$OUT" fgrep 'PageSpeedExperiment='
+
+start_test The beacon should include the experiment id.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=2' $EXTEND_CACHE)
+check_from "$OUT" grep "pagespeed.addInstrumentationInit('/mod_pagespeed_beacon', 'load', '&exptid=2', 'http://localhost[:0-9]*/mod_pagespeed_example/extend_cache.html');"
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=7' $EXTEND_CACHE)
+check_from "$OUT" grep "pagespeed.addInstrumentationInit('/mod_pagespeed_beacon', 'load', '&exptid=7', 'http://localhost[:0-9]*/mod_pagespeed_example/extend_cache.html');"
+
+start_test The no-experiment group beacon should not include an experiment id.
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=0' $EXTEND_CACHE)
+check_not_from "$OUT" grep 'mod_pagespeed_beacon.*exptid'
+
+# We expect id=7 to be index=a and id=2 to be index=b because that's the
+# order they're defined in the config file.
+start_test Resource urls are rewritten to include experiment indexes.
+WGET_ARGS="--header Cookie:PageSpeedExperiment=7" fetch_until $EXTEND_CACHE \
+ "fgrep -c .pagespeed.a.ic." 1
+WGET_ARGS="--header Cookie:PageSpeedExperiment=2" fetch_until $EXTEND_CACHE \
+ "fgrep -c .pagespeed.b.ic." 1
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=7' $EXTEND_CACHE)
+check_from "$OUT" fgrep ".pagespeed.a.ic."
+OUT=$($WGET_DUMP --header='Cookie: PageSpeedExperiment=2' $EXTEND_CACHE)
+check_from "$OUT" fgrep ".pagespeed.b.ic."
+
+start_test Options are respected.
+# For id 2 ARIS is on. First fetch until normal.js is rewritten, after which
+# we expect introspective.js would be rewritten if it were going to be.
+WGET_ARGS="--header Cookie:PageSpeedExperiment=2" fetch_until -save $ARIS \
+ 'grep -c "src=\"normal.js\""' 0
+check [ $(grep -c "src=\"introspection.js\"" $FETCH_UNTIL_OUTFILE) = 1 ]
+
+# For id 7 ARIS is off. Repeat this test, expecting it to get renamed.
+WGET_ARGS="--header Cookie:PageSpeedExperiment=7" fetch_until -save $ARIS \
+ 'grep -c "src=\"normal.js\""' 0
+check [ $(grep -c "src=\"introspection.js\"" $FETCH_UNTIL_OUTFILE) = 0 ]
+
+start_test Images are different when the url specifies different experiments.
+# While the images are the same, image B should be smaller because in the config
+# file we enable convert_jpeg_to_progressive only for id=2 (side B). Ideally we
+# would check that it was actually progressive, by checking whether "identify
+# -verbose filename" produced "Interlace: JPEG" or "Interlace: None", but that
+# would introduce a dependency on imagemagick. This is just as accurate, but
+# more brittle (because changes to our compression code would change the
+# computed file sizes).
+IMG_A="$EXAMPLE/images/xPuzzle.jpg.pagespeed.a.ic.fakehash.jpg"
+IMG_B="$EXAMPLE/images/xPuzzle.jpg.pagespeed.b.ic.fakehash.jpg"
+fetch_until $IMG_A 'wc -c' 102902 "" -le
+fetch_until $IMG_B 'wc -c' 98276 "" -le
diff --git a/src/install/apache_https_fetch_test.sh b/src/install/apache_https_fetch_test.sh
new file mode 100755
index 0000000..3620a9c
--- /dev/null
+++ b/src/install/apache_https_fetch_test.sh
@@ -0,0 +1,22 @@
+#!/bin/bash
+
+# Tests that mod_pagespeed can fetch HTTPS resources. Note that mod_pagespeed
+# does not work like this by default: a flag must be specified in
+# pagespeed.conf:
+# ModPagespeedFetchHttps enable
+
+echo Testing that HTTPS fetching is enabled and working in mod_pagespeed.
+echo Note that this test will fail with timeouts if the serf fetcher has not
+echo been compiled in.
+
+this_dir="$( dirname "${BASH_SOURCE[0]}" )"
+PAGESPEED_CODE_DIR="$this_dir/../../../third_party/pagespeed"
+if [ ! -e "$PAGESPEED_CODE_DIR" ] ; then
+ PAGESPEED_CODE_DIR="$this_dir/../pagespeed"
+fi
+SERVER_NAME=apache
+source "$PAGESPEED_CODE_DIR/automatic/system_test_helpers.sh" || exit 1
+
+echo Test that we can rewrite an HTTPS resource from a domain with a valid cert.
+fetch_until $TEST_ROOT/https_fetch/https_fetch.html \
+ 'grep -c /https_gstatic_dot_com/1.gif.pagespeed.ce' 1
diff --git a/src/install/build_release_platform.sh b/src/install/build_release_platform.sh
new file mode 100755
index 0000000..b7c8243
--- /dev/null
+++ b/src/install/build_release_platform.sh
@@ -0,0 +1,362 @@
+#!/bin/bash
+#
+# Builds a mod_pagespeed distribution.
+#
+# Usage:
+# 1. [optional] Log into buildbot
+# 2. ./build_release_platform.sh [-clean] $RELEASE $CHANNEL [patch_file] [tag]
+#
+# Where $RELEASE is either the 4 segment version number, e.g 0.10.21.2, or the
+# word "trunk" and $CHANNEL is either "beta" or "stable".
+#
+# The optional "patch_file" will be applied after running 'gclient sync'.
+# Also note that if you re-run the same build command multiple times,
+# pre-applied patches will remain.
+#
+# If tag is specified it will be used to pick what gets checked out, instead of
+# $RELEASE; this is useful if applying an unreleased patch that changes the
+# version number for a security release.
+#
+# Updating:
+# If you make any changes to this script, please scp it up to the buildbots:
+# scp build_release_platform.sh [IP-OF-CENTOS-BUILDBOT]:
+# scp build_release_platform.sh [IP-OF-UBUNTU-BUILDBOT]:
+#
+# The buildbots have hardlinks between /home/buildbot/build_release_platform.sh
+# and /var/chroot/[chroot-name]/home/buildbot/build_release_platform.sh, which
+# means that when you update the copy in ~/build_release_platform.sh you're
+# automatically updating the one in the chroot as well.
+
+
+set -e # exit script if any command returns an error
+set -u # exit the script if any variable is uninitialized
+export VIRTUALBOX_TEST="VIRTUALBOX_TEST" # to skip some tests that fail in VM
+
+# Cleanup /var/html if desired before running.
+if [ "$1" = "-clean" ]; then
+ clean=1
+ shift
+else
+ clean=0
+fi
+
+RELEASE=$1
+CHANNEL=$2
+TAG=$RELEASE
+
+do_patch="0"
+
+DOC_ROOT=/var/www
+[ -d /var/www/html ] && DOC_ROOT=/var/www/html
+
+if [ -d $DOC_ROOT/do_not_modify -a \
+ ! -L $DOC_ROOT/do_not_modify ] || \
+ [ -d $DOC_ROOT/mod_pagespeed_example -a \
+ ! -L $DOC_ROOT/mod_pagespeed_example ] || \
+ [ -d $DOC_ROOT/mod_pagespeed_test -a \
+ ! -L $DOC_ROOT/mod_pagespeed_test ]; then
+ if [ $clean -eq 0 ]; then
+ echo Stale directories in $DOC_ROOT exist. Clean them
+ echo yourself or specify -clean as first arg to let the script do that
+ echo for you.
+ exit 1
+ fi
+ set -x
+ sudo rm -rf $DOC_ROOT/do_not_modify
+ sudo rm -rf $DOC_ROOT/mod_pagespeed_example
+ sudo rm -rf $DOC_ROOT/mod_pagespeed_test
+else
+ set -x
+fi
+
+# Optionally apply a patch file before building. To create a patch from a
+# commit, do:
+# git show [commit id] > patchfile
+if [ $# -ge 3 ]; then
+ patch_file=$(readlink -f $3)
+ do_patch="1"
+fi
+
+if [ $# -eq 4 ]; then
+ TAG=$4
+fi
+
+if [ -d ~/bin/depot_tools ]; then
+ cd ~/bin/depot_tools
+ git pull
+else
+ mkdir -p ~/bin
+ cd ~/bin
+ git clone https://chromium.googlesource.com/chromium/tools/depot_tools.git
+fi
+
+PATH=~/bin/depot_tools:$PATH
+
+# Are we on CentOS or Ubuntu?
+# TODO(jmarantz): make this work on RedHat.
+#
+# Note that the 'force' directives on the install commands are only
+# used for this RPM/DEB build/install/test script, and are not
+# exported as a recommended flow for users.
+#
+# If the force directives are not included, then the installation will
+# fail if run a second time.
+if grep -q CentOS /etc/issue; then
+ # We redirect stderr to stdout because on Centos5 "httpd -M" writes to stderr
+ # for some reason.
+ /usr/sbin/httpd -M 2>&1 | grep -q php5_module || \
+ sudo yum install php php-mbstring
+
+ EXT=rpm
+ INSTALL="rpm --install"
+ RESTART="./centos.sh apache_debug_restart"
+ TEST="./centos.sh enable_ports_and_file_access apache_vm_system_tests"
+
+ # To build on Centos 5/6 we need gcc 4.8 from scientific linux. We can't
+ # export CC and CXX because some steps still use a literal "g++". But #$%^
+ # devtoolset includes its own sudo, and we don't want that because it doesn't
+ # support -E, so rename it if it exists before updating PATH.
+ DEVTOOLSET_BIN=/opt/rh/devtoolset-2/root/usr/bin/
+ if [ -e "$DEVTOOLSET_BIN/sudo" ]; then
+ sudo mv "$DEVTOOLSET_BIN/sudo" "$DEVTOOLSET_BIN/sudo.ignored"
+ fi
+ export PATH="$DEVTOOLSET_BIN:$PATH"
+
+ echo We appear to be running on CentOS. Building rpm...
+else
+ echo Making sure PHP is installed ...
+ apache2ctl -M | grep -q php5_module || \
+ sudo apt-get install libapache2-mod-php5
+
+ EXT=deb
+ INSTALL="dpkg --install"
+ RESTART="./ubuntu.sh apache_debug_restart"
+ TEST="./ubuntu.sh apache_vm_system_tests"
+
+ if [ -d /usr/lib/gcc-mozilla/bin ]; then
+ export PATH=/usr/lib/gcc-mozilla/bin:$PATH
+ fi
+
+ echo We appear to NOT be running on CentOS. Building deb...
+fi
+
+if [ $(uname -m) = x86_64 ]; then
+ BIT_SIZE_NAME=x64
+else
+ BIT_SIZE_NAME=ia32
+fi
+
+if [[ "$RELEASE" == 1.9.32.* || "$RELEASE" == 1.10.33.* ]]; then
+ # Without https://github.com/pagespeed/mod_pagespeed/commit/0e79a08 the font
+ # inlining tests will fail, and we never backported that fix to 1.9 or 1.10.
+ export DISABLE_FONT_API_TESTS=1
+fi
+
+build_dir="$HOME/build/$RELEASE/$BIT_SIZE_NAME"
+release_dir="$HOME/release/$RELEASE/$BIT_SIZE_NAME"
+log_dir="$build_dir/log"
+rm -rf $log_dir
+mkdir -p $log_dir
+rm -rf "$release_dir"
+mkdir -p "$release_dir"
+
+# Usage:
+# check log_filename.log command args...
+#
+# The log file will placed in $log_dir, and will be 'tail'ed if the
+# command fails.
+function check() {
+  # We are explicitly checking error status here and tailing the log
+  # file so turn off auto-exit-on-error temporarily.
+  set +e
+  log_filename="$log_dir/$1"
+  shift
+  echo "[$(date '+%k:%M:%S')] $* >> $log_filename"
+  # Quote "$@" so arguments containing spaces or glob characters are
+  # forwarded to the command (and logged) as single words instead of
+  # being re-split and re-globbed.
+  echo "$@" >> "$log_filename"
+  "$@" >> "$log_filename" 2>&1
+  rc=$?
+  # Numeric comparison (-ne), not string comparison, for the exit code.
+  if [ "$rc" -ne 0 ]; then
+    echo '***' status is "$rc"
+    tail "$log_filename"
+    echo "Failed at $(date)"
+    exit 1
+  fi
+  set -e
+}
+
+# We do the building in build/ which generates all kinds of crap which
+# we don't ship in our binaries, such as .o and .a files.
+#
+# We put what we want to ship into release/ so that it can be scp'd onto
+# the signing server with only one password.
+mkdir -p $build_dir
+rm -rf "$build_dir/src"
+cd $build_dir && git clone https://github.com/pagespeed/mod_pagespeed.git src
+cd $build_dir/src
+if [ "$TAG" != "trunk" ]; then # Just treat "trunk" as master.
+ git checkout "$TAG"
+fi
+
+if [ "$EXT" = "rpm" ] ; then
+ # MANYLINUX1 is required for CentOS 5 (but probably not newer CentOS):
+ # https://github.com/grpc/grpc/issues/7147
+ export CFLAGS='-DGPR_MANYLINUX1 -std=gnu99'
+
+ # On the centos buildbot we need to patch the Makefile to make
+ # apache_debug_restart to a killall -9 httpd.
+ cd $build_dir/src
+ killall_patch=$(cat <<EOF
+diff --git a/install/Makefile b/install/Makefile
+index 376def9..29fa72e 100644
+--- a/install/Makefile
++++ b/install/Makefile
+@@ -573,6 +573,7 @@ setup_test_machine :
+ >sudo \$(APACHE_CONTROL_PROGRAM) restart
+
+ apache_debug_restart :
++>killall -9 httpd || echo "not killed"
+ >\$(APACHE_CONTROL_PROGRAM) restart
+
+ apache_debug_stop : stop
+EOF
+)
+ echo "$killall_patch" | tr '>' '\t' | git apply
+fi
+cd $build_dir
+
+check gclient.log \
+ gclient config https://github.com/pagespeed/mod_pagespeed.git --unmanaged --name=src
+check gclient.log gclient sync --force
+
+cd src
+
+# Neither buildbot is using a compiler recent enough to provide stdalign.h,
+# which boringssl needs. Even on Centos 5's gcc 4.1 we do have a way to set
+# alignment, though, so following
+# https://sourceware.org/bugzilla/show_bug.cgi?id=19390 define alignas ourself
+# and put it where boringssl can find it.
+echo '#define alignas(x) __attribute__ ((aligned (x)))' > \
+ third_party/boringssl/src/crypto/stdalign.h
+
+if [ $do_patch -eq "1" ]; then
+ echo Applying patch-file $patch_file
+ git apply $patch_file
+
+ echo "Re-running gclient in case the patch touched DEPS"
+ check gclient.log gclient sync
+fi
+
+# This is needed on the vms, but not on our workstations for some reason.
+find $build_dir/src -name "*.sh" | xargs chmod +x
+cd $build_dir
+echo src/build/gyp_chromium -Dchannel=$CHANNEL
+export AR_host=$build_dir/src/build/wrappers/ar.sh
+check gyp_chromium.log python src/build/gyp_chromium -Dchannel=$CHANNEL
+
+cd src
+# It would be better to have AR.target overridden at gyp time, but
+# that functionality seems broken.
+MODPAGESPEED_ENABLE_UPDATES=1 check build.log \
+ make BUILDTYPE=Release AR.host=${AR_host} AR.target=${AR_host} V=1 \
+ linux_package_$EXT mod_pagespeed_test pagespeed_automatic_test
+
+ls -l $PWD/out/Release/mod-pagespeed-${CHANNEL}*
+mkdir -p "$release_dir"
+mv $PWD/out/Release/mod-pagespeed-${CHANNEL}* "$release_dir"
+
+if [ "$EXT" = "rpm" ] ; then
+ export SSL_CERT_DIR=/etc/pki/tls/certs
+ export SSL_CERT_FILE=/etc/pki/tls/cert.pem
+fi
+
+check unit_test.log out/Release/mod_pagespeed_test
+check unit_test.log out/Release/pagespeed_automatic_test
+
+# Buildbots should have NOPASSWD set, so won't need to be prompted for sudo
+# password.
+
+echo Purging old releases ...
+if [ "$EXT" = "rpm" ] ; then
+ # rpm --erase only succeeds if all packages listed are installed, so we need
+ # to find which one is installed and only erase that.
+ rpm --query mod-pagespeed-stable mod-pagespeed-beta | \
+ grep -v "is not installed" | \
+ xargs --no-run-if-empty sudo rpm --erase
+else
+ # dpkg --purge succeeds even if one or both of the packages is not installed.
+ sudo dpkg --purge mod-pagespeed-beta mod-pagespeed-stable
+fi
+
+echo Installing release ...
+check install.log sudo $INSTALL "$release_dir"/*.$EXT
+
+echo Test restart to make sure config file is valid ...
+cd $build_dir/src/install
+check install.log sudo -E $RESTART
+
+echo Testing release ...
+check system_test.log sudo -E $TEST
+
+echo Copy the unstripped .so files to a safe place for easier debugging later.
+NBITS=$(getconf LONG_BIT)
+cp $build_dir/src/out/Release/libmod_pagespeed.so \
+ "$release_dir"/unstripped_libmodpagespeed_${NBITS}_${EXT}.so
+cp $build_dir/src/out/Release/libmod_pagespeed_ap24.so \
+ "$release_dir"/unstripped_libmodpagespeed_ap24_${NBITS}_${EXT}.so
+
+# Because we now build on the build-bots which are not on the internal network,
+# you need to manually pull the builds from ~/release on each server.
+echo Build succeeded at $(date)
+
+# This doesn't necessarily need to be limited to CentOS, but we only need to
+# build PSOL libraries on one system, and CentOS has the oldest GCC, so we
+# build it there.
+if [ "$EXT" = "rpm" -a "$CHANNEL" = "beta" ]; then
+ echo Building PSOL binaries ...
+
+ for buildtype in Release Debug; do
+ cd $build_dir/src
+ check psol_build.log make BUILDTYPE=$buildtype \
+ AR.host=${AR_host} AR.target=${AR_host} V=1 \
+ mod_pagespeed_test pagespeed_automatic_test
+
+
+ if [[ "$RELEASE" == 1.9.32.* ]]; then
+ # On 1.9 (and earlier, but we don't build them anymore) automatic/ was in
+ # a different place.
+ automatic_dir=net/instaweb/automatic/
+ else
+ automatic_dir=pagespeed/automatic/
+ fi
+
+ cd $automatic_dir
+
+ # TODO(sligocki): Fix and use
+ # check psol_automatic_build.log
+ set +e
+ make MOD_PAGESPEED_ROOT=$build_dir/src BUILDTYPE=$buildtype \
+ AR.host=${AR_host} AR.target=${AR_host} V=1 \
+ CXXFLAGS="-DSERF_HTTPS_FETCHING=1" \
+ all \
+ >> psol_automatic_build.log 2>&1
+ set -e
+ cd $build_dir/src
+
+ BINDIR=$HOME/psol_release/$RELEASE/psol/lib/$buildtype/linux/$BIT_SIZE_NAME
+ mkdir -p $BINDIR/
+ mv $automatic_dir/pagespeed_automatic.a $BINDIR/
+ if [ "$buildtype" = "Release" ]; then
+ mv out/$buildtype/js_minify $BINDIR/pagespeed_js_minify
+ fi
+
+ # Sync release binaries incrementally as they're built so we don't
+ # lose progress.
+ echo PSOL $buildtype build succeeded at $(date)
+
+ # VMs are running low on disk space, so clean up between builds.
+ rm -rf out/$buildtype
+ done
+fi
+
+
+exit 0
diff --git a/src/install/centos.sh b/src/install/centos.sh
new file mode 100755
index 0000000..18b38bf
--- /dev/null
+++ b/src/install/centos.sh
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+set -x
+exec make \
+ APACHE_CONTROL_PROGRAM=/etc/init.d/httpd \
+ APACHE_DOC_ROOT=/var/www/html \
+ APACHE_LOG=/var/log/httpd/error_log \
+ APACHE_MODULES=/etc/httpd/modules \
+ APACHE_CONF_FILE=/etc/httpd/conf/httpd.conf \
+ APACHE_PIDFILE=/var/run/httpd.pid \
+ APACHE_PROGRAM=/usr/sbin/httpd \
+ APACHE_ROOT=/etc/httpd \
+ APACHE_STOP_COMMAND=stop \
+ APACHE_USER=apache \
+ BINDIR=/usr/local/bin \
+ SSL_CERT_DIR=/etc/pki/tls/certs \
+ SSL_CERT_FILE_COMMAND="ModPagespeedSslCertFile /etc/pki/tls/cert.pem" \
+ $*
diff --git a/src/install/common/BRANDING b/src/install/common/BRANDING
new file mode 100644
index 0000000..e0a7de9
--- /dev/null
+++ b/src/install/common/BRANDING
@@ -0,0 +1,5 @@
+COMPANY_FULLNAME=Google Inc.
+COMPANY_SHORTNAME=Google Inc.
+PRODUCT_FULLNAME=mod_pagespeed
+PRODUCT_SHORTNAME=mod_pagespeed
+COPYRIGHT=Copyright (C) 2010.
diff --git a/src/install/common/apt.include b/src/install/common/apt.include
new file mode 100644
index 0000000..e9a9f60
--- /dev/null
+++ b/src/install/common/apt.include
@@ -0,0 +1,255 @@
+@@include@@variables.include
+
+APT_GET="`which apt-get 2> /dev/null`"
+APT_CONFIG="`which apt-config 2> /dev/null`"
+
+SOURCES_PREAMBLE="### THIS FILE IS AUTOMATICALLY CONFIGURED ###
+# You may comment out this entry, but any other modifications may be lost.\n"
+
+# Parse apt configuration and return requested variable value.
+apt_config_val() {
+ APTVAR="$1"
+ if [ -x "$APT_CONFIG" ]; then
+ "$APT_CONFIG" dump | sed -e "/^$APTVAR /"'!d' -e "s/^$APTVAR \"\(.*\)\".*/\1/"
+ fi
+}
+
+# Install the repository signing key (see also:
+# http://www.google.com/linuxrepositories/aboutkey.html)
+install_key() {
+ APT_KEY="`which apt-key 2> /dev/null`"
+ if [ -x "$APT_KEY" ]; then
+ "$APT_KEY" add - >/dev/null 2>&1 <<KEYDATA
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.2.2 (GNU/Linux)
+
+mQGiBEXwb0YRBADQva2NLpYXxgjNkbuP0LnPoEXruGmvi3XMIxjEUFuGNCP4Rj/a
+kv2E5VixBP1vcQFDRJ+p1puh8NU0XERlhpyZrVMzzS/RdWdyXf7E5S8oqNXsoD1z
+fvmI+i9b2EhHAA19Kgw7ifV8vMa4tkwslEmcTiwiw8lyUl28Wh4Et8SxzwCggDcA
+feGqtn3PP5YAdD0km4S4XeMEAJjlrqPoPv2Gf//tfznY2UyS9PUqFCPLHgFLe80u
+QhI2U5jt6jUKN4fHauvR6z3seSAsh1YyzyZCKxJFEKXCCqnrFSoh4WSJsbFNc4PN
+b0V0SqiTCkWADZyLT5wll8sWuQ5ylTf3z1ENoHf+G3um3/wk/+xmEHvj9HCTBEXP
+78X0A/0Tqlhc2RBnEf+AqxWvM8sk8LzJI/XGjwBvKfXe+l3rnSR2kEAvGzj5Sg0X
+4XmfTg4Jl8BNjWyvm2Wmjfet41LPmYJKsux3g0b8yzQxeOA4pQKKAU3Z4+rgzGmf
+HdwCG5MNT2A5XxD/eDd+L4fRx0HbFkIQoAi1J3YWQSiTk15fw7RMR29vZ2xlLCBJ
+bmMuIExpbnV4IFBhY2thZ2UgU2lnbmluZyBLZXkgPGxpbnV4LXBhY2thZ2VzLWtl
+eW1hc3RlckBnb29nbGUuY29tPohjBBMRAgAjAhsDBgsJCAcDAgQVAggDBBYCAwEC
+HgECF4AFAkYVdn8CGQEACgkQoECDD3+sWZHKSgCfdq3HtNYJLv+XZleb6HN4zOcF
+AJEAniSFbuv8V5FSHxeRimHx25671az+uQINBEXwb0sQCACuA8HT2nr+FM5y/kzI
+A51ZcC46KFtIDgjQJ31Q3OrkYP8LbxOpKMRIzvOZrsjOlFmDVqitiVc7qj3lYp6U
+rgNVaFv6Qu4bo2/ctjNHDDBdv6nufmusJUWq/9TwieepM/cwnXd+HMxu1XBKRVk9
+XyAZ9SvfcW4EtxVgysI+XlptKFa5JCqFM3qJllVohMmr7lMwO8+sxTWTXqxsptJo
+pZeKz+UBEEqPyw7CUIVYGC9ENEtIMFvAvPqnhj1GS96REMpry+5s9WKuLEaclWpd
+K3krttbDlY1NaeQUCRvBYZ8iAG9YSLHUHMTuI2oea07Rh4dtIAqPwAX8xn36JAYG
+2vgLAAMFB/wKqaycjWAZwIe98Yt0qHsdkpmIbarD9fGiA6kfkK/UxjL/k7tmS4Vm
+CljrrDZkPSQ/19mpdRcGXtb0NI9+nyM5trweTvtPw+HPkDiJlTaiCcx+izg79Fj9
+KcofuNb3lPdXZb9tzf5oDnmm/B+4vkeTuEZJ//IFty8cmvCpzvY+DAz1Vo9rA+Zn
+cpWY1n6z6oSS9AsyT/IFlWWBZZ17SpMHu+h4Bxy62+AbPHKGSujEGQhWq8ZRoJAT
+G0KSObnmZ7FwFWu1e9XFoUCt0bSjiJWTIyaObMrWu/LvJ3e9I87HseSJStfw6fki
+5og9qFEkMrIrBCp3QGuQWBq/rTdMuwNFiEkEGBECAAkFAkXwb0sCGwwACgkQoECD
+D3+sWZF/WACfeNAu1/1hwZtUo1bR+MWiCjpvHtwAnA1R3IHqFLQ2X3xJ40XPuAyY
+/FJG
+=Quqp
+-----END PGP PUBLIC KEY BLOCK-----
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQINBFcMjNMBEAC6Wr5QuLIFgz1V1EFPlg8ty2TsjQEl4VWftUAqWlMevJFWvYEx
+BOsOZ6kNFfBfjAxgJNWTkxZrHzDl74R7KW/nUx6X57bpFjUyRaB8F3/NpWKSeIGS
+pJT+0m2SgUNhLAn1WY/iNJGNaMl7lgUnaP+/ZsSNT9hyTBiH3Ev5VvAtMGhVI/u8
+P0EtTjXp4o2U+VqFTBGmZ6PJVhCFjZUeRByloHw8dGOshfXKgriebpioHvU8iQ2U
+GV3WNIirB2Rq1wkKxXJ/9Iw+4l5m4GmXMs7n3XaYQoBj28H86YA1cYWSm5LR5iU2
+TneI1fJ3vwF2vpSXVBUUDk67PZhg6ZwGRT7GFWskC0z8PsWd5jwK20mA8EVKq0vN
+BFmMK6i4fJU+ux17Rgvnc9tDSCzFZ1/4f43EZ41uTmmNXIDsaPCqwjvSS5ICadt2
+xeqTWDlzONUpOs5yBjF1cfJSdVxsfshvln2JXUwgIdKl4DLbZybuNFXnPffNLb2v
+PtRJHO48O2UbeXS8n27PcuMoLRd7+r7TsqG2vBH4t/cB/1vsvWMbqnQlaJ5VsjeW
+Tp8Gv9FJiKuU8PKiWsF4EGR/kAFyCB8QbJeQ6HrOT0CXLOaYHRu2TvJ4taY9doXn
+98TgU03XTLcYoSp49cdkkis4K+9hd2dUqARVCG7UVd9PY60VVCKi47BVKQARAQAB
+tFRHb29nbGUgSW5jLiAoTGludXggUGFja2FnZXMgU2lnbmluZyBBdXRob3JpdHkp
+IDxsaW51eC1wYWNrYWdlcy1rZXltYXN0ZXJAZ29vZ2xlLmNvbT6JAjgEEwECACIF
+AlcMjNMCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEHch9jvTi0eW5CAP
+/RELE/OAoA4o1cMBxJsljWgCgDig2Ge91bFCN0vExLcP0iByra7qPWJowXDJ5sCj
+UBnCkrxGo5D15U7cW5FC0+qWU73q0AuG3OjKDQ49ecdRkYHwcvwWQvT5Lz3DwOGW
+4armfEuzWXcUDeShR7AgfcTq+Pfoo3dHqdB8TmtNySu/AdJFmVH/xTiWYWrOSibh
+yLuaSW/0cTkHW0GDk06MlDkcdkTzhO5GMDO7PUxBgCysTXFR0T9TVWDo9VwvuMww
+2pE5foleA0X6PD/6GQpy3aX2xry8rhFvYplEa5zwXhqsscdKXlp1ZPZ4PMvvwe49
+5mY9n/1Rx1TmMvIcLHKP61sURMOve97Gipk/iD6oaeeT8I0khexHCQy7JMROoPMr
+z5onVOt2rAGZScIZsm5FYGSt9eDKBWI6qpJ/5QoVhkRWjOXOchZlJHo+kLdg6jq2
+vOnIlFnXo0p6Rqf/IEq5PMh70vVZpk4tNYNy4zRx03ZTA9qXRLW+ftxSQIYMY5eC
+Z31lqSH4EjqgtUG+zn2A6juKayb1nkt2O3F1wWOm6oTzNsAP5LdReJRlw151Jp4U
+4ftGtw7ygq+nvokXL7YLuu8sbFqfFXcTPrAZa5M9gnC7GCnIQyF/WvqUnrcaC1jp
+qBc+pkSJhROhN12QY8Po8AT8/UaUh/dPIiW5A4o8pOPEiEYEEBECAAYFAlcNtn8A
+CgkQoECDD3+sWZGy3wCfWTMZWsipX+yG/VB4Q1FunIfEVHYAnimEXCjZ3IVyy5F1
+yU36PihDCjWqiEYEEBECAAYFAlcNtvEACgkQMUcsOzG36APnRwCeJ/bfGf8FBa4q
+5TMw8p1GS1jWT5EAn2sc02481HHdTmZiW/CGWXmgE+OPuQINBFcMjcgBEACrL9gH
+hdr6gQX4ZMA5slp628xOrHCsdLO54WNdPRKeFHXJqSSJi3fs8FxBWI4FnejeKUGb
+F+MrOlFpKqELxaMje7bwZyap3izztZHszP3YmOoTBJvREGKdCkL82cLsChYD/Prg
+E8crvkhSnq9evcsKAnziMxg/wDCChUL3Evqo29BeoB81f+E9wkrUTMCT/kVxt3pG
+RalKX0UhrtKrpm8yRfjufJfwjkdwgvinkRGZ2GrWHj4LzMbi9/udYaJZ66Yw0hEU
+4USxUB9vNtmSFrb4EB91T2rhc68dgQ4jYBI7K4Ebb8XaWAxb+IAq31l1UkiEA32F
+4qUMoL6rChB4y6nHxOnTvs+XEb5TBwXVogjLRKTQs5U/HV9l7j+HAchk5y3im2N2
+UKmMxHqotvPZZUZPdaCRxUedQf9gR0yLZV+U9BcDuwjzL/zjrthNZYlEGJ6HZ/TL
+STp4dDH+uXuLqMVWy5iquKtnbrnNTQtv5twD+Ajpgy60YLOJ9YaiJ4GjifOpzSk8
+3e1rJ3p/pX6B5NWQinVLZJzxyeOoh3iMjdmCDSnEXLrCmYv5g6jyV/Wbd4GYFuMK
+8TT7+PQdWLcbZ/Lxc5w0s+c7+f5OfmKXO5KPHnnUsrF5DBaKRPjScpwePQitxeIg
+lUgEMDkNruBhu1PzCxd3BtXgu++K3WdoH3VcgwARAQABiQREBBgBAgAPBQJXDI3I
+AhsCBQkFo5qAAikJEHch9jvTi0eWwV0gBBkBAgAGBQJXDI3IAAoJEBOXvFNkDbVR
+QSYP/0Ewr3T7e0soTz8g4QJLLVqZDZdX8Iez04idNHuvAu0AwdZ2wl0C+tMkD7l4
+R2aI6BKe/9wPndk/NJe+ZYcD/uzyiKIJQD48PrifNnwvHu9A80rE4BppQnplENeh
+ibbWaGNJQONGFJx7QTYlFjS5LNlG1AX6mQjxvb423zOWSOmEamYXYBmYyMG6vkr/
+XTPzsldky8XFuPrJUZslL/Wlx31XQ1IrtkHHOYqWwr0hTc50/2O8H0ewl/dBZLq3
+EminZZ+tsTugof0j4SbxYhplw99nGwbN1uXy4L8/dWOUXnY5OgaTKZPF15zRMxXN
+9FeylBVYpp5kzre/rRI6mQ2lafYHdbjvd7ryHF5JvYToSDXd0mzF2nLzm6jwsO84
+7ZNd5GdTD6/vcef1IJta1nSwA/hhLtgtlz6/tNncp3lEdCjAMx29jYPDX+Lqs9JA
+xcJHufr82o6wM9TF24Q8ra8NbvB63odVidCfiHoOsIFDUrazH8XuaQzyZkI0bbzL
+mgMAvMO6u1zPfe/TK6LdJg7AeAKScOJS38D5mmwaD1bABr67ebA/X5HdaomSDKVd
+UYaewfTGBIsrWmCmKpdb+WfX4odFpNzXW/qskiBp5WSesKvN1QUkLJZDZD1kz2++
+Xul5B97s5LxLTLRwvgLoNaUFr3lnejzNLgdBpf6FnkA59syRUuIP/jiAZ2uJzXVK
+PeRJqMGL+Ue2HiVEe8ima3SQIceqW8jKS7c7Nic6dMWxgnDpk5tJmVjrgfc0a9c1
+FY4GomUBbZFj+j73+WRk3EaVKIsty+xz48+rlJjdYFVCJo0Jp67jjjXOt6EOHTni
+OA/ANtzRIzDMnWrwJZ7AxCGJ4YjLShkcRM9S30X0iuAkxNILX++SNOd8aqc2bFof
+yTCkcbk6CIc1W00vffv1QGTNjstNpVSl9+bRmlJDqJWnDGk5Nl4Ncqd8X51V0tYE
+g6WEK4OM83wx5Ew/TdTRq5jJkbCu2GYNaNNNgXW7bXSvT5VINbuP6dmbi1/8s0jK
+JQOEBI3RxxoB+01Dgx9YdNfjsCM3hvQvykaWMALeZIpzbXxV118Y9QQUIRe2L+4X
+ZACEAhWjj2K1wP7ODGTQrrM4q4sIw1l3l7yO9aXXN7likAAddT4WEpGV0CiorReO
+J1y/sKJRJSI/npN1UK7wMazZ+yzhxN0qzG8sqREKJQnNuuGQQ/qIGb/oe4dPO0Fi
+hAUGkWoa0bgtGVijN5fQSbMbV50kZYqaa9GnNQRnchmZb+pK2xLcK85hD1np37/A
+m5o2ggoONj3qI3JaRHsZaOs1qPQcyd46OyIFUpHJIfk4nezDCoQYd93bWUGqDwxI
+/n/CsdO0365yqDO/ADscehlVqdAupVv2
+=dmoF
+-----END PGP PUBLIC KEY BLOCK-----
+KEYDATA
+ fi
+}
+
+# Set variables for the locations of the apt sources lists, derived from
+# apt's own configuration ("apt-config dump"):
+#   APT_SOURCES    - the main sources.list file
+#   APT_SOURCESDIR - the sources.list.d fragment directory
+find_apt_sources() {
+  APTDIR=$(apt_config_val Dir)
+  APTETC=$(apt_config_val 'Dir::Etc')
+  APT_SOURCES="$APTDIR$APTETC$(apt_config_val 'Dir::Etc::sourcelist')"
+  APT_SOURCESDIR="$APTDIR$APTETC$(apt_config_val 'Dir::Etc::sourceparts')"
+}
+
+# Update the Google repository if it's not set correctly.
+# Note: this doesn't necessarily enable the repository, it just makes sure the
+# correct settings are available in the sources list.
+# Returns:
+# 0 - no update necessary
+# 2 - error
+update_bad_sources() {
+  if [ ! "$REPOCONFIG" ]; then
+    return 0
+  fi
+
+  find_apt_sources
+
+  SOURCELIST="$APT_SOURCESDIR/@@PACKAGE@@.list"
+  # Don't do anything if the file isn't there, since that probably means the
+  # user disabled it.
+  if [ ! -r "$SOURCELIST" ]; then
+    return 0
+  fi
+
+  # Basic check for active configurations (non-blank, non-comment lines).
+  ACTIVECONFIGS=$(grep -v "^[[:space:]]*\(#.*\)\?$" "$SOURCELIST" 2>/dev/null)
+
+  # Check if the correct repository configuration is in there.
+  REPOMATCH=$(grep "^[[:space:]#]*\b$REPOCONFIG\b" "$SOURCELIST" \
+    2>/dev/null)
+
+  # Check if the correct repository is disabled.
+  MATCH_DISABLED=$(echo "$REPOMATCH" | grep "^[[:space:]]*#" 2>/dev/null)
+
+  # Now figure out if we need to fix things.
+  BADCONFIG=1
+  if [ "$REPOMATCH" ]; then
+    # If it's there and active, that's ideal, so nothing to do.
+    if [ ! "$MATCH_DISABLED" ]; then
+      BADCONFIG=0
+    else
+      # If it's not active, but neither is anything else, that's fine too.
+      if [ ! "$ACTIVECONFIGS" ]; then
+        BADCONFIG=0
+      fi
+    fi
+  fi
+
+  if [ $BADCONFIG -eq 0 ]; then
+    return 0
+  fi
+
+  # At this point, either the correct configuration is completely missing, or
+  # the wrong configuration is active. In that case, just abandon the mess and
+  # recreate the file with the correct configuration. If there were no active
+  # configurations before, create the new configuration disabled.
+  DISABLE=""
+  if [ ! "$ACTIVECONFIGS" ]; then
+    DISABLE="#"
+  fi
+  # Use fixed printf format strings so '%' or '\' characters in the repo
+  # configuration cannot be misinterpreted as format directives. '%b'
+  # still expands the literal "\n" escape embedded in $SOURCES_PREAMBLE.
+  printf '%b' "$SOURCES_PREAMBLE" > "$SOURCELIST"
+  printf '%s\n' "$DISABLE$REPOCONFIG" >> "$SOURCELIST"
+  if [ $? -eq 0 ]; then
+    return 0
+  fi
+  return 2
+}
+
+# Add the Google repository to the apt sources.
+# Returns:
+# 0 - sources list was created
+# 2 - error
+create_sources_lists() {
+  if [ ! "$REPOCONFIG" ]; then
+    return 0
+  fi
+
+  find_apt_sources
+
+  SOURCELIST="$APT_SOURCESDIR/@@PACKAGE@@.list"
+  if [ -d "$APT_SOURCESDIR" ]; then
+    # Use fixed printf format strings so '%' or '\' characters in the repo
+    # configuration cannot be misinterpreted as format directives. '%b'
+    # still expands the literal "\n" escape embedded in $SOURCES_PREAMBLE.
+    printf '%b' "$SOURCES_PREAMBLE" > "$SOURCELIST"
+    printf '%s\n' "$REPOCONFIG" >> "$SOURCELIST"
+    if [ $? -eq 0 ]; then
+      return 0
+    fi
+  fi
+  return 2
+}
+
+# Remove our custom sources list file.
+# Returns:
+# 0 - successfully removed, or not configured
+# !0 - failed to remove
+clean_sources_lists() {
+ if [ ! "$REPOCONFIG" ]; then
+ return 0
+ fi
+
+ find_apt_sources
+
+ rm -f "$APT_SOURCESDIR/@@PACKAGE@@.list" \
+ "$APT_SOURCESDIR/@@PACKAGE@@-@@CHANNEL@@.list"
+}
+
+# Detect if the repo config was disabled by distro upgrade and enable if
+# necessary.
+handle_distro_upgrade() {
+  if [ ! "$REPOCONFIG" ]; then
+    return 0
+  fi
+
+  find_apt_sources
+  SOURCELIST="$APT_SOURCESDIR/@@PACKAGE@@.list"
+  if [ -r "$SOURCELIST" ]; then
+    # Distro upgrades comment out third-party repos and append a
+    # "# disabled on upgrade to ..." marker; detect that exact form.
+    # $? here is the exit status of grep (via the command substitution).
+    REPOLINE=$(grep -E "^[[:space:]]*#[[:space:]]*$REPOCONFIG[[:space:]]*# disabled on upgrade to .*" "$SOURCELIST")
+    if [ $? -eq 0 ]; then
+      # Strip the leading comment marker and the trailing upgrade note,
+      # leaving just the repo configuration line (captured as \1).
+      sed -i -e "s,^[[:space:]]*#[[:space:]]*\($REPOCONFIG\)[[:space:]]*# disabled on upgrade to .*,\1," \
+        "$SOURCELIST"
+      # Record the re-enablement in syslog when logger(1) is available.
+      LOGGER=$(which logger 2> /dev/null)
+      if [ "$LOGGER" ]; then
+        "$LOGGER" -t "$0" "Reverted repository modification: $REPOLINE."
+      fi
+    fi
+  fi
+}
+
diff --git a/src/install/common/installer.include b/src/install/common/installer.include
new file mode 100644
index 0000000..6d260a0
--- /dev/null
+++ b/src/install/common/installer.include
@@ -0,0 +1,128 @@
+# Recursively replace @@include@@ template variables with the referenced file,
+# and write the resulting text to stdout.
+#   $1 - path of the template file to expand.
+# INCSTACK accumulates the chain of files currently being expanded
+# ("a->b->...") and is used both for cycle detection and error reporting;
+# it is unwound on return.
+process_template_includes() {
+  INCSTACK+="$1->"
+  # Includes are relative to the file that does the include.
+  INCDIR=$(dirname $1)
+  # Clear IFS so 'read' doesn't trim whitespace
+  local OLDIFS="$IFS"
+  IFS=''
+  while read -r LINE
+  do
+    # Non-empty iff the line is an @@include@@ directive.
+    INCLINE=$(sed -e '/^[[:space:]]*@@include@@/!d' <<<$LINE)
+    if [ -n "$INCLINE" ]; then
+      INCFILE=$(echo $INCLINE | sed -e "s#@@include@@\(.*\)#\1#")
+      # Simple filename match to detect cyclic includes.
+      CYCLE=$(sed -e "\#$INCFILE#"'!d' <<<$INCSTACK)
+      if [ "$CYCLE" ]; then
+        echo "ERROR: Possible cyclic include detected." 1>&2
+        echo "$INCSTACK$INCFILE" 1>&2
+        exit 1
+      fi
+      if [ ! -r "$INCDIR/$INCFILE" ]; then
+        echo "ERROR: Couldn't read include file: $INCDIR/$INCFILE" 1>&2
+        exit 1
+      fi
+      # Recurse so nested includes are expanded too.
+      process_template_includes "$INCDIR/$INCFILE"
+    else
+      echo "$LINE"
+    fi
+  done < "$1"
+  IFS="$OLDIFS"
+  INCSTACK=${INCSTACK%"$1->"}
+}
+
+# Replace template variables (@@VARNAME@@) in the given template file. If a
+# second argument is given, save the processed text to that filename, otherwise
+# modify the template file in place.
+# Exits non-zero if any @@-variable remains unsubstituted afterwards.
+process_template() (
+  # Don't worry if some of these substitution variables aren't set.
+  # Note that this function is run in a sub-shell so we don't leak this
+  # setting, since we still want unbound variables to be an error elsewhere.
+  set +u
+
+  local TMPLIN="$1"
+  if [ -z "$2" ]; then
+    local TMPLOUT="$TMPLIN"
+  else
+    local TMPLOUT="$2"
+  fi
+  # Process includes first so included text also gets substitutions.
+  TMPLINCL="$(process_template_includes "$TMPLIN")"
+  # NOTE(review): these sed expressions use '#' as the delimiter, so any
+  # substitution value containing '#' would break them — assumed not to
+  # occur in practice.
+  sed \
+    -e "s#@@PACKAGE@@#${PACKAGE}#g" \
+    -e "s#@@CHANNEL@@#${CHANNEL}#g" \
+    -e "s#@@COMPANY_FULLNAME@@#${COMPANY_FULLNAME}#g" \
+    -e "s#@@VERSION@@#${VERSION}#g" \
+    -e "s#@@REVISION@@#${REVISION}#g" \
+    -e "s#@@VERSIONFULL@@#${VERSIONFULL}#g" \
+    -e "s#@@BUILDDIR@@#${BUILDDIR}#g" \
+    -e "s#@@STAGEDIR@@#${STAGEDIR}#g" \
+    -e "s#@@SCRIPTDIR@@#${SCRIPTDIR}#g" \
+    -e "s#@@PRODUCTURL@@#${PRODUCTURL}#g" \
+    -e "s#@@PREDEPENDS@@#${PREDEPENDS}#g" \
+    -e "s#@@DEPENDS@@#${DEPENDS}#g" \
+    -e "s#@@PROVIDES@@#${PROVIDES}#g" \
+    -e "s#@@REPLACES@@#${REPLACES}#g" \
+    -e "s#@@CONFLICTS@@#${CONFLICTS}#g" \
+    -e "s#@@ARCHITECTURE@@#${HOST_ARCH}#g" \
+    -e "s#@@MAINTNAME@@#${MAINTNAME}#g" \
+    -e "s#@@MAINTMAIL@@#${MAINTMAIL}#g" \
+    -e "s#@@REPOCONFIG@@#${REPOCONFIG}#g" \
+    -e "s#@@SHORTDESC@@#${SHORTDESC}#g" \
+    -e "s#@@FULLDESC@@#${FULLDESC}#g" \
+    -e "s#@@APACHE_CONFDIR@@#${APACHE_CONFDIR}#g" \
+    -e "s#@@APACHE_MODULEDIR@@#${APACHE_MODULEDIR}#g" \
+    -e "s#@@APACHE_USER@@#${APACHE_USER}#g" \
+    -e "s#@@MOD_PAGESPEED_CACHE@@#${MOD_PAGESPEED_CACHE}#g" \
+    -e "s#@@MOD_PAGESPEED_LOG@@#${MOD_PAGESPEED_LOG}#g" \
+    -e "s#@@MODPAGESPEED_ENABLE_UPDATES@@#${MODPAGESPEED_ENABLE_UPDATES}#g" \
+    -e "s#@@COMMENT_OUT_DEFLATE@@#${COMMENT_OUT_DEFLATE}#g" \
+    -e "s#@@SSL_CERT_DIR@@#${SSL_CERT_DIR}#g" \
+    -e "s#@@SSL_CERT_FILE_COMMAND@@#${SSL_CERT_FILE_COMMAND}#g" \
+    > "$TMPLOUT" <<< "$TMPLINCL"
+
+  # Fail loudly (and show the offending lines via grep's output) if any
+  # template variable was not covered by the substitutions above.
+  if grep "@@" "$TMPLOUT"; then
+    echo "ERROR: $TMPLOUT contains @@-variables that were not"
+    echo "substituted by installer.include."
+    exit 1
+  fi
+)
+
+# Set up the installation directory hierarchy in the package staging area.
+prep_staging_common() {
+ install -m 755 -d \
+ "${STAGEDIR}${APACHE_CONFDIR}" \
+ "${STAGEDIR}${APACHE_MODULEDIR}" \
+ "${STAGEDIR}${MOD_PAGESPEED_CACHE}" \
+ "${STAGEDIR}${MOD_PAGESPEED_LOG}"
+}
+
+# Populate the VERSION, REVISION and OFFICIAL_BUILD globals, preferring the
+# values recorded in the build's version.txt when present.
+get_version_info() {
+  # Default to a bogus low version, so if somebody creates and installs
+  # a package with no version info, it won't prevent upgrading when
+  # trying to install a properly versioned package (i.e. a proper
+  # package will always be "newer").
+  VERSION="0.0.0.0"
+  # Use epoch timestamp so packages with bogus versions still increment
+  # and will upgrade older bogus-versioned packages.
+  REVISION=$(date +"%s")
+  # Default to non-official build since official builds set this
+  # properly.
+  OFFICIAL_BUILD=0
+
+  # version.txt is sourced as shell and is expected to define MAJOR,
+  # MINOR, BUILD, PATCH and LASTCHANGE (see LASTCHANGE.in).
+  VERSIONFILE="${BUILDDIR}/installer/version.txt"
+  if [ -f "${VERSIONFILE}" ]; then
+    source "${VERSIONFILE}"
+    VERSION="${MAJOR}.${MINOR}.${BUILD}.${PATCH}"
+    REVISION="${LASTCHANGE}"
+  fi
+}
+
+# Copy the built Apache modules into the package staging area.
+stage_install_common() {
+  echo "Staging common install files in '${STAGEDIR}'..."
+
+  # app and resources
+  # 'install -s' strips the shared objects as they are staged; unstripped
+  # copies are preserved separately by the release script.
+  install -m 644 -s "${BUILDDIR}/libmod_pagespeed.so" "${STAGEDIR}${APACHE_MODULEDIR}/mod_pagespeed.so"
+  install -m 644 -s "${BUILDDIR}/libmod_pagespeed_ap24.so" "${STAGEDIR}${APACHE_MODULEDIR}/mod_pagespeed_ap24.so"
+}
diff --git a/src/install/common/mod-pagespeed/mod-pagespeed.info b/src/install/common/mod-pagespeed/mod-pagespeed.info
new file mode 100644
index 0000000..9c1e2fd
--- /dev/null
+++ b/src/install/common/mod-pagespeed/mod-pagespeed.info
@@ -0,0 +1,20 @@
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This file provides common configuration information for building
+# mod-pagespeed packages for various platforms.
+
+# Base name of the package.
+PACKAGE="mod-pagespeed"
+
+# Brief package description.
+SHORTDESC="Apache 2 module to optimize web content."
+
+# Detailed package description.
+FULLDESC="mod_pagespeed is an Apache module that aims to speed up load time of pages by applying web performance best practices automatically."
+
+# Package maintainer information.
+MAINTNAME="mod_pagespeed developers"
+MAINTMAIL="mod-pagespeed-dev@googlegroups.com"
+PRODUCTURL="http://code.google.com/p/modpagespeed/"
diff --git a/src/install/common/pagespeed.conf.template b/src/install/common/pagespeed.conf.template
new file mode 100644
index 0000000..3e35831
--- /dev/null
+++ b/src/install/common/pagespeed.conf.template
@@ -0,0 +1,351 @@
+<IfModule pagespeed_module>
+ # Turn on mod_pagespeed. To completely disable mod_pagespeed, you
+ # can set this to "off".
+ ModPagespeed on
+
+ # We want VHosts to inherit global configuration.
+ # If this is not included, they'll be independent (except for inherently
+ # global options), at least for backwards compatibility.
+ ModPagespeedInheritVHostConfig on
+
+ # Direct Apache to send all HTML output to the mod_pagespeed
+ # output handler.
+ AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER text/html
+
+  # If you want mod_pagespeed to process XHTML as well, please uncomment this
+ # line.
+ # AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER application/xhtml+xml
+
+ # The ModPagespeedFileCachePath directory must exist and be writable
+ # by the apache user (as specified by the User directive).
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@/"
+
+ # LogDir is needed to store various logs, including the statistics log
+ # required for the console.
+ ModPagespeedLogDir "@@MOD_PAGESPEED_LOG@@"
+
+  # The location of SSL certificates is distribution-dependent.
+ ModPagespeedSslCertDirectory "@@SSL_CERT_DIR@@"
+ @@SSL_CERT_FILE_COMMAND@@
+
+ # If you want, you can use one or more memcached servers as the store for
+ # the mod_pagespeed cache.
+ # ModPagespeedMemcachedServers localhost:11211
+
+ # A portion of the cache can be kept in memory only, to reduce load on disk
+ # (or memcached) from many small files.
+ # ModPagespeedCreateSharedMemoryMetadataCache "@@MOD_PAGESPEED_CACHE@@/" 51200
+
+ # Override the mod_pagespeed 'rewrite level'. The default level
+ # "CoreFilters" uses a set of rewrite filters that are generally
+ # safe for most web pages. Most sites should not need to change
+ # this value and can instead fine-tune the configuration using the
+ # ModPagespeedDisableFilters and ModPagespeedEnableFilters
+ # directives, below. Valid values for ModPagespeedRewriteLevel are
+ # PassThrough, CoreFilters and TestingCoreFilters.
+ #
+ # ModPagespeedRewriteLevel PassThrough
+
+ # Explicitly disables specific filters. This is useful in
+  # conjunction with ModPagespeedRewriteLevel.  For instance, if one
+ # of the filters in the CoreFilters needs to be disabled for a
+ # site, that filter can be added to
+ # ModPagespeedDisableFilters. This directive contains a
+ # comma-separated list of filter names, and can be repeated.
+ #
+ # ModPagespeedDisableFilters rewrite_images
+
+ # Explicitly enables specific filters. This is useful in
+  # conjunction with ModPagespeedRewriteLevel.  For instance, filters
+ # not included in the CoreFilters may be enabled using this
+ # directive. This directive contains a comma-separated list of
+ # filter names, and can be repeated.
+ #
+ # ModPagespeedEnableFilters rewrite_javascript,rewrite_css
+ # ModPagespeedEnableFilters collapse_whitespace,elide_attributes
+
+ # Explicitly forbids the enabling of specific filters using either query
+ # parameters or request headers. This is useful, for example, when we do
+ # not want the filter to run for performance or security reasons. This
+ # directive contains a comma-separated list of filter names, and can be
+ # repeated.
+ #
+ # ModPagespeedForbidFilters rewrite_images
+
+ # How long mod_pagespeed will wait to return an optimized resource
+ # (per flush window) on first request before giving up and returning the
+ # original (unoptimized) resource. After this deadline is exceeded the
+ # original resource is returned and the optimization is pushed to the
+ # background to be completed for future requests. Increasing this value will
+ # increase page latency, but might reduce load time (for instance on a
+ # bandwidth-constrained link where it's worth waiting for image
+ # compression to complete). If the value is less than or equal to zero
+ # mod_pagespeed will wait indefinitely for the rewrite to complete before
+ # returning.
+ #
+ # ModPagespeedRewriteDeadlinePerFlushMs 10
+
+ # ModPagespeedDomain
+ # authorizes rewriting of JS, CSS, and Image files found in this
+ # domain. By default only resources with the same origin as the
+ # HTML file are rewritten. For example:
+ #
+ # ModPagespeedDomain cdn.myhost.com
+ #
+ # This will allow resources found on http://cdn.myhost.com to be
+ # rewritten in addition to those in the same domain as the HTML.
+ #
+ # Other domain-related directives (like ModPagespeedMapRewriteDomain
+ # and ModPagespeedMapOriginDomain) can also authorize domains.
+ #
+ # Wildcards (* and ?) are allowed in the domain specification. Be
+ # careful when using them as if you rewrite domains that do not
+ # send you traffic, then the site receiving the traffic will not
+ # know how to serve the rewritten content.
+
+ # If you use downstream caches such as varnish or proxy_cache for caching
+ # HTML, you can configure pagespeed to work with these caches correctly
+ # using the following directives. Note that the values for
+ # ModPagespeedDownstreamCachePurgeLocationPrefix and
+ # ModPagespeedDownstreamCacheRebeaconingKey are deliberately left empty here
+ # in order to force the webmaster to choose appropriate value for these.
+ #
+ # ModPagespeedDownstreamCachePurgeLocationPrefix
+ # ModPagespeedDownstreamCachePurgeMethod PURGE
+ # ModPagespeedDownstreamCacheRewrittenPercentageThreshold 95
+ # ModPagespeedDownstreamCacheRebeaconingKey
+
+ # Other defaults (cache sizes and thresholds):
+ #
+ # ModPagespeedFileCacheSizeKb 102400
+ # ModPagespeedFileCacheCleanIntervalMs 3600000
+ # ModPagespeedLRUCacheKbPerProcess 1024
+ # ModPagespeedLRUCacheByteLimit 16384
+ # ModPagespeedCssFlattenMaxBytes 102400
+ # ModPagespeedCssInlineMaxBytes 2048
+ # ModPagespeedCssImageInlineMaxBytes 0
+ # ModPagespeedImageInlineMaxBytes 3072
+ # ModPagespeedJsInlineMaxBytes 2048
+ # ModPagespeedCssOutlineMinBytes 3000
+ # ModPagespeedJsOutlineMinBytes 3000
+ # ModPagespeedMaxCombinedCssBytes -1
+ # ModPagespeedMaxCombinedJsBytes 92160
+
+ # Limit the number of inodes in the file cache. Set to 0 for no limit.
+  # The default value if this parameter is not specified is 0 (no limit).
+ ModPagespeedFileCacheInodeLimit 500000
+
+ # Bound the number of images that can be rewritten at any one time; this
+ # avoids overloading the CPU. Set this to 0 to remove the bound.
+ #
+ # ModPagespeedImageMaxRewritesAtOnce 8
+
+ # You can also customize the number of threads per Apache process
+ # mod_pagespeed will use to do resource optimization. Plain
+ # "rewrite threads" are used to do short, latency-sensitive work,
+ # while "expensive rewrite threads" are used for actual optimization
+  # work that's more computationally expensive.  If you leave these unset,
+ # or use values <= 0 the defaults will be used, which is 1 for both
+ # values when using non-threaded MPMs (e.g. prefork) and 4 for both
+ # on threaded MPMs (e.g. worker and event). These settings can only
+ # be changed globally, and not per virtual host.
+ #
+ # ModPagespeedNumRewriteThreads 4
+ # ModPagespeedNumExpensiveRewriteThreads 4
+
+ # Randomly drop rewrites (*) to increase the chance of optimizing
+ # frequently fetched resources and decrease the chance of optimizing
+ # infrequently fetched resources. This can reduce CPU load. The default
+  # value of this parameter is 0 (no drops). 90 means that a resource
+ # fetched once has a 10% probability of being optimized while a resource
+ # that is fetched 50 times has a 99.65% probability of being optimized.
+ #
+ # (*) Currently only CSS files and images are randomly dropped. Images
+ # within CSS files are not randomly dropped.
+ #
+ # ModPagespeedRewriteRandomDropPercentage 90
+
+ # Many filters modify the URLs of resources in HTML files. This is typically
+ # harmless but pages whose Javascript expects to read or modify the original
+ # URLs may break. The following parameters prevent filters from modifying
+ # URLs of their respective types.
+ #
+ # ModPagespeedJsPreserveURLs on
+ # ModPagespeedImagePreserveURLs on
+ # ModPagespeedCssPreserveURLs on
+
+ # When PreserveURLs is on, it is still possible to enable browser-specific
+ # optimizations (for example, webp images can be served to browsers that
+ # will accept them). They'll be served with Vary: Accept or Vary:
+ # User-Agent headers as appropriate. Note that this may require configuring
+ # reverse proxy caches such as varnish to handle these headers properly.
+ #
+ # ModPagespeedFilters in_place_optimize_for_browser
+
+ # Internet Explorer has difficulty caching resources with Vary: headers.
+ # They will either be uncached (older IE) or require revalidation. See:
+ # http://blogs.msdn.com/b/ieinternals/archive/2009/06/17/vary-header-prevents-caching-in-ie.aspx
+ # As a result we serve them as Cache-Control: private instead by default.
+ # If you are using a reverse proxy or CDN configured to cache content with
+ # the Vary: Accept header you should turn this setting off.
+ #
+ # ModPagespeedPrivateNotVaryForIE on
+
+ # Settings for image optimization:
+ #
+ # Lossy image recompression quality (0 to 100, -1 just strips metadata):
+ # ModPagespeedImageRecompressionQuality 85
+ #
+ # Jpeg recompression quality (0 to 100, -1 uses ImageRecompressionQuality):
+ # ModPagespeedJpegRecompressionQuality -1
+ # ModPagespeedJpegRecompressionQualityForSmallScreens 70
+ #
+ # WebP recompression quality (0 to 100, -1 uses ImageRecompressionQuality):
+ # ModPagespeedWebpRecompressionQuality 80
+ # ModPagespeedWebpRecompressionQualityForSmallScreens 70
+ #
+ # Timeout for conversions to WebP format, in
+ # milliseconds. Negative values mean no timeout is applied. The
+ # default value is -1:
+ # ModPagespeedWebpTimeoutMs 5000
+ #
+ # Percent of original image size below which optimized images are retained:
+ # ModPagespeedImageLimitOptimizedPercent 100
+ #
+ # Percent of original image area below which image resizing will be
+ # attempted:
+ # ModPagespeedImageLimitResizeAreaPercent 100
+
+ # Settings for inline preview images
+ #
+ # Setting this to n restricts preview images to the first n images found on
+ # the page. The default of -1 means preview images can appear anywhere on
+ # the page (if those images appear above the fold).
+ # ModPagespeedMaxInlinedPreviewImagesIndex -1
+
+ # Sets the minimum size in bytes of any image for which a low quality image
+ # is generated.
+ # ModPagespeedMinImageSizeLowResolutionBytes 3072
+
+ # The maximum URL size is generally limited to about 2k characters
+ # due to IE: See http://support.microsoft.com/kb/208427/EN-US.
+ # Apache servers by default impose a further limitation of about
+ # 250 characters per URL segment (text between slashes).
+ # mod_pagespeed circumvents this limitation, but if you employ
+ # proxy servers in your path you may need to re-impose it by
+ # overriding the setting here. The default setting is 1024
+ # characters.
+ #
+ # ModPagespeedMaxSegmentLength 250
+
+ # Uncomment this if you want to prevent mod_pagespeed from combining files
+ # (e.g. CSS files) across paths
+ #
+ # ModPagespeedCombineAcrossPaths off
+
+ # Renaming JavaScript URLs can sometimes break them. With this
+ # option enabled, mod_pagespeed uses a simple heuristic to decide
+ # not to rename JavaScript that it thinks is introspective.
+ #
+ # You can uncomment this to let mod_pagespeed rename all JS files.
+ #
+ # ModPagespeedAvoidRenamingIntrospectiveJavascript off
+
+ # Certain common JavaScript libraries are available from Google, which acts
+ # as a CDN and allows you to benefit from browser caching if a new visitor
+ # to your site previously visited another site that makes use of the same
+ # libraries as you do. Enable the following filter to turn on this feature.
+ #
+ # ModPagespeedEnableFilters canonicalize_javascript_libraries
+
+ # The following line configures a library that is recognized by
+ # canonicalize_javascript_libraries. This will have no effect unless you
+ # enable this filter (generally by uncommenting the last line in the
+ # previous stanza). The format is:
+ # ModPagespeedLibrary bytes md5 canonical_url
+ # Where bytes and md5 are with respect to the *minified* JS; use
+ # js_minify --print_size_and_hash to obtain this data.
+ # Note that we can register multiple hashes for the same canonical url;
+ # we do this if there are versions available that have already been minified
+ # with more sophisticated tools.
+ #
+ # Additional library configuration can be found in
+ # pagespeed_libraries.conf included in the distribution. You should add
+ # new entries here, though, so that file can be automatically upgraded.
+ # ModPagespeedLibrary 43 1o978_K0_LNE5_ystNklf http://www.modpagespeed.com/rewrite_javascript.js
+
+ # Explicitly tell mod_pagespeed to load some resources from disk.
+ # This will speed up load time and update frequency.
+ #
+ # This should only be used for static resources which do not need
+ # specific headers set or other processing by Apache.
+ #
+ # Both URL and filesystem path should specify directories and
+ # filesystem path must be absolute (for now).
+ #
+ # ModPagespeedLoadFromFile "http://example.com/static/" "/var/www/static/"
+
+
+ # Enables server-side instrumentation and statistics. If this rewriter is
+  # enabled, then each rewritten HTML page will have instrumentation javascript
+ # added that sends latency beacons to /mod_pagespeed_beacon. These
+ # statistics can be accessed at /mod_pagespeed_statistics. You must also
+ # enable the mod_pagespeed_statistics and mod_pagespeed_beacon handlers
+ # below.
+ #
+ # ModPagespeedEnableFilters add_instrumentation
+
+ # The add_instrumentation filter sends a beacon after the page onload
+ # handler is called. The user might navigate to a new URL before this. If
+ # you enable the following directive, the beacon is sent as part of an
+ # onbeforeunload handler, for pages where navigation happens before the
+ # onload event.
+ #
+ # ModPagespeedReportUnloadTime on
+
+ # Uncomment the following line so that ModPagespeed will not cache or
+ # rewrite resources with Vary: in the header, e.g. Vary: User-Agent.
+ # Note that ModPagespeed always respects Vary: headers on html content.
+ # ModPagespeedRespectVary on
+
+ # Uncomment the following line if you want to disable statistics entirely.
+ #
+ # ModPagespeedStatistics off
+
+ # These handlers are central entry-points into the admin pages.
+ # By default, pagespeed_admin and pagespeed_global_admin present
+ # the same data, and differ only when
+ # ModPagespeedUsePerVHostStatistics is enabled. In that case,
+ # /pagespeed_global_admin sees aggregated data across all vhosts,
+ # and the /pagespeed_admin sees data only for a particular vhost.
+ #
+ # You may insert other "Allow from" lines to add hosts you want to
+ # allow to look at generated statistics. Another possibility is
+ # to comment out the "Order" and "Allow" options from the config
+ # file, to allow any client that can reach your server to access
+ # and change server state, such as statistics, caches, and
+ # messages. This might be appropriate in an experimental setup.
+ <Location /pagespeed_admin>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler pagespeed_admin
+ </Location>
+ <Location /pagespeed_global_admin>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler pagespeed_global_admin
+ </Location>
+
+ # Enable logging of mod_pagespeed statistics, needed for the console.
+ ModPagespeedStatisticsLogging on
+
+ # Page /mod_pagespeed_message lets you view the latest messages from
+ # mod_pagespeed, regardless of log-level in your httpd.conf
+ # ModPagespeedMessageBufferSize is the maximum number of bytes you would
+ # like to dump to your /mod_pagespeed_message page at one time,
+ # its default value is 100k bytes.
+ # Set it to 0 if you want to disable this feature.
+ ModPagespeedMessageBufferSize 100000
+</IfModule>
diff --git a/src/install/common/pagespeed.load.template b/src/install/common/pagespeed.load.template
new file mode 100644
index 0000000..ecfaf8f
--- /dev/null
+++ b/src/install/common/pagespeed.load.template
@@ -0,0 +1,24 @@
+# Attempt to load mod_version if it wasn't loaded or compiled in (eg on Debian)
+<IfModule !mod_version.c>
+ LoadModule version_module @@APACHE_MODULEDIR@@/mod_version.so
+</IfModule>
+
+<IfVersion < 2.4>
+ LoadModule pagespeed_module @@APACHE_MODULEDIR@@/mod_pagespeed.so
+</IfVersion>
+<IfVersion >= 2.4.2>
+ # As default pagespeed.conf configuration uses old-style ACLs using
+ # 'allow' and 'deny', we load mod_access_compat in Apache 2.4. If that's a
+ # problem, the blocks using that can be converted to the newer 'Require'
+ # syntax.
+ <IfModule !access_compat_module>
+ LoadModule access_compat_module @@APACHE_MODULEDIR@@/mod_access_compat.so
+ </IfModule>
+
+ LoadModule pagespeed_module @@APACHE_MODULEDIR@@/mod_pagespeed_ap24.so
+</IfVersion>
+
+# Only attempt to load mod_deflate if it hasn't been loaded already.
+<IfModule !mod_deflate.c>
+@@COMMENT_OUT_DEFLATE@@ LoadModule deflate_module @@APACHE_MODULEDIR@@/mod_deflate.so
+</IfModule>
diff --git a/src/install/common/repo.cron b/src/install/common/repo.cron
new file mode 100644
index 0000000..fbb3e74
--- /dev/null
+++ b/src/install/common/repo.cron
@@ -0,0 +1,42 @@
+#!/bin/sh
+#
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script is part of the @@PACKAGE@@ package.
+#
+# It creates the repository configuration file for package updates, and it
+# monitors that config to see if it has been disabled by the overly aggressive
+# distro upgrade process (e.g. intrepid -> jaunty). When this situation is
+# detected, the repository will be re-enabled.  If the repository is disabled
+# for any other reason, this won't re-enable it.
+#
+# This functionality can be controlled by creating the $DEFAULTS_FILE and
+# setting "repo_add_once" and/or "repo_reenable_on_distupgrade" to "true" or
+# "false" as desired. An empty $DEFAULTS_FILE is the same as setting both values
+# to "false".
+
+@@include@@apt.include
+
+## MAIN ##
+DEFAULTS_FILE="/etc/default/@@PACKAGE@@"
+if [ -r "$DEFAULTS_FILE" ]; then
+ . "$DEFAULTS_FILE"
+fi
+
+if [ "$repo_add_once" = "true" ]; then
+ install_key
+ create_sources_lists
+ RES=$?
+ # Sources creation succeeded, so stop trying.
+ if [ $RES -ne 2 ]; then
+ sed -i -e 's/[[:space:]]*repo_add_once=.*/repo_add_once="false"/' "$DEFAULTS_FILE"
+ fi
+else
+ update_bad_sources
+fi
+
+if [ "$repo_reenable_on_distupgrade" = "true" ]; then
+ handle_distro_upgrade
+fi
diff --git a/src/install/common/rpm.include b/src/install/common/rpm.include
new file mode 100644
index 0000000..6e2b05b
--- /dev/null
+++ b/src/install/common/rpm.include
@@ -0,0 +1,375 @@
+@@include@@variables.include
+
+# Install the repository signing key (see also:
+# http://www.google.com/linuxrepositories/aboutkey.html)
+install_rpm_key() {
+ # Check to see if key already exists.
+ rpm -q gpg-pubkey-7fac5991-4615767f > /dev/null 2>&1
+ if [ "$?" -eq "0" ]; then
+ # Key already exists
+ return 0
+ fi
+ # This is to work around a bug in RPM 4.7.0. (see http://crbug.com/22312)
+ rpm -q gpg-pubkey-7fac5991-45f06f46 > /dev/null 2>&1
+ if [ "$?" -eq "0" ]; then
+ # Key already exists
+ return 0
+ fi
+
+ # RPM on Mandriva 2009 is dumb and does not understand "rpm --import -"
+ TMPKEY=$(mktemp /tmp/google.sig.XXXXXX)
+ if [ -n "$TMPKEY" ]; then
+ cat > "$TMPKEY" <<KEYDATA
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1.4.2.2 (GNU/Linux)
+
+mQGiBEXwb0YRBADQva2NLpYXxgjNkbuP0LnPoEXruGmvi3XMIxjEUFuGNCP4Rj/a
+kv2E5VixBP1vcQFDRJ+p1puh8NU0XERlhpyZrVMzzS/RdWdyXf7E5S8oqNXsoD1z
+fvmI+i9b2EhHAA19Kgw7ifV8vMa4tkwslEmcTiwiw8lyUl28Wh4Et8SxzwCggDcA
+feGqtn3PP5YAdD0km4S4XeMEAJjlrqPoPv2Gf//tfznY2UyS9PUqFCPLHgFLe80u
+QhI2U5jt6jUKN4fHauvR6z3seSAsh1YyzyZCKxJFEKXCCqnrFSoh4WSJsbFNc4PN
+b0V0SqiTCkWADZyLT5wll8sWuQ5ylTf3z1ENoHf+G3um3/wk/+xmEHvj9HCTBEXP
+78X0A/0Tqlhc2RBnEf+AqxWvM8sk8LzJI/XGjwBvKfXe+l3rnSR2kEAvGzj5Sg0X
+4XmfTg4Jl8BNjWyvm2Wmjfet41LPmYJKsux3g0b8yzQxeOA4pQKKAU3Z4+rgzGmf
+HdwCG5MNT2A5XxD/eDd+L4fRx0HbFkIQoAi1J3YWQSiTk15fw7RMR29vZ2xlLCBJ
+bmMuIExpbnV4IFBhY2thZ2UgU2lnbmluZyBLZXkgPGxpbnV4LXBhY2thZ2VzLWtl
+eW1hc3RlckBnb29nbGUuY29tPohjBBMRAgAjAhsDBgsJCAcDAgQVAggDBBYCAwEC
+HgECF4AFAkYVdn8CGQEACgkQoECDD3+sWZHKSgCfdq3HtNYJLv+XZleb6HN4zOcF
+AJEAniSFbuv8V5FSHxeRimHx25671az+uQINBEXwb0sQCACuA8HT2nr+FM5y/kzI
+A51ZcC46KFtIDgjQJ31Q3OrkYP8LbxOpKMRIzvOZrsjOlFmDVqitiVc7qj3lYp6U
+rgNVaFv6Qu4bo2/ctjNHDDBdv6nufmusJUWq/9TwieepM/cwnXd+HMxu1XBKRVk9
+XyAZ9SvfcW4EtxVgysI+XlptKFa5JCqFM3qJllVohMmr7lMwO8+sxTWTXqxsptJo
+pZeKz+UBEEqPyw7CUIVYGC9ENEtIMFvAvPqnhj1GS96REMpry+5s9WKuLEaclWpd
+K3krttbDlY1NaeQUCRvBYZ8iAG9YSLHUHMTuI2oea07Rh4dtIAqPwAX8xn36JAYG
+2vgLAAMFB/wKqaycjWAZwIe98Yt0qHsdkpmIbarD9fGiA6kfkK/UxjL/k7tmS4Vm
+CljrrDZkPSQ/19mpdRcGXtb0NI9+nyM5trweTvtPw+HPkDiJlTaiCcx+izg79Fj9
+KcofuNb3lPdXZb9tzf5oDnmm/B+4vkeTuEZJ//IFty8cmvCpzvY+DAz1Vo9rA+Zn
+cpWY1n6z6oSS9AsyT/IFlWWBZZ17SpMHu+h4Bxy62+AbPHKGSujEGQhWq8ZRoJAT
+G0KSObnmZ7FwFWu1e9XFoUCt0bSjiJWTIyaObMrWu/LvJ3e9I87HseSJStfw6fki
+5og9qFEkMrIrBCp3QGuQWBq/rTdMuwNFiEkEGBECAAkFAkXwb0sCGwwACgkQoECD
+D3+sWZF/WACfeNAu1/1hwZtUo1bR+MWiCjpvHtwAnA1R3IHqFLQ2X3xJ40XPuAyY
+/FJG
+=Quqp
+-----END PGP PUBLIC KEY BLOCK-----
+-----BEGIN PGP PUBLIC KEY BLOCK-----
+Version: GnuPG v1
+
+mQINBFcMjNMBEAC6Wr5QuLIFgz1V1EFPlg8ty2TsjQEl4VWftUAqWlMevJFWvYEx
+BOsOZ6kNFfBfjAxgJNWTkxZrHzDl74R7KW/nUx6X57bpFjUyRaB8F3/NpWKSeIGS
+pJT+0m2SgUNhLAn1WY/iNJGNaMl7lgUnaP+/ZsSNT9hyTBiH3Ev5VvAtMGhVI/u8
+P0EtTjXp4o2U+VqFTBGmZ6PJVhCFjZUeRByloHw8dGOshfXKgriebpioHvU8iQ2U
+GV3WNIirB2Rq1wkKxXJ/9Iw+4l5m4GmXMs7n3XaYQoBj28H86YA1cYWSm5LR5iU2
+TneI1fJ3vwF2vpSXVBUUDk67PZhg6ZwGRT7GFWskC0z8PsWd5jwK20mA8EVKq0vN
+BFmMK6i4fJU+ux17Rgvnc9tDSCzFZ1/4f43EZ41uTmmNXIDsaPCqwjvSS5ICadt2
+xeqTWDlzONUpOs5yBjF1cfJSdVxsfshvln2JXUwgIdKl4DLbZybuNFXnPffNLb2v
+PtRJHO48O2UbeXS8n27PcuMoLRd7+r7TsqG2vBH4t/cB/1vsvWMbqnQlaJ5VsjeW
+Tp8Gv9FJiKuU8PKiWsF4EGR/kAFyCB8QbJeQ6HrOT0CXLOaYHRu2TvJ4taY9doXn
+98TgU03XTLcYoSp49cdkkis4K+9hd2dUqARVCG7UVd9PY60VVCKi47BVKQARAQAB
+tFRHb29nbGUgSW5jLiAoTGludXggUGFja2FnZXMgU2lnbmluZyBBdXRob3JpdHkp
+IDxsaW51eC1wYWNrYWdlcy1rZXltYXN0ZXJAZ29vZ2xlLmNvbT6JAjgEEwECACIF
+AlcMjNMCGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJEHch9jvTi0eW5CAP
+/RELE/OAoA4o1cMBxJsljWgCgDig2Ge91bFCN0vExLcP0iByra7qPWJowXDJ5sCj
+UBnCkrxGo5D15U7cW5FC0+qWU73q0AuG3OjKDQ49ecdRkYHwcvwWQvT5Lz3DwOGW
+4armfEuzWXcUDeShR7AgfcTq+Pfoo3dHqdB8TmtNySu/AdJFmVH/xTiWYWrOSibh
+yLuaSW/0cTkHW0GDk06MlDkcdkTzhO5GMDO7PUxBgCysTXFR0T9TVWDo9VwvuMww
+2pE5foleA0X6PD/6GQpy3aX2xry8rhFvYplEa5zwXhqsscdKXlp1ZPZ4PMvvwe49
+5mY9n/1Rx1TmMvIcLHKP61sURMOve97Gipk/iD6oaeeT8I0khexHCQy7JMROoPMr
+z5onVOt2rAGZScIZsm5FYGSt9eDKBWI6qpJ/5QoVhkRWjOXOchZlJHo+kLdg6jq2
+vOnIlFnXo0p6Rqf/IEq5PMh70vVZpk4tNYNy4zRx03ZTA9qXRLW+ftxSQIYMY5eC
+Z31lqSH4EjqgtUG+zn2A6juKayb1nkt2O3F1wWOm6oTzNsAP5LdReJRlw151Jp4U
+4ftGtw7ygq+nvokXL7YLuu8sbFqfFXcTPrAZa5M9gnC7GCnIQyF/WvqUnrcaC1jp
+qBc+pkSJhROhN12QY8Po8AT8/UaUh/dPIiW5A4o8pOPEiEYEEBECAAYFAlcNtn8A
+CgkQoECDD3+sWZGy3wCfWTMZWsipX+yG/VB4Q1FunIfEVHYAnimEXCjZ3IVyy5F1
+yU36PihDCjWqiEYEEBECAAYFAlcNtvEACgkQMUcsOzG36APnRwCeJ/bfGf8FBa4q
+5TMw8p1GS1jWT5EAn2sc02481HHdTmZiW/CGWXmgE+OPuQINBFcMjcgBEACrL9gH
+hdr6gQX4ZMA5slp628xOrHCsdLO54WNdPRKeFHXJqSSJi3fs8FxBWI4FnejeKUGb
+F+MrOlFpKqELxaMje7bwZyap3izztZHszP3YmOoTBJvREGKdCkL82cLsChYD/Prg
+E8crvkhSnq9evcsKAnziMxg/wDCChUL3Evqo29BeoB81f+E9wkrUTMCT/kVxt3pG
+RalKX0UhrtKrpm8yRfjufJfwjkdwgvinkRGZ2GrWHj4LzMbi9/udYaJZ66Yw0hEU
+4USxUB9vNtmSFrb4EB91T2rhc68dgQ4jYBI7K4Ebb8XaWAxb+IAq31l1UkiEA32F
+4qUMoL6rChB4y6nHxOnTvs+XEb5TBwXVogjLRKTQs5U/HV9l7j+HAchk5y3im2N2
+UKmMxHqotvPZZUZPdaCRxUedQf9gR0yLZV+U9BcDuwjzL/zjrthNZYlEGJ6HZ/TL
+STp4dDH+uXuLqMVWy5iquKtnbrnNTQtv5twD+Ajpgy60YLOJ9YaiJ4GjifOpzSk8
+3e1rJ3p/pX6B5NWQinVLZJzxyeOoh3iMjdmCDSnEXLrCmYv5g6jyV/Wbd4GYFuMK
+8TT7+PQdWLcbZ/Lxc5w0s+c7+f5OfmKXO5KPHnnUsrF5DBaKRPjScpwePQitxeIg
+lUgEMDkNruBhu1PzCxd3BtXgu++K3WdoH3VcgwARAQABiQREBBgBAgAPBQJXDI3I
+AhsCBQkFo5qAAikJEHch9jvTi0eWwV0gBBkBAgAGBQJXDI3IAAoJEBOXvFNkDbVR
+QSYP/0Ewr3T7e0soTz8g4QJLLVqZDZdX8Iez04idNHuvAu0AwdZ2wl0C+tMkD7l4
+R2aI6BKe/9wPndk/NJe+ZYcD/uzyiKIJQD48PrifNnwvHu9A80rE4BppQnplENeh
+ibbWaGNJQONGFJx7QTYlFjS5LNlG1AX6mQjxvb423zOWSOmEamYXYBmYyMG6vkr/
+XTPzsldky8XFuPrJUZslL/Wlx31XQ1IrtkHHOYqWwr0hTc50/2O8H0ewl/dBZLq3
+EminZZ+tsTugof0j4SbxYhplw99nGwbN1uXy4L8/dWOUXnY5OgaTKZPF15zRMxXN
+9FeylBVYpp5kzre/rRI6mQ2lafYHdbjvd7ryHF5JvYToSDXd0mzF2nLzm6jwsO84
+7ZNd5GdTD6/vcef1IJta1nSwA/hhLtgtlz6/tNncp3lEdCjAMx29jYPDX+Lqs9JA
+xcJHufr82o6wM9TF24Q8ra8NbvB63odVidCfiHoOsIFDUrazH8XuaQzyZkI0bbzL
+mgMAvMO6u1zPfe/TK6LdJg7AeAKScOJS38D5mmwaD1bABr67ebA/X5HdaomSDKVd
+UYaewfTGBIsrWmCmKpdb+WfX4odFpNzXW/qskiBp5WSesKvN1QUkLJZDZD1kz2++
+Xul5B97s5LxLTLRwvgLoNaUFr3lnejzNLgdBpf6FnkA59syRUuIP/jiAZ2uJzXVK
+PeRJqMGL+Ue2HiVEe8ima3SQIceqW8jKS7c7Nic6dMWxgnDpk5tJmVjrgfc0a9c1
+FY4GomUBbZFj+j73+WRk3EaVKIsty+xz48+rlJjdYFVCJo0Jp67jjjXOt6EOHTni
+OA/ANtzRIzDMnWrwJZ7AxCGJ4YjLShkcRM9S30X0iuAkxNILX++SNOd8aqc2bFof
+yTCkcbk6CIc1W00vffv1QGTNjstNpVSl9+bRmlJDqJWnDGk5Nl4Ncqd8X51V0tYE
+g6WEK4OM83wx5Ew/TdTRq5jJkbCu2GYNaNNNgXW7bXSvT5VINbuP6dmbi1/8s0jK
+JQOEBI3RxxoB+01Dgx9YdNfjsCM3hvQvykaWMALeZIpzbXxV118Y9QQUIRe2L+4X
+ZACEAhWjj2K1wP7ODGTQrrM4q4sIw1l3l7yO9aXXN7likAAddT4WEpGV0CiorReO
+J1y/sKJRJSI/npN1UK7wMazZ+yzhxN0qzG8sqREKJQnNuuGQQ/qIGb/oe4dPO0Fi
+hAUGkWoa0bgtGVijN5fQSbMbV50kZYqaa9GnNQRnchmZb+pK2xLcK85hD1np37/A
+m5o2ggoONj3qI3JaRHsZaOs1qPQcyd46OyIFUpHJIfk4nezDCoQYd93bWUGqDwxI
+/n/CsdO0365yqDO/ADscehlVqdAupVv2
+=dmoF
+-----END PGP PUBLIC KEY BLOCK-----
+KEYDATA
+ rpm --import "$TMPKEY"
+ rc=$?
+ rm -f "$TMPKEY"
+ if [ "$rc" -eq "0" ]; then
+ return 0
+ fi
+ fi
+ return 1
+}
+
+determine_rpm_package_manager() {
+  local RELEASE
+  LSB_RELEASE="$(which lsb_release 2> /dev/null)"
+  if [ -x "$LSB_RELEASE" ]; then
+    # "lsb_release -i" prints "Distributor ID:<TAB>Fedora"; keep field 2 only.
+    RELEASE=$(lsb_release -i 2> /dev/null | cut -f 2)
+    case $RELEASE in
+    "Fedora")
+      PACKAGEMANAGER=yum
+      ;;
+    "MandrivaLinux")
+      PACKAGEMANAGER=urpmi
+      ;;
+    "SUSE LINUX")
+      PACKAGEMANAGER=yast
+      ;;
+    esac
+  fi
+
+  if [ "$PACKAGEMANAGER" ]; then
+    return
+  fi
+
+  # Fallback methods that are probably unnecessary on modern systems.
+  if [ -f "/etc/lsb-release" ]; then
+    # file missing on Fedora, does not contain DISTRIB_ID on OpenSUSE.
+    eval $(sed -e '/DISTRIB_ID/!d' /etc/lsb-release)
+    case $DISTRIB_ID in
+    MandrivaLinux)
+      PACKAGEMANAGER=urpmi
+      ;;
+    esac
+  fi
+
+  if [ "$PACKAGEMANAGER" ]; then
+    return
+  fi
+
+  if [ -f "/etc/fedora-release" ] || [ -f "/etc/redhat-release" ]; then
+    PACKAGEMANAGER=yum
+  elif [ -f "/etc/system-release" ] && \
+       grep -q "Amazon Linux AMI" /etc/system-release ; then
+    PACKAGEMANAGER=yum
+  elif [ -f "/etc/SuSE-release" ]; then
+    PACKAGEMANAGER=yast
+  elif [ -f "/etc/mandriva-release" ]; then
+    PACKAGEMANAGER=urpmi
+  fi
+}
+
+DEFAULT_ARCH="@@ARCHITECTURE@@"
+YUM_REPO_FILE="/etc/yum.repos.d/@@PACKAGE@@.repo"
+ZYPPER_REPO_FILE="/etc/zypp/repos.d/@@PACKAGE@@.repo"
+URPMI_REPO_FILE="/etc/urpmi/urpmi.cfg"
+
+install_yum() {
+ install_rpm_key
+
+ if [ ! "$REPOCONFIG" ]; then
+ return 0
+ fi
+
+ if [ -d "/etc/yum.repos.d" ]; then
+cat > "$YUM_REPO_FILE" << REPOCONTENT
+[@@PACKAGE@@]
+name=@@PACKAGE@@
+baseurl=$REPOCONFIG/$DEFAULT_ARCH
+enabled=1
+gpgcheck=1
+REPOCONTENT
+ fi
+}
+
+# This is called by the cron job, rather than in the RPM postinstall.
+# We cannot do this during the install when urpmi is running due to
+# database locking. We also need to enable the repository, and we can
+# only do that while we are online.
+# see: https://qa.mandriva.com/show_bug.cgi?id=31893
+configure_urpmi() {
+ if [ ! "$REPOCONFIG" ]; then
+ return 0
+ fi
+
+ urpmq --list-media | grep -q -s "^@@PACKAGE@@$"
+ if [ "$?" -eq "0" ]; then
+ # Repository already configured
+ return 0
+ fi
+ urpmi.addmedia --update \
+ "@@PACKAGE@@" "$REPOCONFIG/$DEFAULT_ARCH"
+}
+
+install_urpmi() {
+ # urpmi not smart enough to pull media_info/pubkey from the repository?
+ install_rpm_key
+
+ # Defer urpmi.addmedia to configure_urpmi() in the cron job.
+ # See comment there.
+ #
+ # urpmi.addmedia --update \
+ # "@@PACKAGE@@" "$REPOCONFIG/$DEFAULT_ARCH"
+}
+
+install_yast() {
+ if [ ! "$REPOCONFIG" ]; then
+ return 0
+ fi
+
+ # We defer adding the key to later. See comment in the cron job.
+
+ # Ideally, we would run: zypper addrepo -t YUM -f \
+ # "$REPOCONFIG/$DEFAULT_ARCH" "@@PACKAGE@@"
+ # but that does not work when zypper is running.
+ if [ -d "/etc/zypp/repos.d" ]; then
+cat > "$ZYPPER_REPO_FILE" << REPOCONTENT
+[@@PACKAGE@@]
+name=@@PACKAGE@@
+enabled=1
+autorefresh=1
+baseurl=$REPOCONFIG/$DEFAULT_ARCH
+type=rpm-md
+keeppackages=0
+REPOCONTENT
+ fi
+}
+
+# Check if the automatic repository configuration is done, so we know when to
+# stop trying.
+verify_install() {
+ # It's probably enough to see that the repo configs have been created. If they
+ # aren't configured properly, update_bad_repo should catch that when it's run.
+ case $1 in
+ "yum")
+ [ -f "$YUM_REPO_FILE" ]
+ ;;
+ "yast")
+ [ -f "$ZYPPER_REPO_FILE" ]
+ ;;
+ "urpmi")
+ urpmq --list-url | grep -q -s "\b@@PACKAGE@@\b"
+ ;;
+ esac
+}
+
+# Update the Google repository if it's not set correctly.
+update_bad_repo() {
+ if [ ! "$REPOCONFIG" ]; then
+ return 0
+ fi
+
+ determine_rpm_package_manager
+
+ case $PACKAGEMANAGER in
+ "yum")
+ update_repo_file "$YUM_REPO_FILE"
+ ;;
+ "yast")
+ update_repo_file "$ZYPPER_REPO_FILE"
+ ;;
+ "urpmi")
+ update_urpmi_cfg
+ ;;
+ esac
+}
+
+update_repo_file() {
+ REPO_FILE="$1"
+
+ # Don't do anything if the file isn't there, since that probably means the
+ # user disabled it.
+ if [ ! -r "$REPO_FILE" ]; then
+ return 0
+ fi
+
+ # Check if the correct repository configuration is in there.
+ REPOMATCH=$(grep "^baseurl=$REPOCONFIG/$DEFAULT_ARCH" "$REPO_FILE" \
+ 2>/dev/null)
+ # If it's there, nothing to do
+ if [ "$REPOMATCH" ]; then
+ return 0
+ fi
+
+ # Check if it's there but disabled by commenting out (as opposed to using the
+ # 'enabled' setting).
+ MATCH_DISABLED=$(grep "^[[:space:]]*#.*baseurl=$REPOCONFIG/$DEFAULT_ARCH" \
+ "$REPO_FILE" 2>/dev/null)
+ if [ "$MATCH_DISABLED" ]; then
+ # It's OK for it to be disabled, as long as nothing bogus is enabled in its
+ # place.
+ ACTIVECONFIGS=$(grep "^baseurl=.*" "$REPO_FILE" 2>/dev/null)
+ if [ ! "$ACTIVECONFIGS" ]; then
+ return 0
+ fi
+ fi
+
+ # If we get here, the correct repository wasn't found, or something else is
+ # active, so fix it. This assumes there is a 'baseurl' setting, but if not,
+ # then that's just another way of disabling, so we won't try to add it.
+ sed -i -e "s,^baseurl=.*,baseurl=$REPOCONFIG/$DEFAULT_ARCH," "$REPO_FILE"
+}
+
+update_urpmi_cfg() {
+ REPOCFG=$(urpmq --list-url | grep "\b@@PACKAGE@@\b")
+ if [ ! "$REPOCFG" ]; then
+ # Don't do anything if the repo isn't there, since that probably means the
+ # user deleted it.
+ return 0
+ fi
+
+ # See if it's the right repo URL
+ REPOMATCH=$(echo "$REPOCFG" | grep "\b$REPOCONFIG/$DEFAULT_ARCH\b")
+ # If so, nothing to do
+ if [ "$REPOMATCH" ]; then
+ return 0
+ fi
+
+ # Looks like it's the wrong URL, so recreate it.
+ urpmi.removemedia "@@PACKAGE@@" && \
+ urpmi.addmedia --update "@@PACKAGE@@" "$REPOCONFIG/$DEFAULT_ARCH"
+}
+
+# We only remove the repository configuration during a purge. Since RPM has
+# no equivalent to dpkg --purge, the code below is actually never used. We
+# keep it only for reference purposes, should we ever need it.
+#
+#remove_yum() {
+# rm -f "$YUM_REPO_FILE"
+#}
+#
+#remove_urpmi() {
+# # Ideally, we would run: urpmi.removemedia "@@PACKAGE@@"
+# # but that does not work when urpmi is running.
+# # Sentinel comment text does not work either because urpmi.update removes
+# # all comments. So we just delete the entry that matches what we originally
+# # inserted. If such an entry was added manually, that's tough luck.
+# if [ -f "$URPMI_REPO_FILE" ]; then
+# sed -i '\_^@@PACKAGE@@ $REPOCONFIG/$DEFAULT_ARCH {$_,/^}$/d' "$URPMI_REPO_FILE"
+# fi
+#}
+#
+#remove_yast() {
+# # Ideally, we would run: zypper removerepo "@@PACKAGE@@"
+# # but that does not work when zypper is running.
+# rm -f /etc/zypp/repos.d/@@PACKAGE@@.repo
+#}
diff --git a/src/install/common/rpmrepo.cron b/src/install/common/rpmrepo.cron
new file mode 100644
index 0000000..8b0043a
--- /dev/null
+++ b/src/install/common/rpmrepo.cron
@@ -0,0 +1,56 @@
+#!/bin/sh
+#
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+#
+# This script is part of the @@PACKAGE@@ package.
+#
+# It creates the repository configuration file for package updates, since
+# we cannot do this during the @@PACKAGE@@ installation since the repository
+# is locked.
+#
+# This functionality can be controlled by creating the $DEFAULTS_FILE and
+# setting "repo_add_once" to "true" or "false" as desired. An empty
+# $DEFAULTS_FILE is the same as setting the value to "false".
+
+@@include@@rpm.include
+
+## MAIN ##
+DEFAULTS_FILE="/etc/default/@@PACKAGE@@"
+if [ -r "$DEFAULTS_FILE" ]; then
+ . "$DEFAULTS_FILE"
+fi
+
+if [ "$repo_add_once" = "true" ]; then
+ determine_rpm_package_manager
+
+ case $PACKAGEMANAGER in
+ "urpmi")
+ # We need to configure urpmi after the install has finished.
+ # See configure_urpmi() for details.
+ configure_urpmi
+ ;;
+ "yast")
+ # It looks as though yast/zypper has a lock on the RPM DB during
+ # postinstall, so we cannot add the signing key with install_rpm_key().
+      # Instead, we attempt to do this here. If the user attempts to update before
+ # the cron job imports the key, Yast will grab the key from our server and
+ # prompt the user to accept the key.
+ install_rpm_key
+ ;;
+ esac
+
+ if [ $? -eq 0 ]; then
+ # Before we quit auto-configuration, check that everything looks sane, since
+ # part of this happened during package install and we don't have the return
+ # value of that process.
+ verify_install $PACKAGEMANAGER
+ if [ $? -eq 0 ]; then
+ sed -i -e 's/[[:space:]]*repo_add_once=.*/repo_add_once="false"/' \
+ "$DEFAULTS_FILE"
+ fi
+ fi
+else
+ update_bad_repo
+fi
diff --git a/src/install/common/updater b/src/install/common/updater
new file mode 100755
index 0000000..53a8a80
--- /dev/null
+++ b/src/install/common/updater
@@ -0,0 +1,26 @@
+#!/bin/sh
+#
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+# TODO
+# - handle other distros (e.g. non-apt).
+
+@@include@@apt.include
+
+if [ -x "$APT_GET" ]; then
+ update_sources_lists
+ # If the repo was just added, force a cache update.
+ if [ $? -eq 1 ]; then
+ install_key
+ "$APT_GET" -qq update
+ fi
+
+ # TODO(mmoss) detect if apt cache is stale (> 1 day) and force update?
+
+  # Just try to install the package. If it's already installed, apt-get won't do
+ # anything.
+ "$APT_GET" install -y -q @@PACKAGE@@
+fi
+
diff --git a/src/install/common/variables.include b/src/install/common/variables.include
new file mode 100644
index 0000000..f3a17cd
--- /dev/null
+++ b/src/install/common/variables.include
@@ -0,0 +1,5 @@
+# System-wide package configuration.
+DEFAULTS_FILE="/etc/default/@@PACKAGE@@"
+
+# sources.list setting for @@PACKAGE@@ updates.
+REPOCONFIG="@@REPOCONFIG@@"
diff --git a/src/install/debian/build.sh b/src/install/debian/build.sh
new file mode 100755
index 0000000..a119773
--- /dev/null
+++ b/src/install/debian/build.sh
@@ -0,0 +1,279 @@
+#!/bin/bash
+#
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+set -e
+if [ "$VERBOSE" ]; then
+ set -x
+fi
+set -u
+
+# Create the Debian changelog file needed by dpkg-gencontrol. This just adds a
+# placeholder change, indicating it is the result of an automatic build.
+gen_changelog() {
+ rm -f "${DEB_CHANGELOG}"
+ process_template "${SCRIPTDIR}/changelog.template" "${DEB_CHANGELOG}"
+ debchange -a --nomultimaint -m --changelog "${DEB_CHANGELOG}" \
+ --distribution UNRELEASED "automatic build"
+}
+
+# Create the Debian control file needed by dpkg-deb.
+gen_control() {
+ dpkg-gencontrol -v"${VERSIONFULL}" -c"${DEB_CONTROL}" -l"${DEB_CHANGELOG}" \
+ -f"${DEB_FILES}" -p"${PACKAGE}-${CHANNEL}" -P"${STAGEDIR}" -T"${DEB_SUBST}" \
+ -O > "${STAGEDIR}/DEBIAN/control"
+ rm -f "${DEB_CONTROL}"
+}
+
+# Create the Debian substvars file needed by dpkg-gencontrol.
+gen_substvars() {
+ # dpkg-shlibdeps requires a control file in debian/control, so we're
+ # forced to prepare a fake debian directory.
+ mkdir "${SUBSTFILEDIR}/debian"
+ cp "${DEB_CONTROL}" "${SUBSTFILEDIR}/debian"
+ pushd "${SUBSTFILEDIR}" >/dev/null
+ dpkg-shlibdeps "${STAGEDIR}${APACHE_MODULEDIR}/mod_pagespeed.so" \
+ -O >> "${DEB_SUBST}" 2>/dev/null
+ dpkg-shlibdeps "${STAGEDIR}${APACHE_MODULEDIR}/mod_pagespeed_ap24.so" \
+ -O >> "${DEB_SUBST}" 2>/dev/null
+ popd >/dev/null
+}
+
+# Set up the installation directory hierarchy in the package staging area.
+prep_staging_debian() {
+ prep_staging_common
+ install -m 755 -d "${STAGEDIR}/DEBIAN" \
+ "${STAGEDIR}/etc/cron.daily" \
+ "${STAGEDIR}/etc/apache2/conf.d" \
+ "${STAGEDIR}/usr/bin"
+}
+
+# Put the package contents in the staging area.
+stage_install_debian() {
+ prep_staging_debian
+ stage_install_common
+ echo "Staging Debian install files in '${STAGEDIR}'..."
+ process_template "${BUILDDIR}/install/common/repo.cron" \
+ "${STAGEDIR}/etc/cron.daily/${PACKAGE}"
+ chmod 755 "${STAGEDIR}/etc/cron.daily/${PACKAGE}"
+ process_template "${BUILDDIR}/install/debian/postinst" \
+ "${STAGEDIR}/DEBIAN/postinst"
+ chmod 755 "${STAGEDIR}/DEBIAN/postinst"
+ process_template "${BUILDDIR}/install/debian/prerm" \
+ "${STAGEDIR}/DEBIAN/prerm"
+ chmod 755 "${STAGEDIR}/DEBIAN/prerm"
+ process_template "${BUILDDIR}/install/debian/postrm" \
+ "${STAGEDIR}/DEBIAN/postrm"
+ chmod 755 "${STAGEDIR}/DEBIAN/postrm"
+ install -m 644 "${BUILDDIR}/install/debian/conffiles" \
+ "${STAGEDIR}/DEBIAN/conffiles"
+ echo "/etc/cron.daily/${PACKAGE}" >> "${STAGEDIR}/DEBIAN/conffiles"
+ process_template "${BUILDDIR}/install/common/pagespeed.load.template" \
+ "${STAGEDIR}${APACHE_CONFDIR}/pagespeed.load"
+ chmod 644 "${STAGEDIR}${APACHE_CONFDIR}/pagespeed.load"
+ process_template "${BUILDDIR}/install/common/pagespeed.conf.template" \
+ "${STAGEDIR}${APACHE_CONFDIR}/pagespeed.conf"
+ install -m 755 "${BUILDDIR}/js_minify" \
+ "${STAGEDIR}/usr/bin/pagespeed_js_minify"
+ chmod 644 "${STAGEDIR}${APACHE_CONFDIR}/pagespeed.conf"
+ install -m 644 \
+ "${BUILDDIR}/../../net/instaweb/genfiles/conf/pagespeed_libraries.conf" \
+ "${STAGEDIR}${APACHE_CONF_D_DIR}/pagespeed_libraries.conf"
+}
+
+# Build the deb file within a fakeroot.
+do_package_in_fakeroot() {
+ FAKEROOTFILE=$(mktemp -t fakeroot.tmp.XXXXXX) || exit 1
+ fakeroot -s "${FAKEROOTFILE}" -- \
+ chown -R ${APACHE_USER}:${APACHE_USER} ${STAGEDIR}${MOD_PAGESPEED_CACHE}
+ fakeroot -s "${FAKEROOTFILE}" -i "${FAKEROOTFILE}" -- \
+ chown -R ${APACHE_USER}:${APACHE_USER} ${STAGEDIR}${MOD_PAGESPEED_LOG}
+ fakeroot -i "${FAKEROOTFILE}" -- \
+ dpkg-deb -b "${STAGEDIR}" .
+ rm -f "${FAKEROOTFILE}"
+}
+
+# Actually generate the package file.
+do_package() {
+ export HOST_ARCH="$1"
+ echo "Packaging ${HOST_ARCH}..."
+ PREDEPENDS="$COMMON_PREDEPS"
+ DEPENDS="${COMMON_DEPS}"
+
+ # Generate Conflicts: and Replaces: headers for the other channel to get
+ # dpkg to seamlessly switch channels on -i
+ case $CHANNEL in
+ stable )
+ CONFLICTS=mod-pagespeed-beta
+ ;;
+ beta )
+ CONFLICTS=mod-pagespeed-stable
+ ;;
+ * )
+ echo
+ echo "ERROR: '$CHANNEL' is not a valid channel type."
+ echo
+ exit 1
+ ;;
+ esac
+ REPLACES="${CONFLICTS}"
+
+ gen_changelog
+ process_template "${SCRIPTDIR}/control.template" "${DEB_CONTROL}"
+ export DEB_HOST_ARCH="${HOST_ARCH}"
+ gen_substvars
+ if [ -f "${DEB_CONTROL}" ]; then
+ gen_control
+ fi
+
+ do_package_in_fakeroot
+}
+
+# Remove temporary files and unwanted packaging output.
+cleanup() {
+ echo "Cleaning..."
+ rm -rf "${STAGEDIR}"
+ rm -rf "${TMPFILEDIR}"
+ rm -rf "${SUBSTFILEDIR}"
+}
+
+usage() {
+ echo "usage: $(basename $0) [-c channel] [-a target_arch] [-o 'dir'] [-b 'dir']"
+ echo "-c channel the package channel (unstable, beta, stable)"
+ echo "-a arch package architecture (ia32 or x64)"
+ echo "-o dir package output directory [${OUTPUTDIR}]"
+ echo "-b dir build input directory [${BUILDDIR}]"
+ echo "-h this help message"
+}
+
+# Check that the channel name is one of the allowable ones.
+verify_channel() {
+ case $CHANNEL in
+ stable )
+ CHANNEL=stable
+ ;;
+ testing|beta )
+ CHANNEL=beta
+ ;;
+ * )
+ echo
+ echo "ERROR: '$CHANNEL' is not a valid channel type."
+ echo
+ exit 1
+ ;;
+ esac
+}
+
+process_opts() {
+ while getopts ":o:b:c:a:h" OPTNAME
+ do
+ case $OPTNAME in
+ o )
+ OUTPUTDIR="$OPTARG"
+ mkdir -p "${OUTPUTDIR}"
+ ;;
+ b )
+ BUILDDIR=$(readlink -f "${OPTARG}")
+ ;;
+ c )
+ CHANNEL="$OPTARG"
+ ;;
+ a )
+ TARGETARCH="$OPTARG"
+ ;;
+ h )
+ usage
+ exit 0
+ ;;
+ \: )
+ echo "'-$OPTARG' needs an argument."
+ usage
+ exit 1
+ ;;
+ * )
+ echo "invalid command-line option: $OPTARG"
+ usage
+ exit 1
+ ;;
+ esac
+ done
+}
+
+#=========
+# MAIN
+#=========
+
+SCRIPTDIR=$(readlink -f "$(dirname "$0")")
+OUTPUTDIR="${PWD}"
+STAGEDIR=$(mktemp -d -t deb.build.XXXXXX) || exit 1
+TMPFILEDIR=$(mktemp -d -t deb.tmp.XXXXXX) || exit 1
+SUBSTFILEDIR=$(mktemp -d -t deb.subst.XXXXXX) || exit 1
+DEB_CHANGELOG="${TMPFILEDIR}/changelog"
+DEB_FILES="${TMPFILEDIR}/files"
+DEB_CONTROL="${TMPFILEDIR}/control"
+DEB_SUBST="${SUBSTFILEDIR}/debian/substvars"
+CHANNEL="beta"
+# Default target architecture to same as build host.
+if [ "$(uname -m)" = "x86_64" ]; then
+ TARGETARCH="x64"
+else
+ TARGETARCH="ia32"
+fi
+
+# call cleanup() on exit
+trap cleanup 0
+process_opts "$@"
+if [ ! "$BUILDDIR" ]; then
+ BUILDDIR=$(readlink -f "${SCRIPTDIR}/../../out/Release")
+fi
+
+source ${BUILDDIR}/install/common/installer.include
+
+get_version_info
+VERSIONFULL="${VERSION}-r${REVISION}"
+
+source "${BUILDDIR}/install/common/mod-pagespeed.info"
+eval $(sed -e "s/^\([^=]\+\)=\(.*\)$/export \1='\2'/" \
+ "${BUILDDIR}/install/common/BRANDING")
+
+REPOCONFIG="deb http://dl.google.com/linux/${PACKAGE#google-}/deb/ stable main"
+verify_channel
+
+# Some Debian packaging tools want these set.
+export DEBFULLNAME="${MAINTNAME}"
+export DEBEMAIL="${MAINTMAIL}"
+
+# Make everything happen in the OUTPUTDIR.
+cd "${OUTPUTDIR}"
+
+COMMON_DEPS="apache2.2-common|apache2-api-20120211"
+COMMON_PREDEPS="dpkg (>= 1.14.0)"
+
+APACHE_MODULEDIR="/usr/lib/apache2/modules"
+APACHE_CONFDIR="/etc/apache2/mods-available"
+APACHE_CONF_D_DIR="/etc/apache2/conf.d"
+MOD_PAGESPEED_CACHE="/var/cache/mod_pagespeed"
+MOD_PAGESPEED_LOG="/var/log/pagespeed"
+APACHE_USER="www-data"
+COMMENT_OUT_DEFLATE=
+SSL_CERT_DIR="/etc/ssl/certs"
+SSL_CERT_FILE_COMMAND=
+
+case "$TARGETARCH" in
+ ia32 )
+ stage_install_debian
+ do_package "i386"
+ ;;
+ x64 )
+ stage_install_debian
+ do_package "amd64"
+ ;;
+ * )
+ echo
+ echo "ERROR: Don't know how to build DEBs for '$TARGETARCH'."
+ echo
+ exit 1
+ ;;
+esac
diff --git a/src/install/debian/changelog.template b/src/install/debian/changelog.template
new file mode 100644
index 0000000..4ed22e5
--- /dev/null
+++ b/src/install/debian/changelog.template
@@ -0,0 +1,4 @@
+@@PACKAGE@@-@@CHANNEL@@ (@@VERSIONFULL@@) UNRELEASED; urgency=low
+ * No changes
+
+ -- @@MAINTNAME@@ <@@MAINTMAIL@@> Wed, 20 Oct 2010 14:54:35 -0800
diff --git a/src/install/debian/conffiles b/src/install/debian/conffiles
new file mode 100644
index 0000000..4dd02d9
--- /dev/null
+++ b/src/install/debian/conffiles
@@ -0,0 +1,3 @@
+/etc/apache2/mods-available/pagespeed.load
+/etc/apache2/mods-available/pagespeed.conf
+/etc/apache2/conf.d/pagespeed_libraries.conf
diff --git a/src/install/debian/control.template b/src/install/debian/control.template
new file mode 100644
index 0000000..cc9b1d0
--- /dev/null
+++ b/src/install/debian/control.template
@@ -0,0 +1,16 @@
+Source: @@PACKAGE@@-@@CHANNEL@@
+Section: httpd
+Priority: optional
+Maintainer: @@MAINTNAME@@ <@@MAINTMAIL@@>
+Build-Depends: dpkg-dev, devscripts, fakeroot
+Standards-Version: 3.8.0
+
+Package: @@PACKAGE@@-@@CHANNEL@@
+Provides: @@PROVIDES@@
+Replaces: @@REPLACES@@
+Conflicts: @@CONFLICTS@@
+Pre-Depends: @@PREDEPENDS@@
+Depends: ${shlibs:Depends}, @@DEPENDS@@
+Architecture: @@ARCHITECTURE@@
+Description: @@SHORTDESC@@
+ @@FULLDESC@@
diff --git a/src/install/debian/postinst b/src/install/debian/postinst
new file mode 100755
index 0000000..370207d
--- /dev/null
+++ b/src/install/debian/postinst
@@ -0,0 +1,31 @@
+#!/bin/sh
+
+# Based on postinst from Chromium and Google Talk.
+
+@@include@@../common/apt.include
+
+MODPAGESPEED_ENABLE_UPDATES=@@MODPAGESPEED_ENABLE_UPDATES@@
+
+case "$1" in
+ configure)
+ if [ -n "${MODPAGESPEED_ENABLE_UPDATES}" -a ! -e "$DEFAULTS_FILE" ]; then
+ echo 'repo_add_once="true"' > "$DEFAULTS_FILE"
+ echo 'repo_reenable_on_distupgrade="true"' >> "$DEFAULTS_FILE"
+ fi
+
+ # Run the cron job immediately to perform repository
+ # configuration.
+ nohup sh /etc/cron.daily/@@PACKAGE@@ > /dev/null 2>&1 &
+
+ test ! -e /etc/apache2/mods-enabled/pagespeed.load && \
+ a2enmod pagespeed
+ ;;
+ abort-upgrade|abort-remove|abort-deconfigure)
+ ;;
+ *)
+ echo "postinst called with unknown argument \`$1'" >&2
+ exit 1
+ ;;
+esac
+
+exit 0
diff --git a/src/install/debian/postrm b/src/install/debian/postrm
new file mode 100755
index 0000000..0dc3058
--- /dev/null
+++ b/src/install/debian/postrm
@@ -0,0 +1,27 @@
+#!/bin/sh
+#
+# Copyright (c) 2009 The Chromium Authors. All rights reserved.
+# Use of this source code is governed by a BSD-style license that can be
+# found in the LICENSE file.
+
+action="$1"
+
+# Only do complete clean-up on purge.
+if [ "$action" != "purge" ] ; then
+ exit 0
+fi
+
+@@include@@../common/apt.include
+
+# Only remove the defaults file if it is not empty. An empty file was probably
+# put there by the sysadmin to disable automatic repository configuration, as
+# per the instructions on the package download page.
+if [ -s "$DEFAULTS_FILE" ]; then
+ # Make sure the package defaults are removed before the repository config,
+ # otherwise it could result in the repository config being removed, but the
+ # package defaults remain and are set to not recreate the repository config.
+ # In that case, future installs won't recreate it and won't get auto-updated.
+ rm "$DEFAULTS_FILE" || exit 1
+fi
+# Remove any Google repository added by the package.
+clean_sources_lists
diff --git a/src/install/debian/prerm b/src/install/debian/prerm
new file mode 100755
index 0000000..d9ab1f9
--- /dev/null
+++ b/src/install/debian/prerm
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+case "$1" in
+ remove)
+ test -e /etc/apache2/mods-enabled/pagespeed.load && a2dismod pagespeed
+ ;;
+ upgrade|deconfigure|failed-upgrade)
+ ;;
+ *)
+ echo "prerm called with unknown argument \`$1'" >&2
+ exit 1
+ ;;
+esac
+
+exit 0
diff --git a/src/install/debug.conf.template b/src/install/debug.conf.template
new file mode 100644
index 0000000..48b1874
--- /dev/null
+++ b/src/install/debug.conf.template
@@ -0,0 +1,2035 @@
+# Port map:
+# 8080 -- master configuration.
+# 8081 -- LOADTEST / PROXY / SLURP
+# 8082 -- SLURP
+# @@APACHE_SECONDARY_PORT@@ -- secondary configuration (8083 debug, 8084 root)
+# 8085 --- loopback route testing
+#
+# Note that when system-testing on CentOS installs we need to authorize these
+# ports via semanage. Any new ports should thus be added to the Makefile in
+# this directory, target enable_ports_and_file_access.
+
+# We don't enable XHTML in the default pagespeed.conf. For the
+# add_instrumentation xhtml tests to work, though, we have to enable
+# it here. If we change pagespeed.conf.template to enable XHTML by
+# default then we can eliminate this line.
+AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER application/xhtml+xml
+
+# Do testing using memcached in lieu of the file cache.
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+# When testing memcached in system tests, let's make sure we are really
+# hitting it and not servicing the lookups in-memory. Note that we will
+# cover having a non-zero lru-cache with memcached in load tests.
+#MEMCACHED ModPagespeedLRUCacheKbPerProcess 0
+
+# If X-PSA-Blocking-Rewrite request header is present and its value matches the
+# value of ModPagespeedBlockingRewriteKey below, the response will be fully
+# rewritten before being flushed to the client.
+ModPagespeedBlockingRewriteKey psatest
+
+# By default we test without image beaconing, and use a separate vhost to
+# enable image beaconing explicitly.
+ModPagespeedCriticalImagesBeaconEnabled false
+
+# By default, resources will not be used for inlining without explicit
+# authorization. Supported values are off or a comma-separated list of strings
+# from {Script,Stylesheet}.
+ModPagespeedInlineResourcesWithoutExplicitAuthorization off
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/max_html_parse_size" >
+ ModPagespeedMaxHtmlParseBytes 5000
+</Directory>
+
+# This will cause an error report in the startup log if mod_pagespeed is not
+# built with ssl, but otherwise will cause no trouble.
+ModPagespeedFetchHttps enable
+
+ModPagespeedLibrary 43 1o978_K0_LNE5_ystNklf http://www.modpagespeed.com/rewrite_javascript.js
+ModPagespeedRetainComment " google_ad_section*"
+
+# Test proxying of non-.pagespeed. resources.
+ModPagespeedMapProxyDomain http://@@APACHE_DOMAIN@@/modpagespeed_http \
+ http://@@PAGESPEED-TEST-HOST@@/do_not_modify
+ModPagespeedMapProxyDomain http://@@APACHE_DOMAIN@@/content_type_present \
+ http://@@PAGESPEED-TEST-HOST@@:8091
+ModPagespeedMapProxyDomain http://@@APACHE_DOMAIN@@/content_type_absent \
+ http://@@PAGESPEED-TEST-HOST@@:8092
+
+# Turn on "KeepAlive" so we can test it in system_test.sh.
+KeepAlive On
+KeepAliveTimeout 60
+LogFormat "%v %X %P %h %l %u %t \"%r\" %>s %b" common
+
+# Set logging level to 'info' to catch some informational messages which have
+# been downgraded from 'error'.
+LogLevel info
+
+ModPagespeedStaticAssetPrefix /mod_pagespeed_static/
+
+# We want to test some vhosts without a shared memory cache configured, and if
+# the default shared memory metadata cache is enabled we can't do that. So
+# disable it globally and enable it where we need it.
+ModPagespeedDefaultSharedMemoryCacheKB 0
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/https_fetch" >
+ ModPagespeedDisableFilters inline_images
+ ModPagespeedDomain https://@@PAGESPEED-TEST-HOST@@
+ ModPagespeedMapProxyDomain http://@@APACHE_DOMAIN@@/https_gstatic_dot_com \
+ https://www.gstatic.com/psa/static
+</Directory>
+
+# These caching headers are set up for the document root, and
+# also serve as a demonstration of good values to set for the entire
+# site, if it is to be optimized by mod_pagespeed.
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_example" >
+ <IfModule headers_module>
+    # To show that mod_pagespeed rewrites web pages, we must
+ # turn off Etags for HTML files and eliminate caching altogether.
+ # mod_pagespeed should rewrite HTML files each time they are served.
+ # The first time mod_pagespeed sees an HTML file, it may not optimize
+ # it fully. It will optimize better after the second view. Caching
+ # defeats this behavior.
+ <FilesMatch "\.(html|htm)$">
+ Header unset Etag
+ Header set Cache-control "max-age=0, no-cache"
+ </FilesMatch>
+
+ # Images, styles, and javascript are all cache-extended for
+ # a year by rewriting URLs to include a content hash. mod_pagespeed
+ # can only do this if the resources are cacheable in the first place.
+ # The origin caching policy, set here to 10 minutes, dictates how
+ # frequently mod_pagespeed must re-read the content files and recompute
+ # the content-hash. As long as the content doesn't actually change,
+ # the content-hash will remain the same, and the resources stored
+ # in browser caches will stay relevant.
+ <FilesMatch "\.(jpg|jpeg|gif|png|js|css)$">
+ Header unset Etag
+ Header set Cache-control "max-age=600"
+ </FilesMatch>
+ </IfModule>
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/shard" >
+ ModPagespeedShardDomain "@@APACHE_DOMAIN@@" shard1,shard2
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters extend_cache
+</Directory>
+
+# add_instrumentation must be enabled so that we can test /mod_pagespeed_beacon.
+ModPagespeedEnableFilters add_instrumentation
+
+<Directory "@@APACHE_DOC_ROOT@@/" >
+ # This is enabled to make sure we don't crash mod_negotiation.
+ Options +MultiViews
+</Directory>
+
+# Enable htaccess
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/" >
+ AllowOverride All
+</Directory>
+
+# Enable resize_rendered_image_dimensions
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/resize_rendered_dimensions" >
+ ModPagespeedCriticalImagesBeaconEnabled true
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/" >
+ AllowOverride All
+ # Some versions of mod_rewrite will refuse to do any work if
+ # symlink handling is off.
+ Options +FollowSymLinks
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/close_connection/" >
+ AllowOverride All
+  # Helps test whether we successfully strip Connection:close
+ # results from the origin.
+ Options +SymLinksIfOwnerMatch
+ Header append 'Connection' 'close'
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/compressed/" >
+ # Files in this directory are already compressed so always add
+ # the right header.
+ Header set Cache-control "max-age=600"
+ Header append 'Content-Encoding' 'gzip'
+ AddType text/javascript .custom_ext
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/" >
+ # Files in this directory should include an err_headers_out
+ # header. 'Header always' writes to err_headers_out.
+ Header always set X-TestHeader "hello"
+</Directory>
+
+ModPagespeedLoadFromFile "http://@@APACHE_DOMAIN@@/mod_pagespeed_test/ipro/instant/" \
+ "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/instant/"
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/cookie/" >
+ # Add Vary:Cookie. This should prevent us from optimizing the
+ # vary_cookie.css even though ModPagespeedRespectVary is off.
+ # apache/system_test.sh does the fetches test with and without cookies.
+ Header append Vary Cookie
+ ModPagespeedRespectVary off
+ ModPagespeedInPlaceWaitForOptimized on
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/cookie2/" >
+ # Add Vary:Cookie2. This should prevent us from optimizing the
+ # vary_cookie2.css even though ModPagespeedRespectVary is off.
+ # apache/system_test.sh does the fetches test with and without cookie2.
+ Header append Vary Cookie2
+ ModPagespeedRespectVary off
+ ModPagespeedInPlaceWaitForOptimized on
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/mod_deflate/" >
+ AddOutputFilterByType DEFLATE text/css
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/vary/no_respect/" >
+ ModPagespeedDisableFilters add_instrumentation,inline_css
+ ModPagespeedRespectVary off
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/no_cache/" >
+ # Files in this directory should be served uncacheable.
+ Header set Cache-control "no-cache"
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/remote_config/" >
+ # Files in this directory should be served uncacheable.
+ Header set Cache-control "max-age=7200, must-revalidate"
+</Directory>
+
+# Set jpeg Quality
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/jpeg_rewriting/" >
+ ModPagespeedJpegRecompressionQuality 70
+</Directory>
+
+# Set Generic Image Quality
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/webp_rewriting" >
+ ModPagespeedImageRecompressionQuality 75
+</Directory>
+
+# Set Webp Quality
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/image_rewriting" >
+ ModPagespeedWebpRecompressionQuality 65
+</Directory>
+
+# Enable split_html
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/split_html/" >
+ ModPagespeedEnableFilters split_html,split_html_helper,lazyload_images
+ ModPagespeedServeSplitHtmlInTwoChunks on
+ ModPagespeedCriticalLineConfig div[@id="container"]/div[4]
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/disable_no_transform" >
+ ModPagespeedDisableRewriteOnNoTransform off
+ <IfModule headers_module>
+ <FilesMatch "\.(js|css)$">
+ Header append 'Cache-Control' 'no-transform'
+ </FilesMatch>
+ </IfModule>
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/no_transform" >
+ Header append 'Cache-Control' 'no-transform'
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/strip_subresource_hints/default" >
+ ModPagespeedRewriteLevel CoreFilters
+ ModPagespeedDisableFilters add_instrumentation
+ ModPagespeedDisallow *dontrewriteme*
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/strip_subresource_hints/preserve_on/" >
+ ModPagespeedPreserveSubresourceHints on
+ ModPagespeedRewriteLevel CoreFilters
+ ModPagespeedDisableFilters add_instrumentation
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/strip_subresource_hints/preserve_off/" >
+ ModPagespeedPreserveSubresourceHints off
+ ModPagespeedRewriteLevel CoreFilters
+ ModPagespeedDisableFilters add_instrumentation
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/strip_subresource_hints/default_passthrough/" >
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedDisableFilters add_instrumentation
+</Directory>
+
+# This Directory does not even exist, but by setting some options in that
+# scope we test to make sure the options we claim are really settable in
+# .htaccess. Note that <Directory> and .htaccess are enforced the same way.
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/htaccess_test" >
+ ModPagespeed on
+ ModPagespeedAllow *
+ ModPagespeedAvoidRenamingIntrospectiveJavascript on
+ ModPagespeedBeaconUrl foo
+ ModPagespeedCombineAcrossPaths off
+ ModPagespeedCssFlattenMaxBytes 100
+ ModPagespeedCssImageInlineMaxBytes 100
+ ModPagespeedCssInlineMaxBytes 100
+ ModPagespeedCssOutlineMinBytes 10000
+ ModPagespeedDisableFilters rewrite_images
+ ModPagespeedDisallow *bad*
+ ModPagespeedDomain example.com
+ ModPagespeedEnableFilters extend_cache
+ ModPagespeedImageInlineMaxBytes 100
+ ModPagespeedImageLimitOptimizedPercent 50
+ ModPagespeedImageLimitResizeAreaPercent 50
+ ModPagespeedJpegRecompressionQuality 85
+ ModPagespeedJsInlineMaxBytes 100
+ ModPagespeedJsOutlineMinBytes 10000
+ ModPagespeedLowercaseHtmlNames on
+ ModPagespeedMapOriginDomain localhost example.com
+ ModPagespeedMapRewriteDomain cdn.com example.com
+ ModPagespeedMaxHtmlParseBytes 100000
+ ModPagespeedMaxInlinedPreviewImagesIndex 10
+ ModPagespeedMinImageSizeLowResolutionBytes 100
+ ModPagespeedModifyCachingHeaders off
+ ModPagespeedRetainComment *ad_tag*
+ ModPagespeedRewriteLevel PassThrough
+</Directory>
+
+# Establish a proxy mapping where the current server proxies an image
+# stored on ref.pssdemos.com. We use ref.pssdemos.com rather than
+# modpagespeed.com so that we use a single html file both for:
+# 1. a demo of the feature on modpagespeed.com. It would
+#    be confusing to demo proxying when the origin was the
+# same as the proxy.
+# 2. A system-test of the feature to be run with our usual testing.
+ModPagespeedMapProxyDomain @@APACHE_DOMAIN@@/gstatic_images \
+ http://www.gstatic.com/psa/static
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/forbid_all_disabled/disabled" >
+ # Prevent the enabling of these filters for files in this directory
+ # -and- all subdirectories (they can't override it, deliberately).
+ ModPagespeedForbidAllDisabledFilters true
+ ModPagespeedDisableFilters remove_quotes,remove_comments
+ ModPagespeedDisableFilters collapse_whitespace
+ # Enable this, which was disabled in ../.htaccess, to test that we can
+ # enable something already disabled at the same time as we forbid all.
+ ModPagespeedEnableFilters inline_css
+</Directory>
+
+# This is needed for the server-side includes test in
+# apache/system_test.sh. See mod_pagespeed_test/ssi/.htaccess as well.
+<IfModule !include_module>
+ LoadModule include_module @@APACHE_MODULES@@/mod_include.so
+</IfModule>
+AddType text/html .shtml
+AddOutputFilter INCLUDES .shtml
+
+# For the mod_rewrite test in apache/system_test.sh
+<IfModule !rewrite_module>
+ LoadModule rewrite_module @@APACHE_MODULES@@/mod_rewrite.so
+</IfModule>
+
+# For the vary: handling test
+<IfModule !headers_module>
+ LoadModule headers_module @@APACHE_MODULES@@/mod_headers.so
+</IfModule>
+
+# Helps test that extra headers supplied by the apache conf
+# survive single-resource rewrites
+#
+# http://code.google.com/p/modpagespeed/issues/detail?id=324
+Header append 'X-Extra-Header' '1'
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/nostore" >
+ Header append 'Cache-Control' 'no-store'
+</Directory>
+
+# Build a configuration hierarchy where at the root we have turned on a few
+# filters, several of which do not preserve URLs, and in a subdirectory we have
+# turned on preserve URLs.
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/preserveurls" >
+#ModPagespeedEnableFilters inline_preview_images,lazyload_images
+#ModPagespeedEnableFilters resize_mobile_images,rewrite_images,sprite_images
+#ModPagespeedEnableFilters fallback_rewrite_css_urls,flatten_css_imports
+#ModPagespeedEnableFilters inline_css,move_css_above_scripts,move_css_to_head
+#ModPagespeedEnableFilters outline_css,rewrite_css,combine_css
+#ModPagespeedEnableFilters combine_javascript,defer_javascript,inline_javascript
+#ModPagespeedEnableFilters outline_javascript,rewrite_javascript
+ModPagespeedRewriteLevel CoreFilters
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/preserveurls/on" >
+ModPagespeedJsPreserveURLs On
+ModPagespeedImagePreserveURLs On
+ModPagespeedCssPreserveURLs On
+
+# TODO(jmarantz): PreserveURLs should override the explicit setting at
+# the level above that turns on inline_preview_images, resize_mobile_images,
+# and lazyload_images
+ModPagespeedDisableFilters inline_preview_images,resize_mobile_images
+ModPagespeedDisableFilters lazyload_images
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/experimental_js_minifier" >
+ ModPagespeedUseExperimentalJsMinifier on
+</Directory>
+
+# However we should not allow user-specified cache-control on
+# rewritten HTML or resources. This setting helps us make
+# sure that we strip any user-specified cache-control when
+# we rewrite HTML. We test this in apache/system_test.sh.
+Header set Cache-Control "max-age=600"
+
+# For regression test of connection failing.
+ModPagespeedDomain modpagespeed.com:1023
+
+# Test LoadFromFile mapping by mapping one dir to another.
+ModPagespeedLoadFromFile "http://@@APACHE_DOMAIN@@/mod_pagespeed_test/load_from_file/web_dir/" "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/load_from_file/file_dir/"
+ModPagespeedLoadFromFileMatch "^http://@@APACHE_DOMAIN@@/mod_pagespeed_test/load_from_file_match/web_([^/]*)/" "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/load_from_file/file_\1/"
+ModPagespeedLoadFromFileRule Disallow "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/load_from_file/file_dir/httponly/"
+ModPagespeedLoadFromFileRuleMatch Disallow \.ssp.css$
+ModPagespeedLoadFromFileRuleMatch Allow exception\.ssp\.css$
+
+# Print out detail about connection-refused errors. We don't negative-test
+# this here because it's a hassle; we just depend on the unit-tests for that.
+ModPagespeedListOutstandingUrlsOnError on
+
+ModPagespeedAvoidRenamingIntrospectiveJavascript off
+
+# Test coverage for <ModPagespeedIf>
+<ModPagespeedIf spdy>
+ # A setting one might want...
+ ModPagespeedDisableFilters combine_css
+
+  # Settings that don't make much sense, but which we want for tests.
+ ModPagespeedDisableFilters rewrite_css
+ ModPagespeedDisableFilters extend_cache
+ ModPagespeedDisableFilters rewrite_images
+</ModPagespeedIf>
+
+<ModPagespeedIf !spdy>
+ ModPagespeedShardDomain nonspdy.example.com s1.example.com,s2.example.com
+</ModPagespeedIf>
+
+# These will be sent to the origin domain when fetching subresources.
+ModPagespeedCustomFetchHeader header value
+ModPagespeedCustomFetchHeader x-other False
+<Location /mod_pagespeed_log_request_headers.js>
+ SetHandler mod_pagespeed_log_request_headers
+</Location>
+
+ModPagespeedInPlaceResourceOptimization on
+
+ModPagespeedRespectXForwardedProto on
+
+<Location ~ "/mod_pagespeed_test/response_headers.html*">
+ SetHandler mod_pagespeed_response_options_handler
+</Location>
+
+ModPagespeedCompressMetadataCache true
+
+# Make a non-empty subdirectory config to make sure that
+# cache.flush updates get transmitted to nested configurations.
+<Directory "@@APACHE_DOC_ROOT@@/cache_flush/" >
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters inline_css
+ ModPagespeedDisableFilters add_instrumentation
+</Directory>
+
+# Directory config for gathering sample data from instrumented pages for
+# purposes of mobilization.
+<Directory "@@APACHE_DOC_ROOT@@/mobilization/" >
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters mobilize
+ ModPagespeedLogMobilizationSamples true
+</Directory>
+
+# Another VirtualHost can be enabled by default as it does no harm. This can
+# be used for testing alternate configuration settings in system tests without
+# restarting Apache.
+#
+# We use NameVirtualHost so we can have multiple VirtualHost
+# configurations sharing the same port. Note that the port is
+# different depending on whether we are running system tests as root
+# or as a normal user. Note that fetches must be done with
+# http_proxy=SECONDARY_HOST:SECONDARY_PORT.
+Listen localhost:@@APACHE_SECONDARY_PORT@@
+NameVirtualHost localhost:@@APACHE_SECONDARY_PORT@@
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName secondary.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeed on
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_secondary"
+ ModPagespeedCompressMetadataCache false
+
+ ModPagespeedMapProxyDomain secondary.example.com/gstatic_images \
+ http://www.gstatic.com/psa/static
+
+ # This is the same memcached instance as the root configuration,
+ # but with a different file-cache path.
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+ ModPagespeedCacheFlushFilename cache.flush
+ # If you uncomment this, the test will fail, proving we can disable
+ # cache-flush polling.
+ # ModPagespeedCacheFlushPollIntervalSec 0
+
+ # Helps testing whether the configuration of reporting 'unload' time works.
+ ModPagespeedReportUnloadTime on
+
+ # Make a non-empty subdirectory config to make sure that
+ # cache.flush updates get transmitted to nested configurations.
+ <Directory "@@APACHE_DOC_ROOT@@/cache_flush/" >
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters inline_css
+ ModPagespeedDisableFilters add_instrumentation
+ </Directory>
+
+#REWRITE # These lines are only needed for the mod_rewrite test, where
+#REWRITE # we are just trying to prove that we remove mod_rewrite from
+#REWRITE # the request if the URL is going to be handled by mod_pagespeed.
+#REWRITE # In the root install test, this mod_rewrite seems to only take
+#REWRITE # effect in a vhost. I haven't explored why that is because it
+#REWRITE # doesn't affect the point of the test.
+#REWRITE Options +Indexes
+#REWRITE RewriteEngine on
+#REWRITE RewriteRule (.*).jpg.pagespeed.(.*).jpg /broken.jpg
+#REWRITE RewriteRule mod_pagespeed_statistics /broken
+#REWRITE RewriteRule shortcut.html /mod_pagespeed_example/index.html
+</VirtualHost>
+
+# Sets up a logical home-page server on www.example.com
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName www.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedLoadFromFile http://cdn.example.com @@APACHE_DOC_ROOT@@
+ ModPagespeedMapRewriteDomain cdn.example.com origin.example.com
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css,rewrite_images
+</VirtualHost>
+
+# Sets up a logical origin for CDNs to fetch content from, on origin.example.com.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName origin.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedLoadFromFile http://cdn.example.com @@APACHE_DOC_ROOT@@
+ ModPagespeedMapRewriteDomain cdn.example.com origin.example.com
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css,rewrite_images
+</VirtualHost>
+
+# Sets up a logical cdn, which is where we tell browsers to fetch resources from.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName cdn.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedLoadFromFile http://cdn.example.com @@APACHE_DOC_ROOT@@
+ ModPagespeedMapRewriteDomain cdn.example.com origin.example.com
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css,rewrite_images
+</VirtualHost>
+
+# Sets up a virtual host where we can specify forbidden filters without
+# affecting any other hosts.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName forbidden.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedBlockingRewriteKey psatest
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ # Start with all core filters enabled ...
+ ModPagespeedRewriteLevel CoreFilters
+ # ... then forbid these filters ...
+ ModPagespeedForbidFilters remove_quotes,remove_comments,collapse_whitespace
+ ModPagespeedForbidFilters rewrite_css,resize_images
+ # ... and disable but not forbid this one (to ensure we retain its URL).
+ ModPagespeedDisableFilters inline_css
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName unauthorizedresources.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedInlineResourcesWithoutExplicitAuthorization Script,Stylesheet
+ ModPagespeedCssInlineMaxBytes 1000000
+</VirtualHost>
+
+# Sets up a logical home-page server on
+# max-cacheable-content-length.example.com. This server is only used to test
+# ModPagespeedMaxCacheableContentLength, i.e.,
+# max_cacheable_response_content_length.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName max-cacheable-content-length.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedBlockingRewriteKey psatest
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_javascript
+ ModPagespeedMaxCacheableContentLength 85
+</VirtualHost>
+
+# Set the value of the X-Mod-Pagespeed header
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName xheader.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedXHeaderValue "UNSPECIFIED VERSION"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName domain-hyperlinks-on.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # Don't actually try to rewrite any resources; the ones in
+ # rewrite_domains.html don't actually exist.
+ ModPagespeedRewriteLevel PassThrough
+
+ ModPagespeedDomainRewriteHyperlinks on
+ ModPagespeedMapRewriteDomain http://dst.example.com http://src.example.com
+ ModPagespeedEnableFilters rewrite_domains
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName domain-hyperlinks-off.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # Don't actually try to rewrite any resources; the ones in
+ # rewrite_domains.html don't actually exist.
+ ModPagespeedRewriteLevel PassThrough
+
+ ModPagespeedDomainRewriteHyperlinks off
+ ModPagespeedMapRewriteDomain http://dst.example.com http://src.example.com
+ ModPagespeedEnableFilters rewrite_domains
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName client-domain-rewrite.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # Don't actually try to rewrite any resources; the ones in
+ # rewrite_domains.html don't actually exist.
+ ModPagespeedRewriteLevel PassThrough
+
+ ModPagespeedMapRewriteDomain http://client-domain-rewrite.example.com \
+ http://@@APACHE_DOMAIN@@
+ ModPagespeedClientDomainRewrite true
+ ModPagespeedEnableFilters rewrite_domains
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName url-attribute.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # Don't actually try to rewrite any resources; the ones in
+ # rewrite_domains.html don't actually exist.
+ ModPagespeedRewriteLevel PassThrough
+
+ # This is used for testing dynamically defined url-valued
+ # attributes
+ ModPagespeedUrlValuedAttribute span src Hyperlink
+ ModPagespeedUrlValuedAttribute hr imgsrc Image
+ ModPagespeedDomainRewriteHyperlinks on
+ ModPagespeedMapRewriteDomain http://dst.example.com http://src.example.com
+ ModPagespeedEnableFilters rewrite_domains
+ ModPagespeedUrlValuedAttribute custom a Image
+ ModPagespeedUrlValuedAttribute custom b otherResource
+ ModPagespeedUrlValuedAttribute custom c hyperlink
+ ModPagespeedUrlValuedAttribute img alt-src Image
+ ModPagespeedUrlValuedAttribute video alt-a Image
+ ModPagespeedUrlValuedAttribute video alt-b Image
+ ModPagespeedUrlValuedAttribute video alt-b Image
+
+ ModPagespeedUrlValuedAttribute link data-stylesheet Stylesheet
+ ModPagespeedUrlValuedAttribute span data-stylesheet-a Stylesheet
+ ModPagespeedUrlValuedAttribute span data-stylesheet-b Stylesheet
+ ModPagespeedUrlValuedAttribute span data-stylesheet-c Stylesheet
+
+ # Also test that we can redefine spec-defined attributes.
+ ModPagespeedUrlValuedAttribute blockquote cite Image
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName absolute-urls.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # This is used for testing that we don't load resources from
+ # absolute urls during resource reconstruction unless they're for
+ # our own hostname. While ModPagespeedDomain should no longer
+ # have an effect on whether we load absolute urls, we need to
+ # include it to be sure we're failing because of the code under
+ # test and not because the domain lawyer is rejecting it.
+ ModPagespeedDomain http://example.com
+</VirtualHost>
+
+<IfModule !proxy_module>
+ LoadModule proxy_module @@APACHE_MODULES@@/mod_proxy.so
+ LoadModule proxy_http_module @@APACHE_MODULES@@/mod_proxy_http.so
+</IfModule>
+
+# Proxy @@PAGESPEED-TEST-HOST@@ for testing Issue 582.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName @@PAGESPEED-TEST-HOST@@
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeed off
+ ServerAlias @@PAGESPEED-TEST-HOST@@
+
+ ProxyPass / http://@@PAGESPEED-TEST-HOST@@/
+ ProxyPassReverse / http://@@PAGESPEED-TEST-HOST@@/
+</VirtualHost>
+
+# The following three VHosts are created for Issue 599.
+# Create three sites for a MapProxyDomain experiment. The sites are:
+# cdn: forwards requests to proxy.
+# proxy: runs MPS, and optimizes data from origin.
+# origin: a normal website that is potentially external to proxy.
+
+# The CDN in our example which simply forwards requests to the proxy.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName cdn.pm.example.com
+
+ # Point the docroot somewhere useless so that we know we're not fetching
+ # from the CDN's filesystem. Note that in particular we are not attempting to
+ # run CGI scripts.
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/cgi"
+
+ # Tell mod_proxy that we need to use a proxy to reach our VirtualHost servers.
+ ProxyRemote * http://localhost:@@APACHE_SECONDARY_PORT@@
+ ProxyPass /external/ http://proxy.pm.example.com/external/
+ ProxyPassReverse /external/ http://proxy.pm.example.com/external/
+
+ # Unplugged so that it passes the .pagespeed. requests through to the proxy.
+ ModPagespeed unplugged
+</VirtualHost>
+
+# The proxy that runs MPS and can proxy data from origin. When the CDN
+# requests proxied data from the proxy the proxy knows to fetch it from the
+# origin server.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName proxy.pm.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/"
+ # The usual cache location so that it gets cleared for on_cache_flush tests.
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ # We have to fetch through our localhost proxy to get to our other vhosts.
+ ModPagespeedFetchProxy "localhost:@@APACHE_SECONDARY_PORT@@"
+
+ # Origin resources should be optimized and hosted on proxy/external but
+ # rewritten to cdn/external.
+ ModPagespeedDomain proxy.pm.example.com
+ ModPagespeedMapProxyDomain proxy.pm.example.com/external \
+ origin.pm.example.com \
+ cdn.pm.example.com/external
+ ModPagespeedRewriteLevel CoreFilters
+ ModPagespeedRewriteDeadlinePerFlushMs -1
+</VirtualHost>
+
+# The origin that serves the images to be proxied (Puzzle.jpg)
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName origin.pm.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/images"
+ ModPagespeed unplugged
+</VirtualHost>
+
+# For testing setting options by cookies.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName options-by-cookies-enabled.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_optionsbycookieson"
+ ModPagespeedAllowOptionsToBeSetByCookies true
+ ModPagespeedStickyQueryParameters sticky_secret
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters collapse_whitespace
+ ModPagespeedDisableFilters remove_comments,add_instrumentation
+</VirtualHost>
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName options-by-cookies-disabled.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_optionsbycookiesoff"
+ ModPagespeedAllowOptionsToBeSetByCookies false
+ ModPagespeedDisableFilters add_instrumentation
+</VirtualHost>
+
+# For testing request option overriding.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName request-option-override.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRequestOptionOverride abc
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters collapse_whitespace
+ ModPagespeedDisableFilters remove_comments,add_instrumentation
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT1@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-partially-invalid.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT2@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-invalid.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT3@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-out-of-scope.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT4@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-failed-fetch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT5@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-slow-fetch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT6@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-with-htaccess.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName remote-config-experiment.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRemoteConfigurationUrl "http://127.0.0.1:@@RCPORT7@@/remote.cfg"
+ ModPagespeedRemoteConfigurationTimeoutMs 1500
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName uses-sendfile.example.com
+ ModPagespeedBlockingRewriteKey psatest
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedEnableFilters rewrite_javascript
+ Header always set X-Sendfile blablabla
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName uses-xaccelredirect.example.com
+ ModPagespeedBlockingRewriteKey psatest
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedEnableFilters rewrite_javascript
+ Header always set X-Accel-Redirect blablabla
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName doesnt-sendfile.example.com
+ ModPagespeedBlockingRewriteKey psatest
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedEnableFilters rewrite_javascript
+</VirtualHost>
+
+# For testing ipro + load from file + unknown extensions.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName lff-ipro.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedLoadFromFile \
+ "http://lff-ipro.example.com/mod_pagespeed_test/lff_ipro" \
+ "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/lff_ipro"
+</VirtualHost>
+
+# For testing signed urls.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName signed-urls.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedUrlSigningKey helloworld
+ ModPagespeedRewriteLevel PassThrough
+</VirtualHost>
+
+# For testing signed urls, ignoring signature validity.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName signed-urls-transition.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedUrlSigningKey helloworld
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedAcceptInvalidSignatures true
+</VirtualHost>
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName unsigned-urls-transition.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ # This server will not sign URLs, but AcceptInvalidSignatures is on.
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedAcceptInvalidSignatures true
+</VirtualHost>
+
+# For testing handling of redirected requests.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName redirect.example.com
+ Redirect /redirect/ /
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters add_instrumentation,collapse_whitespace
+</VirtualHost>
+
+# Proxy + IPRO a gzip'd file for testing Issue 896.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ipro-proxy.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_ipro_proxy"
+
+ ModPagespeed on
+ ModPagespeedInPlaceResourceOptimization on
+ ModPagespeedEnableFilters rewrite_domains
+
+ ProxyPass / http://localhost:@@APACHE_TERTIARY_PORT@@/mod_pagespeed_test/ipro/mod_deflate/
+ ProxyPassReverse / http://localhost:@@APACHE_TERTIARY_PORT@@/mod_pagespeed_test/ipro/mod_deflate/
+ AddOutputFilterByType DEFLATE text/css
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName compressedcache.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeed on
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css
+ ModPagespeedHttpCacheCompressionLevel 9
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName uncompressedcache.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeed on
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css
+ ModPagespeedHttpCacheCompressionLevel 0
+ AddOutputFilterByType DEFLATE text/css
+ DeflateCompressionLevel 1
+</VirtualHost>
+
+# Backend for ipro-proxy.example.com
+Listen 127.0.0.1:@@APACHE_TERTIARY_PORT@@
+NameVirtualHost 127.0.0.1:@@APACHE_TERTIARY_PORT@@
+<VirtualHost 127.0.0.1:@@APACHE_TERTIARY_PORT@@>
+ ServerName ipro-proxy-backend.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeed unplugged
+ AddOutputFilterByType DEFLATE text/css
+</VirtualHost>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/instant/wait/" >
+ ModPagespeedInPlaceWaitForOptimized on
+
+ # Make the deadline long here for valgrind tests. We could
+ # conditionalize this.
+ ModPagespeedInPlaceRewriteDeadlineMs 20000
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/wait/" >
+ # TODO(jmarantz): ModPagespeedInPlaceWaitForOptimized should be superfluous,
+ # or made equivalent to ModPagespeedInPlaceRewriteDeadlineMs -1, which waits
+ # forever. Otherwise ModPagespeedInPlaceRewriteDeadlineMs should just have
+ # the specified deadline.
+  # See https://github.com/pagespeed/mod_pagespeed/issues/1171 for a more
+  # detailed discussion.
+ ModPagespeedInPlaceWaitForOptimized on
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/wait/long/" >
+ # Make the deadline long here for valgrind tests. We could
+ # conditionalize this.
+ ModPagespeedInPlaceRewriteDeadlineMs 10000
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/wait/short/" >
+ ModPagespeedEnableFilters in_place_optimize_for_browser
+
+ # Make the deadline short here as we expect to always miss it
+ # in tests.
+ ModPagespeedInPlaceRewriteDeadlineMs 1
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/ipro/instant/deadline/" >
+ ModPagespeedInPlaceRewriteDeadlineMs -1
+</Directory>
+
+# Test to make sure that user-authenticated resources do not get cached and
+# optimized.
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/auth" >
+ AllowOverride AuthConfig
+ AuthType Basic
+ AuthName "Restricted Files"
+ AuthBasicProvider file
+ AuthUserFile @@APACHE_DOC_ROOT@@/mod_pagespeed_test/auth/passwd.conf
+ Require user user1
+</Directory>
+
+# Enable per-vhost statistics so that tests can be somewhat independent.
+ModPagespeedUsePerVHostStatistics on
+<Location /mod_pagespeed_global_statistics>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler mod_pagespeed_global_statistics
+</Location>
+
+# Declare legacy handlers. The modern one is declared in
+# pagespeed.conf.template, which handles all these functions, but
+# these are declared so we can test that the handlers still work so
+# that people upgrading to a new release don't lose functionality.
+<Location /mod_pagespeed_statistics>
+ Order allow,deny
+ # You may insert other "Allow from" lines to add hosts you want to
+ # allow to look at generated statistics. Another possibility is
+ # to comment out the "Order" and "Allow" options from the config
+ # file, to allow any client that can reach your server to examine
+ # statistics. This might be appropriate in an experimental setup or
+ # if the Apache server is protected by a reverse proxy that will
+ # filter URLs in some fashion.
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler mod_pagespeed_statistics
+</Location>
+<Location /pagespeed_console>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler pagespeed_console
+</Location>
+<Location /mod_pagespeed_message>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler mod_pagespeed_message
+</Location>
+
+# The handler for "pagespeed_admin" is fixed in name, but you can put
+# it on any URL path, and everything should work.
+<Location /alt/admin/path>
+ Order allow,deny
+ Allow from localhost
+ Allow from 127.0.0.1
+ SetHandler pagespeed_admin
+</Location>
+
+# Use a separate vhost so that statistics are separate.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ipro.example.com
+
+ ModPagespeedInPlaceResourceOptimization on
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+# Setup a vhost with the critical image beacon and lazyload filter enabled to
+# make sure that critical images are not lazyloaded.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName imagebeacon.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters lazyload_images
+ ModPagespeedCriticalImagesBeaconEnabled true
+
+ # add_instrumentation tests beacon handling with no handler specified, while
+ # here we test it with a handler (that should do nothing).
+ <Location /mod_pagespeed_beacon>
+ SetHandler mod_pagespeed_beacon
+ </Location>
+</VirtualHost>
+
+# The downstreamcacherebeacon vhost setup in the below section is used in
+# apache system tests to make sure that downstream caching and rebeaconing
+# interact correctly.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName downstreamcacherebeacon.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedCriticalImagesBeaconEnabled true
+
+ # Enable the downstream caching feature and specify a rebeaconing key.
+ ModPagespeedDownstreamCachePurgeLocationPrefix "http://localhost:8020"
+ ModPagespeedDownstreamCachePurgeMethod "PURGE"
+ ModPagespeedDownstreamCacheRebeaconingKey random_rebeaconing_key
+
+ Header set Cache-Control "private, max-age=3000"
+</VirtualHost>
+
+# The cachable_rewritten_html path is configured for testing from within
+# apache_downstream_caching_test.sh since that cannot work with a Vhost for
+# the mod_pagespeed part of the test.
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/cachable_rewritten_html" >
+ ModPagespeedRewriteDeadlinePerFlushMs 1
+
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedCriticalImagesBeaconEnabled true
+ ModPagespeedEnableFilters collapse_whitespace,extend_cache,recompress_images
+
+ # Assume that Varnish (or any other downstream cache) is running at
+ # localhost:8020.
+ # Enable the downstream caching feature and specify a rebeaconing key.
+ ModPagespeedDownstreamCachePurgeLocationPrefix "http://localhost:8020"
+ ModPagespeedDownstreamCachePurgeMethod "PURGE"
+ ModPagespeedDownstreamCacheRebeaconingKey random_rebeaconing_key
+</Directory>
+
+<Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/public" >
+ Header set Cache-control "public,max-age=600"
+ ModPagespeedPreserveUrlRelativity off
+</Directory>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName downstreamcacheresource.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedDownstreamCachePurgeLocationPrefix \
+ "http://localhost:@@APACHE_SECONDARY_PORT@@/purge"
+</VirtualHost>
+
+# Set up a reverse proxy (rproxy.) and origin (origin.) as vhosts for
+# showing that we can configure PageSpeed via response headers.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName rproxy.rmcomments.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedDisableFilters add_instrumentation,remove_comments
+ # Note that we don't enable remove_comments here; that setting comes from
+ # the response headers from origin.rmcomments.example.com
+ ProxyRemote * http://localhost:@@APACHE_SECONDARY_PORT@@
+ ProxyPass / http://origin.rmcomments.example.com/
+ ProxyPassReverse / http://origin.rmcomments.example.com/
+</VirtualHost>
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName origin.rmcomments.example.com
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeed unplugged
+ Header add PageSpeedFilters remove_comments
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName renderedimagebeacon.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters resize_rendered_image_dimensions
+ ModPagespeedCriticalImagesBeaconEnabled true
+
+ # add_instrumentation tests beacon handling with no handler specified, while
+ # here we test it with a handler (that should do nothing).
+ <Location /mod_pagespeed_beacon>
+ SetHandler mod_pagespeed_beacon
+ </Location>
+</VirtualHost>
+
+# Test host for explicit shared memory cache.
+ModPagespeedCreateSharedMemoryMetadataCache "@@MOD_PAGESPEED_CACHE@@_with_shm" 8192
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName shmcache.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_with_shm"
+
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+
+# Test hosts to cover all possible cache configurations. L1 will be filecache
+# or memcache depending on the setting of MEMCACHED_TEST. These four hosts are
+# for the four settings for the L2 cache.
+
+# 1. L2_d=LRU, L2_m=LRU
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName lrud-lrum.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_lrud_lrum"
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+ ModPagespeedLRUCacheKbPerProcess 1024
+ ModPagespeedLRUCacheByteLimit 2000
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+ModPagespeedCreateSharedMemoryMetadataCache "@@MOD_PAGESPEED_CACHE@@_lrud_shmm" 8192
+# 2. L2_d=LRU, L2_m=SHM
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName lrud-shmm.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_lrud_shmm"
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+ ModPagespeedLRUCacheKbPerProcess 1024
+ ModPagespeedLRUCacheByteLimit 2000
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+ModPagespeedCreateSharedMemoryMetadataCache "@@MOD_PAGESPEED_CACHE@@_noned_shmm" 8192
+# 3. L2_d=none, L2_m=SHM
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName noned-shmm.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_noned_shmm"
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+# 4. L2_d=none, L2_m=none
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName noned-nonem.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_noned_nonem"
+#MEMCACHED ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+# Test host for issue 809: losing extra headers with cache-control set early.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName issue809.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ header add Issue809 Issue809Value
+ header add Cache-Control max-age=60 early
+
+ # The tests using this vhost scan for css-combine URLs and will not find
+ # them if the combined css file is then css-minified. So we explicitly
+ # disable these and other distracting filters for easy output scanning.
+ #
+ # It is less effective to rely on 'level passthrough' because we might
+ # still inherit explicitly-enabled filters from the root configuration in
+ # some passes through system_test.sh.
+ ModPagespeedDisableFilters rewrite_css,add_instrumentation,flatten_css_imports
+ ModPagespeedEnableFilters rewrite_images,combine_css
+ ModPagespeedCriticalImagesBeaconEnabled false
+ ModPagespeedModifyCachingHeaders on
+</VirtualHost>
+
+# Build a configuration hierarchy where at the root we have turned on
+# OptimizeForBandwidth, and in various subdirectories we override settings
+# to make them more aggressive.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName optimizeforbandwidth.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedRewriteLevel OptimizeForBandwidth
+ ModPagespeedDisableFilters add_instrumentation
+
+ ModPagespeedBlockingRewriteKey psatest
+
+ <Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/optimize_for_bandwidth/inline_css" >
+ ModPagespeedEnableFilters inline_css
+ </Directory>
+ <Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/optimize_for_bandwidth/css_urls" >
+ ModPagespeedCssPreserveURLs off
+ </Directory>
+ <Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/optimize_for_bandwidth/image_urls" >
+ ModPagespeedImagePreserveURLs off
+ </Directory>
+ <Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/optimize_for_bandwidth/core_filters" >
+ ModPagespeedRewriteLevel CoreFilters
+ </Directory>
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName respectvary.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedRespectVary on
+ Header append Vary User-Agent
+</VirtualHost>
+
+#PER_VHOST_STATS ModPagespeedUsePerVHostStatistics on
+#PER_VHOST_STATS # Make sure we behave OK with this on, too.
+#PER_VHOST_STATS ModPagespeedFetchWithGzip on
+#NO_PER_VHOST_STATS # Overrides "ModPagespeedUsePerVHostStatistics on" above.
+#NO_PER_VHOST_STATS ModPagespeedUsePerVHostStatistics off
+
+# TODO(sligocki): Get rid of this once this is integrated into console.
+<Location /mod_pagespeed_temp_statistics_graphs>
+ SetHandler mod_pagespeed_temp_statistics_graphs
+</Location>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName purge.example.com
+
+ # Test purging individual URLs without flushing the entire metadata cache.
+ ModPagespeedEnableCachePurge on
+
+ ModPagespeedPurgeMethod PURGE
+ DocumentRoot "@@APACHE_DOC_ROOT@@/purge"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_purge"
+ ModPagespeedDisableFilters add_instrumentation
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css
+</VirtualHost>
+
+# For testing ModPagespeed off, but with a directory-scope turning it
+# back on.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName psoff-dir-on.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@/purge"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_mpsoff_dir_on"
+ ModPagespeed off
+ ModPagespeedEnableCachePurge on
+ ModPagespeedPurgeMethod PURGE
+ ModPagespeedDisableFilters add_instrumentation
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css
+ <Directory "@@APACHE_DOC_ROOT@@/purge">
+ ModPagespeed on
+ </Directory>
+</VirtualHost>
+
+# For testing ModPagespeed off, but with an htaccess turning it
+# back on.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName psoff-htaccess-on.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@/purge"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_mpsoff_htaccess_on"
+ ModPagespeed off
+ ModPagespeedEnableCachePurge on
+ ModPagespeedPurgeMethod PURGE
+ ModPagespeedDisableFilters add_instrumentation
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_css
+ <Directory "@@APACHE_DOC_ROOT@@/purge">
+ AllowOverride All
+ </Directory>
+</VirtualHost>
+
+ModPagespeedMessagesDomains Allow messages-allowed.example.com
+ModPagespeedMessagesDomains Allow cleared-inherited.example.com
+ModPagespeedMessagesDomains Allow cleared-inherited-reallowed.example.com
+ModPagespeedMessagesDomains Allow more-messages-allowed.example.com
+ModPagespeedMessagesDomains Allow anything-*-wildcard.example.com
+ModPagespeedMessagesDomains Allow localhost
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName messages-allowed.example.com
+ ServerAlias messages-not-allowed.example.com
+ ServerAlias more-messages-allowed.example.com
+ ServerAlias anything-a-wildcard.example.com
+ ServerAlias anything-b-wildcard.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName messages-still-not-allowed.example.com
+ ServerAlias but-this-message-allowed.example.com
+ ServerAlias and-this-one.example.com
+ ModPagespeedMessagesDomains Allow but-this-message-allowed.example.com
+ ModPagespeedMessagesDomains Allow and-this-one.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName cleared-inherited.example.com
+ ServerAlias cleared-inherited-reallowed.example.com
+ ServerAlias messages-allowed-at-vhost.example.com
+ ServerAlias messages-not-allowed-at-vhost.example.com
+ ServerAlias anything-c-wildcard.example.com
+ ModPagespeedMessagesDomains Disallow *
+ ModPagespeedMessagesDomains Allow cleared-inherited-reallowed.example.com
+ ModPagespeedMessagesDomains Allow messages-allowed-at-vhost.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName cleared-inherited-unlisted.example.com
+
+ ModPagespeedMessagesDomains Allow *
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName nothing-allowed.example.com
+ ModPagespeedMessagesDomains Disallow *
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName nothing-explicitly-allowed.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName everything-explicitly-allowed.example.com
+ ServerAlias everything-explicitly-allowed-but-aliased.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedStatisticsDomains Allow everything-explicitly-allowed.example.com
+ ModPagespeedGlobalStatisticsDomains \
+ Allow everything-explicitly-allowed.example.com
+ ModPagespeedMessagesDomains Allow everything-explicitly-allowed.example.com
+ ModPagespeedConsoleDomains Allow everything-explicitly-allowed.example.com
+ ModPagespeedAdminDomains Allow everything-explicitly-allowed.example.com
+ ModPagespeedGlobalAdminDomains Allow everything-explicitly-allowed.example.com
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName debug-filters.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters debug
+</VirtualHost>
+
+#STATS_LOGGING ModPagespeedStatistics on
+#STATS_LOGGING ModPagespeedStatisticsLogging on
+#STATS_LOGGING ModPagespeedLogDir "@@MOD_PAGESPEED_LOG@@"
+#STATS_LOGGING ModPagespeedStatisticsLoggingIntervalMs 10
+
+#SPELING # Enable mod_speling to ensure that we don't regress Issue 194
+#SPELING <IfModule !speling_module>
+#SPELING LoadModule speling_module @@APACHE_MODULES@@/mod_speling.so
+#SPELING </IfModule>
+#SPELING CheckSpelling on
+
+#GZIP ModPagespeedFetchWithGzip on
+#GZIP SetOutputFilter DEFLATE
+
+#STRESS # These lines are only needed for the stress test.
+#STRESS <Directory "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/cgi" >
+#STRESS Options +ExecCGI
+#STRESS </Directory>
+#STRESS AddHandler cgi-script .cgi
+#STRESS <IfVersion >= 2.4.2>
+#STRESS <IfModule !slotmem_shm_module>
+#STRESS LoadModule slotmem_shm_module @@APACHE_MODULES@@/mod_slotmem_shm.so
+#STRESS </IfModule>
+#STRESS </IfVersion>
+
+# Globally set the message-buffer size during load-tests.
+#
+# TODO(jmarantz): add a test-path where we set this in a VirtualHost and
+# make sure the right error message comes out.
+#LOADTEST # Don't want global slurp inheriting into :8080
+#LOADTEST ModPagespeedInheritVHostConfig off
+#LOADTEST ModPagespeedMessageBufferSize 100000
+#LOADTEST
+#LOADTEST # We want backtraces to be dumped during the load test, as
+#LOADTEST # one of its purposes is to discover crash bugs.
+#LOADTEST ModPagespeedInstallCrashHandler on
+#LOADTEST
+#LOADTEST # These lines are used for large-scale code coverage testing.
+#LOADTEST # We use 2 servers for it, one doing rewriting and fetching
+#LOADTEST # from the other one, which plays back slurps
+#LOADTEST Listen 8081
+#LOADTEST <VirtualHost *:8080>
+#LOADTEST ModPagespeed on
+#LOADTEST # Proxy using SERF
+#LOADTEST ModPagespeedTestProxy on
+#LOADTEST ModPagespeedFetchProxy "127.0.0.1:8081"
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlaceResourceOptimization on
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlaceWaitForOptimized on
+#IPRO_PRESERVE_LOADTEST ModPagespeedCssPreserveURLs on
+#IPRO_PRESERVE_LOADTEST ModPagespeedImagePreserveURLs on
+#IPRO_PRESERVE_LOADTEST ModPagespeedJsPreserveURLS on
+#LOADTEST
+#MEMCACHE_LOADTEST ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+#PURGING_LOADTEST ModPagespeedEnableCachePurge on
+#PURGING_LOADTEST ModPagespeedCompressMetadataCache on
+#LOADTEST ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_load_test"
+#LOADTEST ModPagespeedCompressMetadataCache true
+#LOADTEST ModPagespeedRewriteLevel AllFilters
+#IPRO_PRESERVE_LOADTEST ModPagespeedRewriteLevel OptimizeForBandwidth
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters add_instrumentation
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters local_storage_cache,debug
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters lazyload_images
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters add_base_tag
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters dedup_inlined_images
+#IPRO_PRESERVE_LOADTEST ModPagespeedDisableFilters convert_jpeg_to_webp
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlaceRewriteDeadlineMs 1000
+#IPRO_PRESERVE_LOADTEST
+#IPRO_PRESERVE_LOADTEST # We turn preemptive rewriting off for bandwidth
+#IPRO_PRESERVE_LOADTEST # tests driven by recursive wget, where the image
+#IPRO_PRESERVE_LOADTEST # requests will come too quickly to allow any
+#IPRO_PRESERVE_LOADTEST # image rewrites to finish. Leaving preemptive
+#IPRO_PRESERVE_LOADTEST # rewrites enabled causes redundant image rewrites
+#IPRO_PRESERVE_LOADTEST # to occur.
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlacePreemptiveRewriteImages off
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlacePreemptiveRewriteCss off
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlacePreemptiveRewriteCssImages off
+#IPRO_PRESERVE_LOADTEST ModPagespeedInPlacePreemptiveRewriteJavascript off
+#LOADTEST
+#LOADTEST       # flush_subresources doesn't work in MPS but disables
+#LOADTEST # combine_css and combine_javascript, so we don't want to
+#LOADTEST # load-test it.
+#LOADTEST ModPagespeedDisableFilters flush_subresources
+#LOADTEST ModPagespeedEnableFilters elide_attributes
+#DOMAIN_AUTH_LOADTEST ModPagespeedDomain *
+#IUR_LOADTEST ModPagespeedInlineResourcesWithoutExplicitAuthorization Script,Stylesheet
+#LOADTEST ModPagespeedFileCacheSizeKb 1024000
+#LOADTEST ModPagespeedFileCacheCleanIntervalMs 120000
+#LOADTEST ModPagespeedLRUCacheKbPerProcess 1024
+#LOADTEST ModPagespeedLRUCacheByteLimit 16384
+#LOADTEST ModPagespeedCssFlattenMaxBytes 102400
+#LOADTEST ModPagespeedCssInlineMaxBytes 2048
+#LOADTEST ModPagespeedCssImageInlineMaxBytes 0
+#LOADTEST ModPagespeedImageInlineMaxBytes 3072
+#LOADTEST ModPagespeedMaxInlinedPreviewImagesIndex -1
+#LOADTEST ModPagespeedMinImageSizeLowResolutionBytes 3072
+#LOADTEST ModPagespeedJsInlineMaxBytes 2048
+#LOADTEST ModPagespeedCssOutlineMinBytes 3000
+#LOADTEST ModPagespeedJsOutlineMinBytes 3000
+#LOADTEST ModPagespeedImageMaxRewritesAtOnce 8
+#LOADTEST ModPagespeedSlurpFlushLimit 8192
+#LOADTEST ModPagespeedJpegRecompressionQuality -1
+#LOADTEST ModPagespeedImageLimitOptimizedPercent 100
+#LOADTEST ModPagespeedImageLimitResizeAreaPercent 100
+#LOADTEST
+#LOADTEST # Enabling beacons is not interesting in our current
+#LOADTEST # load-test framework as the load-tester doesn't run JS
+#LOADTEST # and thus can't send back any beacons. This means we
+#LOADTEST # won't be able to test inline images, since we don't know
+#LOADTEST # which images are critical. Better to turn it off.
+#LOADTEST # TODO(jud): Load-test the critical images beacon in a
+#LOADTEST # context that sends back beacons.
+#LOADTEST ModPagespeedCriticalImagesBeaconEnabled false
+#LOADTEST
+#LOADTEST <Location /mod_pagespeed_statistics>
+#LOADTEST Order allow,deny
+#LOADTEST Allow from localhost
+#LOADTEST SetHandler mod_pagespeed_statistics
+#LOADTEST </Location>
+#LOADTEST <Location /mod_pagespeed_message>
+#LOADTEST Allow from localhost
+#LOADTEST Allow from 127.0.0.1
+#LOADTEST SetHandler mod_pagespeed_message
+#LOADTEST </Location>
+#LOADTEST </VirtualHost>
+#LOADTEST
+#LOADTEST <VirtualHost *:8081>
+#LOADTEST ModPagespeed on
+#LOADTEST
+#LOADTEST ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@-alt"
+#LOADTEST ModPagespeedRewriteLevel PassThrough
+#LOADTEST ModPagespeedInPlaceResourceOptimization off
+#LOADTEST
+#LOADTEST # ModPagespeedSlurpDirectory ...
+#LOADTEST # ModPagespeedSlurpReadOnly on
+#LOADTEST
+#LOADTEST <Location /mod_pagespeed_message>
+#LOADTEST Allow from localhost
+#LOADTEST Allow from 127.0.0.1
+#LOADTEST SetHandler mod_pagespeed_message
+#LOADTEST </Location>
+#LOADTEST </VirtualHost>
+
+
+#PROXY # This is used for ProxyPass testing.
+#PROXY # See: http://code.google.com/p/modpagespeed/issues/detail?id=74
+#PROXY # We use 2 servers for it, one doing rewriting and fetching
+#PROXY # from the other one which does not have mod_pagespeed enabled.
+#PROXY Listen 8081
+#PROXY <VirtualHost *:8080>
+#PROXY # Host at 8080 should have no relevant content.
+#PROXY DocumentRoot /tmp/
+#PROXY
+#PROXY # Turn these declarations back on during testing if you are
+#PROXY # having trouble distinguishing which server is saying what.
+#PROXY #ErrorLog "@@MOD_PAGESPEED_CACHE@@logs/error_log"
+#PROXY #CustomLog "@@MOD_PAGESPEED_CACHE@@logs/access_log" common
+#PROXY
+#PROXY ModPagespeed on
+#PROXY
+#PROXY ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+#PROXY
+#PROXY ModPagespeedRewriteLevel PassThrough
+#PROXY ModPagespeedEnableFilters extend_cache
+#PROXY ModPagespeedDomain *
+#PROXY
+#PROXY # Test depends upon URLs being absolutified.
+#PROXY ModPagespeedPreserveUrlRelativity off
+#PROXY
+#PROXY # Proxy through to 8081.
+#PROXY ProxyPass / http://localhost:8081/
+#PROXY </VirtualHost>
+#PROXY
+#PROXY <VirtualHost *:8081>
+#PROXY # Host at 8081 sees into mod_pagespeed_examples directory.
+#PROXY DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/"
+#PROXY
+#PROXY #ErrorLog "@@MOD_PAGESPEED_CACHE@@-alt/logs/error_log"
+#PROXY #CustomLog "@@MOD_PAGESPEED_CACHE@@-alt/logs/access_log" common
+#PROXY
+#PROXY ModPagespeed off
+#PROXY ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@-alt"
+#PROXY ModPagespeedRewriteLevel PassThrough
+#PROXY </VirtualHost>
+
+#SLURP # This is used for Slurp testing. The initial slurp testing
+#SLURP # uses a slurp test dir pre-populated from source control,
+#SLURP # via a read-only slurp setup on port 8080. We also test slurp
+#SLURP # writing via an origin server on port 8081, and a slurp server
+#SLURP # on 8082 with slurping read-only off.
+#SLURP Listen 8081
+#SLURP Listen 8082
+#SLURP <VirtualHost *:8080>
+#SLURP ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+#SLURP ModPagespeedSlurpDirectory "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/slurp"
+#SLURP ModPagespeedSlurpReadOnly on
+#SLURP </VirtualHost>
+#SLURP <VirtualHost *:8081>
+#SLURP # Host at 8081 sees into mod_pagespeed_examples directory.
+#SLURP DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/"
+#SLURP
+#SLURP ModPagespeed off
+#SLURP ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@-alt"
+#SLURP ModPagespeedRewriteLevel PassThrough
+#SLURP </VirtualHost>
+#SLURP <VirtualHost *:8082>
+#SLURP ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@-alt2"
+#SLURP ModPagespeedSlurpDirectory @@TMP_SLURP_DIR@@
+#SLURP ModPagespeedSlurpReadOnly off
+#SLURP ModPagespeedDomain *
+#SLURP ModPagespeedMaxHtmlParseBytes 1000000
+#SLURP ModPagespeedSlurpFlushLimit 10000
+#SLURP </VirtualHost>
+
+#SHARED_MEM_LOCKS ModPagespeedSharedMemoryLocks on
+
+#HTTPS # This is used for testing https requests.
+#HTTPS <IfVersion >= 2.4>
+#HTTPS <IfModule !socache_shmcb_module>
+#HTTPS LoadModule socache_shmcb_module @@APACHE_MODULES@@/mod_socache_shmcb.so
+#HTTPS </IfModule>
+#HTTPS <IfModule !slotmem_shm_module>
+#HTTPS LoadModule slotmem_shm_module @@APACHE_MODULES@@/mod_slotmem_shm.so
+#HTTPS </IfModule>
+#HTTPS </IfVersion>
+
+#HTTPS Include conf/extra/httpd-ssl.conf
+#HTTPS <IfModule !ssl_module>
+#HTTPS LoadModule ssl_module @@APACHE_MODULES@@/mod_ssl.so
+#HTTPS </IfModule>
+#HTTPS SSLRandomSeed startup builtin
+#HTTPS SSLRandomSeed connect builtin
+#HTTPS ModPagespeedMapOriginDomain http://@@APACHE_DOMAIN@@ https://@@APACHE_HTTPS_DOMAIN@@
+
+#EXPERIMENT_GA # This is used for testing the experiment framework.
+#EXPERIMENT_GA ModPagespeedRunExperiment on
+#EXPERIMENT_GA ModPagespeedAnalyticsID "123-45-6734"
+#EXPERIMENT_GA ModPagespeedUseAnalyticsJs false
+#EXPERIMENT_GA ModPagespeedExperimentVariable 2
+#EXPERIMENT_GA ModPagespeedExperimentSpec "id=7;enable=recompress_images,rewrite_javascript;disable=convert_jpeg_to_progressive;percent=50;options=AvoidRenamingIntrospectiveJavascript=off,JsInlineMaxBytes=4"
+#EXPERIMENT_GA ModPagespeedExperimentSpec "id=2;enable=recompress_images,rewrite_javascript;percent=50;options=AvoidRenamingIntrospectiveJavascript=on"
+#EXPERIMENT_GA ModPagespeedExperimentSpec "id=3;default;percent=0"
+
+#EXPERIMENT_NO_GA # This is used for testing the experiment framework still
+#EXPERIMENT_NO_GA # works when no analytics ID is specified. It should assign
+#EXPERIMENT_NO_GA # users to experiments and use appropriate experimental
+#EXPERIMENT_NO_GA # options but not report back to Google Analytics. The
+#EXPERIMENT_NO_GA # instrumentation beacon, however, will still contain the
+#EXPERIMENT_NO_GA # experiment id.
+#EXPERIMENT_NO_GA ModPagespeedRunExperiment on
+#EXPERIMENT_NO_GA ModPagespeedExperimentVariable 2
+#EXPERIMENT_NO_GA ModPagespeedExperimentSpec "id=7;enable=recompress_images,rewrite_javascript;disable=convert_jpeg_to_progressive;percent=50;options=AvoidRenamingIntrospectiveJavascript=off,JsInlineMaxBytes=4;invalid=this-should-print-a-warning"
+#EXPERIMENT_NO_GA ModPagespeedExperimentSpec "id=2;enable=recompress_images,rewrite_javascript;percent=50;options=AvoidRenamingIntrospectiveJavascript=on"
+#EXPERIMENT_NO_GA ModPagespeedExperimentSpec "id=3;default;percent=0"
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName experiment.devicematch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedRewriteLevel PassThrough
+
+ ModPagespeedRunExperiment on
+ ModPagespeedUseAnalyticsJs false
+ ModPagespeedExperimentSpec "id=1;percent=100;matches_device_type=mobile;enable=recompress_images"
+</VirtualHost>
+
+# Support for embedded configurations, where image flags in the
+# VirtualHost serving HTML are not the same as those in the one serving resources,
+# and thus we must embed the image flags in the rewritten image URLs.
+#
+# Note that we test with two distinct caches.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName embed-config-html.example.org
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedAddOptionsToUrls on
+ ModPagespeedJpegRecompressionQuality 73
+ ModPagespeedDisableFilters inline_css,extend_cache,inline_javascript
+ ModPagespeedDomain embed-config-resources.example.com
+
+ # Share a cache keyspace with embed-config-resources.example.com.
+ ModPagespeedCacheFragment "embed-config"
+
+ ModPagespeedLoadFromFile "http://embed-config-resources.example.com/" \
+ "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName embed-config-resources.example.com
+
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@"
+
+ ModPagespeedAddOptionsToUrls on
+ # Note that we do not set the jpeg quality here, but take
+ # it from image URL query parameters that we synthesize in
+ # from embed-config-html.example.com.
+
+ # Share a cache keyspace with embed-config-html.example.org.
+ ModPagespeedCacheFragment "embed-config"
+
+ ModPagespeedLoadFromFile "http://embed-config-resources.example.com/" \
+ "@@APACHE_DOC_ROOT@@/mod_pagespeed_example/"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ipro-for-browser.example.com
+ ModPagespeedEnableFilters rewrite_images,rewrite_css
+ ModPagespeedEnableFilters convert_to_webp_lossless
+ ModPagespeedEnableFilters in_place_optimize_for_browser
+ ModPagespeedJpegRecompressionQuality 75
+ ModPagespeedWebpRecompressionQuality 70
+ ModPagespeedInPlaceResourceOptimization on
+ ModPagespeedAllowVaryOn "Accept"
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_ipro_for_browser"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ipro-for-browser-vary-on-auto.example.com
+ ModPagespeedEnableFilters rewrite_images,rewrite_css
+ ModPagespeedEnableFilters convert_to_webp_lossless
+ ModPagespeedEnableFilters convert_to_webp_animated
+ ModPagespeedEnableFilters in_place_optimize_for_browser
+ ModPagespeedInPlaceResourceOptimization on
+ # ModPagespeedAllowVaryOn "Auto" # Default is "Auto"
+ ModPagespeedImageRecompressionQuality 90
+ ModPagespeedJpegRecompressionQuality 75
+ ModPagespeedJpegRecompressionQualityForSmallScreens 55
+ ModPagespeedJpegQualityForSaveData 35
+ ModPagespeedWebpRecompressionQuality 70
+ ModPagespeedWebpRecompressionQualityForSmallScreens 50
+ ModPagespeedWebpQualityForSaveData 30
+ ModPagespeedWebpAnimatedRecompressionQuality 60
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_ipro_for_browser"
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ipro-for-browser-vary-on-none.example.com
+ ModPagespeedEnableFilters rewrite_images,in_place_optimize_for_browser
+ ModPagespeedInPlaceResourceOptimization on
+ ModPagespeedAllowVaryOn "None"
+ ModPagespeedImageRecompressionQuality 75
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_example"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_ipro_for_browser"
+</VirtualHost>
+
+# For testing ModPagespeed unplugged.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName mpsunplugged.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeed unplugged
+</VirtualHost>
+
+# For testing ModPagespeed off.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName mpsoff.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_mpsoff"
+ ModPagespeed off
+</VirtualHost>
+
+# For testing how we handle process-scope options.
+ModPagespeedIproMaxResponseBytes 1048576001
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ps1.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_mpr"
+ ModPagespeedIproMaxResponseBytes 1048576002
+ ModPagespeedEnableFilters debug
+</VirtualHost>
+
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName ps2.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_mpr"
+ ModPagespeedIproMaxResponseBytes 1048576003
+ ModPagespeedEnableFilters debug
+</VirtualHost>
+
+# For testing with a custom origin header. In this VirtualHost,
+# /mod_pagespeed_test is included in our DocumentRoot and thus does
+# not need to be in any resource URL paths. This helps us verify that
+# we are looping back to the correct VirtualHost -- if we hit the wrong
+# one it will not work. Also we don't have a VirtualHost for
+# sharedcdn.example.com, so the default Host header used for
+# origin-mapping won't work either. Instead, we want origin-fetches
+# to go back to this VirtualHost so we rely on the new third optional
+# argument to MapOriginDomain.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName customhostheader.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_test"
+ ModPagespeed on
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedEnableFilters rewrite_images
+ ModPagespeedMapOriginDomain \
+ localhost:@@APACHE_SECONDARY_PORT@@/customhostheader \
+ sharedcdn.example.com/test \
+ customhostheader.example.com
+ ModPagespeedJpegRecompressionQuality 50
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+# Help tests detect whether mod_spdy is loaded. Only sees it if it's loaded
+# before us, unfortunately, but we can at least make it so during our own
+# testing.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName spdy.example.com
+ <IfModule !spdy_module>
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/nonspdy"
+ </IfModule>
+ <IfModule spdy_module>
+ DocumentRoot "@@APACHE_DOC_ROOT@@/mod_pagespeed_test/spdy"
+ </IfModule>
+</VirtualHost>
+
+# For testing fetching via mod_spdy.
+NameVirtualHost @@APACHE_HTTPS_DOMAIN@@
+<VirtualHost @@APACHE_HTTPS_DOMAIN@@>
+ ServerName spdyfetch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_spdyfetch"
+ ModPagespeed on
+ ModPagespeedFetchFromModSpdy on
+</VirtualHost>
+
+<VirtualHost @@APACHE_HTTPS_DOMAIN@@>
+ ServerName nospdyfetch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_nospdyfetch"
+ ModPagespeed on
+ ModPagespeedFetchFromModSpdy off
+</VirtualHost>
+
+# For testing where we do our loopback routing. Note that this is listening
+# on an unusual loopback IP.
+Listen 127.0.0.2:@@APACHE_TERTIARY_PORT@@
+<VirtualHost 127.0.0.2:@@APACHE_TERTIARY_PORT@@>
+ ServerName loopbackfetch.example.com
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_loopbackfetch"
+ ModPagespeedCriticalImagesBeaconEnabled false
+</VirtualHost>
+
+# Host for testing mobilization and proxy_suffix.
+<VirtualHost localhost:@@APACHE_SECONDARY_PORT@@>
+ ServerName @@PAGESPEED-TEST-HOST@@.suffix.net
+ ModPagespeedProxySuffix .suffix.net
+ DocumentRoot "@@APACHE_DOC_ROOT@@"
+ ModPagespeedFetchProxy localhost:@@APACHE_SECONDARY_PORT@@
+ ModPagespeedFileCachePath "@@MOD_PAGESPEED_CACHE@@_proxysuffix"
+ ModPagespeedRewriteLevel PassThrough
+ ModPagespeedMobLayout on
+ ModPagespeedMobNav on
+ ModPagespeedAlwaysMobilize on
+ ModPagespeedProxySuffix .suffix.net
+ ModPagespeedEnableFilters rewrite_domains,collapse_whitespace,mobilize
+ ModPagespeedEnableFilters rewrite_images,experiment_collect_mob_image_info,rewrite_css
+ ModPagespeedEnableFilters rewrite_domains,mobilize
+ ModPagespeedEnableFilters inline_javascript,rewrite_javascript
+ ModPagespeedDisableFilters add_instrumentation,inline_images
+ ModPagespeedDomainRewriteHyperlinks on
+ ModPagespeedCriticalImagesBeaconEnabled false
+ ModPagespeedRewriteDeadlinePerFlushMs 5000
+</VirtualHost>
+
+#ALL_DIRECTIVES # Invoke all ModPagespeed* directives to make sure they work:
+#ALL_DIRECTIVES ModPagespeedAllow foo
+#ALL_DIRECTIVES ModPagespeedAnalyticsID 1234
+#ALL_DIRECTIVES ModPagespeedAvoidRenamingIntrospectiveJavascript true
+#ALL_DIRECTIVES ModPagespeedAllowOptionsToBeSetByCookies true
+#ALL_DIRECTIVES ModPagespeedBeaconUrl "http://example.com/beacon"
+#ALL_DIRECTIVES ModPagespeedBlockingRewriteKey test
+#ALL_DIRECTIVES ModPagespeedCacheFlushFilename /tmp/cache.flush
+#ALL_DIRECTIVES ModPagespeedCacheFlushPollIntervalSec 10
+#ALL_DIRECTIVES ModPagespeedCacheFragment share-a-cache-please
+#ALL_DIRECTIVES ModPagespeedClientDomainRewrite false
+#ALL_DIRECTIVES ModPagespeedCollectRefererStatistics false
+#ALL_DIRECTIVES ModPagespeedCombineAcrossPaths true
+#ALL_DIRECTIVES ModPagespeedCompressMetadataCache true
+#ALL_DIRECTIVES ModPagespeedCriticalImagesBeaconEnabled true
+#ALL_DIRECTIVES ModPagespeedCreateSharedMemoryMetadataCache config 10000
+#ALL_DIRECTIVES ModPagespeedCssFlattenMaxBytes 2000
+#ALL_DIRECTIVES ModPagespeedCssImageInlineMaxBytes 2000
+#ALL_DIRECTIVES ModPagespeedCssInlineMaxBytes 2000
+#ALL_DIRECTIVES ModPagespeedCssOutlineMinBytes 2000
+#ALL_DIRECTIVES ModPagespeedCssPreserveURLs off
+#ALL_DIRECTIVES ModPagespeedDefaultSharedMemoryCacheKB 1000
+#ALL_DIRECTIVES ModPagespeedDisableFilters strip_scripts
+#ALL_DIRECTIVES ModPagespeedDisallow bar
+#ALL_DIRECTIVES ModPagespeedDomain http://example.com
+#ALL_DIRECTIVES ModPagespeedDomainRewriteHyperlinks true
+#ALL_DIRECTIVES ModPagespeedEnableAggressiveRewritersForMobile false
+#ALL_DIRECTIVES ModPagespeedEnableFilters extend_cache
+#ALL_DIRECTIVES ModPagespeedExperimentSpec "id=8;percent=10"
+#ALL_DIRECTIVES ModPagespeedExperimentVariable 3
+#ALL_DIRECTIVES ModPagespeedFetchProxy localhost:4321
+#ALL_DIRECTIVES ModPagespeedFetchWithGzip on
+#ALL_DIRECTIVES ModPagespeedFetcherTimeOutMs 1000
+#ALL_DIRECTIVES ModPagespeedFileCacheCleanIntervalMs 3600000
+#ALL_DIRECTIVES ModPagespeedFileCacheInodeLimit 10000
+#ALL_DIRECTIVES ModPagespeedFileCachePath /tmp/cache/
+#ALL_DIRECTIVES ModPagespeedFileCacheSizeKb 1000
+#ALL_DIRECTIVES ModPagespeedFinderPropertiesCacheExpirationTimeMs 300000
+#ALL_DIRECTIVES ModPagespeedForbidAllDisabledFilters true
+#ALL_DIRECTIVES ModPagespeedForbidFilters rewrite_images
+#ALL_DIRECTIVES ModPagespeedForceCaching off
+#ALL_DIRECTIVES ModPagespeedEnrollExperiment 3
+#ALL_DIRECTIVES ModPagespeedHashRefererStatistics false
+#ALL_DIRECTIVES ModPagespeedImageInlineMaxBytes 2000
+#ALL_DIRECTIVES ModPagespeedImageLimitOptimizedPercent 80
+#ALL_DIRECTIVES ModPagespeedImageLimitResizeAreaPercent 80
+#ALL_DIRECTIVES ModPagespeedImageMaxRewritesAtOnce 5
+#ALL_DIRECTIVES ModPagespeedImageRecompressionQuality 75
+#ALL_DIRECTIVES ModPagespeedImageResolutionLimitBytes 10000000
+#ALL_DIRECTIVES ModPagespeedImagePreserveURLs false
+#ALL_DIRECTIVES ModPagespeedImageWebpRecompressionQuality 85
+#ALL_DIRECTIVES ModPagespeedImageWebpRecompressionQualityForSmallScreens 85
+#ALL_DIRECTIVES ModPagespeedImplicitCacheTtlMs 60000
+#ALL_DIRECTIVES ModPagespeedLoadFromFileCacheTtlMs 60000
+#ALL_DIRECTIVES ModPagespeedInPlaceResourceOptimization on
+#ALL_DIRECTIVES ModPagespeedIncreaseSpeedTracking true
+#ALL_DIRECTIVES ModPagespeedInstallCrashHandler off
+#ALL_DIRECTIVES ModPagespeedJpegRecompressionQuality 80
+#ALL_DIRECTIVES ModPagespeedJsInlineMaxBytes 2000
+#ALL_DIRECTIVES ModPagespeedJsOutlineMinBytes 2000
+#ALL_DIRECTIVES ModPagespeedJsPreserveURLs off
+#ALL_DIRECTIVES ModPagespeedLazyloadImagesAfterOnload on
+#ALL_DIRECTIVES ModPagespeedLazyloadImagesBlankUrl "http://www.gstatic.com/psa/static/1.gif"
+#ALL_DIRECTIVES ModPagespeedLRUCacheByteLimit 1000
+#ALL_DIRECTIVES ModPagespeedLRUCacheKbPerProcess 1
+#ALL_DIRECTIVES ModPagespeedListOutstandingUrlsOnError on
+#ALL_DIRECTIVES ModPagespeedLoadFromFile http://example.com/ /var/html/example/
+#ALL_DIRECTIVES ModPagespeedLoadFromFileMatch "^http://example.com/" /var/html/example/
+#ALL_DIRECTIVES ModPagespeedLoadFromFileRule allow /var/html/example/
+#ALL_DIRECTIVES ModPagespeedLoadFromFileRuleMatch Disallow \.php.css$
+#ALL_DIRECTIVES ModPagespeedLogDir /tmp/log/
+#ALL_DIRECTIVES ModPagespeedLogRewriteTiming false
+#ALL_DIRECTIVES ModPagespeedLowercaseHtmlNames true
+#ALL_DIRECTIVES ModPagespeedMaxCombinedCssBytes 100000
+#ALL_DIRECTIVES ModPagespeedMaxCombinedJsBytes 100000
+#ALL_DIRECTIVES ModPagespeedMapOriginDomain example.com localhost
+#ALL_DIRECTIVES ModPagespeedMapRewriteDomain example.com static.example.com
+#ALL_DIRECTIVES ModPagespeedMaxImageSizeLowResolutionBytes 1000
+#ALL_DIRECTIVES ModPagespeedMaxInlinedPreviewImagesIndex 80
+#ALL_DIRECTIVES ModPagespeedMaxSegmentLength 100
+#ALL_DIRECTIVES ModPagespeedMemcachedServers localhost:@@MEMCACHED_PORT@@
+#ALL_DIRECTIVES ModPagespeedMemcachedThreads 1
+#ALL_DIRECTIVES ModPagespeedMessageBufferSize 100
+#ALL_DIRECTIVES ModPagespeedMinImageSizeLowResolutionBytes 2000
+#ALL_DIRECTIVES ModPagespeedModifyCachingHeaders true
+#ALL_DIRECTIVES ModPagespeedNumExpensiveRewriteThreads 2
+#ALL_DIRECTIVES ModPagespeedNumRewriteThreads 4
+#ALL_DIRECTIVES ModPagespeedOptionCookiesDurationMs 12345
+#ALL_DIRECTIVES ModPagespeedPreserveSubresourceHints on
+#ALL_DIRECTIVES ModPagespeedPreserveUrlRelativity on
+#ALL_DIRECTIVES ModPagespeedProgressiveJpegMinBytes 1000
+#ALL_DIRECTIVES ModPagespeedRateLimitBackgroundFetches true
+#ALL_DIRECTIVES ModPagespeedRefererStatisticsOutputLevel simple
+#ALL_DIRECTIVES ModPagespeedReportUnloadTime true
+#ALL_DIRECTIVES ModPagespeedRespectVary true
+#ALL_DIRECTIVES ModPagespeedRespectXForwardedProto off
+#ALL_DIRECTIVES ModPagespeedRetainComment "special"
+#ALL_DIRECTIVES ModPagespeedRewriteDeadlinePerFlushMs 100
+#ALL_DIRECTIVES ModPagespeedInPlaceRewriteDeadlineMs 100
+#ALL_DIRECTIVES ModPagespeedRewriteLevel CoreFilters
+#ALL_DIRECTIVES ModPagespeedRewriteRandomDropPercentage 0
+#ALL_DIRECTIVES ModPagespeedRunExperiment true
+#ALL_DIRECTIVES ModPagespeedShardDomain example.com 1.example.com,2.example.com
+#ALL_DIRECTIVES ModPagespeedSharedMemoryLocks true
+#ALL_DIRECTIVES ModPagespeedSlowFileLatencyUs 80000
+#ALL_DIRECTIVES ModPagespeedSlurpDirectory /tmp/slurp/
+#ALL_DIRECTIVES ModPagespeedSlurpFlushLimit 5
+#ALL_DIRECTIVES ModPagespeedSlurpReadOnly true
+#ALL_DIRECTIVES ModPagespeedStatistics true
+#ALL_DIRECTIVES ModPagespeedStatisticsLogging true
+#ALL_DIRECTIVES ModPagespeedStatisticsLoggingChartsCSS "example.com/css.css"
+#ALL_DIRECTIVES ModPagespeedStatisticsLoggingChartsJS "example.com/js.js"
+#ALL_DIRECTIVES ModPagespeedStatisticsLoggingFile /tmp/log/stats.log
+#ALL_DIRECTIVES ModPagespeedStatisticsLoggingIntervalMs 3000
+#ALL_DIRECTIVES ModPagespeedStatisticsLoggingMaxFileSizeKb 1024
+#ALL_DIRECTIVES ModPagespeedStickyQueryParameters something-private
+#ALL_DIRECTIVES ModPagespeedSupportNoScriptEnabled true
+#ALL_DIRECTIVES ModPagespeedTestProxy off
+#ALL_DIRECTIVES ModPagespeedUrlValuedAttribute span src Hyperlink
+#ALL_DIRECTIVES ModPagespeedUseAnalyticsJs false
+#ALL_DIRECTIVES ModPagespeedUseExperimentalJsMinifier on
+#ALL_DIRECTIVES ModPagespeedUsePerVHostStatistics on
+#ALL_DIRECTIVES ModPagespeedXHeaderValue "test"
+#ALL_DIRECTIVES ModPagespeedWebpRecompressionQuality 85
+#ALL_DIRECTIVES ModPagespeedWebpRecompressionQualityForSmallScreens 85
+#ALL_DIRECTIVES ModPagespeedImageJpegNumProgressiveScans 3
+#ALL_DIRECTIVES ModPagespeedImageJpegNumProgressiveScansForSmallScreens 2
diff --git a/src/install/debug_conf.v3.vcl b/src/install/debug_conf.v3.vcl
new file mode 100644
index 0000000..0cc5849
--- /dev/null
+++ b/src/install/debug_conf.v3.vcl
@@ -0,0 +1,177 @@
+/*
+ This is the minimal Varnish 3.x VCL configuration required for passing the
+ Apache mod_pagespeed system tests. To install varnish and start the varnish
+ server at the right port, do the following:
+ 1) sudo apt-get install varnish
+ 2) sudo vim /etc/default/varnish and put in the following lines at the
+ bottom of the file:
+ DAEMON_OPTS="-a :8020 \
+ -T localhost:6082 \
+ -f /etc/varnish/default.vcl \
+ -S /etc/varnish/secret \
+ -s file,/var/lib/varnish/$INSTANCE/varnish_storage.bin,1G"
+  3) sudo cp /path/to/install/debug_conf.v3.vcl /etc/varnish/default.vcl
+ 4) sudo service varnish restart
+*/
+
+import std;
+
+# Block 1: Define upstream server's host and port.
+backend default {
+ # Location of PageSpeed server.
+ .host = "127.0.0.1";
+ .port = "8080";
+}
+
+# Block 2: Define a key based on the User-Agent which can be used for hashing.
+# Also set the PS-CapabilityList header for PageSpeed server to respect.
+sub generate_user_agent_based_key {
+ # Define placeholder PS-CapabilityList header values for large and small
+ # screens with no UA dependent optimizations. Note that these placeholder
+ # values should not contain any of ll, ii, dj, jw or ws, since these
+ # codes will end up representing optimizations to be supported for the
+ # request.
+ set req.http.default_ps_capability_list_for_large_screens = "LargeScreen.SkipUADependentOptimizations:";
+ set req.http.default_ps_capability_list_for_small_screens = "TinyScreen.SkipUADependentOptimizations:";
+
+ # As a fallback, the PS-CapabilityList header that is sent to the upstream
+ # PageSpeed server should be for a large screen device with no browser
+ # specific optimizations.
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;
+
+ # Cache-fragment 1: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii) and defer_javascript (dj).
+ # Note: Wget is added for testing purposes only.
+ if (req.http.User-Agent ~ "(?i)Chrome/|Firefox/|Trident/6\.|Safari|Wget") {
+ set req.http.PS-CapabilityList = "ll,ii,dj:";
+ }
+ # Cache-fragment 2: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii), defer_javascript (dj), webp (jw) and lossless_webp
+ # (ws).
+ if (req.http.Accept ~ "webp") {
+ set req.http.PS-CapabilityList = "ll,ii,dj,jw,ws:";
+ }
+ # Cache-fragment 3: This fragment contains (a) Desktop User-Agents that
+ # should not map to fragments 1 or 2 and (b) all tablet User-Agents. These
+ # will only get optimizations that work on all browsers and use image
+ # compression qualities applicable to large screens. Note that even tablets
+ # that are capable of supporting inline or webp images, for e.g. Android
+ # 4.1.2, will not get these advanced optimizations.
+ if (req.http.User-Agent ~ "(?i)Firefox/[1-2]\.|bot|Yahoo!|Ruby|RPT-HTTPClient|(Google \(\+https\:\/\/developers\.google\.com\/\+\/web\/snippet\/\))|Android|iPad|TouchPad|Silk-Accelerated|Kindle Fire") {
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;
+ }
+ # Cache-fragment 4: Mobiles and small screen tablets will use image
+ # compression qualities applicable to small screens, but all other
+ # optimizations will be those that work on all browsers.
+ if (req.http.User-Agent ~ "(?i)Mozilla.*Android.*Mobile*|iPhone|BlackBerry|Opera Mobi|Opera Mini|SymbianOS|UP.Browser|J-PHONE|Profile/MIDP|portalmmm|DoCoMo|Obigo|Galaxy Nexus|GT-I9300|GT-N7100|HTC One|Nexus [4|7|S]|Xoom|XT907") {
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_small_screens;
+ }
+ # Remove placeholder header values.
+ remove req.http.default_ps_capability_list_for_large_screens;
+  remove req.http.default_ps_capability_list_for_small_screens;
+}
+
+sub vcl_hash {
+ # Block 3: Use the PS-CapabilityList value for computing the hash.
+ hash_data(req.http.PS-CapabilityList);
+}
+
+# Block 3a: Define ACL for purge requests
+acl purge {
+ # Purge requests are only allowed from localhost.
+ "localhost";
+ "127.0.0.1";
+}
+
+# Block 3b: Issue purge when there is a cache hit for the purge request.
+sub vcl_hit {
+ if (req.request == "PURGE") {
+ purge;
+ error 200 "Purged.";
+ } else {
+ # Send 5% of the HITs to the backend for instrumentation.
+ if (std.random(0, 100) <= 5) {
+ set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
+ return (pass);
+ }
+ }
+}
+
+# Block 3c: Issue a no-op purge when there is a cache miss for the purge
+# request.
+sub vcl_miss {
+ if (req.request == "PURGE") {
+ purge;
+ error 200 "Purged.";
+ } else {
+ # Send 25% of the MISSes to the backend for instrumentation.
+ if (std.random(0, 100) <= 25) {
+ set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
+ return (pass);
+ }
+ }
+}
+
+# Block 4: In vcl_recv, on receiving a request, call the method responsible for
+# generating the User-Agent based key for hashing into the cache.
+sub vcl_recv {
+ call generate_user_agent_based_key;
+
+ # We want to support beaconing filters, i.e., one or more of inline_images,
+ # lazyload_images, inline_preview_images or prioritize_critical_css are
+ # enabled. We define a placeholder constant called ps_should_beacon_key_value
+ # so that some percentages of hits and misses can be sent to the backend
+ # with this value used for the PS-ShouldBeacon header to force beaconing.
+ # This value should match the value of the DownstreamCacheRebeaconingKey
+ # pagespeed directive used by your backend server.
+ # WARNING: Do not use "random_rebeaconing_key" for your configuration, but
+ # instead change it to something specific to your site, to keep it secure.
+ set req.http.ps_should_beacon_key_value = "random_rebeaconing_key";
+ # Incoming PS-ShouldBeacon headers should not be allowed since this will allow
+ # external entities to force the server to instrument pages.
+ remove req.http.PS-ShouldBeacon;
+
+ # Block 3d: Verify the ACL for an incoming purge request and handle it.
+ if (req.request == "PURGE") {
+ if (!client.ip ~ purge) {
+ error 405 "Not allowed.";
+ }
+ return (lookup);
+ }
+ # Blocks which decide whether cache should be bypassed or not go here.
+ # Block 5a: Bypass the cache for .pagespeed. resource. PageSpeed has its own
+ # cache for these, and these could bloat up the caching layer.
+ if (req.url ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+") {
+ # Skip the cache for .pagespeed. resource. PageSpeed has its own
+ # cache for these, and these could bloat up the caching layer.
+ return (pass);
+ }
+ # Block 5b: Only cache responses to clients that support gzip. Most clients
+ # do, and the cache holds much more if it stores gzipped responses.
+ if (req.http.Accept-Encoding !~ "gzip") {
+ return (pass);
+ }
+}
+
+# Block 6: Mark HTML uncacheable by caches beyond our control.
+sub vcl_fetch {
+ if (beresp.http.Content-Type ~ "text/html") {
+ # Hide the upstream cache control headers.
+ remove beresp.http.ETag;
+ remove beresp.http.Last-Modified;
+ remove beresp.http.Cache-Control;
+ # Add no-cache Cache-Control header for html.
+ set beresp.http.Cache-Control = "no-cache, max-age=0";
+ }
+ return (deliver);
+}
+
+# Block 7: Add a header for identifying cache hits/misses.
+sub vcl_deliver {
+ set resp.http.PS-CapabilityList = req.http.PS-CapabilityList;
+ if (obj.hits > 0) {
+ set resp.http.X-Cache = "HIT";
+ } else {
+ set resp.http.X-Cache = "MISS";
+ }
+}
diff --git a/src/install/debug_conf.v4.vcl b/src/install/debug_conf.v4.vcl
new file mode 100644
index 0000000..6f48037
--- /dev/null
+++ b/src/install/debug_conf.v4.vcl
@@ -0,0 +1,168 @@
+/*
+ This is the minimal Varnish 4.x VCL configuration required for passing the
+ Apache mod_pagespeed system tests. To install varnish and start the varnish
+ server at the right port, do the following:
+ 1) sudo apt-get install varnish
+ 2) sudo vim /etc/default/varnish and put in the following lines at the
+ bottom of the file:
+ DAEMON_OPTS="-a :8020 \
+ -T localhost:6082 \
+ -f /etc/varnish/default.vcl \
+ -S /etc/varnish/secret \
+ -s file,/var/lib/varnish/$INSTANCE/varnish_storage.bin,1G"
+  3) sudo cp /path/to/install/debug_conf.v4.vcl /etc/varnish/default.vcl
+ 4) sudo service varnish restart
+*/
+
+# Marker to tell the VCL compiler that this VCL has been adapted to the
+# new 4.0 format.
+vcl 4.0;
+
+import std;
+
+# Block 1: Define upstream server's host and port.
+backend default {
+ # Location of PageSpeed server.
+ .host = "127.0.0.1";
+ .port = "8080";
+}
+
+# Block 2: Define a key based on the User-Agent which can be used for hashing.
+# Also set the PS-CapabilityList header for PageSpeed server to respect.
+sub generate_user_agent_based_key {
+ # Define placeholder PS-CapabilityList header values for large and small
+ # screens with no UA dependent optimizations. Note that these placeholder
+ # values should not contain any of ll, ii, dj, jw or ws, since these
+ # codes will end up representing optimizations to be supported for the
+ # request.
+ set req.http.default_ps_capability_list_for_large_screens = "LargeScreen.SkipUADependentOptimizations:";
+ set req.http.default_ps_capability_list_for_small_screens = "TinyScreen.SkipUADependentOptimizations:";
+
+ # As a fallback, the PS-CapabilityList header that is sent to the upstream
+ # PageSpeed server should be for a large screen device with no browser
+ # specific optimizations.
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;
+
+ # Cache-fragment 1: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii) and defer_javascript (dj).
+ # Note: Wget is added for testing purposes only.
+ if (req.http.User-Agent ~ "(?i)Chrome/|Firefox/|Trident/6\.|Safari|Wget") {
+ set req.http.PS-CapabilityList = "ll,ii,dj:";
+ }
+ # Cache-fragment 2: Desktop User-Agents that support lazyload_images (ll),
+ # inline_images (ii), defer_javascript (dj), webp (jw) and lossless_webp
+ # (ws).
+ if (req.http.Accept ~ "webp") {
+ set req.http.PS-CapabilityList = "ll,ii,dj,jw,ws:";
+ }
+ # Cache-fragment 3: This fragment contains (a) Desktop User-Agents that
+ # should not map to fragments 1 or 2 and (b) all tablet User-Agents. These
+ # will only get optimizations that work on all browsers and use image
+ # compression qualities applicable to large screens. Note that even tablets
+ # that are capable of supporting inline or webp images, for e.g. Android
+ # 4.1.2, will not get these advanced optimizations.
+ if (req.http.User-Agent ~ "(?i)Firefox/[1-2]\.|bot|Yahoo!|Ruby|RPT-HTTPClient|(Google \(\+https\:\/\/developers\.google\.com\/\+\/web\/snippet\/\))|Android|iPad|TouchPad|Silk-Accelerated|Kindle Fire") {
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;
+ }
+ # Cache-fragment 4: Mobiles and small screen tablets will use image
+ # compression qualities applicable to small screens, but all other
+ # optimizations will be those that work on all browsers.
+ if (req.http.User-Agent ~ "(?i)Mozilla.*Android.*Mobile*|iPhone|BlackBerry|Opera Mobi|Opera Mini|SymbianOS|UP.Browser|J-PHONE|Profile/MIDP|portalmmm|DoCoMo|Obigo|Galaxy Nexus|GT-I9300|GT-N7100|HTC One|Nexus [4|7|S]|Xoom|XT907") {
+ set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_small_screens;
+ }
+ # Remove placeholder header values.
+ unset req.http.default_ps_capability_list_for_large_screens;
+  unset req.http.default_ps_capability_list_for_small_screens;
+}
+
+sub vcl_hash {
+ # Block 3: Use the PS-CapabilityList value for computing the hash.
+ hash_data(req.http.PS-CapabilityList);
+}
+
+# Block 3a: Define ACL for purge requests
+acl purge {
+ # Purge requests are only allowed from localhost.
+ "localhost";
+ "127.0.0.1";
+}
+
+sub vcl_hit {
+ # Send 5% of the HITs to the backend for instrumentation.
+ if (std.random(0, 100) <= 5) {
+ set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
+ return (pass);
+ }
+}
+
+sub vcl_miss {
+ # Send 25% of the MISSes to the backend for instrumentation.
+ if (std.random(0, 100) <= 25) {
+ set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
+ return (pass);
+ }
+}
+
+# Block 4: In vcl_recv, on receiving a request, call the method responsible for
+# generating the User-Agent based key for hashing into the cache.
+sub vcl_recv {
+ call generate_user_agent_based_key;
+
+ # We want to support beaconing filters, i.e., one or more of inline_images,
+ # lazyload_images, inline_preview_images or prioritize_critical_css are
+ # enabled. We define a placeholder constant called ps_should_beacon_key_value
+ # so that some percentages of hits and misses can be sent to the backend
+ # with this value used for the PS-ShouldBeacon header to force beaconing.
+ # This value should match the value of the DownstreamCacheRebeaconingKey
+ # pagespeed directive used by your backend server.
+ # WARNING: Do not use "random_rebeaconing_key" for your configuration, but
+ # instead change it to something specific to your site, to keep it secure.
+ set req.http.ps_should_beacon_key_value = "random_rebeaconing_key";
+ # Incoming PS-ShouldBeacon headers should not be allowed since this will allow
+ # external entities to force the server to instrument pages.
+ unset req.http.PS-ShouldBeacon;
+
+ # Block 3d: Verify the ACL for an incoming purge request and handle it.
+ if (req.method == "PURGE") {
+ if (!client.ip ~ purge) {
+ return (synth(405,"Not allowed."));
+ }
+ return (purge);
+ }
+ # Blocks which decide whether cache should be bypassed or not go here.
+ # Block 5a: Bypass the cache for .pagespeed. resource. PageSpeed has its own
+ # cache for these, and these could bloat up the caching layer.
+ if (req.url ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+") {
+ # Skip the cache for .pagespeed. resource. PageSpeed has its own
+ # cache for these, and these could bloat up the caching layer.
+ return (pass);
+ }
+ # Block 5b: Only cache responses to clients that support gzip. Most clients
+ # do, and the cache holds much more if it stores gzipped responses.
+ if (req.http.Accept-Encoding !~ "gzip") {
+ return (pass);
+ }
+}
+
+# Block 6: Mark HTML uncacheable by caches beyond our control.
+sub vcl_backend_response {
+ if (beresp.http.Content-Type ~ "text/html") {
+ # Hide the upstream cache control headers.
+ unset beresp.http.ETag;
+ unset beresp.http.Last-Modified;
+ unset beresp.http.Cache-Control;
+ # Add no-cache Cache-Control header for html.
+ set beresp.http.Cache-Control = "no-cache, max-age=0";
+ }
+ return (deliver);
+}
+
+# Block 7: Add a header for identifying cache hits/misses.
+sub vcl_deliver {
+ set resp.http.PS-CapabilityList = req.http.PS-CapabilityList;
+ if (obj.hits > 0) {
+ set resp.http.X-Cache = "HIT";
+ } else {
+ set resp.http.X-Cache = "MISS";
+ }
+}
diff --git a/src/install/demo.conf b/src/install/demo.conf
new file mode 100644
index 0000000..2d43a05
--- /dev/null
+++ b/src/install/demo.conf
@@ -0,0 +1,404 @@
+SetOutputFilter DEFLATE
+SetEnvIfNoCase Request_URI \.(?:gif|jpe?g|png)$ no-gzip
+
+# Attempt to load mod_version if it wasn't loaded or compiled in (eg on Debian)
+<IfModule !mod_version.c>
+ LoadModule version_module /usr/lib/httpd/modules/mod_version.so
+</IfModule>
+
+<IfVersion < 2.4>
+ LoadModule pagespeed_module /usr/lib/httpd/modules/mod_pagespeed.so
+</IfVersion>
+<IfVersion >= 2.4.2>
+ LoadModule pagespeed_module /usr/lib/httpd/modules/mod_pagespeed_ap24.so
+</IfVersion>
+
+# Only attempt to load mod_deflate if it hasn't been loaded already.
+<IfModule !mod_deflate.c>
+ LoadModule deflate_module /usr/lib/httpd/modules/mod_deflate.so
+</IfModule>
+<IfModule pagespeed_module>
+ # Turn on mod_pagespeed. To completely disable mod_pagespeed, you
+ # can set this to "off".
+ ModPagespeed on
+
+
+ # For the http://modpagespeed.com/proxy_external_resource.html demo
+ ModPagespeedMapProxyDomain http://modpagespeed.com/static http://www.gstatic.com/psa/static
+
+#
+# BrowserMatch CriOS/36[.] mps_disable_webp
+# RequestHeader set PageSpeedFilters -convert_jpeg_to_webp \
+# env=mps_disable_webp
+
+ ModPagespeedEnableCachePurge on
+
+ # We want VHosts to inherit global configuration.
+ # If this is not included, they'll be independent (except for inherently
+ # global options), at least for backwards compatibility.
+ ModPagespeedInheritVHostConfig on
+
+ # Direct Apache to send all HTML output to the mod_pagespeed
+ # output handler.
+ AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER text/html
+
+ # If you want mod_pagespeed process XHTML as well, please uncomment this
+ # line.
+ # AddOutputFilterByType MOD_PAGESPEED_OUTPUT_FILTER application/xhtml+xml
+
+ # The ModPagespeedFileCachePath directory must exist and be writable
+ # by the apache user (as specified by the User directive).
+ ModPagespeedFileCachePath "/var/cache/mod_pagespeed/"
+
+ # LogDir is needed to store various logs, including the statistics log
+ # required for the console.
+ ModPagespeedLogDir "/var/log/pagespeed"
+
+ # The locations of SSL Certificates is distribution-dependent.
+ ModPagespeedSslCertDirectory "/etc/pki/tls/certs"
+ ModPagespeedSslCertFile /etc/pki/tls/cert.pem
+
+ # If you want, you can use one or more memcached servers as the store for
+ # the mod_pagespeed cache.
+ # ModPagespeedMemcachedServers localhost:11211
+
+ # A portion of the cache can be kept in memory only, to reduce load on disk
+ # (or memcached) from many small files.
+ # ModPagespeedCreateSharedMemoryMetadataCache "/var/cache/mod_pagespeed/" 51200
+
+ # Override the mod_pagespeed 'rewrite level'. The default level
+ # "CoreFilters" uses a set of rewrite filters that are generally
+ # safe for most web pages. Most sites should not need to change
+ # this value and can instead fine-tune the configuration using the
+ # ModPagespeedDisableFilters and ModPagespeedEnableFilters
+ # directives, below. Valid values for ModPagespeedRewriteLevel are
+ # PassThrough, CoreFilters and TestingCoreFilters.
+ #
+ # ModPagespeedRewriteLevel PassThrough
+
+ # Explicitly disables specific filters. This is useful in
+    # conjunction with ModPagespeedRewriteLevel. For instance, if one
+ # of the filters in the CoreFilters needs to be disabled for a
+ # site, that filter can be added to
+ # ModPagespeedDisableFilters. This directive contains a
+ # comma-separated list of filter names, and can be repeated.
+ #
+ # ModPagespeedDisableFilters rewrite_images
+
+ # Explicitly enables specific filters. This is useful in
+    # conjunction with ModPagespeedRewriteLevel. For instance, filters
+ # not included in the CoreFilters may be enabled using this
+ # directive. This directive contains a comma-separated list of
+ # filter names, and can be repeated.
+ #
+ # ModPagespeedEnableFilters rewrite_javascript,rewrite_css
+ # ModPagespeedEnableFilters collapse_whitespace,elide_attributes
+
+ # Explicitly forbids the enabling of specific filters using either query
+ # parameters or request headers. This is useful, for example, when we do
+ # not want the filter to run for performance or security reasons. This
+ # directive contains a comma-separated list of filter names, and can be
+ # repeated.
+ #
+ # ModPagespeedForbidFilters rewrite_images
+
+ # How long mod_pagespeed will wait to return an optimized resource
+ # (per flush window) on first request before giving up and returning the
+ # original (unoptimized) resource. After this deadline is exceeded the
+ # original resource is returned and the optimization is pushed to the
+ # background to be completed for future requests. Increasing this value will
+ # increase page latency, but might reduce load time (for instance on a
+ # bandwidth-constrained link where it's worth waiting for image
+ # compression to complete). If the value is less than or equal to zero
+ # mod_pagespeed will wait indefinitely for the rewrite to complete before
+ # returning.
+ #
+ # ModPagespeedRewriteDeadlinePerFlushMs 10
+
+ # ModPagespeedDomain
+ # authorizes rewriting of JS, CSS, and Image files found in this
+ # domain. By default only resources with the same origin as the
+ # HTML file are rewritten. For example:
+ #
+ # ModPagespeedDomain cdn.myhost.com
+ #
+ # This will allow resources found on http://cdn.myhost.com to be
+ # rewritten in addition to those in the same domain as the HTML.
+ #
+ # Other domain-related directives (like ModPagespeedMapRewriteDomain
+ # and ModPagespeedMapOriginDomain) can also authorize domains.
+ #
+ # Wildcards (* and ?) are allowed in the domain specification. Be
+ # careful when using them as if you rewrite domains that do not
+ # send you traffic, then the site receiving the traffic will not
+ # know how to serve the rewritten content.
+
+ # If you use downstream caches such as varnish or proxy_cache for caching
+ # HTML, you can configure pagespeed to work with these caches correctly
+ # using the following directives. Note that the values for
+ # ModPagespeedDownstreamCachePurgeLocationPrefix and
+ # ModPagespeedDownstreamCacheRebeaconingKey are deliberately left empty here
+ # in order to force the webmaster to choose appropriate value for these.
+ #
+ # ModPagespeedDownstreamCachePurgeLocationPrefix
+ # ModPagespeedDownstreamCachePurgeMethod PURGE
+ # ModPagespeedDownstreamCacheRewrittenPercentageThreshold 95
+ # ModPagespeedDownstreamCacheRebeaconingKey
+
+ # Other defaults (cache sizes and thresholds):
+ #
+ # ModPagespeedFileCacheSizeKb 102400
+ # ModPagespeedFileCacheCleanIntervalMs 3600000
+ # ModPagespeedLRUCacheKbPerProcess 1024
+ # ModPagespeedLRUCacheByteLimit 16384
+ # ModPagespeedCssFlattenMaxBytes 2048
+ # M.O.: bumped due to https://code.google.com/p/modpagespeed/issues/detail?id=1040
+ ModPagespeedCssInlineMaxBytes 3000
+ # ModPagespeedCssImageInlineMaxBytes 0
+ # ModPagespeedImageInlineMaxBytes 3072
+ # ModPagespeedJsInlineMaxBytes 2048
+ # ModPagespeedCssOutlineMinBytes 3000
+ # ModPagespeedJsOutlineMinBytes 3000
+ # ModPagespeedMaxCombinedCssBytes -1
+ # ModPagespeedMaxCombinedJsBytes 92160
+
+ # Limit the number of inodes in the file cache. Set to 0 for no limit.
+    # The default value if this parameter is not specified is 0 (no limit).
+ ModPagespeedFileCacheInodeLimit 500000
+
+ # Bound the number of images that can be rewritten at any one time; this
+ # avoids overloading the CPU. Set this to 0 to remove the bound.
+ #
+ # ModPagespeedImageMaxRewritesAtOnce 8
+
+ # You can also customize the number of threads per Apache process
+ # mod_pagespeed will use to do resource optimization. Plain
+ # "rewrite threads" are used to do short, latency-sensitive work,
+ # while "expensive rewrite threads" are used for actual optimization
+    # work that's more computationally expensive. If you leave these unset,
+ # or use values <= 0 the defaults will be used, which is 1 for both
+ # values when using non-threaded MPMs (e.g. prefork) and 4 for both
+ # on threaded MPMs (e.g. worker and event). These settings can only
+ # be changed globally, and not per virtual host.
+ #
+ # ModPagespeedNumRewriteThreads 4
+ # ModPagespeedNumExpensiveRewriteThreads 4
+
+ # Randomly drop rewrites (*) to increase the chance of optimizing
+ # frequently fetched resources and decrease the chance of optimizing
+ # infrequently fetched resources. This can reduce CPU load. The default
+    # value of this parameter is 0 (no drops). 90 means that a resource
+ # fetched once has a 10% probability of being optimized while a resource
+ # that is fetched 50 times has a 99.65% probability of being optimized.
+ #
+ # (*) Currently only CSS files and images are randomly dropped. Images
+ # within CSS files are not randomly dropped.
+ #
+ # ModPagespeedRewriteRandomDropPercentage 90
+
+ # Many filters modify the URLs of resources in HTML files. This is typically
+ # harmless but pages whose Javascript expects to read or modify the original
+ # URLs may break. The following parameters prevent filters from modifying
+ # URLs of their respective types.
+ #
+ # ModPagespeedJsPreserveURLs on
+ # ModPagespeedImagePreserveURLs on
+ # ModPagespeedCssPreserveURLs on
+
+ # When PreserveURLs is on, it is still possible to enable browser-specific
+ # optimizations (for example, webp images can be served to browsers that
+ # will accept them). They'll be served with Vary: Accept or Vary:
+ # User-Agent headers as appropriate. Note that this may require configuring
+ # reverse proxy caches such as varnish to handle these headers properly.
+ #
+    # ModPagespeedEnableFilters in_place_optimize_for_browser
+
+ # Internet Explorer has difficulty caching resources with Vary: headers.
+ # They will either be uncached (older IE) or require revalidation. See:
+ # http://blogs.msdn.com/b/ieinternals/archive/2009/06/17/vary-header-prevents-caching-in-ie.aspx
+ # As a result we serve them as Cache-Control: private instead by default.
+ # If you are using a reverse proxy or CDN configured to cache content with
+ # the Vary: Accept header you should turn this setting off.
+ #
+    # ModPagespeedPrivateNotVaryForIE on
+
+ # Settings for image optimization:
+ #
+ # Lossy image recompression quality (0 to 100, -1 just strips metadata):
+ # ModPagespeedImageRecompressionQuality 85
+ #
+ # Jpeg recompression quality (0 to 100, -1 uses ImageRecompressionQuality):
+ # ModPagespeedJpegRecompressionQuality -1
+ # ModPagespeedJpegRecompressionQualityForSmallScreens 70
+ #
+ # WebP recompression quality (0 to 100, -1 uses ImageRecompressionQuality):
+ # ModPagespeedWebpRecompressionQuality 80
+ # ModPagespeedWebpRecompressionQualityForSmallScreens 70
+ #
+ # Timeout for conversions to WebP format, in
+ # milliseconds. Negative values mean no timeout is applied. The
+ # default value is -1:
+ # ModPagespeedWebpTimeoutMs 5000
+ #
+ # Percent of original image size below which optimized images are retained:
+ # ModPagespeedImageLimitOptimizedPercent 100
+ #
+ # Percent of original image area below which image resizing will be
+ # attempted:
+ # ModPagespeedImageLimitResizeAreaPercent 100
+
+ # Settings for inline preview images
+ #
+ # Setting this to n restricts preview images to the first n images found on
+ # the page. The default of -1 means preview images can appear anywhere on
+ # the page (if those images appear above the fold).
+ # ModPagespeedMaxInlinedPreviewImagesIndex -1
+
+ # Sets the minimum size in bytes of any image for which a low quality image
+ # is generated.
+ # ModPagespeedMinImageSizeLowResolutionBytes 3072
+
+ # The maximum URL size is generally limited to about 2k characters
+ # due to IE: See http://support.microsoft.com/kb/208427/EN-US.
+ # Apache servers by default impose a further limitation of about
+ # 250 characters per URL segment (text between slashes).
+ # mod_pagespeed circumvents this limitation, but if you employ
+ # proxy servers in your path you may need to re-impose it by
+ # overriding the setting here. The default setting is 1024
+ # characters.
+ #
+ # ModPagespeedMaxSegmentLength 250
+
+ # Uncomment this if you want to prevent mod_pagespeed from combining files
+ # (e.g. CSS files) across paths
+ #
+ # ModPagespeedCombineAcrossPaths off
+
+ # Renaming JavaScript URLs can sometimes break them. With this
+ # option enabled, mod_pagespeed uses a simple heuristic to decide
+ # not to rename JavaScript that it thinks is introspective.
+ #
+ # You can uncomment this to let mod_pagespeed rename all JS files.
+ #
+ # ModPagespeedAvoidRenamingIntrospectiveJavascript off
+
+ # Certain common JavaScript libraries are available from Google, which acts
+ # as a CDN and allows you to benefit from browser caching if a new visitor
+ # to your site previously visited another site that makes use of the same
+ # libraries as you do. Enable the following filter to turn on this feature.
+ #
+ # ModPagespeedEnableFilters canonicalize_javascript_libraries
+
+ # The following line configures a library that is recognized by
+ # canonicalize_javascript_libraries. This will have no effect unless you
+ # enable this filter (generally by uncommenting the last line in the
+ # previous stanza). The format is:
+ # ModPagespeedLibrary bytes md5 canonical_url
+ # Where bytes and md5 are with respect to the *minified* JS; use
+ # js_minify --print_size_and_hash to obtain this data.
+ # Note that we can register multiple hashes for the same canonical url;
+ # we do this if there are versions available that have already been minified
+ # with more sophisticated tools.
+ #
+ # Additional library configuration can be found in
+ # pagespeed_libraries.conf included in the distribution. You should add
+ # new entries here, though, so that file can be automatically upgraded.
+ # ModPagespeedLibrary 43 1o978_K0_LNE5_ystNklf http://www.modpagespeed.com/rewrite_javascript.js
+
+ # Explicitly tell mod_pagespeed to load some resources from disk.
+ # This will speed up load time and update frequency.
+ #
+ # This should only be used for static resources which do not need
+ # specific headers set or other processing by Apache.
+ #
+ # Both URL and filesystem path should specify directories and
+ # filesystem path must be absolute (for now).
+ #
+ # ModPagespeedLoadFromFile "http://example.com/static/" "/var/www/static/"
+
+
+ # Enables server-side instrumentation and statistics. If this rewriter is
+  # enabled, then each rewritten HTML page will have instrumentation JavaScript
+ # added that sends latency beacons to /mod_pagespeed_beacon. These
+ # statistics can be accessed at /mod_pagespeed_statistics. You must also
+ # enable the mod_pagespeed_statistics and mod_pagespeed_beacon handlers
+ # below.
+ #
+ # ModPagespeedEnableFilters add_instrumentation
+
+ # The add_instrumentation filter sends a beacon after the page onload
+ # handler is called. The user might navigate to a new URL before this. If
+ # you enable the following directive, the beacon is sent as part of an
+ # onbeforeunload handler, for pages where navigation happens before the
+ # onload event.
+ #
+ # ModPagespeedReportUnloadTime on
+
+ # Uncomment the following line so that ModPagespeed will not cache or
+ # rewrite resources with Vary: in the header, e.g. Vary: User-Agent.
+ # Note that ModPagespeed always respects Vary: headers on html content.
+ # ModPagespeedRespectVary on
+
+ # Uncomment the following line if you want to disable statistics entirely.
+ #
+ # ModPagespeedStatistics off
+
+ # These handlers are central entry-points into the admin pages.
+ # By default, pagespeed_admin and pagespeed_global_admin present
+ # the same data, and differ only when
+ # ModPagespeedUsePerVHostStatistics is enabled. In that case,
+ # /pagespeed_global_admin sees aggregated data across all vhosts,
+ # and the /pagespeed_admin sees data only for a particular vhost.
+ #
+ # You may insert other "Allow from" lines to add hosts you want to
+ # allow to look at generated statistics. Another possibility is
+ # to comment out the "Order" and "Allow" options from the config
+ # file, to allow any client that can reach your server to access
+ # and change server state, such as statistics, caches, and
+ # messages. This might be appropriate in an experimental setup.
+ <Location /pagespeed_admin>
+# Order allow,deny
+# Allow from localhost
+# Allow from 127.0.0.1
+ SetHandler pagespeed_admin
+ ModPagespeedEnableFilters debug
+ ModPagespeedDisableFilters rewrite_javascript
+ </Location>
+ <Location /pagespeed_global_admin>
+# Order allow,deny
+# Allow from localhost
+# Allow from 127.0.0.1
+ SetHandler pagespeed_global_admin
+ ModPagespeedEnableFilters debug
+ ModPagespeedDisableFilters rewrite_javascript
+ </Location>
+
+ <Location /mod_pagespeed_statistics>
+ SetHandler mod_pagespeed_statistics
+ </Location>
+ <Location /mod_pagespeed_global_statistics>
+ SetHandler mod_pagespeed_global_statistics
+ </Location>
+ <Location /mod_pagespeed_message>
+ SetHandler mod_pagespeed_message
+ </Location>
+
+ # Enable logging of mod_pagespeed statistics, needed for the console.
+ ModPagespeedStatisticsLogging on
+
+ # Page /mod_pagespeed_message lets you view the latest messages from
+ # mod_pagespeed, regardless of log-level in your httpd.conf
+ # ModPagespeedMessageBufferSize is the maximum number of bytes you would
+ # like to dump to your /mod_pagespeed_message page at one time,
+ # its default value is 100k bytes.
+ # Set it to 0 if you want to disable this feature.
+ ModPagespeedMessageBufferSize 100000
+
+ <Location /off>
+ ModPagespeed off
+ </Location>
+
+ <Location /mod_pagespeed_log_request_headers.js>
+ SetHandler mod_pagespeed_log_request_headers
+ </Location>
+</IfModule>
diff --git a/src/install/do_not_modify/Puzzle.jpg b/src/install/do_not_modify/Puzzle.jpg
new file mode 100644
index 0000000..a119109
--- /dev/null
+++ b/src/install/do_not_modify/Puzzle.jpg
Binary files differ
diff --git a/src/install/do_not_modify/README b/src/install/do_not_modify/README
new file mode 100644
index 0000000..a8b0f06
--- /dev/null
+++ b/src/install/do_not_modify/README
@@ -0,0 +1,10 @@
+Files in this directory should not be modified or removed. They are used by
+system tests, such as serf_url_async_fetcher_test, which expects them to be
+specific golden sizes and contents.
+
+Make sure httpd.conf has this:
+
+ <Directory "/var/www/html/do_not_modify/cgi" >
+ Options +ExecCGI
+ </Directory>
+ AddHandler cgi-script .cgi
diff --git a/src/install/do_not_modify/cgi/slow_js.cgi b/src/install/do_not_modify/cgi/slow_js.cgi
new file mode 100755
index 0000000..8a7aeb8
--- /dev/null
+++ b/src/install/do_not_modify/cgi/slow_js.cgi
@@ -0,0 +1,8 @@
+#!/bin/bash
+# This cgi script just sleeps for a while then returns an image. It's meant to
+# simulate a server environment where some resources are dynamically generated
+# by a process which is subject to delay (e.g. mysql, php).
+sleep 10;
+echo Content-type: application/javascript
+echo
+cat ../small_javascript.js
diff --git a/src/install/do_not_modify/cgi/verify_post.cgi b/src/install/do_not_modify/cgi/verify_post.cgi
new file mode 100644
index 0000000..fa5a884
--- /dev/null
+++ b/src/install/do_not_modify/cgi/verify_post.cgi
@@ -0,0 +1,11 @@
+#!/bin/sh
+# This cgi script just checks whether we get a POST produced by
+# a test right.
+echo Content-type: text/html
+echo
+FORM_DATA=$(cat /dev/stdin)
+if [ "$FORM_DATA" = "a=b&c=d" ]; then
+ echo "PASS"
+else
+ echo "FAIL"
+fi
diff --git a/src/install/do_not_modify/evil.html b/src/install/do_not_modify/evil.html
new file mode 100644
index 0000000..06195ba
--- /dev/null
+++ b/src/install/do_not_modify/evil.html
@@ -0,0 +1,2 @@
+<!-- here is an evil file -->
+ it has extra whitespace
diff --git a/src/install/do_not_modify/favicon.d034f46c06475a27478e98ef5dff965e.ico b/src/install/do_not_modify/favicon.d034f46c06475a27478e98ef5dff965e.ico
new file mode 100644
index 0000000..f594697
--- /dev/null
+++ b/src/install/do_not_modify/favicon.d034f46c06475a27478e98ef5dff965e.ico
Binary files differ
diff --git a/src/install/do_not_modify/logo.e80d1c59a673f560785784fb1ac10959.gif b/src/install/do_not_modify/logo.e80d1c59a673f560785784fb1ac10959.gif
new file mode 100644
index 0000000..afa22bc
--- /dev/null
+++ b/src/install/do_not_modify/logo.e80d1c59a673f560785784fb1ac10959.gif
Binary files differ
diff --git a/src/install/do_not_modify/not_really_a_font.woff b/src/install/do_not_modify/not_really_a_font.woff
new file mode 100644
index 0000000..8491a06
--- /dev/null
+++ b/src/install/do_not_modify/not_really_a_font.woff
@@ -0,0 +1,2 @@
+This is not really font data, but for the purposes of this test
+it does not matter
diff --git a/src/install/do_not_modify/small_javascript.js b/src/install/do_not_modify/small_javascript.js
new file mode 100644
index 0000000..d5ce32c
--- /dev/null
+++ b/src/install/do_not_modify/small_javascript.js
@@ -0,0 +1 @@
+alert('hello world');
diff --git a/src/install/do_not_modify/unknown_file.unknown b/src/install/do_not_modify/unknown_file.unknown
new file mode 100644
index 0000000..b123c9c
--- /dev/null
+++ b/src/install/do_not_modify/unknown_file.unknown
@@ -0,0 +1 @@
+This file should not be proxied.
diff --git a/src/install/install_apxs.sh b/src/install/install_apxs.sh
new file mode 100755
index 0000000..0497ec7
--- /dev/null
+++ b/src/install/install_apxs.sh
@@ -0,0 +1,288 @@
+#!/bin/bash
+#
+# Install Page Speed, using the Apache apxs tool to determine the
+# installation locations.
+#
+# You can specify the path to apxs to install to a non-system-default
+# Apache.
+#
+# APXS_BIN=/path/to/apxs ./install_apxs.sh
+#
+# To install to a location that does not require superuser access:
+#
+# NO_SUDO=1 ./install_apxs.sh
+
+SRC_ROOT="$(dirname $0)/.."
+BUILD_ROOT="${SRC_ROOT}/out/Release"
+MODPAGESPEED_SO_PATH="${BUILD_ROOT}/libmod_pagespeed.so"
+MODPAGESPEED_SO24_PATH="${BUILD_ROOT}/libmod_pagespeed_ap24.so"
+PAGESPEED_JS_MINIFY_PATH="${BUILD_ROOT}/js_minify"
+
+BINDIR=${BINDIR:-"/usr/local/bin"}
+MOD_PAGESPEED_CACHE=${MOD_PAGESPEED_CACHE:-"/var/cache/mod_pagespeed"}
+MOD_PAGESPEED_LOG=${MOD_PAGESPEED_LOG:-"/var/log/pagespeed"}
+APACHE_CONF_FILENAME=${APACHE_CONF_FILENAME:-"httpd.conf"}
+MODPAGESPEED_SO_NAME=${MODPAGESPEED_SO_NAME:-"mod_pagespeed.so"}
+MODPAGESPEED_SO24_NAME=${MODPAGESPEED_SO24_NAME:-"mod_pagespeed_ap24.so"}
+MODPAGESPEED_CONF_NAME=${MODPAGESPEED_CONF_NAME:-"pagespeed.conf"}
+LIBRARIES_CONF_NAME=${LIBRARIES_CONF_NAME:-"pagespeed_libraries.conf"}
+PAGESPEED_JS_MINIFY_NAME=${PAGESPEED_JS_MINIFY_NAME:-"pagespeed_js_minify"}
+
+MODPAGESPEED_FILE_USER=${MODPAGESPEED_FILE_USER:-"root"}
+MODPAGESPEED_FILE_GROUP=${MODPAGESPEED_FILE_GROUP:-${MODPAGESPEED_FILE_USER}}
+SUDO_CMD=${SUDO_CMD:-"sudo"}
+
+# If NO_SUDO was specified, then we should use a user and group that
+# matches the current user and group when installing files.
+if [ ! -z "${NO_SUDO}" ]; then
+ MODPAGESPEED_FILE_USER=$USER
+ MODPAGESPEED_FILE_GROUP=$(groups | cut -d\ -f1)
+ SUDO_CMD=""
+fi
+
+# Load the script used to perform template substitutions on our config
+# files.
+source ${SRC_ROOT}/install/common/installer.include
+
+# Args: variable name
+#
+# Takes a variable name, and makes sure that it is set (non-empty).
+function is_set() {
+  # Indirect expansion avoids the old eval/echo round-trip, which word-split
+  # and glob-expanded the value and mis-reported values like "-n" (echo flags).
+  local VALUE="${!1}"
+  local RET=1
+  if [ ! -z "${VALUE}" ]; then
+    RET=0
+  fi
+  return $RET
+}
+
+# Args: variable name, expression, debug string
+#
+# Exits if the specified variable name does not have an assigned value
+# or if the expression evaluates to false.
+function check() {
+ if ! is_set "$1" || ! eval "$2"; then
+ echo "Unable to determine $3."
+ echo "Please set the $1 environment variable when invoking $0."
+ exit 1
+ fi
+}
+
+# Args: user, group, misc, src, dst
+#
+# Some hackery to get around the fact that 'install' refuses to take
+# owner/group arguments unless run as root.
+function do_install() {
+ local INST_USER_GROUP=""
+ if [ -z "${NO_SUDO}" ]; then
+ INST_USER_GROUP="-o $1 -g $2"
+ fi
+ eval "${SUDO_CMD} install $INST_USER_GROUP $3 $4 $5"
+}
+
+# Args: setting name
+# Result: left in the EXTRACTED_COMPILE_SETTING global ('' when not found).
+# Extract an Apache compile-time setting with the given name.
+function extract_compile_setting() {
+  EXTRACTED_COMPILE_SETTING=
+  APACHE_CONF_LINE=$(${APACHE_BIN} -V | grep "$1")
+  if [ ! -z "${APACHE_CONF_LINE}" ]; then
+    local SED_REGEX="s/^.*${1}=?[\"\'\ ]*//"
+    EXTRACTED_COMPILE_SETTING=$(echo "${APACHE_CONF_LINE}" |
+      sed -r "${SED_REGEX}" |
+      sed "s/[\"\'\ ]*$//")
+  fi
+}
+
+if [ ! -f "${MODPAGESPEED_SO_PATH}" ]; then
+ echo "${MODPAGESPEED_SO_PATH} doesn't exist. Need to build first."
+ exit 1
+fi
+
+# Find the apxs binary, if not specified.
+if [ -z "${APXS_BIN}" ]; then
+ APXS_BIN=$(which apxs 2> /dev/null)
+ if [ -z "${APXS_BIN}" ]; then
+ APXS_BIN=$(which apxs2 2> /dev/null)
+ fi
+ if [ -z "${APXS_BIN}" ]; then
+ # Default location when Apache is installed from source.
+ APXS_BIN="/usr/local/apache2/bin/apxs"
+ fi
+fi
+
+# Find apxs which tells us about the system.
+check APXS_BIN "[ -f ${APXS_BIN} -a -x ${APXS_BIN} ]" "path to Apache apxs"
+
+echo "Using ${APXS_BIN} to determine installation location."
+echo ""
+
+# This is an optional configuration variable. If set, the conf file
+# path is relative to it.
+APACHE_ROOT=$(${APXS_BIN} -q PREFIX)
+
+# Find the Apache shared module dir.
+APACHE_MODULEDIR=$(${APXS_BIN} -q LIBEXECDIR)
+check APACHE_MODULEDIR "[ -d ${APACHE_MODULEDIR} ]" "Apache module dir"
+
+# Find the Apache conf dir.
+APACHE_CONFDIR=$(${APXS_BIN} -q SYSCONFDIR)
+check APACHE_CONFDIR "[ -d ${APACHE_CONFDIR} ]" "Apache conf dir"
+
+APACHE_SBINDIR=$(${APXS_BIN} -q SBINDIR)
+check APACHE_SBINDIR "[ -d ${APACHE_SBINDIR} ]" "Apache bin dir"
+
+APACHE_TARGET=$(${APXS_BIN} -q TARGET)
+APACHE_BIN="${APACHE_SBINDIR}/${APACHE_TARGET}"
+check APACHE_BIN "[ -f ${APACHE_BIN} -a -x ${APACHE_BIN} ]" "Apache binary"
+
+# Find the Apache conf file.
+if [ -z "${APACHE_CONF_FILE}" ]; then
+ extract_compile_setting SERVER_CONFIG_FILE
+ APACHE_CONF_FILE="${EXTRACTED_COMPILE_SETTING}"
+fi
+if [ ! -z "${APACHE_ROOT}" ]; then
+ APACHE_CONF_FILE="${APACHE_ROOT}/${APACHE_CONF_FILE}"
+fi
+if [ -z "${APACHE_CONF_FILE}" ]; then
+ APACHE_CONF_FILE="${APACHE_CONFDIR}/${APACHE_CONF_FILENAME}"
+fi
+check APACHE_CONF_FILE "[ -f ${APACHE_CONF_FILE} ]" "Apache configuration file"
+
+# Try to grep for the Apache user.
+if [ -z "${APACHE_USER}" ]; then
+ APACHE_USER_LINE=$(egrep -i "^[[:blank:]]*User[[:blank:]]+" "${APACHE_CONF_FILE}")
+ if [ ! -z "${APACHE_USER_LINE}" ]; then
+ APACHE_USER=$(echo "${APACHE_USER_LINE}" |
+ sed -r s/^.*User[[:blank:]]+[\"\']*// |
+ sed s/[\"\'[:blank:]]*$//)
+ fi
+fi
+
+# Try to grep for the Apache group.
+if [ -z "${APACHE_GROUP}" ]; then
+ APACHE_GROUP_LINE=$(egrep -i "^[[:blank:]]*Group[[:blank:]]+" "${APACHE_CONF_FILE}")
+ if [ ! -z "${APACHE_GROUP_LINE}" ]; then
+ APACHE_GROUP=$(echo "${APACHE_GROUP_LINE}" |
+ sed -r s/^.*Group[[:blank:]]+[\"\']*// |
+ sed s/[\"\'[:blank:]]*$//)
+ fi
+fi
+
+# Make sure we have an Apache user and group.
+check APACHE_USER "[ ! -z \'${APACHE_USER}\' ]" "Apache user"
+check APACHE_GROUP "[ ! -z \'${APACHE_GROUP}\' ]" "Apache group"
+
+# Make sure the user is valid.
+check APACHE_USER "id '${APACHE_USER}' &> /dev/null" "valid Apache user '${APACHE_USER}'"
+
+# Make sure the group is valid.
+# TODO(bmcquade): 'getent group <name>' (where available) would ask the system
+# directly; grepping /etc/group misses groups served via LDAP/NIS.
+check APACHE_GROUP "egrep -q '^${APACHE_GROUP}:' /etc/group" "valid Apache group '${APACHE_GROUP}'"
+
+MODPAGESPEED_CONFDIR=${MODPAGESPEED_CONFDIR:-${APACHE_CONFDIR}}
+
+echo "mod_pagespeed needs to cache optimized resources on the file system."
+echo "The default location for this cache is '${MOD_PAGESPEED_CACHE}'."
+read -p "Would you like to specify a different location? (y/N) " -n1 PROMPT
+if [ "${PROMPT}" = "y" -o "${PROMPT}" = "Y" ]; then
+ echo ""
+ read -p "Location for mod_pagespeed file cache: " MOD_PAGESPEED_CACHE
+fi
+
+if [ -z "${MOD_PAGESPEED_CACHE}" ]; then
+ echo ""
+ echo "Must specify a mod_pagespeed file cache."
+ exit 1
+fi
+
+echo ""
+echo "Preparing to install to the following locations:"
+echo "${APACHE_MODULEDIR}/${MODPAGESPEED_SO_NAME} (${MODPAGESPEED_FILE_USER}:${MODPAGESPEED_FILE_GROUP})"
+echo "${APACHE_MODULEDIR}/${MODPAGESPEED_SO24_NAME} (${MODPAGESPEED_FILE_USER}:${MODPAGESPEED_FILE_GROUP})"
+echo "${BINDIR}/${PAGESPEED_JS_MINIFY_NAME} (${MODPAGESPEED_FILE_USER}:${MODPAGESPEED_FILE_GROUP})"
+echo "${MODPAGESPEED_CONFDIR}/${MODPAGESPEED_CONF_NAME} (${MODPAGESPEED_FILE_USER}:${MODPAGESPEED_FILE_GROUP})"
+echo "${MODPAGESPEED_CONFDIR}/${LIBRARIES_CONF_NAME} (${MODPAGESPEED_FILE_USER}:${MODPAGESPEED_FILE_GROUP})"
+echo "${MOD_PAGESPEED_CACHE} (${APACHE_USER}:${APACHE_GROUP})"
+echo ""
+if [ -z "${NO_PROMPT}" ]; then
+ echo -n "Continue? (y/N) "
+ read -n1 PROMPT
+ echo ""
+ if [ "${PROMPT}" != "y" -a "${PROMPT}" != "Y" ]; then
+ echo "Not continuing."
+ exit 1
+ fi
+fi
+
+if [ -d "${MOD_PAGESPEED_CACHE}" ]; then
+ echo "${MOD_PAGESPEED_CACHE} already exists. Not creating."
+fi
+
+# Only attempt to load mod_deflate in our conf file if it's actually
+# present on the system.
+COMMENT_OUT_DEFLATE='\#'
+if [ -f "${APACHE_MODULEDIR}/mod_deflate.so" ]; then
+ COMMENT_OUT_DEFLATE=
+else
+ echo "Unable to find mod_deflate.so. HTTP compression support not enabled!"
+fi
+
+TMP_CONF=$(mktemp -t conf.tmp.XXXXXX) || exit 1
+process_template "${SRC_ROOT}/install/common/pagespeed.conf.template" "${TMP_CONF}"
+TMP_LOAD=$(mktemp -t load.tmp.XXXXXX) || exit 1
+process_template "${SRC_ROOT}/install/common/pagespeed.load.template" "${TMP_LOAD}"
+cat "${TMP_CONF}" >> "${TMP_LOAD}"
+
+INSTALLATION_SUCCEEDED=0
+if (
+do_install "${MODPAGESPEED_FILE_USER}" "${MODPAGESPEED_FILE_GROUP}" \
+ "-m 644 -s" \
+ "${MODPAGESPEED_SO_PATH}" \
+ "${APACHE_MODULEDIR}/${MODPAGESPEED_SO_NAME}" &&
+do_install "${MODPAGESPEED_FILE_USER}" "${MODPAGESPEED_FILE_GROUP}" \
+ "-m 644 -s" \
+ "${MODPAGESPEED_SO24_PATH}" \
+ "${APACHE_MODULEDIR}/${MODPAGESPEED_SO24_NAME}" &&
+do_install "${MODPAGESPEED_FILE_USER}" "${MODPAGESPEED_FILE_GROUP}" \
+ "-m 755 -s" \
+ "${PAGESPEED_JS_MINIFY_PATH}" \
+ "${BINDIR}/${PAGESPEED_JS_MINIFY_NAME}" &&
+do_install "${MODPAGESPEED_FILE_USER}" "${MODPAGESPEED_FILE_GROUP}" "-m 644" \
+ "${TMP_LOAD}" \
+ "${MODPAGESPEED_CONFDIR}/${MODPAGESPEED_CONF_NAME}" &&
+do_install "${MODPAGESPEED_FILE_USER}" "${MODPAGESPEED_FILE_GROUP}" "-m 644" \
+ "${SRC_ROOT}/net/instaweb/genfiles/conf/pagespeed_libraries.conf" \
+ "${MODPAGESPEED_CONFDIR}/${LIBRARIES_CONF_NAME}" &&
+do_install "${APACHE_USER}" "${APACHE_GROUP}" "-m 755 -d" \
+ "${MOD_PAGESPEED_CACHE}" &&
+do_install "${APACHE_USER}" "${APACHE_GROUP}" "-m 755 -d" "${MOD_PAGESPEED_LOG}"
+); then
+ MODPAGESPEED_LOAD_LINE="Include ${MODPAGESPEED_CONFDIR}/${MODPAGESPEED_CONF_NAME}"
+ if ! grep -q "${MODPAGESPEED_LOAD_LINE}" "${APACHE_CONF_FILE}"; then
+ echo "Adding a load line for mod_pagespeed to ${APACHE_CONF_FILE}."
+ ${SUDO_CMD} sh -c "echo ${MODPAGESPEED_LOAD_LINE} >> ${APACHE_CONF_FILE}"
+ fi
+ LIBRARIES_LOAD_LINE="Include ${MODPAGESPEED_CONFDIR}/${LIBRARIES_CONF_NAME}"
+ if ! grep -q "${LIBRARIES_LOAD_LINE}" "${APACHE_CONF_FILE}"; then
+ echo "Adding a load line for pagespeed_libraries to ${APACHE_CONF_FILE}."
+ ${SUDO_CMD} sh -c "echo ${LIBRARIES_LOAD_LINE} >> ${APACHE_CONF_FILE}"
+ fi
+ if grep -q "${MODPAGESPEED_LOAD_LINE}" "${APACHE_CONF_FILE}" && \
+ grep -q "${LIBRARIES_LOAD_LINE}" "${APACHE_CONF_FILE}"; then
+ INSTALLATION_SUCCEEDED=1
+ fi
+fi
+
+echo ""
+if [ $INSTALLATION_SUCCEEDED -eq 1 ]; then
+ echo "Installation succeeded."
+ echo "Restart apache to enable mod_pagespeed."
+else
+ echo "Installation failed."
+fi
+
+rm -f "${TMP_CONF}" "${TMP_LOAD}"
diff --git a/src/install/mod_pagespeed_example/add_instrumentation.html b/src/install/mod_pagespeed_example/add_instrumentation.html
new file mode 100644
index 0000000..2ac266a
--- /dev/null
+++ b/src/install/mod_pagespeed_example/add_instrumentation.html
@@ -0,0 +1,8 @@
+<html>
+ <head>
+ <title>add_instrumentation example</title>
+ </head>
+ <body>
+ A small page to demonstrate how instrumentation is added to a page.
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/canonicalize_javascript_libraries.html b/src/install/mod_pagespeed_example/canonicalize_javascript_libraries.html
new file mode 100644
index 0000000..fbc94af
--- /dev/null
+++ b/src/install/mod_pagespeed_example/canonicalize_javascript_libraries.html
@@ -0,0 +1,17 @@
+<!doctype html>
+<html>
+ <head>
+ <title>canonicalize_javascript_libraries example</title>
+ <script>var state = 0;</script>
+ </head>
+ <body>
+ <p>
+ Expected: External 0
+ <p>
+ Actual:
+ <script src="rewrite_javascript.js"></script>
+ <p>
+      The script tag above refers to a local script using a relative URL.
+ We rewrite the local script to point to the copy on www.modpagespeed.com.
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/cgi/slow.cgi b/src/install/mod_pagespeed_example/cgi/slow.cgi
new file mode 100755
index 0000000..5cd5d54
--- /dev/null
+++ b/src/install/mod_pagespeed_example/cgi/slow.cgi
@@ -0,0 +1,8 @@
+#!/bin/bash
+# This cgi script just sleeps for a while then returns an image. It's meant to
+# simulate a server environment where some resources are dynamically generated
+# by a process which is subject to delay (e.g. mysql, php).
+sleep 10;
+echo Content-type: image/jpeg
+echo
+cat ../images/Puzzle.jpg
diff --git a/src/install/mod_pagespeed_example/collapse_whitespace.html b/src/install/mod_pagespeed_example/collapse_whitespace.html
new file mode 100644
index 0000000..957ef8e
--- /dev/null
+++ b/src/install/mod_pagespeed_example/collapse_whitespace.html
@@ -0,0 +1,19 @@
+<html>
+
+ <head>
+ <title>collapse_whitespace example </title>
+ <script> var x = 'Hello, world!';</script>
+ </head>
+
+ <body>
+ Whitespace is collapsed
+
+ in general.
+ <pre>
+ But not in a
+
+ <pre> tag.
+ </pre>
+ </body>
+
+</html>
diff --git a/src/install/mod_pagespeed_example/combine_css.html b/src/install/mod_pagespeed_example/combine_css.html
new file mode 100644
index 0000000..c4803ef
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_css.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <title>combine_css example</title>
+ <link rel="stylesheet" type="text/css" href="styles/yellow.css">
+ <link rel="stylesheet" type="text/css" href="styles/blue.css">
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ <link rel="stylesheet" type="text/css" href="styles/bold.css">
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ Hello, PageSpeed!
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/combine_css_debug.html b/src/install/mod_pagespeed_example/combine_css_debug.html
new file mode 100644
index 0000000..a8d563d
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_css_debug.html
@@ -0,0 +1,46 @@
+<html>
+ <head>
+ <title>combine_css example</title>
+ <link rel="stylesheet" type="text/css" href="styles/yellow.css">
+ <link rel="stylesheet" type="text/css" href="styles/blue.css">
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ <link rel="stylesheet" type="text/css" href="styles/bold.css">
+
+ <!-- The ones below this comment not marked as "ok" shouldn't be combined
+ for various reasons, and +debug should indicate why. The ones marked
+ "ok" are there as separators to elicit the debug messages. These won't
+ create errors on their own, but they also won't be combined because
+ there's nothing to combine them with. -->
+ <link rel="stylesheet" id="theres-an-id" type="text/css"
+ href="styles/big.css">
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <link rel="stylesheet"
+ data-foo="a data-foo attribute"
+ data-bar="a data-bar attribute"
+ type="text/css" href="styles/big.css">
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <link rel="stylesheet"
+ data-foo="a data-foo attribute"
+ data-bar="a data-bar attribute"
+ data-baz="a data-baz attribute"
+ type="text/css" href="styles/big.css">
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <link rel="stylesheet" media="print" type="text/css" href="styles/big.css">
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <noscript>
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ </noscript>
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <style>h1 { color: green }</style>
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ <!--[if IE 6]>
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ <![endif]-->
+ <!--ok--><link rel="stylesheet" type="text/css" href="styles/big.css">
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ Hello, PageSpeed!
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/combine_heads.html b/src/install/mod_pagespeed_example/combine_heads.html
new file mode 100644
index 0000000..2367ce6
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_heads.html
@@ -0,0 +1,13 @@
+<html>
+ <head>
+ <title>combine_heads example</title>
+ </head>
+ <body>
+ <div class="big">
+ Hello, world!
+ </div>
+ </body>
+ <head>
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ </head>
+</html>
diff --git a/src/install/mod_pagespeed_example/combine_javascript.html b/src/install/mod_pagespeed_example/combine_javascript.html
new file mode 100644
index 0000000..3aa286b
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_javascript.html
@@ -0,0 +1,10 @@
+<html>
+ <head>
+ <title>combine_javascript example</title>
+ <script src="combine_javascript1.js"></script>
+ <script src="combine_javascript2.js"></script>
+ </head>
+ <body>
+ Hello, PageSpeed!
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/combine_javascript1.js b/src/install/mod_pagespeed_example/combine_javascript1.js
new file mode 100644
index 0000000..3a2d52b
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_javascript1.js
@@ -0,0 +1,5 @@
+// Appends a plaintext message to the body, followed by a new line.
+function logText(message) {
+ document.body.appendChild(document.createTextNode(message));
+ document.body.appendChild(document.createElement("br"));
+}
diff --git a/src/install/mod_pagespeed_example/combine_javascript2.js b/src/install/mod_pagespeed_example/combine_javascript2.js
new file mode 100644
index 0000000..900ec98
--- /dev/null
+++ b/src/install/mod_pagespeed_example/combine_javascript2.js
@@ -0,0 +1,4 @@
+window.onload = function() {
+ logText("Application started.");
+ logText("Library operation confirmed.");
+}
diff --git a/src/install/mod_pagespeed_example/convert_meta_tags.html b/src/install/mod_pagespeed_example/convert_meta_tags.html
new file mode 100644
index 0000000..8e2b8f5
--- /dev/null
+++ b/src/install/mod_pagespeed_example/convert_meta_tags.html
@@ -0,0 +1,9 @@
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
+<META HTTP-EQUIV="CONTENT-LANGUAGE" CONTENT="en-US,fr">
+</head>
+<body>
+Hello World. Did my headers come out right?
+</body>
+</html>
diff --git a/src/install/mod_pagespeed_example/css_whitespace.html b/src/install/mod_pagespeed_example/css_whitespace.html
new file mode 100644
index 0000000..417d867
--- /dev/null
+++ b/src/install/mod_pagespeed_example/css_whitespace.html
@@ -0,0 +1,64 @@
+<html>
+ <head>
+ <title>Examples of the CSS white-space directive.</title>
+ </head>
+ <body>
+ <h1>Examples of the CSS <code>white-space</code> directive.</h1>
+<style>
+p#normal { white-space:normal; }
+p#pre { white-space:pre; }
+p#nowrap { white-space:nowrap; }
+p#pre-wrap { white-space:pre-wrap; }
+p#pre-line { white-space:pre-line; }
+p#inherit { white-space:inherit; }
+</style>
+ <hr/>
+ <p><code>normal:</code></p>
+ <p id="normal">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ <hr/>
+ <p><code>nowrap:</code></p>
+ <p id="nowrap">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ <hr/>
+ <p><code>pre:</code></p>
+ <p id="pre">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ <hr/>
+ <p><code>pre-wrap:</code></p>
+ <p id="pre-wrap">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ <hr/>
+ <p><code>pre-line:</code></p>
+ <p id="pre-line">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ <hr/>
+ <p><code>inherit:</code></p>
+ <p id="inherit">
+ This is a very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very very long paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ This is a paragraph.
+ </p>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/dedup_inlined_images.html b/src/install/mod_pagespeed_example/dedup_inlined_images.html
new file mode 100644
index 0000000..fa76848
--- /dev/null
+++ b/src/install/mod_pagespeed_example/dedup_inlined_images.html
@@ -0,0 +1,24 @@
+<html>
+ <head>
+ <title>dedup_inlined_images example</title>
+ </head>
+ <body>
+ <img src="images/Puzzle.jpg"
+ title="A large image that isn't inlined."/><br/>
+ <img src="images/Cuppa.png"
+ title="A small image that is inlined."/><br/>
+ <img src="images/disclosure_open_plus.png"
+ title="A second small image (with an id) that is inlined."
+ id="open_plus"/><br/>
+ <img src="images/Puzzle.jpg"
+ title="The large image again; still not inlined."/><br/>
+ <img src="images/Cuppa.png"
+ title="The small image again that is converted to JavaScript."/><br/>
+ <img src="images/Cuppa.png"
+ title="The small image again; again converted to JavaScript."/><br/>
+ <img src="images/Cuppa.png"
+ title="The small image again; again converted to JavaScript."/><br/>
+ <img src="images/disclosure_open_plus.png"
+ title="The 2nd small image again; converted to JS with its id."/><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/defer_javascript.html b/src/install/mod_pagespeed_example/defer_javascript.html
new file mode 100644
index 0000000..c046f65
--- /dev/null
+++ b/src/install/mod_pagespeed_example/defer_javascript.html
@@ -0,0 +1,41 @@
+<html>
+ <head>
+ <title>defer_javascript example</title>
+ </head>
+ <body>
+ <!-- All Script tags will be deferred until page onload is triggered -->
+ <p>
+ If defer_javascript is enabled, '*'s and images will appear before script
+ text. If not the script text will be interspersed with '*'s and images.
+ </p>
+ <script>
+ function sleep(ms) {
+ var sleep_now = Number(new Date());
+ while(sleep_now+ms>Number(new Date())) { var tmp = sleep_now; }
+ }
+ sleep(2000);
+ var state = 0;
+ </script>
+ <p>
+ <script src="rewrite_javascript.js" type="text/javascript" id="ext1">
+ </script>
+ </p>
+ <p>*</p>
+ <img src="images/Puzzle.jpg"><br/>
+ <p>*</p>
+ <script>
+ sleep(2000);
+ document.write('<p>Inline Script 1</p>');
+ </script>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <script>
+ sleep(2000);
+ document.write('<p>Inline Script 2</p>');
+ </script>
+ <p>*</p>
+ <img src="images/Cuppa.png"><br/>
+ <p>*</p>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/delay_images.html b/src/install/mod_pagespeed_example/delay_images.html
new file mode 100644
index 0000000..4c9e7c4
--- /dev/null
+++ b/src/install/mod_pagespeed_example/delay_images.html
@@ -0,0 +1,11 @@
+<html>
+ <head>
+ <title>delay_images example</title>
+ </head>
+ <body>
+ <br/>
+ <p>A 1023x766 image. PageSpeed will serve an inlined, low-quality
+ image until the original is loaded.</p>
+ <img src="images/Puzzle.jpg"/><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/elide_attributes.html b/src/install/mod_pagespeed_example/elide_attributes.html
new file mode 100644
index 0000000..305b262
--- /dev/null
+++ b/src/install/mod_pagespeed_example/elide_attributes.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>elide_attributes example</title>
+ </head>
+ <body>
+ Here is a disabled button:
+ <button name="ok" disabled="disabled">button</button><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/example.notpdf b/src/install/mod_pagespeed_example/example.notpdf
new file mode 100644
index 0000000..04ed374
--- /dev/null
+++ b/src/install/mod_pagespeed_example/example.notpdf
Binary files differ
diff --git a/src/install/mod_pagespeed_example/example.pdf b/src/install/mod_pagespeed_example/example.pdf
new file mode 100644
index 0000000..04ed374
--- /dev/null
+++ b/src/install/mod_pagespeed_example/example.pdf
Binary files differ
diff --git a/src/install/mod_pagespeed_example/extend_cache.html b/src/install/mod_pagespeed_example/extend_cache.html
new file mode 100644
index 0000000..e3d249d
--- /dev/null
+++ b/src/install/mod_pagespeed_example/extend_cache.html
@@ -0,0 +1,13 @@
+<html>
+ <head>
+ <title>extend_cache example</title>
+ </head>
+ <body>
+ <p>PageSpeed will rewrite this image's source to include a content-hash.
+ Note that this requires a background asynchronous fetch of the image;
+ therefore the page may not be rewritten the first time. If it isn't, wait
+ a few seconds and reload.</p>
+
+ <img src="images/Puzzle.jpg"/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/extend_cache_pdfs.html b/src/install/mod_pagespeed_example/extend_cache_pdfs.html
new file mode 100644
index 0000000..3eb8888
--- /dev/null
+++ b/src/install/mod_pagespeed_example/extend_cache_pdfs.html
@@ -0,0 +1,27 @@
+<html>
+ <head>
+ <title>extend_cache_pdfs example</title>
+ </head>
+ <body>
+ <p style="max-width:35em;">
+
+ PageSpeed will rewrite links to PDFs to include a content-hash. PDFs
+ are identified by having extension ".pdf". Note that this requires a
+ background asynchronous fetch of the PDF; therefore the page may not be
+ rewritten the first time. If it isn't, wait a few seconds and reload.</p>
+
+ <ul>
+ <li>Standard link to a pdf, will be cache extended:
+ <a href="example.pdf">example.pdf</a></li>
+ <li>Embedded pdf, will also be cache extended:
+ <br>
+ <dd><embed src="example.pdf" width="150" height="100"></li>
+ <li>Link to a PDF with extension ".notpdf". It won't be cache extended
+ because a ".pdf" extension is required:
+ <a href="example.notpdf">example.notpdf</a></li>
+ <li>Link to a PDF with a query parameter, will be cache extended:
+ <a href="example.pdf?a=b">example.pdf?a=b</a></li>
+
+  </ul>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/fallback_rewrite_css_urls.html b/src/install/mod_pagespeed_example/fallback_rewrite_css_urls.html
new file mode 100644
index 0000000..6f07c64
--- /dev/null
+++ b/src/install/mod_pagespeed_example/fallback_rewrite_css_urls.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <title>fallback_rewrite_css_urls example</title>
+ <style type='text/css'>
+ body { background: url(images/Cuppa.png) no-repeat center; }}}}
+ /* Extra }s make sure that CSS parser cannot parse this. */
+ </style>
+ <link rel="stylesheet" type="text/css" href="styles/fallback_rewrite_css_urls.css">
+ </head>
+ <body>
+ <p>The background image should be rewritten or cache extended.</p>
+ <div class="foo"></div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/flatten_css_imports.html b/src/install/mod_pagespeed_example/flatten_css_imports.html
new file mode 100644
index 0000000..c6ab2ac
--- /dev/null
+++ b/src/install/mod_pagespeed_example/flatten_css_imports.html
@@ -0,0 +1,16 @@
+<html>
+ <head>
+ <title>flatten_css_imports example</title>
+ <!-- link rel='stylesheet' href='styles/all_using_imports.css' -->
+ <style>
+ @import url(styles/all_using_imports.css);
+ </style>
+ </head>
+ <body>
+ <div class="blue yellow bold">
+ <style> tag should contain the contents of the yellow, blue,
+ and bold CSS files <code>@import</code>'d by
+ <code>styles/all_using_imports.css</code>.
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/images/BikeCrashIcn.png b/src/install/mod_pagespeed_example/images/BikeCrashIcn.png
new file mode 100644
index 0000000..cdfcb41
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/BikeCrashIcn.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/CradleAnimation.gif b/src/install/mod_pagespeed_example/images/CradleAnimation.gif
new file mode 100644
index 0000000..27a6d20
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/CradleAnimation.gif
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/Cuppa.png b/src/install/mod_pagespeed_example/images/Cuppa.png
new file mode 100644
index 0000000..6fb9cf3
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/Cuppa.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/IronChef2.gif b/src/install/mod_pagespeed_example/images/IronChef2.gif
new file mode 100644
index 0000000..00510a4
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/IronChef2.gif
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/OptPuzzle.jpg b/src/install/mod_pagespeed_example/images/OptPuzzle.jpg
new file mode 100644
index 0000000..69ea3ec
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/OptPuzzle.jpg
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/PageSpeedAnimation.gif b/src/install/mod_pagespeed_example/images/PageSpeedAnimation.gif
new file mode 100644
index 0000000..f471129
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/PageSpeedAnimation.gif
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/PageSpeedAnimationSmall.gif b/src/install/mod_pagespeed_example/images/PageSpeedAnimationSmall.gif
new file mode 100644
index 0000000..479cd71
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/PageSpeedAnimationSmall.gif
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/Puzzle.jpg b/src/install/mod_pagespeed_example/images/Puzzle.jpg
new file mode 100644
index 0000000..a119109
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/Puzzle.jpg
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/Puzzle2.jpg b/src/install/mod_pagespeed_example/images/Puzzle2.jpg
new file mode 100644
index 0000000..002558a
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/Puzzle2.jpg
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_1.jpg b/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_1.jpg
new file mode 100644
index 0000000..a119109
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_1.jpg
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_2.jpg b/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_2.jpg
new file mode 100644
index 0000000..a119109
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/blocking_rewrite_test_dont_reuse_2.jpg
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey.png b/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey.png
new file mode 100644
index 0000000..4b342d2
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey_down.png b/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey_down.png
new file mode 100644
index 0000000..33d6869
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/disclosure_arrow_dk_grey_down.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/disclosure_open_plus.png b/src/install/mod_pagespeed_example/images/disclosure_open_plus.png
new file mode 100644
index 0000000..f2b7f14
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/disclosure_open_plus.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/gray_saved_as_rgb.webp b/src/install/mod_pagespeed_example/images/gray_saved_as_rgb.webp
new file mode 100644
index 0000000..4e85fcb
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/gray_saved_as_rgb.webp
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/pagespeed_logo.png b/src/install/mod_pagespeed_example/images/pagespeed_logo.png
new file mode 100644
index 0000000..117cb04
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/pagespeed_logo.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/images/schedule_event.svg b/src/install/mod_pagespeed_example/images/schedule_event.svg
new file mode 100644
index 0000000..3c4ca03
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/schedule_event.svg
@@ -0,0 +1,16 @@
+<?xml version="1.0" encoding="utf-8"?>
+<!-- Generator: Adobe Illustrator 15.0.0, SVG Export Plug-In -->
+<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd" [
+ <!ENTITY ns_flows "http://ns.adobe.com/Flows/1.0/">
+]>
+<svg version="1.1"
+ xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" xmlns:a="http://ns.adobe.com/AdobeSVGViewerExtensions/3.0/"
+ x="0px" y="0px" width="21px" height="21px" viewBox="0 0 21 21" overflow="visible" enable-background="new 0 0 21 21"
+ xml:space="preserve">
+<defs>
+</defs>
+<path d="M12,14h1V9h-2v1h1V14z M15,5C14.998,5,5,5,5,5C4.449,5,4.002,5.449,4,6v10c0,0.553,0.447,1,1,1h10c0.553,0,1-0.447,1-1V6
+ C15.998,5.449,15.551,5,15,5z M15,16H5V7h10V16z M10,13v-0.75c0-0.414-0.336-0.75-0.75-0.75c0.414,0,0.75-0.336,0.75-0.75V10
+ c0-0.553-0.447-1-1-1H7v1h2v1H8v1h1v1H7v1h2C9.553,14,10,13.553,10,13z"/>
+<rect opacity="0" fill="#4387FD" width="21" height="21"/>
+</svg>
diff --git a/src/install/mod_pagespeed_example/images/search_go_button.png b/src/install/mod_pagespeed_example/images/search_go_button.png
new file mode 100644
index 0000000..43684b3
--- /dev/null
+++ b/src/install/mod_pagespeed_example/images/search_go_button.png
Binary files differ
diff --git a/src/install/mod_pagespeed_example/index.html b/src/install/mod_pagespeed_example/index.html
new file mode 100644
index 0000000..f421d0e
--- /dev/null
+++ b/src/install/mod_pagespeed_example/index.html
@@ -0,0 +1,1024 @@
+<!doctype html>
+<html>
+ <head>
+ <title>PageSpeed Examples Directory</title>
+ <link rel="stylesheet" href="styles/index_style.css" />
+ </head>
+ <body>
+ <table style="padding:0px; margin: 0px 0px 10px 0px; width:100%" cellpadding="0" cellspacing="0">
+ <tbody><tr style="height: 58px; display: block;">
+ <td id="plogo">
+ <img src="http://www.gstatic.com/codesite/ph/images/search-48.gif" alt="Logo">
+ </td>
+ <td style="padding-left: 0.5em">
+ <div id="pname">mod_pagespeed</div>
+ <div id="psum">Apache module for rewriting web pages to reduce
+ latency and bandwidth</div>
+ </td>
+ </tr>
+ </table>
+
+ <ul class="menu">
+ <li><img src="images/pagespeed_logo.png" width=30 height=30 alt="pagespeed logo" /></li>
+ <li><a
+ href="https://developers.google.com/speed/pagespeed/"
+ >Google Page Speed Family</a></li>
+ <li><a
+ href="https://developers.google.com/speed/pagespeed/module/using"
+ >Doc</a></li>
+ <li><a
+ href="https://developers.google.com/speed/pagespeed/module/download"
+ >Binaries</a></li>
+ <li><a href="https://developers.google.com/speed/pagespeed/module/build_from_source"
+ >Source Builds</a></li>
+ <li><a
+ href="http://github.com/pagespeed/mod_pagespeed"
+ >Source Code Browser</a></li>
+ <li><a
+ href="http://modpagespeed.com/psol/annotated.html">API</a></li>
+ <li><a
+ href="https://groups.google.com/group/mod-pagespeed-discuss"
+ >Discussion Forum</a></li>
+ <li><a
+ href="https://github.com/pagespeed/mod_pagespeed/issues"
+ >Issue Tracker</a></li>
+ <li><a href="http://github.com/pagespeed/mod_pagespeed"
+ >Project</a></li>
+ </ul>
+ <br/>
+
+ <span class="inline_code">mod_pagespeed</span> is an open-source
+ Apache module created by Google to help Make the Web Faster by
+ rewriting web pages to reduce latency and bandwidth.
+
+ <h2>PageSpeed Filter Examples</h2>
+ <p>
+ Here are some of the most useful PageSpeed filters. Each one has
+ a simple HTML example attached; click "before" to see the
+ original file, and "after" to see what PageSpeed produces
+ with that filter (and only that filter) enabled. The two
+ versions should look exactly the same, but the "after" one will
+ be (slightly) speedier. Use "view source" to see
+ the PageSpeed difference!
+ </p>
+    <table style="width: 100%; border-collapse: collapse; color:
+    black;" id="filters" summary="filters">
+
+<!-- TODO(jmarantz): re-indent this whole file -->
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-instrumentation-add">
+ add_instrumentation
+ </a>
+ </td>
+ <td class="desc">
+ Adds client-side latency instrumentation.
+ </td>
+ <td class="before">
+ <a href="add_instrumentation.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="add_instrumentation.html?PageSpeed=on&PageSpeedFilters=add_instrumentation">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-canonicalize-js">
+ canonicalize_javascript_libraries
+ </a>
+ </td>
+ <td class="desc">
+ Recognizes common JavaScript libraries and redirects each to a canonical URL.
+ </td>
+ <td class="before">
+ <a href="canonicalize_javascript_libraries.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="canonicalize_javascript_libraries.html?PageSpeed=on&PageSpeedFilters=canonicalize_javascript_libraries">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-whitespace-collapse">
+ collapse_whitespace
+ </a>
+ </td>
+ <td class="desc">
+ Removes unnecessary whitespace in HTML.
+ </td>
+ <td class="before">
+ <a href="collapse_whitespace.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="collapse_whitespace.html?PageSpeed=on&PageSpeedFilters=collapse_whitespace">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-combine">
+ combine_css
+ </a>
+ </td>
+ <td class="desc">
+ Combines multiple CSS files into one.
+ </td>
+ <td class="before">
+ <a href="combine_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="combine_css.html?PageSpeed=on&PageSpeedFilters=combine_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-head-combine">
+ combine_heads
+ </a>
+ </td>
+ <td class="desc">
+ Combines multiple <head> elements into one.
+ </td>
+ <td class="before">
+ <a href="combine_heads.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="combine_heads.html?PageSpeed=on&PageSpeedFilters=combine_heads">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-js-combine">
+ combine_javascript
+ </a>
+ </td>
+ <td class="desc">
+ Combines multiple JavaScript files into one.
+ </td>
+ <td class="before">
+ <a href="combine_javascript.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="combine_javascript.html?PageSpeed=on&PageSpeedFilters=combine_javascript">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-dedup-inlined-images">
+ dedup_inlined_images
+ </a>
+ </td>
+ <td class="desc">
+ Replaces repeated inlined images with JavaScript that loads the data from
+ the first instance of the image.
+ </td>
+ <td class="before">
+    <a href="dedup_inlined_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+    <a href="dedup_inlined_images.html?PageSpeed=on&PageSpeedFilters=inline_images,dedup_inlined_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-js-defer">
+ defer_javascript
+ </a>
+ </td>
+ <td class="desc">
+ Defers the execution of javascript in the HTML.
+ </td>
+ <td class="before">
+ <a href="defer_javascript.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="defer_javascript.html?PageSpeed=on&PageSpeedFilters=defer_javascript">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-attribute-elide">
+ elide_attributes
+ </a>
+ </td>
+ <td class="desc">
+ Removes unnecessary attributes in HTML tags.
+ </td>
+ <td class="before">
+ <a href="elide_attributes.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="elide_attributes.html?PageSpeed=on&PageSpeedFilters=elide_attributes">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-pedantic">
+ pedantic
+ </a>
+ </td>
+ <td class="desc">
+ Adds default type attributes to script and style tags that are missing them.
+ </td>
+ <td class="before">
+ <a href="pedantic.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="pedantic.html?PageSpeed=on&PageSpeedFilters=pedantic">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-cache-extend">
+ extend_cache
+ </a>
+ </td>
+ <td class="desc">
+ Improves cacheability.
+ </td>
+ <td class="before">
+ <a href="extend_cache.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="extend_cache.html?PageSpeed=on&PageSpeedFilters=extend_cache">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-cache-extend-pdfs">
+ extend_cache_pdfs
+ </a>
+ </td>
+ <td class="desc">
+ Improves cacheability of PDFs.
+ </td>
+ <td class="before">
+ <a href="extend_cache_pdfs.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="extend_cache_pdfs.html?PageSpeed=on&PageSpeedFilters=extend_cache_pdfs">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-rewrite">
+ fallback_rewrite_css_urls
+ </a>
+ </td>
+ <td class="desc">
+ Rewrite URLs in CSS even if CSS is not parseable.
+ </td>
+ <td class="before">
+ <a href="fallback_rewrite_css_urls.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="fallback_rewrite_css_urls.html?PageSpeed=on&PageSpeedFilters=fallback_rewrite_css_urls,rewrite_css,rewrite_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-flatten-css-imports">
+ flatten_css_imports
+ </a>
+ </td>
+ <td class="desc">
+ Flattens @import rules in CSS by replacing the rule with the contents of
+ the imported resource.
+ </td>
+ <td class="before">
+ <a href="flatten_css_imports.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="flatten_css_imports.html?PageSpeed=on&PageSpeedFilters=rewrite_css,flatten_css_imports">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-inline">
+ inline_css
+ </a>
+ </td>
+ <td class="desc">
+ Inlines small external CSS files.
+ </td>
+ <td class="before">
+ <a href="inline_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="inline_css.html?PageSpeed=on&PageSpeedFilters=inline_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-inline-google-fonts">
+ inline_google_font_css
+ </a>
+ </td>
+ <td class="desc">
+ Inlines small font-loading CSS from Google Fonts API.
+ </td>
+ <td class="before">
+ <a href="inline_google_font_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="inline_google_font_css.html?PageSpeed=on&PageSpeedFilters=inline_google_font_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-inline-import">
+ inline_import_to_link
+ </a>
+ </td>
+ <td class="desc">
+ Inlines style tags comprising <em>only</em> CSS @imports by converting
+ them to an equivalent link.
+ </td>
+ <td class="before">
+ <a href="inline_import_to_link.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="inline_import_to_link.html?PageSpeed=on&PageSpeedFilters=inline_import_to_link">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-js-inline">
+ inline_javascript
+ </a>
+ </td>
+ <td class="desc">
+ Inlines small external Javascript files.
+ </td>
+ <td class="before">
+ <a href="inline_javascript.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="inline_javascript.html?PageSpeed=on&PageSpeedFilters=inline_javascript">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-inline-preview-images.html">
+ inline_preview_images
+ </a>
+ </td>
+ <td class="desc">
+ Delays original images; serves inlined, low-quality placeholder images until
+ originals are loaded.
+ </td>
+ <td class="before">
+ <a href="inline_preview_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="inline_preview_images.html?PageSpeed=on&PageSpeedFilters=inline_preview_images,insert_image_dimensions">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-insert-dns-prefetch.html">
+ insert_dns_prefetch
+ </a>
+ </td>
+ <td class="desc">
+ Injects <link rel="dns-prefetch" href="//www.example.com"> tags in the
+ HEAD to enable the browser to do DNS prefetching.
+ </td>
+ <td class="before">
+ <a href="insert_dns_prefetch.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="insert_dns_prefetch.html?PageSpeed=on&PageSpeedFilters=insert_dns_prefetch">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-insert-ga">
+ insert_ga
+ </a>
+ </td>
+ <td class="desc">
+ Inserts Google Analytics javascript snippet.
+ </td>
+ <td class="before">
+ <a href="insert_ga.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="insert_ga.html?PageSpeed=on&PageSpeedFilters=insert_ga">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-lazyload-images">
+ lazyload_images
+ </a>
+ </td>
+ <td class="desc">
+ Loads images when they become visible in the client viewport.
+ </td>
+ <td class="before">
+ <a href="lazyload_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a
+ href="lazyload_images.html?PageSpeed=on&PageSpeedFilters=lazyload_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-local-storage-cache">
+ local_storage_cache
+ </a>
+ </td>
+ <td class="desc">
+ Loads inlined CSS and image resources into HTML5 local storage whence the
+ client fetches them subsequently rather than the server sending them again.
+ </td>
+ <td class="before">
+ <a href="local_storage_cache.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="local_storage_cache.html?PageSpeed=on&PageSpeedFilters=local_storage_cache,inline_css,inline_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-make-show-ads-async">
+ make_show_ads_async
+ </a>
+ </td>
+ <td class="desc">
+ Converts synchronous Google AdSense tags to asynchronous format.
+ </td>
+ <td class="before">
+ <a href="make_show_ads_async.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="make_show_ads_async.html?PageSpeed=on&PageSpeedFilters=make_show_ads_async">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-make-google-analytics-async">
+ make_google_analytics_async
+ </a>
+ </td>
+ <td class="desc">
+ Converts synchronous Google Analytics code to load asynchronously.
+ </td>
+ <td class="before">
+ <a href="make_google_analytics_async.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="make_google_analytics_async.html?PageSpeed=on&PageSpeedFilters=make_google_analytics_async">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-above-scripts">
+ move_css_above_scripts
+ </a>
+ </td>
+ <td class="desc">
+ Moves CSS Above Scripts.
+ </td>
+ <td class="before">
+ <a href="move_css_above_scripts.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="move_css_above_scripts.html?PageSpeed=on&PageSpeedFilters=move_css_above_scripts">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-to-head">
+ move_css_to_head
+ </a>
+ </td>
+ <td class="desc">
+ Moves CSS into the <head> element.
+ </td>
+ <td class="before">
+ <a href="move_css_to_head.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="move_css_to_head.html?PageSpeed=on&PageSpeedFilters=move_css_to_head">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-outline">
+ outline_css
+ </a>
+ </td>
+ <td class="desc">
+ Moves large inline <style> tags into external files for cacheability.
+ </td>
+ <td class="before">
+ <a href="outline_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="outline_css.html?PageSpeed=on&PageSpeedFilters=outline_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-js-outline">
+ outline_javascript
+ </a>
+ </td>
+ <td class="desc">
+ Moves large inline <script> tags into external files for cacheability.
+ </td>
+ <td class="before">
+ <a href="outline_javascript.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="outline_javascript.html?PageSpeed=on&PageSpeedFilters=outline_javascript">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-prioritize-critical-css">
+ prioritize_critical_css
+ </a>
+ </td>
+ <td class="desc">
+ Instruments the page, inlines its critical CSS at the top, and lazily loads the rest.
+ </td>
+ <td class="before">
+ <a href="prioritize_critical_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="prioritize_critical_css.html?PageSpeed=on&PageSpeedFilters=rewrite_css,flatten_css_imports,inline_import_to_link,prioritize_critical_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/domains#MapProxyDomain">
+ MapProxyDomain
+ </a>
+ </td>
+ <td class="desc">
+ Proxies/optimizes trusted resources from domains not running PageSpeed.
+ </td>
+ <td class="before">
+ <a href="proxy_external_resource.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="proxy_external_resource.html?PageSpeed=on&PageSpeedFilters=+rewrite_images,-inline_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-comment-remove">
+ remove_comments
+ </a>
+ </td>
+ <td class="desc">
+ Removes HTML comments.
+ </td>
+ <td class="before">
+ <a href="remove_comments.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="remove_comments.html?PageSpeed=on&PageSpeedFilters=remove_comments">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-quote-remove">
+ remove_quotes
+ </a>
+ </td>
+ <td class="desc">
+ Removes unnecessary quotes in HTML tags.
+ </td>
+ <td class="before">
+ <a href="remove_quotes.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="remove_quotes.html?PageSpeed=on&PageSpeedFilters=remove_quotes">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-inline-preview-images.html#resize_mobile_images">
+ resize_mobile_images
+ </a>
+ </td>
+ <td class="desc">
+ Just like inline_preview_images, but uses smaller placeholder images for mobile
+ browsers.
+ </td>
+ <td class="before">
+ <a href="resize_mobile_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="resize_mobile_images.html?PageSpeed=on&PageSpeedFilters=resize_mobile_images,insert_image_dimensions">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a
+ href="https://developers.google.com/speed/pagespeed/module/filter-image-optimize.html#resize_rendered_image_dimensions">
+ resize_rendered_image_dimensions
+ </a>
+ </td>
+ <td class="desc">
+ Resize images to rendered dimensions.
+ </td>
+ <td class="before">
+ <a href="resize_rendered_dimensions/image_resize_using_rendered_dimensions.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a
+ href="resize_rendered_dimensions/image_resize_using_rendered_dimensions.html?PageSpeed=on&PageSpeedFilters=resize_rendered_image_dimensions">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-image-responsive">
+ responsive_images
+ </a>
+ </td>
+ <td class="desc">
+ Serve responsive images using the srcset attribute.
+ </td>
+ <td class="before">
+ <a href="responsive_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="responsive_images.html?PageSpeed=on&PageSpeedFilters=responsive_images,responsive_images_zoom,rewrite_images,inline_images,resize_images,insert_image_dimensions">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-css-rewrite">
+ rewrite_css
+ </a>
+ </td>
+ <td class="desc">
+ Minifies CSS.
+ </td>
+ <td class="before">
+ <a href="rewrite_css.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_css.html?PageSpeed=on&PageSpeedFilters=rewrite_css">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ rewrite_css + extend_cache
+ </td>
+ <td class="desc">
+ Cache extends images in CSS.
+ </td>
+ <td class="before">
+ <a href="rewrite_css_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_css_images.html?PageSpeed=on&PageSpeedFilters=rewrite_css,extend_cache">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ rewrite_css + rewrite_images
+ </td>
+ <td class="desc">
+ Re-compress images in CSS.
+ </td>
+ <td class="before">
+ <a href="rewrite_css_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_css_images.html?PageSpeed=on&PageSpeedFilters=rewrite_css,rewrite_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-image-optimize">
+ rewrite_images
+ </a>
+ </td>
+ <td class="desc">
+ Rescales, and compresses images; inlines small ones.
+ </td>
+ <td class="before">
+ <a href="rewrite_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_images.html?PageSpeed=on&PageSpeedFilters=rewrite_images,inline_images,resize_images,insert_image_dimensions">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-js-minify">
+ rewrite_javascript
+ </a>
+ </td>
+ <td class="desc">
+ Minifies Javascript.
+ </td>
+ <td class="before">
+ <a href="rewrite_javascript.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_javascript.html?PageSpeed=on&PageSpeedFilters=rewrite_javascript">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-rewrite-style-attributes">
+ rewrite_style_attributes
+ </a>
+ </td>
+ <td class="desc">
+ Rewrite the CSS in style attributes by applying the configured rewrite_css
+ filter to it.
+ </td>
+ <td class="before">
+ <a href="rewrite_style_attributes.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_style_attributes.html?PageSpeed=on&PageSpeedFilters=rewrite_style_attributes,rewrite_css,rewrite_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-rewrite-style-attributes">
+ rewrite_style_attributes_with_url
+ </a>
+ </td>
+ <td class="desc">
+ Rewrite the CSS in style attributes by applying the configured rewrite_css
+ filter to it, but only if the attribute contains the text 'url('.
+ </td>
+ <td class="before">
+ <a href="rewrite_style_attributes.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="rewrite_style_attributes.html?PageSpeed=on&PageSpeedFilters=rewrite_style_attributes_with_url,rewrite_css,rewrite_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-image-sprite">
+ sprite_images
+ </a>
+ </td>
+ <td class="desc">
+ Sprites images.
+ </td>
+ <td class="before">
+ <a href="sprite_images.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="sprite_images.html?PageSpeed=on&PageSpeedFilters=rewrite_css,sprite_images">
+ after
+ </a>
+ </td>
+</tr>
+
+<tr class="filter_row">
+ <td class="code">
+ <a href="https://developers.google.com/speed/pagespeed/module/filter-trim-urls">
+ trim_urls
+ </a>
+ </td>
+ <td class="desc">
+ Removes unnecessary prefixes from URLs.
+ </td>
+ <td class="before">
+ <a href="trim_urls.html?PageSpeed=off">
+ before
+ </a>
+ </td>
+ <td class="after">
+ <a href="trim_urls.html?PageSpeed=on&PageSpeedFilters=trim_urls">
+ after
+ </a>
+ </td>
+</tr>
+
+ </table>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/inline_css.html b/src/install/mod_pagespeed_example/inline_css.html
new file mode 100644
index 0000000..58a6a3f
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_css.html
@@ -0,0 +1,15 @@
+<html>
+ <head>
+ <title>inline_css example</title>
+ <link rel="stylesheet" href="styles/all_styles.css">
+ <link rel="stylesheet" href="styles/blue.css" media="print">
+ <link rel="stylesheet" href="styles/bold.css" media="not decodable">
+ <link rel="stylesheet" href="styles/yellow.css" media=", ,print, screen ">
+ <link rel="stylesheet" href="styles/rewrite_css_images.css" media="all">
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ CSS that was linked should be inlined in the source.
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/inline_google_font_css.html b/src/install/mod_pagespeed_example/inline_google_font_css.html
new file mode 100644
index 0000000..e61ff14
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_google_font_css.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <title>inline_google_font_css example</title>
+ <link rel="stylesheet" type="text/css" href="http://fonts.googleapis.com/css?family=Roboto">
+ <style>
+ body {
+ font-family: 'Roboto', sans-serif;
+ }
+ </style>
+ </head>
+ <body>
+ The font should be slightly more robotic.
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/inline_import_to_link.html b/src/install/mod_pagespeed_example/inline_import_to_link.html
new file mode 100644
index 0000000..9cf9056
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_import_to_link.html
@@ -0,0 +1,15 @@
+<html>
+ <head>
+ <title>inline_import_to_link example</title>
+ <style type="text/css">@import url(styles/all_styles.css);</style>
+ <style type="text/css">
+ @import url(styles/big.css);
+ @import url(styles/yellow.css);
+ </style>
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ The <style> tags should be changed to <link> tags.
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/inline_javascript.html b/src/install/mod_pagespeed_example/inline_javascript.html
new file mode 100644
index 0000000..68a5558
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_javascript.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>inline_javascript example</title>
+ <script type="text/javascript" src="inline_javascript.js"></script>
+ </head>
+ <body>
+ JavaScript that was linked should be inlined into the source.
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/inline_javascript.js b/src/install/mod_pagespeed_example/inline_javascript.js
new file mode 100644
index 0000000..c2f30ca
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_javascript.js
@@ -0,0 +1,2 @@
+document.write("Hello, ");
+var a = '<Script></script>';
diff --git a/src/install/mod_pagespeed_example/inline_preview_images.html b/src/install/mod_pagespeed_example/inline_preview_images.html
new file mode 100644
index 0000000..3a74f1d
--- /dev/null
+++ b/src/install/mod_pagespeed_example/inline_preview_images.html
@@ -0,0 +1,13 @@
+<html>
+ <head>
+ <title>inline_preview_images example</title>
+ </head>
+ <body>
+ <br/>
+ <p>A 1023x766 image. PageSpeed will serve an inlined, low-quality
+ image until the original is loaded.</p>
+ <img src="images/Puzzle.jpg"/><br/>
+ <p>An image with srcset.</p>
+ <img src="images/Puzzle2.jpg" srcset="images/Puzzle.jpg 2x"/><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/insert_dns_prefetch.html b/src/install/mod_pagespeed_example/insert_dns_prefetch.html
new file mode 100644
index 0000000..6f4a753
--- /dev/null
+++ b/src/install/mod_pagespeed_example/insert_dns_prefetch.html
@@ -0,0 +1,12 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>insert_dns_prefetch example</title>
+ </head>
+ <body>
+ <p>
+ <img src="http://ref.pssdemos.com/filter/images/Puzzle.jpg"> <br>
+ </p>
+ <script src="//ajax.googleapis.com/ajax/libs/jquery/1.8.3/jquery.min.js"></script>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/insert_ga.html b/src/install/mod_pagespeed_example/insert_ga.html
new file mode 100644
index 0000000..dc37967
--- /dev/null
+++ b/src/install/mod_pagespeed_example/insert_ga.html
@@ -0,0 +1,9 @@
+<html>
+ <head>
+ <title>Insert Google Analytics</title>
+ </head>
+ <body>
+ <p>Look at the source for this page to see the inserted
+ Google Analytics snippet.</p>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/lazyload_images.html b/src/install/mod_pagespeed_example/lazyload_images.html
new file mode 100644
index 0000000..8c0a96d
--- /dev/null
+++ b/src/install/mod_pagespeed_example/lazyload_images.html
@@ -0,0 +1,46 @@
+<html>
+ <head>
+ <title>lazyload_images example</title>
+ </head>
+ <body>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/BikeCrashIcn.png"><br/>
+ <p>*</p>
+ <p>*</p>
+ <p>Should be below the fold on most screens. The images below will only be
+ loaded once you scroll down.</p>
+ <p>*</p>
+ <p>*</p>
+ <img src="images/Puzzle2.jpg" srcset="images/Puzzle.jpg 2x"><br/>
+ <img src="images/IronChef2.gif"><br/>
+ <img src="images/Cuppa.png"><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/local_storage_cache.html b/src/install/mod_pagespeed_example/local_storage_cache.html
new file mode 100644
index 0000000..da51d89
--- /dev/null
+++ b/src/install/mod_pagespeed_example/local_storage_cache.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <title>Local Storage Cache Test</title>
+ <link rel='stylesheet' href="styles/yellow.css">
+ </head>
+ <body>
+ <div id="div" class="yellow">I should be on a yellow background ...</div>
+ <img src="images/Cuppa.png"
+ alt="A cup of joe"
+ alt='A cup of joe'
+ alt='A cup of joe's "joe"'
+ alt="A cup of joe's "joe"">
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/make_google_analytics_async.html b/src/install/mod_pagespeed_example/make_google_analytics_async.html
new file mode 100644
index 0000000..2c25d37
--- /dev/null
+++ b/src/install/mod_pagespeed_example/make_google_analytics_async.html
@@ -0,0 +1,18 @@
+<html>
+ <head>
+ <title>make_google_analytics_async example</title>
+ </head>
+ <body>
+ Google Analytics loaded via document.write in main doc.
+ <script type="text/javascript">
+ var gaJsHost = (("https:" == document.location.protocol) ? "https://ssl." : "http://www.");
+ document.write(unescape("%3Cscript src='" + gaJsHost + "google-analytics.com/ga.js' type='text/javascript'%3E%3C/script%3E"));
+ </script>
+ <script type="text/javascript">
+ try {
+ var pageTracker = _gat._getTracker("XX-XXXXXXXX-X");
+ pageTracker._trackPageview();
+ } catch(err) {}
+ </script>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/make_show_ads_async.html b/src/install/mod_pagespeed_example/make_show_ads_async.html
new file mode 100644
index 0000000..5ec857f
--- /dev/null
+++ b/src/install/mod_pagespeed_example/make_show_ads_async.html
@@ -0,0 +1,20 @@
+<html>
+ <head>
+ <title>make_show_ads_async example</title>
+ </head>
+ <body>
+ There is an ad in a box below:
+ <div style="border: 2px solid blue;">
+ <script>
+ google_ad_client = "ca-google";
+ google_ad_width = 728;
+ google_ad_height = 90;
+ google_ad_format = "728x90";
+ google_adtest = "on";
+ google_ad_type = "text";
+ </script>
+ <script type="text/javascript" src="http://pagead2.googlesyndication.com/pagead/show_ads.js">
+ </script>
+ </div>
+ </body>
+</html>
\ No newline at end of file
diff --git a/src/install/mod_pagespeed_example/move_css_above_scripts.html b/src/install/mod_pagespeed_example/move_css_above_scripts.html
new file mode 100644
index 0000000..2c6ab04
--- /dev/null
+++ b/src/install/mod_pagespeed_example/move_css_above_scripts.html
@@ -0,0 +1,15 @@
+<html>
+ <head>
+ <title>move_css_above_scripts example</title>
+ </head>
+ <body>
+ <script src="combine_javascript1.js" type="text/javascript"></script>
+ <div class="blue yellow big bold">
+ Hello, world!
+ </div>
+ <p>
+ Look at the source for this page to see the stylesheet moved.
+ </p>
+ <link rel="stylesheet" type="text/css" href="styles/all_styles.css">
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/move_css_to_head.html b/src/install/mod_pagespeed_example/move_css_to_head.html
new file mode 100644
index 0000000..eb77c2f
--- /dev/null
+++ b/src/install/mod_pagespeed_example/move_css_to_head.html
@@ -0,0 +1,11 @@
+<html>
+ <head>
+ <title>move_css_to_head example</title>
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ Hello, world!
+ </div>
+ <link rel="stylesheet" type="text/css" href="styles/all_styles.css">
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/outline_css.html b/src/install/mod_pagespeed_example/outline_css.html
new file mode 100644
index 0000000..b0e73d5
--- /dev/null
+++ b/src/install/mod_pagespeed_example/outline_css.html
@@ -0,0 +1,64 @@
+<html>
+ <head>
+ <title>outline_css example</title>
+ <style type="text/css" id="small">
+ .yellow {background-color: yellow;}
+ .blue {color: blue;}
+ .big {font-size: 8em;}
+ .bold {font-weight: bold;}
+ </style>
+ <style type="text/css" id="large">
+<!-- Just padding this out so it exceeds CssOutlineMinBytes. -->
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ .yellow {background-color: yellow;} .blue {color: blue;} .big {font-size: 8em;} .bold {font-weight: bold;}
+ </style>
+ </head>
+ <body>
+ <div class="blue yellow big bold">
+ CSS which was inline should be linked.
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/outline_javascript.html b/src/install/mod_pagespeed_example/outline_javascript.html
new file mode 100644
index 0000000..1083c4b
--- /dev/null
+++ b/src/install/mod_pagespeed_example/outline_javascript.html
@@ -0,0 +1,1223 @@
+<html>
+ <head>
+ <title>outline_javascript example</title>
+ <script type="text/javascript" id="small"> var hello = 1; </script>
+ <script type="text/javascript" id="large">
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// Copyright 2006 Google Inc. All Rights Reserved.
+
+/**
+ * @fileoverview Bootstrap for the Google JS Library (Closure).
+ *
+ * In uncompiled mode base.js will write out Closure's deps file, unless the
+ * global <code>CLOSURE_NO_DEPS</code> is set to true. This allows projects to
+ * include their own deps file(s) from different locations.
+ *
+ */
+
+/**
+ * @define {boolean} Overridden to true by the compiler when --closure_pass
+ * or --mark_as_compiled is specified.
+ */
+var COMPILED = false;
+
+
+/**
+ * Base namespace for the Closure library. Checks to see goog is
+ * already defined in the current scope before assigning to prevent
+ * clobbering if base.js is loaded more than once.
+ */
+var goog = goog || {}; // Check to see if already defined in current scope
+
+
+/**
+ * Reference to the global context. In most cases this will be 'window'.
+ */
+goog.global = this;
+
+
+/**
+ * @define {boolean} DEBUG is provided as a convenience so that debugging code
+ * that should not be included in a production js_binary can be easily stripped
+ * by specifying --define goog.DEBUG=false to the JSCompiler. For example, most
+ * toString() methods should be declared inside an "if (goog.DEBUG)" conditional
+ * because they are generally used for debugging purposes and it is difficult
+ * for the JSCompiler to statically determine whether they are used.
+ */
+goog.DEBUG = true;
+
+
+/**
+ * @define {string} LOCALE defines the locale being used for compilation. It is
+ * used to select locale specific data to be compiled in js binary. BUILD rule
+ * can specify this value by "--define goog.LOCALE=<locale_name>" as JSCompiler
+ * option.
+ *
+ * Take into account that the locale code format is important. You should use
+ * the canonical Unicode format with hyphen as a delimiter. Language must be
+ * lowercase, Language Script - Capitalized, Region - UPPERCASE.
+ * There are few examples: pt-BR, en, en-US, sr-Latin-BO, zh-Hans-CN.
+ *
+ * See more info about locale codes here:
+ * http://www.unicode.org/reports/tr35/#Unicode_Language_and_Locale_Identifiers
+ *
+ * For language codes you should use values defined by ISO 693-1. See it here
+ * http://www.w3.org/WAI/ER/IG/ert/iso639.htm. There is only one exception from
+ * this rule: the Hebrew language. For legacy reasons the old code (iw) should
+ * be used instead of the new code (he), see http://wiki/Main/IIISynonyms.
+ */
+goog.LOCALE = 'en'; // default to en
+
+
+/**
+ * Indicates whether or not we can call 'eval' directly to eval code in the
+ * global scope. Set to a Boolean by the first call to goog.globalEval (which
+ * empirically tests whether eval works for globals). @see goog.globalEval
+ * @type {boolean?}
+ * @private
+ */
+goog.evalWorksForGlobals_ = null;
+
+
+/**
+ * Creates object stubs for a namespace. When present in a file, goog.provide
+ * also indicates that the file defines the indicated object. Calls to
+ * goog.provide are resolved by the compiler if --closure_pass is set.
+ * @param {string} name name of the object that this file defines.
+ */
+goog.provide = function(name) {
+ if (!COMPILED) {
+ // Ensure that the same namespace isn't provided twice. This is intended
+ // to teach new developers that 'goog.provide' is effectively a variable
+ // declaration. And when JSCompiler transforms goog.provide into a real
+ // variable declaration, the compiled JS should work the same as the raw
+ // JS--even when the raw JS uses goog.provide incorrectly.
+ if (goog.getObjectByName(name) && !goog.implicitNamespaces_[name]) {
+ throw Error('Namespace "' + name + '" already declared.');
+ }
+
+ var namespace = name;
+ while ((namespace = namespace.substring(0, namespace.lastIndexOf('.')))) {
+ goog.implicitNamespaces_[namespace] = true;
+ }
+ }
+
+ goog.exportPath_(name);
+};
+
+
+if (!COMPILED) {
+ /**
+ * Namespaces implicitly defined by goog.provide. For example,
+ * goog.provide('goog.events.Event') implicitly declares
+ * that 'goog' and 'goog.events' must be namespaces.
+ *
+ * @type {Object}
+ * @private
+ */
+ goog.implicitNamespaces_ = {};
+}
+
+
+/**
+ * Builds an object structure for the provided namespace path,
+ * ensuring that names that already exist are not overwritten. For
+ * example:
+ * "a.b.c" -> a = {};a.b={};a.b.c={};
+ * Used by goog.provide and goog.exportSymbol.
+ * @param {string} name name of the object that this file defines.
+ * @param {Object} opt_object the object to expose at the end of the path.
+ * @param {Object} opt_objectToExportTo The object to add the path to; default
+ * is |goog.global|.
+ * @private
+ */
+goog.exportPath_ = function(name, opt_object, opt_objectToExportTo) {
+ var parts = name.split('.');
+ var cur = opt_objectToExportTo || goog.global;
+
+ // Internet Explorer exhibits strange behavior when throwing errors from
+ // methods externed in this manner. See the testExportSymbolExceptions in
+ // base_test.html for an example.
+ if (!(parts[0] in cur) && cur.execScript) {
+ cur.execScript('var ' + parts[0]);
+ }
+
+ // Certain browsers cannot parse code in the form for((a in b); c;);
+ // This pattern is produced by the JSCompiler when it collapses the
+ // statement above into the conditional loop below. To prevent this from
+ // happening, use a for-loop and reserve the init logic as below.
+
+ // Parentheses added to eliminate strict JS warning in Firefox.
+ for (var part; parts.length && (part = parts.shift());) {
+ if (!parts.length && goog.isDef(opt_object)) {
+ // last part and we have an object; use it
+ cur[part] = opt_object;
+ } else if (cur[part]) {
+ cur = cur[part];
+ } else {
+ cur = cur[part] = {};
+ }
+ }
+};
+
+
+/**
+ * Returns an object based on its fully qualified external name. If you are
+ * using a compilation pass that renames property names beware that using this
+ * function will not find renamed properties.
+ *
+ * @param {string} name The fully qualified name.
+ * @param {Object} opt_obj The object within which to look; default is
+ * |goog.global|.
+ * @return {Object?} The object or, if not found, null.
+ */
+goog.getObjectByName = function(name, opt_obj) {
+ var parts = name.split('.');
+ var cur = opt_obj || goog.global;
+ for (var part; part = parts.shift(); ) {
+ if (cur[part]) {
+ cur = cur[part];
+ } else {
+ return null;
+ }
+ }
+ return cur;
+};
+
+
+/**
+ * Globalizes a whole namespace, such as goog or goog.lang.
+ *
+ * @param {Object} obj The namespace to globalize.
+ * @param {Object} opt_global The object to add the properties to.
+ * @deprecated Properties may be explicitly exported to the global scope, but
+ * this should no longer be done in bulk.
+ */
+goog.globalize = function(obj, opt_global) {
+ var global = opt_global || goog.global;
+ for (var x in obj) {
+ global[x] = obj[x];
+ }
+};
+
+
+/**
+ * Adds a dependency from a file to the files it requires.
+ * @param {string} relPath The path to the js file.
+ * @param {Array} provides An array of strings with the names of the objects
+ * this file provides.
+ * @param {Array} requires An array of strings with the names of the objects
+ * this file requires.
+ */
+goog.addDependency = function(relPath, provides, requires) {
+ if (!COMPILED) {
+ var provide, require;
+ var path = relPath.replace(/\\/g, '/');
+ var deps = goog.dependencies_;
+ for (var i = 0; provide = provides[i]; i++) {
+ deps.nameToPath[provide] = path;
+ if (!(path in deps.pathToNames)) {
+ deps.pathToNames[path] = {};
+ }
+ deps.pathToNames[path][provide] = true;
+ }
+ for (var j = 0; require = requires[j]; j++) {
+ if (!(path in deps.requires)) {
+ deps.requires[path] = {};
+ }
+ deps.requires[path][require] = true;
+ }
+ }
+};
+
+
+/**
+ * Implements a system for the dynamic resolution of dependencies
+ * that works in parallel with the BUILD system. Note that all calls
+ * to goog.require will be stripped by the JSCompiler when the
+ * --closure_pass option is used.
+ * @param {string} rule Rule to include, in the form goog.package.part.
+ */
+goog.require = function(rule) {
+
+ // if the object already exists we do not need do do anything
+ // TODO(abliss): If we start to support require based on file name this has
+ // to change.
+ // TODO(abliss): If we allow goog.foo.* this has to change.
+ // TODO(abliss): If we implement dynamic load after page load we should
+ // probably not remove this code for the compiled output.
+ if (!COMPILED) {
+ if (goog.getObjectByName(rule)) {
+ return;
+ }
+ var path = goog.getPathFromDeps_(rule);
+ if (path) {
+ goog.included_[path] = true;
+ goog.writeScripts_();
+ } else {
+ var errorMessage = 'goog.require could not find: ' + rule;
+ if (goog.global.console) {
+ goog.global.console['error'](errorMessage);
+ }
+
+ throw Error(errorMessage);
+ }
+ }
+};
+
+
+/**
+ * Whether goog.require should throw an exception if it fails.
+ * @type {boolean}
+ */
+goog.useStrictRequires = false;
+
+
+/**
+ * Path for included scripts
+ * @type {string}
+ */
+goog.basePath = '';
+
+
+/**
+ * A hook for overriding the base path.
+ * @type {string|undefined}
+ */
+goog.global.CLOSURE_BASE_PATH;
+
+
+/**
+ * Whether to write out Closure's deps file. By default,
+ * the deps are written.
+ * @type {boolean|undefined}
+ */
+goog.global.CLOSURE_NO_DEPS;
+
+
+/**
+ * Null function used for default values of callbacks, etc.
+ * @type {!Function}
+ */
+goog.nullFunction = function() {};
+
+
+/**
+ * The identity function. Returns its first argument.
+ *
+ * @param {*} var_args The arguments of the function.
+ * @return {*} The first argument.
+ * @deprecated Use goog.functions.identity instead.
+ */
+goog.identityFunction = function(var_args) {
+ return arguments[0];
+};
+
+
+/**
+ * When defining a class Foo with an abstract method bar(), you can do:
+ *
+ * Foo.prototype.bar = goog.abstractMethod
+ *
+ * Now if a subclass of Foo fails to override bar(), an error
+ * will be thrown when bar() is invoked.
+ *
+ * Note: This does not take the name of the function to override as
+ * an argument because that would make it more difficult to obfuscate
+ * our JavaScript code.
+ *
+ * @type {!Function}
+ * @throws {Error} when invoked to indicate the method should be
+ * overridden.
+ */
+goog.abstractMethod = function() {
+ throw Error('unimplemented abstract method');
+};
+
+
+/**
+ * Adds a {@code getInstance} static method that always return the same instance
+ * object.
+ * @param {!Function} ctor The constructor for the class to add the static
+ * method to.
+ */
+goog.addSingletonGetter = function(ctor) {
+ ctor.getInstance = function() {
+ return ctor.instance_ || (ctor.instance_ = new ctor());
+ };
+};
+
+
+if (!COMPILED) {
+ /**
+ * Object used to keep track of urls that have already been added. This
+ * record allows the prevention of circular dependencies.
+ * @type {Object}
+ * @private
+ */
+ goog.included_ = {};
+
+
+ /**
+ * This object is used to keep track of dependencies and other data that is
+ * used for loading scripts
+ * @private
+ * @type {Object}
+ */
+ goog.dependencies_ = {
+ pathToNames: {}, // 1 to many
+ nameToPath: {}, // 1 to 1
+ requires: {}, // 1 to many
+ visited: {}, // used when resolving dependencies to prevent us from
+ // visiting the file twice
+ written: {} // used to keep track of script files we have written
+ };
+
+
+ /**
+ * Tries to detect whether is in the context of an HTML document.
+ * @return {boolean} True if it looks like HTML document.
+ * @private
+ */
+ goog.inHtmlDocument_ = function() {
+ var doc = goog.global.document;
+ return typeof doc != 'undefined' &&
+ 'write' in doc; // XULDocument misses write.
+ };
+
+
+ /**
+ * Tries to detect the base path of the base.js script that bootstraps Closure
+ * @private
+ */
+ goog.findBasePath_ = function() {
+ if (!goog.inHtmlDocument_()) {
+ return;
+ }
+ var doc = goog.global.document;
+ if (goog.global.CLOSURE_BASE_PATH) {
+ goog.basePath = goog.global.CLOSURE_BASE_PATH;
+ return;
+ }
+ var scripts = doc.getElementsByTagName('script');
+ for (var script, i = 0; script = scripts[i]; i++) {
+ var src = script.src;
+ var l = src.length;
+ if (src.substr(l - 7) == 'base.js') {
+ goog.basePath = src.substr(0, l - 7);
+ return;
+ }
+ }
+ };
+
+
+ /**
+ * Writes a script tag if, and only if, that script hasn't already been added
+ * to the document. (Must be called at execution time)
+ * @param {string} src Script source.
+ * @private
+ */
+ goog.writeScriptTag_ = function(src) {
+ if (goog.inHtmlDocument_() &&
+ !goog.dependencies_.written[src]) {
+ goog.dependencies_.written[src] = true;
+ var doc = goog.global.document;
+ doc.write('<script type="text/javascript" src="' +
+ src + '"></' + 'script>');
+ }
+ };
+
+
+ /**
+ * Resolves dependencies based on the dependencies added using addDependency
+ * and calls writeScriptTag_ in the correct order.
+ * @private
+ */
+ goog.writeScripts_ = function() {
+ // the scripts we need to write this time
+ var scripts = [];
+ var seenScript = {};
+ var deps = goog.dependencies_;
+
+ function visitNode(path) {
+ if (path in deps.written) {
+ return;
+ }
+
+ // we have already visited this one. We can get here if we have cyclic
+ // dependencies
+ if (path in deps.visited) {
+ if (!(path in seenScript)) {
+ seenScript[path] = true;
+ scripts.push(path);
+ }
+ return;
+ }
+
+ deps.visited[path] = true;
+
+ if (path in deps.requires) {
+ for (var requireName in deps.requires[path]) {
+ if (requireName in deps.nameToPath) {
+ visitNode(deps.nameToPath[requireName]);
+ } else if (!goog.getObjectByName(requireName)) {
+ // If the required name is defined, we assume that this
+ // dependency was bootstrapped by other means. Otherwise,
+ // throw an exception.
+ throw Error('Undefined nameToPath for ' + requireName);
+ }
+ }
+ }
+
+ if (!(path in seenScript)) {
+ seenScript[path] = true;
+ scripts.push(path);
+ }
+ }
+
+ for (var path in goog.included_) {
+ if (!deps.written[path]) {
+ visitNode(path);
+ }
+ }
+
+ for (var i = 0; i < scripts.length; i++) {
+ if (scripts[i]) {
+ goog.writeScriptTag_(goog.basePath + scripts[i]);
+ } else {
+ throw Error('Undefined script input');
+ }
+ }
+ };
+
+
+ /**
+ * Looks at the dependency rules and tries to determine the script file that
+ * fulfills a particular rule.
+ * @param {string} rule In the form goog.namespace.Class or project.script.
+ * @return {string?} Url corresponding to the rule, or null.
+ * @private
+ */
+ goog.getPathFromDeps_ = function(rule) {
+ if (rule in goog.dependencies_.nameToPath) {
+ return goog.dependencies_.nameToPath[rule];
+ } else {
+ return null;
+ }
+ };
+
+ goog.findBasePath_();
+
+ // Allow projects to manage the deps files themselves.
+ if (!goog.global.CLOSURE_NO_DEPS) {
+ // [MODIFIED FOR MOD_PAGESPEED_DEMO] goog.writeScriptTag_(goog.basePath + 'deps.js');
+ }
+}
+
+
+
+//==============================================================================
+// Language Enhancements
+//==============================================================================
+
+
+/**
+ * This is a "fixed" version of the typeof operator. It differs from the typeof
+ * operator in such a way that null returns 'null' and arrays return 'array'.
+ * @param {*} value The value to get the type of.
+ * @return {string} The name of the type.
+ */
+goog.typeOf = function(value) {
+ var s = typeof value;
+ if (s == 'object') {
+ if (value) {
+ // We cannot use constructor == Array or instanceof Array because
+ // different frames have different Array objects. In IE6, if the iframe
+ // where the array was created is destroyed, the array loses its
+ // prototype. Then dereferencing val.splice here throws an exception, so
+ // we can't use goog.isFunction. Calling typeof directly returns 'unknown'
+ // so that will work. In this case, this function will return false and
+ // most array functions will still work because the array is still
+ // array-like (supports length and []) even though it has lost its
+ // prototype.
+ // Mark Miller noticed that Object.prototype.toString
+ // allows access to the unforgeable [[Class]] property.
+ // 15.2.4.2 Object.prototype.toString ( )
+ // When the toString method is called, the following steps are taken:
+ // 1. Get the [[Class]] property of this object.
+ // 2. Compute a string value by concatenating the three strings
+ // "[object ", Result(1), and "]".
+ // 3. Return Result(2).
+ // and this behavior survives the destruction of the execution context.
+ if (value instanceof Array || // Works quickly in same execution context.
+ // If value is from a different execution context then
+ // !(value instanceof Object), which lets us early out in the common
+ // case when value is from the same context but not an array.
+ // The {if (value)} check above means we don't have to worry about
+ // undefined behavior of Object.prototype.toString on null/undefined.
+ //
+ // HACK: In order to use an Object prototype method on the arbitrary
+ // value, the compiler requires the value be cast to type Object,
+ // even though the ECMA spec explicitly allows it.
+ (!(value instanceof Object) &&
+ (Object.prototype.toString.call(
+ /** @type {Object} */ (value)) == '[object Array]') ||
+
+ // In IE all non value types are wrapped as objects across window
+ // boundaries (not iframe though) so we have to do object detection
+ // for this edge case
+ typeof value.length == 'number' &&
+ typeof value.splice != 'undefined' &&
+ typeof value.propertyIsEnumerable != 'undefined' &&
+ !value.propertyIsEnumerable('splice')
+
+ )) {
+ return 'array';
+ }
+ // HACK: There is still an array case that fails.
+ // function ArrayImpostor() {}
+ // ArrayImpostor.prototype = [];
+ // var impostor = new ArrayImpostor;
+ // this can be fixed by getting rid of the fast path
+ // (value instanceof Array) and solely relying on
+ // (value && Object.prototype.toString.call(value) === '[object Array]')
+ // but that would require many more function calls and is not warranted
+ // unless closure code is receiving objects from untrusted sources.
+
+ // IE in cross-window calls does not correctly marshal the function type
+ // (it appears just as an object) so we cannot use just typeof val ==
+ // 'function'. However, if the object has a call property, it is a
+ // function.
+ if (!(value instanceof Object) &&
+ (Object.prototype.toString.call(
+ /** @type {Object} */ (value)) == '[object Function]' ||
+ typeof value.call != 'undefined' &&
+ typeof value.propertyIsEnumerable != 'undefined' &&
+ !value.propertyIsEnumerable('call'))) {
+ return 'function';
+ }
+
+
+ } else {
+ return 'null';
+ }
+
+ // In Safari typeof nodeList returns 'function', and on Firefox
+ // typeof behaves similarly for HTML{Applet,Embed,Object}Elements
+ // and RegExps. We would like to return object for those and we can
+ // detect an invalid function by making sure that the function
+ // object has a call method.
+ } else if (s == 'function' && typeof value.call == 'undefined') {
+ return 'object';
+ }
+ return s;
+};
+
+
+/**
+ * Safe way to test whether a property is enumerable. It allows testing
+ * for enumerable on objects where 'propertyIsEnumerable' is overridden or
+ * does not exist (like DOM nodes in IE). Does not use browser native
+ * Object.propertyIsEnumerable.
+ * @param {Object} object The object to test if the property is enumerable.
+ * @param {string} propName The property name to check for.
+ * @return {boolean} True if the property is enumerable.
+ * @private
+ */
+goog.propertyIsEnumerableCustom_ = function(object, propName) {
+ // KJS in Safari 2 is not ECMAScript compatible and lacks crucial methods
+ // such as propertyIsEnumerable. We therefore use a workaround.
+ // Does anyone know a more efficient work around?
+ if (propName in object) {
+ for (var key in object) {
+ if (key == propName &&
+ Object.prototype.hasOwnProperty.call(object, propName)) {
+ return true;
+ }
+ }
+ }
+ return false;
+};
+
+
+/**
+ * Safe way to test whether a property is enumerable. It allows testing
+ * for enumerable on objects where 'propertyIsEnumerable' is overridden or
+ * does not exist (like DOM nodes in IE).
+ * @param {Object} object The object to test if the property is enumerable.
+ * @param {string} propName The property name to check for.
+ * @return {boolean} True if the property is enumerable.
+ * @private
+ */
+goog.propertyIsEnumerable_ = function(object, propName) {
+ // In IE if object is from another window, cannot use propertyIsEnumerable
+ // from this window's Object. Will raise a 'JScript object expected' error.
+ if (object instanceof Object) {
+ return Object.prototype.propertyIsEnumerable.call(object, propName);
+ } else {
+ return goog.propertyIsEnumerableCustom_(object, propName);
+ }
+};
+
+
+/**
+ * Returns true if the specified value is not |undefined|.
+ * WARNING: Do not use this to test if an object has a property. Use the in
+ * operator instead. Additionally, this function assumes that the global
+ * undefined variable has not been redefined.
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is defined.
+ */
+goog.isDef = function(val) {
+ return val !== undefined;
+};
+
+
+/**
+ * Returns true if the specified value is |null|
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is null.
+ */
+goog.isNull = function(val) {
+ return val === null;
+};
+
+
+/**
+ * Returns true if the specified value is defined and not null
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is defined and not null.
+ */
+goog.isDefAndNotNull = function(val) {
+ // Note that undefined == null.
+ return val != null;
+};
+
+
+/**
+ * Returns true if the specified value is an array
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is an array.
+ */
+goog.isArray = function(val) {
+ return goog.typeOf(val) == 'array';
+};
+
+
+/**
+ * Returns true if the object looks like an array. To qualify as array like
+ * the value needs to be either a NodeList or an object with a Number length
+ * property.
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is an array.
+ */
+goog.isArrayLike = function(val) {
+ var type = goog.typeOf(val);
+ return type == 'array' || type == 'object' && typeof val.length == 'number';
+};
+
+
+/**
+ * Returns true if the object looks like a Date. To qualify as Date-like
+ * the value needs to be an object and have a getFullYear() function.
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is a like a Date.
+ */
+goog.isDateLike = function(val) {
+ return goog.isObject(val) && typeof val.getFullYear == 'function';
+};
+
+
+/**
+ * Returns true if the specified value is a string
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is a string.
+ */
+goog.isString = function(val) {
+ return typeof val == 'string';
+};
+
+
+/**
+ * Returns true if the specified value is a boolean
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is boolean.
+ */
+goog.isBoolean = function(val) {
+ return typeof val == 'boolean';
+};
+
+
+/**
+ * Returns true if the specified value is a number
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is a number.
+ */
+goog.isNumber = function(val) {
+ return typeof val == 'number';
+};
+
+
+/**
+ * Returns true if the specified value is a function
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is a function.
+ */
+goog.isFunction = function(val) {
+ return goog.typeOf(val) == 'function';
+};
+
+
+/**
+ * Returns true if the specified value is an object. This includes arrays
+ * and functions.
+ * @param {*} val Variable to test.
+ * @return {boolean} Whether variable is an object.
+ */
+goog.isObject = function(val) {
+ var type = goog.typeOf(val);
+ return type == 'object' || type == 'array' || type == 'function';
+};
+
+
+/**
+ * Adds a hash code field to an object. The hash code is unique for the
+ * given object.
+ * @param {Object} obj The object to get the hash code for.
+ * @return {number} The hash code for the object.
+ */
+goog.getHashCode = function(obj) {
+ // In IE, DOM nodes do not extend Object so they do not have this method.
+ // we need to check hasOwnProperty because the proto might have this set.
+
+ // TODO(abliss): There is a proposal to add hashcode as a global function to
+ // JS2 we should keep track of this process so we can use that
+ // whenever it starts to show up in the real world.
+ if (obj.hasOwnProperty && obj.hasOwnProperty(goog.HASH_CODE_PROPERTY_)) {
+ return obj[goog.HASH_CODE_PROPERTY_];
+ }
+ if (!obj[goog.HASH_CODE_PROPERTY_]) {
+ obj[goog.HASH_CODE_PROPERTY_] = ++goog.hashCodeCounter_;
+ }
+ return obj[goog.HASH_CODE_PROPERTY_];
+};
+
+
+/**
+ * Removes the hash code field from an object.
+ * @param {Object} obj The object to remove the field from.
+ */
+goog.removeHashCode = function(obj) {
+ // DOM nodes in IE are not instance of Object and throws exception
+ // for delete. Instead we try to use removeAttribute
+ if ('removeAttribute' in obj) {
+ obj.removeAttribute(goog.HASH_CODE_PROPERTY_);
+ }
+ /** @preserveTry */
+ try {
+ delete obj[goog.HASH_CODE_PROPERTY_];
+ } catch (ex) {
+ }
+};
+
+
+/**
+ * Name for hash code property. Initialized in a way to help avoid collisions
+ * with other closure javascript on the same page.
+ * @type {string}
+ * @private
+ */
+goog.HASH_CODE_PROPERTY_ = 'closure_hashCode_' +
+ Math.floor(Math.random() * 2147483648).toString(36);
+
+
+/**
+ * Counter for hash codes.
+ * @type {number}
+ * @private
+ */
+goog.hashCodeCounter_ = 0;
+
+
+/**
+ * Clone an object/array (recursively)
+ * @param {Object} proto Object to clone.
+ * @return {Object} Clone of x;.
+ */
+goog.cloneObject = function(proto) {
+ var type = goog.typeOf(proto);
+ if (type == 'object' || type == 'array') {
+ if (proto.clone) {
+ // TODO(abliss): Change to proto.clone() once # args warn is removed
+ return proto.clone.call(proto);
+ }
+ var clone = type == 'array' ? [] : {};
+ for (var key in proto) {
+ clone[key] = goog.cloneObject(proto[key]);
+ }
+ return clone;
+ }
+
+ return proto;
+};
+
+
+/**
+ * Forward declaration for the clone method. This is necessary until the
+ * compiler can better support duck-typing constructs as used in
+ * goog.cloneObject.
+ *
+ * TODO(abliss): Remove once the JSCompiler can infer that the check for
+ * proto.clone is safe in goog.cloneObject.
+ *
+ * @type {Function}
+ */
+Object.prototype.clone;
+
+
+/**
+ * Partially applies this function to a particular 'this object' and zero or
+ * more arguments. The result is a new function with some arguments of the first
+ * function pre-filled and the value of |this| 'pre-specified'.<br><br>
+ *
+ * Remaining arguments specified at call-time are appended to the pre-
+ * specified ones.<br><br>
+ *
+ * Also see: {@link #partial}.<br><br>
+ *
+ * Usage:
+ * <pre>var barMethBound = bind(myFunction, myObj, 'arg1', 'arg2');
+ * barMethBound('arg3', 'arg4');</pre>
+ *
+ * @param {Function} fn A function to partially apply.
+ * @param {Object|undefined} selfObj Specifies the object which |this| should
+ * point to when the function is run. If the value is null or undefined, it
+ * will default to the global object.
+ * @param {*} var_args Additional arguments that are partially
+ * applied to the function.
+ *
+ * @return {!Function} A partially-applied form of the function bind() was
+ * invoked as a method of.
+ */
+goog.bind = function(fn, selfObj, var_args) {
+ var context = selfObj || goog.global;
+
+ if (arguments.length > 2) {
+ var boundArgs = Array.prototype.slice.call(arguments, 2);
+ return function() {
+ // Prepend the bound arguments to the current arguments.
+ var newArgs = Array.prototype.slice.call(arguments);
+ Array.prototype.unshift.apply(newArgs, boundArgs);
+ return fn.apply(context, newArgs);
+ };
+
+ } else {
+ return function() {
+ return fn.apply(context, arguments);
+ };
+ }
+};
+
+
+/**
+ * Like bind(), except that a 'this object' is not required. Useful when the
+ * target function is already bound.
+ *
+ * Usage:
+ * var g = partial(f, arg1, arg2);
+ * g(arg3, arg4);
+ *
+ * @param {Function} fn A function to partially apply.
+ * @param {*} var_args Additional arguments that are partially
+ * applied to fn.
+ * @return {!Function} A partially-applied form of the function bind() was
+ * invoked as a method of.
+ */
+goog.partial = function(fn, var_args) {
+ var args = Array.prototype.slice.call(arguments, 1);
+ return function() {
+ // Prepend the bound arguments to the current arguments.
+ var newArgs = Array.prototype.slice.call(arguments);
+ newArgs.unshift.apply(newArgs, args);
+ return fn.apply(this, newArgs);
+ };
+};
+
+
+/**
+ * Copies all the members of a source object to a target object.
+ * @param {Object} target Target.
+ * @param {Object} source Source.
+ * @deprecated Use goog.object.extend instead.
+ */
+goog.mixin = function(target, source) {
+ for (var x in source) {
+ target[x] = source[x];
+ }
+
+ // For IE the for-in-loop does not contain any properties that are not
+ // enumerable on the prototype object (for example, isPrototypeOf from
+ // Object.prototype) but also it will not include 'replace' on objects that
+ // extend String and change 'replace' (not that it is common for anyone to
+ // extend anything except Object).
+};
+
+
+/**
+ * A simple wrapper for new Date().getTime().
+ *
+ * @return {number} An integer value representing the number of milliseconds
+ * between midnight, January 1, 1970 and the current time.
+ */
+goog.now = Date.now || (function() {
+ return new Date().getTime();
+});
+
+
+/**
+ * Evals javascript in the global scope. In IE this uses execScript, other
+ * browsers use goog.global.eval. If goog.global.eval does not evaluate in the
+ * global scope (for example, in Safari), appends a script tag instead.
+ * Throws an exception if neither execScript or eval is defined.
+ * @param {string} script JavaScript string.
+ */
+goog.globalEval = function(script) {
+ if (goog.global.execScript) {
+ goog.global.execScript(script, 'JavaScript');
+ } else if (goog.global.eval) {
+ // Test to see if eval works
+ if (goog.evalWorksForGlobals_ == null) {
+ goog.global.eval('var _et_ = 1;');
+ if (typeof goog.global['_et_'] != 'undefined') {
+ delete goog.global['_et_'];
+ goog.evalWorksForGlobals_ = true;
+ } else {
+ goog.evalWorksForGlobals_ = false;
+ }
+ }
+
+ if (goog.evalWorksForGlobals_) {
+ goog.global.eval(script);
+ } else {
+ var doc = goog.global.document;
+ var scriptElt = doc.createElement('script');
+ scriptElt.type = 'text/javascript';
+ scriptElt.defer = false;
+ // NOTE: can't use .innerHTML since "t('<test>')" will fail and
+ // .text doesn't work in Safari 2. Therefore we append a text node.
+ scriptElt.appendChild(doc.createTextNode(script));
+ doc.body.appendChild(scriptElt);
+ doc.body.removeChild(scriptElt);
+ }
+ } else {
+ throw Error('goog.globalEval not available');
+ }
+};
+
+
+/**
+ * A macro for defining composite types.
+ *
+ * By assigning goog.typedef to a name, this tells JSCompiler that this is not
+ * the name of a class, but rather it's the name of a composite type.
+ *
+ * For example,
+ * /** @type {Array|NodeList} / goog.ArrayLike = goog.typedef;
+ * will tell JSCompiler to replace all appearances of goog.ArrayLike in type
+ * definitions with the union of Array and NodeList.
+ *
+ * Does nothing in uncompiled code.
+ */
+goog.typedef = true;
+
+
+/**
+ * Optional map of CSS class names to obfuscated names used with
+ * goog.getCssName().
+ * @type {Object|undefined}
+ * @private
+ * @see goog.setCssNameMapping
+ */
+goog.cssNameMapping_;
+
+
+/**
+ * Handles strings that are intended to be used as CSS class names.
+ *
+ * Without JS Compiler the arguments are simply joined with a hyphen and passed
+ * through unaltered.
+ *
+ * With the JS Compiler the arguments are inlined, e.g:
+ * var x = goog.getCssName('foo');
+ * var y = goog.getCssName(this.baseClass, 'active');
+ * becomes:
+ * var x= 'foo';
+ * var y = this.baseClass + '-active';
+ *
+ * If a CSS renaming map is passed to the compiler it will replace symbols in
+ * the classname. If one argument is passed it will be processed, if two are
+ * passed only the modifier will be processed, as it is assumed the first
+ * argument was generated as a result of calling goog.getCssName.
+ *
+ * Names are split on 'hyphen' and processed in parts such that the following
+ * are equivalent:
+ * var base = goog.getCssName('baseclass');
+ * goog.getCssName(base, 'modifier');
+ * goog.getCssName('baseclass-modifier');
+ *
+ * If any part does not appear in the renaming map a warning is logged and the
+ * original, unobfuscated class name is inlined.
+ *
+ * @param {string} className The class name.
+ * @param {string} opt_modifier A modifier to be appended to the class name.
+ * @return {string} The class name or the concatenation of the class name and
+ * the modifier.
+ */
+goog.getCssName = function(className, opt_modifier) {
+ var cssName = className + (opt_modifier ? '-' + opt_modifier : '');
+ return (goog.cssNameMapping_ && (cssName in goog.cssNameMapping_)) ?
+ goog.cssNameMapping_[cssName] : cssName;
+};
+
+
+/**
+ * Sets the map to check when returning a value from goog.getCssName(). Example:
+ * <pre>
+ * goog.setCssNameMapping({
+ * "goog-menu": "a",
+ * "goog-menu-disabled": "a-b",
+ * "CSS_LOGO": "b",
+ * "hidden": "c"
+ * });
+ *
+ * // The following evaluates to: "a a-b".
+ * goog.getCssName('goog-menu') + ' ' + goog.getCssName('goog-menu', 'disabled')
+ * </pre>
+ * When declared as a map of string literals to string literals, the JSCompiler
+ * will replace all calls to goog.getCssName() using the supplied map if the
+ * --closure_pass flag is set.
+ *
+ * @param {!Object} mapping A map of strings to strings where keys are possible
+ * arguments to goog.getCssName() and values are the corresponding values
+ * that should be returned.
+ */
+goog.setCssNameMapping = function(mapping) {
+ goog.cssNameMapping_ = mapping;
+};
+
+
+/**
+ * Abstract implementation of goog.getMsg for use with localized messages.
+ * @param {string} str Translatable string, place holders in the form {$foo}.
+ * @param {Object} opt_values Map of place holder name to value.
+ * @return {string} message with placeholders filled.
+ */
+goog.getMsg = function(str, opt_values) {
+ var values = opt_values || {};
+ for (var key in values) {
+ str = str.replace(new RegExp('\\{\\$' + key + '\\}', 'gi'), values[key]);
+ }
+ return str;
+};
+
+
+/**
+ * Exposes an unobfuscated global namespace path for the given object.
+ * Note that fields of the exported object *will* be obfuscated,
+ * unless they are exported in turn via this function or
+ * goog.exportProperty
+ *
+ * <p>Also handy for making public items that are defined in anonymous
+ * closures.
+ *
+ * ex. goog.exportSymbol('Foo', Foo);
+ *
+ * ex. goog.exportSymbol('public.path.Foo.staticFunction',
+ * Foo.staticFunction);
+ * public.path.Foo.staticFunction();
+ *
+ * ex. goog.exportSymbol('public.path.Foo.prototype.myMethod',
+ * Foo.prototype.myMethod);
+ * new public.path.Foo().myMethod();
+ *
+ * @param {string} publicPath Unobfuscated name to export.
+ * @param {Object} object Object the name should point to.
+ * @param {Object} opt_objectToExportTo The object to add the path to; default
+ * is |goog.global|.
+ */
+goog.exportSymbol = function(publicPath, object, opt_objectToExportTo) {
+ goog.exportPath_(publicPath, object, opt_objectToExportTo);
+};
+
+
+/**
+ * Exports a property unobfuscated into the object's namespace.
+ * ex. goog.exportProperty(Foo, 'staticFunction', Foo.staticFunction);
+ * ex. goog.exportProperty(Foo.prototype, 'myMethod', Foo.prototype.myMethod);
+ * @param {Object} object Object whose static property is being exported.
+ * @param {string} publicName Unobfuscated name to export.
+ * @param {Object} symbol Object the name should point to.
+ */
+goog.exportProperty = function(object, publicName, symbol) {
+ object[publicName] = symbol;
+};
+
+
+/**
+ * Inherit the prototype methods from one constructor into another.
+ *
+ * Usage:
+ * <pre>
+ * function ParentClass(a, b) { }
+ * ParentClass.prototype.foo = function(a) { }
+ *
+ * function ChildClass(a, b, c) {
+ * ParentClass.call(this, a, b);
+ * }
+ *
+ * goog.inherits(ChildClass, ParentClass);
+ *
+ * var child = new ChildClass('a', 'b', 'see');
+ * child.foo(); // works
+ * </pre>
+ *
+ * In addition, a superclass' implementation of a method can be invoked
+ * as follows:
+ *
+ * <pre>
+ * ChildClass.prototype.foo = function(a) {
+ * ChildClass.superClass_.foo.call(this, a);
+ * // other code
+ * };
+ * </pre>
+ *
+ * @param {Function} childCtor Child class.
+ * @param {Function} parentCtor Parent class.
+ */
+goog.inherits = function(childCtor, parentCtor) {
+ /** @constructor */
+ function tempCtor() {};
+ tempCtor.prototype = parentCtor.prototype;
+ childCtor.superClass_ = parentCtor.prototype;
+ childCtor.prototype = new tempCtor();
+ childCtor.prototype.constructor = childCtor;
+};
+ </script>
+ </head>
+ <body>
+ <div>
+ JavaScript which was inline should be linked.
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/pedantic.html b/src/install/mod_pagespeed_example/pedantic.html
new file mode 100644
index 0000000..290541c
--- /dev/null
+++ b/src/install/mod_pagespeed_example/pedantic.html
@@ -0,0 +1,12 @@
+<html>
+ <head>
+ <title>pedantic example</title>
+ <style>h1 {color : #ff0000;}</style>
+ <script>var x=1;</script>
+ </head>
+ <body>
+ Here is a disabled button:
+ <button name="ok" disabled="disabled">button</button><br/>
+ Here is a text input: <input name="email" type="text"/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/prioritize_critical_css.html b/src/install/mod_pagespeed_example/prioritize_critical_css.html
new file mode 100644
index 0000000..a05cafa
--- /dev/null
+++ b/src/install/mod_pagespeed_example/prioritize_critical_css.html
@@ -0,0 +1,40 @@
+<!doctype html>
+<html>
+ <head>
+ <title>prioritize_critical_css example</title>
+ <link rel="stylesheet" type="text/css" href="styles/blue.css">
+ <link rel="stylesheet" type="text/css" href="styles/big.css">
+ <style>
+ @import url(styles/all_using_imports.css);
+ </style>
+ <link rel="stylesheet" href="styles/rewrite_css_images.css" media="all">
+ </head>
+ <body>
+ <div class="foo" style="display:inline-block;"></div>
+ <span class="blue big">Prioritize Critical CSS</span>
+ <div class="bold">
+ <p>
+ This filter first instruments the page to discover which CSS selectors
+ will be used by the browser. It uses instrumentation results to select
+ the critical CSS rules from the page and inlines those in <style>
+ tags at the top of the document. It lazily loads the original CSS
+ <style> and <link> tags after the page has loaded. </p>
+ <p>
+ When you load this page, you may see one of several things:
+ <ul>
+ <li>An unoptimized, uninstrumented page if pagespeed is still loading
+ and processing the CSS resources on the page.</li>
+ <li>An unoptimized page with instrumentation code at the bottom, once
+ pagespeed has obtained all the CSS resources the page contains.</li>
+ <li>A page with the critical CSS in <style> tags at the top of the
+ document. The original CSS will be in a <noscript> block at the
+ bottom of the page, followed by a script that makes them visible to the
+ browser. This page might also contain additional instrumentation code
+ to ensure that the critical CSS is still valid.</li>
+ </ul>
+ <p>If you see a flash of unstyled content as the page is loading, it is
+ possible the beacon results that pagespeed collected did not include rules
+ that apply to your browser. </p>
+ </div>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/proxy_external_resource.html b/src/install/mod_pagespeed_example/proxy_external_resource.html
new file mode 100644
index 0000000..c7a1686
--- /dev/null
+++ b/src/install/mod_pagespeed_example/proxy_external_resource.html
@@ -0,0 +1,9 @@
+<!-- This example demonstrates proxying & optimizing a resource served
+ on another domain -->
+
+<!--
+ Example image proxied and optimized. This image is a 1x1
+ transparent image stored on Google's static content server for the
+ benefit of lazyload_images.
+-->
+<img src="http://www.gstatic.com/psa/static/1.gif"/>
diff --git a/src/install/mod_pagespeed_example/remove_comments.html b/src/install/mod_pagespeed_example/remove_comments.html
new file mode 100644
index 0000000..56df6dc
--- /dev/null
+++ b/src/install/mod_pagespeed_example/remove_comments.html
@@ -0,0 +1,14 @@
+<html>
+ <head>
+ <title>remove_comments example</title>
+ </head>
+ <body>
+ <!-- This comment will be removed -->
+ <!-- google_ad_section_start -- retained due to RetainComment directive in .htaccess -->
+ <div>Hello, world!</div>
+ <!-- Apply IE-specific CSS -->
+ <!-- [if IE ]>
+ <link href="iecss.css" rel="stylesheet" type="text/css" title="This IE directive will be preserved">
+ <![endif]-->
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/remove_quotes.html b/src/install/mod_pagespeed_example/remove_quotes.html
new file mode 100644
index 0000000..3702364
--- /dev/null
+++ b/src/install/mod_pagespeed_example/remove_quotes.html
@@ -0,0 +1,8 @@
+<html>
+ <head>
+ <title>remove_quotes example</title>
+ </head>
+ <body>
+ <img src="images/BikeCrashIcn.png" align='right' alt="" border="2" width='70' height='70'>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/resize_mobile_images.html b/src/install/mod_pagespeed_example/resize_mobile_images.html
new file mode 100644
index 0000000..4a15bdf
--- /dev/null
+++ b/src/install/mod_pagespeed_example/resize_mobile_images.html
@@ -0,0 +1,12 @@
+<html>
+ <head>
+ <title>resize_mobile_images example</title>
+ </head>
+ <body>
+ <br/>
+ <p>A 1023x766 image. PageSpeed will serve an inlined, low-quality
+ image that is scaled down for mobile browsers until the original is
+ loaded.</p>
+ <img src="images/Puzzle.jpg"/><br/>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/resize_rendered_dimensions/image_resize_using_rendered_dimensions.html b/src/install/mod_pagespeed_example/resize_rendered_dimensions/image_resize_using_rendered_dimensions.html
new file mode 100644
index 0000000..ab4f009
--- /dev/null
+++ b/src/install/mod_pagespeed_example/resize_rendered_dimensions/image_resize_using_rendered_dimensions.html
@@ -0,0 +1,13 @@
+<!DOCTYPE html>
+<html>
+ <head>
+ <title>resize_using_rendered_dimensions example</title>
+ </head>
+ <body>
+ <img src="../images/OptPuzzle.jpg" alt="resized with js" name=image0 />
+ <script type="text/javascript">
+ image0.width=150;
+ image0.height=100;
+ </script>
+ </body>
+</html>
diff --git a/src/install/mod_pagespeed_example/responsive_images.html b/src/install/mod_pagespeed_example/responsive_images.html
new file mode 100644
index 0000000..c229dd8
--- /dev/null
+++ b/src/install/mod_pagespeed_example/responsive_images.html
@@ -0,0 +1,32 @@
+<html>
+ <head>
+ <title>responsive_images example</title>
+ </head>
+ <body>
+ <p>A 1023x766 image displayed in a 256x192 thumbnail. PageSpeed
+ will resize it and also serve with a srcset for other sizes.</p>
+ <img src="images/Puzzle.jpg" width="256" height="192"/><br/>
+
+ <p>A 1023x766 image displayed at full size. PageSpeed may recompress it,
+ but will not add a srcset.</p>
+ <img src="images/Puzzle.jpg" width="1023" height="766"/><br/>
+
+ <p>A 1023x766 image displayed at 682x511 (2/3 scale). PageSpeed will
+ resize it and also serve a 1.5x version in srcset (but not 2x since
+ that would be redundant).</p>
+ <img src="images/Puzzle.jpg" width="682" height="511"/><br/>
+
+ <p>A small image. PageSpeed will inline it and not set a srcset.</p>
+ <img src="images/Cuppa.png" width="10" height="10"/><br/>
+
+ <p>A medium size image with no listed width nor height. PageSpeed
+ will losslessly re-encode it, but not resize it nor add a srcset
+ for other sizes.</p>
+ <img src="images/BikeCrashIcn.png"/><br/>
+
+ <p>An image with data-pagespeed-no-transform. PageSpeed will not
+ transform this file.</p>
+ <img src="images/disclosure_open_plus.png" width="11" height="13"
<