diff --git a/.github/ISSUE_TEMPLATE.md b/.github/ISSUE_TEMPLATE.md
new file mode 100644
index 0000000000000000000000000000000000000000..df9cfe5926f28a63e4b8b83841cd23f2f2c0a418
--- /dev/null
+++ b/.github/ISSUE_TEMPLATE.md
@@ -0,0 +1,10 @@
+Before reporting an issue on Gnocchi, please be sure to provide all necessary
+information.
+
+### Which version of Gnocchi are you using
+
+### How to reproduce your problem
+
+### What is the result that you get
+
+### What is the result that you expected
diff --git a/.gitignore b/.gitignore
index 48489cc86885a60d5e59d362509586876e815a04..415c6ca086365424877c9cc43373360672723fcf 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,13 +1,16 @@
-build/
-.local/
-.testrepository/
-.stestr/
-.tox/
-func-results.json
-test-charm/
-.project
-.pydevproject
+.testrepository
 *.pyc
-**/__pycache__
-interfaces
-layers
+.tox
+*.egg-info
+AUTHORS
+ChangeLog
+etc/gnocchi/gnocchi.conf
+gnocchi/rest/gnocchi-api
+doc/build
+doc/source/rest.rst
+doc/source/gnocchi.conf.sample
+releasenotes/build
+cover
+.coverage
+dist
+upgrade/
diff --git a/.mailmap b/.mailmap
new file mode 100644
index 0000000000000000000000000000000000000000..caddbe0ce35df96a915c196352f398efbe3601ad
--- /dev/null
+++ b/.mailmap
@@ -0,0 +1 @@
+gord chung <gord@live.ca>
diff --git a/.mergify.yml b/.mergify.yml
new file mode 100644
index 0000000000000000000000000000000000000000..3bc9ae1ebb8cac68643a2561e4e4601824ab7967
--- /dev/null
+++ b/.mergify.yml
@@ -0,0 +1,66 @@
+pull_request_rules:
+  - name: automatic merge
+    actions:
+      merge:
+        method: rebase
+        rebase_fallback: merge
+        strict: true
+    conditions:
+    - label!=work-in-progress
+    - '#approved-reviews-by>=1'
+    - status-success=continuous-integration/travis-ci/pr
+  - name: automatic merge backports from Mergify
+    actions:
+      merge:
+        method: rebase
+        rebase_fallback: merge
+        strict: true
+    conditions:
+    - base~=^stable/.*
+    - label!=work-in-progress
+    - author=mergify[bot]
+    - status-success=continuous-integration/travis-ci/pr
+
+# Backports to stable branches
+  - actions:
+      backport:
+        branches:
+        - stable/3.0
+    conditions:
+    - label=backport-to-3.0
+    name: backport stable/3.0
+  - actions:
+      backport:
+        branches:
+        - stable/3.1
+    conditions:
+    - label=backport-to-3.1
+    name: backport stable/3.1
+  - actions:
+      backport:
+        branches:
+        - stable/4.0
+    conditions:
+    - label=backport-to-4.0
+    name: backport stable/4.0
+  - actions:
+      backport:
+        branches:
+        - stable/4.1
+    conditions:
+    - label=backport-to-4.1
+    name: backport stable/4.1
+  - actions:
+      backport:
+        branches:
+        - stable/4.2
+    conditions:
+    - label=backport-to-4.2
+    name: backport stable/4.2
+  - actions:
+      backport:
+        branches:
+        - stable/4.3
+    conditions:
+    - label=backport-to-4.3
+    name: backport stable/4.3
diff --git a/.testr.conf b/.testr.conf
new file mode 100644
index 0000000000000000000000000000000000000000..6e2e4a5e0d1d14880c3f0cf509d24efaca74fb99
--- /dev/null
+++ b/.testr.conf
@@ -0,0 +1,5 @@
+[DEFAULT]
+test_command=${PYTHON:-python} -m subunit.run discover -t .  ${GNOCCHI_TEST_PATH:-gnocchi/tests} $LISTOPT $IDOPTION
+test_id_option=--load-list $IDFILE
+test_list_option=--list
+group_regex=(gabbi\.suitemaker\.test_gabbi((_live_|_)([^_]+)))_
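A note on the `group_regex` above (editor's sketch, not part of the change): testrepository buckets test IDs by the first capture group, so every test generated from the same gabbi YAML file lands in the same worker and its ordered HTTP sequence survives parallel runs. A minimal illustration, assuming hypothetical gabbi-style test IDs:

```python
import re

# The group_regex from .testr.conf: test IDs that yield the same first
# capture group are scheduled into the same test runner process.
GROUP_REGEX = r"(gabbi\.suitemaker\.test_gabbi((_live_|_)([^_]+)))_"

# Hypothetical gabbi-style test IDs, for illustration only.
test_ids = [
    "gabbi.suitemaker.test_gabbi_aggregation_test_create",
    "gabbi.suitemaker.test_gabbi_aggregation_test_delete",
    "gabbi.suitemaker.test_gabbi_live_resources_test_list",
]

for test_id in test_ids:
    group = re.match(GROUP_REGEX, test_id).group(1)
    print(test_id, "->", group)
# The first two IDs share the group
# "gabbi.suitemaker.test_gabbi_aggregation", so they run in one worker;
# the "live" test falls into a group of its own.
```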
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000000000000000000000000000000000000..7a22aa91e3d5467a1d6782aee966a004c0b150a4
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,81 @@
+language: generic
+sudo: required
+
+services:
+  - docker
+
+cache:
+  directories:
+    - ~/.cache/pip
+env:
+  - TARGET: pep8
+  - TARGET: docs
+  - TARGET: docs-gnocchi.xyz
+
+  - TARGET: py27-mysql-ceph-upgrade-from-4.3
+  - TARGET: py37-postgresql-file-upgrade-from-4.3
+
+  - TARGET: py27-mysql
+  - TARGET: py37-mysql
+  - TARGET: py27-postgresql
+  - TARGET: py37-postgresql
+
+before_script:
+  # NOTE(sileht): We need to fetch all tags/branches for the documentation.
+  # For the multiversioning, we change all remote refs to point to
+  # the pull request checkout. So the "master" branch will be the PR sha and not
+  # the real "master" branch. This ensures the doc build uses the PR code for
+  # the initial doc setup.
+  - if \[ "$TRAVIS_PULL_REQUEST" != "false" -o  -n "$TRAVIS_TAG" \]; then
+      set -x;
+      case $TARGET in
+        docs*)
+          git config --get-all remote.origin.fetch;
+          git config --unset-all remote.origin.fetch;
+          git config --add remote.origin.fetch +refs/heads/*:refs/remotes/origin/*;
+          git config --get-all remote.origin.fetch;
+          git fetch --unshallow --tags;
+          ;;
+      esac ;
+      case $TARGET in
+        docs-gnocchi.xyz)
+          git branch -a | sed -n "/\/HEAD /d; /\/master$/d; s,remotes/origin/,,p;" | xargs -i git branch {} origin/{} ;
+          git branch -D master;
+          git checkout -b master;
+          git remote set-url origin file:///home/tester/src;
+          git ls-remote --heads --tags | grep heads;
+          ;;
+      esac ;
+      set +x;
+    fi
+install:
+  - if \[ "$TRAVIS_PULL_REQUEST" != "false" -o  -n "$TRAVIS_TAG" \]; then
+      docker pull gnocchixyz/ci-tools:latest;
+    fi
+script:
+  - if \[ "$TRAVIS_PULL_REQUEST" != "false" -o  -n "$TRAVIS_TAG" \]; then
+      docker run -v ~/.cache/pip:/home/tester/.cache/pip -v $(pwd):/home/tester/src gnocchixyz/ci-tools:latest tox -e ${TARGET} ;
+    fi
+
+notifications:
+  email: false
+  irc:
+    on_success: change
+    on_failure: always
+    skip_join: true
+    channels:
+      - "irc.freenode.org#gnocchi"
+
+before_deploy:
+  # Remove |substitutions| to fix rendering on pypi.
+  - sed -i -e 's/|\([a-zA-Z0-9 ]\+\)|/\1/g' README.rst
+
+deploy:
+  provider: pypi
+  user: jd
+  password:
+    secure: c+Ccx3SHCWepiy0PUxDJ7XO9r3aNYnHjkzxF5c/kjV8QaCJayAJEgXJnBKhvjroqwgn7JPUgpD6QdSWdB4FqjbZYQ3I3oHOO1YL0vYYa8wHG5HuMsMp4J8qvzgs3QNQDECPI1mXsPevn3VMfGszUN+6BQrHB3FbZsTtOmE+Kmgok5NCT+obsfEhVea/UOD0XFUkVW9VJhPjQ2ytvYvFIc46/73GQf2Er/5DCa/4GGDEBSD++bDJgp3kQj438xslCAFeZWDwGsa+cTc43PI0Y0+E144ySVY7QyVbZ1B66a1BGWVrXJuM+gW/eIBCMN1FJXmD7CDdPa22azKI8dfMF7qaH3Oiv3cVovPWpubOvhTUHUFwG8+W7Fx+zUKktCWiLer/fZvEd3W8tcgby2kNOdcUfKfDB2ImZJ+P694/OJ4jJ8T5TQerruNoP2OstzcBMon77Ry0XawXR15SZd4JhbqhSi+h7XV6EYmct1UN4zoysA7fx/cWHcBxdnm2G6R0gzmOiiGUd74ptU8lZ3IlEP6EZckK/OZOdy1I8EQeUe7aiTooXZDAn07iPkDZliYRr2e36ij/xjtWCe1AjCksn/xdKfHOKJv5UVob495DU2GuNObe01ewXzexcnldjfp9Sb8SVEFuhHx6IvH5OC+vAq+BVYu2jwvMcVfXi3VSOkB4=
+  on:
+    all_branches: true
+    tags: true
+  distributions: "sdist bdist_wheel"
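For context on the `before_deploy` step (editor's sketch, not part of the change): the sed strips reStructuredText substitution references such as `|Build Status|`, which PyPI's README renderer cannot resolve. An equivalent expressed in Python:

```python
import re

# Python equivalent of the sed expression above: drop the surrounding
# pipes from |substitution| markers so plain text remains.
readme = "Gnocchi |Build Status| is released from tagged commits."
print(re.sub(r"\|([a-zA-Z0-9 ]+)\|", r"\1", readme))
# -> "Gnocchi Build Status is released from tagged commits."
```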
diff --git a/LICENSE b/LICENSE
index d645695673349e3947e8e5ae42332d0ac3164cd7..68c771a099958211169377d766a7389422f5573d 100644
--- a/LICENSE
+++ b/LICENSE
@@ -174,29 +174,3 @@
       incurred by, or claims asserted against, such Contributor by reason
       of your accepting any such warranty or additional liability.
 
-   END OF TERMS AND CONDITIONS
-
-   APPENDIX: How to apply the Apache License to your work.
-
-      To apply the Apache License to your work, attach the following
-      boilerplate notice, with the fields enclosed by brackets "[]"
-      replaced with your own identifying information. (Don't include
-      the brackets!)  The text should be enclosed in the appropriate
-      comment syntax for the file format. We also recommend that a
-      file or class name and description of purpose be included on the
-      same "printed page" as the copyright notice for easier
-      identification within third-party archives.
-
-   Copyright [yyyy] [name of copyright owner]
-
-   Licensed under the Apache License, Version 2.0 (the "License");
-   you may not use this file except in compliance with the License.
-   You may obtain a copy of the License at
-
-       http://www.apache.org/licenses/LICENSE-2.0
-
-   Unless required by applicable law or agreed to in writing, software
-   distributed under the License is distributed on an "AS IS" BASIS,
-   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-   See the License for the specific language governing permissions and
-   limitations under the License.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644
index 0000000000000000000000000000000000000000..54a0a8cb0d1ae224ed8cbfab03e8285d73b257d1
--- /dev/null
+++ b/MANIFEST.in
@@ -0,0 +1,4 @@
+include ChangeLog
+include AUTHORS
+exclude .gitignore
+exclude .github
diff --git a/README.rst b/README.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9cd8e67e1376c2d0318d3865b63d4b58ceacb299
--- /dev/null
+++ b/README.rst
@@ -0,0 +1,34 @@
+===============================
+ Gnocchi - Metric as a Service
+===============================
+
+.. image:: https://travis-ci.org/gnocchixyz/gnocchi.png?branch=master
+    :target: https://travis-ci.org/gnocchixyz/gnocchi
+    :alt: Build Status
+
+.. image:: https://badge.fury.io/py/gnocchi.svg
+    :target: https://badge.fury.io/py/gnocchi
+
+.. image:: doc/source/_static/gnocchi-logo.png
+
+Gnocchi is an open-source time series database.
+
+The problem that Gnocchi solves is the storage and indexing of time series
+data and resources at large scale. This is useful in modern cloud platforms,
+which are not only huge but also dynamic and potentially multi-tenant.
+Gnocchi takes all of that into account.
+
+Gnocchi has been designed to handle large amounts of aggregates being stored
+while remaining performant, scalable and fault-tolerant, and to do so without
+any hard dependency on a complex storage system.
+
+Gnocchi takes a unique approach to time series storage: rather than storing
+raw data points, it aggregates them before storing them. This built-in feature
+is different from most other time series databases, which usually support
+this mechanism as an option and compute aggregation (average, minimum, etc.) at
+query time.
+
+Because Gnocchi computes all the aggregations at ingestion, getting the data
+back is extremely fast, as it just needs to read back the pre-computed results.
+
+You can read the full documentation online at http://gnocchi.xyz.
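The pre-aggregation approach the README describes is easy to sketch (editor's illustration in Python; the names and the fixed 300-second granularity are assumptions, not Gnocchi's actual API): each incoming point updates a per-window running count and sum, so raw points are never stored and a read is a plain lookup over finished aggregates.

```python
# Illustrative sketch of ingestion-time aggregation, not Gnocchi's API.
GRANULARITY = 300  # seconds per window, an assumed example value

buckets = {}  # window start -> (count, total)

def ingest(timestamp, value):
    # Roll the point into its window as it arrives; the raw point
    # itself is never stored.
    window = timestamp - timestamp % GRANULARITY
    count, total = buckets.get(window, (0, 0.0))
    buckets[window] = (count + 1, total + value)

def read_means():
    # Reading back is a lookup over already-computed aggregates.
    return {w: total / count for w, (count, total) in sorted(buckets.items())}

ingest(1000, 4.0)
ingest(1100, 6.0)   # same 300 s window as the first point
ingest(1400, 10.0)  # next window
print(read_means())  # {900: 5.0, 1200: 10.0}
```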
diff --git a/doc/source/_static/architecture.svg b/doc/source/_static/architecture.svg
new file mode 100644
index 0000000000000000000000000000000000000000..c883481efd1c6a0c279997892f775730e3fab798
--- /dev/null
+++ b/doc/source/_static/architecture.svg
@@ -0,0 +1,4 @@
+<?xml version="1.0" standalone="yes"?>
+
+<svg version="1.1" viewBox="0.0 0.0 828.0997375328084 542.5485564304462" fill="none" stroke="none" stroke-linecap="square" stroke-miterlimit="10" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"><clipPath id="p.0"><path d="m0 0l828.09973 0l0 542.5486l-828.09973 0l0 -542.5486z" clip-rule="nonzero"></path></clipPath><g clip-path="url(#p.0)"><path fill="#000000" fill-opacity="0.0" d="m0 0l828.09973 0l0 542.5486l-828.09973 0z" fill-rule="evenodd"></path><path fill="#93c47d" d="m659.46643 38.3832l0 0c0 10.2542305 33.250732 18.566929 74.2677 18.566929c41.016907 0 74.2677 -8.312698 74.2677 -18.566929l0 120.62992c0 10.254227 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566925z" fill-rule="evenodd"></path><path fill="#bedbb1" d="m659.46643 38.3832l0 0c0 -10.2542305 33.250732 -18.566929 74.2677 -18.566929c41.016907 0 74.2677 8.312698 74.2677 18.566929l0 0c0 10.2542305 -33.250793 18.566929 -74.2677 18.566929c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566929z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m808.00183 38.3832l0 0c0 10.2542305 -33.250793 18.566929 -74.2677 18.566929c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566929l0 0c0 -10.2542305 33.250732 -18.566929 74.2677 -18.566929c41.016907 0 74.2677 8.312698 74.2677 18.566929l0 120.62992c0 10.254227 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566925l0 -120.62992" fill-rule="evenodd"></path><path fill="#000000" d="m698.84814 103.90163l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm22.134521 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm15.547607 4.65625q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 
-0.609375zm3.4069824 2.0l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm16.453125 2.9375l0 -1.453125q-1.140625 1.671875 -3.125 1.671875q-0.859375 0 -1.625 -0.328125q-0.75 -0.34375 -1.125 -0.84375q-0.359375 -0.5 -0.515625 -1.234375q-0.09375 -0.5 -0.09375 -1.5625l0 -6.109375l1.671875 0l0 5.46875q0 1.3125 0.09375 1.765625q0.15625 0.65625 0.671875 1.03125q0.515625 0.375 1.265625 0.375q0.75 0 1.40625 -0.375q0.65625 -0.390625 0.921875 -1.046875q0.28125 -0.671875 0.28125 -1.9375l0 -5.28125l1.671875 0l0 9.859375l-1.5 0zm3.9069824 0l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.9782715 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875z" fill-rule="nonzero"></path><path fill="#000000" d="m701.9223 121.52663l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 
-0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.490417 2.875l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm0.9020386 -3.421875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.266357 4.921875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.6658325 -1.21875q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 -0.609375zm3.7819214 5.75l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm16.047607 1.9375l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 
-1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875z" fill-rule="nonzero"></path><path fill="#a4c2f4" d="m659.46643 211.67061l0 0c0 10.254227 33.250732 18.566925 74.2677 18.566925c41.016907 0 74.2677 -8.312698 74.2677 -18.566925l0 120.62993c0 10.254211 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312714 -74.2677 -18.566925z" fill-rule="evenodd"></path><path fill="#c8daf8" d="m659.46643 211.67061l0 0c0 -10.254242 33.250732 -18.56694 74.2677 -18.56694c41.016907 0 74.2677 8.312698 74.2677 18.56694l0 0c0 10.254227 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566925z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m808.00183 211.67061l0 0c0 10.254227 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312698 -74.2677 -18.566925l0 0c0 -10.254242 33.250732 -18.56694 74.2677 -18.56694c41.016907 0 74.2677 8.312698 74.2677 18.56694l0 120.62993c0 10.254211 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312714 -74.2677 -18.566925l0 -120.62993" fill-rule="evenodd"></path><path fill="#000000" d="m690.65594 277.18903l5.234375 -13.59375l1.9375 0l5.5625 13.59375l-2.046875 0l-1.59375 -4.125l-5.6875 0l-1.484375 4.125l-1.921875 0zm3.921875 -5.578125l4.609375 0l-1.40625 -3.78125q-0.65625 -1.703125 -0.96875 -2.8125q-0.265625 1.3125 -0.734375 2.59375l-1.5 4.0zm9.5060425 6.390625l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm9.000732 5.921875l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 
-1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm9.281982 5.109375l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.9782715 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm8.813232 6.6875l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm15.735107 3.890625q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 -0.609375zm7.7351074 3.4375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm8.277039 -1.671875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 
-2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875z" fill-rule="nonzero"></path><path fill="#000000" d="m701.9223 294.81403l1.6875 -0.140625q0.125 1.015625 0.5625 1.671875q0.4375 0.65625 1.359375 1.0625q0.9375 0.40625 2.09375 0.40625q1.03125 0 1.8125 -0.3125q0.796875 -0.3125 1.1875 -0.84375q0.390625 -0.53125 0.390625 -1.15625q0 -0.640625 -0.375 -1.109375q-0.375 -0.484375 -1.234375 -0.8125q-0.546875 -0.21875 -2.421875 -0.65625q-1.875 -0.453125 -2.625 -0.859375q-0.96875 -0.515625 -1.453125 -1.265625q-0.46875 -0.75 -0.46875 -1.6875q0 -1.03125 0.578125 -1.921875q0.59375 -0.90625 1.703125 -1.359375q1.125 -0.46875 2.5 -0.46875q1.515625 0 2.671875 0.484375q1.15625 0.484375 1.765625 1.4375q0.625 0.9375 0.671875 2.140625l-1.71875 0.125q-0.140625 -1.28125 -0.953125 -1.9375q-0.796875 -0.671875 -2.359375 -0.671875q-1.625 0 -2.375 0.609375q-0.75 0.59375 -0.75 1.4375q0 0.734375 0.53125 1.203125q0.515625 0.46875 2.703125 0.96875q2.203125 0.5 3.015625 0.875q1.1875 0.546875 1.75 1.390625q0.578125 0.828125 0.578125 1.921875q0 1.09375 -0.625 2.0625q-0.625 0.953125 -1.796875 1.484375q-1.15625 0.53125 -2.609375 0.53125q-1.84375 0 -3.09375 -0.53125q-1.25 -0.546875 -1.96875 -1.625q-0.703125 -1.078125 -0.734375 -2.453125zm16.490417 2.875l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm0.9020386 -3.421875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.266357 4.921875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.6658325 -1.21875q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 
-0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 -0.609375zm3.7819214 5.75l1.609375 0.25q0.109375 0.75 0.578125 1.09375q0.609375 0.453125 1.6875 0.453125q1.171875 0 1.796875 -0.46875q0.625 -0.453125 0.859375 -1.28125q0.125 -0.515625 0.109375 -2.15625q-1.09375 1.296875 -2.71875 1.296875q-2.03125 0 -3.15625 -1.46875q-1.109375 -1.46875 -1.109375 -3.515625q0 -1.40625 0.515625 -2.59375q0.515625 -1.203125 1.484375 -1.84375q0.96875 -0.65625 2.265625 -0.65625q1.75 0 2.875 1.40625l0 -1.1875l1.546875 0l0 8.515625q0 2.3125 -0.46875 3.265625q-0.46875 0.96875 -1.484375 1.515625q-1.015625 0.5625 -2.5 0.5625q-1.765625 0 -2.859375 -0.796875q-1.078125 -0.796875 -1.03125 -2.390625zm1.375 -5.921875q0 1.953125 0.765625 2.84375q0.78125 0.890625 1.9375 0.890625q1.140625 0 1.921875 -0.890625q0.78125 -0.890625 0.78125 -2.78125q0 -1.8125 -0.8125 -2.71875q-0.796875 -0.921875 -1.921875 -0.921875q-1.109375 0 -1.890625 0.90625q-0.78125 0.890625 -0.78125 2.671875zm16.047607 1.9375l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875z" fill-rule="nonzero"></path><path fill="#b4a7d6" d="m659.46643 384.958l0 0c0 10.254242 33.250732 18.566925 74.2677 18.566925c41.016907 0 74.2677 -8.312683 74.2677 -18.566925l0 120.62991c0 10.254242 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312683 -74.2677 -18.566925z" fill-rule="evenodd"></path><path fill="#d2cae6" d="m659.46643 384.958l0 0c0 -10.254242 33.250732 -18.566925 74.2677 -18.566925c41.016907 0 74.2677 8.312683 74.2677 18.566925l0 0c0 10.254242 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312683 -74.2677 -18.566925z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m808.00183 384.958l0 0c0 10.254242 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312683 -74.2677 -18.566925l0 0c0 -10.254242 33.250732 -18.566925 74.2677 -18.566925c41.016907 0 74.2677 8.312683 74.2677 18.566925l0 120.62991c0 10.254242 -33.250793 18.566925 -74.2677 18.566925c-41.016968 0 -74.2677 -8.312683 -74.2677 -18.566925l0 -120.62991" fill-rule="evenodd"></path><path fill="#000000" d="m712.6805 461.47644l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0zm4.6676636 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 
-1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm16.766357 0l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm16.016296 1.75l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm8.000732 5.875l3.59375 -5.125l-3.328125 -4.734375l2.09375 0l1.515625 2.3125q0.421875 0.65625 0.671875 1.109375q0.421875 -0.609375 0.765625 -1.09375l1.65625 -2.328125l1.984375 0l-3.390625 4.640625l3.65625 5.21875l-2.046875 0l-2.03125 -3.0625l-0.53125 -0.828125l-2.59375 3.890625l-2.015625 0z" fill-rule="nonzero"></path><path fill="#d9ead3" d="m225.50345 454.8189l198.86615 0l0 64.59839l-198.86615 0z" fill-rule="evenodd"></path><path stroke="#efefef" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m225.50345 454.8189l198.86615 0l0 64.59839l-198.86615 0z" fill-rule="evenodd"></path><path fill="#38761d" d="m239.3472 475.525q0.859375 0 1.6875 0.453125q0.828125 0.4375 1.28125 1.265625q0.46875 0.828125 0.46875 1.71875q0 1.421875 -1.015625 2.4375q-1.0 1.0 -2.421875 1.0q-1.4375 0 -2.453125 -1.0q-1.0 -1.015625 -1.0 -2.4375q0 -0.90625 0.453125 -1.71875q0.46875 -0.828125 1.296875 -1.265625q0.828125 -0.453125 1.703125 -0.453125zm10.328125 11.140625l0 -11.484375l1.28125 0l0 1.078125q0.453125 -0.640625 1.015625 -0.953125q0.578125 -0.3125 1.390625 -0.3125q1.0625 0 1.875 0.546875q0.8125 0.546875 1.21875 1.546875q0.421875 0.984375 0.421875 2.171875q0 1.28125 -0.46875 2.296875q-0.453125 1.015625 -1.328125 1.5625q-0.859375 0.546875 -1.828125 0.546875q-0.703125 0 -1.265625 -0.296875q-0.546875 -0.296875 -0.90625 -0.75l0 4.046875l-1.40625 0zm1.265625 -7.296875q0 1.609375 0.640625 2.375q0.65625 0.765625 1.578125 0.765625q0.9375 0 1.609375 -0.796875q0.671875 -0.796875 0.671875 -2.453125q0 -1.59375 -0.65625 -2.375q-0.65625 -0.796875 -1.5625 -0.796875q-0.890625 0 -1.59375 0.84375q-0.6875 0.84375 -0.6875 2.4375zm7.6171875 4.109375l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm4.8125 -4.15625q0 -2.296875 1.28125 -3.40625q1.078125 -0.921875 2.609375 -0.921875q1.71875 0 2.796875 1.125q1.09375 1.109375 1.09375 3.09375q0 1.59375 -0.484375 2.515625q-0.484375 0.921875 -1.40625 1.4375q-0.90625 0.5 -2.0 0.5q-1.734375 0 
-2.8125 -1.109375q-1.078125 -1.125 -1.078125 -3.234375zm1.453125 0q0 1.59375 0.6875 2.390625q0.703125 0.796875 1.75 0.796875q1.046875 0 1.734375 -0.796875q0.703125 -0.796875 0.703125 -2.4375q0 -1.53125 -0.703125 -2.328125q-0.6875 -0.796875 -1.734375 -0.796875q-1.046875 0 -1.75 0.796875q-0.6875 0.78125 -0.6875 2.375zm13.3828125 1.109375l1.390625 0.1875q-0.234375 1.421875 -1.171875 2.234375q-0.921875 0.8125 -2.28125 0.8125q-1.703125 0 -2.75 -1.109375q-1.03125 -1.125 -1.03125 -3.203125q0 -1.34375 0.4375 -2.34375q0.453125 -1.015625 1.359375 -1.515625q0.921875 -0.5 1.984375 -0.5q1.359375 0 2.21875 0.6875q0.859375 0.671875 1.09375 1.9375l-1.359375 0.203125q-0.203125 -0.828125 -0.703125 -1.25q-0.484375 -0.421875 -1.1875 -0.421875q-1.0625 0 -1.734375 0.765625q-0.65625 0.75 -0.65625 2.40625q0 1.671875 0.640625 2.4375q0.640625 0.75 1.671875 0.75q0.828125 0 1.375 -0.5q0.5625 -0.515625 0.703125 -1.578125zm8.265625 0.375l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.2734375 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm8.0 0l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 
-0.671875q-0.84375 -0.671875 -1.078125 -2.0zm13.0078125 2.484375l0 -8.296875l1.265625 0l0 1.171875q0.90625 -1.359375 2.640625 -1.359375q0.75 0 1.375 0.265625q0.625 0.265625 0.9375 0.703125q0.3125 0.4375 0.4375 1.046875q0.078125 0.390625 0.078125 1.359375l0 5.109375l-1.40625 0l0 -5.046875q0 -0.859375 -0.171875 -1.28125q-0.15625 -0.4375 -0.578125 -0.6875q-0.40625 -0.25 -0.96875 -0.25q-0.90625 0 -1.5625 0.578125q-0.640625 0.5625 -0.640625 2.15625l0 4.53125l-1.40625 0zm14.5703125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm9.3671875 4.953125l-2.546875 -8.296875l1.453125 0l1.328125 4.78125l0.484375 1.78125q0.03125 -0.125 0.4375 -1.703125l1.3125 -4.859375l1.453125 0l1.234375 4.8125l0.421875 1.578125l0.46875 -1.59375l1.421875 -4.796875l1.375 0l-2.59375 8.296875l-1.46875 0l-1.3125 -4.96875l-0.328125 -1.421875l-1.671875 6.390625l-1.46875 0zm14.46875 0l0 -8.296875l1.25 0l0 1.15625q0.390625 -0.609375 1.03125 -0.96875q0.65625 -0.375 1.484375 -0.375q0.921875 0 1.515625 0.390625q0.59375 0.375 0.828125 1.0625q0.984375 -1.453125 2.5625 -1.453125q1.234375 0 1.890625 0.6875q0.671875 0.671875 0.671875 2.09375l0 5.703125l-1.390625 0l0 -5.234375q0 -0.84375 -0.140625 -1.203125q-0.140625 -0.375 -0.5 -0.59375q-0.359375 -0.234375 -0.84375 -0.234375q-0.875 0 -1.453125 0.578125q-0.578125 0.578125 -0.578125 1.859375l0 4.828125l-1.40625 0l0 -5.390625q0 -0.9375 -0.34375 -1.40625q-0.34375 -0.46875 -1.125 -0.46875q-0.59375 0 -1.09375 0.3125q-0.5 0.3125 -0.734375 0.921875q-0.21875 0.59375 -0.21875 1.71875l0 4.3125l-1.40625 0zm19.0 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm13.2421875 3.921875q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 
-2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.0390625 1.6875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0zm14.0 2.484375l0 -1.21875q-0.96875 1.40625 -2.640625 1.40625q-0.734375 0 -1.375 -0.28125q-0.625 -0.28125 -0.9375 -0.703125q-0.3125 -0.4375 -0.4375 -1.046875q-0.078125 -0.421875 -0.078125 -1.3125l0 -5.140625l1.40625 0l0 4.59375q0 1.109375 0.078125 1.484375q0.140625 0.5625 0.5625 0.875q0.4375 0.3125 1.0625 0.3125q0.640625 0 1.1875 -0.3125q0.5625 -0.328125 0.78125 -0.890625q0.234375 -0.5625 0.234375 -1.625l0 -4.4375l1.40625 0l0 8.296875l-1.25 0zm3.4453125 0l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.2734375 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 
0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0z" fill-rule="nonzero"></path><path fill="#38761d" d="m239.3472 494.525q0.859375 0 1.6875 0.453125q0.828125 0.4375 1.28125 1.265625q0.46875 0.828125 0.46875 1.71875q0 1.421875 -1.015625 2.4375q-1.0 1.0 -2.421875 1.0q-1.4375 0 -2.453125 -1.0q-1.0 -1.015625 -1.0 -2.4375q0 -0.90625 0.453125 -1.71875q0.46875 -0.828125 1.296875 -1.265625q0.828125 -0.453125 1.703125 -0.453125zm15.703125 7.953125l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm13.6328125 1.46875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.8046875 4.953125l0 -11.453125l1.40625 0l0 11.453125l-1.40625 0zm9.2578125 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm10.8984375 3.6875l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 -0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm7.0546875 -1.40625l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm11.75 0.796875q0 -2.296875 1.28125 
-3.40625q1.078125 -0.921875 2.609375 -0.921875q1.71875 0 2.796875 1.125q1.09375 1.109375 1.09375 3.09375q0 1.59375 -0.484375 2.515625q-0.484375 0.921875 -1.40625 1.4375q-0.90625 0.5 -2.0 0.5q-1.734375 0 -2.8125 -1.109375q-1.078125 -1.125 -1.078125 -3.234375zm1.453125 0q0 1.59375 0.6875 2.390625q0.703125 0.796875 1.75 0.796875q1.046875 0 1.734375 -0.796875q0.703125 -0.796875 0.703125 -2.4375q0 -1.53125 -0.703125 -2.328125q-0.6875 -0.796875 -1.734375 -0.796875q-1.046875 0 -1.75 0.796875q-0.6875 0.78125 -0.6875 2.375zm7.9453125 4.15625l0 -11.453125l1.40625 0l0 11.453125l-1.40625 0zm8.9609375 0l0 -1.046875q-0.78125 1.234375 -2.3125 1.234375q-1.0 0 -1.828125 -0.546875q-0.828125 -0.546875 -1.296875 -1.53125q-0.453125 -0.984375 -0.453125 -2.25q0 -1.25 0.40625 -2.25q0.421875 -1.015625 1.25 -1.546875q0.828125 -0.546875 1.859375 -0.546875q0.75 0 1.328125 0.3125q0.59375 0.3125 0.953125 0.828125l0 -4.109375l1.40625 0l0 11.453125l-1.3125 0zm-4.4375 -4.140625q0 1.59375 0.671875 2.390625q0.671875 0.78125 1.578125 0.78125q0.921875 0 1.5625 -0.75q0.65625 -0.765625 0.65625 -2.3125q0 -1.703125 -0.65625 -2.5q-0.65625 -0.796875 -1.625 -0.796875q-0.9375 0 -1.5625 0.765625q-0.625 0.765625 -0.625 2.421875zm17.8125 3.109375q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm3.3359375 4.859375l1.375 0.203125q0.078125 0.640625 0.46875 0.921875q0.53125 0.390625 1.4375 0.390625q0.96875 0 1.5 -0.390625q0.53125 -0.390625 0.71875 -1.09375q0.109375 -0.421875 0.109375 -1.8125q-0.921875 1.09375 -2.296875 1.09375q-1.71875 0 -2.65625 -1.234375q-0.9375 -1.234375 -0.9375 -2.96875q0 -1.1875 0.421875 -2.1875q0.4375 -1.0 1.25 -1.546875q0.828125 -0.546875 1.921875 -0.546875q1.46875 0 2.421875 1.1875l0 -1.0l1.296875 0l0 7.171875q0 1.9375 -0.390625 2.75q-0.390625 0.8125 -1.25 1.28125q-0.859375 0.46875 -2.109375 0.46875q-1.484375 0 -2.40625 -0.671875q-0.90625 -0.671875 -0.875 -2.015625zm1.171875 -4.984375q0 1.625 0.640625 2.375q0.65625 0.75 1.625 0.75q0.96875 0 1.625 -0.734375q0.65625 -0.75 0.65625 -2.34375q0 -1.53125 -0.671875 -2.296875q-0.671875 -0.78125 -1.625 -0.78125q-0.9375 0 -1.59375 0.765625q-0.65625 0.765625 -0.65625 2.265625zm7.7265625 4.984375l1.375 0.203125q0.078125 0.640625 0.46875 0.921875q0.53125 0.390625 1.4375 0.390625q0.96875 0 1.5 -0.390625q0.53125 -0.390625 0.71875 -1.09375q0.109375 -0.421875 0.109375 -1.8125q-0.921875 1.09375 -2.296875 1.09375q-1.71875 0 -2.65625 -1.234375q-0.9375 -1.234375 -0.9375 -2.96875q0 -1.1875 0.421875 -2.1875q0.4375 -1.0 1.25 
-1.546875q0.828125 -0.546875 1.921875 -0.546875q1.46875 0 2.421875 1.1875l0 -1.0l1.296875 0l0 7.171875q0 1.9375 -0.390625 2.75q-0.390625 0.8125 -1.25 1.28125q-0.859375 0.46875 -2.109375 0.46875q-1.484375 0 -2.40625 -0.671875q-0.90625 -0.671875 -0.875 -2.015625zm1.171875 -4.984375q0 1.625 0.640625 2.375q0.65625 0.75 1.625 0.75q0.96875 0 1.625 -0.734375q0.65625 -0.75 0.65625 -2.34375q0 -1.53125 -0.671875 -2.296875q-0.671875 -0.78125 -1.625 -0.78125q-0.9375 0 -1.59375 0.765625q-0.65625 0.765625 -0.65625 2.265625zm7.9765625 4.296875l0 -8.296875l1.265625 0l0 1.25q0.484375 -0.875 0.890625 -1.15625q0.40625 -0.28125 0.90625 -0.28125q0.703125 0 1.4375 0.453125l-0.484375 1.296875q-0.515625 -0.296875 -1.03125 -0.296875q-0.453125 0 -0.828125 0.28125q-0.359375 0.265625 -0.515625 0.765625q-0.234375 0.75 -0.234375 1.640625l0 4.34375l-1.40625 0zm11.015625 -2.671875l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.5703125 5.640625l1.375 0.203125q0.078125 0.640625 0.46875 0.921875q0.53125 0.390625 1.4375 0.390625q0.96875 0 1.5 -0.390625q0.53125 -0.390625 0.71875 -1.09375q0.109375 -0.421875 0.109375 -1.8125q-0.921875 1.09375 -2.296875 1.09375q-1.71875 0 -2.65625 -1.234375q-0.9375 -1.234375 -0.9375 -2.96875q0 -1.1875 0.421875 -2.1875q0.4375 -1.0 1.25 -1.546875q0.828125 -0.546875 1.921875 -0.546875q1.46875 0 2.421875 1.1875l0 -1.0l1.296875 0l0 7.171875q0 1.9375 -0.390625 2.75q-0.390625 0.8125 -1.25 1.28125q-0.859375 0.46875 -2.109375 0.46875q-1.484375 0 -2.40625 -0.671875q-0.90625 -0.671875 -0.875 -2.015625zm1.171875 -4.984375q0 1.625 0.640625 2.375q0.65625 0.75 1.625 0.75q0.96875 0 1.625 -0.734375q0.65625 -0.75 0.65625 -2.34375q0 -1.53125 -0.671875 -2.296875q-0.671875 -0.78125 -1.625 -0.78125q-0.9375 0 -1.59375 0.765625q-0.65625 0.765625 -0.65625 2.265625zm13.3984375 3.265625q-0.78125 0.671875 -1.5 0.953125q-0.71875 0.265625 -1.546875 0.265625q-1.375 0 -2.109375 -0.671875q-0.734375 -0.671875 -0.734375 -1.703125q0 -0.609375 0.28125 -1.109375q0.28125 -0.515625 0.71875 -0.8125q0.453125 -0.3125 1.015625 -0.46875q0.421875 -0.109375 1.25 -0.203125q1.703125 -0.203125 2.515625 -0.484375q0 -0.296875 0 -0.375q0 -0.859375 -0.390625 -1.203125q-0.546875 -0.484375 -1.609375 -0.484375q-0.984375 0 -1.46875 0.359375q-0.46875 0.34375 -0.6875 1.21875l-1.375 -0.1875q0.1875 -0.875 0.609375 -1.421875q0.4375 -0.546875 1.25 -0.828125q0.8125 -0.296875 1.875 -0.296875q1.0625 0 1.71875 0.25q0.671875 0.25 0.984375 0.625q0.3125 0.375 0.4375 0.953125q0.078125 0.359375 0.078125 1.296875l0 1.875q0 1.96875 0.078125 2.484375q0.09375 0.515625 0.359375 1.0l-1.46875 0q-0.21875 -0.4375 -0.28125 -1.03125zm-0.109375 -3.140625q-0.765625 0.3125 -2.296875 0.53125q-0.875 0.125 -1.234375 0.28125q-0.359375 0.15625 -0.5625 0.46875q-0.1875 0.296875 -0.1875 0.65625q0 0.5625 0.421875 0.9375q0.4375 0.375 1.25 0.375q0.8125 0 1.4375 -0.34375q0.640625 -0.359375 0.9375 -0.984375q0.234375 -0.46875 0.234375 -1.40625l0 -0.515625zm6.6640625 2.90625l0.203125 1.25q-0.59375 0.125 -1.0625 0.125q-0.765625 0 -1.1875 -0.234375q-0.421875 
-0.25 -0.59375 -0.640625q-0.171875 -0.40625 -0.171875 -1.671875l0 -4.765625l-1.03125 0l0 -1.09375l1.03125 0l0 -2.0625l1.40625 -0.84375l0 2.90625l1.40625 0l0 1.09375l-1.40625 0l0 4.84375q0 0.609375 0.0625 0.78125q0.078125 0.171875 0.25 0.28125q0.171875 0.09375 0.484375 0.09375q0.234375 0 0.609375 -0.0625zm7.0546875 -1.40625l1.453125 0.171875q-0.34375 1.28125 -1.28125 1.984375q-0.921875 0.703125 -2.359375 0.703125q-1.828125 0 -2.890625 -1.125q-1.0625 -1.125 -1.0625 -3.140625q0 -2.09375 1.078125 -3.25q1.078125 -1.15625 2.796875 -1.15625q1.65625 0 2.703125 1.140625q1.0625 1.125 1.0625 3.171875q0 0.125 0 0.375l-6.1875 0q0.078125 1.375 0.765625 2.109375q0.703125 0.71875 1.734375 0.71875q0.78125 0 1.328125 -0.40625q0.546875 -0.40625 0.859375 -1.296875zm-4.609375 -2.28125l4.625 0q-0.09375 -1.046875 -0.53125 -1.5625q-0.671875 -0.8125 -1.734375 -0.8125q-0.96875 0 -1.640625 0.65625q-0.65625 0.640625 -0.71875 1.71875zm7.2734375 2.46875l1.390625 -0.21875q0.109375 0.84375 0.640625 1.296875q0.546875 0.4375 1.5 0.4375q0.96875 0 1.4375 -0.390625q0.46875 -0.40625 0.46875 -0.9375q0 -0.46875 -0.40625 -0.75q-0.296875 -0.1875 -1.4375 -0.46875q-1.546875 -0.390625 -2.15625 -0.671875q-0.59375 -0.296875 -0.90625 -0.796875q-0.296875 -0.5 -0.296875 -1.109375q0 -0.5625 0.25 -1.03125q0.25 -0.46875 0.6875 -0.78125q0.328125 -0.25 0.890625 -0.40625q0.578125 -0.171875 1.21875 -0.171875q0.984375 0 1.71875 0.28125q0.734375 0.28125 1.078125 0.765625q0.359375 0.46875 0.5 1.28125l-1.375 0.1875q-0.09375 -0.640625 -0.546875 -1.0q-0.453125 -0.359375 -1.265625 -0.359375q-0.96875 0 -1.390625 0.328125q-0.40625 0.3125 -0.40625 0.734375q0 0.28125 0.171875 0.5q0.171875 0.21875 0.53125 0.375q0.21875 0.078125 1.25 0.359375q1.484375 0.390625 2.078125 0.65625q0.59375 0.25 0.921875 0.734375q0.34375 0.484375 0.34375 1.203125q0 0.703125 -0.421875 1.328125q-0.40625 0.609375 -1.1875 0.953125q-0.765625 0.34375 -1.734375 0.34375q-1.625 0 -2.46875 -0.671875q-0.84375 -0.671875 -1.078125 -2.0z" fill-rule="nonzero"></path><path fill="#f6b26b" d="m43.188244 91.46719l119.1496 0l0 58.740158l-119.1496 0z" fill-rule="evenodd"></path><path fill="#000000" d="m88.794876 114.16352l1.796875 0l0 7.84375q0 2.0625 -0.46875 3.265625q-0.453125 1.203125 -1.671875 1.96875q-1.203125 0.75 -3.171875 0.75q-1.90625 0 -3.125 -0.65625q-1.21875 -0.65625 -1.734375 -1.90625q-0.515625 -1.25 -0.515625 -3.421875l0 -7.84375l1.796875 0l0 7.84375q0 1.765625 0.328125 2.609375q0.328125 0.84375 1.125 1.296875q0.8125 0.453125 1.96875 0.453125q1.984375 0 2.828125 -0.890625q0.84375 -0.90625 0.84375 -3.46875l0 -7.84375zm3.6604462 10.65625l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 
-2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375zm16.749992 -0.234375l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8124924 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 -2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.9843674 0 3.2343674 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.3437424 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.9062424 0 1.5468674 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.4843674 -2.703125l5.4999924 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.0781174 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm9.09446 5.875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm5.556427 -2.9375l1.65625 -0.265625q0.140625 1.0 0.765625 1.53125q0.640625 0.515625 1.78125 0.515625q1.15625 0 1.703125 -0.46875q0.5625 -0.46875 0.5625 -1.09375q0 -0.5625 -0.484375 -0.890625q-0.34375 -0.21875 -1.703125 -0.5625q-1.84375 -0.46875 -2.5625 -0.796875q-0.703125 -0.34375 -1.078125 -0.9375q-0.359375 -0.609375 -0.359375 -1.328125q0 -0.65625 0.296875 -1.21875q0.3125 -0.5625 0.828125 -0.9375q0.390625 -0.28125 1.0625 -0.484375q0.671875 -0.203125 1.4375 -0.203125q1.171875 0 2.046875 0.34375q0.875 0.328125 1.28125 0.90625q0.421875 0.5625 0.578125 1.515625l-1.625 0.21875q-0.109375 -0.75 -0.65625 -1.171875q-0.53125 -0.4375 -1.5 -0.4375q-1.15625 0 -1.640625 0.390625q-0.484375 0.375 -0.484375 0.875q0 0.328125 0.203125 0.59375q0.203125 0.265625 0.640625 0.4375q0.25 0.09375 1.46875 0.4375q1.765625 0.46875 2.46875 0.765625q0.703125 0.296875 1.09375 0.875q0.40625 0.578125 0.40625 1.4375q0 0.828125 -0.484375 1.578125q-0.484375 0.734375 -1.40625 1.140625q-0.921875 0.390625 -2.078125 0.390625q-1.921875 0 -2.9375 -0.796875q-1.0 -0.796875 -1.28125 -2.359375z" fill-rule="nonzero"></path><path fill="#a61c00" d="m248.00629 85.981895l0 0c0 -7.41613 6.011963 -13.428093 13.428101 -13.428093l102.26407 0c3.5613403 0 6.976837 1.4147415 9.495087 3.9329987c2.5182495 2.5182571 3.933014 5.9337463 3.933014 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.428101 13.428085l-102.26407 0c-7.4161377 0 -13.428101 -6.0119476 -13.428101 -13.428085z" fill-rule="evenodd"></path><path stroke="#dd7e6b" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m248.00629 85.981895l0 0c0 -7.41613 6.011963 -13.428093 13.428101 -13.428093l102.26407 0c3.5613403 0 6.976837 1.4147415 9.495087 3.9329987c2.5182495 2.5182571 3.933014 5.9337463 3.933014 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.428101 13.428085l-102.26407 0c-7.4161377 0 -13.428101 -6.0119476 -13.428101 -13.428085z" fill-rule="evenodd"></path><path fill="#ffffff" fill-opacity="0.6863" d="m246.99475 86.04082l0 0c0 -7.99543 6.4815674 -14.477005 14.47699 -14.477005l102.22711 0c3.8395386 0 7.52182 1.5252533 10.236786 4.240219c2.7149658 2.7149582 4.240204 6.3972397 4.240204 10.236786l0 53.597183c0 7.9954224 -6.4815674 14.47699 -14.47699 14.47699l-102.22711 0c-7.9954224 0 -14.47699 -6.4815674 -14.47699 -14.47699z" fill-rule="evenodd"></path><path fill="#a61c00" d="m255.33348 93.981895l0 0c0 -7.41613 6.0119476 -13.428093 13.428085 -13.428093l102.2641 0c3.5613403 0 6.976837 1.4147415 9.495087 3.9329987c2.5182495 
2.5182571 3.9329834 5.9337463 3.9329834 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.42807 13.428085l-102.2641 0c-7.4161377 0 -13.428085 -6.0119476 -13.428085 -13.428085z" fill-rule="evenodd"></path><path stroke="#dd7e6b" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m255.33348 93.981895l0 0c0 -7.41613 6.0119476 -13.428093 13.428085 -13.428093l102.2641 0c3.5613403 0 6.976837 1.4147415 9.495087 3.9329987c2.5182495 2.5182571 3.9329834 5.9337463 3.9329834 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.42807 13.428085l-102.2641 0c-7.4161377 0 -13.428085 -6.0119476 -13.428085 -13.428085z" fill-rule="evenodd"></path><path fill="#ffffff" fill-opacity="0.3725" d="m254.31758 94.1176l0 0c0 -8.053329 6.5285187 -14.5818405 14.581833 -14.5818405l102.017426 0c3.86734 0 7.576294 1.5363007 10.310913 4.27092c2.7346497 2.7346268 4.270935 6.4435806 4.270935 10.310921l0 53.387497c0 8.053329 -6.5285034 14.581848 -14.581848 14.581848l-102.017426 0c-8.053314 0 -14.581833 -6.5285187 -14.581833 -14.581848z" fill-rule="evenodd"></path><path fill="#a61c00" d="m262.66068 101.981895l0 0c0 -7.41613 6.011963 -13.428093 13.428101 -13.428093l102.26407 0c3.5613708 0 6.976837 1.4147415 9.495117 3.9329987c2.5182495 2.5182571 3.9329834 5.9337463 3.9329834 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.428101 13.428085l-102.26407 0c-7.4161377 0 -13.428101 -6.0119476 -13.428101 -13.428085z" fill-rule="evenodd"></path><path stroke="#dd7e6b" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m262.66068 101.981895l0 0c0 -7.41613 6.011963 -13.428093 13.428101 -13.428093l102.26407 0c3.5613708 0 6.976837 1.4147415 9.495117 3.9329987c2.5182495 2.5182571 3.9329834 5.9337463 3.9329834 9.495094l0 53.710747c0 7.4161377 -6.011963 13.428085 -13.428101 13.428085l-102.26407 0c-7.4161377 0 -13.428101 -6.0119476 -13.428101 -13.428085z" fill-rule="evenodd"></path><path fill="#f3f3f3" d="m312.15433 135.75726l5.234375 -13.59375l1.9375 0l5.5625 13.59375l-2.046875 0l-1.59375 -4.125l-5.6875 0l-1.484375 4.125l-1.921875 0zm3.921875 -5.578125l4.609375 0l-1.40625 -3.78125q-0.65625 -1.703125 -0.96875 -2.8125q-0.265625 1.3125 -0.734375 2.59375l-1.5 4.0zm10.021698 5.578125l0 -13.59375l5.125 0q1.359375 0 2.078125 0.125q1.0 0.171875 1.671875 0.640625q0.671875 0.46875 1.078125 1.3125q0.421875 0.84375 0.421875 1.84375q0 1.734375 -1.109375 2.9375q-1.09375 1.203125 -3.984375 1.203125l-3.484375 0l0 5.53125l-1.796875 0zm1.796875 -7.140625l3.515625 0q1.75 0 2.46875 -0.640625q0.734375 -0.65625 0.734375 -1.828125q0 -0.859375 -0.4375 -1.46875q-0.421875 -0.609375 -1.125 -0.796875q-0.453125 -0.125 -1.671875 -0.125l-3.484375 0l0 4.859375zm10.943573 7.140625l0 -13.59375l1.8125 0l0 13.59375l-1.8125 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m163.2705 120.83727l92.06299 0" fill-rule="evenodd"></path><path stroke="#666666" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m175.27048 120.83727l68.063 0" fill-rule="evenodd"></path><path fill="#666666" stroke="#666666" stroke-width="2.0" stroke-linecap="butt" d="m175.27048 117.533806l-9.076187 3.3034668l9.076187 3.3034592z" fill-rule="evenodd"></path><path fill="#666666" stroke="#666666" stroke-width="2.0" stroke-linecap="butt" d="m243.33348 124.14073l9.076187 -3.3034592l-9.076187 -3.3034668z" fill-rule="evenodd"></path><path fill="#b6d7a8" d="m248.00629 378.01077l0 0c0 -7.4161377 6.011963 -13.428101 13.428101 -13.428101l102.26407 0c3.5613403 0 6.976837 1.4147339 9.495087 3.933014c2.5182495 2.5182495 3.933014 
5.933716 3.933014 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.428101 13.42807l-102.26407 0c-7.4161377 0 -13.428101 -6.011963 -13.428101 -13.42807z" fill-rule="evenodd"></path><path stroke="#6aa84f" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m248.00629 378.01077l0 0c0 -7.4161377 6.011963 -13.428101 13.428101 -13.428101l102.26407 0c3.5613403 0 6.976837 1.4147339 9.495087 3.933014c2.5182495 2.5182495 3.933014 5.933716 3.933014 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.428101 13.42807l-102.26407 0c-7.4161377 0 -13.428101 -6.011963 -13.428101 -13.42807z" fill-rule="evenodd"></path><path fill="#ffffff" fill-opacity="0.6863" d="m246.99475 378.0939l0 0c0 -7.9954224 6.4815674 -14.47699 14.47699 -14.47699l102.22711 0c3.8395386 0 7.52182 1.5252686 10.236786 4.2402344c2.7149658 2.7149353 4.240204 6.397217 4.240204 10.236755l0 53.5972c0 7.9954224 -6.4815674 14.47699 -14.47699 14.47699l-102.22711 0c-7.9954224 0 -14.47699 -6.4815674 -14.47699 -14.47699z" fill-rule="evenodd"></path><path fill="#b6d7a8" d="m255.33348 386.01077l0 0c0 -7.4161377 6.0119476 -13.428101 13.428085 -13.428101l102.2641 0c3.5613403 0 6.976837 1.4147339 9.495087 3.933014c2.5182495 2.5182495 3.9329834 5.933716 3.9329834 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.42807 13.42807l-102.2641 0c-7.4161377 0 -13.428085 -6.011963 -13.428085 -13.42807z" fill-rule="evenodd"></path><path stroke="#6aa84f" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m255.33348 386.01077l0 0c0 -7.4161377 6.0119476 -13.428101 13.428085 -13.428101l102.2641 0c3.5613403 0 6.976837 1.4147339 9.495087 3.933014c2.5182495 2.5182495 3.9329834 5.933716 3.9329834 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.42807 13.42807l-102.2641 0c-7.4161377 0 -13.428085 -6.011963 -13.428085 -13.42807z" fill-rule="evenodd"></path><path fill="#ffffff" fill-opacity="0.3725" d="m254.31758 386.17072l0 0c0 -8.053345 6.5285187 -14.581848 14.581833 -14.581848l102.017426 0c3.86734 0 7.576294 1.5362854 10.310913 4.2709045c2.7346497 2.7346497 4.270935 6.443573 4.270935 10.310944l0 53.38748c0 8.053345 -6.5285034 14.581848 -14.581848 14.581848l-102.017426 0c-8.053314 0 -14.581833 -6.5285034 -14.581833 -14.581848z" fill-rule="evenodd"></path><path fill="#b6d7a8" d="m262.66068 394.01077l0 0c0 -7.4161377 6.011963 -13.428101 13.428101 -13.428101l102.26407 0c3.5613708 0 6.976837 1.4147339 9.495117 3.933014c2.5182495 2.5182495 3.9329834 5.933716 3.9329834 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.428101 13.42807l-102.26407 0c-7.4161377 0 -13.428101 -6.011963 -13.428101 -13.42807z" fill-rule="evenodd"></path><path stroke="#6aa84f" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m262.66068 394.01077l0 0c0 -7.4161377 6.011963 -13.428101 13.428101 -13.428101l102.26407 0c3.5613708 0 6.976837 1.4147339 9.495117 3.933014c2.5182495 2.5182495 3.9329834 5.933716 3.9329834 9.495087l0 53.710754c0 7.416107 -6.011963 13.42807 -13.428101 13.42807l-102.26407 0c-7.4161377 0 -13.428101 -6.011963 -13.428101 -13.42807z" fill-rule="evenodd"></path><path fill="#000000" d="m298.04648 427.78613l0 -13.59375l2.71875 0l3.21875 9.625q0.4375 1.34375 0.640625 2.015625q0.234375 -0.75 0.734375 -2.1875l3.25 -9.453125l2.421875 0l0 13.59375l-1.734375 0l0 -11.390625l-3.953125 11.390625l-1.625 0l-3.9375 -11.578125l0 11.578125l-1.734375 0zm22.134552 -3.171875l1.71875 0.21875q-0.40625 1.5 -1.515625 2.34375q-1.09375 0.828125 -2.8125 0.828125q-2.15625 0 -3.421875 -1.328125q-1.265625 -1.328125 -1.265625 -3.734375q0 
-2.484375 1.265625 -3.859375q1.28125 -1.375 3.328125 -1.375q1.984375 0 3.234375 1.34375q1.25 1.34375 1.25 3.796875q0 0.140625 -0.015625 0.4375l-7.34375 0q0.09375 1.625 0.921875 2.484375q0.828125 0.859375 2.0625 0.859375q0.90625 0 1.546875 -0.46875q0.65625 -0.484375 1.046875 -1.546875zm-5.484375 -2.703125l5.5 0q-0.109375 -1.234375 -0.625 -1.859375q-0.796875 -0.96875 -2.078125 -0.96875q-1.140625 0 -1.9375 0.78125q-0.78125 0.765625 -0.859375 2.046875zm12.766357 4.375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 -0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm1.5114136 1.5l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm6.243927 -11.6875l0 -1.90625l1.671875 0l0 1.90625l-1.671875 0zm0 11.6875l0 -9.859375l1.671875 0l0 9.859375l-1.671875 0zm10.566711 -3.609375l1.640625 0.21875q-0.265625 1.6875 -1.375 2.65625q-1.109375 0.953125 -2.734375 0.953125q-2.015625 0 -3.25 -1.3125q-1.21875 -1.328125 -1.21875 -3.796875q0 -1.59375 0.515625 -2.78125q0.53125 -1.203125 1.609375 -1.796875q1.09375 -0.609375 2.359375 -0.609375q1.609375 0 2.625 0.8125q1.015625 0.8125 1.3125 2.3125l-1.625 0.25q-0.234375 -1.0 -0.828125 -1.5q-0.59375 -0.5 -1.421875 -0.5q-1.265625 0 -2.0625 0.90625q-0.78125 0.90625 -0.78125 2.859375q0 1.984375 0.765625 2.890625q0.765625 0.890625 1.984375 0.890625q0.984375 0 1.640625 -0.59375q0.65625 -0.609375 0.84375 -1.859375zm9.28125 3.609375l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375z" fill-rule="nonzero"></path><path fill="#cfe2f3" d="m408.62466 245.44911l0 0c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596l0 -3.179596c0 0.87802124 -0.7117615 1.589798 -1.5897827 1.589798c-0.87802124 0 -1.5898132 -0.71177673 -1.5898132 -1.589798l0 3.179596l-145.98877 0l0 0c-1.7560425 0 -3.179596 1.4235535 -3.179596 3.179596l0 49.486343c0 1.7560425 1.4235535 3.179596 3.179596 3.179596c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596l0 -3.179596l145.98877 0c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596zm-152.34796 9.538788c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596l0 0c0 -0.87802124 -0.7117615 -1.589798 -1.5897827 -1.589798c-0.87802124 0 -1.5898132 0.71177673 -1.5898132 1.589798z" fill-rule="evenodd"></path><path fill="#a5b4c2" d="m256.2767 254.9879c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596l0 0c0 -0.87802124 -0.7117615 -1.589798 -1.5897827 -1.589798c-0.87802124 0 -1.5898132 0.71177673 -1.5898132 1.589798zm149.16837 -6.359192c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596c0 
-1.7560425 -1.4235535 -3.179596 -3.179596 -3.179596c-1.7560425 0 -3.179596 1.4235535 -3.179596 3.179596c0 0.87802124 0.711792 1.589798 1.5898132 1.589798c0.87802124 0 1.5897827 -0.71177673 1.5897827 -1.589798z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m253.0971 251.8083c0 -1.7560425 1.4235535 -3.179596 3.179596 -3.179596l145.98877 0l0 -3.179596l0 0c0 -1.7560425 1.4235535 -3.179596 3.179596 -3.179596c1.7560425 0 3.179596 1.4235535 3.179596 3.179596l0 49.486343c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596l-145.98877 0l0 3.179596c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596c-1.7560425 0 -3.179596 -1.4235535 -3.179596 -3.179596zm149.16837 -3.179596l3.179596 0c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596m-3.179596 3.179596l0 -3.179596c0 0.87802124 -0.7117615 1.589798 -1.5897827 1.589798c-0.87802124 0 -1.5898132 -0.71177673 -1.5898132 -1.589798m-145.98877 9.538788l0 -3.179596l0 0c0 -0.87802124 0.711792 -1.589798 1.5898132 -1.589798c0.87802124 0 1.5897827 0.71177673 1.5897827 1.589798c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596c-1.7560425 0 -3.179596 -1.4235535 -3.179596 -3.179596m6.359192 0l0 46.306747" fill-rule="evenodd"></path><path stroke="#4a86e8" stroke-width="1.0" stroke-linejoin="round" stroke-linecap="butt" d="m253.0971 251.8083c0 -1.7560425 1.4235535 -3.179596 3.179596 -3.179596l145.98877 0l0 -3.179596l0 0c0 -1.7560425 1.4235535 -3.179596 3.179596 -3.179596c1.7560425 0 3.179596 1.4235535 3.179596 3.179596l0 49.486343c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596l-145.98877 0l0 3.179596c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596c-1.7560425 0 -3.179596 -1.4235535 -3.179596 -3.179596zm149.16837 -3.179596l3.179596 0c1.7560425 0 3.179596 -1.4235535 3.179596 -3.179596m-3.179596 3.179596l0 -3.179596c0 0.87802124 -0.7117615 1.589798 -1.5897827 1.589798c-0.87802124 0 -1.5898132 -0.71177673 -1.5898132 -1.589798m-145.98877 9.538788l0 -3.179596l0 0c0 -0.87802124 0.711792 -1.589798 1.5898132 -1.589798c0.87802124 0 1.5897827 0.71177673 1.5897827 1.589798c0 1.7560425 -1.4235535 3.179596 -3.179596 3.179596c-1.7560425 0 -3.179596 -1.4235535 -3.179596 -3.179596m6.359192 0l0 46.306747" fill-rule="evenodd"></path><path fill="#1c4587" d="m269.12817 265.99875l1.34375 -0.109375q0.09375 0.796875 0.4375 1.3125q0.359375 0.515625 1.078125 0.84375q0.734375 0.3125 1.65625 0.3125q0.8125 0 1.4375 -0.234375q0.625 -0.25 0.921875 -0.671875q0.3125 -0.421875 0.3125 -0.921875q0 -0.5 -0.296875 -0.875q-0.296875 -0.375 -0.96875 -0.640625q-0.4375 -0.15625 -1.921875 -0.515625q-1.46875 -0.359375 -2.0625 -0.671875q-0.765625 -0.40625 -1.15625 -1.0q-0.375 -0.59375 -0.375 -1.34375q0 -0.8125 0.453125 -1.515625q0.46875 -0.703125 1.359375 -1.0625q0.890625 -0.375 1.96875 -0.375q1.203125 0 2.109375 0.390625q0.90625 0.375 1.390625 1.125q0.5 0.75 0.546875 1.6875l-1.375 0.109375q-0.109375 -1.015625 -0.75 -1.53125q-0.625 -0.53125 -1.859375 -0.53125q-1.28125 0 -1.875 0.484375q-0.59375 0.46875 -0.59375 1.125q0 0.578125 0.421875 0.953125q0.40625 0.375 2.140625 0.765625q1.734375 0.390625 2.375 0.6875q0.9375 0.4375 1.375 1.109375q0.453125 0.65625 0.453125 1.515625q0 0.859375 -0.5 1.625q-0.484375 0.75 -1.40625 1.171875q-0.90625 0.421875 -2.0625 0.421875q-1.453125 0 -2.4375 -0.421875q-0.984375 -0.4375 -1.546875 -1.28125q-0.5625 -0.859375 -0.59375 -1.9375zm15.166229 0.609375l1.296875 0.15625q-0.203125 1.34375 -1.09375 2.109375q-0.875 0.75 -2.140625 0.75q-1.59375 0 -2.5625 -1.03125q-0.96875 -1.046875 -0.96875 -3.0q0 -1.265625 0.40625 -2.203125q0.421875 -0.953125 1.265625 
-1.421875q0.859375 -0.46875 1.859375 -0.46875q1.265625 0 2.078125 0.640625q0.8125 0.640625 1.03125 1.8125l-1.28125 0.203125q-0.1875 -0.78125 -0.65625 -1.171875q-0.453125 -0.40625 -1.109375 -0.40625q-1.0 0 -1.625 0.71875q-0.625 0.71875 -0.625 2.265625q0 1.5625 0.59375 2.28125q0.609375 0.703125 1.578125 0.703125q0.78125 0 1.296875 -0.46875q0.515625 -0.484375 0.65625 -1.46875zm7.328125 1.890625q-0.734375 0.609375 -1.40625 0.875q-0.671875 0.25 -1.453125 0.25q-1.28125 0 -1.96875 -0.625q-0.6875 -0.625 -0.6875 -1.59375q0 -0.578125 0.25 -1.046875q0.265625 -0.46875 0.6875 -0.75q0.421875 -0.296875 0.953125 -0.4375q0.375 -0.109375 1.171875 -0.203125q1.59375 -0.1875 2.34375 -0.453125q0.015625 -0.265625 0.015625 -0.34375q0 -0.8125 -0.375 -1.140625q-0.515625 -0.4375 -1.5 -0.4375q-0.9375 0 -1.390625 0.328125q-0.4375 0.3125 -0.640625 1.140625l-1.296875 -0.171875q0.171875 -0.828125 0.578125 -1.328125q0.40625 -0.515625 1.171875 -0.78125q0.765625 -0.28125 1.765625 -0.28125q1.0 0 1.609375 0.234375q0.625 0.234375 0.921875 0.59375q0.296875 0.34375 0.40625 0.890625q0.0625 0.34375 0.0625 1.21875l0 1.75q0 1.84375 0.078125 2.328125q0.09375 0.484375 0.34375 0.9375l-1.375 0q-0.203125 -0.40625 -0.265625 -0.953125zm-0.109375 -2.953125q-0.71875 0.296875 -2.15625 0.5q-0.8125 0.125 -1.15625 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.28125 -0.171875 0.625q0 0.53125 0.390625 0.890625q0.40625 0.34375 1.171875 0.34375q0.765625 0 1.359375 -0.328125q0.59375 -0.34375 0.875 -0.921875q0.203125 -0.4375 0.203125 -1.3125l0 -0.484375zm3.151123 3.90625l0 -10.734375l1.328125 0l0 10.734375l-1.328125 0zm8.365509 -0.953125q-0.734375 0.609375 -1.40625 0.875q-0.671875 0.25 -1.453125 0.25q-1.28125 0 -1.96875 -0.625q-0.6875 -0.625 -0.6875 -1.59375q0 -0.578125 0.25 -1.046875q0.265625 -0.46875 0.6875 -0.75q0.421875 -0.296875 0.953125 -0.4375q0.375 -0.109375 1.171875 -0.203125q1.59375 -0.1875 2.34375 -0.453125q0.015625 -0.265625 0.015625 -0.34375q0 -0.8125 -0.375 -1.140625q-0.515625 -0.4375 -1.5 -0.4375q-0.9375 0 -1.390625 0.328125q-0.4375 0.3125 -0.640625 1.140625l-1.296875 -0.171875q0.171875 -0.828125 0.578125 -1.328125q0.40625 -0.515625 1.171875 -0.78125q0.765625 -0.28125 1.765625 -0.28125q1.0 0 1.609375 0.234375q0.625 0.234375 0.921875 0.59375q0.296875 0.34375 0.40625 0.890625q0.0625 0.34375 0.0625 1.21875l0 1.75q0 1.84375 0.078125 2.328125q0.09375 0.484375 0.34375 0.9375l-1.375 0q-0.203125 -0.40625 -0.265625 -0.953125zm-0.109375 -2.953125q-0.71875 0.296875 -2.15625 0.5q-0.8125 0.125 -1.15625 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.28125 -0.171875 0.625q0 0.53125 0.390625 0.890625q0.40625 0.34375 1.171875 0.34375q0.765625 0 1.359375 -0.328125q0.59375 -0.34375 0.875 -0.921875q0.203125 -0.4375 0.203125 -1.3125l0 -0.484375zm4.401123 3.90625l-1.21875 0l0 -10.734375l1.3125 0l0 3.828125q0.84375 -1.046875 2.140625 -1.046875q0.71875 0 1.359375 0.296875q0.640625 0.28125 1.046875 0.8125q0.421875 0.515625 0.65625 1.265625q0.234375 0.734375 0.234375 1.578125q0 2.0 -1.0 3.09375q-0.984375 1.078125 -2.375 1.078125q-1.375 0 -2.15625 -1.140625l0 0.96875zm-0.015625 -3.953125q0 1.40625 0.390625 2.03125q0.609375 1.015625 1.671875 1.015625q0.875 0 1.5 -0.75q0.625 -0.75 0.625 -2.234375q0 -1.53125 -0.609375 -2.25q-0.59375 -0.734375 -1.453125 -0.734375q-0.859375 0 -1.5 0.75q-0.625 0.75 -0.625 2.171875zm6.9167175 3.953125l0 -10.734375l1.328125 0l0 10.734375l-1.328125 0zm8.61554 -2.5l1.359375 0.15625q-0.3125 1.203125 -1.1875 1.859375q-0.875 0.65625 -2.234375 0.65625q-1.703125 0 -2.703125 -1.046875q-1.0 -1.046875 -1.0 -2.953125q0 
-1.953125 1.015625 -3.03125q1.015625 -1.09375 2.625 -1.09375q1.5625 0 2.546875 1.0625q0.984375 1.0625 0.984375 2.984375q0 0.125 0 0.359375l-5.8125 0q0.078125 1.28125 0.71875 1.96875q0.65625 0.671875 1.640625 0.671875q0.71875 0 1.234375 -0.375q0.515625 -0.390625 0.8125 -1.21875zm-4.328125 -2.140625l4.34375 0q-0.09375 -0.984375 -0.5 -1.46875q-0.625 -0.765625 -1.625 -0.765625q-0.921875 0 -1.546875 0.609375q-0.609375 0.609375 -0.671875 1.625zm7.5260925 -1.640625l0 -1.5l1.5 0l0 1.5l-1.5 0zm0 6.28125l0 -1.5l1.5 0l0 1.5l-1.5 0zm7.7533264 0l0 -7.78125l1.1875 0l0 1.1875q0.453125 -0.828125 0.84375 -1.09375q0.390625 -0.265625 0.84375 -0.265625q0.671875 0 1.359375 0.421875l-0.453125 1.21875q-0.484375 -0.28125 -0.96875 -0.28125q-0.4375 0 -0.78125 0.265625q-0.34375 0.25 -0.484375 0.71875q-0.21875 0.703125 -0.21875 1.53125l0 4.078125l-1.328125 0zm10.005646 0l0 -1.140625q-0.921875 1.3125 -2.46875 1.3125q-0.6875 0 -1.296875 -0.265625q-0.59375 -0.265625 -0.890625 -0.65625q-0.28125 -0.40625 -0.390625 -0.984375q-0.09375 -0.375 -0.09375 -1.21875l0 -4.828125l1.328125 0l0 4.3125q0 1.03125 0.078125 1.390625q0.125 0.53125 0.515625 0.828125q0.40625 0.296875 1.0 0.296875q0.59375 0 1.109375 -0.296875q0.53125 -0.3125 0.75 -0.828125q0.21875 -0.53125 0.21875 -1.53125l0 -4.171875l1.3125 0l0 7.78125l-1.171875 0zm3.041748 0l0 -7.78125l1.1875 0l0 1.109375q0.859375 -1.28125 2.484375 -1.28125q0.703125 0 1.28125 0.25q0.59375 0.25 0.890625 0.671875q0.296875 0.40625 0.40625 0.96875q0.078125 0.359375 0.078125 1.28125l0 4.78125l-1.328125 0l0 -4.734375q0 -0.796875 -0.15625 -1.1875q-0.140625 -0.40625 -0.53125 -0.640625q-0.390625 -0.25 -0.921875 -0.25q-0.84375 0 -1.453125 0.53125q-0.609375 0.53125 -0.609375 2.03125l0 4.25l-1.328125 0zm17.301208 -0.953125q-0.734375 0.609375 -1.40625 0.875q-0.671875 0.25 -1.453125 0.25q-1.28125 0 -1.96875 -0.625q-0.6875 -0.625 -0.6875 -1.59375q0 -0.578125 0.25 -1.046875q0.265625 -0.46875 0.6875 -0.75q0.421875 -0.296875 0.953125 -0.4375q0.375 -0.109375 1.171875 -0.203125q1.59375 -0.1875 2.34375 -0.453125q0.015625 -0.265625 0.015625 -0.34375q0 -0.8125 -0.375 -1.140625q-0.515625 -0.4375 -1.5 -0.4375q-0.9375 0 -1.390625 0.328125q-0.4375 0.3125 -0.640625 1.140625l-1.296875 -0.171875q0.171875 -0.828125 0.578125 -1.328125q0.40625 -0.515625 1.171875 -0.78125q0.765625 -0.28125 1.765625 -0.28125q1.0 0 1.609375 0.234375q0.625 0.234375 0.921875 0.59375q0.296875 0.34375 0.40625 0.890625q0.0625 0.34375 0.0625 1.21875l0 1.75q0 1.84375 0.078125 2.328125q0.09375 0.484375 0.34375 0.9375l-1.375 0q-0.203125 -0.40625 -0.265625 -0.953125zm-0.109375 -2.953125q-0.71875 0.296875 -2.15625 0.5q-0.8125 0.125 -1.15625 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.28125 -0.171875 0.625q0 0.53125 0.390625 0.890625q0.40625 0.34375 1.171875 0.34375q0.765625 0 1.359375 -0.328125q0.59375 -0.34375 0.875 -0.921875q0.203125 -0.4375 0.203125 -1.3125l0 -0.484375zm2.6667175 1.578125l1.296875 -0.203125q0.109375 0.78125 0.609375 1.203125q0.5 0.421875 1.40625 0.421875q0.90625 0 1.34375 -0.359375q0.4375 -0.375 0.4375 -0.875q0 -0.453125 -0.390625 -0.703125q-0.265625 -0.1875 -1.34375 -0.453125q-1.453125 -0.359375 -2.015625 -0.625q-0.546875 -0.28125 -0.84375 -0.75q-0.28125 -0.46875 -0.28125 -1.046875q0 -0.515625 0.234375 -0.953125q0.234375 -0.453125 0.640625 -0.734375q0.3125 -0.234375 0.84375 -0.390625q0.53125 -0.15625 1.140625 -0.15625q0.90625 0 1.59375 0.265625q0.703125 0.265625 1.03125 0.71875q0.328125 0.4375 0.453125 1.203125l-1.28125 0.171875q-0.09375 -0.609375 -0.515625 -0.9375q-0.421875 -0.34375 -1.1875 -0.34375q-0.90625 0 
-1.296875 0.3125q-0.390625 0.296875 -0.390625 0.703125q0 0.25 0.15625 0.453125q0.171875 0.21875 0.515625 0.359375q0.1875 0.0625 1.15625 0.328125q1.40625 0.375 1.953125 0.609375q0.5625 0.234375 0.875 0.703125q0.3125 0.453125 0.3125 1.125q0 0.65625 -0.390625 1.234375q-0.375 0.578125 -1.109375 0.90625q-0.71875 0.3125 -1.640625 0.3125q-1.515625 0 -2.3125 -0.625q-0.78125 -0.625 -1.0 -1.875z" fill-rule="nonzero"></path><path fill="#1c4587" d="m269.44067 287.45187l0 -7.78125l1.1875 0l0 1.09375q0.359375 -0.578125 0.96875 -0.921875q0.609375 -0.34375 1.390625 -0.34375q0.859375 0 1.40625 0.359375q0.5625 0.359375 0.78125 1.0q0.921875 -1.359375 2.40625 -1.359375q1.15625 0 1.78125 0.640625q0.625 0.640625 0.625 1.96875l0 5.34375l-1.3125 0l0 -4.90625q0 -0.78125 -0.125 -1.125q-0.125 -0.359375 -0.46875 -0.5625q-0.34375 -0.21875 -0.796875 -0.21875q-0.8125 0 -1.359375 0.546875q-0.546875 0.546875 -0.546875 1.75l0 4.515625l-1.3125 0l0 -5.046875q0 -0.890625 -0.328125 -1.328125q-0.3125 -0.4375 -1.046875 -0.4375q-0.5625 0 -1.03125 0.296875q-0.46875 0.296875 -0.6875 0.859375q-0.203125 0.5625 -0.203125 1.625l0 4.03125l-1.328125 0zm17.286896 -0.953125q-0.734375 0.609375 -1.40625 0.875q-0.671875 0.25 -1.453125 0.25q-1.28125 0 -1.96875 -0.625q-0.6875 -0.625 -0.6875 -1.59375q0 -0.578125 0.25 -1.046875q0.265625 -0.46875 0.6875 -0.75q0.421875 -0.296875 0.953125 -0.4375q0.375 -0.109375 1.171875 -0.203125q1.59375 -0.1875 2.34375 -0.453125q0.015625 -0.265625 0.015625 -0.34375q0 -0.8125 -0.375 -1.140625q-0.515625 -0.4375 -1.5 -0.4375q-0.9375 0 -1.390625 0.328125q-0.4375 0.3125 -0.640625 1.140625l-1.296875 -0.171875q0.171875 -0.828125 0.578125 -1.328125q0.40625 -0.515625 1.171875 -0.78125q0.765625 -0.28125 1.765625 -0.28125q1.0 0 1.609375 0.234375q0.625 0.234375 0.921875 0.59375q0.296875 0.34375 0.40625 0.890625q0.0625 0.34375 0.0625 1.21875l0 1.75q0 1.84375 0.078125 2.328125q0.09375 0.484375 0.34375 0.9375l-1.375 0q-0.203125 -0.40625 -0.265625 -0.953125zm-0.109375 -2.953125q-0.71875 0.296875 -2.15625 0.5q-0.8125 0.125 -1.15625 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.28125 -0.171875 0.625q0 0.53125 0.390625 0.890625q0.40625 0.34375 1.171875 0.34375q0.765625 0 1.359375 -0.328125q0.59375 -0.34375 0.875 -0.921875q0.203125 -0.4375 0.203125 -1.3125l0 -0.484375zm3.182373 3.90625l0 -7.78125l1.1875 0l0 1.109375q0.859375 -1.28125 2.484375 -1.28125q0.703125 0 1.28125 0.25q0.59375 0.25 0.890625 0.671875q0.296875 0.40625 0.40625 0.96875q0.078125 0.359375 0.078125 1.28125l0 4.78125l-1.328125 0l0 -4.734375q0 -0.796875 -0.15625 -1.1875q-0.140625 -0.40625 -0.53125 -0.640625q-0.390625 -0.25 -0.921875 -0.25q-0.84375 0 -1.453125 0.53125q-0.609375 0.53125 -0.609375 2.03125l0 4.25l-1.328125 0zm8.104218 3.0l-0.15625 -1.234375q0.4375 0.109375 0.75 0.109375q0.453125 0 0.703125 -0.15625q0.265625 -0.140625 0.4375 -0.40625q0.125 -0.1875 0.40625 -0.96875q0.03125 -0.109375 0.109375 -0.328125l-2.953125 -7.796875l1.421875 0l1.625 4.5q0.3125 0.859375 0.5625 1.8125q0.234375 -0.90625 0.546875 -1.78125l1.65625 -4.53125l1.328125 0l-2.96875 7.90625q-0.46875 1.28125 -0.734375 1.765625q-0.359375 0.65625 -0.8125 0.953125q-0.453125 0.3125 -1.078125 0.3125q-0.375 0 -0.84375 -0.15625zm16.525116 -3.953125q-0.734375 0.609375 -1.40625 0.875q-0.671875 0.25 -1.453125 0.25q-1.28125 0 -1.96875 -0.625q-0.6875 -0.625 -0.6875 -1.59375q0 -0.578125 0.25 -1.046875q0.265625 -0.46875 0.6875 -0.75q0.421875 -0.296875 0.953125 -0.4375q0.375 -0.109375 1.171875 -0.203125q1.59375 -0.1875 2.34375 -0.453125q0.015625 -0.265625 0.015625 -0.34375q0 -0.8125 -0.375 
-1.140625q-0.515625 -0.4375 -1.5 -0.4375q-0.9375 0 -1.390625 0.328125q-0.4375 0.3125 -0.640625 1.140625l-1.296875 -0.171875q0.171875 -0.828125 0.578125 -1.328125q0.40625 -0.515625 1.171875 -0.78125q0.765625 -0.28125 1.765625 -0.28125q1.0 0 1.609375 0.234375q0.625 0.234375 0.921875 0.59375q0.296875 0.34375 0.40625 0.890625q0.0625 0.34375 0.0625 1.21875l0 1.75q0 1.84375 0.078125 2.328125q0.09375 0.484375 0.34375 0.9375l-1.375 0q-0.203125 -0.40625 -0.265625 -0.953125zm-0.109375 -2.953125q-0.71875 0.296875 -2.15625 0.5q-0.8125 0.125 -1.15625 0.265625q-0.328125 0.140625 -0.515625 0.421875q-0.171875 0.28125 -0.171875 0.625q0 0.53125 0.390625 0.890625q0.40625 0.34375 1.171875 0.34375q0.765625 0 1.359375 -0.328125q0.59375 -0.34375 0.875 -0.921875q0.203125 -0.4375 0.203125 -1.3125l0 -0.484375zm2.6667175 1.578125l1.296875 -0.203125q0.109375 0.78125 0.609375 1.203125q0.5 0.421875 1.40625 0.421875q0.90625 0 1.34375 -0.359375q0.4375 -0.375 0.4375 -0.875q0 -0.453125 -0.390625 -0.703125q-0.265625 -0.1875 -1.34375 -0.453125q-1.453125 -0.359375 -2.015625 -0.625q-0.546875 -0.28125 -0.84375 -0.75q-0.28125 -0.46875 -0.28125 -1.046875q0 -0.515625 0.234375 -0.953125q0.234375 -0.453125 0.640625 -0.734375q0.3125 -0.234375 0.84375 -0.390625q0.53125 -0.15625 1.140625 -0.15625q0.90625 0 1.59375 0.265625q0.703125 0.265625 1.03125 0.71875q0.328125 0.4375 0.453125 1.203125l-1.28125 0.171875q-0.09375 -0.609375 -0.515625 -0.9375q-0.421875 -0.34375 -1.1875 -0.34375q-0.90625 0 -1.296875 0.3125q-0.390625 0.296875 -0.390625 0.703125q0 0.25 0.15625 0.453125q0.171875 0.21875 0.515625 0.359375q0.1875 0.0625 1.15625 0.328125q1.40625 0.375 1.953125 0.609375q0.5625 0.234375 0.875 0.703125q0.3125 0.453125 0.3125 1.125q0 0.65625 -0.390625 1.234375q-0.375 0.578125 -1.109375 0.90625q-0.71875 0.3125 -1.640625 0.3125q-1.515625 0 -2.3125 -0.625q-0.78125 -0.625 -1.0 -1.875zm11.868866 5.328125l-0.15625 -1.234375q0.4375 0.109375 0.75 0.109375q0.453125 0 0.703125 -0.15625q0.265625 -0.140625 0.4375 -0.40625q0.125 -0.1875 0.40625 -0.96875q0.03125 -0.109375 0.109375 -0.328125l-2.953125 -7.796875l1.421875 0l1.625 4.5q0.3125 0.859375 0.5625 1.8125q0.234375 -0.90625 0.546875 -1.78125l1.65625 -4.53125l1.328125 0l-2.96875 7.90625q-0.46875 1.28125 -0.734375 1.765625q-0.359375 0.65625 -0.8125 0.953125q-0.453125 0.3125 -1.078125 0.3125q-0.375 0 -0.84375 -0.15625zm6.890625 -6.890625q0 -2.15625 1.203125 -3.203125q1.0 -0.859375 2.4375 -0.859375q1.609375 0 2.625 1.046875q1.015625 1.046875 1.015625 2.90625q0 1.5 -0.453125 2.359375q-0.4375 0.859375 -1.3125 1.34375q-0.859375 0.46875 -1.875 0.46875q-1.625 0 -2.640625 -1.046875q-1.0 -1.046875 -1.0 -3.015625zm1.359375 0q0 1.5 0.640625 2.25q0.65625 0.734375 1.640625 0.734375q0.984375 0 1.640625 -0.75q0.65625 -0.75 0.65625 -2.28125q0 -1.4375 -0.65625 -2.171875q-0.65625 -0.75 -1.640625 -0.75q-0.984375 0 -1.640625 0.734375q-0.640625 0.734375 -0.640625 2.234375zm12.385468 3.890625l0 -1.140625q-0.921875 1.3125 -2.46875 1.3125q-0.6875 0 -1.296875 -0.265625q-0.59375 -0.265625 -0.890625 -0.65625q-0.28125 -0.40625 -0.390625 -0.984375q-0.09375 -0.375 -0.09375 -1.21875l0 -4.828125l1.328125 0l0 4.3125q0 1.03125 0.078125 1.390625q0.125 0.53125 0.515625 0.828125q0.40625 0.296875 1.0 0.296875q0.59375 0 1.109375 -0.296875q0.53125 -0.3125 0.75 -0.828125q0.21875 -0.53125 0.21875 -1.53125l0 -4.171875l1.3125 0l0 7.78125l-1.171875 0zm7.1137085 0l0 -7.78125l1.1875 0l0 1.109375q0.859375 -1.28125 2.484375 -1.28125q0.703125 0 1.28125 0.25q0.59375 0.25 0.890625 0.671875q0.296875 0.40625 0.40625 0.96875q0.078125 0.359375 0.078125 1.28125l0 
4.78125l-1.328125 0l0 -4.734375q0 -0.796875 -0.15625 -1.1875q-0.140625 -0.40625 -0.53125 -0.640625q-0.390625 -0.25 -0.921875 -0.25q-0.84375 0 -1.453125 0.53125q-0.609375 0.53125 -0.609375 2.03125l0 4.25l-1.328125 0zm13.479248 -2.5l1.359375 0.15625q-0.3125 1.203125 -1.1875 1.859375q-0.875 0.65625 -2.234375 0.65625q-1.703125 0 -2.703125 -1.046875q-1.0 -1.046875 -1.0 -2.953125q0 -1.953125 1.015625 -3.03125q1.015625 -1.09375 2.625 -1.09375q1.5625 0 2.546875 1.0625q0.984375 1.0625 0.984375 2.984375q0 0.125 0 0.359375l-5.8125 0q0.078125 1.28125 0.71875 1.96875q0.65625 0.671875 1.640625 0.671875q0.71875 0 1.234375 -0.375q0.515625 -0.390625 0.8125 -1.21875zm-4.328125 -2.140625l4.34375 0q-0.09375 -0.984375 -0.5 -1.46875q-0.625 -0.765625 -1.625 -0.765625q-0.921875 0 -1.546875 0.609375q-0.609375 0.609375 -0.671875 1.625zm12.479218 2.140625l1.359375 0.15625q-0.3125 1.203125 -1.1875 1.859375q-0.875 0.65625 -2.234375 0.65625q-1.703125 0 -2.703125 -1.046875q-1.0 -1.046875 -1.0 -2.953125q0 -1.953125 1.015625 -3.03125q1.015625 -1.09375 2.625 -1.09375q1.5625 0 2.546875 1.0625q0.984375 1.0625 0.984375 2.984375q0 0.125 0 0.359375l-5.8125 0q0.078125 1.28125 0.71875 1.96875q0.65625 0.671875 1.640625 0.671875q0.71875 0 1.234375 -0.375q0.515625 -0.390625 0.8125 -1.21875zm-4.328125 -2.140625l4.34375 0q-0.09375 -0.984375 -0.5 -1.46875q-0.625 -0.765625 -1.625 -0.765625q-0.921875 0 -1.546875 0.609375q-0.609375 0.609375 -0.671875 1.625zm12.197998 4.640625l0 -0.984375q-0.734375 1.15625 -2.171875 1.15625q-0.921875 0 -1.703125 -0.5q-0.78125 -0.515625 -1.21875 -1.4375q-0.421875 -0.921875 -0.421875 -2.109375q0 -1.171875 0.390625 -2.109375q0.390625 -0.953125 1.15625 -1.453125q0.78125 -0.515625 1.734375 -0.515625q0.703125 0 1.25 0.296875q0.5625 0.296875 0.90625 0.765625l0 -3.84375l1.3125 0l0 10.734375l-1.234375 0zm-4.15625 -3.875q0 1.484375 0.625 2.234375q0.625 0.734375 1.484375 0.734375q0.859375 0 1.46875 -0.703125q0.609375 -0.71875 0.609375 -2.15625q0 -1.609375 -0.625 -2.34375q-0.609375 -0.75 -1.515625 -0.75q-0.875 0 -1.46875 0.71875q-0.578125 0.71875 -0.578125 2.265625z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m336.40564 240.92224l-15.181122 -65.32283" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="2.0,6.0" d="m336.40564 240.92224l-15.181122 -65.32283" fill-rule="evenodd"></path><path fill="#a4c2f4" d="m43.188976 383.49606l119.1496 0l0 58.740143l-119.1496 0z" fill-rule="evenodd"></path><path fill="#000000" d="m65.19529 415.0205l1.796875 0.453125q-0.5625 2.21875 -2.03125 3.390625q-1.46875 1.15625 -3.59375 1.15625q-2.203125 0 -3.578125 -0.890625q-1.375 -0.90625 -2.09375 -2.59375q-0.71875 -1.703125 -0.71875 -3.65625q0 -2.125 0.796875 -3.703125q0.8125 -1.578125 2.3125 -2.390625q1.5 -0.828125 3.296875 -0.828125q2.046875 0 3.4375 1.046875q1.390625 1.03125 1.9375 2.90625l-1.765625 0.421875q-0.46875 -1.484375 -1.375 -2.15625q-0.90625 -0.6875 -2.265625 -0.6875q-1.5625 0 -2.625 0.75q-1.046875 0.75 -1.484375 2.03125q-0.421875 1.265625 -0.421875 2.609375q0 1.734375 0.5 3.03125q0.515625 1.28125 1.578125 1.921875q1.078125 0.640625 2.3125 0.640625q1.515625 0 2.5625 -0.859375q1.046875 -0.875 1.421875 -2.59375zm2.9260712 -0.15625q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 
0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm8.656967 0q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.266342 4.921875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0zm12.618927 0l0 -1.25q-0.9375 1.46875 -2.75 1.46875q-1.171875 0 -2.171875 -0.640625q-0.984375 -0.65625 -1.53125 -1.8125q-0.53125 -1.171875 -0.53125 -2.6875q0 -1.46875 0.484375 -2.671875q0.5 -1.203125 1.46875 -1.84375q0.984375 -0.640625 2.203125 -0.640625q0.890625 0 1.578125 0.375q0.703125 0.375 1.140625 0.984375l0 -4.875l1.65625 0l0 13.59375l-1.546875 0zm-5.28125 -4.921875q0 1.890625 0.796875 2.828125q0.8125 0.9375 1.890625 0.9375q1.09375 0 1.859375 -0.890625q0.765625 -0.890625 0.765625 -2.734375q0 -2.015625 -0.78125 -2.953125q-0.78125 -0.953125 -1.921875 -0.953125q-1.109375 0 -1.859375 0.90625q-0.75 0.90625 -0.75 2.859375zm9.281967 -6.765625l0 -1.90625l1.6718826 0l0 1.90625l-1.6718826 0zm0 11.6875l0 -9.859375l1.6718826 0l0 9.859375l-1.6718826 0zm4.129204 0l0 -9.859375l1.5 0l0 1.40625q1.09375 -1.625 3.140625 -1.625q0.890625 0 1.640625 0.328125q0.75 0.3125 1.109375 0.84375q0.375 0.515625 0.53125 1.21875q0.09375 0.46875 0.09375 1.625l0 6.0625l-1.671875 0l0 -6.0q0 -1.015625 -0.203125 -1.515625q-0.1875 -0.515625 -0.6875 -0.8125q-0.5 -0.296875 -1.171875 -0.296875q-1.0625 0 -1.84375 0.671875q-0.765625 0.671875 -0.765625 2.578125l0 5.375l-1.671875 0zm16.813217 -1.21875q-0.9375 0.796875 -1.796875 1.125q-0.859375 0.3125 -1.84375 0.3125q-1.609375 0 -2.484375 -0.78125q-0.875 -0.796875 -0.875 -2.03125q0 -0.734375 0.328125 -1.328125q0.328125 -0.59375 0.859375 -0.953125q0.53125 -0.359375 1.203125 -0.546875q0.5 -0.140625 1.484375 -0.25q2.03125 -0.25 2.984375 -0.578125q0 -0.34375 0 -0.4375q0 -1.015625 -0.46875 -1.4375q-0.640625 -0.5625 -1.90625 -0.5625q-1.171875 0 -1.734375 0.40625q-0.5625 0.40625 -0.828125 1.46875l-1.640625 -0.234375q0.234375 -1.046875 0.734375 -1.6875q0.515625 -0.640625 1.46875 -0.984375q0.96875 -0.359375 2.25 -0.359375q1.265625 0 2.046875 0.296875q0.78125 0.296875 1.15625 0.75q0.375 0.453125 0.515625 1.140625q0.09375 0.421875 0.09375 1.53125l0 2.234375q0 2.328125 0.09375 2.953125q0.109375 0.609375 0.4375 1.171875l-1.75 0q-0.265625 -0.515625 -0.328125 -1.21875zm-0.140625 -3.71875q-0.90625 0.359375 -2.734375 0.625q-1.03125 0.140625 -1.453125 0.328125q-0.421875 0.1875 -0.65625 0.546875q-0.234375 0.359375 -0.234375 0.796875q0 0.671875 0.5 1.125q0.515625 0.4375 1.484375 0.4375q0.96875 0 1.71875 -0.421875q0.75 -0.4375 1.109375 -1.15625q0.265625 -0.578125 0.265625 -1.671875l0 -0.609375zm7.735092 3.4375l0.234375 1.484375q-0.703125 0.140625 -1.265625 0.140625q-0.90625 0 -1.40625 -0.28125q-0.5 -0.296875 
-0.703125 -0.75q-0.203125 -0.46875 -0.203125 -1.984375l0 -5.65625l-1.234375 0l0 -1.3125l1.234375 0l0 -2.4375l1.65625 -1.0l0 3.4375l1.6875 0l0 1.3125l-1.6875 0l0 5.75q0 0.71875 0.078125 0.921875q0.09375 0.203125 0.296875 0.328125q0.203125 0.125 0.578125 0.125q0.265625 0 0.734375 -0.078125zm0.90205383 -3.421875q0 -2.734375 1.53125 -4.0625q1.265625 -1.09375 3.09375 -1.09375q2.03125 0 3.3125 1.34375q1.296875 1.328125 1.296875 3.671875q0 1.90625 -0.578125 3.0q-0.5625 1.078125 -1.65625 1.6875q-1.078125 0.59375 -2.375 0.59375q-2.0625 0 -3.34375 -1.328125q-1.28125 -1.328125 -1.28125 -3.8125zm1.71875 0q0 1.890625 0.828125 2.828125q0.828125 0.9375 2.078125 0.9375q1.25 0 2.0625 -0.9375q0.828125 -0.953125 0.828125 -2.890625q0 -1.828125 -0.828125 -2.765625q-0.828125 -0.9375 -2.0625 -0.9375q-1.25 0 -2.078125 0.9375q-0.828125 0.9375 -0.828125 2.828125zm9.266342 4.921875l0 -9.859375l1.5 0l0 1.5q0.578125 -1.046875 1.0625 -1.375q0.484375 -0.34375 1.078125 -0.34375q0.84375 0 1.71875 0.546875l-0.578125 1.546875q-0.609375 -0.359375 -1.234375 -0.359375q-0.546875 0 -0.984375 0.328125q-0.421875 0.328125 -0.609375 0.90625q-0.28125 0.890625 -0.28125 1.953125l0 5.15625l-1.671875 0z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m163.08923 128.83727l102.36221 0" fill-rule="evenodd"></path><path stroke="#434343" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m175.08923 128.83727l78.36221 0" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" stroke-linecap="butt" d="m175.08923 125.533806l-9.076187 3.3034592l9.076187 3.3034668z" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" stroke-linecap="butt" d="m253.45145 132.14073l9.076202 -3.3034668l-9.076202 -3.3034592z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m163.25197 112.83727l86.17323 0" fill-rule="evenodd"></path><path stroke="#999999" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m175.25197 112.83727l62.173233 0" fill-rule="evenodd"></path><path fill="#999999" stroke="#999999" stroke-width="2.0" stroke-linecap="butt" d="m175.25197 109.533806l-9.076202 3.3034668l9.076202 3.3034592z" fill-rule="evenodd"></path><path fill="#999999" stroke="#999999" stroke-width="2.0" stroke-linecap="butt" d="m237.4252 116.14073l9.076187 -3.3034592l-9.076187 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m162.99347 412.86615l94.456696 0" fill-rule="evenodd"></path><path stroke="#666666" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="8.0,6.0" d="m174.99348 412.86615l70.45668 0" fill-rule="evenodd"></path><path fill="#666666" stroke="#666666" stroke-width="2.0" stroke-linecap="butt" d="m174.99348 409.56268l-9.076202 3.3034668l9.076202 3.3034668z" fill-rule="evenodd"></path><path fill="#666666" stroke="#666666" stroke-width="2.0" stroke-linecap="butt" d="m245.45018 416.16962l9.076187 -3.3034668l-9.076187 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m163.08923 420.86615l102.36221 0" fill-rule="evenodd"></path><path stroke="#434343" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="8.0,6.0" d="m175.08923 420.86615l78.36221 0" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" stroke-linecap="butt" d="m175.08923 417.56268l-9.076187 3.3034668l9.076187 3.3034668z" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" 
stroke-linecap="butt" d="m253.45145 424.16962l9.076202 -3.3034668l-9.076202 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m163.25197 404.86615l86.17323 0" fill-rule="evenodd"></path><path stroke="#999999" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="8.0,6.0" d="m175.25197 404.86615l62.173233 0" fill-rule="evenodd"></path><path fill="#999999" stroke="#999999" stroke-width="2.0" stroke-linecap="butt" d="m175.25197 401.56268l-9.076202 3.3034668l9.076202 3.3034668z" fill-rule="evenodd"></path><path fill="#999999" stroke="#999999" stroke-width="2.0" stroke-linecap="butt" d="m237.4252 408.16962l9.076187 -3.3034668l-9.076187 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m404.7967 122.698166l239.71655 0" fill-rule="evenodd"></path><path stroke="#6aa84f" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m404.79672 122.698166l227.71652 0" fill-rule="evenodd"></path><path fill="#6aa84f" stroke="#6aa84f" stroke-width="2.0" stroke-linecap="butt" d="m632.51324 126.001625l9.076172 -3.3034592l-9.076172 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m636.9728 265.82153l-225.82675 -125.60631" fill-rule="evenodd"></path><path stroke="#3c78d8" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m636.9728 265.82153l-215.33978 -119.77338" fill-rule="evenodd"></path><path fill="#3c78d8" stroke="#3c78d8" stroke-width="2.0" stroke-linecap="butt" d="m423.23874 143.16121l-9.537598 -1.5247955l6.326111 7.298691z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m642.0608 412.20996l-236.50397 -253.57478" fill-rule="evenodd"></path><path stroke="#674ea7" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m633.87604 403.43448l-220.13443 -236.02377" fill-rule="evenodd"></path><path fill="#674ea7" stroke="#674ea7" stroke-width="2.0" stroke-linecap="butt" d="m631.4602 405.68762l8.606384 4.384186l-3.7747803 -8.890533z" fill-rule="evenodd"></path><path fill="#674ea7" stroke="#674ea7" stroke-width="2.0" stroke-linecap="butt" d="m416.15738 165.15753l-8.606323 -4.384201l3.7747192 8.890533z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m404.05563 122.698166l239.71652 0" fill-rule="evenodd"></path><path stroke="#ff9900" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m404.0556 122.698166l227.71655 0" fill-rule="evenodd"></path><path fill="#ff9900" stroke="#ff9900" stroke-width="2.0" stroke-linecap="butt" d="m631.77216 126.001625l9.076172 -3.3034592l-9.076172 -3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m636.9728 265.82153l-225.82675 -125.60631" fill-rule="evenodd"></path><path stroke="#38761d" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m636.9728 265.82153l-215.33978 -119.77338" fill-rule="evenodd"></path><path fill="#38761d" stroke="#38761d" stroke-width="2.0" stroke-linecap="butt" d="m423.23874 143.16121l-9.537598 -1.5247955l6.326111 7.298691z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m642.0608 412.20996l-236.50397 -253.57478" fill-rule="evenodd"></path><path stroke="#674ea7" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m633.87604 403.43448l-220.13443 -236.02377" fill-rule="evenodd"></path><path fill="#674ea7" stroke="#674ea7" stroke-width="2.0" stroke-linecap="butt" d="m631.4602 405.68762l8.606384 4.384186l-3.7747803 -8.890533z" 
fill-rule="evenodd"></path><path fill="#674ea7" stroke="#674ea7" stroke-width="2.0" stroke-linecap="butt" d="m416.15738 165.15753l-8.606323 -4.384201l3.7747192 8.890533z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m404.05563 428.20996l239.71652 0" fill-rule="evenodd"></path><path stroke="#38761d" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m416.0556 428.20996l227.71655 0" fill-rule="evenodd"></path><path fill="#38761d" stroke="#38761d" stroke-width="2.0" stroke-linecap="butt" d="m416.0556 424.9065l-9.076202 3.3034668l9.076202 3.3034668z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m636.9728 285.0866l-225.82675 125.60629" fill-rule="evenodd"></path><path stroke="#ff9900" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m626.4858 290.91953l-215.33978 119.77338" fill-rule="evenodd"></path><path fill="#ff9900" stroke="#ff9900" stroke-width="2.0" stroke-linecap="butt" d="m628.0915 293.8065l6.326111 -7.2986755l-9.537537 1.5247803z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m642.0608 138.69817l-236.50397 253.57481" fill-rule="evenodd"></path><path stroke="#ff9900" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" d="m642.0608 138.69817l-228.31918 244.7993" fill-rule="evenodd"></path><path fill="#ff9900" stroke="#ff9900" stroke-width="2.0" stroke-linecap="butt" d="m411.32578 381.24426l-3.7747192 8.890533l8.606323 -4.384186z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m460.82727 93.50131l126.17322 0l0 29.196854l-126.17322 0z" fill-rule="evenodd"></path><path fill="#b45f06" d="m478.317 112.74131l-1.75 -5.703125l1.0 0l0.90625 3.296875l0.34375 1.21875q0.015625 -0.09375 0.296875 -1.171875l0.90625 -3.34375l0.984375 0l0.859375 3.3125l0.28125 1.078125l0.328125 -1.09375l0.984375 -3.296875l0.9375 0l-1.78125 5.703125l-1.0 0l-0.921875 -3.421875l-0.21875 -0.96875l-1.15625 4.390625l-1.0 0zm6.633087 0l0 -5.703125l0.859375 0l0 0.859375q0.34375 -0.609375 0.625 -0.796875q0.28125 -0.1875 0.609375 -0.1875q0.5 0 1.0 0.3125l-0.328125 0.890625q-0.359375 -0.203125 -0.71875 -0.203125q-0.3125 0 -0.5625 0.1875q-0.25 0.1875 -0.359375 0.53125q-0.15625 0.515625 -0.15625 1.125l0 2.984375l-0.96875 0zm3.5642395 -6.765625l0 -1.109375l0.96875 0l0 1.109375l-0.96875 0zm0 6.765625l0 -5.703125l0.96875 0l0 5.703125l-0.96875 0zm4.4768677 -0.859375l0.125 0.84375q-0.40625 0.09375 -0.71875 0.09375q-0.53125 0 -0.828125 -0.171875q-0.28125 -0.171875 -0.40625 -0.4375q-0.109375 -0.265625 -0.109375 -1.140625l0 -3.28125l-0.71875 0l0 -0.75l0.71875 0l0 -1.40625l0.953125 -0.59375l0 2.0l0.984375 0l0 0.75l-0.984375 0l0 3.34375q0 0.40625 0.046875 0.53125q0.0625 0.109375 0.171875 0.1875q0.125 0.0625 0.328125 0.0625q0.171875 0 0.4375 -0.03125zm4.7418823 -0.984375l1.0 0.125q-0.234375 0.875 -0.875 1.359375q-0.640625 0.484375 -1.625 0.484375q-1.25 0 -1.984375 -0.765625q-0.734375 -0.765625 -0.734375 -2.15625q0 -1.453125 0.734375 -2.234375q0.75 -0.796875 1.921875 -0.796875q1.15625 0 1.875 0.78125q0.71875 0.765625 0.71875 2.1875q0 0.078125 0 0.25l-4.25 0q0.046875 0.9375 0.515625 1.4375q0.484375 0.5 1.203125 0.5q0.53125 0 0.90625 -0.265625q0.375 -0.28125 0.59375 -0.90625zm-3.171875 -1.5625l3.1875 0q-0.0625 -0.71875 -0.359375 -1.078125q-0.46875 -0.546875 -1.203125 -0.546875q-0.671875 0 -1.125 0.453125q-0.453125 0.4375 -0.5 1.171875zm8.152771 3.40625l0 -5.703125l0.875 0l0 0.8125q0.625 -0.9375 1.8125 -0.9375q0.515625 0 0.953125 0.1875q0.4375 0.171875 0.640625 0.484375q0.21875 0.296875 0.3125 
[SVG path data omitted: this span of the diff is an added SVG diagram asset consisting of vector glyph outlines and colored connector arrows (fills #b45f06, #38761d, #351c75). The path coordinates encode rendered label text and carry no information recoverable as prose.]
0.6735687 -0.7003784 1.3577576q-0.035858154 0.6956177 0.45361328 1.2219543q0.36175537 0.38902283 0.8114624 0.48251343q0.4610901 0.08282471 1.0672302 -0.18315125zm-1.0170898 -3.388092l2.1706848 2.334137q0.4831848 -0.53585815 0.5439148 -0.998291q0.08081055 -0.71614075 -0.41931152 -1.2539215q-0.45755005 -0.49198914 -1.0975952 -0.5148468q-0.6286011 -0.033493042 -1.1976929 0.43292236zm3.1199646 7.0947876l-0.53204346 0.6668396q-0.34524536 -0.23356628 -0.55807495 -0.46240234q-0.36175537 -0.38902283 -0.43823242 -0.7236023q-0.0657959 -0.32315063 0.043395996 -0.59580994q0.11981201 -0.2612152 0.7598572 -0.85783386l2.400177 -2.2373505l-0.48947144 -0.5263214l0.5486145 -0.5113983l0.48947144 0.52633667l1.028656 -0.9588623l1.0834045 0.29310608l-1.4629822 1.3637085l0.67037964 0.72084045l-0.5486145 0.51138306l-0.67037964 -0.7208252l-2.4458923 2.279953q-0.29718018 0.27700806 -0.35668945 0.39656067q-0.03744507 0.12034607 -0.020111084 0.2537079q0.03942871 0.13415527 0.17773438 0.28289795q0.11706543 0.12585449 0.32080078 0.29907227zm-0.059539795 1.1978912l4.171753 -3.8887177l0.5852356 0.629303l-0.6286011 0.585968q0.6798401 -0.16378784 1.0085144 -0.0856781q0.32870483 0.07810974 0.55215454 0.3183899q0.34048462 0.36613464 0.4524231 0.9453583l-0.87493896 0.3670044q-0.09616089 -0.40167236 -0.34091187 -0.6648407q-0.21279907 -0.22883606 -0.52020264 -0.28405762q-0.30740356 -0.055221558 -0.6333313 0.09907532q-0.48358154 0.23716736 -0.9293213 0.65267944l-2.1830444 2.034912l-0.659729 -0.70939636zm7.3762207 -2.0031738l0.8114929 -0.75642395l0.659729 0.70939636l-0.8114929 0.75642395l-0.659729 -0.70939636zm-4.948944 4.6131897l4.171753 -3.8887177l0.659729 0.70939636l-4.171753 3.8887177l-0.659729 -0.70939636zm5.6762695 3.02919l0.5470276 0.7717438q-0.8158264 0.56822205 -1.6741943 0.47120667q-0.8363037 -0.096221924 -1.4641113 -0.771286q-0.79803467 -0.85813904 -0.727478 -1.9065247q0.081207275 -1.0369263 1.1213074 -2.0064392q0.6743469 -0.6285858 1.3793945 -0.87997437q0.72714233 -0.2505951 1.4042358 -0.027297974q0.677063 0.22328186 1.177185 0.76104736q0.6277771 0.67507935 0.6892395 1.4294891q0.061462402 0.7544098 -0.45010376 1.4662323l-0.7519531 -0.60206604q0.32635498 -0.4750824 0.29797363 -0.91856384q-0.016937256 -0.45414734 -0.34680176 -0.80885315q-0.48947144 -0.5263214 -1.1972656 -0.5073395q-0.6857605 0.019760132 -1.5086975 0.78685q-0.84576416 0.7884064 -0.9250183 1.4603577q-0.07922363 0.6719513 0.4102478 1.1982727q0.38305664 0.411911 0.8898926 0.45213318q0.51745605 0.051651 1.1291199 -0.36898804zm1.5700378 4.762726l4.171753 -3.8887177l0.59588623 0.6407471l-0.58291626 0.5433655q0.48950195 -0.09315491 0.9703064 0.056747437q0.49145508 0.1613617 0.8745117 0.57325745q0.43624878 0.4691162 0.5186157 0.9477234q0.09378052 0.46795654 -0.14346313 0.9241028q1.1890259 -0.18986511 1.9232178 0.59962463q0.5852356 0.629303 0.55093384 1.2807312q-0.034301758 0.6514435 -0.7429199 1.3119965l-2.868805 2.6741486l-0.64907837 -0.6979523l2.6287842 -2.4504242q0.42288208 -0.39419556 0.54193115 -0.63331604q0.13043213 -0.24977112 0.07449341 -0.5393677q-0.05596924 -0.28961182 -0.27941895 -0.52989197q-0.41497803 -0.4462433 -0.98880005 -0.46672058q-0.5632019 -0.009048462 -1.2032471 0.58758545l-2.4230347 2.2586517l-0.659729 -0.70939636l2.7087708 -2.5250092q0.46862793 -0.43681335 0.54904175 -0.8321686q0.0803833 -0.39537048 -0.2814026 -0.7843933q-0.27664185 -0.29748535 -0.6813965 -0.41149902q-0.394104 -0.10255432 -0.80911255 0.07067871q-0.40356445 0.1625824 -0.9750366 0.695282l-2.1601868 2.0136108l-0.6596985 -0.70939636zm10.053925 8.103561l0.5895691 
0.8175049q-0.7996521 0.42500305 -1.5902405 0.28616333q-0.7905884 -0.13885498 -1.4609375 -0.8596802q-0.8512573 -0.9153595 -0.7913208 -1.975174q0.059936523 -1.0598145 1.0771484 -2.0080261q1.0629578 -0.9908142 2.1345215 -0.9857483q1.0936584 0.005859375 1.8917236 0.8639984q0.78741455 0.84669495 0.7054138 1.905716q-0.07058716 1.0483704 -1.1106567 2.0178986q-0.057159424 0.053268433 -0.18289185 0.17045593l-2.8942566 -3.1121826q-0.6538391 0.6735687 -0.7003784 1.3577423q-0.035888672 0.69563293 0.45358276 1.2219543q0.3617859 0.38902283 0.8114624 0.48251343q0.4611206 0.082839966 1.0672607 -0.18313599zm-1.0170898 -3.3881073l2.1706848 2.334137q0.4831848 -0.5358429 0.5438843 -0.998291q0.08081055 -0.71614075 -0.419281 -1.2539062q-0.45755005 -0.4920044 -1.0975952 -0.5148468q-0.6286011 -0.0335083 -1.1976929 0.4329071zm3.1199646 7.0947876l-0.532074 0.66685486q-0.34521484 -0.23356628 -0.55804443 -0.46240234q-0.3617859 -0.38902283 -0.43823242 -0.72361755q-0.0657959 -0.32315063 0.04336548 -0.5957947q0.11984253 -0.2612152 0.7598877 -0.85783386l2.400177 -2.2373505l-0.48947144 -0.5263214l0.5486145 -0.5113983l0.48947144 0.5263214l1.028656 -0.9588623l1.0834045 0.29310608l-1.4629822 1.3637085l0.67037964 0.72084045l-0.5486145 0.5113983l-0.67037964 -0.72084045l-2.4458923 2.2799683q-0.29718018 0.2769928 -0.35668945 0.39656067q-0.03744507 0.12034607 -0.020111084 0.2537079q0.03942871 0.13414001 0.17773438 0.28289795q0.11706543 0.12585449 0.32080078 0.299057zm2.9979248 3.4530792q-0.70388794 -0.09150696 -1.1815491 -0.3297882q-0.4670105 -0.22685242 -0.8607178 -0.6502075q-0.63842773 -0.6865082 -0.6581421 -1.3730621q0.0024108887 -0.6857605 0.51672363 -1.1651917q0.30859375 -0.2876587 0.68774414 -0.3847351q0.3897705 -0.08564758 0.74050903 -0.0067596436q0.37283325 0.0796814 0.7188721 0.29115295q0.24865723 0.15267944 0.68807983 0.53341675q0.90093994 0.7622528 1.421936 1.0242157q0.14779663 -0.1164093 0.19351196 -0.1590271q0.43432617 -0.4048462 0.4142456 -0.7706146q-0.0047302246 -0.4868927 -0.5048523 -1.0246582q-0.46817017 -0.5034485 -0.8630676 -0.5839081q-0.38427734 -0.06903076 -0.9364319 0.23205566l-0.5576477 -0.78318787q0.5308838 -0.3239746 1.0073547 -0.36227417q0.47650146 -0.03829956 0.9967041 0.24575806q0.5316467 0.27340698 1.0317688 0.8111725q0.5001221 0.5377655 0.6829529 0.9867859q0.18286133 0.44900513 0.12689209 0.7788696q-0.03390503 0.3306427 -0.25579834 0.6656494q-0.15093994 0.20478821 -0.63098145 0.6522522l-0.93722534 0.87364197q-0.98291016 0.9162445 -1.2032471 1.2070465q-0.2203064 0.29081726 -0.3326416 0.65185547l-0.6916504 -0.74372864q0.11074829 -0.31684875 0.3866577 -0.61672974zm1.5133972 -1.5389099q-0.5111389 -0.22842407 -1.3375854 -0.9105835q-0.4729004 -0.37086487 -0.72317505 -0.47935486q-0.22817993 -0.10771179 -0.48394775 -0.061523438q-0.24435425 0.03552246 -0.42721558 0.2059784q-0.2857666 0.26635742 -0.27789307 0.6648712q0.019317627 0.38786316 0.40237427 0.7997589q0.38305664 0.411911 0.85324097 0.5503845q0.48080444 0.14990234 0.938385 0.02243042q0.3357849 -0.12075806 0.8043823 -0.5575714l0.25143433 -0.23439026zm1.9869385 6.335312l0.52575684 -0.4900818q-0.9789734 0.18629456 -1.7025146 -0.59176636q-0.45755005 -0.49198914 -0.57696533 -1.1710358q-0.119384766 -0.67904663 0.15927124 -1.3660126q0.278656 -0.6869507 0.9187012 -1.2835846q0.6286011 -0.585968 1.3230286 -0.8487854q0.7165222 -0.26203918 1.3624573 -0.095184326q0.65737915 0.15621948 1.1362 0.6710968q0.35113525 0.377594 0.46777344 0.824234q0.1166687 0.44664 0.035461426 0.8641052l2.0687256 -1.9283752l0.659729 0.70939636l-5.760437 5.3696136l-0.6171875 
-0.66362zm0.0052490234 -4.1701965q-0.80007935 0.7457733 -0.89151 1.4504852q-0.06939697 0.7054901 0.3562317 1.1631622q0.4362793 0.4691162 1.1113892 0.437912q0.67510986 -0.031204224 1.452301 -0.7556915q0.85720825 -0.79904175 0.94866943 -1.5037537q0.1020813 -0.69325256 -0.35546875 -1.185257q-0.43624878 -0.4691162 -1.1228027 -0.42726135q-0.6644592 0.042663574 -1.4988098 0.82040405zm4.566345 8.053497q-0.70388794 -0.0914917 -1.1815491 -0.3297882q-0.4670105 -0.22685242 -0.8607178 -0.65019226q-0.63842773 -0.68652344 -0.6581116 -1.3730621q0.002380371 -0.6857605 0.5166931 -1.1651917q0.30859375 -0.2876587 0.68774414 -0.38475037q0.3897705 -0.08564758 0.74053955 -0.0067443848q0.37280273 0.0796814 0.71884155 0.29115295q0.24868774 0.15267944 0.68811035 0.53341675q0.9009094 0.7622528 1.4219055 1.0242157q0.14779663 -0.1164093 0.19351196 -0.1590271q0.43432617 -0.4048462 0.4142456 -0.7706146q-0.0047302246 -0.4868927 -0.5048523 -1.0246735q-0.46817017 -0.5034332 -0.8630676 -0.5839081q-0.38424683 -0.06903076 -0.9364319 0.23207092l-0.5576172 -0.78318787q0.5308533 -0.3239746 1.0073547 -0.36227417q0.47647095 -0.03829956 0.9966736 0.24575806q0.5316467 0.27340698 1.0317688 0.8111725q0.5001221 0.5377655 0.6829834 0.98677063q0.18283081 0.44900513 0.12686157 0.7788696q-0.03390503 0.33065796 -0.25579834 0.6656494q-0.15093994 0.20480347 -0.63098145 0.65226746l-0.9371948 0.8736267q-0.9829407 0.9162445 -1.2032776 1.2070618q-0.2203064 0.290802 -0.3326416 0.6518402l-0.6916504 -0.7437134q0.11077881 -0.31684875 0.3866577 -0.616745zm1.5134277 -1.5388947q-0.51116943 -0.22843933 -1.3375854 -0.91059875q-0.4729309 -0.37086487 -0.72320557 -0.47935486q-0.22817993 -0.10769653 -0.48394775 -0.061523438q-0.24435425 0.03552246 -0.42721558 0.20599365q-0.28573608 0.26634216 -0.27786255 0.66485596q0.01928711 0.38786316 0.40234375 0.79977417q0.38308716 0.411911 0.85324097 0.55036926q0.48080444 0.1499176 0.938385 0.022445679q0.3357849 -0.12075806 0.8043823 -0.55758667l0.25146484 -0.234375zm1.5408325 4.593689l-0.53204346 0.6668701q-0.34524536 -0.23358154 -0.55807495 -0.46240234q-0.3617859 -0.3890381 -0.43823242 -0.7236328q-0.0657959 -0.32312012 0.04336548 -0.5957794q0.11984253 -0.2612152 0.7598877 -0.8578491l2.400177 -2.2373352l-0.48947144 -0.52633667l0.5486145 -0.51138306l0.48947144 0.5263214l1.028656 -0.9588623l1.0834045 0.29310608l-1.4629822 1.3637085l0.67037964 0.72084045l-0.5486145 0.5113983l-0.67037964 -0.72084045l-2.4458923 2.279953q-0.29718018 0.27702332 -0.35668945 0.39656067q-0.03744507 0.12036133 -0.020111084 0.25372314q0.03942871 0.13415527 0.17773438 0.28289795q0.11706543 0.12585449 0.32080078 0.29904175zm2.9979248 3.4530945q-0.70388794 -0.0914917 -1.1815491 -0.32980347q-0.4670105 -0.22683716 -0.8607178 -0.650177q-0.63842773 -0.68652344 -0.6581116 -1.3730774q0.002380371 -0.6857605 0.5166931 -1.1651917q0.30859375 -0.2876587 0.68774414 -0.3847351q0.3897705 -0.08566284 0.74050903 -0.0067443848q0.37283325 0.0796814 0.7188721 0.2911377q0.24865723 0.15267944 0.68807983 0.53341675q0.90093994 0.76223755 1.421936 1.024231q0.14779663 -0.11642456 0.19351196 -0.1590271q0.43432617 -0.4048767 0.4142456 -0.7706299q-0.0047302246 -0.48690796 -0.5048523 -1.0246582q-0.46817017 -0.5034485 -0.8630676 -0.58392334q-0.38427734 -0.0690155 -0.9364319 0.23205566l-0.5576477 -0.7831726q0.5308838 -0.3239746 1.0073547 -0.36227417q0.47650146 -0.03829956 0.9967041 0.24575806q0.5316467 0.27340698 1.0317688 0.81118774q0.5001221 0.53775024 0.6829529 0.9867554q0.18286133 0.44900513 0.12689209 0.7788696q-0.03390503 0.33065796 -0.25579834 0.6656494q-0.15093994 
0.20480347 -0.63098145 0.6522827l-0.93722534 0.8736267q-0.98291016 0.91622925 -1.2032471 1.2070312q-0.2203064 0.29083252 -0.3326416 0.65185547l-0.6916504 -0.7437134q0.11074829 -0.3168335 0.3866577 -0.61672974zm1.5134277 -1.5389099q-0.51116943 -0.22842407 -1.337616 -0.9105835q-0.4729004 -0.37088013 -0.72317505 -0.47937012q-0.22817993 -0.10769653 -0.48394775 -0.061523438q-0.24435425 0.03552246 -0.42721558 0.20599365q-0.28573608 0.26635742 -0.27789307 0.66485596q0.019317627 0.38787842 0.40237427 0.79977417q0.38305664 0.41189575 0.85324097 0.5503845q0.48080444 0.14990234 0.938385 0.02243042q0.3357849 -0.12075806 0.8043823 -0.55758667l0.25146484 -0.234375zm0.88894653 5.3611755l7.519745 -3.8908997l0.53201294 0.57211304l-7.5197144 3.8908691l-0.53204346 -0.5720825z" fill-rule="nonzero"></path><path fill="#351c75" d="m421.94885 216.1809l4.171753 -3.8887177l0.5852356 0.629303l-0.6286316 0.585968q0.6798401 -0.16378784 1.0085449 -0.0856781q0.32867432 0.07810974 0.552124 0.3183899q0.34051514 0.36613464 0.4524231 0.9453583l-0.87493896 0.3670044q-0.09616089 -0.40167236 -0.34088135 -0.6648407q-0.21282959 -0.22883606 -0.52023315 -0.28405762q-0.30740356 -0.055221558 -0.6333313 0.09907532q-0.48358154 0.23716736 -0.9293213 0.65267944l-2.1830444 2.034912l-0.6596985 -0.70939636zm6.4254456 4.201874l0.5895691 0.8175049q-0.7996521 0.42500305 -1.5902405 0.28616333q-0.7905884 -0.13883972 -1.4609375 -0.8596802q-0.8512573 -0.91534424 -0.7913208 -1.975174q0.059936523 -1.0598145 1.0771484 -2.0080109q1.0629578 -0.99082947 2.1345215 -0.98576355q1.0936584 0.005859375 1.8917236 0.8639984q0.78741455 0.84669495 0.7054138 1.9057312q-0.07058716 1.0483704 -1.1106567 2.0178833q-0.057159424 0.053268433 -0.18289185 0.17047119l-2.8942566 -3.1121979q-0.6538391 0.6735687 -0.7003784 1.3577576q-0.035858154 0.6956177 0.45361328 1.2219543q0.36175537 0.38902283 0.8114319 0.48249817q0.4611206 0.082839966 1.0672607 -0.18313599zm-1.0170898 -3.3881073l2.1706848 2.3341522q0.4831848 -0.53585815 0.5439148 -0.998291q0.08081055 -0.716156 -0.41931152 -1.2539215q-0.45755005 -0.4920044 -1.0975952 -0.5148468q-0.6286011 -0.0335083 -1.1976929 0.4329071zm2.034668 4.6887817l0.7633667 0.59140015q-0.36968994 0.45141602 -0.35354614 0.9276581q0.038208008 0.47703552 0.48513794 0.9575958q0.45755005 0.49198914 0.86465454 0.5397186q0.41775513 0.059158325 0.6920471 -0.1965332q0.24002075 -0.22373962 0.18566895 -0.5575409q-0.046905518 -0.23397827 -0.4303589 -0.9445801q-0.52926636 -0.9591522 -0.67941284 -1.395935q-0.12808228 -0.43598938 -0.025604248 -0.8305664q0.11312866 -0.38313293 0.4217224 -0.6707916q0.28573608 -0.26635742 0.6313782 -0.35357666q0.35705566 -0.09786987 0.7298889 -0.018188477q0.27468872 0.042999268 0.62072754 0.25445557q0.3460083 0.21147156 0.6439514 0.5318451q0.45755005 0.4920044 0.6608887 0.98599243q0.21398926 0.5054321 0.122161865 0.911438q-0.06976318 0.40681458 -0.40594482 0.84835815l-0.7298889 -0.60128784q0.2668152 -0.35551453 0.22546387 -0.74417114q-0.019317627 -0.38786316 -0.40237427 -0.7997589q-0.45755005 -0.4920044 -0.80908203 -0.5487976q-0.3515625 -0.05680847 -0.5687256 0.14562988q-0.137146 0.12783813 -0.16552734 0.30381775q-0.029174805 0.19807434 0.061065674 0.45573425q0.06149292 0.13493347 0.40237427 0.7997589q0.49734497 0.9248352 0.6482849 1.3395233q0.15093994 0.4146881 0.05908203 0.82069397q-0.08041382 0.39537048 -0.44613647 0.7362976q-0.35430908 0.3302765 -0.85446167 0.41197205q-0.5001221 0.081710815 -1.0333557 -0.14750671q-0.5332031 -0.22920227 -0.9907532 -0.72120667q-0.75549316 -0.8123779 -0.82839966 -1.5561371q-0.050811768 -0.7429657 
0.5057068 -1.4753113zm4.484894 3.12471q1.1543579 -1.0760498 2.3209229 -0.9459076q0.9573059 0.11160278 1.6808472 0.88964844q0.7980652 0.85813904 0.73812866 1.9179535q-0.04928589 1.0712585 -1.0436401 1.998169q-0.8114929 0.75642395 -1.5035706 0.9529724q-0.68063354 0.18586731 -1.3683472 -0.048858643q-0.677063 -0.22328186 -1.177185 -0.76104736q-0.81933594 -0.8810272 -0.77005005 -1.9522858q0.059936523 -1.0598145 1.1228943 -2.050644zm0.68099976 0.73228455q-0.8114929 0.7564392 -0.89230347 1.47258q-0.058746338 0.7169342 0.44137573 1.2546997q0.48947144 0.52633667 1.2087097 0.4967041q0.7298889 -0.018203735 1.5528259 -0.7852936q0.7657776 -0.71380615 0.8359375 -1.4414062q0.07015991 -0.72758484 -0.4086609 -1.2424622q-0.5001221 -0.53778076 -1.230011 -0.519577q-0.7078247 0.018981934 -1.5078735 0.76475525zm4.061859 8.566483l0.6171875 -0.5753174q-1.1661682 0.16854858 -1.9429321 -0.6667175q-0.34051514 -0.36613464 -0.5119324 -0.82580566q-0.14935303 -0.4588623 -0.09259033 -0.81082153q0.078826904 -0.35118103 0.33422852 -0.6960449q0.16314697 -0.23753357 0.6203308 -0.6636963l2.5830688 -2.4078064l0.659729 0.70939636l-2.3087769 2.1520996q-0.5600281 0.52204895 -0.7117615 0.7489319q-0.21047974 0.32435608 -0.17895508 0.6794586q0.042175293 0.3665619 0.3400879 0.6869354q0.29794312 0.32037354 0.7133484 0.44581604q0.43743896 0.12623596 0.81817627 -0.015045166q0.39212036 -0.15193176 0.9178772 -0.64201355l2.2402039 -2.0881805l0.6596985 0.70939636l-4.171753 3.8887177l-0.5852356 -0.629303zm1.4822083 1.5937958l4.171753 -3.8887177l0.5852356 0.629303l-0.6286011 0.585968q0.6798401 -0.16377258 1.0085144 -0.0856781q0.32867432 0.07810974 0.55215454 0.3183899q0.34048462 0.36613464 0.45239258 0.9453583l-0.87493896 0.3670044q-0.09613037 -0.40167236 -0.34088135 -0.66482544q-0.21279907 -0.22883606 -0.52020264 -0.28405762q-0.30740356 -0.055221558 -0.6333618 0.09906006q-0.48355103 0.23716736 -0.9293213 0.65267944l-2.183014 2.034912l-0.659729 -0.70939636zm6.4913025 3.905548l0.5469971 0.7717438q-0.8158264 0.5682373 -1.6741943 0.47122192q-0.8363037 -0.09623718 -1.4641113 -0.77130127q-0.79803467 -0.85813904 -0.727478 -1.9065094q0.08123779 -1.0369415 1.1213074 -2.0064545q0.6743469 -0.6285858 1.3793945 -0.8799591q0.72714233 -0.2505951 1.4042358 -0.027313232q0.677063 0.22328186 1.177185 0.76104736q0.6278076 0.67507935 0.68927 1.4294891q0.061462402 0.7544098 -0.45013428 1.4662476l-0.7519226 -0.60206604q0.32632446 -0.4750824 0.29794312 -0.9185791q-0.016937256 -0.45414734 -0.34677124 -0.80885315q-0.48947144 -0.5263214 -1.1972961 -0.5073395q-0.6857605 0.01977539 -1.5086975 0.78686523q-0.84576416 0.7883911 -0.9249878 1.4603424q-0.07925415 0.6719513 0.4102173 1.198288q0.38305664 0.411911 0.8898926 0.45211792q0.51745605 0.05166626 1.1291504 -0.36898804zm3.5626526 4.1980133l0.5895691 0.8175049q-0.7996826 0.42500305 -1.590271 0.28616333q-0.79055786 -0.13885498 -1.4609375 -0.8596802q-0.8512573 -0.9153595 -0.7913208 -1.975174q0.059936523 -1.0598145 1.0771484 -2.0080261q1.0629578 -0.9908142 2.134552 -0.9857483q1.0936279 0.005844116 1.8916931 0.8639984q0.78741455 0.84669495 0.7054138 1.905716q-0.07058716 1.0483704 -1.1106567 2.0178986q-0.057159424 0.053268433 -0.18286133 0.17045593l-2.894287 -3.1121826q-0.6538391 0.6735687 -0.7003479 1.3577423q-0.035888672 0.69563293 0.45358276 1.2219543q0.3617859 0.38902283 0.8114624 0.48251343q0.4610901 0.082839966 1.0672607 -0.18313599zm-1.0171204 -3.3881073l2.1706848 2.334137q0.48321533 -0.5358429 0.5439148 -0.998291q0.08081055 -0.71614075 -0.41931152 -1.2539062q-0.45755005 -0.4920044 -1.0975952 -0.5148468q-0.6286011 
-0.0335083 -1.1976929 0.4329071zm2.034668 4.6887665l0.7633667 0.5914154q-0.36968994 0.45141602 -0.35354614 0.9276581q0.038238525 0.47702026 0.48513794 0.95758057q0.45755005 0.4920044 0.86465454 0.5397186q0.41775513 0.059173584 0.69207764 -0.1965332q0.24002075 -0.22372437 0.18563843 -0.55752563q-0.046905518 -0.23397827 -0.4303589 -0.9445801q-0.52926636 -0.9591675 -0.67941284 -1.3959503q-0.12805176 -0.43598938 -0.02557373 -0.8305664q0.113098145 -0.38313293 0.4216919 -0.6707916q0.28573608 -0.26634216 0.6313782 -0.3535614q0.35708618 -0.09786987 0.7298889 -0.018188477q0.27468872 0.04298401 0.62072754 0.25445557q0.3460083 0.21147156 0.6439514 0.5318451q0.45755005 0.49198914 0.6609192 0.9859772q0.21398926 0.5054321 0.12213135 0.91145325q-0.06976318 0.40679932 -0.40594482 0.8483429l-0.7298889 -0.6012726q0.2668457 -0.3555298 0.22546387 -0.74417114q-0.019317627 -0.38786316 -0.40237427 -0.79977417q-0.45755005 -0.49198914 -0.80908203 -0.5487976q-0.3515625 -0.056793213 -0.5687256 0.14562988q-0.137146 0.1278534 -0.16552734 0.303833q-0.029174805 0.19807434 0.06109619 0.455719q0.061462402 0.13493347 0.40234375 0.79977417q0.49734497 0.9248352 0.6482849 1.3395233q0.15093994 0.41467285 0.05908203 0.82069397q-0.0803833 0.39535522 -0.44613647 0.7362976q-0.35430908 0.33026123 -0.85443115 0.41197205q-0.5001526 0.081710815 -1.0333557 -0.14750671q-0.53323364 -0.22921753 -0.9907837 -0.7212219q-0.75549316 -0.8123627 -0.82836914 -1.5561218q-0.050842285 -0.7429657 0.50567627 -1.4753265z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m460.82727 426.01096l126.17322 0.03149414l0 29.196838l-126.17322 -0.03149414z" fill-rule="evenodd"></path><path fill="#38761d" d="m474.28864 445.2543l0 -5.703125l0.859375 2.1362305E-4l0 0.859375q0.34375 -0.60928345 0.625 -0.7967224q0.28125 -0.18743896 0.609375 -0.18734741q0.5 1.2207031E-4 1.0 0.31274414l-0.328125 0.89056396q-0.359375 -0.20321655 -0.71875 -0.2033081q-0.3125 -9.1552734E-5 -0.5625 0.18734741q-0.25 0.18743896 -0.359375 0.53115845q-0.15625 0.5155945 -0.15625 1.1249695l0 2.984375l-0.96875 -2.4414062E-4zm7.4548645 -1.8418884l1.0 0.12524414q-0.234375 0.87493896 -0.875 1.3591614q-0.640625 0.4842224 -1.625 0.48397827q-1.25 -3.0517578E-4 -1.984375 -0.7661133q-0.734375 -0.7658081 -0.734375 -2.156433q0 -1.453125 0.734375 -2.234192q0.75 -0.7966919 1.921875 -0.79641724q1.15625 3.0517578E-4 1.875 0.7817383q0.71875 0.7657776 0.71875 2.1876526q0 0.078125 0 0.25l-4.25 -0.0010375977q0.046875 0.9375 0.515625 1.4376221q0.484375 0.5001221 1.203125 0.5003052q0.53125 1.2207031E-4 0.90625 -0.26541138q0.375 -0.28115845 0.59375 -0.9060974zm-3.171875 -1.5632935l3.1875 7.9345703E-4q-0.0625 -0.71875 -0.359375 -1.0782166q-0.46875 -0.5469971 -1.203125 -0.5471802q-0.671875 -1.5258789E-4 -1.125 0.45285034q-0.453125 0.43737793 -0.5 1.1717529zm8.926483 2.7053528q-0.546875 0.45300293 -1.046875 0.64038086q-0.484375 0.18737793 -1.0625 0.18722534q-0.9375 -2.4414062E-4 -1.453125 -0.4534912q-0.5 -0.46887207 -0.5 -1.1719971q0 -0.421875 0.1875 -0.7655945q0.203125 -0.34368896 0.5 -0.5467529q0.3125 -0.21865845 0.703125 -0.3279419q0.28125 -0.07803345 0.859375 -0.14041138q1.171875 -0.14031982 1.71875 -0.34332275q0.015625 -0.1875 0.015625 -0.25q0 -0.59375 -0.28125 -0.82818604q-0.359375 -0.32821655 -1.09375 -0.32839966q-0.6875 -1.5258789E-4 -1.015625 0.23413086q-0.3125 0.23428345 -0.46875 0.8436279l-0.953125 -0.12524414q0.125 -0.6093445 0.421875 -0.9842529q0.296875 -0.37493896 0.859375 -0.5622864q0.5625 -0.20300293 1.296875 -0.20281982q0.734375 1.8310547E-4 1.1875 0.17218018q0.453125 0.17199707 
0.65625 0.4376526q0.21875 0.25006104 0.3125 0.64071655q0.046875 0.25 0.046875 0.90625l0 1.28125q0 1.34375 0.0625 1.7031555q0.0625 0.359375 0.25 0.68756104l-1.015625 -2.4414062E-4q-0.15625 -0.29693604 -0.1875 -0.70318604zm-0.09375 -2.1562805q-0.515625 0.21862793 -1.578125 0.35897827q-0.59375 0.09362793 -0.84375 0.2029419q-0.234375 0.093688965 -0.375 0.31237793q-0.140625 0.20309448 -0.140625 0.45309448q0 0.390625 0.296875 0.65634155q0.296875 0.25006104 0.859375 0.25021362q0.5625 1.2207031E-4 0.984375 -0.24975586q0.4375 -0.24990845 0.65625 -0.6717224q0.140625 -0.32809448 0.140625 -0.9687195l0 -0.34375zm5.9890137 2.8608704l0 -0.71875q-0.53125 0.8436279 -1.59375 0.8433533q-0.671875 -1.5258789E-4 -1.25 -0.37530518q-0.578125 -0.3751526 -0.890625 -1.0470886q-0.3125 -0.67196655 -0.3125 -1.5469666q0 -0.859375 0.28125 -1.5467834q0.296875 -0.70306396 0.859375 -1.0622864q0.5625 -0.37487793 1.265625 -0.37469482q0.515625 1.2207031E-4 0.921875 0.21896362q0.40625 0.21887207 0.65625 0.5626831l0 -2.828125l0.96875 2.4414062E-4l0 7.875l-0.90625 -2.4414062E-4zm-3.046875 -2.844513q0 1.09375 0.453125 1.6407471q0.46875 0.5313721 1.09375 0.53152466q0.640625 1.5258789E-4 1.078125 -0.51535034q0.4375 -0.51553345 0.4375 -1.5780334q0 -1.171875 -0.453125 -1.7188416q-0.4375 -0.5469971 -1.109375 -0.54714966q-0.640625 -1.8310547E-4 -1.078125 0.53097534q-0.421875 0.5155029 -0.421875 1.6561279zm8.230896 2.8458252l0 -5.703125l0.875 2.1362305E-4l0 0.796875q0.265625 -0.42181396 0.703125 -0.6716919q0.453125 -0.24990845 1.015625 -0.24975586q0.640625 1.5258789E-4 1.046875 0.26589966q0.40625 0.25009155 0.578125 0.7344971q0.671875 -0.9998169 1.75 -0.99954224q0.859375 2.1362305E-4 1.3125 0.46905518q0.453125 0.46887207 0.453125 1.4376221l0 3.921875l-0.953125 -2.4414062E-4l0 -3.59375q0 -0.578125 -0.09375 -0.8281555q-0.09375 -0.265625 -0.34375 -0.42193604q-0.25 -0.15631104 -0.578125 -0.15640259q-0.609375 -1.5258789E-4 -1.015625 0.40600586q-0.390625 0.40612793 -0.390625 1.2811279l0 3.3125l-0.96875 -2.4414062E-4l0 -3.703125q0 -0.640625 -0.234375 -0.9687805q-0.234375 -0.32818604 -0.765625 -0.32833862q-0.40625 -9.1552734E-5 -0.765625 0.2185669q-0.34375 0.21865845 -0.5 0.6405029q-0.15625 0.40621948 -0.15625 1.1874695l0 2.953125l-0.96875 -2.4414062E-4zm12.7829895 -1.8405762l0.9999695 0.12524414q-0.234375 0.8749695 -0.8749695 1.3591614q-0.640625 0.4842224 -1.625 0.48397827q-1.25 -3.0517578E-4 -1.984375 -0.7661133q-0.734375 -0.7658081 -0.734375 -2.156433q0 -1.453125 0.734375 -2.234192q0.75 -0.7966919 1.921875 -0.7963867q1.15625 2.746582E-4 1.875 0.78170776q0.7187195 0.7658081 0.7187195 2.187683q0 0.078125 0 0.25l-4.2499695 -0.0010681152q0.046875 0.9375 0.515625 1.4376221q0.484375 0.5001221 1.203125 0.5003052q0.53125 1.2207031E-4 0.90625 -0.26541138q0.375 -0.28112793 0.59375 -0.9060974zm-3.171875 -1.5632935l3.1875 7.9345703E-4q-0.0625 -0.71875 -0.359375 -1.0782166q-0.46875 -0.54696655 -1.203125 -0.54714966q-0.671875 -1.8310547E-4 -1.125 0.45281982q-0.453125 0.43740845 -0.5 1.1717529zm7.317108 2.548706l0.125 0.8437805q-0.40625 0.09365845 -0.71875 0.093566895q-0.53125 -1.2207031E-4 -0.828125 -0.1720581q-0.28125 -0.17196655 -0.40625 -0.43762207q-0.109375 -0.26565552 -0.109375 -1.1406555l0 -3.28125l-0.71875 -1.8310547E-4l0 -0.75l0.71875 1.8310547E-4l0 -1.40625l0.953125 -0.59350586l0 2.0l0.984375 2.4414062E-4l0 0.75l-0.984375 -2.4414062E-4l0 3.34375q0 0.40625 0.046875 0.5312805q0.0625 0.109375 0.171875 0.18753052q0.125 0.06253052 0.328125 0.06259155q0.171875 3.0517578E-5 0.4375 -0.031158447zm0.8356323 0.8595886l0 -5.703125l0.859375 2.1362305E-4l0 
0.859375q0.34375 -0.60928345 0.625 -0.7967224q0.28125 -0.18743896 0.609375 -0.18734741q0.5 1.2207031E-4 1.0 0.31274414l-0.328125 0.89056396q-0.359375 -0.20321655 -0.71875 -0.2033081q-0.3125 -9.1552734E-5 -0.5625 0.18734741q-0.25 0.18743896 -0.359375 0.53115845q-0.15625 0.5155945 -0.15625 1.1249695l0 2.984375l-0.96875 -2.4414062E-4zm3.56427 -6.76474l0 -1.109375l0.96875 2.4414062E-4l0 1.109375l-0.96875 -2.4414062E-4zm0 6.765625l0 -5.703125l0.96875 2.4414062E-4l0 5.703125l-0.96875 -2.4414062E-4zm6.0862427 -2.0922241l0.9375 0.12524414q-0.140625 0.98431396 -0.796875 1.5466614q-0.640625 0.5467224 -1.5625 0.5464783q-1.171875 -2.746582E-4 -1.890625 -0.76608276q-0.703125 -0.7658081 -0.703125 -2.187683q0 -0.921875 0.296875 -1.6092834q0.3125 -0.70306396 0.9375 -1.0466614q0.625 -0.3435974 1.359375 -0.3434143q0.921875 2.4414062E-4 1.515625 0.46914673q0.59375 0.4689026 0.765625 1.3283081l-0.953125 0.14038086q-0.125 -0.5625305 -0.46875 -0.8438721q-0.34375 -0.29693604 -0.828125 -0.2970581q-0.71875 -1.8310547E-4 -1.1875 0.5309448q-0.453125 0.5155029 -0.453125 1.6405029q0 1.15625 0.4375 1.6719971q0.4375 0.51571655 1.15625 0.51589966q0.5625 1.5258789E-4 0.9375 -0.34350586q0.390625 -0.34365845 0.5 -1.0780029zm4.5543823 2.0948792l0 -5.703125l0.875 2.1362305E-4l0 0.796875q0.265625 -0.42178345 0.703125 -0.6716919q0.453125 -0.24987793 1.015625 -0.24975586q0.640625 1.8310547E-4 1.046875 0.26589966q0.40625 0.25009155 0.578125 0.7345276q0.671875 -0.9998474 1.75 -0.99957275q0.859375 2.1362305E-4 1.3125 0.4690857q0.453125 0.46884155 0.453125 1.4375916l0 3.921875l-0.953125 -2.1362305E-4l0 -3.59375q0 -0.578125 -0.09375 -0.8281555q-0.09375 -0.26565552 -0.34375 -0.42196655q-0.25 -0.15631104 -0.578125 -0.15640259q-0.609375 -1.5258789E-4 -1.015625 0.40600586q-0.390625 0.40615845 -0.390625 1.2811584l0 3.3125l-0.96875 -2.4414062E-4l0 -3.703125q0 -0.640625 -0.234375 -0.96881104q-0.234375 -0.32818604 -0.765625 -0.3283081q-0.40625 -1.2207031E-4 -0.765625 0.2185669q-0.34375 0.21865845 -0.5 0.6404724q-0.15625 0.40621948 -0.15625 1.1874695l0 2.953125l-0.96875 -2.4414062E-4zm12.782959 -1.8405457l1.0 0.12524414q-0.234375 0.87493896 -0.875 1.3591614q-0.640625 0.4842224 -1.625 0.48397827q-1.25 -3.3569336E-4 -1.984375 -0.7661438q-0.734375 -0.7658081 -0.734375 -2.156433q0 -1.453125 0.734375 -2.234192q0.75 -0.7966919 1.921875 -0.7963867q1.15625 2.746582E-4 1.875 0.78170776q0.71875 0.7658081 0.71875 2.187683q0 0.078125 0 0.25l-4.25 -0.0010681152q0.046875 0.9375305 0.515625 1.4376526q0.484375 0.5001221 1.203125 0.5003052q0.53125 1.2207031E-4 0.90625 -0.26541138q0.375 -0.28115845 0.59375 -0.9060974zm-3.171875 -1.5632935l3.1875 7.9345703E-4q-0.0625 -0.7187805 -0.359375 -1.0782166q-0.46875 -0.5469971 -1.203125 -0.5471802q-0.671875 -1.5258789E-4 -1.125 0.45285034q-0.453125 0.43737793 -0.5 1.1717529zm7.3171387 2.548706l0.125 0.8437805q-0.40625 0.09362793 -0.71875 0.093566895q-0.53125 -1.5258789E-4 -0.828125 -0.17208862q-0.28125 -0.17193604 -0.40625 -0.43759155q-0.109375 -0.26565552 -0.109375 -1.1406555l0 -3.28125l-0.71875 -1.8310547E-4l0 -0.75l0.71875 1.8310547E-4l0 -1.40625l0.953125 -0.59350586l0 2.0l0.984375 2.4414062E-4l0 0.75l-0.984375 -2.4414062E-4l0 3.34375q0 0.40625 0.046875 0.53125q0.0625 0.10940552 0.171875 0.18753052q0.125 0.06253052 0.328125 0.06259155q0.171875 6.1035156E-5 0.4375 -0.03112793zm4.5700073 0.15737915q-0.546875 0.45300293 -1.046875 0.64038086q-0.484375 0.18737793 -1.0625 0.18722534q-0.9375 -2.4414062E-4 -1.453125 -0.4534912q-0.5 -0.46887207 -0.5 -1.1719971q0 -0.421875 0.1875 -0.7655945q0.203125 -0.34368896 0.5 
-0.5467529q0.3125 -0.21865845 0.703125 -0.3279419q0.28125 -0.07803345 0.859375 -0.14041138q1.171875 -0.14031982 1.71875 -0.34332275q0.015625 -0.1875 0.015625 -0.25q0 -0.59375 -0.28125 -0.82818604q-0.359375 -0.32821655 -1.09375 -0.32839966q-0.6875 -1.5258789E-4 -1.015625 0.23413086q-0.3125 0.23428345 -0.46875 0.8436279l-0.953125 -0.12524414q0.125 -0.6093445 0.421875 -0.9842529q0.296875 -0.37493896 0.859375 -0.5622864q0.5625 -0.20300293 1.296875 -0.20281982q0.734375 1.8310547E-4 1.1875 0.17218018q0.453125 0.17199707 0.65625 0.4376526q0.21875 0.25006104 0.3125 0.64071655q0.046875 0.25 0.046875 0.90625l0 1.28125q0 1.34375 0.0625 1.7031555q0.0625 0.359375 0.25 0.68756104l-1.015625 -2.4414062E-4q-0.15625 -0.29693604 -0.1875 -0.70318604zm-0.09375 -2.1562805q-0.515625 0.21862793 -1.578125 0.35897827q-0.59375 0.09362793 -0.84375 0.2029419q-0.234375 0.093688965 -0.375 0.31237793q-0.140625 0.20309448 -0.140625 0.45309448q0 0.390625 0.296875 0.65634155q0.296875 0.25006104 0.859375 0.25021362q0.5625 1.2207031E-4 0.984375 -0.24975586q0.4375 -0.24990845 0.65625 -0.6717224q0.140625 -0.32809448 0.140625 -0.9687195l0 -0.34375zm5.9890137 2.8608704l0 -0.71875q-0.53125 0.8436279 -1.59375 0.8433533q-0.671875 -1.5258789E-4 -1.25 -0.37530518q-0.578125 -0.3751526 -0.890625 -1.0470886q-0.3125 -0.67196655 -0.3125 -1.5469666q0 -0.859375 0.28125 -1.546814q0.296875 -0.70303345 0.859375 -1.0622559q0.5625 -0.37487793 1.265625 -0.37469482q0.515625 1.2207031E-4 0.921875 0.21896362q0.40625 0.21887207 0.65625 0.5626831l0 -2.828125l0.96875 2.4414062E-4l0 7.875l-0.90625 -2.4414062E-4zm-3.046875 -2.844513q0 1.09375 0.453125 1.6407471q0.46875 0.5313721 1.09375 0.53152466q0.640625 1.5258789E-4 1.078125 -0.51535034q0.4375 -0.51553345 0.4375 -1.5780334q0 -1.171875 -0.453125 -1.7188416q-0.4375 -0.5469971 -1.109375 -0.54714966q-0.640625 -1.8310547E-4 -1.078125 0.53097534q-0.421875 0.5155029 -0.421875 1.6561279zm9.004639 2.1428833q-0.546875 0.45300293 -1.046875 0.64035034q-0.484375 0.18740845 -1.0625 0.18725586q-0.9375 -2.4414062E-4 -1.453125 -0.4534912q-0.5 -0.46887207 -0.5 -1.1719971q0 -0.421875 0.1875 -0.7655945q0.203125 -0.34368896 0.5 -0.5467529q0.3125 -0.21865845 0.703125 -0.3279419q0.28125 -0.078063965 0.859375 -0.14041138q1.171875 -0.14031982 1.71875 -0.34332275q0.015625 -0.1875 0.015625 -0.25q0 -0.59375 -0.28125 -0.82818604q-0.359375 -0.32821655 -1.09375 -0.32839966q-0.6875 -1.5258789E-4 -1.015625 0.23413086q-0.3125 0.23428345 -0.46875 0.8436279l-0.953125 -0.12524414q0.125 -0.6093445 0.421875 -0.9842529q0.296875 -0.37493896 0.859375 -0.5622864q0.5625 -0.20300293 1.296875 -0.20281982q0.734375 1.8310547E-4 1.1875 0.17218018q0.453125 0.17199707 0.65625 0.4376526q0.21875 0.25006104 0.3125 0.64071655q0.046875 0.25 0.046875 0.90625l0 1.28125q0 1.34375 0.0625 1.7031555q0.0625 0.359375 0.25 0.68756104l-1.015625 -2.4414062E-4q-0.15625 -0.29693604 -0.1875 -0.70318604zm-0.09375 -2.1562805q-0.515625 0.21862793 -1.578125 0.35897827q-0.59375 0.09359741 -0.84375 0.20291138q-0.234375 0.09371948 -0.375 0.31240845q-0.140625 0.20309448 -0.140625 0.45309448q0 0.390625 0.296875 0.65634155q0.296875 0.25006104 0.859375 0.25021362q0.5625 1.2207031E-4 0.984375 -0.24975586q0.4375 -0.24990845 0.65625 -0.6717224q0.140625 -0.32809448 0.140625 -0.9687195l0 -0.34375zm4.4108887 2.0010986l0.125 0.8437805q-0.40625 0.09365845 -0.71875 0.093566895q-0.53125 -1.2207031E-4 -0.828125 -0.1720581q-0.28125 -0.17196655 -0.40625 -0.43762207q-0.109375 -0.26565552 -0.109375 -1.1406555l0 -3.28125l-0.71875 -1.5258789E-4l0 -0.75l0.71875 1.5258789E-4l0 -1.40625l0.953125 
-0.59350586l0 2.0l0.984375 2.4414062E-4l0 0.75l-0.984375 -2.4414062E-4l0 3.34375q0 0.40625 0.046875 0.5312805q0.0625 0.109375 0.171875 0.18753052q0.125 0.06253052 0.328125 0.06259155q0.171875 3.0517578E-5 0.4375 -0.031158447zm4.5700073 0.15740967q-0.546875 0.4529724 -1.046875 0.64035034q-0.484375 0.18737793 -1.0625 0.18722534q-0.9375 -2.1362305E-4 -1.453125 -0.4534607q-0.5 -0.4689026 -0.5 -1.1720276q0 -0.421875 0.1875 -0.76556396q0.203125 -0.34368896 0.5 -0.5467529q0.3125 -0.21865845 0.703125 -0.3279419q0.28125 -0.078063965 0.859375 -0.14041138q1.171875 -0.14035034 1.71875 -0.34332275q0.015625 -0.1875 0.015625 -0.25q0 -0.59375 -0.28125 -0.82818604q-0.359375 -0.32821655 -1.09375 -0.32839966q-0.6875 -1.8310547E-4 -1.015625 0.23413086q-0.3125 0.23428345 -0.46875 0.8436279l-0.953125 -0.12524414q0.125 -0.6093445 0.421875 -0.98428345q0.296875 -0.37490845 0.859375 -0.5622864q0.5625 -0.20297241 1.296875 -0.2027893q0.734375 1.8310547E-4 1.1875 0.17218018q0.453125 0.17196655 0.65625 0.4376526q0.21875 0.25006104 0.3125 0.64071655q0.046875 0.25 0.046875 0.90625l0 1.28125q0 1.34375 0.0625 1.7031555q0.0625 0.359375 0.25 0.68756104l-1.015625 -2.746582E-4q-0.15625 -0.29690552 -0.1875 -0.7031555zm-0.09375 -2.1562805q-0.515625 0.21862793 -1.578125 0.35897827q-0.59375 0.09359741 -0.84375 0.20291138q-0.234375 0.093688965 -0.375 0.31240845q-0.140625 0.20309448 -0.140625 0.45309448q0 0.390625 0.296875 0.65631104q0.296875 0.25009155 0.859375 0.25021362q0.5625 1.5258789E-4 0.984375 -0.24975586q0.4375 -0.24987793 0.65625 -0.6716919q0.140625 -0.32809448 0.140625 -0.9687195l0 -0.34375z" fill-rule="nonzero"></path><path fill="#000000" fill-opacity="0.0" d="m120.56693 371.23615l121.826775 -211.3071" fill-rule="evenodd"></path><path stroke="#434343" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="8.0,6.0" d="m126.5606 360.84018l109.83944 -190.51518" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" stroke-linecap="butt" d="m123.698715 359.1902l-1.6714249 9.51297l7.395195 -6.212982z" fill-rule="evenodd"></path><path fill="#434343" stroke="#434343" stroke-width="2.0" stroke-linecap="butt" d="m239.26192 171.97499l1.6714172 -9.51297l-7.3952026 6.212982z" fill-rule="evenodd"></path><path fill="#000000" fill-opacity="0.0" d="m336.40564 304.8641l-15.181122 65.322815" fill-rule="evenodd"></path><path stroke="#000000" stroke-width="2.0" stroke-linejoin="round" stroke-linecap="butt" stroke-dasharray="2.0,6.0" d="m336.40564 304.8641l-15.181122 65.322815" fill-rule="evenodd"></path></g></svg>
+
diff --git a/doc/source/_static/gnocchi-icon-source.png b/doc/source/_static/gnocchi-icon-source.png
new file mode 100644
index 0000000000000000000000000000000000000000..d6108c4182d54422ec09aa935f2fd0594aa2d68f
Binary files /dev/null and b/doc/source/_static/gnocchi-icon-source.png differ
diff --git a/doc/source/_static/gnocchi-icon.ico b/doc/source/_static/gnocchi-icon.ico
new file mode 100644
index 0000000000000000000000000000000000000000..783bde939396df274f409f34cfbae74b43b0aa68
Binary files /dev/null and b/doc/source/_static/gnocchi-icon.ico differ
diff --git a/doc/source/_static/gnocchi-logo.png b/doc/source/_static/gnocchi-logo.png
new file mode 100644
index 0000000000000000000000000000000000000000..e3fc8903851d419a6f0d64c8d79f7d9d6c2042f6
Binary files /dev/null and b/doc/source/_static/gnocchi-logo.png differ
diff --git a/doc/source/_static/grafana-screenshot.png b/doc/source/_static/grafana-screenshot.png
new file mode 100644
index 0000000000000000000000000000000000000000..c2c07af9c62cc558489a6939fc2903c132a47ca2
Binary files /dev/null and b/doc/source/_static/grafana-screenshot.png differ
diff --git a/doc/source/alternatives.rst b/doc/source/alternatives.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9fe9cad4fa1449833f8d77e70f46ce79fa022b46
--- /dev/null
+++ b/doc/source/alternatives.rst
@@ -0,0 +1,56 @@
+Comparisons To Alternatives
+---------------------------
+
+The following table summarises the features of several existing open source
+time series databases compared to Gnocchi. More details are provided below.
+
+.. include:: comparison-table.rst
+
+Gnocchi vs Prometheus
+~~~~~~~~~~~~~~~~~~~~~
+`Prometheus <https://prometheus.io/>`_ is a full-featured solution that
+includes everything from polling the metrics to storing and archiving them. It
+offers advanced features such as alerting.
+
+In comparison, Gnocchi does not offer polling, as it prefers to leverage
+existing solutions (e.g. `collectd <http://collectd.org>`_). However, it
+provides high availability and horizontal scalability, as well as
+multi-tenancy.
+
+
+Gnocchi vs InfluxDB
+~~~~~~~~~~~~~~~~~~~
+
+`InfluxDB <http://influxdb.org>`_ is a time series database that stores
+metrics in local files. It supports a variety of input protocols and created
+its own query language, InfluxQL, inspired by SQL. The HTTP API it offers is
+just a way to pass InfluxQL over the wire. Horizontal scalability is only
+provided in the commercial version. The data model is based on time series
+with labels associated with them.
+
+In comparison, Gnocchi offers scalability and multi-tenancy. Its data model
+differs as it does not provide labels, but |resources| to attach to |metrics|.
+
+Gnocchi vs OpenTSDB
+~~~~~~~~~~~~~~~~~~~
+
+`OpenTSDB <http://opentsdb.net/>`_ is a distributed time series database that
+uses `Hadoop <http://hadoop.apache.org/>`_ and `HBase
+<http://hbase.apache.org/>`_ to store its data. That makes it easy to scale
+horizontally. However, its querying features are rather simple.
+
+In comparison, Gnocchi offers a proper query language with more features. The
+usage of Hadoop might be a show-stopper for many, as it is quite heavy to
+deploy and operate.
+
+Gnocchi vs Graphite
+~~~~~~~~~~~~~~~~~~~
+
+`Graphite <http://graphite.readthedocs.org/en/latest/>`_ is essentially a
+metric storage system built on flat files (Whisper), and it focuses on
+rendering those time series. Each stored time series is composed of points
+recorded at regular intervals, relative to the current date and time.
+
+In comparison, Gnocchi offers much better scalability, a better storage
+format, and no dependency on the current date and time.
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/amqp1d.rst b/doc/source/amqp1d.rst
new file mode 100644
index 0000000000000000000000000000000000000000..200cbf748bfd728c9c607361e1778857a1dd5bb4
--- /dev/null
+++ b/doc/source/amqp1d.rst
@@ -0,0 +1,37 @@
+=====================
+AMQP 1.0 Daemon Usage
+=====================
+
+Gnocchi provides a daemon, `gnocchi-amqp1d`, compatible with the `AMQP 1.0`_
+(Advanced Message Queuing Protocol 1.0, ISO/IEC 19464) protocol, which can
+listen to |metrics| sent over the network by the `collectd`_ write plugin
+named `amqp1`_.
+
+.. _`amqp1`: https://github.com/collectd/collectd/blob/master/src/amqp1.c
+.. _`collectd`: https://github.com/collectd/collectd
+.. _`AMQP 1.0`: https://www.amqp.org/resources/specifications
+
+The `amqp1` collectd write plugin enables collectd output to be sent to an
+AMQP 1.0 intermediary such as the Apache Qpid Dispatch Router or the Apache
+ActiveMQ Artemis broker.
+
+How It Works
+============
+To enable amqp1d support in Gnocchi, you need to configure the `[amqp1d]`
+option group in the configuration file. You need to provide: the host, port
+and topic name that the amqp1 collectd plugin publishes |metrics| to; a
+|resource| name that will be used as the main |resource|, to which all the
+|metrics| will be attached with the host name as an attribute; a user and
+project id that will be associated with the |resource| and |metrics|; and an
+|archive policy| name that will be used to create the |metrics|.
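+
+As a minimal sketch, such a configuration could look like the following. The
+`url` value mirrors the example given on the collectd documentation page; the
+remaining option names are illustrative assumptions only, so refer to the
+generated `gnocchi.conf` sample for the authoritative names::
+
+  [amqp1d]
+  # AMQP 1.0 host, port and topic the amqp1 collectd plugin publishes to
+  url = localhost:5672/u/collectd/telemetry
+  # Illustrative placeholders for the resource name, user/project ids and
+  # archive policy described above
+  resource_name = collectd-amqp1d
+  user_id = <user-uuid>
+  project_id = <project-uuid>
+  archive_policy_name = low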
+
+All the |metrics| will be created dynamically as they are sent to
+`gnocchi-amqp1d`, and attached to the |resource| you configured, with the
+source host name as an attribute.
+
+To use it, Gnocchi must be installed with the `amqp1` flavor::
+
+  pip install -e .[postgresql,file,amqp1]
+
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/client.rst b/doc/source/client.rst
new file mode 100644
index 0000000000000000000000000000000000000000..4be6893cdaa526f62197918a5aa7fa1a366a21f5
--- /dev/null
+++ b/doc/source/client.rst
@@ -0,0 +1,29 @@
+========
+ Client
+========
+
+Python
+------
+
+Gnocchi officially provides a Python client and SDK, which can be installed
+using *pip*::
+
+  pip install gnocchiclient
+
+This package provides the `gnocchi` command line tool that can be used to send
+requests to Gnocchi. You can read the `full documentation online`_.
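+
+For example, assuming Gnocchi is reachable with your current credentials, the
+following commands list the configured archive policies and the known
+metrics::
+
+  gnocchi archive-policy list
+  gnocchi metric list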
+
+Go
+--
+
+There is an open source Go implementation of the SDK, provided by the
+`Gophercloud`_ project. It can be installed using *go get*::
+
+  go get github.com/gophercloud/utils/gnocchi
+
+This package provides the Go SDK only. You can read the `godoc reference`_.
+
+.. _full documentation online: http://gnocchi.xyz/gnocchiclient
+.. _Gophercloud: https://github.com/gophercloud
+.. _godoc reference: https://godoc.org/github.com/gophercloud/utils
diff --git a/doc/source/collectd.rst b/doc/source/collectd.rst
new file mode 100644
index 0000000000000000000000000000000000000000..6c80bd805db458732b3fe96718f4c5d9d2814594
--- /dev/null
+++ b/doc/source/collectd.rst
@@ -0,0 +1,47 @@
+==================
+ Collectd support
+==================
+
+`Collectd`_ can use Gnocchi to store its data through a plugin called
+`collectd-gnocchi` or via the `gnocchi-amqp1d` daemon.
+
+
+collectd-gnocchi
+================
+
+It can be installed with *pip*::
+
+     pip install collectd-gnocchi
+
+`Sources and documentation`_ are also available.
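+
+Once installed, collectd must also be configured to load the plugin through
+its Python interface. The following is a rough sketch assuming a Gnocchi API
+reachable locally; the exact module options are described in the sources and
+documentation linked above::
+
+    <LoadPlugin python>
+      Globals true
+    </LoadPlugin>
+
+    <Plugin python>
+      Import "collectd_gnocchi"
+      <Module collectd_gnocchi>
+        # Assumed endpoint of a local Gnocchi installation
+        Endpoint "http://localhost:8041"
+      </Module>
+    </Plugin>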
+
+
+gnocchi-amqp1d
+==============
+
+You first need to set up the Collectd `amqp1 write plugin`::
+
+    <Plugin amqp1>
+      <Transport "name">
+        Host "localhost"
+        Port "5672"
+        Address "collectd"
+        <Instance "telemetry">
+            Format JSON
+        </Instance>
+      </Transport>
+    </Plugin>
+
+
+Then configure the AMQP 1.0 URL in `gnocchi.conf`::
+
+    [amqp1d]
+    url = localhost:5672/u/collectd/telemetry
+
+
+.. _`Collectd`: https://www.collectd.org/
+.. _`Sources and documentation`: https://github.com/gnocchixyz/collectd-gnocchi
+.. _`amqp1 write plugin`: https://github.com/ajssmith/collectd/blob/d4cc32c4dddb01081c49a67d13ab4a737cda0ed0/src/collectd.conf.pod#plugin-amqp1
+.. TODO(sileht): Change the link when
+   https://collectd.org/documentation/manpages/collectd.conf.5.shtml will be
+   up2date
diff --git a/doc/source/comparison-table.rst b/doc/source/comparison-table.rst
new file mode 100644
index 0000000000000000000000000000000000000000..82d3f1e80c7e22c56bc26d9030f33494d560a595
--- /dev/null
+++ b/doc/source/comparison-table.rst
@@ -0,0 +1,21 @@
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Features         | Gnocchi                                                           | Prometheus | InfluxDB         | OpenTSDB | Graphite  |
++==================+===================================================================+============+==================+==========+===========+
+| Metric polling   | No                                                                | Yes        | No               | No       | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Resource history | Yes                                                               | No         | No               | No       | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Multi-tenant     | Yes                                                               | No         | No               | No       | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Query interface  | REST API                                                          | REST API   | HTTP             | TCP      | None      |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Highly available | Yes                                                               | No         | With *Relay*     | Yes      | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Scalable         | Yes                                                               | No         | Commercial only  | Yes      | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Alerting         | No (`roadmap <https://github.com/gnocchixyz/gnocchi/issues/71>`_) | Yes        | With *Kapacitor* | No       | No        |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| Grafana support  | Yes                                                               | Yes        | Yes              | Yes      | Yes       |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
+| collectd support | Yes                                                               | Yes        | Yes              | Yes      | Yes       |
++------------------+-------------------------------------------------------------------+------------+------------------+----------+-----------+
diff --git a/doc/source/conf.py b/doc/source/conf.py
new file mode 100644
index 0000000000000000000000000000000000000000..841ab0d4a571744b4c89c5d216e86a5bc8fee25f
--- /dev/null
+++ b/doc/source/conf.py
@@ -0,0 +1,192 @@
+# -*- coding: utf-8 -*-
+#
+# Gnocchi documentation build configuration file
+#
+# This file is execfile()d with the current directory set to its containing dir.
+#
+# Note that not all possible configuration values are present in this
+# autogenerated file.
+#
+# All configuration values have a default; values that are commented out
+# serve to show the default.
+
+import datetime
+import os
+import pkg_resources
+
+# If extensions (or modules to document with autodoc) are in another directory,
+# add these directories to sys.path here. If the directory is relative to the
+# documentation root, use os.path.abspath to make it absolute, like shown here.
+#sys.path.insert(0, os.path.abspath('.'))
+
+# -- General configuration -----------------------------------------------------
+
+# If your documentation needs a minimal Sphinx version, state it here.
+#needs_sphinx = '1.0'
+
+# Add any Sphinx extension module names here, as strings. They can be extensions
+# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
+extensions = [
+    'gnocchi.gendoc',
+    'sphinxcontrib.httpdomain',
+    'sphinx.ext.autodoc',
+    'reno.sphinxext',
+]
+
+# Add any paths that contain templates here, relative to this directory.
+templates_path = ['_templates']
+
+# The suffix of source filenames.
+source_suffix = '.rst'
+
+# The encoding of source files.
+#source_encoding = 'utf-8-sig'
+
+# The master toctree document.
+master_doc = 'index'
+
+# General information about the project.
+project = u'Gnocchi'
+copyright = u'%s, The Gnocchi Developers' % datetime.date.today().year
+
+# The version info for the project you're documenting, acts as replacement for
+# |version| and |release|, also used in various other places throughout the
+# built documents.
+#
+# The short X.Y version.
+release = pkg_resources.get_distribution('gnocchi').version
+
+# The language for content autogenerated by Sphinx. Refer to documentation
+# for a list of supported languages.
+#language = None
+
+# There are two options for replacing |today|: either, you set today to some
+# non-false value, then it is used:
+#today = ''
+# Else, today_fmt is used as the format for a strftime call.
+#today_fmt = '%B %d, %Y'
+
+# List of patterns, relative to source directory, that match files and
+# directories to ignore when looking for source files.
+exclude_patterns = []
+
+# The reST default role (used for this markup: `text`) to use for all documents.
+#default_role = None
+
+# If true, '()' will be appended to :func: etc. cross-reference text.
+#add_function_parentheses = True
+
+# If true, the current module name will be prepended to all description
+# unit titles (such as .. function::).
+#add_module_names = True
+
+# If true, sectionauthor and moduleauthor directives will be shown in the
+# output. They are ignored by default.
+#show_authors = False
+
+# The name of the Pygments (syntax highlighting) style to use.
+pygments_style = 'sphinx'
+
+# A list of ignored prefixes for module index sorting.
+#modindex_common_prefix = []
+
+
+# -- Options for HTML output ---------------------------------------------------
+
+# The theme to use for HTML and HTML Help pages.  See the documentation for
+# a list of builtin themes.
+html_theme = 'sphinx_rtd_theme'
+
+# Theme options are theme-specific and customize the look and feel of a theme
+# further.  For a list of options available for each theme, see the
+# documentation.
+import sphinx_rtd_theme
+html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
+
+
+# The name for this set of Sphinx documents.  If None, it defaults to
+# "<project> v<release> documentation".
+#html_title = None
+
+# A shorter title for the navigation bar.  Default is the same as html_title.
+#html_short_title = None
+
+# The name of an image file (relative to this directory) to place at the top
+# of the sidebar.
+html_logo = '_static/gnocchi-logo.png'
+
+# The name of an image file (within the static path) to use as favicon of the
+# docs.  This file should be a Windows icon file (.ico) being 16x16 or 32x32
+# pixels large.
+html_favicon = '_static/gnocchi-icon.ico'
+
+# Add any paths that contain custom static files (such as style sheets) here,
+# relative to this directory. They are copied after the builtin static files,
+# so a file named "default.css" will overwrite the builtin "default.css".
+html_static_path = ['_static']
+
+# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
+# using the given strftime format.
+#html_last_updated_fmt = '%b %d, %Y'
+
+# If true, SmartyPants will be used to convert quotes and dashes to
+# typographically correct entities.
+#html_use_smartypants = True
+
+# Custom sidebar templates, maps document names to template names.
+#html_sidebars = {}
+
+# Additional templates that should be rendered to pages, maps page names to
+# template names.
+#html_additional_pages = {}
+
+# If false, no module index is generated.
+#html_domain_indices = True
+
+# If false, no index is generated.
+#html_use_index = True
+
+# If true, the index is split into individual pages for each letter.
+#html_split_index = False
+
+# If true, links to the reST sources are added to the pages.
+#html_show_sourcelink = True
+
+# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
+#html_show_sphinx = True
+
+# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
+#html_show_copyright = True
+
+# If true, an OpenSearch description file will be output, and all pages will
+# contain a <link> tag referring to it.  The value of this option must be the
+# base URL from which the finished HTML is served.
+#html_use_opensearch = ''
+
+# This is the file name suffix for HTML files (e.g. ".xhtml").
+#html_file_suffix = None
+
+# Output file base name for HTML help builder.
+htmlhelp_basename = 'gnocchidoc'
+
+html_theme_options = {
+    'logo_only': True,
+}
+
+# Multiversion docs
+scv_sort = ('semver',)
+scv_show_banner = True
+scv_banner_main_ref = 'stable/4.3'
+scv_priority = 'branches'
+scv_whitelist_branches = ('master', r'^stable/([3-9]\.)')
+scv_whitelist_tags = ("^$",)
+
+here = os.path.dirname(os.path.realpath(__file__))
+html_static_path_abs = ",".join([os.path.join(here, p) for p in html_static_path])
+# NOTE(sileht): Override some conf for old version.
+scv_overflow = ("-D", "html_theme=sphinx_rtd_theme",
+                "-D", "html_theme_options.logo_only=True",
+                "-D", "html_logo=_static/gnocchi-logo.png",
+                "-D", "html_favicon=_static/gnocchi-icon.ico",
+                "-D", "html_static_path=%s" % html_static_path_abs,
+                "-W")
diff --git a/doc/source/contributing.rst b/doc/source/contributing.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1cfc0b76814c0fd187360c7b2e4c93752fd3b101
--- /dev/null
+++ b/doc/source/contributing.rst
@@ -0,0 +1,81 @@
+==============
+ Contributing
+==============
+
+Issues
+------
+
+We use the `GitHub issue tracker`_ for reporting issues. Before opening a new
+issue, ensure the bug has not already been reported by first searching the
+issue tracker.
+
+If you're unable to find an open issue addressing the problem, open a new one.
+Be sure to include a title and clear description, as much relevant information
+as possible, and a code sample or an executable test case demonstrating the
+expected behavior that is not occurring.
+
+If you are looking to contribute for the first time, some issues are tagged
+with the "`good first issue`_" label and are easy targets for newcomers.
+
+.. _`GitHub issue tracker`: https://github.com/gnocchixyz/gnocchi/issues
+.. _`good first issue`: https://github.com/gnocchixyz/gnocchi/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22
+
+
+Pull-requests
+-------------
+
+When opening a pull-request, make sure that:
+
+* You write a comprehensive summary of your problem and the solution you
+  implemented.
+* If you update or fix your pull-request, make sure the commits are atomic. Do
+  not include fix-up commits in your history; rewrite it properly using e.g.
+  `git rebase --interactive` and/or `git commit --amend`.
+* We recommend using `git pull-request`_ to send your pull-requests.
+
+All sent pull-requests are checked using `Travis-CI`_, which is in charge of
+running the test suites. Different scenarios are run: `PEP 8`_ compliance
+tests, upgrade tests, and unit and functional tests.
+
+All pull-requests must be reviewed by `members of the Gnocchi project`_.
+
+When a pull-request is approved by at least two of the members and when
+Travis-CI confirms that all the tests run fine, the patch will be merged.
+
+The Gnocchi project leverages `Mergify`_ in order to schedule the merge of the
+different pull-requests. Mergify is in charge of making sure that the
+pull-request is up-to-date with respect to the `master` branch and that the
+tests pass. Pull-requests are always merged in a serialized manner in order to
+make sure that no pull-request can break another one.
+
+`Gnocchi's Mergify dashboard`_ shows the current status of the merge queue.
+
+.. _`git pull-request`: https://github.com/jd/git-pull-request
+.. _`PEP 8`: https://www.python.org/dev/peps/pep-0008/
+.. _`Travis-CI`: http://travis-ci.org
+.. _`members of the Gnocchi project`: https://github.com/orgs/gnocchixyz/people
+.. _`Mergify`: https://mergify.io
+.. _`Gnocchi's Mergify dashboard`: https://gh.mergify.io/gnocchixyz
+
+
+Running the Tests
+-----------------
+
+Tests are run using `tox <https://tox.readthedocs.io/en/latest/>`_. Tox creates
+a virtual environment for each test environment, so make sure you are using an
+up-to-date version of `virtualenv <https://pypi.python.org/pypi/virtualenv>`_.
+
+Different test environments and configurations can be found by running the
+``tox -l`` command. For example, to run tests with Python 2.7, PostgreSQL as
+indexer, and file as storage backend:
+
+::
+
+    tox -e py27-postgresql-file
+
+
+To run tests with Python 3.5, MySQL as indexer, and Ceph as storage backend:
+
+::
+
+    tox -e py35-mysql-ceph
diff --git a/doc/source/glossary.rst b/doc/source/glossary.rst
new file mode 100644
index 0000000000000000000000000000000000000000..7f26d2fc04f8dab634b0f12a22a5e7723c3fb534
--- /dev/null
+++ b/doc/source/glossary.rst
@@ -0,0 +1,48 @@
+========
+Glossary
+========
+
+.. glossary::
+   :sorted:
+
+   Resource
+     An entity representing anything in your infrastructure that you will
+     associate |metric|\ (s) with. It is identified by a unique ID and can
+     contain attributes.
+
+   Metric
+     An entity storing |aggregates|, identified by a UUID. It can be attached
+     to a |resource| using a name. How a metric stores its |aggregates| is
+     defined by the |archive policy| it is associated with.
+
+   Measure
+     An incoming datapoint tuple sent to Gnocchi by the API. It is composed
+     of a timestamp and a value.
+
+   Archive policy
+     An |aggregate| storage policy attached to a |metric|. It determines how
+     long |aggregates| will be kept in a |metric| and
+     :term:`how they will be aggregated<aggregation method>`\ .
+
+   Granularity
+     The time between two |aggregates| in an aggregated |time series| of a
+     |metric|.
+
+   Time series
+     A list of |aggregates| ordered by time.
+
+   Aggregation method
+     Function used to aggregate multiple |measures| into an |aggregate|. For
+     example, the `min` aggregation method will aggregate the values of
+     different |measures| to the minimum value of all the |measures| in the
+     time range.
+
+   Aggregate
+     A datapoint tuple generated from several |measures| according to the
+     |archive policy| definition. It is composed of a timestamp and a value.
+
+   Timespan
+     The time period for which a |metric| keeps its |aggregates|. It is used in
+     the context of |archive policy|.
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/grafana.rst b/doc/source/grafana.rst
new file mode 100644
index 0000000000000000000000000000000000000000..93b1178cd67e7d88fa8e2646f746fbe9f1f4473a
--- /dev/null
+++ b/doc/source/grafana.rst
@@ -0,0 +1,57 @@
+=================
+Grafana support
+=================
+
+`Grafana`_ has support for Gnocchi through a plugin. It can be installed with
+grafana-cli::
+
+     sudo grafana-cli plugins install gnocchixyz-gnocchi-datasource
+
+`Source`_ and `Documentation`_ are also available.
+
+Grafana has two modes of operation: proxy or direct. In proxy mode, your
+browser only communicates with Grafana, and Grafana communicates with Gnocchi.
+In direct mode, your browser communicates with Grafana, Gnocchi, and possibly
+Keystone.
+
+Picking the right mode depends on whether your Gnocchi server is reachable by
+your browser and/or by your Grafana server.
+
+In order to use Gnocchi with Grafana in proxy mode, you just need to:
+
+1. Install Grafana and its Gnocchi plugin
+2. Configure a new datasource in Grafana with the Gnocchi URL.
+   If you are using the Keystone middleware for authentication, you can also
+   provide an authentication token.
+
+In order to use Gnocchi with Grafana in direct mode, you need to do a few more
+steps:
+
+1. Configure the CORS middleware in `gnocchi.conf` to allow requests from
+   Grafana::
+
+     [cors]
+     allowed_origin = http://grafana.fqdn
+
+2. Configure the CORS middleware in Keystone to allow requests from Grafana
+   too::
+
+     [cors]
+     allowed_origin = http://grafana.fqdn
+
+3. Configure a new datasource in Grafana with the Keystone URL, a user, a
+   project and a password. Your browser will query Keystone for a token, and
+   then query Gnocchi based on what Grafana needs.
+
+.. note::
+
+    The `allowed_origin` format is `<protocol>://<host>[:<port>]`: no path,
+    no query string, and no trailing `/`.
+
+.. image:: _static/grafana-screenshot.png
+  :align: center
+  :alt: Grafana screenshot
+
+.. _`Grafana`: http://grafana.org
+.. _`Documentation`: https://grafana.net/plugins/gnocchixyz-gnocchi-datasource
+.. _`Source`: https://github.com/gnocchixyz/grafana-gnocchi-datasource
+.. _`CORS`: https://en.wikipedia.org/wiki/Cross-origin_resource_sharing
diff --git a/doc/source/include/term-substitution.rst b/doc/source/include/term-substitution.rst
new file mode 100644
index 0000000000000000000000000000000000000000..37b8020bf2da11bdbf99520d0cda15facc0f6aab
--- /dev/null
+++ b/doc/source/include/term-substitution.rst
@@ -0,0 +1,24 @@
+.. |resource| replace:: :term:`resource<resource>`
+.. |resources| replace:: :term:`resources<resource>`
+
+.. |metric| replace:: :term:`metric<metric>`
+.. |metrics| replace:: :term:`metrics<metric>`
+
+.. |measure| replace:: :term:`measure<measure>`
+.. |measures| replace:: :term:`measures<measure>`
+
+.. |archive policy| replace:: :term:`archive policy<archive policy>`
+.. |archive policies| replace:: :term:`archive policies<archive policy>`
+
+.. |granularity| replace:: :term:`granularity<granularity>`
+.. |granularities| replace:: :term:`granularities<granularity>`
+
+.. |time series| replace:: :term:`time series<time series>`
+
+.. |aggregation method| replace:: :term:`aggregation method<aggregation method>`
+.. |aggregation methods| replace:: :term:`aggregation methods<aggregation method>`
+
+.. |aggregate| replace:: :term:`aggregate<aggregate>`
+.. |aggregates| replace:: :term:`aggregates<aggregate>`
+
+.. |timespan| replace:: :term:`timespan<timespan>`
diff --git a/doc/source/index.rst b/doc/source/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..63934d8e331ee7f554355cc69fad47a7214d15e4
--- /dev/null
+++ b/doc/source/index.rst
@@ -0,0 +1,62 @@
+==================================
+Gnocchi – Metric as a Service
+==================================
+
+.. include:: ../../README.rst
+   :start-line: 13
+
+Key Features
+------------
+
+Gnocchi's main features are:
+
+- HTTP REST interface
+- Horizontal scalability
+- Metric aggregation
+- Measures batching support
+- Archiving policy
+- Metric value search
+- Structured resources
+- Resource history
+- Queryable resource indexer
+- Multi-tenant
+- Grafana support
+- Prometheus Remote Write support
+- Nagios/Icinga support
+- Statsd protocol support
+- Collectd plugin support
+- InfluxDB line protocol ingestion support
+- AMQP 1.0 protocol support
+
+Community
+---------
+You can join Gnocchi's community via the following channels:
+
+- Source code: https://github.com/gnocchixyz/gnocchi
+- Bug tracker: https://github.com/gnocchixyz/gnocchi/issues
+- IRC: #gnocchi on `Freenode <https://freenode.net>`_
+
+Documentation
+-------------
+
+.. toctree::
+   :maxdepth: 1
+
+   intro
+   install
+   operating
+   client
+   rest
+   statsd
+   amqp1d
+   grafana
+   prometheus
+   influxdb
+   nagios
+   collectd
+   alternatives
+   glossary
+   releasenotes/index.rst
+   contributing
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/influxdb.rst b/doc/source/influxdb.rst
new file mode 100644
index 0000000000000000000000000000000000000000..87e3deb4f58e4eee95d196b47c9917e300b2df03
--- /dev/null
+++ b/doc/source/influxdb.rst
@@ -0,0 +1,52 @@
+============================
+ InfluxDB ingestion support
+============================
+
+Gnocchi implements part of the InfluxDB REST API. That allows tools that
+usually write to InfluxDB, such as `Telegraf`_, to write directly to Gnocchi
+instead.
+
+The endpoint is available at `/v1/influxdb`. It supports:
+
+* `GET /v1/influxdb/ping`
+* `POST /v1/influxdb/query` where the only query that is handled is `CREATE
+  DATABASE <db>`. That will create a new resource type named after the database
+  handle.
+* `POST /v1/influxdb/write?db=<db>`. The `db` parameter should be an existing
+  resource type that does not require any attributes to be set. The body should
+  follow the `InfluxDB line protocol`_.
+
+In order to map InfluxDB data to the Gnocchi data model, the following
+transformations happen when writing metrics:
+
+* For each measure sent, one of the tag values is used as the original
+  resource id. By default the `host` tag is used. This can be overridden by
+  passing the `X-Gnocchi-InfluxDB-Tag-Resource-ID` HTTP header.
+
+* The metric names associated with the resource have the format:
+  `<measurement>.<field_key>[@<tag_key>=<tag_value>,…]`. The tags are sorted
+  by key, as in the sketch below.
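+
+As an illustrative sketch (the measurement, tags, and field below are made-up
+examples), a line protocol entry such as::
+
+    cpu,host=web1,core=cpu0 usage_idle=98.1
+
+would be attached to the resource whose original resource id is `web1` and
+stored in a metric named `cpu.usage_idle@core=cpu0` (assuming the `host` tag,
+being used as the resource id, is not repeated in the metric name).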
+
+
+Telegraf configuration
+======================
+
+In order to use `Telegraf`_ with Gnocchi, you can use the following
+configuration example::
+
+  [[outputs.influxdb]]
+    urls = ["http://admin:localhost:8041/v1/influxdb"]
+    http_headers = {"X-Gnocchi-InfluxDB-Tag-Resource-ID" = "host"}
+
+
+Gnocchi configuration
+=====================
+
+The default Gnocchi API server does not support the chunked encoding required
+by the InfluxDB-compatible endpoint. To enable chunked encoding, you must put
+a real HTTP server (Apache/NGINX/...) in front of the Gnocchi API and set
+`[api]/uwsgi_mode = http-socket`.
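+
+For example, the corresponding `gnocchi.conf` snippet would be::
+
+  [api]
+  uwsgi_mode = http-socket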
+
+
+.. _`Telegraf`: https://github.com/influxdata/telegraf
+.. _`InfluxDB line protocol`: https://docs.influxdata.com/influxdb/v1.3/write_protocols/line_protocol_reference/
diff --git a/doc/source/install.rst b/doc/source/install.rst
new file mode 100644
index 0000000000000000000000000000000000000000..079df6d7985ba7dc97ef28d2c26239a299b6368d
--- /dev/null
+++ b/doc/source/install.rst
@@ -0,0 +1,216 @@
+==============
+ Installation
+==============
+
+.. _installation:
+
+Installation
+============
+
+Gnocchi can be installed using `pip`. Depending on the drivers and features you
+want to use (see :doc:`intro` for which driver to pick), you need to specify
+the extra variants you need. For example::
+
+  pip install gnocchi[postgresql,ceph,keystone]
+
+This would install PostgreSQL support for the indexer, Ceph support for
+storage, and Keystone support for authentication and authorization.
+
+The list of variants available is:
+
+* `keystone` – provides Keystone authentication support
+* `mysql` - provides MySQL indexer support
+* `postgresql` – provides PostgreSQL indexer support
+* `swift` – provides OpenStack Swift storage support
+* `s3` – provides Amazon S3 storage support
+* `ceph` – provides Ceph (>= 0.80) storage support
+* `ceph_alternative` – provides Ceph (>= 12.2.0) storage support
+* `redis` – provides Redis storage support
+* `prometheus` – provides Prometheus Remote Write support
+* `amqp1` – provides AMQP 1.0 support
+* `doc` – documentation building support
+* `test` – unit and functional tests support
+
+To install Gnocchi from source, run the standard Python installation
+procedure::
+
+  pip install -e .
+
+Again, depending on the drivers and features you want to use, you need to
+install extra variants using, for example::
+
+  pip install -e .[postgresql,ceph,ceph_alternative]
+
+
+Ceph requirements
+-----------------
+
+The Ceph driver needs to have a Ceph user and a pool already created. They can
+be created for example with:
+
+::
+
+    ceph osd pool create metrics 8 8
+    ceph auth get-or-create client.gnocchi mon "allow r" osd "allow rwx pool=metrics"
+
+
+Gnocchi leverages some *librados* features (omap, async, operation context)
+available in the Python binding only since *python-rados* >= 12.2.0. To handle
+this, Gnocchi uses the *cradox* Python library, which has exactly the same API
+but works with Ceph >= 0.80.0.
+
+If Ceph and python-rados are >= 12.2.0, the cradox Python library becomes
+optional but is still recommended.
+
+
+Configuration
+=============
+
+Configuration file
+-------------------
+
+No config file is provided with the source code; it will be created during the
+installation. If no configuration file was installed, one can easily be
+created by running:
+
+::
+
+    gnocchi-config-generator > /path/to/gnocchi.conf
+
+By default, Gnocchi looks for its configuration file in the following places,
+in order:
+
+* ``~/.gnocchi/gnocchi.conf``
+* ``~/gnocchi.conf``
+* ``/etc/gnocchi/gnocchi.conf``
+* ``/etc/gnocchi.conf``
+* ``~/gnocchi/gnocchi.conf.d``
+* ``~/gnocchi.conf.d``
+* ``/etc/gnocchi/gnocchi.conf.d``
+* ``/etc/gnocchi.conf.d``
+
+Configure Gnocchi by editing the appropriate file.
+
+The configuration file should be pretty explicit, but here are some of the base
+options you want to change and configure:
+
++---------------------+---------------------------------------------------+
+| Option name         | Help                                              |
++=====================+===================================================+
+| storage.driver      | The storage driver for |metrics|.                 |
++---------------------+---------------------------------------------------+
+| indexer.url         | URL to your indexer.                              |
++---------------------+---------------------------------------------------+
+| storage.file_*      | Configuration options to store files              |
+|                     | if you use the file storage driver.               |
++---------------------+---------------------------------------------------+
+| storage.swift_*     | Configuration options to access Swift             |
+|                     | if you use the Swift storage driver.              |
++---------------------+---------------------------------------------------+
+| storage.ceph_*      | Configuration options to access Ceph              |
+|                     | if you use the Ceph storage driver.               |
++---------------------+---------------------------------------------------+
+| storage.s3_*        | Configuration options to access S3                |
+|                     | if you use the S3 storage driver.                 |
++---------------------+---------------------------------------------------+
+| storage.redis_*     | Configuration options to access Redis             |
+|                     | if you use the Redis storage driver.              |
++---------------------+---------------------------------------------------+
+
+The same options are also available as `incoming.<drivername>_*` for
+configuring the incoming storage. If no incoming storage is set, the default is
+to use the configured storage driver.
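+
+As a minimal sketch (the indexer URL and base path are placeholder values to
+adapt to your deployment), a configuration using the file storage driver could
+look like::
+
+  [indexer]
+  url = postgresql://gnocchi:secret@localhost/gnocchi
+
+  [storage]
+  driver = file
+  file_basepath = /var/lib/gnocchi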
+
+Configuring authentication
+-----------------------------
+
+The API server supports different authentication methods:
+
+* `basic` (the default), which uses the standard HTTP `Authorization` header.
+  By default, only the user `admin` has some special permissions (e.g.
+  creating archive policies). The password is not used.
+
+* `keystone` to use `OpenStack Keystone`_. If you successfully installed the
+  `keystone` flavor using `pip` (see :ref:`installation`), you can set
+  `api.auth_mode` to `keystone` to enable Keystone authentication.
+  You also need to configure the `keystone_authtoken` section in `gnocchi.conf`
+  with the proper value so Gnocchi is able to validate tokens.
+
+* `remoteuser` where Gnocchi will look at the HTTP server `REMOTE_USER`
+  environment variable to get the username. Then the permissions model is the
+  same as the `basic` mode.
+
+.. _`OpenStack Keystone`: http://launchpad.net/keystone
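+
+For example, enabling Keystone authentication in `gnocchi.conf` could look
+like this sketch (the `keystone_authtoken` values are placeholders to adapt to
+your deployment)::
+
+  [api]
+  auth_mode = keystone
+
+  [keystone_authtoken]
+  auth_type = password
+  auth_url = http://keystone:5000/v3
+  username = gnocchi
+  password = secret
+  project_name = service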
+
+Initialization
+==============
+
+Once you have configured Gnocchi properly you need to initialize the indexer
+and storage:
+
+::
+
+    gnocchi-upgrade
+
+Upgrading
+=========
+In order to upgrade from a previous version of Gnocchi, you need to make sure
+that your indexer and storage are properly upgraded.
+
+.. warning::
+
+   Upgrading is only supported from one major version to the next, or between
+   minor versions, e.g.:
+
+   - version 2.0 to version 2.1 or 2.2 is supported
+
+   - version 2.1 to version 3.0 is supported
+
+   - version 2 to version 4 is **not** supported.
+
+Run the following:
+
+1. Stop the old version of Gnocchi API server and `gnocchi-statsd` daemon
+
+2. Stop the old version of `gnocchi-metricd` daemon
+
+.. warning::
+
+   Data in backlog is never migrated between versions. Ensure the backlog is
+   empty before any upgrade to ensure data is not lost.
+
+3. Install the new version of Gnocchi
+
+4. Run `gnocchi-upgrade`.
+
+   This will take from a few minutes to several hours depending on the size of
+   your index and storage.
+
+5. Start the new Gnocchi API server, `gnocchi-metricd`
+   and `gnocchi-statsd` daemons
+
+
+Installation using Docker
+=========================
+The `gnocchi-docker repository`_ contains the needed Dockerfile and script to
+build a Docker image containing the latest Gnocchi version (fetched from
+PyPI). It also provides an example docker-compose file to run a full Gnocchi
+deployment (indexer and storage included).
+
+.. _gnocchi-docker repository: https://github.com/gnocchixyz/gnocchi-docker
+
+Installation using OpenShift
+============================
+The `gnocchi-openshift repository`_ contains the needed Dockerfile and script
+to build a Docker image containing the latest Gnocchi version (fetched from
+PyPI).
+
+.. _gnocchi-openshift repository: https://github.com/gnocchixyz/gnocchi-openshift
+
+
+Gnocchi Configuration sample
+============================
+
+.. literalinclude:: gnocchi.conf.sample
+
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/intro.rst b/doc/source/intro.rst
new file mode 100644
index 0000000000000000000000000000000000000000..61997840f440904aee3072ef9197da1c546b7c1c
--- /dev/null
+++ b/doc/source/intro.rst
@@ -0,0 +1,101 @@
+Getting started
+---------------
+
+Architecture overview
+~~~~~~~~~~~~~~~~~~~~~
+
+Gnocchi consists of several services: an HTTP REST API (see :doc:`rest`), an
+optional statsd-compatible daemon (see :doc:`statsd`), and an asynchronous
+processing daemon (named `gnocchi-metricd`). Data is received via the HTTP
+REST API or the statsd daemon. `gnocchi-metricd` performs operations
+(statistics computing, |metric| cleanup, etc.) on the received data in the
+background.
+
+.. image:: _static/architecture.svg
+  :align: center
+  :width: 95%
+  :alt: Gnocchi architecture
+
+.. image source: https://docs.google.com/drawings/d/1aHV86TPNFt7FlCLEjsTvV9FWoFYxXCaQOzfg7NdXVwM/edit?usp=sharing
+
+All those services are stateless and therefore horizontally scalable. Contrary
+to many time series databases, there is no limit on the number of
+`gnocchi-metricd` daemons or `gnocchi-api` endpoints that you can run. If your
+load starts to increase, you just need to spawn more daemons to handle the flow
+of new requests. The same applies if you want to handle high-availability
+scenarios: just start more Gnocchi daemons on independent servers.
+
+As you can see on the architecture diagram above, there are three external
+components that Gnocchi needs to work correctly:
+
+- An incoming measure storage
+- An aggregated metric storage
+- An index
+
+Those three parts are provided by drivers. Gnocchi is entirely pluggable and
+offers different options for those services.
+
+Incoming and storage drivers
+++++++++++++++++++++++++++++
+
+Gnocchi can leverage different storage systems for its incoming |measures| and
+aggregated |metrics|, such as:
+
+* File (default)
+* `Ceph`_ (preferred)
+* `OpenStack Swift`_
+* `Amazon S3`_
+* `Redis`_
+
+Depending on the size of your architecture, using the file driver and storing
+your data on a disk might be enough. If you need to scale the number of
+servers with the file driver, you can export and share the data via NFS among
+all Gnocchi processes. Ultimately, the S3, Ceph, and Swift drivers are more
+scalable storage options. Ceph also offers better consistency, and hence is the
+recommended driver.
+
+A typical recommendation for medium to large scale deployments is to use
+`Redis`_ as the incoming measure storage and `Ceph`_ as the aggregate storage.
+
+.. _`OpenStack Swift`: http://docs.openstack.org/developer/swift/
+.. _`Ceph`: https://ceph.com
+.. _`Amazon S3`: https://aws.amazon.com/s3/
+.. _`Redis`: https://redis.io
+
+Indexer driver
+++++++++++++++
+
+You also need a database to index the resources and metrics that Gnocchi will
+handle. The supported drivers are:
+
+* `PostgreSQL`_ (preferred)
+* `MySQL`_ (at least version 5.6.4)
+
+The *indexer* is responsible for storing the index of all |resources|, |archive
+policies| and |metrics|, along with their definitions, types and properties.
+The indexer is also responsible for linking |resources| with |metrics| and for
+the relationships between |resources|.
+
+.. _PostgreSQL: http://postgresql.org
+.. _MySQL: http://mysql.org
+
+
+Understanding aggregation
+~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The way data points are aggregated is configurable on a per-metric basis, using
+an archive policy.
+
+An archive policy defines which aggregations to compute and how many aggregates
+to keep. Gnocchi supports a variety of aggregation methods, such as minimum,
+maximum, average, Nth percentile, standard deviation, etc. Those aggregations
+are computed over a period of time (called granularity) and are kept for a
+defined timespan.
+
+
+Gnocchi uses three different back-ends for storing data: one for storing new
+incoming |measures| (the *incoming* driver), one for storing the |time series|
+|aggregates| (the *storage* driver) and one for indexing the data (the *index*
+driver). By default, the *incoming* driver is configured to use the same value
+as the *storage* driver.
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/nagios.rst b/doc/source/nagios.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f2e693e54b01307431c47fec502be951c34d7099
--- /dev/null
+++ b/doc/source/nagios.rst
@@ -0,0 +1,21 @@
+=====================
+Nagios/Icinga support
+=====================
+
+`Nagios`_ and `Icinga`_ have support for Gnocchi through the Gnocchi-nagios
+service. It can be installed with pip::
+
+     pip install gnocchi-nagios
+
+`Source`_ and `Documentation`_ are also available.
+
+Gnocchi-nagios collects perfdata files generated by `Nagios`_ or `Icinga`_;
+transforms them into Gnocchi |resources|, |metrics| and |measures| format; and
+publishes them to the Gnocchi REST API.
+
+.. _`Nagios`: https://www.nagios.org/
+.. _`Icinga`: https://www.icinga.com/
+.. _`Documentation`: http://gnocchi-nagios.readthedocs.io/en/latest/
+.. _`Source`: https://github.com/sileht/gnocchi-nagios
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/operating.rst b/doc/source/operating.rst
new file mode 100644
index 0000000000000000000000000000000000000000..fed78a3c4b5407e18f17bb03840d130a07b10725
--- /dev/null
+++ b/doc/source/operating.rst
@@ -0,0 +1,361 @@
+===============
+Running Gnocchi
+===============
+
+Once Gnocchi is properly installed, you need to launch it. Simply run the
+HTTP server and the metric daemon:
+
+::
+
+    gnocchi-api
+    gnocchi-metricd
+
+You can run these services as background daemons.
+
+Running API As A WSGI Application
+=================================
+
+To run the Gnocchi API, you can use the provided `gnocchi-api` script. It
+wraps around `uwsgi`, so make sure that `uWSGI`_ is installed. If one Gnocchi
+API server is not enough, you can spawn any number of new API servers to scale
+Gnocchi out, even on different machines.
+
+Since the Gnocchi API tier runs using WSGI, it can alternatively be run using
+`Apache httpd`_ and `mod_wsgi`_, or any other HTTP daemon.
+
+uWSGI
+-----
+
+If you want to deploy using `uWSGI`_ yourself, the following uWSGI
+configuration file can be used as a base::
+
+  [uwsgi]
+  http = localhost:8041
+  # Set the correct path depending on your installation
+  wsgi-file = /usr/local/bin/gnocchi-api
+  master = true
+  die-on-term = true
+  threads = 32
+  # Adjust based on the number of CPU
+  processes = 32
+  enable-threads = true
+  thunder-lock = true
+  plugins = python
+  buffer-size = 65535
+  lazy-apps = true
+  add-header = Connection: close
+
+You should configure the number of processes according to the number of CPUs
+you have, usually around 1.5 × the number of CPUs.
+
+Once written to `/etc/gnocchi/uwsgi.ini`, it can be launched this way::
+
+  uwsgi /etc/gnocchi/uwsgi.ini
+
+Apache mod_wsgi
+---------------
+
+If you want to use Apache httpd `mod_wsgi`_, here's an example configuration
+file::
+
+  <VirtualHost *:8041>
+    WSGIDaemonProcess gnocchi user=gnocchi processes=4 threads=32 display-name=%{GROUP}
+    WSGIProcessGroup gnocchi
+    WSGIScriptAlias / /usr/local/bin/gnocchi-api
+    WSGIPassAuthorization On
+    WSGIApplicationGroup %{GLOBAL}
+
+    <Directory />
+        Require all granted
+    </Directory>
+  </VirtualHost>
+
+
+.. _Apache httpd: http://httpd.apache.org/
+.. _mod_wsgi: https://modwsgi.readthedocs.org/
+.. _uWSGI: https://uwsgi-docs.readthedocs.org/
+
+How to define archive policies
+==============================
+
+The |archive policies| define how the |metrics| are aggregated and how long
+they are stored. Each |archive policy| definition is expressed as the number of
+points over a |timespan|.
+
+If your |archive policy| defines a policy of 10 points with a |granularity| of
+1 second, the |time series| archive will keep up to 10 points, each
+representing an aggregation over 1 second. This means the |time series| will
+at maximum retain 10 seconds of data between the most recent point and the
+oldest point. That does not mean it will be 10 consecutive seconds: there
+might be a gap if data is fed irregularly.
+
+**There is no expiry of data relative to the current timestamp. Data is only
+expired according to timespan.**
+
+Each |archive policy| also defines which |aggregation methods| will be used.
+The default is set to `default_aggregation_methods` which is by default set to
+*mean*, *min*, *max*, *sum*, *std*, *count*.
+
+Therefore, both the |archive policy| and the |granularity| entirely depend on
+your use case. Depending on the usage of your data, you can define several
+|archive policies|. A typical low-grained use case could be::
+
+    1440 points with a granularity of 1 minute = 24 hours
+
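+Using the Gnocchi command-line client, such an |archive policy| could be
+created along these lines (a sketch; the policy name is arbitrary)::
+
+    gnocchi archive-policy create -d granularity:1m,points:1440 one-day
+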
+The worst case scenario for storing compressed data points is 8.04 bytes per
+point, whereas best case scenario can compress up to 0.05 bytes per point.
+Knowing that, it is possible to compute the worst case scenario for storage in
+order to plan for data storage capacity.
+
+An archive policy of 1440 points would need 1440 points × 8.04 bytes = 11.3 KiB
+per |aggregation method|. If you use the 6 standard |aggregation methods|
+proposed by Gnocchi, your |metric| will take up to 6 × 11.3 KiB = 67.8 KiB of
+disk space per metric.
+
+Be aware that the more definitions you set in an |archive policy|, the more
+CPU it will consume. Therefore, creating an |archive policy| with 2
+definitions (e.g. 1 second granularity for 1 day and 1 minute granularity for
+1 month) may consume twice as much CPU as just one definition (e.g. just 1
+second granularity for 1 day).
+
+Default archive policies
+------------------------
+
+By default, 4 |archive policies| are created when calling `gnocchi-upgrade`:
+*bool*, *low*, *medium* and *high*. The names describe both the storage space
+and the CPU usage needs.
+
+The `bool` |archive policy| is designed to store only boolean values (i.e. 0
+and 1). It only stores one data point for each second (using the `last`
+|aggregation method|), with a one year retention period. The maximum optimistic
+storage size is estimated based on the assumption that no values other than 0
+and 1 are sent as |measures|. If other values are sent, the maximum
+pessimistic storage size is taken into account.
+
+- low
+
+  * 5 minutes granularity over 30 days
+  * aggregation methods used: `default_aggregation_methods`
+  * maximum estimated size per metric: 406 KiB
+
+- medium
+
+  * 1 minute granularity over 7 days
+  * 1 hour granularity over 365 days
+  * aggregation methods used: `default_aggregation_methods`
+  * maximum estimated size per metric: 887 KiB
+
+- high
+
+  * 1 second granularity over 1 hour
+  * 1 minute granularity over 1 week
+  * 1 hour granularity over 1 year
+  * aggregation methods used: `default_aggregation_methods`
+  * maximum estimated size per metric: 1 057 KiB
+
+- bool
+
+  * 1 second granularity over 1 year
+  * aggregation methods used: *last*
+  * maximum optimistic size per metric: 1 539 KiB
+  * maximum pessimistic size per metric: 277 172 KiB
+
+How to plan for Gnocchi’s storage
+=================================
+
+Gnocchi uses a custom file format based on its *Carbonara* library. In
+Gnocchi, a |time series| is a collection of points, where a point is a given
+|aggregate| or sample in the lifespan of a |time series|. The storage format
+is compressed using various techniques, therefore the size of a |time series|
+can be estimated based on its **worst** case scenario with the following
+formula::
+
+    number of points × 8 bytes = size in bytes
+
+The number of points you want to keep is usually determined by the following
+formula::
+
+    number of points = timespan ÷ granularity
+
+For example, if you want to keep a year of data with a one minute resolution::
+
+    number of points = (365 days × 24 hours × 60 minutes) ÷ 1 minute
+    number of points = 525 600
+
+Then::
+
+    size in bytes = 525 600 points × 8 bytes = 4 204 800 bytes = 4 106 KiB
+
+This is just for a single aggregated |time series|. If your |archive policy|
+uses the 6 default |aggregation methods| (mean, min, max, sum, std, count) with
+the same "one year, one minute aggregations" resolution, the space used will go
+up to a maximum of 6 × 4.1 MiB = 24.6 MiB.
+
+Metricd
+=======
+
+Metricd is the daemon responsible for processing measures, computing their
+aggregates and storing them into the aggregate storage. It also handles a few
+other cleanup tasks, such as deleting metrics marked for deletion.
+
+Metricd is therefore responsible for most of the CPU usage and I/O work in
+Gnocchi. The archive policy of each metric influences how fast it performs.
+
+In order to process new measures, metricd checks the incoming storage for new
+measures from time to time. The delay between each check can be configured
+by changing the `[metricd]metric_processing_delay` configuration option.
+
+Some incoming drivers (currently only Redis) are able to inform metricd that
+new measures are available for processing. In that case, metricd will not
+respect the `[metricd]metric_processing_delay` parameter and will start
+processing the new measures right away. This behaviour can be disabled by
+turning off the `[metricd]greedy` option.
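+
+For example, both options live in the `[metricd]` section of `gnocchi.conf`
+(the values shown are illustrative)::
+
+  [metricd]
+  metric_processing_delay = 30
+  greedy = true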
+
+How many metricd workers do I need to run
+-----------------------------------------
+
+By default, the `gnocchi-metricd` daemon spawns enough workers to use all your
+CPU power in order to maximize CPU utilisation when computing |metric|
+aggregation. You can use the `gnocchi status` command to query the HTTP API
+and get the cluster status for |metric| processing. It shows the number of
+|metrics| to process, known as the processing backlog for `gnocchi-metricd`.
+As long as this backlog is not continuously increasing, `gnocchi-metricd` is
+able to cope with the amount of |metrics| being sent. If the number of
+|measures| to process is continuously increasing, you will need to (maybe
+temporarily) increase the number of `gnocchi-metricd` daemons. You can run any
+number of metricd daemons on any number of servers.
+
+How to scale measure processing
+-------------------------------
+
+Measurement data pushed to Gnocchi is divided into "sacks" for better
+distribution. Incoming |metrics| are pushed to specific sacks and
+each sack is assigned to one or more `gnocchi-metricd` daemons for
+processing.
+
+The number of sacks should be set based on the number of active
+|metrics| the system will capture. Additionally, the number of sacks
+should be higher than the total number of active `gnocchi-metricd`
+workers.
+
+In general, use the following equation to determine the appropriate `sacks`
+value to set::
+
+   sacks value = number of active metrics / 300
+
+If the estimated number of |metrics| is the absolute maximum, divide
+the value by 500 instead. If the estimated number of active |metrics|
+is conservative and expected to grow, divide the value by 100 instead
+to accommodate growth.
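+
+For example, for a system expected to capture around 60 000 active |metrics|,
+the general equation above gives::
+
+   sacks value = 60 000 / 300 = 200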
+
+How to change the sack size
+----------------------------
+
+In the event your system grows to capture significantly more |metrics|
+than originally anticipated, the number of sacks can be changed to
+maintain good distribution. To avoid any loss of data when modifying
+the number of `sacks`, the value should be changed in the following
+order:
+
+1. Stop all input services (api, statsd).
+
+2. Stop all metricd services once backlog is cleared.
+
+3. Run ``gnocchi-change-sack-size <number of sacks>`` to set new sack
+   size. Note that the sack value can only be changed if the backlog
+   is empty.
+
+4. Restart all gnocchi services (api, statsd, metricd) with the new
+   configuration.
+
+Alternatively, to minimize API downtime:
+
+1. Run gnocchi-upgrade but use a new incoming storage target such as a new
+   ceph pool, file path, etc. Additionally, set |aggregate| storage to a
+   new target as well.
+
+2. Run ``gnocchi-change-sack-size <number of sacks>`` against the new
+   target.
+
+3. Stop all input services (api, statsd).
+
+4. Restart all input services but target the newly created incoming
+   storage.
+
+5. When done clearing backlog from original incoming storage, switch
+   all metricd daemons to target the new incoming storage but maintain
+   original |aggregate| storage.
+
+How to monitor Gnocchi
+======================
+
+The `/v1/status` endpoint of the HTTP API returns various information, such as
+the number of |measures| to process (|measures| backlog), which you can easily
+monitor (see `How many metricd workers do I need to run`_). The Gnocchi client
+can show this output by running `gnocchi status`.
+
+Making sure that the HTTP server and `gnocchi-metricd` daemon are running and
+are not writing anything alarming in their logs is a sign of good health of the
+overall system.
+
+Total |measures| for backlog status may not accurately reflect the number of
+points to be processed when |measures| are submitted via batch.
+
+How to backup and restore Gnocchi
+=================================
+
+In order to be able to recover from an unfortunate event, you need to back up
+both the index and the storage. That means creating a database dump
+(PostgreSQL or MySQL) and doing snapshots or copies of your data storage
+(Ceph, S3, Swift or your file system). The procedure to restore is no more
+complicated than the initial deployment: restore your index and storage
+backups, reinstall Gnocchi if necessary, and restart it.
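+
+As a minimal sketch, assuming a PostgreSQL indexer and the file storage driver
+with its data under `/var/lib/gnocchi`, a backup could be as simple as::
+
+    pg_dump --format=custom gnocchi > gnocchi-index.dump
+    tar czf gnocchi-storage.tar.gz /var/lib/gnocchi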
+
+How to clear Gnocchi data
+=========================
+
+If you ever want to start fresh or need to clean Gnocchi data, this can be
+easily done. You need to clean the measures (incoming), aggregates (storage)
+and indexer data storage.
+
+Once that is done, if you want to re-initialize Gnocchi, you need to call
+`gnocchi-upgrade` so it re-initializes the different drivers.
+
+Index storage
+-------------
+
+Both the MySQL and PostgreSQL drivers use a single database. Delete the
+database. If you want to install Gnocchi again, recreate that database with
+the same name before calling `gnocchi-upgrade`.
+
+Incoming data
+-------------
+
+Depending on the driver you use, the data are stored in different places:
+
+* **Ceph**: delete the `gnocchi-config` object and the objects whose names
+  start with `incoming` in the Ceph pool. Alternatively you can delete the Ceph
+  pool (and recreate it if needed).
+* **OpenStack Swift**: delete the `gnocchi-config` container and containers
+  whose names start with `incoming` in the Swift account.
+* **Redis**: delete the `gnocchi-config` key and the keys whose names start
+  with `incoming`.
+* **File**: delete `${incoming.file_basepath}/tmp` and the directories whose
+  names start with `${incoming.file_basepath}/incoming`.
+* **Amazon S3**: delete the bucket whose name starts with `incoming`.
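+
+For example, with the Redis driver, a sketch of such a cleanup could be::
+
+    redis-cli del gnocchi-config
+    redis-cli --scan --pattern 'incoming*' | xargs redis-cli del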
+
+Storage data
+------------
+
+Depending on the driver you use, the data are stored in different places:
+
+* **Ceph**: delete the objects whose names start with `gnocchi_` in the Ceph
+  pool. Alternatively you can delete the Ceph pool (and recreate it if needed).
+* **OpenStack Swift**: delete the containers whose names start with
+  `$storage.swift_container_prefix` in the Swift account.
+* **Redis**: delete the keys whose names start with `timeseries`.
+* **File**: delete the directories whose names are UUIDs under
+  `$storage.file_basepath`.
+* **Amazon S3**: delete the bucket whose name starts with
+  `$storage.s3_bucket_prefix`.
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/prometheus.rst b/doc/source/prometheus.rst
new file mode 100644
index 0000000000000000000000000000000000000000..1c42949a9cc8b5f7c7409ce21b1b6b793b69443d
--- /dev/null
+++ b/doc/source/prometheus.rst
@@ -0,0 +1,29 @@
+====================
+ Prometheus support
+====================
+
+`Prometheus`_ can use Gnocchi to store its data through a `Remote Write
+Adapter`_. Gnocchi needs to be installed with the `prometheus` flavor.
+
+Example of Prometheus configuration::
+
+  remote_write:
+  - url: "http://localhost:8041/v1/prometheus/write"
+    basic_auth:
+      username: "admin"
+      password: "whatever"
+
+
+The `/v1/prometheus/write` endpoint handles the `WriteRequest` protobuf
+message.
+
+Gnocchi maps Prometheus metrics to its data model.
+
+For each metric sent by Prometheus, Gnocchi maintains a corresponding resource
+based on each `job` and `instance` pair. This resource is created with the
+`prometheus` resource type and contains two attributes, `job` and `instance`.
+The metrics sent by Prometheus with this pair are attached to that resource and
+filled with the provided measures.
+
+.. _`Prometheus`: https://prometheus.io/
+.. _`Remote Write Adapter`: https://prometheus.io/docs/operating/configuration/#<remote_write>
diff --git a/doc/source/releasenotes/3.0.rst b/doc/source/releasenotes/3.0.rst
new file mode 100644
index 0000000000000000000000000000000000000000..4f664099adf7defd5d0e15dff7ab9cc3f2e26f19
--- /dev/null
+++ b/doc/source/releasenotes/3.0.rst
@@ -0,0 +1,6 @@
+===================================
+ 3.0 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/3.0
diff --git a/doc/source/releasenotes/3.1.rst b/doc/source/releasenotes/3.1.rst
new file mode 100644
index 0000000000000000000000000000000000000000..9673b4a818921784369c52a7079bbfe9ebbe7666
--- /dev/null
+++ b/doc/source/releasenotes/3.1.rst
@@ -0,0 +1,6 @@
+===================================
+ 3.1 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/3.1
diff --git a/doc/source/releasenotes/4.0.rst b/doc/source/releasenotes/4.0.rst
new file mode 100644
index 0000000000000000000000000000000000000000..8e290057954e87380c47d8456f0ac8f5e7523be2
--- /dev/null
+++ b/doc/source/releasenotes/4.0.rst
@@ -0,0 +1,6 @@
+===================================
+ 4.0 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/4.0
diff --git a/doc/source/releasenotes/4.1.rst b/doc/source/releasenotes/4.1.rst
new file mode 100644
index 0000000000000000000000000000000000000000..d33a607f10980779ac3200f649b828192fa43edd
--- /dev/null
+++ b/doc/source/releasenotes/4.1.rst
@@ -0,0 +1,6 @@
+===================================
+ 4.1 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/4.1
diff --git a/doc/source/releasenotes/4.2.rst b/doc/source/releasenotes/4.2.rst
new file mode 100644
index 0000000000000000000000000000000000000000..f93871731081a3934e19a544df0dcb9b4a9b5590
--- /dev/null
+++ b/doc/source/releasenotes/4.2.rst
@@ -0,0 +1,6 @@
+===================================
+ 4.2 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/4.2
diff --git a/doc/source/releasenotes/4.3.rst b/doc/source/releasenotes/4.3.rst
new file mode 100644
index 0000000000000000000000000000000000000000..c3da2577780493cb2dd2bb29d011d24d7f51499e
--- /dev/null
+++ b/doc/source/releasenotes/4.3.rst
@@ -0,0 +1,6 @@
+===================================
+ 4.3 Series Release Notes
+===================================
+
+.. release-notes::
+   :branch: origin/stable/4.3
diff --git a/doc/source/releasenotes/index.rst b/doc/source/releasenotes/index.rst
new file mode 100644
index 0000000000000000000000000000000000000000..dee60b313727eeac2a56f11875036011ada40410
--- /dev/null
+++ b/doc/source/releasenotes/index.rst
@@ -0,0 +1,13 @@
+Release Notes
+=============
+
+.. toctree::
+   :maxdepth: 2
+
+   unreleased
+   4.3
+   4.2
+   4.1
+   4.0
+   3.1
+   3.0
diff --git a/doc/source/releasenotes/unreleased.rst b/doc/source/releasenotes/unreleased.rst
new file mode 100644
index 0000000000000000000000000000000000000000..875030f9d0c2861c5d462afec734f496a485fc4f
--- /dev/null
+++ b/doc/source/releasenotes/unreleased.rst
@@ -0,0 +1,5 @@
+============================
+Current Series Release Notes
+============================
+
+.. release-notes::
diff --git a/doc/source/rest.j2 b/doc/source/rest.j2
new file mode 100644
index 0000000000000000000000000000000000000000..443c6ac3d53ffaf0acaed066ae761a25c596fb95
--- /dev/null
+++ b/doc/source/rest.j2
@@ -0,0 +1,1061 @@
+================
+ REST API Usage
+================
+
+Authentication
+==============
+
+By default, the authentication is configured to the `"basic" mode`_. You need
+to provide an `Authorization` header in your HTTP requests with a valid
+username (the password is not used). The "admin" username is granted all
+privileges, whereas any other username is recognized as having standard
+permissions.
+
+.. _"basic" mode: https://tools.ietf.org/html/rfc7617
+
+You can customize permissions by specifying a different `policy_file` than the
+default one.
+
+If you set the `api.auth_mode` value to `keystone`, the OpenStack Keystone
+middleware will be enabled for authentication. You then need to authenticate
+against Keystone and provide an `X-Auth-Token` header with a valid token for
+each request sent to Gnocchi's API.
+
+If you set the `api.auth_mode` value to `remoteuser`, Gnocchi will look at the
+HTTP server REMOTE_USER environment variable to get the username. Then the
+permissions model is the same as the "basic" mode.
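+
+For example, with the default "basic" mode, a request authenticated as `admin`
+could look like the following sketch (adapt the endpoint to your deployment)::
+
+    curl -u admin: http://localhost:8041/v1/metric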
+
+Metrics
+=======
+
+Gnocchi provides an object type that is called |metric|. A |metric|
+designates anything that can be measured: the CPU usage of a server, the
+temperature of a room or the number of bytes sent by a network interface.
+
+A |metric| only has a few properties: a UUID to identify it, a name, and the
+|archive policy| that will be used to store and aggregate the |measures|.
+
+Create
+------
+
+To create a |metric|, the following API request should be used:
+
+{{ scenarios['create-metric']['doc'] }}
+
+.. note::
+
+  Once the |metric| is created, the |archive policy| attribute is fixed and
+  unchangeable. The definition of the |archive policy| can be changed through
+  the :ref:`archive_policy endpoint<archive-policy-patch>` though.
+
+Read
+----
+
+Once created, you can retrieve the |metric| information:
+
+{{ scenarios['get-metric']['doc'] }}
+
+List
+----
+
+To retrieve the list of all the |metrics| created, use the following request:
+
+{{ scenarios['list-metric']['doc'] }}
+
+Pagination
+~~~~~~~~~~
+
+Considering the large volume of |metrics| Gnocchi will store, query results
+are limited to the `max_limit` value set in the configuration file. Returned
+results are ordered by |metrics|' id values. To retrieve the next page of
+results, the id of a |metric| should be given as `marker` for the beginning
+of the next page of results.
+
+Default ordering and limits as well as page start can be modified
+using query parameters:
+
+{{ scenarios['list-metric-pagination']['doc'] }}
+
+Delete
+------
+
+Metrics can be deleted through a request:
+
+{{ scenarios['delete-metric']['doc'] }}
+
+See also :ref:`Resources <resources-endpoint>` for similar operations specific
+to metrics associated with a |resource|.
+
+Measures
+========
+
+Push
+----
+
+It is possible to send |measures| to the |metric|:
+
+{{ scenarios['post-measures']['doc'] }}
+
+If there are no errors, Gnocchi does not return a response body, only a simple
+status code. It is possible to provide any number of |measures|.
+
+.. IMPORTANT::
+
+   While it is possible to send any number of (timestamp, value) pairs, they
+   still need to honor constraints defined by the |archive policy| used by the
+   |metric|, such as the maximum |timespan|.
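+
+As an illustrative sketch (the metric UUID is a placeholder and the endpoint
+assumes a local deployment with "basic" authentication), pushing |measures|
+with `curl` could look like::
+
+    curl -u admin: -X POST \
+         -H "Content-Type: application/json" \
+         -d '[{"timestamp": "2017-01-01T12:00:00", "value": 42.0}]' \
+         http://localhost:8041/v1/metric/<uuid>/measures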
+
+Batch
+~~~~~
+
+It is also possible to batch the sending of |measures|, i.e. send several
+|measures| for different |metrics| in a single call:
+
+{{ scenarios['post-measures-batch']['doc'] }}
+
+Or using named |metrics| of |resources|:
+
+{{ scenarios['post-measures-batch-named']['doc'] }}
+
+If some named |metrics| specified in the batch request do not exist, Gnocchi
+can try to create them as long as an |archive policy| rule matches:
+
+{{ scenarios['post-measures-batch-named-create']['doc'] }}
+
+Read
+----
+
+Once |measures| are sent, it is possible to retrieve |aggregates| using *GET*
+on the same endpoint:
+
+{{ scenarios['get-measures']['doc'] }}
+
+The list of points returned is composed of tuples with (timestamp,
+|granularity|, value) sorted by timestamp. The |granularity| is the |timespan|
+covered by aggregation for this point.
+
+Refresh
+~~~~~~~
+
+Depending on the driver, there may be some lag after pushing |measures| before
+they are processed and queryable. To ensure your query returns all |aggregates|
+that have been pushed and processed, you can force any unprocessed |measures|
+to be handled:
+
+{{ scenarios['get-measures-refresh']['doc'] }}
+
+.. note::
+
+   Depending on the amount of data that is unprocessed, `refresh` may add
+   some overhead to your query.
+
+Filter
+~~~~~~
+
+Time range
+``````````
+
+It is possible to filter the |aggregates| over a time range by specifying the
+*start* and/or *stop* parameters to the query with timestamp. The timestamp
+format can be either a floating number (UNIX epoch) or an ISO8601 formatted
+timestamp:
+
+{{ scenarios['get-measures-from']['doc'] }}
+
+Aggregation
+```````````
+
+By default, the aggregated values that are returned use the *mean*
+|aggregation method|. It is possible to request any other method defined
+by the policy by specifying the *aggregation* query parameter:
+
+{{ scenarios['get-measures-max']['doc'] }}
+
+Granularity
+```````````
+
+It's possible to provide the |granularity| argument to specify the
+|granularity| to retrieve, rather than all the |granularities| available:
+
+{{ scenarios['get-measures-granularity']['doc'] }}
+
+Resample
+~~~~~~~~
+
+In addition to |granularities| defined by the |archive policy|, |aggregates|
+can be resampled to a new |granularity|.
+
+{{ scenarios['get-measures-resample']['doc'] }}
+
+Time-series data can also be grouped by calendar dates beyond a standard day.
+The resulting groupings are tied to the leading date of the group. For example,
+grouping on month returns a monthly aggregate linked to the first of the month.
+
+{{ scenarios['get-measures-resample-calendar']['doc'] }}
+
+Available calendar groups are:
+
+* `Y` – by year
+* `H` – by half
+* `Q` – by quarter
+* `M` – by month
+* `W` – by week, starting on Sunday
+
+.. note::
+
+   If you plan to execute the query often, it is recommended for performance
+   to leverage an |archive policy| with the needed |granularity| instead of
+   resampling the time series on each query.
+
+.. note::
+
+   Depending on the |aggregation method| and frequency of |measures|, resampled
+   data may lack accuracy as it is working against previously aggregated data.
+
+.. note::
+
+   Gnocchi has an :ref:`aggregates <aggregates>` endpoint which provides
+   resampling as well as additional capabilities.
+
+
+Archive Policy
+==============
+
+When sending |measures| for a |metric| to Gnocchi, the values are dynamically
+aggregated. That means that Gnocchi does not store all sent |measures|, but
+aggregates them over a certain period of time.
+
+Gnocchi provides several built-in |aggregation methods|. The list of
+|aggregation methods| available is: *mean*, *sum*, *last*, *max*, *min*,
+*std*, *median*, *first*, *count* and *Npct* (with 0 < N < 100). Those can be
+prefixed by `rate:` to compute the rate of change before doing the
+aggregation.
+
+An |archive policy| is defined by a list of items in the `definition` field.
+Each item is composed of: the |timespan|; the |granularity|, which is the level
+of precision that must be kept when aggregating data; and the number of points.
+The |archive policy| is determined using at least 2 of the points,
+|granularity| and |timespan| fields. For example, an item might be defined
+as 12 points over 1 hour (one point every 5 minutes), or 1 point every 1 hour
+over 1 day (24 points).
+
+By default, new |measures| can only be processed if they have timestamps in the
+future or part of the last aggregation period. The last aggregation period size
+is based on the largest |granularity| defined in the |archive policy|
+definition. To allow processing |measures| that are older than the period, the
+`back_window` parameter can be used to set the number of coarsest periods to
+keep. That way it is possible to process |measures| that are older than the
+last timestamp period boundary.
+
+For example, if an |archive policy| is defined with coarsest aggregation of 1
+hour, and the last point processed has a timestamp of 14:34, it's possible to
+process |measures| back to 14:00 with a `back_window` of 0. If the
+`back_window` is set to 2, it will be possible to send |measures| with
+timestamp back to 12:00 (14:00 minus 2 times 1 hour).
+
+Create
+------
+
+The REST API allows you to create |archive policies| in this way:
+
+{{ scenarios['create-archive-policy']['doc'] }}
+
+By default, the |aggregation methods| computed and stored are the ones defined
+with `default_aggregation_methods` in the configuration file. It is possible to
+change the |aggregation methods| used in an |archive policy| by specifying the
+list of |aggregation method| to use in the `aggregation_methods` attribute of
+an |archive policy|.
+
+{{ scenarios['create-archive-policy-without-max']['doc'] }}
+
+The list of |aggregation methods| can either be:
+
+- a list of |aggregation methods| to use, e.g. `["mean", "max"]`
+
+- a list of methods to remove (prefixed by `-`) and/or to add (prefixed by `+`)
+  to the default list (e.g. `["+mean", "-last"]`)
+
+If `*` is included in the list, it's substituted by the list of all supported
+|aggregation methods|.
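+
+For example, a hypothetical sketch combining the wildcard with a removal::
+
+    "aggregation_methods": ["*", "-last"]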
+
+Read
+----
+
+Once the |archive policy| is created, the complete set of properties is
+computed and returned, with the URL of the |archive policy|. This URL can be
+used to retrieve the details of the |archive policy| later:
+
+{{ scenarios['get-archive-policy']['doc'] }}
+
+List
+----
+
+It is also possible to list |archive policies|:
+
+{{ scenarios['list-archive-policy']['doc'] }}
+
+.. _archive-policy-patch:
+
+Update
+------
+
+Existing |archive policies| can be modified to retain more or less data
+depending on requirements. If the policy coverage is expanded, |aggregates| are
+not retroactively calculated as backfill to accommodate the new |timespan|:
+
+{{ scenarios['update-archive-policy']['doc'] }}
+
+.. note::
+
+   |Granularities| cannot be changed to a different rate. Also, |granularities|
+   cannot be added or dropped from a policy.
+
+Delete
+------
+
+It is possible to delete an |archive policy| if it is not used by any |metric|:
+
+{{ scenarios['delete-archive-policy']['doc'] }}
+
+.. note::
+
+   An |archive policy| cannot be deleted until all |metrics| associated with it
+   are removed by a metricd daemon.
+
+
+Archive Policy Rule
+===================
+
+Gnocchi provides the ability to define a mapping called `archive_policy_rule`.
+An |archive policy| rule defines a mapping between a |metric| and an
+|archive policy|. This gives users the ability to pre-define rules so an
+|archive policy| is assigned to |metrics| based on a matched pattern.
+
+An |archive policy| rule has a few properties: a name to identify it, the
+name of the |archive policy| to apply, and a |metric| pattern to match
+|metric| names.
+
+For example, an |archive policy| rule could map any volume |metric| matching
+the pattern `volume.*` to a medium |archive policy|. When a |metric| named
+`volume.size` is posted, it matches the pattern, the rule applies, and the
+|archive policy| is set to medium. If multiple rules match, the longest
+matching rule is taken. For example, if two rules exist which match `*` and
+`disk.*`, a `disk.io.rate` |metric| would match the `disk.*` rule rather than
+the `*` rule.
+
+Create
+------
+
+To create a rule, the following API request should be used:
+
+{{ scenarios['create-archive-policy-rule']['doc'] }}
+
+The `metric_pattern` is used for pattern matching. Some examples:
+
+- `*` matches anything
+- `disk.*` matches disk.io
+- `disk.io.*` matches disk.io.rate
+
+Read
+----
+
+Once created, you can retrieve the rule information:
+
+{{ scenarios['get-archive-policy-rule']['doc'] }}
+
+List
+----
+
+It is also possible to list |archive policy| rules. The result set is ordered
+by the `metric_pattern`, in reverse alphabetical order:
+
+{{ scenarios['list-archive-policy-rule']['doc'] }}
+
+Update
+------
+
+It is possible to rename an |archive policy| rule:
+
+{{ scenarios['rename-archive-policy-rule']['doc'] }}
+
+Delete
+------
+
+It is possible to delete an |archive policy| rule:
+
+{{ scenarios['delete-archive-policy-rule']['doc'] }}
+
+.. _resources-endpoint:
+
+Resources
+=========
+
+Gnocchi provides the ability to store and index |resources|. Each |resource|
+has a type. The basic type of |resources| is *generic*, but more specialized
+subtypes also exist, especially to describe OpenStack resources.
+
+Create
+------
+
+To create a generic |resource|:
+
+{{ scenarios['create-resource-generic']['doc'] }}
+
+The *user_id* and *project_id* attributes may be any arbitrary string. The
+:ref:`timestamps<timestamp-format>` describing the lifespan of the
+|resource| are optional, and *started_at* is by default set to the current
+timestamp.
+
+The *id* attribute may be a UUID or some other arbitrary string.  If it is
+a UUID, Gnocchi will use it verbatim. If it is not a UUID, the original
+value will be stored in the *original_resource_id* attribute and Gnocchi
+will generate a new UUID that is unique for the user.  That is, if two
+users submit create requests with the same non-UUID *id* attribute, the
+resulting resources will have different UUID values in their respective
+*id* attributes.
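+
+As a hypothetical sketch (identifiers illustrative), creating a |resource|
+with a non-UUID *id*::
+
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "compute-node-01",
+      "user_id": "my-user",
+      "project_id": "my-project"
+    }
+
+Gnocchi would generate a UUID for *id* and store `compute-node-01` in
+*original_resource_id*.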
+
+You may use either of the *id* or the *original_resource_id* attributes to
+refer to the |resource|.  The value returned by the create operation
+includes a `Location` header referencing the *id*.
+
+Non-generic resources
+~~~~~~~~~~~~~~~~~~~~~
+
+More specialized |resources| can be created. For example, the *instance* is
+used to describe an OpenStack instance as managed by Nova_.
+
+{{ scenarios['create-resource-instance']['doc'] }}
+
+All specialized types have their own optional and mandatory attributes,
+but they all include attributes from the generic type as well.
+
+.. _Nova: http://launchpad.net/nova
+
+With metrics
+~~~~~~~~~~~~
+
+Each |resource| can be linked to any number of |metrics| on creation:
+
+{{ scenarios['create-resource-instance-with-metrics']['doc'] }}
+
+It is also possible to create |metrics| at the same time you create a |resource|
+to save some requests:
+
+{{ scenarios['create-resource-instance-with-dynamic-metrics']['doc'] }}
+
+Read
+----
+
+To retrieve a |resource| by its URL provided by the `Location` header at
+creation time:
+
+{{ scenarios['get-resource-generic']['doc'] }}
+
+List
+----
+
+All |resources| can be listed, either by using the `generic` type that will
+list all types of |resources|, or by filtering on their |resource| type:
+
+{{ scenarios['list-resource-generic']['doc'] }}
+
+Specific resource type
+~~~~~~~~~~~~~~~~~~~~~~
+
+No attributes specific to the |resource| type are retrieved when using the
+`generic` endpoint. To retrieve the details, either list using the specific
+|resource| type endpoint, as below, or ask for details as shown in the next
+section:
+
+{{ scenarios['list-resource-instance']['doc'] }}
+
+With details
+~~~~~~~~~~~~
+
+To retrieve a more detailed view of the |resources|, use `details=true` as a
+query parameter:
+
+{{ scenarios['list-resource-generic-details']['doc'] }}
+
+Limit attributes
+~~~~~~~~~~~~~~~~
+
+To limit response attributes, use `attrs=id&attrs=started_at&attrs=user_id`
+as query parameters:
+
+{{ scenarios['list-resource-generic-limit-attrs']['doc'] }}
+
+Pagination
+~~~~~~~~~~
+
+Similar to the |metric| list, query results are limited to the `max_limit`
+value set in the configuration file. Returned results represent a single page
+of data and are ordered by resources' revision_start time and started_at
+values:
+
+{{ scenarios['list-resource-generic-pagination']['doc'] }}
+
+List resource metrics
+---------------------
+
+The |metrics| associated with a |resource| can be accessed and manipulated
+using the usual `/v1/metric` endpoint or using the named relationship with the
+|resource|:
+
+{{ scenarios['get-resource-named-metrics-measures']['doc'] }}
+
+Update
+------
+
+It's possible to modify a |resource| by re-uploading it partially with the
+modified fields:
+
+{{ scenarios['patch-resource']['doc'] }}
+
+It is also possible to associate additional |metrics| with a |resource|:
+
+{{ scenarios['append-metrics-to-resource']['doc'] }}
+
+History
+-------
+
+To retrieve a |resource|'s modification history:
+
+{{ scenarios['get-patched-instance-history']['doc'] }}
+
+Delete
+------
+
+It is possible to delete a |resource| altogether:
+
+{{ scenarios['delete-resource-generic']['doc'] }}
+
+It is also possible to delete a batch of |resources| based on attribute
+values; the number of deleted |resources| is returned.
+
+Batch
+~~~~~
+
+To delete |resources| based on ids:
+
+{{ scenarios['delete-resources-by-ids']['doc'] }}
+
+or delete |resources| based on time:
+
+{{ scenarios['delete-resources-by-time']['doc'] }}
+
+.. IMPORTANT::
+
+  When a |resource| is deleted, all its associated |metrics| are deleted at the
+  same time.
+
+  When a batch of |resources| are deleted, an attribute filter is required to
+  avoid deletion of the entire database.
+
+
+Resource Types
+==============
+
+Gnocchi is able to manage |resource| types with custom attributes.
+
+Create
+------
+
+To create a new |resource| type:
+
+{{ scenarios['create-resource-type']['doc'] }}
+
+Read
+----
+
+Then to retrieve its description:
+
+{{ scenarios['get-resource-type']['doc'] }}
+
+List
+----
+
+All |resource| types can be listed like this:
+
+{{ scenarios['list-resource-type']['doc'] }}
+
+Update
+------
+
+Attributes can be added or removed:
+
+{{ scenarios['patch-resource-type']['doc'] }}
+
+Delete
+------
+
+It can also be deleted if no more |resources| are associated with it:
+
+{{ scenarios['delete-resource-type']['doc'] }}
+
+.. note::
+
+   Creating a |resource| type means creating new tables on the indexer
+   backend. This is a heavy operation that will lock some tables for a short
+   amount of time. When the |resource| type is created, its initial `state`
+   is `creating`. When the new tables have been created, the state switches
+   to `active` and the new |resource| type is ready to be used. If something
+   unexpected occurs during this step, the state switches to `creation_error`.
+
+   The same behavior occurs when the |resource| type is deleted. The state
+   first switches to `deleting` and the |resource| type is no longer usable.
+   Then the tables are removed and the resource_type is finally deleted from
+   the database. If an unexpected error occurs, the state switches to
+   `deletion_error`.
+
+
+
+Search
+======
+
+Gnocchi's search API supports the ability to execute a query across
+|resources| or |metrics|. This API provides a language to construct more
+complex matching constraints beyond basic filtering.
+
+Usage and format
+----------------
+
+You can specify a time range to look for by specifying the `start` and/or
+`stop` query parameter, and the |aggregation method| to use by specifying the
+`aggregation` query parameter.
+
+Queries can be expressed in two formats: `JSON` or `STRING`.
+
+The supported operators are: equal to (`=`, `==` or `eq`), less than (`<` or
+`lt`), greater than (`>` or `gt`), less than or equal to (`<=`, `le` or `≤`),
+greater than or equal to (`>=`, `ge` or `≥`), not equal to (`!=`, `ne` or
+`≠`), addition (`+` or `add`), subtraction (`-` or `sub`), multiplication
+(`*`, `mul` or `×`) and division (`/`, `div` or `÷`). In JSON format, these
+operations take only one argument, the second argument being automatically
+set to the field value. In STRING format, this is just `<first argument>
+<operator> <second argument>`.
+
+The operators or (`or` or `∨`), and (`and` or `∧`) and `not` are also
+supported. In JSON format, they take a list of arguments as parameters. In
+STRING format, the syntax is `<left group> and/or <right group>` or `not
+<right group>`. With the STRING format, parentheses can be used to create
+groups.
+
+An example of the JSON format::
+
+    {"and": [
+      {"=": {"host": "example1"}},
+      {"like": {"owner": "admin-%"}}
+    ]}
+
+And its STRING format equivalent::
+
+    host = "example1" and owner like "admin-%"
+
+.. _search-resource:
+
+Resource
+--------
+
+It's possible to search for |resources| using a query mechanism, either by
+using the `POST` method and uploading a JSON formatted query or by passing a
+STRING formatted query, URL-encoded, in the ``filter`` parameter.
+
+Single filter
+~~~~~~~~~~~~~
+
+When listing |resources|, it is possible to filter |resources| based on
+attribute values:
+
+{{ scenarios['search-resource-for-user']['doc'] }}
+
+Or even:
+
+{{ scenarios['search-resource-for-host-like']['doc'] }}
+
+For the ``filter`` parameter version, the value is the URL-encoded version of
+``{{ scenarios['search-resource-for-host-like-filter']['filter'] }}``
+
+{{ scenarios['search-resource-for-host-like-filter']['doc'] }}
+
+Multiple filters
+~~~~~~~~~~~~~~~~
+
+Complex operators such as `and` and `or` are also available:
+
+{{ scenarios['search-resource-for-user-after-timestamp']['doc'] }}
+
+The ``filter`` version is
+``{{ scenarios['search-resource-for-user-after-timestamp-filter']['filter'] }}``
+URL-encoded.
+
+{{ scenarios['search-resource-for-user-after-timestamp-filter']['doc'] }}
+
+With details
+~~~~~~~~~~~~
+
+Details about the |resource| can also be retrieved at the same time:
+
+{{ scenarios['search-resource-for-user-details']['doc'] }}
+
+Limit attributes
+~~~~~~~~~~~~~~~~
+
+To limit response attributes, use `attrs=id&attrs=started_at&attrs=user_id` in the query
+parameter:
+
+{{ scenarios['search-resource-for-user-limit-attrs']['doc'] }}
+
+History
+~~~~~~~
+
+It's possible to search for old revisions of |resources| in the same ways:
+
+{{ scenarios['search-resource-history']['doc'] }}
+
+Time range
+``````````
+
+The time range of the history can be set, too:
+
+{{ scenarios['search-resource-history-partial']['doc'] }}
+
+This can be done with the ``filter`` parameter too:
+
+``{{ scenarios['search-resource-history-partial-filter']['filter'] }}``
+
+
+{{ scenarios['search-resource-history-partial-filter']['doc'] }}
+
+
+Magic
+~~~~~
+
+The special attribute `lifespan`, which is equivalent to
+`ended_at - started_at`, is also available in filtering queries.
+
+{{ scenarios['search-resource-lifespan']['doc'] }}
+
+Metric
+------
+
+It is possible to search for values in |metrics|. For example, this will look
+for all values that are greater than or equal to 50 if we add 23 to them and
+that are not equal to 55. You have to specify the list of |metrics| to look
+into by using the `metric_id` query parameter several times.
+
+{{ scenarios['search-value-in-metric']['doc'] }}
+
+And it is possible to search for values in |metrics| by using one or more
+|granularities|:
+
+{{ scenarios['search-value-in-metrics-by-granularity']['doc'] }}
+
+
+.. _aggregates:
+
+Dynamic Aggregates
+==================
+
+Gnocchi supports the ability to make on-the-fly reaggregations of existing
+|metrics| and the ability to manipulate and transform |metrics| as required.
+This is accomplished by passing an `operations` value describing the actions
+to apply to the |metrics|.
+
+.. note::
+
+   `operations` can also be passed as a string, for example:
+   `"operations": "(aggregate mean (metric (metric-id aggregation) (metric-id aggregation))"`
+
+Cross-metric Usage
+------------------
+
+Aggregation across multiple |metrics| has different behavior depending on
+whether boundary values are set (`start` and `stop`) and whether
+`needed_overlap` is set.
+
+Overlap percentage
+~~~~~~~~~~~~~~~~~~
+
+Gnocchi expects that time series have a certain percentage of timestamps in
+common. This percentage is controlled by the `needed_overlap` parameter,
+which by default expects 100% overlap. If this percentage is not reached, an
+error is returned.
+
+.. note::
+
+   If `start` or `stop` boundary is not set, Gnocchi will set the missing
+   boundary to the first or last timestamp common across all series.
+
+Backfill
+~~~~~~~~
+
+The ability to fill in missing points from a subset of time series is
+supported by specifying a `fill` value. Valid fill values include any float,
+`dropna` or `null`. In the case of `null`, Gnocchi will compute the
+aggregation using only the existing points. `dropna` is like `null` but
+removes NaN values from the result. The `fill` parameter will not backfill
+timestamps which contain no points in any of the time series. Only timestamps
+which have datapoints in at least one of the time series are returned.
+
+{{ scenarios['get-aggregates-by-metric-ids-fill']['doc'] }}
+
+
+Search and aggregate
+--------------------
+
+It's also possible to do that aggregation on |metrics| linked to |resources|.
+In order to select these |resources|, the following endpoint accepts a query
+such as the one described in the :ref:`resource search API <search-resource>`.
+
+{{ scenarios['get-aggregates-by-attributes-lookup']['doc'] }}
+
+The |metric| name can be a wildcard too:
+
+{{ scenarios['get-aggregates-by-attributes-lookup-wildcard']['doc'] }}
+
+Groupby
+~~~~~~~
+
+It is possible to group the |resource| search results by any attribute of the
+requested |resource| type, and then compute the aggregation:
+
+{{ scenarios['get-aggregates-by-attributes-lookup-groupby']['doc'] }}
+
+List of supported <operations>
+------------------------------
+
+Get one or more metrics
+~~~~~~~~~~~~~~~~~~~~~~~
+
+::
+
+   (metric <metric-id> <aggregation>)
+   (metric ((<metric-id> <aggregation>), (<metric-id> <aggregation>), ...))
+
+   metric-id: the id of a metric to retrieve
+   aggregation: the aggregation method to retrieve
+
+.. note::
+
+   When used alone, this provides the ability to retrieve multiple |metrics| in a
+   single request.
+
+Rolling window aggregation
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+::
+
+   (rolling <aggregation method> <rolling window> (<operations>))
+
+   aggregation method: the aggregation method to use to compute the rolling window.
+                       (mean, median, std, min, max, sum, var, count)
+   rolling window: number of previous values to aggregate
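+
+For example, a hypothetical sketch computing a rolling mean over the last 5
+points of a metric's mean aggregates::
+
+   (rolling mean 5 (metric <metric-id> mean))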
+
+Aggregation across metrics
+~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+::
+
+   (aggregate <aggregation method> ((<operations>), (<operations>), ...))
+
+   aggregation method: the aggregation method to use to compute the aggregate between metrics
+                       (mean, median, std, min, max, sum, var, count)
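+
+For example, a hypothetical sketch computing the mean across two metrics'
+mean aggregates::
+
+   (aggregate mean (metric (<metric-id> mean) (<metric-id> mean)))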
+
+Resample
+~~~~~~~~
+
+::
+
+   (resample <aggregation method> <granularity> (<operations>))
+
+   aggregation method: the aggregation method to use when resampling
+                       (mean, median, std, min, max, sum, var, count)
+
+   granularity: the granularity (e.g.: 1d, 60s, ...)
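+
+For example, a hypothetical sketch resampling a metric's mean aggregates to
+one point per day::
+
+   (resample mean 1d (metric <metric-id> mean))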
+
+.. note::
+
+   If you plan to execute the query often, it is recommended for performance
+   to leverage an |archive policy| with the needed |granularity| instead of
+   resampling the time series on each query.
+
+
+Math operations
+~~~~~~~~~~~~~~~
+
+::
+
+   (<operator> <operations_or_float> <operations_or_float>)
+
+   operator: %, mod, +, add, -, sub, *, ×, mul, /, ÷, div, **, ^, pow
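+
+For example, a hypothetical sketch multiplying a metric's mean aggregates by
+4::
+
+   (* (metric <metric-id> mean) 4)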
+
+Boolean operations
+~~~~~~~~~~~~~~~~~~
+
+::
+
+   (<operator> <operations_or_float> <operations_or_float>)
+
+   operator: =, ==, eq, <, lt, >, gt, <=, ≤, le, >=, ≥, ge, !=, ≠, ne
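+
+For example, a hypothetical sketch comparing a metric's mean aggregates to a
+threshold::
+
+   (> (metric <metric-id> mean) 50)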
+
+Function operations
+~~~~~~~~~~~~~~~~~~~
+
+::
+
+   (abs (<operations>))
+   (absolute (<operations>))
+   (neg (<operations>))
+   (negative (<operations>))
+   (cos (<operations>))
+   (sin (<operations>))
+   (tan (<operations>))
+   (floor (<operations>))
+   (ceil (<operations>))
+   (clip (<operations>))
+   (clip_min (<operations>))
+   (clip_max (<operations>))
+   (rateofchange (<operations>))
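+
+For example, a hypothetical sketch taking the absolute value of a metric's
+mean aggregates::
+
+   (abs (metric <metric-id> mean))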
+
+
+
+Examples
+--------
+
+Aggregate then math
+~~~~~~~~~~~~~~~~~~~
+
+The following computes the mean aggregate of all the listed |metrics| and
+then multiplies it by `4`.
+
+{{ scenarios['get-aggregates-by-metric-ids']['doc'] }}
+
+Between metrics
+~~~~~~~~~~~~~~~
+
+Operations between metrics can also be done, such as:
+
+{{ scenarios['get-aggregates-between-metrics']['doc'] }}
+
+List the top N resources that consume the most CPU during the last hour
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+The following is configured so that `stop` - `start` equals `granularity`, in
+order to get only one point per instance.
+
+This gives all the information needed to order by the `cpu.util` timeseries,
+which can then be filtered down to N results.
+
+
+{{ scenarios['use-case1-top-cpuutil-per-instances']['doc'] }}
+
+Aggregation across metrics (deprecated)
+=======================================
+
+.. Note::
+
+   This API has been replaced by the more flexible :ref:`aggregates API <aggregates>`.
+
+
+Gnocchi supports on-the-fly aggregation of previously aggregated data of
+|metrics|.
+
+It can be done by providing the list of |metrics| to aggregate:
+
+{{ scenarios['get-across-metrics-measures-by-metric-ids']['doc'] }}
+
+.. Note::
+
+   This aggregation is done against the |aggregates| built and updated for
+   a |metric| when new measurements are posted in Gnocchi. Therefore, the
+   aggregate of this already aggregated data may not make sense for certain
+   kinds of |aggregation methods| (e.g. std).
+
+By default, the |measures| are aggregated using the |aggregation method|
+provided, e.g. you'll get a mean of means, or a max of maxes. You can specify
+what method to use over the retrieved aggregation by using the `reaggregation`
+parameter:
+
+{{ scenarios['get-across-metrics-measures-by-metric-ids-reaggregate']['doc'] }}
+
+It's also possible to do that aggregation on |metrics| linked to |resources|.
+In order to select these |resources|, the following endpoint accepts a query
+such as the one described in the :ref:`resource search API <search-resource>`.
+
+{{ scenarios['get-across-metrics-measures-by-attributes-lookup']['doc'] }}
+
+As with resource search, the query
+``{{ scenarios['get-across-metrics-measures-by-attributes-lookup-filter']['filter'] }}``
+can be passed in the ``filter`` parameter:
+
+{{ scenarios['get-across-metrics-measures-by-attributes-lookup-filter']['doc'] }}
+
+It is possible to group the |resource| search results by any attribute of the
+requested |resource| type, and then compute the aggregation:
+
+{{ scenarios['get-across-metrics-measures-by-attributes-lookup-groupby']['doc'] }}
+
+Similar to retrieving |aggregates| for a single |metric|, the `refresh`
+parameter can be provided to force all POSTed |measures| to be processed across
+all |metrics| before computing the result. The `resample` parameter may be used
+as well.
+
+.. note::
+
+   Transformations (e.g. resample, absolute, ...) are applied prior to any
+   reaggregation if both parameters are specified.
+
+Also, aggregation across |metrics| has different behavior depending on
+whether boundary values are set (`start` and `stop`) and whether
+`needed_overlap` is set.
+
+Gnocchi expects that time series have a certain percentage of timestamps in
+common. This percentage is controlled by `needed_overlap`, which by default
+expects 100% overlap. If this percentage is not reached, an error is
+returned.
+
+.. note::
+
+   If `start` or `stop` boundary is not set, Gnocchi will set the missing
+   boundary to the first or last timestamp common across all series.
+
+The ability to fill in missing points from a subset of time series is
+supported by specifying a `fill` value. Valid fill values include any float,
+`dropna` or `null`. In the case of `null`, Gnocchi will compute the
+aggregation using only the existing points. `dropna` is like `null` but
+removes NaN values from the result. The `fill` parameter will not backfill
+timestamps which contain no points in any of the time series. Only timestamps
+which have datapoints in at least one of the time series are returned.
+
+{{ scenarios['get-across-metrics-measures-by-metric-ids-fill']['doc'] }}
+
+
+Capabilities
+============
+
+The list of |aggregation methods| that can be used in Gnocchi is extendable
+and can differ between deployments. It is possible to get the supported list
+of |aggregation methods| from the API server:
+
+{{ scenarios['get-capabilities']['doc'] }}
+
+Status
+======
+
+The overall status of the Gnocchi installation can be retrieved via an API call
+reporting values such as the number of new |measures| to process for each
+|metric|:
+
+{{ scenarios['get-status']['doc'] }}
+
+.. _timestamp-format:
+
+Timestamp format
+================
+
+Timestamps used in Gnocchi are always returned using the ISO 8601 format.
+Gnocchi is able to understand a few formats of timestamp when querying or
+creating |resources|, for example:
+
+- "2014-01-01 12:12:34" or "2014-05-20T10:00:45.856219", ISO 8601 timestamps.
+- "10 minutes", which means "10 minutes from now".
+- "-2 days", which means "2 days ago".
+- 1421767030, a Unix epoch-based timestamp.
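+
+For instance, a hypothetical request using a relative timestamp (`-2 days`,
+URL-encoded)::
+
+    GET /v1/metric/<metric-id>/measures?start=-2%20days HTTP/1.1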
+
+.. include:: include/term-substitution.rst
diff --git a/doc/source/rest.yaml b/doc/source/rest.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..705dca526546d13267ecbfdfda914b56be77bafa
--- /dev/null
+++ b/doc/source/rest.yaml
@@ -0,0 +1,911 @@
+- name: create-archive-policy
+  request: |
+    POST /v1/archive_policy HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "short",
+      "back_window": 0,
+      "definition": [
+        {
+          "granularity": "1h",
+          "timespan": "7 day"
+        },
+        {
+          "granularity": "1s",
+          "timespan": "1 hour"
+        },
+        {
+          "points": 48,
+          "timespan": "1 day"
+        }
+      ]
+    }
+
+- name: create-archive-policy-without-max
+  request: |
+    POST /v1/archive_policy HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "short-without-max",
+      "aggregation_methods": ["-max", "-min"],
+      "back_window": 0,
+      "definition": [
+        {
+          "granularity": "1s",
+          "timespan": "1 hour"
+        },
+        {
+          "points": 48,
+          "timespan": "1 day"
+        }
+      ]
+    }
+
+- name: get-archive-policy
+  request: GET /v1/archive_policy/{{ scenarios['create-archive-policy']['response'].json['name'] }} HTTP/1.1
+
+- name: list-archive-policy
+  request: GET /v1/archive_policy HTTP/1.1
+
+- name: update-archive-policy
+  request: |
+    PATCH /v1/archive_policy/{{ scenarios['create-archive-policy']['response'].json['name'] }} HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "definition": [
+        {
+          "granularity": "1h",
+          "timespan": "7 day"
+        },
+        {
+          "granularity": "1s",
+          "timespan": "1 hour"
+        },
+        {
+          "points": 48,
+          "timespan": "1 day"
+        }
+      ]
+    }
+
+- name: create-archive-policy-to-delete
+  request: |
+    POST /v1/archive_policy HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "some-archive-policy",
+      "back_window": 0,
+      "definition": [
+        {
+          "granularity": "1s",
+          "timespan": "1 hour"
+        },
+        {
+          "points": 48,
+          "timespan": "1 day"
+        }
+      ]
+    }
+
+- name: delete-archive-policy
+  request: DELETE /v1/archive_policy/{{ scenarios['create-archive-policy-to-delete']['response'].json['name'] }} HTTP/1.1
+
+- name: create-metric
+  request: |
+    POST /v1/metric HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "archive_policy_name": "high"
+    }
+
+- name: create-metric-2
+  request: |
+    POST /v1/metric HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "archive_policy_name": "low"
+    }
+
+- name: create-metric-3
+  request: |
+    POST /v1/metric HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "archive_policy_name": "medium"
+    }
+
+- name: delete-metric
+  request: DELETE /v1/metric/{{ scenarios['create-metric-3']['response'].json['id'] }} HTTP/1.1
+
+- name: create-archive-policy-rule
+  request: |
+    POST /v1/archive_policy_rule HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "test_rule",
+      "metric_pattern": "disk.io.*",
+      "archive_policy_name": "low"
+    }
+
+- name: get-archive-policy-rule
+  request: GET /v1/archive_policy_rule/{{ scenarios['create-archive-policy-rule']['response'].json['name'] }} HTTP/1.1
+
+- name: list-archive-policy-rule
+  request: GET /v1/archive_policy_rule HTTP/1.1
+
+- name: create-archive-policy-rule-to-delete
+  request: |
+    POST /v1/archive_policy_rule HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "test_rule_delete",
+      "metric_pattern": "disk.io.*",
+      "archive_policy_name": "low"
+    }
+
+- name: delete-archive-policy-rule
+  request: DELETE /v1/archive_policy_rule/{{ scenarios['create-archive-policy-rule-to-delete']['response'].json['name'] }} HTTP/1.1
+
+- name: create-archive-policy-rule-to-rename
+  request: |
+    POST /v1/archive_policy_rule HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "test_rule_rename",
+      "metric_pattern": "disk.io.*",
+      "archive_policy_name": "low"
+    }
+
+- name: rename-archive-policy-rule
+  request: |
+    PATCH /v1/archive_policy_rule/{{ scenarios['create-archive-policy-rule-to-rename']['response'].json['name'] }} HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "name": "new_name"
+    }
+
+- name: get-metric
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }} HTTP/1.1
+
+- name: list-metric
+  request: GET /v1/metric HTTP/1.1
+
+- name: list-metric-pagination
+  request: GET /v1/metric?limit=100&sort=name:asc HTTP/1.1
+
+- name: post-measures
+  request: |
+    POST /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures HTTP/1.1
+    Content-Type: application/json
+
+    [
+      {
+        "timestamp": "2014-10-06T14:33:57",
+        "value": 43.1
+      },
+      {
+        "timestamp": "2014-10-06T14:34:12",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:20",
+        "value": 2
+      }
+    ]
+
+- name: post-measures-batch
+  request: |
+    POST /v1/batch/metrics/measures HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "{{ scenarios['create-metric']['response'].json['id'] }}":
+      [
+        {
+          "timestamp": "2014-10-06T14:34:12",
+          "value": 12
+        },
+        {
+          "timestamp": "2014-10-06T14:34:20",
+          "value": 2
+        }
+      ],
+      "{{ scenarios['create-metric-2']['response'].json['id'] }}":
+      [
+        {
+          "timestamp": "2014-10-06T16:12:12",
+          "value": 3
+        },
+        {
+          "timestamp": "2014-10-06T18:14:52",
+          "value": 4
+        }
+      ]
+    }
+
+- name: search-value-in-metric
+  request: |
+    POST /v1/search/metric?metric_id={{ scenarios['create-metric']['response'].json['id'] }} HTTP/1.1
+    Content-Type: application/json
+
+    {"and": [{">=": [{"+": 23}, 50]}, {"!=": 55}]}
+
+- name: create-metric-a
+  request: |
+    POST /v1/metric HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "archive_policy_name": "short"
+    }
+
+- name: post-measures-for-granularity-search
+  request: |
+    POST /v1/metric/{{ scenarios['create-metric-a']['response'].json['id'] }}/measures HTTP/1.1
+    Content-Type: application/json
+
+    [
+      {
+        "timestamp": "2014-10-06T14:34:12",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:14",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:16",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:18",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:20",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:22",
+        "value": 12
+      },
+      {
+        "timestamp": "2014-10-06T14:34:24",
+        "value": 12
+      }
+    ]
+
+- name: search-value-in-metrics-by-granularity
+  request: |
+    POST /v1/search/metric?metric_id={{ scenarios['create-metric-a']['response'].json['id'] }}&granularity=1second&granularity=1800s HTTP/1.1
+    Content-Type: application/json
+
+    {"=": 12}
+
+- name: get-measures
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures HTTP/1.1
+
+- name: get-measures-from
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?start=2014-10-06T14:34 HTTP/1.1
+
+- name: get-measures-max
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?aggregation=max HTTP/1.1
+
+- name: get-measures-granularity
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?granularity=1 HTTP/1.1
+
+- name: get-measures-refresh
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?refresh=true HTTP/1.1
+
+- name: get-measures-resample
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?granularity=1&resample=5 HTTP/1.1
+
+- name: get-measures-resample-calendar
+  request: GET /v1/metric/{{ scenarios['create-metric']['response'].json['id'] }}/measures?granularity=1&resample=W HTTP/1.1
+
+- name: create-resource-generic
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "75C44741-CC60-4033-804E-2D3098C7D2E9",
+      "user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D"
+    }
+
+- name: create-resource-type-instance
+  request: |
+    POST /v1/resource_type HTTP/1.1
+    Content-Type: application/json
+
+    {
+        "name": "instance",
+        "attributes": {
+            "display_name": {"type": "string", "required": true},
+            "flavor_id": {"type": "string", "required": true},
+            "image_ref": {"type": "string", "required": true},
+            "host": {"type": "string", "required": true},
+            "server_group": {"type": "string", "required": false},
+            "launched_at": {"type": "datetime", "required": false}
+        }
+    }
+
+- name: create-resource-instance
+  request: |
+    POST /v1/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "6868DA77-FA82-4E67-ABA9-270C5AE8CBCA",
+      "user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "started_at": "2014-01-02 23:23:34",
+      "ended_at": "2014-01-04 10:00:12",
+      "flavor_id": "2",
+      "launched_at": "2017-12-10T08:10:42Z",
+      "image_ref": "http://image",
+      "host": "compute1",
+      "display_name": "myvm",
+      "metrics": {}
+    }
+
+- name: list-resource-generic
+  request: GET /v1/resource/generic HTTP/1.1
+
+- name: list-resource-instance
+  request: GET /v1/resource/instance HTTP/1.1
+
+- name: list-resource-generic-details
+  request: GET /v1/resource/generic?details=true HTTP/1.1
+
+- name: list-resource-generic-limit-attrs
+  request: GET /v1/resource/generic?attrs=id&attrs=started_at&attrs=user_id HTTP/1.1
+
+- name: list-resource-generic-pagination
+  request: GET /v1/resource/generic?limit=2&sort=id:asc HTTP/1.1
+
+- name: search-resource-for-user
+  request: |
+    POST /v1/search/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"user_id": "{{ scenarios['create-resource-instance']['response'].json['user_id'] }}"}}
+
+- name: search-resource-for-host-like
+  request: |
+    POST /v1/search/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {"like": {"host": "compute%"}}
+
+- name: search-resource-for-host-like-filter
+  filter: host like "compute%"
+  request: |
+    POST /v1/search/resource/instance?filter={{ scenarios['search-resource-for-host-like-filter']['filter'] | urlencode }} HTTP/1.1
+    Content-Type: application/json
+
+- name: search-resource-for-user-details
+  request: |
+    POST /v1/search/resource/generic?details=true HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"user_id": "{{ scenarios['create-resource-instance']['response'].json['user_id'] }}"}}
+
+- name: search-resource-for-user-limit-attrs
+  request: |
+    POST /v1/search/resource/generic?attrs=id&attrs=started_at&attrs=user_id HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"user_id": "{{ scenarios['create-resource-instance']['response'].json['user_id'] }}"}}
+
+- name: search-resource-for-user-after-timestamp
+  request: |
+    POST /v1/search/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {"and": [
+      {"=": {"user_id": "{{ scenarios['create-resource-instance']['response'].json['user_id'] }}"}},
+      {">=": {"started_at": "2010-01-01"}}
+    ]}
+
+- name: search-resource-for-user-after-timestamp-filter
+  filter: user_id = "{{ scenarios['create-resource-instance']['response'].json['user_id'] }}" and started_at >= "2010-01-01"
+  request: |
+    POST /v1/search/resource/instance?filter={{ scenarios['search-resource-for-user-after-timestamp-filter']['filter'] | urlencode }} HTTP/1.1
+    Content-Type: application/json
+
+- name: search-resource-lifespan
+  request: |
+    POST /v1/search/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {">=": {"lifespan": "30 min"}}
+
+- name: get-resource-generic
+  request: GET /v1/resource/generic/{{ scenarios['create-resource-generic']['response'].json['id'] }} HTTP/1.1
+
+- name: get-instance
+  request: GET /v1/resource/instance/{{ scenarios['create-resource-instance']['response'].json['id'] }} HTTP/1.1
+
+- name: create-resource-instance-bis
+  request: |
+    POST /v1/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "AB0B5802-E79B-4C84-8998-9237F60D9CAE",
+      "user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "flavor_id": "2",
+      "image_ref": "http://image",
+      "host": "compute1",
+      "display_name": "myvm",
+      "metrics": {}
+    }
+
+- name: patch-resource
+  request: |
+    PATCH /v1/resource/instance/{{ scenarios['create-resource-instance']['response'].json['id'] }} HTTP/1.1
+    Content-Type: application/json
+
+    {"host": "compute2"}
+
+- name: get-patched-instance-history
+  request: GET /v1/resource/instance/{{ scenarios['create-resource-instance']['response'].json['id'] }}/history HTTP/1.1
+
+- name: get-patched-instance
+  request: GET /v1/resource/instance/{{ scenarios['create-resource-instance']['response'].json['id'] }} HTTP/1.1
+
+
+- name: create-resource-type
+  request: |
+    POST /v1/resource_type HTTP/1.1
+    Content-Type: application/json
+
+    {
+        "name": "my_custom_type",
+        "attributes": {
+            "myid": {"type": "uuid"},
+            "display_name": {"type": "string", "required": true},
+            "prefix": {"type": "string", "required": false, "max_length": 8, "min_length": 3},
+            "size": {"type": "number", "min": 5, "max": 32.8},
+            "enabled": {"type": "bool", "required": false},
+            "launched_at": {"type": "datetime", "required": false}
+        }
+    }
+
+- name: create-resource-type-2
+  request: |
+    POST /v1/resource_type HTTP/1.1
+    Content-Type: application/json
+
+    {"name": "my_other_type"}
+
+- name: get-resource-type
+  request: GET /v1/resource_type/my_custom_type HTTP/1.1
+
+- name: list-resource-type
+  request: GET /v1/resource_type HTTP/1.1
+
+- name: patch-resource-type
+  request: |
+    PATCH /v1/resource_type/my_custom_type HTTP/1.1
+    Content-Type: application/json-patch+json
+
+    [
+        {
+            "op": "add",
+            "path": "/attributes/awesome-stuff",
+            "value": {"type": "bool", "required": false}
+         },
+         {
+            "op": "add",
+            "path": "/attributes/required-stuff",
+            "value": {"type": "bool", "required": true, "options": {"fill": true}}
+         },
+         {
+            "op": "add",
+            "path": "/attributes/required-datetime",
+            "value": {"type": "datetime", "required": true, "options": {"fill": "2017-12-11T08:12:42Z"}}
+         },
+         {
+            "op": "remove",
+            "path": "/attributes/prefix"
+         }
+    ]
+
+
+- name: delete-resource-type
+  request: DELETE /v1/resource_type/my_custom_type HTTP/1.1
+
+- name: search-resource-history
+  request: |
+    POST /v1/search/resource/instance?history=true HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"id": "{{ scenarios['create-resource-instance']['response'].json['id'] }}"}}
+
+- name: search-resource-history-partial
+  request: |
+    POST /v1/search/resource/instance?history=true HTTP/1.1
+    Content-Type: application/json
+    Accept: application/json
+
+    {"and": [
+        {"=": {"host": "compute1"}},
+        {">=": {"revision_start": "{{ scenarios['get-instance']['response'].json['revision_start'] }}"}},
+        {"or": [{"<=": {"revision_end": "{{ scenarios['get-patched-instance']['response'].json['revision_start'] }}"}},
+            {"=": {"revision_end": null}}]}
+    ]}
+
+- name: search-resource-history-partial-filter
+  filter: host = 'compute1' and revision_start >= "{{ scenarios['get-instance']['response'].json['revision_start'] }}" and (revision_end <= "{{ scenarios['get-patched-instance']['response'].json['revision_start'] }}" or revision_end == null)
+  request: |
+    POST /v1/search/resource/instance?history=true&filter={{ scenarios['search-resource-history-partial-filter']['filter'] | urlencode }} HTTP/1.1
+    Content-Type: application/json
+    Accept: application/json
+
+    {"and": [
+        {"=": {"host": "compute1"}},
+        {">=": {"revision_start": "{{ scenarios['get-instance']['response'].json['revision_start'] }}"}},
+        {"or": [{"<=": {"revision_end": "{{ scenarios['get-patched-instance']['response'].json['revision_start'] }}"}},
+            {"=": {"revision_end": null}}]}
+    ]}
+
+- name: create-resource-instance-with-metrics
+  request: |
+    POST /v1/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "6F24EDD9-5A2F-4592-B708-FFBED821C5D2",
+      "user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "flavor_id": "2",
+      "image_ref": "http://image",
+      "host": "compute1",
+      "display_name": "myvm2",
+      "server_group": "my_autoscaling_group",
+      "metrics": {"cpu.util": "{{ scenarios['create-metric']['response'].json['id'] }}"}
+    }
+
+- name: create-resource-instance-with-dynamic-metrics
+  request: |
+    POST /v1/resource/instance HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "15e9c872-7ca9-11e4-a2da-2fb4032dfc09",
+      "user_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "project_id": "BD3A1E52-1C62-44CB-BF04-660BD88CD74D",
+      "flavor_id": "2",
+      "image_ref": "http://image",
+      "host": "compute2",
+      "display_name": "myvm3",
+      "server_group": "my_autoscaling_group",
+      "metrics": {"cpu.util": {"archive_policy_name": "{{ scenarios['create-archive-policy']['response'].json['name'] }}"}}
+    }
+
+- name: post-measures-batch-named
+  request: |
+    POST /v1/batch/resources/metrics/measures HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "{{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['id'] }}": {
+        "cpu.util": {
+            "archive_policy_name": "{{ scenarios['create-archive-policy']['response'].json['name'] }}",
+            "measures": [
+                { "timestamp": "2014-10-06T14:34:12", "value": 12 },
+                { "timestamp": "2014-10-06T14:34:20", "value": 2 }
+            ]
+        }
+      },
+      "{{ scenarios['create-resource-instance-with-metrics']['response'].json['id'] }}": {
+        "cpu.util": {
+            "archive_policy_name": "{{ scenarios['create-archive-policy']['response'].json['name'] }}",
+            "measures": [
+                { "timestamp": "2014-10-06T14:34:12", "value": 6 },
+                { "timestamp": "2014-10-06T14:34:20", "value": 25 }
+            ]
+        }
+      }
+    }
+
+- name: post-measures-batch-named-create
+  request: |
+    POST /v1/batch/resources/metrics/measures?create_metrics=true HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "{{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['id'] }}": {
+        "disk.io.test": [
+            { "timestamp": "2014-10-06T14:34:12", "value": 71 },
+            { "timestamp": "2014-10-06T14:34:20", "value": 81 }
+        ]
+      }
+    }
+
+- name: delete-resource-generic
+  request: DELETE /v1/resource/generic/{{ scenarios['create-resource-generic']['response'].json['id'] }} HTTP/1.1
+
+- name: create-resources-a
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AA19-BBE0-E1E2-2D3JDC7D289R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: create-resources-b
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AAEF-AA90-E1E2-2D3JDC7D289R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: create-resources-c
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AAEF-BCEF-E112-2D3JDC7D289R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: create-resources-d
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AAEF-BCEF-E112-2D15DC7D289R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: create-resources-e
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AAEF-BCEF-E112-2D3JDC30289R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: create-resources-f
+  request: |
+    POST /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "id": "340102AA-AAEF-BCEF-E112-2D15349D109R",
+      "user_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ",
+      "project_id": "BD3A1E52-KKKC-2123-BGLH-WWUUD88CD7WZ"
+    }
+
+- name: delete-resources-by-ids
+  request: |
+    DELETE /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "in": {
+        "id": [
+          "{{ scenarios['create-resources-a']['response'].json['id'] }}",
+          "{{ scenarios['create-resources-b']['response'].json['id'] }}",
+          "{{ scenarios['create-resources-c']['response'].json['id'] }}"
+        ]
+      }
+    }
+
+- name: delete-resources-by-time
+  request: |
+    DELETE /v1/resource/generic HTTP/1.1
+    Content-Type: application/json
+
+    {
+      ">=": {"started_at": "{{ scenarios['create-resources-f']['response'].json['started_at'] }}"}
+    }
+
+
+- name: get-resource-named-metrics-measures
+  request: GET /v1/resource/generic/{{ scenarios['create-resource-instance-with-metrics']['response'].json['id'] }}/metric/cpu.util/measures?start=2014-10-06T14:34 HTTP/1.1
+
+- name: post-resource-named-metrics-measures1
+  request: |
+    POST /v1/resource/generic/{{ scenarios['create-resource-instance-with-metrics']['response'].json['id'] }}/metric/cpu.util/measures HTTP/1.1
+    Content-Type: application/json
+
+    [
+      {
+        "timestamp": "2014-10-06T14:33:57",
+        "value": 3.5
+      },
+      {
+        "timestamp": "2014-10-06T14:34:12",
+        "value": 20
+      },
+      {
+        "timestamp": "2014-10-06T14:34:20",
+        "value": 9
+      }
+    ]
+
+- name: post-resource-named-metrics-measures2
+  request: |
+    POST /v1/resource/generic/{{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['id'] }}/metric/cpu.util/measures HTTP/1.1
+    Content-Type: application/json
+
+    [
+      {
+        "timestamp": "2014-10-06T14:33:57",
+        "value": 25.1
+      },
+      {
+        "timestamp": "2014-10-06T14:34:12",
+        "value": 4.5
+      },
+      {
+        "timestamp": "2014-10-06T14:34:20",
+        "value": 14.2
+      }
+    ]
+
+- name: get-across-metrics-measures-by-attributes-lookup
+  request: |
+    POST /v1/aggregation/resource/instance/metric/cpu.util?start=2014-10-06T14:34&aggregation=mean HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"server_group": "my_autoscaling_group"}}
+
+- name: get-across-metrics-measures-by-attributes-lookup-filter
+  filter: server_group = "my_autoscaling_group"
+  request: |
+    POST /v1/aggregation/resource/instance/metric/cpu.util?start=2014-10-06T14:34&aggregation=mean&filter={{ scenarios['get-across-metrics-measures-by-attributes-lookup-filter']['filter'] | urlencode }} HTTP/1.1
+    Content-Type: application/json
+
+- name: get-across-metrics-measures-by-attributes-lookup-groupby
+  request: |
+    POST /v1/aggregation/resource/instance/metric/cpu.util?groupby=host&groupby=flavor_id HTTP/1.1
+    Content-Type: application/json
+
+    {"=": {"server_group": "my_autoscaling_group"}}
+
+- name: get-across-metrics-measures-by-metric-ids
+  request: |
+    GET /v1/aggregation/metric?metric={{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }}&metric={{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['metrics']['cpu.util'] }}&start=2014-10-06T14:34&aggregation=mean HTTP/1.1
+
+- name: get-across-metrics-measures-by-metric-ids-reaggregate
+  request: |
+    GET /v1/aggregation/metric?metric={{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }}&metric={{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['metrics']['cpu.util'] }}&aggregation=mean&reaggregation=min HTTP/1.1
+
+- name: get-across-metrics-measures-by-metric-ids-fill
+  request: |
+    GET /v1/aggregation/metric?metric={{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }}&metric={{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['metrics']['cpu.util'] }}&fill=0&granularity=1 HTTP/1.1
+
+- name: append-metrics-to-resource
+  request: |
+    POST /v1/resource/generic/{{ scenarios['create-resource-instance-with-metrics']['response'].json['id'] }}/metric HTTP/1.1
+    Content-Type: application/json
+
+    {"memory": {"archive_policy_name": "low"}}
+
+- name: get-aggregates-by-metric-ids
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:34&stop=2017-10-06T14:34 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "operations": [
+        "*",
+        ["aggregate", "mean", [
+            "metric",
+            ["{{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }}", "mean"],
+            ["{{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['metrics']['cpu.util'] }}", "mean"]
+        ]],
+        4
+      ]
+    }
+
+- name: get-aggregates-between-metrics
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:34&stop=2017-10-06T14:34 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "operations": [
+        "absolute",
+        [
+          "**",
+          ["metric", "{{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }}", "mean"],
+          ["metric", "{{ scenarios['create-resource-instance-with-dynamic-metrics']['response'].json['metrics']['cpu.util'] }}", "mean"]
+        ]
+      ]
+    }
+
+
+- name: get-aggregates-by-metric-ids-fill
+  request: |
+    POST /v1/aggregates?fill=0&granularity=1 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "operations": "(* (aggregate mean (metric {{ scenarios['create-resource-instance-with-metrics']['response'].json['metrics']['cpu.util'] }} mean)) 4)"
+    }
+
+- name: get-aggregates-by-attributes-lookup
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:34 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "resource_type": "instance",
+      "search": {"=": {"server_group": "my_autoscaling_group"}},
+      "operations": ["*", ["aggregate", "mean", ["metric", "cpu.util", "mean"]], 4]
+    }
+
+- name: get-aggregates-by-attributes-lookup-wildcard
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:34 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "resource_type": "instance",
+      "search": {"=": {"server_group": "my_autoscaling_group"}},
+      "operations": ["*", ["aggregate", "mean", ["metric", "cpu*", "mean"]], 4]
+    }
+
+- name: get-aggregates-by-attributes-lookup-groupby
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:34&groupby=host&groupby=flavor_id HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "resource_type": "instance",
+      "search": "server_group='my_autoscaling_group'",
+      "operations": "(* (aggregate mean (metric cpu.util mean)) 4)"
+    }
+
+- name: get-capabilities
+  request: GET /v1/capabilities HTTP/1.1
+
+- name: get-status
+  request: GET /v1/status HTTP/1.1
+
+
+- name: use-case1-top-cpuutil-per-instances
+  request: |
+    POST /v1/aggregates?start=2014-10-06T14:00&stop=2014-10-06T15:00&groupby=original_resource_id&groupby=display_name&granularity=3600.0 HTTP/1.1
+    Content-Type: application/json
+
+    {
+      "resource_type": "instance",
+      "search": "server_group='my_autoscaling_group'",
+      "operations": "(metric cpu.util mean)"
+    }
+
+
diff --git a/doc/source/statsd.rst b/doc/source/statsd.rst
new file mode 100644
index 0000000000000000000000000000000000000000..969ce17970ae0cc50cd0ba74214f0c6c6496079f
--- /dev/null
+++ b/doc/source/statsd.rst
@@ -0,0 +1,45 @@
+===================
+Statsd Daemon Usage
+===================
+
+What Is It?
+===========
+`Statsd`_ is a network daemon that listens for statistics sent over the network
+using TCP or UDP, and then sends |aggregates| to another backend.
+
+Gnocchi provides a daemon that is compatible with the statsd protocol and can
+listen to |metrics| sent over the network, named `gnocchi-statsd`.
+
+.. _`Statsd`: https://github.com/etsy/statsd/
+
+How Does It Work?
+=================
+In order to enable statsd support in Gnocchi, you need to configure the
+`[statsd]` option group in the configuration file. You need to provide a
+|resource| ID that will be used as the main generic |resource| where all the
+|metrics| will be attached, a user and project id that will be associated with
+the |resource| and |metrics|, and an |archive policy| name that will be used to
+create the |metrics|.
+
+All the |metrics| will be created dynamically as the |metrics| are sent to
+`gnocchi-statsd`, and attached with the provided name to the |resource| ID you
+configured.
+
+The `gnocchi-statsd` may be scaled, but trade-offs have been made due to the
+nature of the statsd protocol. That means that if you use |metrics| of type
+`counter`_ or sampling (`c` in the protocol), you should always send those
+|metrics| to the same daemon – or not use them at all. The other supported
+types (`timing`_ and `gauges`_) do not suffer this limitation, but be aware
+that you might have more |measures| than expected if you send the same |metric|
+to different `gnocchi-statsd` servers, as neither their cache nor their flush
+delay are synchronized.
+
+.. _`counter`: https://github.com/etsy/statsd/blob/master/docs/metric_types.md#counting
+.. _`timing`: https://github.com/etsy/statsd/blob/master/docs/metric_types.md#timing
+.. _`gauges`: https://github.com/etsy/statsd/blob/master/docs/metric_types.md#gauges
+
+.. note ::
+   The statsd protocol support is incomplete: relative gauge values with +/-
+   and sets are not supported yet.
+
+.. include:: include/term-substitution.rst
diff --git a/gnocchi/__init__.py b/gnocchi/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..f56466ff6998fdf364e54eea8da3bdc48dbe7c19
--- /dev/null
+++ b/gnocchi/__init__.py
@@ -0,0 +1,21 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import pkg_resources
+
+try:
+    __version__ = pkg_resources.get_distribution(__name__).version
+except pkg_resources.DistributionNotFound:
+    # package is not installed
+    pass
diff --git a/gnocchi/amqp1d.py b/gnocchi/amqp1d.py
new file mode 100644
index 0000000000000000000000000000000000000000..8ccfac6e18344f98616cced79931283c0e2ff796
--- /dev/null
+++ b/gnocchi/amqp1d.py
@@ -0,0 +1,234 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import collections
+import itertools
+import uuid
+
+import daiquiri
+import proton.handlers
+import proton.reactor
+import six
+import ujson
+
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class BatchProcessor(object):
+    def __init__(self, conf):
+        self.conf = conf
+        self.incoming = incoming.get_driver(self.conf)
+        self.indexer = indexer.get_driver(self.conf)
+        self._ensure_resource_type_exists()
+
+        self._hosts = {}
+        self._measures = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+
+    def reset(self):
+        self._hosts.clear()
+        self._measures.clear()
+
+    def add_measures(self, host, name, measures):
+        host_id = "%s:%s" % (self.conf.amqp1d.resource_type,
+                             host.replace("/", "_"))
+        self._hosts[host_id] = host
+        self._measures[host_id][name].extend(measures)
+
+    def flush(self):
+        try:
+            self._flush()
+        except Exception:
+            LOG.error("Unepected error during flush()", exc_info=True)
+        self.reset()
+
+    def _flush(self):
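+        # A flush runs in three steps: resolve (or create) one resource per
+        # host, ensure a metric exists for every measure name seen since the
+        # last flush, then push all buffered measures in a single batch.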
+        archive_policies = {}
+        resources = self._get_resources(self._measures.keys())
+        for host_id, measures_by_names in six.iteritems(self._measures):
+            resource = resources[host_id]
+
+            names = set(measures_by_names.keys())
+            for name in names:
+                if name not in archive_policies:
+                    archive_policies[name] = (
+                        self.indexer.get_archive_policy_for_metric(name))
+            known_metrics = self.indexer.list_metrics(attribute_filter={
+                "and": [{"=": {"resource_id": resource.id}},
+                        {"in": {"name": names}}]
+            })
+            known_names = set((m.name for m in known_metrics))
+            already_exists_names = []
+            for name in (names - known_names):
+                try:
+                    m = self.indexer.create_metric(
+                        uuid.uuid4(),
+                        creator=self.conf.amqp1d.creator,
+                        resource_id=resource.id,
+                        name=name,
+                        archive_policy_name=archive_policies[name].name)
+                except indexer.NamedMetricAlreadyExists as e:
+                    already_exists_names.append(e.metric)
+                except indexer.IndexerException:
+                    LOG.error("Unexpected error, dropping metric %s",
+                              name, exc_info=True)
+                else:
+                    known_metrics.append(m)
+
+            if already_exists_names:
+                # Add metrics created in the meantime
+                known_names.update(already_exists_names)
+                known_metrics.extend(
+                    self.indexer.list_metrics(attribute_filter={
+                        "and": [{"=": {"resource_id": resource.id}},
+                                {"in": {"name": already_exists_names}}]
+                    }))
+
+            self.incoming.add_measures_batch(
+                dict((metric.id,
+                     measures_by_names[metric.name])
+                     for metric in known_metrics))
+
+    def _get_resources(self, host_ids):
+
+        resource_ids = set((utils.ResourceUUID(host_id,
+                                               self.conf.amqp1d.creator)
+                            for host_id in host_ids))
+
+        resources = self.indexer.list_resources(
+            resource_type=self.conf.amqp1d.resource_type,
+            attribute_filter={"in": {"id": resource_ids}})
+
+        resources_by_host_id = {r.original_resource_id: r for r in resources}
+
+        missing_host_ids = set(host_ids) - set(resources_by_host_id.keys())
+
+        for host_id in missing_host_ids:
+            resource_id = utils.ResourceUUID(host_id,
+                                             self.conf.amqp1d.creator)
+            try:
+                r = self.indexer.create_resource(
+                    self.conf.amqp1d.resource_type,
+                    resource_id,
+                    self.conf.amqp1d.creator,
+                    original_resource_id=host_id,
+                    host=self._hosts[host_id])
+            except indexer.ResourceAlreadyExists:
+                r = self.indexer.get_resource(
+                    self.conf.amqp1d.resource_type,
+                    resource_id)
+            resources_by_host_id[host_id] = r
+
+        return resources_by_host_id
+
+    def _ensure_resource_type_exists(self):
+        try:
+            self.resource_type = self.indexer.get_resource_type(
+                self.conf.amqp1d.resource_type)
+        except indexer.NoSuchResourceType:
+            try:
+                mgr = self.indexer.get_resource_type_schema()
+                rtype = mgr.resource_type_from_dict(
+                    self.conf.amqp1d.resource_type, {
+                        "host": {"type": "string", "required": True,
+                                 "min_length": 0, "max_length": 255},
+                    }, "creating")
+                self.indexer.create_resource_type(rtype)
+            except indexer.ResourceTypeAlreadyExists:
+                # Created concurrently by another process in the meantime
+                LOG.debug("Resource type %s already exists",
+                          self.conf.amqp1d.resource_type)
+            else:
+                LOG.info("Created resource type %s",
+                         self.conf.amqp1d.resource_type)
+            self.resource_type = self.indexer.get_resource_type(
+                self.conf.amqp1d.resource_type)
+        else:
+            LOG.info("Found resource type %s",
+                     self.conf.amqp1d.resource_type)
+
+
+class CollectdFormatHandler(object):
+    def __init__(self, processor):
+        self.processor = processor
+
+    @staticmethod
+    def _serialize_identifier(index, message):
+        """Based of FORMAT_VL from collectd/src/daemon/common.h.
+
+        The biggest difference is that we don't prepend the host and append the
+        index of the value, and don't use slash.
+
+        """
+        suffix = ("-%s" % message["dsnames"][index]
+                  if len(message["dsnames"]) > 1 else "")
+        return (message["plugin"] + ("-" + message["plugin_instance"]
+                                     if message["plugin_instance"] else "")
+                + "@"
+                + message["type"] + ("-" + message["type_instance"]
+                                     if message["type_instance"] else "")
+                + suffix)
+
+    def on_message(self, event):
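+        # itertools.groupby below only groups *consecutive* entries, so the
+        # measures must first be sorted by (host, name).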
+        json_message = ujson.loads(event.message.body)
+        timestamp = utils.dt_in_unix_ns(utils.utcnow())
+        measures_by_host_and_name = sorted((
+            (message["host"],
+             self._serialize_identifier(index, message),
+             value)
+            for message in json_message
+            for index, value in enumerate(message["values"])
+        ))
+        for (host, name), values in itertools.groupby(
+                measures_by_host_and_name, key=lambda x: x[0:2]):
+            measures = (incoming.Measure(timestamp, v[2]) for v in values)
+            self.processor.add_measures(host, name, measures)
+
+
+class AMQP1Server(proton.handlers.MessagingHandler):
+
+    def __init__(self, conf):
+        super(AMQP1Server, self).__init__()
+        self.peer_close_is_error = True
+        self.conf = conf
+
+        self.processor = BatchProcessor(conf)
+
+        # Only collectd format is supported for now
+        self.data_source_handler = {
+            "collectd": CollectdFormatHandler
+        }[self.conf.amqp1d.data_source](self.processor)
+
+    def on_start(self, event):
+        event.container.schedule(self.conf.amqp1d.flush_delay, self)
+
+    def on_message(self, event):
+        self.data_source_handler.on_message(event)
+
+    def on_timer_task(self, event):
+        event.container.schedule(self.conf.amqp1d.flush_delay, self)
+        self.processor.flush()
+
+
+def start():
+    conf = service.prepare_service()
+    server = proton.reactor.Container(AMQP1Server(conf))
+    try:
+        server.run()
+    except KeyboardInterrupt:
+        pass
diff --git a/gnocchi/archive_policy.py b/gnocchi/archive_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..e92773312dd3b094e72fbbf91cc733c18e666cf0
--- /dev/null
+++ b/gnocchi/archive_policy.py
@@ -0,0 +1,306 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright (c) 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import collections
+import datetime
+import operator
+
+import numpy
+from oslo_config import cfg
+from oslo_config import types
+import six
+
+from gnocchi import carbonara
+from gnocchi import utils
+
+
+ATTRGETTER_GRANULARITY = operator.attrgetter("granularity")
+
+
+class ArchivePolicy(object):
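+    """Define how long and at which granularities aggregates are kept.
+
+    A sketch of a policy keeping 5-minute aggregates for 30 days
+    (8640 points), with granularity given in seconds::
+
+        ArchivePolicy("low", 0, [{"granularity": 300, "points": 8640}])
+    """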
+
+    DEFAULT_AGGREGATION_METHODS = ()
+
+    # TODO(eglynn): figure out how to accommodate multi-valued aggregation
+    #               methods, where there is no longer just a single aggregate
+    #               value to be stored per-period (e.g. ohlc)
+    VALID_AGGREGATION_METHODS = set(
+        ('mean', 'sum', 'last', 'max', 'min',
+         'std', 'median', 'first', 'count')).union(
+             set((str(i) + 'pct' for i in six.moves.range(1, 100))))
+
+    VALID_AGGREGATION_METHODS = VALID_AGGREGATION_METHODS.union(
+        set(map(lambda s: "rate:" + s,
+                VALID_AGGREGATION_METHODS)))
+
+    # Set that contains all the above values + their minus equivalent (-mean)
+    # and the "*" entry.
+    VALID_AGGREGATION_METHODS_VALUES = VALID_AGGREGATION_METHODS.union(
+        set(('*',)),
+        set(map(lambda s: "-" + s,
+                VALID_AGGREGATION_METHODS)),
+        set(map(lambda s: "+" + s,
+                VALID_AGGREGATION_METHODS)))
+
+    def __init__(self, name, back_window, definition,
+                 aggregation_methods=None):
+        self.name = name
+        self.back_window = back_window
+        self.definition = []
+        for d in definition:
+            if isinstance(d, ArchivePolicyItem):
+                self.definition.append(d)
+            elif isinstance(d, dict):
+                self.definition.append(ArchivePolicyItem(**d))
+            elif len(d) == 2:
+                self.definition.append(
+                    ArchivePolicyItem(points=d[0], granularity=d[1]))
+            else:
+                raise ValueError(
+                    "Unable to understand policy definition %s" % d)
+
+        duplicate_granularities = [
+            granularity
+            for granularity, count in collections.Counter(
+                d.granularity for d in self.definition).items()
+            if count > 1
+        ]
+        if duplicate_granularities:
+            raise ValueError(
+                "More than one archive policy "
+                "uses granularity `%s'"
+                % utils.timespan_total_seconds(duplicate_granularities[0])
+            )
+
+        if aggregation_methods is None:
+            self.aggregation_methods = self.DEFAULT_AGGREGATION_METHODS
+        else:
+            self.aggregation_methods = set(aggregation_methods)
+
+    def get_aggregation(self, method, granularity):
+        # Find the timespan
+        for d in self.definition:
+            if d.granularity == granularity:
+                return carbonara.Aggregation(
+                    method, d.granularity, d.timespan)
+
+    def get_aggregations_for_method(self, method):
+        """Return a list of aggregation for a method.
+
+        List is sorted by granularity, desc.
+
+        :param method: Aggregation method.
+        """
+        return [carbonara.Aggregation(method, d.granularity, d.timespan)
+                for d in sorted(self.definition,
+                                key=ATTRGETTER_GRANULARITY, reverse=True)]
+
+    @property
+    def aggregations(self):
+        return [carbonara.Aggregation(method, d.granularity, d.timespan)
+                for d in sorted(self.definition, key=ATTRGETTER_GRANULARITY)
+                for method in self.aggregation_methods]
+
+    @property
+    def aggregation_methods(self):
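+        # '*' expands to every valid method. When all entries are prefixed
+        # with '+' or '-', they are applied against the default set:
+        # e.g. '-mean' removes mean, '+95pct' adds the 95th percentile.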
+        if '*' in self._aggregation_methods:
+            agg_methods = self.VALID_AGGREGATION_METHODS.copy()
+        elif all(map(lambda s: s.startswith('-') or s.startswith('+'),
+                     self._aggregation_methods)):
+            agg_methods = set(self.DEFAULT_AGGREGATION_METHODS)
+        else:
+            agg_methods = set(self._aggregation_methods)
+
+        for entry in self._aggregation_methods:
+            if entry:
+                if entry[0] == '-':
+                    agg_methods -= set((entry[1:],))
+                elif entry[0] == '+':
+                    agg_methods.add(entry[1:])
+
+        return agg_methods
+
+    @aggregation_methods.setter
+    def aggregation_methods(self, value):
+        value = set(value)
+        rest = value - self.VALID_AGGREGATION_METHODS_VALUES
+        if rest:
+            raise ValueError("Invalid value for aggregation_methods: %s" %
+                             rest)
+        self._aggregation_methods = value
+
+    @classmethod
+    def from_dict(cls, d):
+        return cls(d['name'],
+                   d['back_window'],
+                   d['definition'],
+                   d.get('aggregation_methods'))
+
+    def __eq__(self, other):
+        return (isinstance(other, ArchivePolicy)
+                and self.name == other.name
+                and self.back_window == other.back_window
+                and self.definition == other.definition
+                and self.aggregation_methods == other.aggregation_methods)
+
+    def jsonify(self):
+        return {
+            "name": self.name,
+            "back_window": self.back_window,
+            "definition": self.definition,
+            "aggregation_methods": self.aggregation_methods,
+        }
+
+    @property
+    def max_block_size(self):
+        # The biggest block size is the coarsest-grained archive definition
+        return sorted(self.definition,
+                      key=operator.attrgetter("granularity"))[-1].granularity
+
+
+OPTS = [
+    cfg.ListOpt(
+        'default_aggregation_methods',
+        item_type=types.String(
+            choices=ArchivePolicy.VALID_AGGREGATION_METHODS),
+        default=['mean', 'min', 'max', 'sum', 'std', 'count'],
+        help='Default aggregation methods to use in created archive policies'),
+]
+
+
+class ArchivePolicyItem(dict):
+    def __init__(self, granularity=None, points=None, timespan=None):
+        if (granularity is not None
+           and points is not None
+           and timespan is not None):
+            if timespan != granularity * points:
+                raise ValueError(
+                    u"timespan ≠ granularity × points")
+
+        if granularity is not None:
+            if not isinstance(granularity, numpy.timedelta64):
+                granularity = numpy.timedelta64(int(granularity * 10e8), 'ns')
+            if granularity <= numpy.timedelta64(0, 'ns'):
+                raise ValueError("Granularity should be > 0")
+
+        if points is not None and points <= 0:
+            raise ValueError("Number of points should be > 0")
+
+        if (timespan is not None
+           and not isinstance(timespan, numpy.timedelta64)):
+            timespan = numpy.timedelta64(int(timespan * 10e8), 'ns')
+
+        if granularity is None:
+            if points is None or timespan is None:
+                raise ValueError(
+                    "At least two of granularity/points/timespan "
+                    "must be provided")
+            granularity = timespan / float(points)
+
+        if points is None:
+            if timespan is None:
+                self['timespan'] = None
+            else:
+                points = int(timespan / granularity)
+                if points <= 0:
+                    raise ValueError("Calculated number of points is < 0")
+                self['timespan'] = granularity * points
+        else:
+            points = int(points)
+            self['timespan'] = granularity * points
+
+        self['points'] = points
+        self['granularity'] = granularity
+
+    @property
+    def granularity(self):
+        return self['granularity']
+
+    @property
+    def points(self):
+        return self['points']
+
+    @property
+    def timespan(self):
+        return self['timespan']
+
+    def jsonify(self):
+        """Return a dict representation with human readable values."""
+        return {
+            'timespan': six.text_type(
+                datetime.timedelta(
+                    seconds=utils.timespan_total_seconds(
+                        self.timespan)))
+            if self.timespan is not None
+            else None,
+            'granularity': six.text_type(
+                datetime.timedelta(
+                    seconds=utils.timespan_total_seconds(
+                        self.granularity))),
+            'points': self.points,
+        }
+
+    def serialize(self):
+        return {
+            'timespan': None
+            if self.timespan is None
+            else float(utils.timespan_total_seconds(self.timespan)),
+            'granularity': float(
+                utils.timespan_total_seconds(self.granularity)),
+            'points': self.points,
+        }
+
+
+DEFAULT_ARCHIVE_POLICIES = {
+    'bool': ArchivePolicy(
+        "bool", 3600, [
+            # 1 second resolution for 365 days
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 's'),
+                              timespan=numpy.timedelta64(365, 'D')),
+        ],
+        aggregation_methods=("last",),
+    ),
+    'low': ArchivePolicy(
+        "low", 0, [
+            # 5 minutes resolution for 30 days
+            ArchivePolicyItem(granularity=numpy.timedelta64(5, 'm'),
+                              timespan=numpy.timedelta64(30, 'D')),
+        ],
+    ),
+    'medium': ArchivePolicy(
+        "medium", 0, [
+            # 1 minute resolution for 7 days
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 'm'),
+                              timespan=numpy.timedelta64(7, 'D')),
+            # 1 hour resolution for 365 days
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 'h'),
+                              timespan=numpy.timedelta64(365, 'D')),
+        ],
+    ),
+    'high': ArchivePolicy(
+        "high", 0, [
+            # 1 second resolution for an hour
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 's'),
+                              timespan=numpy.timedelta64(1, 'h')),
+            # 1 minute resolution for a week
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 'm'),
+                              timespan=numpy.timedelta64(7, 'D')),
+            # 1 hour resolution for a year
+            ArchivePolicyItem(granularity=numpy.timedelta64(1, 'h'),
+                              timespan=numpy.timedelta64(365, 'D')),
+        ],
+    ),
+}
diff --git a/gnocchi/calendar.py b/gnocchi/calendar.py
new file mode 100644
index 0000000000000000000000000000000000000000..9134ea9713636a047080ea11d7426fcc4dcd41c9
--- /dev/null
+++ b/gnocchi/calendar.py
@@ -0,0 +1,100 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Gregorian calendar grouping helpers."""
+
+import numpy
+
+
+def _month_of_year(datetimes):
+    return (datetimes.astype('datetime64[M]', copy=False) -
+            datetimes.astype('datetime64[Y]', copy=False) + 1)
+
+
+def month_of_year(datetimes):
+    """Return the calendar month of given dates."""
+    return _month_of_year(datetimes).astype(int)
+
+
+def iso_week_of_year(datetimes):
+    """Return the ISO week of the year of given dates."""
+    dates_offset = (datetimes.astype('datetime64[D]', copy=False) +
+                    numpy.timedelta64(3, 'D')).astype(
+                        'datetime64[W]', copy=False)
+    return numpy.ceil(
+        (dates_offset.astype('datetime64[D]', copy=False) -
+         dates_offset.astype('datetime64[Y]', copy=False) + 1).astype(int) /
+        7.0)
+
+
+def week_and_year(datetimes):
+    """Return the week of the year, grouped on Sunday, for given dates."""
+    return ((datetimes.astype('datetime64[D]', copy=False) +
+             numpy.timedelta64(4, 'D')).astype('datetime64[W]', copy=False) -
+            numpy.timedelta64(4, 'D'))
+
+
+def day_of_year(datetimes):
+    """Return the day of the year of given dates."""
+    return (datetimes.astype('datetime64[D]', copy=False) -
+            datetimes.astype('datetime64[Y]', copy=False)).astype(int)
+
+
+def day_of_month(datetimes):
+    """Return the day of the month of given dates."""
+    return (datetimes.astype('datetime64[D]', copy=False) -
+            datetimes.astype('datetime64[M]', copy=False) + 1).astype(int)
+
+
+def day_of_week(datetimes):
+    """Return the day of the week of given dates. Sunday(0) to Saturday(6)."""
+    return (datetimes.astype('datetime64[D]', copy=False) +
+            numpy.timedelta64(4, 'D')).astype(int) % 7
+
+
+def month_and_year(datetimes):
+    """Return the month and year of given dates."""
+    return datetimes.astype('datetime64[M]', copy=False)
+
+
+def quarter_and_year(datetimes):
+    """Return the quarter per year of given dates."""
+    return (((_month_of_year(datetimes) - 1) // 3) * 3) + year(datetimes)
+
+
+def quarter_of_year(datetimes):
+    """Return the quarter of the year of given dates."""
+    return ((_month_of_year(datetimes) - 1) // 3 + 1).astype(int)
+
+
+def half_and_year(datetimes):
+    """Return the half per year of given dates."""
+    return (((_month_of_year(datetimes) - 1) // 6) * 6) + year(datetimes)
+
+
+def half_of_year(datetimes):
+    """Return the half of the year of given dates."""
+    return ((_month_of_year(datetimes) - 1) // 6 + 1).astype(int)
+
+
+def year(datetimes):
+    """Return the year of given dates."""
+    return datetimes.astype('datetime64[Y]', copy=False)
+
+
+GROUPINGS = {
+    'Y': year,
+    'H': half_and_year,
+    'Q': quarter_and_year,
+    'M': month_and_year,
+    'W': week_and_year}
diff --git a/gnocchi/carbonara.py b/gnocchi/carbonara.py
new file mode 100644
index 0000000000000000000000000000000000000000..02eeacca89b4987d3ec5b854bce00f7b0dd32150
--- /dev/null
+++ b/gnocchi/carbonara.py
@@ -0,0 +1,917 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2018 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Time series data manipulation, better with pancetta."""
+
+import collections
+import functools
+import math
+import operator
+import random
+import re
+import struct
+import time
+
+import lz4.block
+import numpy
+import six
+
+from gnocchi import calendar
+
+
+UNIX_UNIVERSAL_START64 = numpy.datetime64("1970", 'ns')
+ONE_SECOND = numpy.timedelta64(1, 's')
+
+
+class BeforeEpochError(Exception):
+    """Error raised when a timestamp before Epoch is used."""
+
+    def __init__(self, timestamp):
+        self.timestamp = timestamp
+        super(BeforeEpochError, self).__init__(
+            "%s is before Epoch" % timestamp)
+
+
+class UnknownAggregationMethod(Exception):
+    """Error raised when the aggregation method is unknown."""
+    def __init__(self, agg):
+        self.aggregation_method = agg
+        super(UnknownAggregationMethod, self).__init__(
+            "Unknown aggregation method `%s'" % agg)
+
+
+class InvalidData(ValueError):
+    """Error raised when data are corrupted."""
+    def __init__(self):
+        super(InvalidData, self).__init__("Unable to unpack, invalid data")
+
+
+def datetime64_to_epoch(dt):
+    return (dt - UNIX_UNIVERSAL_START64) / ONE_SECOND
+
+
+def round_timestamp(ts, freq):
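+    # Floor each timestamp to the previous multiple of `freq` since epoch.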
+    return UNIX_UNIVERSAL_START64 + numpy.floor(
+        (ts - UNIX_UNIVERSAL_START64) / freq) * freq
+
+
+TIMESERIES_ARRAY_DTYPE = [('timestamps', '<datetime64[ns]'),
+                          ('values', '<d')]
+
+
+def make_timeseries(timestamps, values):
+    """Return a Numpy array representing a timeseries.
+
+    This array correctly specifies the data types, which is important for
+    Numpy to operate quickly.
+    """
+    num_points = len(timestamps)
+    if num_points != len(values):
+        raise ValueError("Timestamps and values must have the same length")
+    arr = numpy.zeros(num_points, dtype=TIMESERIES_ARRAY_DTYPE)
+    arr['timestamps'] = timestamps
+    arr['values'] = values
+    return arr
+
+
+def combine_timeseries(ts1, ts2):
+    """Combine a timeseries into this one.
+
+    The timeseries does not need to be sorted.
+
+    If a timestamp is present in both `ts1` and `ts2`, then value from `ts1`
+    is used.
+
+    :param ts: The timeseries to combine.
+    :return: A new timeseries.
+    """
+    ts = numpy.concatenate((ts1, ts2))
+    _, index = numpy.unique(ts['timestamps'], return_index=True)
+    return ts[index]
+
+
+class GroupedTimeSeries(object):
+    def __init__(self, ts, granularity, start=None):
+        # NOTE(sileht): The whole class assumes ts is ordered and has no
+        # duplicate timestamps. It uses numpy.unique, which sorts the list,
+        # but we always assume the order to be the same as the input.
+        self.granularity = granularity
+        self.can_derive = isinstance(granularity, numpy.timedelta64)
+        self.start = start
+        if start is None:
+            self._ts = ts
+            self._ts_for_derive = ts
+        else:
+            self._ts = ts[numpy.searchsorted(ts['timestamps'], start):]
+            if self.can_derive:
+                start_derive = start - granularity
+                self._ts_for_derive = ts[
+                    numpy.searchsorted(ts['timestamps'], start_derive):
+                ]
+        if self.can_derive:
+            self.indexes = round_timestamp(self._ts['timestamps'], granularity)
+        elif calendar.GROUPINGS.get(granularity):
+            self.indexes = calendar.GROUPINGS.get(granularity)(
+                self._ts['timestamps'])
+        self.tstamps, self.counts = numpy.unique(self.indexes,
+                                                 return_counts=True)
+
+    def mean(self):
+        series = self.sum()
+        series['values'] /= self.counts
+        return series
+
+    def sum(self):
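+        # numpy.repeat expands each group index by its count so that every
+        # point is labelled; bincount with weights then sums per group.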
+        return make_timeseries(self.tstamps, numpy.bincount(
+            numpy.repeat(numpy.arange(self.counts.size), self.counts),
+            weights=self._ts['values']))
+
+    def min(self):
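+        # Sort values ascending, then write them back per group in reverse
+        # order: the last write for each group index is the group minimum.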
+        ordered = self._ts['values'].argsort()
+        uniq_inv = numpy.repeat(numpy.arange(self.counts.size), self.counts)
+        values = numpy.zeros(self.tstamps.size)
+        values[uniq_inv[ordered][::-1]] = self._ts['values'][ordered][::-1]
+        return make_timeseries(self.tstamps, values)
+
+    def max(self):
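+        # Mirror of min(): writing values in ascending order leaves each
+        # group's maximum as the last (winning) write.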
+        ordered = self._ts['values'].argsort()
+        uniq_inv = numpy.repeat(numpy.arange(self.counts.size), self.counts)
+        values = numpy.zeros(self.tstamps.size)
+        values[uniq_inv[ordered]] = self._ts['values'][ordered]
+        return make_timeseries(self.tstamps, values)
+
+    def median(self):
+        ordered = numpy.lexsort((self._ts['values'], self.indexes))
+        # TODO(gordc): can use np.divmod when centos supports numpy 1.13
+        mid_diff = numpy.floor_divide(self.counts, 2)
+        odd = numpy.mod(self.counts, 2)
+        mid_floor = (numpy.cumsum(self.counts) - 1) - mid_diff
+        mid_ceil = mid_floor + (odd + 1) % 2
+        return make_timeseries(
+            self.tstamps, (self._ts['values'][ordered][mid_floor] +
+                           self._ts['values'][ordered][mid_ceil]) / 2.0)
+
+    def std(self):
+        mean_ts = self.mean()
+        diff_sq = numpy.square(self._ts['values'] -
+                               numpy.repeat(mean_ts['values'], self.counts))
+        bin_sum = numpy.bincount(
+            numpy.repeat(numpy.arange(self.counts.size), self.counts),
+            weights=diff_sq)
+        return make_timeseries(self.tstamps[self.counts > 1],
+                               numpy.sqrt(bin_sum[self.counts > 1] /
+                                          (self.counts[self.counts > 1] - 1)))
+
+    def count(self):
+        return make_timeseries(self.tstamps, self.counts)
+
+    def last(self):
+        cumcounts = numpy.cumsum(self.counts) - 1
+        values = self._ts['values'][cumcounts]
+        return make_timeseries(self.tstamps, values)
+
+    def first(self):
+        cumcounts = numpy.cumsum(self.counts) - self.counts
+        values = self._ts['values'][cumcounts]
+        return make_timeseries(self.tstamps, values)
+
+    def quantile(self, q):
+        ordered = numpy.lexsort((self._ts['values'], self.indexes))
+        min_pos = numpy.cumsum(self.counts) - self.counts
+        real_pos = min_pos + (self.counts - 1) * (q / 100)
+        floor_pos = numpy.floor(real_pos).astype(numpy.int, copy=False)
+        ceil_pos = numpy.ceil(real_pos).astype(numpy.int, copy=False)
+        values = (
+            self._ts['values'][ordered][floor_pos] * (ceil_pos - real_pos) +
+            self._ts['values'][ordered][ceil_pos] * (real_pos - floor_pos))
+        # NOTE(gordc): above code doesn't compute proper value if pct lands on
+        # exact index, it sets it to 0. we need to set it properly here
+        exact_pos = numpy.equal(floor_pos, ceil_pos)
+        values[exact_pos] = self._ts['values'][ordered][floor_pos][exact_pos]
+        return make_timeseries(self.tstamps, values)
+
+    def derived(self):
+        if not self.can_derive:
+            raise TypeError('Cannot derive aggregates on calendar '
+                            'granularities.')
+        timestamps = self._ts_for_derive['timestamps'][1:]
+        values = numpy.diff(self._ts_for_derive['values'])
+        # FIXME(sileht): create some alternative __init__ to avoid creating
+        # useless Numpy object, recounting, timestamps convertion, ...
+        return GroupedTimeSeries(make_timeseries(timestamps, values),
+                                 self.granularity, self.start)
+
+
+class TimeSerie(object):
+    """A representation of series of a timestamp with a value.
+
+    Duplicate timestamps are not allowed and will be filtered to use the
+    last in the group when the TimeSerie is created or extended.
+    """
+
+    def __init__(self, ts=None):
+        if ts is None:
+            ts = make_timeseries([], [])
+        self.ts = ts
+
+    def __iter__(self):
+        return six.moves.zip(self.ts['timestamps'], self.ts['values'])
+
+    @classmethod
+    def from_data(cls, timestamps=None, values=None):
+        return cls(make_timeseries(timestamps, values))
+
+    def __eq__(self, other):
+        return (isinstance(other, TimeSerie) and
+                numpy.array_equal(self.ts, other.ts))
+
+    def __getitem__(self, key):
+        if isinstance(key, numpy.datetime64):
+            idx = numpy.searchsorted(self.timestamps, key)
+            if self.timestamps[idx] == key:
+                return self[idx]
+            raise KeyError(key)
+        if isinstance(key, slice):
+            if isinstance(key.start, numpy.datetime64):
+                start = numpy.searchsorted(self.timestamps, key.start)
+            else:
+                start = key.start
+            if isinstance(key.stop, numpy.datetime64):
+                stop = numpy.searchsorted(self.timestamps, key.stop)
+            else:
+                stop = key.stop
+            key = slice(start, stop, key.step)
+        return self.ts[key]
+
+    def _merge(self, ts):
+        """Merge a Numpy timeseries into this one."""
+        self.ts = combine_timeseries(ts, self.ts)
+
+    def merge(self, ts):
+        """Merge a TimeSerie into this one."""
+        return self._merge(ts.ts)
+
+    def set_values(self, values):
+        """Set values into this timeseries.
+
+        :param values: A list of tuple (timestamp, value).
+        """
+        return self._merge(values)
+
+    def __len__(self):
+        return len(self.ts)
+
+    @property
+    def timestamps(self):
+        return self.ts['timestamps']
+
+    @property
+    def values(self):
+        return self.ts['values']
+
+    @property
+    def first(self):
+        try:
+            return self.timestamps[0]
+        except IndexError:
+            return
+
+    @property
+    def last(self):
+        try:
+            return self.timestamps[-1]
+        except IndexError:
+            return
+
+    def group_serie(self, granularity, start=None):
+        # NOTE(jd) Our whole serialization system is based on Epoch, and we
+        # store unsigned integer, so we can't store anything before Epoch.
+        # Sorry!
+        if len(self.ts) != 0 and self.first < UNIX_UNIVERSAL_START64:
+            raise BeforeEpochError(self.first)
+
+        return GroupedTimeSeries(self.ts, granularity, start)
+
+    @staticmethod
+    def _compress(payload):
+        # FIXME(jd) lz4 > 0.9.2 returns bytearray instead of bytes. But Cradox
+        # does not accept bytearray but only bytes, so make sure that we have a
+        # byte type returned.
+        return memoryview(lz4.block.compress(payload)).tobytes()
+
+
+class BoundTimeSerie(TimeSerie):
+    def __init__(self, ts=None, block_size=None, back_window=0):
+        """A time serie that is limited in size.
+
+        Used to represent the full-resolution buffer of incoming raw
+        datapoints associated with a metric.
+
+        The maximum size of this time serie is expressed as a number of
+        blocks, called the back window. When the timeserie is truncated,
+        a whole block is removed.
+
+        You cannot set a value using a timestamp that is prior to the last
+        timestamp minus this number of blocks. By default, a back window of 0
+        does not allow you to go back in time prior to the current block being
+        used.
+
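+        For example, with a one-hour block size and a back window of 2, the
+        serie keeps the current hour's block plus the two previous ones;
+        older points are dropped when the serie is truncated.
+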
+        """
+        super(BoundTimeSerie, self).__init__(ts)
+        self.block_size = block_size
+        self.back_window = back_window
+
+    @classmethod
+    def from_data(cls, timestamps=None, values=None,
+                  block_size=None, back_window=0):
+        return cls(make_timeseries(timestamps, values),
+                   block_size=block_size, back_window=back_window)
+
+    def __eq__(self, other):
+        return (isinstance(other, BoundTimeSerie)
+                and super(BoundTimeSerie, self).__eq__(other)
+                and self.block_size == other.block_size
+                and self.back_window == other.back_window)
+
+    def set_values(self, values, before_truncate_callback=None):
+        """Set the timestamps and values in this timeseries.
+
+        :param values: A sorted timeseries array.
+        :param before_truncate_callback: A callback function to call before
+                                         truncating the BoundTimeSerie to its
+                                         maximum size.
+        :return: None or the return value of before_truncate_callback
+        """
+        if self.block_size is not None and len(self.ts) != 0:
+            index = numpy.searchsorted(values['timestamps'],
+                                       self.first_block_timestamp())
+            values = values[index:]
+        super(BoundTimeSerie, self).set_values(values)
+        if before_truncate_callback:
+            return_value = before_truncate_callback(self)
+        else:
+            return_value = None
+        self._truncate()
+        return return_value
+
+    _SERIALIZATION_TIMESTAMP_VALUE_LEN = struct.calcsize("<Qd")
+    _SERIALIZATION_TIMESTAMP_LEN = struct.calcsize("<Q")
+
+    @classmethod
+    def unserialize(cls, data, block_size, back_window):
+        uncompressed = lz4.block.decompress(data)
+        nb_points = (
+            len(uncompressed) // cls._SERIALIZATION_TIMESTAMP_VALUE_LEN
+        )
+
+        try:
+            timestamps = numpy.frombuffer(uncompressed, dtype='<Q',
+                                          count=nb_points)
+            values = numpy.frombuffer(
+                uncompressed, dtype='<d',
+                offset=nb_points * cls._SERIALIZATION_TIMESTAMP_LEN)
+        except ValueError:
+            raise InvalidData
+
+        return cls.from_data(
+            numpy.cumsum(timestamps),
+            values,
+            block_size=block_size,
+            back_window=back_window)
+
+    def serialize(self):
+        # NOTE(jd) Use a double delta encoding for timestamps
+        timestamps = numpy.empty(self.timestamps.size, dtype='<Q')
+        timestamps[0] = self.first
+        timestamps[1:] = numpy.diff(self.timestamps)
+        return self._compress(timestamps.tobytes() + self.values.tobytes())
+
+    @classmethod
+    def benchmark(cls):
+        """Run a speed benchmark!"""
+        points = SplitKey.POINTS_PER_SPLIT
+        serialize_times = 50
+
+        now = numpy.datetime64("2015-04-03 23:11")
+        timestamps = numpy.sort(numpy.array(
+            [now + numpy.timedelta64(random.randint(1000000, 10000000), 'us')
+             for i in six.moves.range(points)]))
+
+        print(cls.__name__)
+        print("=" * len(cls.__name__))
+
+        for title, values in [
+                ("Simple continuous range", six.moves.range(points)),
+                ("All 0", [float(0)] * points),
+                ("All 1", [float(1)] * points),
+                ("0 and 1", [0, 1] * (points // 2)),
+                ("1 and 0 random",
+                 [random.randint(0, 1)
+                  for x in six.moves.range(points)]),
+                ("Small number random pos/neg",
+                 [random.randint(-100000, 10000)
+                  for x in six.moves.range(points)]),
+                ("Small number random pos",
+                 [random.randint(0, 20000) for x in six.moves.range(points)]),
+                ("Small number random neg",
+                 [random.randint(-20000, 0) for x in six.moves.range(points)]),
+                ("Sin(x)", list(map(math.sin, six.moves.range(points)))),
+                ("random ", [random.random()
+                             for x in six.moves.range(points)]),
+        ]:
+            print(title)
+            ts = cls.from_data(timestamps, values)
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                s = ts.serialize()
+            t1 = time.time()
+            print("  Serialization speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+            print("   Bytes per point: %.2f" % (len(s) / float(points)))
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                cls.unserialize(s, ONE_SECOND, 1)
+            t1 = time.time()
+            print("  Unserialization speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+
+    def first_block_timestamp(self):
+        """Return the timestamp of the first block."""
+        rounded = round_timestamp(self.timestamps[-1], self.block_size)
+        return rounded - (self.block_size * self.back_window)
+
+    def _truncate(self):
+        """Truncate the timeserie."""
+        if self.block_size is not None and len(self.ts) != 0:
+            # Remove as many whole blocks as needed to keep the size
+            # <= max_size. A block is a number of "seconds" (a timespan).
+            self.ts = self[self.first_block_timestamp():]
+
+
+@functools.total_ordering
+class SplitKey(object):
+    """A class representing a split key.
+
+    A split key is basically a timestamp that can be used to split
+    `AggregatedTimeSerie` objects into multiple parts. Each part will contain
+    up to `SplitKey.POINTS_PER_SPLIT` points. The split keys for a given
+    granularity are regularly spaced.
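+
+    For example, with a one-second granularity each split covers
+    3600 seconds, i.e. one hour of data.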
+    """
+
+    __slots__ = (
+        'key',
+        'sampling',
+    )
+
+    POINTS_PER_SPLIT = 3600
+
+    def __init__(self, value, sampling):
+        if isinstance(value, SplitKey):
+            self.key = value.key
+        else:
+            self.key = value
+
+        self.sampling = sampling
+
+    @classmethod
+    def from_timestamp_and_sampling(cls, timestamp, sampling):
+        return cls(
+            round_timestamp(
+                timestamp,
+                freq=sampling * cls.POINTS_PER_SPLIT),
+            sampling)
+
+    def __next__(self):
+        """Get the split key of the next split.
+
+        :return: A `SplitKey` object.
+        """
+        return self.__class__(
+            self.key + self.sampling * self.POINTS_PER_SPLIT,
+            self.sampling)
+
+    next = __next__
+
+    def __iter__(self):
+        return self
+
+    def __hash__(self):
+        return hash(str(self.key.astype('datetime64[ns]')) +
+                    str(self.sampling.astype('timedelta64[ns]')))
+
+    def _compare(self, op, other):
+        if isinstance(other, SplitKey):
+            if self.sampling != other.sampling:
+                if op == operator.eq:
+                    return False
+                if op == operator.ne:
+                    return True
+                raise TypeError(
+                    "Cannot compare %s with different sampling" %
+                    self.__class__.__name__)
+            return op(self.key, other.key)
+        if isinstance(other, numpy.datetime64):
+            return op(self.key, other)
+        raise TypeError("Cannot compare %r with %r" % (self, other))
+
+    def __lt__(self, other):
+        return self._compare(operator.lt, other)
+
+    def __eq__(self, other):
+        return self._compare(operator.eq, other)
+
+    def __ne__(self, other):
+        # neither total_ordering nor py2 sets ne as the opposite of eq
+        return self._compare(operator.ne, other)
+
+    def __str__(self):
+        return str(float(self))
+
+    def __float__(self):
+        return datetime64_to_epoch(self.key)
+
+    def __repr__(self):
+        return "<%s: %s / %s>" % (self.__class__.__name__,
+                                  self.key,
+                                  self.sampling)
+
+
+Aggregation = collections.namedtuple(
+    "Aggregation",
+    ["method", "granularity", "timespan"],
+)
+
+
+class AggregatedTimeSerie(TimeSerie):
+
+    _AGG_METHOD_PCT_RE = re.compile(r"([1-9][0-9]?)pct")
+
+    PADDED_SERIAL_LEN = struct.calcsize("<?d")
+    COMPRESSED_SERIAL_LEN = struct.calcsize("<Hd")
+    COMPRESSED_TIMESTAMP_LEN = struct.calcsize("<H")
+
+    def __init__(self, aggregation, ts=None):
+        """A time serie that is downsampled.
+
+        Used to represent the downsampled timeserie for a single
+        granularity/aggregation-function pair stored for a metric.
+
+        """
+        super(AggregatedTimeSerie, self).__init__(ts)
+        self.aggregation = aggregation
+
+    def resample(self, sampling):
+        return AggregatedTimeSerie.from_grouped_serie(
+            self.group_serie(sampling),
+            Aggregation(self.aggregation.method, sampling,
+                        self.aggregation.timespan))
+
+    @classmethod
+    def from_data(cls, aggregation, timestamps, values):
+        return cls(aggregation=aggregation,
+                   ts=make_timeseries(timestamps, values))
+
+    @staticmethod
+    def _get_agg_method(aggregation_method):
+        q = None
+        m = AggregatedTimeSerie._AGG_METHOD_PCT_RE.match(aggregation_method)
+        if m:
+            q = float(m.group(1))
+            aggregation_method_func_name = 'quantile'
+        else:
+            if not hasattr(GroupedTimeSeries, aggregation_method):
+                raise UnknownAggregationMethod(aggregation_method)
+            aggregation_method_func_name = aggregation_method
+        return aggregation_method_func_name, q
+
+    def truncate(self, oldest_point=None):
+        """Truncate the time series up to oldest_point excluded.
+
+        :param oldest_point: Oldest point to keep from, this excluded.
+                             Default is the aggregation timespan.
+        :type oldest_point: numpy.datetime64 or numpy.timedelta64
+        :return: The oldest point that could have been kept.
+        """
+        last = self.last
+        if last is None:
+            return
+        if oldest_point is None:
+            oldest_point = self.aggregation.timespan
+            if oldest_point is None:
+                return
+        if isinstance(oldest_point, numpy.timedelta64):
+            oldest_point = last - oldest_point
+        index = numpy.searchsorted(self.ts['timestamps'], oldest_point,
+                                   side='right')
+        self.ts = self.ts[index:]
+        return oldest_point
+
+    def split(self):
+        # NOTE(sileht): We previously used groupby with
+        # SplitKey.from_timestamp_and_sampling, but that is slow: pandas can
+        # group any kind of DataFrame, whereas our timestamps are ordered,
+        # so we don't need to iterate over the whole series.
+        freq = self.aggregation.granularity * SplitKey.POINTS_PER_SPLIT
+        keys, counts = numpy.unique(
+            round_timestamp(self.timestamps, freq),
+            return_counts=True)
+        start = 0
+        for key, count in six.moves.zip(keys, counts):
+            end = start + count
+            yield (SplitKey(key, self.aggregation.granularity),
+                   AggregatedTimeSerie(self.aggregation, self[start:end]))
+            start = end
+
+    @classmethod
+    def from_timeseries(cls, timeseries, aggregation):
+        # NOTE(gordc): Indices must be unique across all timeseries. Also,
+        # timeseries should be a list ordered both across and within series.
+        if timeseries:
+            ts = numpy.concatenate([ts.ts for ts in timeseries])
+        else:
+            ts = None
+        return cls(aggregation=aggregation, ts=ts)
+
+    @classmethod
+    def from_grouped_serie(cls, grouped_serie, aggregation):
+        if aggregation.method.startswith("rate:"):
+            grouped_serie = grouped_serie.derived()
+            aggregation_method_name = aggregation.method[5:]
+        else:
+            aggregation_method_name = aggregation.method
+        agg_name, q = cls._get_agg_method(aggregation_method_name)
+        return cls(aggregation,
+                   ts=cls._resample_grouped(grouped_serie, agg_name, q))
+
+    def __eq__(self, other):
+        return (isinstance(other, AggregatedTimeSerie)
+                and super(AggregatedTimeSerie, self).__eq__(other)
+                and self.aggregation == other.aggregation)
+
+    def __repr__(self):
+        return "<%s 0x%x granularity=%s agg_method=%s>" % (
+            self.__class__.__name__,
+            id(self),
+            self.aggregation.granularity,
+            self.aggregation.method,
+        )
+
+    @staticmethod
+    def is_compressed(serialized_data):
+        """Check whatever the data was serialized with compression."""
+        return six.indexbytes(serialized_data, 0) == ord("c")
+
+    @classmethod
+    def unserialize(cls, data, key, aggregation):
+        """Unserialize an aggregated timeserie.
+
+        :param data: Raw data buffer.
+        :param key: A :class:`SplitKey` key.
+        :param aggregation: The Aggregation object of this timeseries.
+        """
+        x, y = [], []
+
+        if data:
+            if cls.is_compressed(data):
+                # Compressed format
+                uncompressed = lz4.block.decompress(
+                    memoryview(data)[1:].tobytes())
+                nb_points = len(uncompressed) // cls.COMPRESSED_SERIAL_LEN
+
+                try:
+                    y = numpy.frombuffer(uncompressed, dtype='<H',
+                                         count=nb_points)
+                    x = numpy.frombuffer(
+                        uncompressed, dtype='<d',
+                        offset=nb_points*cls.COMPRESSED_TIMESTAMP_LEN)
+                except ValueError:
+                    raise InvalidData()
+                y = numpy.cumsum(y * key.sampling) + key.key
+            else:
+                # Padded format
+                try:
+                    everything = numpy.frombuffer(data, dtype=[('b', '<?'),
+                                                               ('v', '<d')])
+                except ValueError:
+                    raise InvalidData()
+                index = numpy.nonzero(everything['b'])[0]
+                y = index * key.sampling + key.key
+                x = everything['v'][index]
+
+        return cls.from_data(aggregation, y, x)
+
+    def get_split_key(self, timestamp=None):
+        """Return the split key for a particular timestamp.
+
+        :param timestamp: If None, the first timestamp of the timeseries
+                          is used.
+        :return: A SplitKey object or None if the timeseries is empty.
+        """
+        if timestamp is None:
+            timestamp = self.first
+            if timestamp is None:
+                return
+        return SplitKey.from_timestamp_and_sampling(
+            timestamp, self.aggregation.granularity)
+
+    def serialize(self, start, compressed=True):
+        """Serialize an aggregated timeserie.
+
+        The serialization starts with a byte that indicates the serialization
+        format: 'c' for the compressed format, '\x00' or '\x01' for the
+        uncompressed format. Both formats can be unserialized using the
+        `unserialize` method.
+
+        The offset returned indicates at which offset the data should be
+        written from. In the case of compressed data, this is always 0.
+
+        :param start: SplitKey to start serialization at.
+        :param compressed: Serialize in a compressed format.
+        :return: a tuple of (offset, data)
+
+        """
+        offset_div = self.aggregation.granularity
+        if compressed:
+            # NOTE(jd) Use a double delta encoding for timestamps
+            timestamps = numpy.empty(self.timestamps.size, dtype='<H')
+            timestamps[0] = (self.first - start.key) / offset_div
+            timestamps[1:] = numpy.diff(self.timestamps) / offset_div
+            payload = (timestamps.tobytes() + self.values.tobytes())
+            return None, b"c" + self._compress(payload)
+        # NOTE(gordc): this binary serializes series based on the split
+        # time. The format is a 1B True/False flag which denotes whether the
+        # subsequent 8B is a real float or zero padding. Every 9B represents
+        # one granularity step from the start time. This is intended to be
+        # run on data already split, i.e. a False,0,True,0 serialization
+        # means the start datapoint is padding and, one step after the start
+        # time, the aggregate value is 0. Below, we calculate how many steps
+        # from start the series runs and fill an array of alternating
+        # (flag, float) entries.
+        first = self.first  # NOTE(jd) needed because faster
+        e_offset = int((self.last - first) / offset_div) + 1
+
+        locs = numpy.zeros(self.timestamps.size, dtype=numpy.int)
+        locs[1:] = numpy.cumsum(numpy.diff(self.timestamps)) / offset_div
+
+        # Fill everything with zero and set
+        serial = numpy.zeros((e_offset,), dtype=[('b', '<?'), ('v', '<d')])
+        serial['b'][locs] = numpy.ones_like(self.values, dtype='<?')
+        serial['v'][locs] = self.values
+
+        offset = int((first - start.key) / offset_div) * self.PADDED_SERIAL_LEN
+        return offset, serial.tobytes()
+
+    @staticmethod
+    def _resample_grouped(grouped_serie, agg_name, q=None):
+        agg_func = getattr(grouped_serie, agg_name)
+        return agg_func(q) if agg_name == 'quantile' else agg_func()
+
+    def fetch(self, from_timestamp=None, to_timestamp=None):
+        """Fetch aggregated time value.
+
+        Returns a sorted list of tuples (timestamp, granularity, value).
+        """
+        # Round timestamp to our granularity so we're sure that if e.g. 17:02
+        # is requested and we have points for 17:00 and 17:05 in a 5min
+        # granularity, we do return the 17:00 point and not nothing
+        if from_timestamp is None:
+            from_ = None
+        else:
+            from_ = round_timestamp(from_timestamp,
+                                    self.aggregation.granularity)
+        return self.__class__(self.aggregation, ts=self[from_:to_timestamp])
+
+    @classmethod
+    def benchmark(cls):
+        """Run a speed benchmark!"""
+        points = SplitKey.POINTS_PER_SPLIT
+        sampling = numpy.timedelta64(5, 's')
+        resample = numpy.timedelta64(35, 's')
+
+        now = numpy.datetime64("2015-04-03 23:11")
+        timestamps = numpy.sort(numpy.array(
+            [now + i * sampling
+             for i in six.moves.range(points)]))
+
+        print(cls.__name__)
+        print("=" * len(cls.__name__))
+
+        for title, values in [
+                ("Simple continuous range", six.moves.range(points)),
+                ("All 0", [float(0)] * points),
+                ("All 1", [float(1)] * points),
+                ("0 and 1", [0, 1] * (points // 2)),
+                ("1 and 0 random",
+                 [random.randint(0, 1)
+                  for x in six.moves.range(points)]),
+                ("Small number random pos/neg",
+                 [random.randint(-100000, 10000)
+                  for x in six.moves.range(points)]),
+                ("Small number random pos",
+                 [random.randint(0, 20000) for x in six.moves.range(points)]),
+                ("Small number random neg",
+                 [random.randint(-20000, 0) for x in six.moves.range(points)]),
+                ("Sin(x)", list(map(math.sin, six.moves.range(points)))),
+                ("random ", [random.random()
+                             for x in six.moves.range(points)]),
+        ]:
+            print(title)
+            serialize_times = 50
+            aggregation = Aggregation("mean", sampling, None)
+            ts = cls.from_data(aggregation, timestamps, values)
+            t0 = time.time()
+            key = ts.get_split_key()
+            for i in six.moves.range(serialize_times):
+                e, s = ts.serialize(key, compressed=False)
+            t1 = time.time()
+            print("  Uncompressed serialization speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+            print("   Bytes per point: %.2f" % (len(s) / float(points)))
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                cls.unserialize(s, key, 'mean')
+            t1 = time.time()
+            print("  Unserialization speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                o, s = ts.serialize(key, compressed=True)
+            t1 = time.time()
+            print("  Compressed serialization speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+            print("   Bytes per point: %.2f" % (len(s) / float(points)))
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                cls.unserialize(s, key, 'mean')
+            t1 = time.time()
+            print("  Uncompression speed: %.2f MB/s"
+                  % (((points * 2 * 8)
+                      / ((t1 - t0) / serialize_times)) / (1024.0 * 1024.0)))
+
+            def per_sec(t1, t0):
+                return 1 / ((t1 - t0) / serialize_times)
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                list(ts.split())
+            t1 = time.time()
+            print("  split() speed: %.2f Hz" % per_sec(t1, t0))
+
+            # NOTE(sileht): prepare a new serie whose timestamps half-overlap
+            # the original one
+            pts = ts.ts.copy()
+            tsbis = cls(ts=pts, aggregation=aggregation)
+            tsbis.ts['timestamps'] = (
+                tsbis.timestamps - numpy.timedelta64(
+                    sampling * points / 2, 's')
+            )
+
+            t0 = time.time()
+            for i in six.moves.range(serialize_times):
+                ts.merge(tsbis)
+            t1 = time.time()
+            print("  merge() speed %.2f Hz" % per_sec(t1, t0))
+
+            for agg in ['mean', 'sum', 'max', 'min', 'std', 'median', 'first',
+                        'last', 'count', '5pct', '90pct']:
+                serialize_times = 3 if agg.endswith('pct') else 10
+                ts = cls(ts=pts, aggregation=aggregation)
+                t0 = time.time()
+                for i in six.moves.range(serialize_times):
+                    ts.resample(resample)
+                t1 = time.time()
+                print("  resample(%s) speed: %.2f Hz"
+                      % (agg, per_sec(t1, t0)))
+
+
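+# Running this module directly runs both benchmarks; passing
+# ``--boundtimeserie`` or ``--aggregatedtimeserie`` selects just one.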
+if __name__ == '__main__':
+    import sys
+    args = sys.argv[1:]
+    if not args or "--boundtimeserie" in args:
+        BoundTimeSerie.benchmark()
+    if not args or "--aggregatedtimeserie" in args:
+        AggregatedTimeSerie.benchmark()
diff --git a/gnocchi/chef.py b/gnocchi/chef.py
new file mode 100644
index 0000000000000000000000000000000000000000..f90c29afa197c6c70dc25864092c989754a2a2fe
--- /dev/null
+++ b/gnocchi/chef.py
@@ -0,0 +1,184 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright (c) 2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import hashlib
+
+import daiquiri
+import six
+
+from gnocchi import indexer
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class SackAlreadyLocked(Exception):
+    def __init__(self, sack):
+        self.sack = sack
+        super(SackAlreadyLocked, self).__init__(
+            "Sack %s already locked" % sack)
+
+
+class Chef(object):
+    """A master of cooking gnocchi.
+
+    Give it a few tools and it'll make you happy!
+
+    The Chef is responsible for executing actions that require several drivers
+    at the same time, such as the coordinator, the incoming and storage
+    drivers, or the indexer.
+
+    """
+
+    def __init__(self, coord, incoming, index, storage):
+        self.coord = coord
+        self.incoming = incoming
+        self.index = index
+        self.storage = storage
+
+    def expunge_metrics(self, sync=False):
+        """Remove deleted metrics.
+
+        :param sync: If True, then delete everything synchronously and raise
+                     on error
+        :type sync: bool
+        """
+        metrics_to_expunge = self.index.list_metrics(status='delete')
+        metrics_by_id = {m.id: m for m in metrics_to_expunge}
+        for sack, metric_ids in self.incoming.group_metrics_by_sack(
+                metrics_by_id.keys()):
+            try:
+                lock = self.get_sack_lock(sack)
+                if not lock.acquire(blocking=sync):
+                    # Retry later
+                    LOG.debug(
+                        "Sack %s is locked, cannot expunge metrics", sack)
+                    continue
+                # NOTE(gordc): no need to hold lock because the metric has been
+                # already marked as "deleted" in the indexer so no measure
+                # worker is going to process it anymore.
+                lock.release()
+            except Exception:
+                if sync:
+                    raise
+                LOG.error("Unable to lock sack %s for expunging metrics",
+                          sack, exc_info=True)
+            else:
+                for metric_id in metric_ids:
+                    metric = metrics_by_id[metric_id]
+                    LOG.debug("Deleting metric %s", metric)
+                    try:
+                        self.incoming.delete_unprocessed_measures_for_metric(
+                            metric.id)
+                        self.storage._delete_metric(metric)
+                        try:
+                            self.index.expunge_metric(metric.id)
+                        except indexer.NoSuchMetric:
+                            # It's possible another process deleted or is
+                            # deleting the metric, not a big deal
+                            pass
+                    except Exception:
+                        if sync:
+                            raise
+                        LOG.error("Unable to expunge metric %s from storage",
+                                  metric, exc_info=True)
+
+    def refresh_metrics(self, metrics, timeout=None, sync=False):
+        """Process added measures in background for some metrics only.
+
+        :param metrics: The list of `indexer.Metric` to refresh.
+        :param timeout: Time to wait for the process to happen.
+        :param sync: If an error occurs, raise, otherwise just log it.
+        """
+        # process only active metrics. deleted metrics with unprocessed
+        # measures will be skipped until cleaned by janitor.
+        metrics_by_id = {m.id: m for m in metrics}
+        for sack, metric_ids in self.incoming.group_metrics_by_sack(
+                metrics_by_id.keys()):
+            lock = self.get_sack_lock(sack)
+            # FIXME(jd) timeout should be global for all sack locking
+            if not lock.acquire(blocking=timeout):
+                raise SackAlreadyLocked(sack)
+            try:
+                LOG.debug("Processing measures for %d metrics",
+                          len(metric_ids))
+                with self.incoming.process_measure_for_metrics(
+                        metric_ids) as metrics_and_measures:
+                    if metrics_and_measures:
+                        self.storage.add_measures_to_metrics({
+                            metrics_by_id[metric_id]: measures
+                            for metric_id, measures
+                            in six.iteritems(metrics_and_measures)
+                        })
+                        LOG.debug("Measures for %d metrics processed",
+                                  len(metric_ids))
+            except Exception:
+                if sync:
+                    raise
+                LOG.error("Error processing new measures", exc_info=True)
+            finally:
+                lock.release()
+
+    def process_new_measures_for_sack(self, sack, blocking=False, sync=False):
+        """Process added measures in background.
+
+        Lock a sack and try to process measures from it. If the sack cannot be
+        locked, the method will raise `SackAlreadyLocked`.
+
+        :param sack: The sack to process new measures for.
+        :param blocking: Block until the sack can be locked, or raise
+                         `SackAlreadyLocked` otherwise.
+        :param sync: If True, raise any issue immediately; otherwise just
+                     log it.
+        :return: The number of metrics processed.
+
+        """
+        lock = self.get_sack_lock(sack)
+        if not lock.acquire(blocking=blocking):
+            raise SackAlreadyLocked(sack)
+        LOG.debug("Processing measures for sack %s", sack)
+        try:
+            with self.incoming.process_measures_for_sack(sack) as measures:
+                # process only active metrics. deleted metrics with unprocessed
+                # measures will be skipped until cleaned by janitor.
+                if not measures:
+                    return 0
+
+                metrics = self.index.list_metrics(
+                    attribute_filter={
+                        "in": {"id": measures.keys()}
+                    })
+                self.storage.add_measures_to_metrics({
+                    metric: measures[metric.id]
+                    for metric in metrics
+                })
+                LOG.debug("Measures for %d metrics processed",
+                          len(metrics))
+                return len(measures)
+        except Exception:
+            if sync:
+                raise
+            LOG.error("Error processing new measures", exc_info=True)
+            return 0
+        finally:
+            lock.release()
+
+    def get_sack_lock(self, sack):
+        # FIXME(jd) Some tooz drivers have a limitation on lock name length
+        # (e.g. MySQL). This should be handled by tooz, but it's not yet.
+        lock_name = hashlib.new(
+            'sha1',
+            ('gnocchi-sack-%s-lock' % str(sack)).encode()).hexdigest().encode()
+        return self.coord.get_lock(lock_name)
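+
+
+# A minimal usage sketch (hypothetical driver setup): build a Chef from the
+# four drivers, then drain one sack:
+#
+#   c = Chef(coord, incoming_driver, indexer_driver, storage_driver)
+#   for sack in incoming_driver.iter_sacks():
+#       try:
+#           c.process_new_measures_for_sack(sack, blocking=True)
+#       except SackAlreadyLocked:
+#           continue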
diff --git a/gnocchi/cli/__init__.py b/gnocchi/cli/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/cli/amqp1d.py b/gnocchi/cli/amqp1d.py
new file mode 100644
index 0000000000000000000000000000000000000000..7f3dedc3fa48daa240d83ac38eb63219dd75bd76
--- /dev/null
+++ b/gnocchi/cli/amqp1d.py
@@ -0,0 +1,19 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from gnocchi import amqp1d as amqp1d_service
+
+
+def amqp1d():
+    amqp1d_service.start()
diff --git a/gnocchi/cli/api.py b/gnocchi/cli/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a9c0fbbd695b6a8d80b360210ac23e246e3ece0
--- /dev/null
+++ b/gnocchi/cli/api.py
@@ -0,0 +1,120 @@
+# Copyright (c) 2013 Mirantis Inc.
+# Copyright (c) 2015-2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import copy
+from distutils import spawn
+import math
+import os
+import sys
+
+import daiquiri
+from oslo_config import cfg
+from oslo_policy import opts as policy_opts
+
+from gnocchi import opts
+from gnocchi.rest import app
+from gnocchi import service
+from gnocchi import utils
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def prepare_service(conf=None):
+    if conf is None:
+        conf = cfg.ConfigOpts()
+
+    opts.set_defaults()
+    policy_opts.set_defaults(conf)
+    conf = service.prepare_service(conf=conf)
+    cfg_path = conf.oslo_policy.policy_file
+    if not os.path.isabs(cfg_path):
+        cfg_path = conf.find_file(cfg_path)
+    if cfg_path is None or not os.path.exists(cfg_path):
+        cfg_path = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                   '..', 'rest', 'policy.json'))
+    conf.set_default('policy_file', cfg_path, group='oslo_policy')
+    return conf
+
+
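+# WSGI entry point: build the configuration and return the application,
+# ready to be served by any WSGI server.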
+def wsgi():
+    return app.load_app(prepare_service())
+
+
+def api():
+    # Compat with previous pbr script
+    try:
+        double_dash = sys.argv.index("--")
+    except ValueError:
+        double_dash = None
+    else:
+        sys.argv.pop(double_dash)
+
+    conf = cfg.ConfigOpts()
+    for opt in opts.API_OPTS:
+        # NOTE(jd) Register the API options without a default, so they are only
+        # used to override the one in the config file
+        c = copy.copy(opt)
+        c.default = None
+        conf.register_cli_opt(c)
+    conf = prepare_service(conf)
+
+    if double_dash is not None:
+        # NOTE(jd) Wait until this stage to log so we're sure the logging
+        # system is in place
+        LOG.warning(
+            "No need to pass `--' on the gnocchi-api command line anymore; "
+            "please remove it")
+
+    uwsgi = spawn.find_executable("uwsgi")
+    if not uwsgi:
+        LOG.error("Unable to find `uwsgi'.\n"
+                  "Be sure it is installed and in $PATH.")
+        return 1
+
+    workers = utils.get_default_workers()
+
+    # TODO(sileht): When uwsgi 2.1 is released, we should be able
+    # to use --wsgi-manage-chunked-input
+    # https://github.com/unbit/uwsgi/issues/1428
+    args = [
+        "--if-not-plugin", "python", "--plugin", "python", "--endif",
+        "--%s" % conf.api.uwsgi_mode, "%s:%d" % (
+            conf.host or conf.api.host,
+            conf.port or conf.api.port),
+        "--master",
+        "--enable-threads",
+        "--thunder-lock",
+        "--hook-master-start", "unix_signal:15 gracefully_kill_them_all",
+        "--die-on-term",
+        "--processes", str(math.floor(workers * 1.5)),
+        "--threads", str(workers),
+        "--lazy-apps",
+        "--chdir", "/",
+        "--wsgi", "gnocchi.rest.wsgi",
+        "--pyargv", " ".join(sys.argv[1:]),
+    ]
+    if conf.api.uwsgi_mode == "http":
+        args.extend([
+            "--so-keepalive",
+            "--http-keepalive",
+            "--add-header", "Connection: Keep-Alive"
+        ])
+
+    virtual_env = os.getenv("VIRTUAL_ENV")
+    if virtual_env is not None:
+        args.extend(["-H", virtual_env])
+
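+    # os.execl() replaces the current process with uwsgi; it only returns
+    # on failure.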
+    return os.execl(uwsgi, uwsgi, *args)
diff --git a/gnocchi/cli/injector.py b/gnocchi/cli/injector.py
new file mode 100644
index 0000000000000000000000000000000000000000..a624c4191c0366a395b9c410bc1951f7bdd204af
--- /dev/null
+++ b/gnocchi/cli/injector.py
@@ -0,0 +1,106 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright (c) 2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import random
+import time
+import uuid
+
+import daiquiri
+import numpy
+from oslo_config import cfg
+
+from gnocchi import chef
+from gnocchi.cli import metricd
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import storage
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def injector():
+    conf = cfg.ConfigOpts()
+    conf.register_cli_opts([
+        cfg.IntOpt("--measures",
+                   help="Measures per metric."),
+        cfg.IntOpt("--metrics",
+                   help="Number of metrics to create."),
+        cfg.IntOpt("--archive-policy-name",
+                   help="Name of archive policy to use.",
+                   default="low"),
+        cfg.IntOpt("--interval",
+                   help="Interval to sleep between metrics sending."),
+        cfg.BoolOpt("--process", default=False,
+                    help="Process the ingested measures."),
+    ])
+    conf = service.prepare_service(conf=conf, log_to_std=True)
+    # _inject() needs the incoming, storage and indexer drivers plus a
+    # coordinator, so build them from the loaded configuration.
+    coord = metricd.get_coordinator_and_start(str(uuid.uuid4()),
+                                              conf.coordination_url)
+    return _inject(incoming.get_driver(conf), coord,
+                   storage.get_driver(conf), indexer.get_driver(conf),
+                   metrics=conf.metrics,
+                   measures=conf.measures,
+                   archive_policy_name=conf.archive_policy_name,
+                   process=conf.process,
+                   interval=conf.interval)
+
+
+def _inject(inc, coord, store, idx,
+            metrics, measures, archive_policy_name="low", process=False,
+            interval=None):
+    LOG.info("Creating %d metrics", metrics)
+    with utils.StopWatch() as sw:
+        metric_ids = [
+            idx.create_metric(uuid.uuid4(), "admin",
+                              archive_policy_name).id
+            for _ in range(metrics)
+        ]
+    LOG.info("Created %d metrics in %.2fs", metrics, sw.elapsed())
+
+    LOG.info("Generating %d measures per metric for %d metrics… ",
+             measures, metrics)
+    now = numpy.datetime64(utils.utcnow())
+    with utils.StopWatch() as sw:
+        measures = {
+            m_id: [incoming.Measure(
+                now + numpy.timedelta64(s, 's'),
+                random.randint(-999999, 999999)) for s in range(measures)]
+            for m_id in metric_ids
+        }
+    LOG.info("… done in %.2fs", sw.elapsed())
+
+    interval_timer = utils.StopWatch().start()
+
+    while True:
+        interval_timer.reset()
+        with utils.StopWatch() as sw:
+            inc.add_measures_batch(measures)
+        total_measures = sum(map(len, measures.values()))
+        LOG.info("Pushed %d measures in %.2fs",
+                 total_measures,
+                 sw.elapsed())
+
+        if process:
+            c = chef.Chef(coord, inc, idx, store)
+
+            with utils.StopWatch() as sw:
+                for s in inc.iter_sacks():
+                    c.process_new_measures_for_sack(s, blocking=True)
+            LOG.info("Processed %d sacks in %.2fs",
+                     inc.NUM_SACKS, sw.elapsed())
+            LOG.info("Speed: %.2f measures/s",
+                     float(total_measures) / sw.elapsed())
+
+        if interval is None:
+            break
+        time.sleep(max(0, interval - interval_timer.elapsed()))
+
+    return total_measures
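+
+
+# Example invocation (hypothetical values; the console-script name is
+# assumed):
+#
+#   gnocchi-injector --metrics 100 --measures 1000 --process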
diff --git a/gnocchi/cli/manage.py b/gnocchi/cli/manage.py
new file mode 100644
index 0000000000000000000000000000000000000000..c5220ef9e6cff0bb1e3687dcbd29e6902b3985de
--- /dev/null
+++ b/gnocchi/cli/manage.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2013 Mirantis Inc.
+# Copyright (c) 2015-2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import copy
+import os
+import sys
+
+import daiquiri
+from oslo_config import cfg
+from oslo_config import generator
+import six
+
+from gnocchi import archive_policy
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import storage
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def config_generator():
+    args = sys.argv[1:]
+    if not args:
+        args = ['--output-file', 'etc/gnocchi/gnocchi.conf']
+    return generator.main(['--config-file',
+                           '%s/../gnocchi-config-generator.conf' %
+                           os.path.dirname(__file__)]
+                          + args)
+
+
+_SACK_NUMBER_OPT = cfg.IntOpt(
+    "sacks-number", min=1, max=65535, required=True,
+    help="Number of incoming storage sacks to create.")
+
+
+def upgrade():
+    conf = cfg.ConfigOpts()
+    sack_number_opt = copy.copy(_SACK_NUMBER_OPT)
+    sack_number_opt.default = 128
+    conf.register_cli_opts([
+        cfg.BoolOpt("skip-index", default=False,
+                    help="Skip index upgrade."),
+        cfg.BoolOpt("skip-storage", default=False,
+                    help="Skip storage upgrade."),
+        cfg.BoolOpt("skip-incoming", default=False,
+                    help="Skip incoming storage upgrade."),
+        cfg.BoolOpt("skip-archive-policies-creation", default=False,
+                    help="Skip default archive policies creation."),
+        sack_number_opt,
+    ])
+    conf = service.prepare_service(conf=conf, log_to_std=True)
+    if not conf.skip_index:
+        index = indexer.get_driver(conf)
+        LOG.info("Upgrading indexer %s", index)
+        index.upgrade()
+    if not conf.skip_storage:
+        s = storage.get_driver(conf)
+        LOG.info("Upgrading storage %s", s)
+        s.upgrade()
+    if not conf.skip_incoming:
+        i = incoming.get_driver(conf)
+        LOG.info("Upgrading incoming storage %s", i)
+        i.upgrade(conf.sacks_number)
+
+    if not conf.skip_archive_policies_creation:
+        if conf.skip_index:
+            # The indexer was not loaded above; it is still needed to create
+            # the default archive policies.
+            index = indexer.get_driver(conf)
+        if (not index.list_archive_policies()
+                and not index.list_archive_policy_rules()):
+            for name, ap in six.iteritems(
+                    archive_policy.DEFAULT_ARCHIVE_POLICIES):
+                index.create_archive_policy(ap)
+            index.create_archive_policy_rule("default", "*", "low")
+
+
+def change_sack_size():
+    conf = cfg.ConfigOpts()
+    conf.register_cli_opts([_SACK_NUMBER_OPT])
+    conf = service.prepare_service(conf=conf, log_to_std=True)
+    s = incoming.get_driver(conf)
+    try:
+        report = s.measures_report(details=False)
+    except incoming.SackDetectionError:
+        LOG.error('Unable to detect the number of storage sacks.\n'
+                  'Ensure gnocchi-upgrade has been executed.')
+        return
+    remainder = report['summary']['measures']
+    if remainder:
+        LOG.error('Cannot change sack size with a non-empty backlog. Process '
+                  'the remaining %s measures and try again', remainder)
+        return
+    LOG.info("Removing current %d sacks", s.NUM_SACKS)
+    s.remove_sacks()
+    LOG.info("Creating new %d sacks", conf.sacks_number)
+    s.upgrade(conf.sacks_number)
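+
+
+# Note: resizing is only safe once the measure backlog is empty; stop the
+# metricd workers and let them drain before running this.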
diff --git a/gnocchi/cli/metricd.py b/gnocchi/cli/metricd.py
new file mode 100644
index 0000000000000000000000000000000000000000..523c47a75b6118ac5daeaa9ff73550b425c3dac2
--- /dev/null
+++ b/gnocchi/cli/metricd.py
@@ -0,0 +1,329 @@
+# Copyright (c) 2013 Mirantis Inc.
+# Copyright (c) 2015-2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import socket
+import threading
+import time
+import uuid
+
+import cachetools.func
+import cotyledon
+from cotyledon import oslo_config_glue
+import daiquiri
+from oslo_config import cfg
+import tenacity
+import tooz
+from tooz import coordination
+
+from gnocchi import chef
+from gnocchi import exceptions
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import storage
+from gnocchi import utils
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+@utils.retry_on_exception_and_log("Unable to initialize coordination driver")
+def get_coordinator_and_start(member_id, url):
+    coord = coordination.get_coordinator(url, member_id)
+    coord.start(start_heart=True)
+    return coord
+
+
+class MetricProcessBase(cotyledon.Service):
+    def __init__(self, worker_id, conf, interval_delay=0):
+        super(MetricProcessBase, self).__init__(worker_id)
+        self.conf = conf
+        self.startup_delay = self.worker_id = worker_id
+        self.interval_delay = interval_delay
+        self._wake_up = threading.Event()
+        self._shutdown = threading.Event()
+        self._shutdown_done = threading.Event()
+
+    def wakeup(self):
+        self._wake_up.set()
+
+    def _configure(self):
+        member_id = "%s.%s.%s" % (socket.gethostname(),
+                                  self.worker_id,
+                                  # NOTE(jd) Still use a uuid here so we're
+                                  # sure there's no conflict in case of
+                                  # crash/restart
+                                  str(uuid.uuid4()))
+        self.coord = get_coordinator_and_start(member_id,
+                                               self.conf.coordination_url)
+        self.store = storage.get_driver(self.conf)
+        self.incoming = incoming.get_driver(self.conf)
+        self.index = indexer.get_driver(self.conf)
+        self.chef = chef.Chef(self.coord, self.incoming,
+                              self.index, self.store)
+
+    def run(self):
+        self._configure()
+        # Delay startup so workers are jittered.
+        time.sleep(self.startup_delay)
+
+        while not self._shutdown.is_set():
+            with utils.StopWatch() as timer:
+                try:
+                    self._run_job()
+                except Exception:
+                    LOG.error("Unexpected error during %s job",
+                              self.name,
+                              exc_info=True)
+            self._wake_up.wait(max(0, self.interval_delay - timer.elapsed()))
+            self._wake_up.clear()
+        self._shutdown_done.set()
+
+    def terminate(self):
+        self._shutdown.set()
+        self.wakeup()
+        LOG.info("Waiting ongoing metric processing to finish")
+        self._shutdown_done.wait()
+        self.close_services()
+
+    def close_services(self):
+        self.coord.stop()
+
+    @staticmethod
+    def _run_job():
+        raise NotImplementedError
+
+
+class MetricReporting(MetricProcessBase):
+    name = "reporting"
+
+    def __init__(self, worker_id, conf):
+        super(MetricReporting, self).__init__(
+            worker_id, conf, conf.metricd.metric_reporting_delay)
+
+    def _configure(self):
+        self.incoming = incoming.get_driver(self.conf)
+
+    @staticmethod
+    def close_services():
+        pass
+
+    def _run_job(self):
+        try:
+            report = self.incoming.measures_report(details=False)
+            LOG.info("%d measurements bundles across %d "
+                     "metrics wait to be processed.",
+                     report['summary']['measures'],
+                     report['summary']['metrics'])
+        except incoming.ReportGenerationError:
+            LOG.warning("Unable to compute backlog. Retrying at next "
+                        "interval.")
+
+
+class MetricProcessor(MetricProcessBase):
+    name = "processing"
+    GROUP_ID = b"gnocchi-processing"
+
+    def __init__(self, worker_id, conf):
+        super(MetricProcessor, self).__init__(
+            worker_id, conf, conf.metricd.metric_processing_delay)
+        self._tasks = []
+        self.group_state = None
+        self.sacks_with_measures_to_process = set()
+        # This stores the last time the processor did a scan on all the
+        # sacks it is responsible for
+        self._last_full_sack_scan = utils.StopWatch().start()
+        # Only update the list of sacks to process every
+        # metric_processing_delay
+        self._get_sacks_to_process = cachetools.func.ttl_cache(
+            ttl=conf.metricd.metric_processing_delay
+        )(self._get_sacks_to_process)
+
+    @tenacity.retry(
+        wait=utils.wait_exponential,
+        # Never retry except when explicitly asked by raising TryAgain
+        retry=tenacity.retry_never)
+    def _configure(self):
+        super(MetricProcessor, self)._configure()
+
+        # Create a fallback in case partitioning fails or assigns no tasks
+        self.fallback_tasks = list(self.incoming.iter_sacks())
+        try:
+            self.partitioner = self.coord.join_partitioned_group(
+                self.GROUP_ID, partitions=200)
+            LOG.info('Joined coordination group: %s',
+                     self.GROUP_ID.decode())
+        except tooz.NotImplemented:
+            LOG.warning('Coordinator does not support partitioning. Worker '
+                        'will battle against other workers for jobs.')
+        except tooz.ToozError as e:
+            LOG.error('Unexpected error configuring coordinator for '
+                      'partitioning. Retrying: %s', e)
+            raise tenacity.TryAgain(e)
+
+        if self.conf.metricd.greedy:
+            filler = threading.Thread(target=self._fill_sacks_to_process)
+            filler.daemon = True
+            filler.start()
+
+    @utils.retry_on_exception.wraps
+    def _fill_sacks_to_process(self):
+        try:
+            for sack in self.incoming.iter_on_sacks_to_process():
+                if sack in self._get_sacks_to_process():
+                    LOG.debug(
+                        "Got notification for sack %s, waking up processing",
+                        sack)
+                    self.sacks_with_measures_to_process.add(sack)
+                    self.wakeup()
+        except exceptions.NotImplementedError:
+            LOG.info("Incoming driver does not support notification")
+        except Exception as e:
+            LOG.error(
+                "Error while listening for new measures notification, "
+                "retrying",
+                exc_info=True)
+            raise tenacity.TryAgain(e)
+
+    def _get_sacks_to_process(self):
+        try:
+            self.coord.run_watchers()
+            if (not self._tasks or
+                    self.group_state != self.partitioner.ring.nodes):
+                self.group_state = self.partitioner.ring.nodes.copy()
+                self._tasks = [
+                    sack for sack in self.incoming.iter_sacks()
+                    if self.partitioner.belongs_to_self(
+                        sack, replicas=self.conf.metricd.processing_replicas)]
+        except tooz.NotImplemented:
+            # Do not log anything. If `run_watchers` is not implemented, it's
+            # likely that partitioning is not implemented either, so it has
+            # already been logged at startup with a warning.
+            pass
+        except Exception as e:
+            LOG.error('Unexpected error updating the task partitioner: %s', e)
+        finally:
+            return self._tasks or self.fallback_tasks
+
+    def _run_job(self):
+        m_count = 0
+        s_count = 0
+        # We are going to process the sacks we got notified for, and if we got
+        # no notification, then we'll just try to process them all, just to be
+        # sure we don't miss anything. In case we did not do a full scan for
+        # more than `metric_processing_delay`, we do that instead.
+        if self._last_full_sack_scan.elapsed() >= self.interval_delay:
+            sacks = self._get_sacks_to_process()
+        else:
+            sacks = (self.sacks_with_measures_to_process.copy()
+                     or self._get_sacks_to_process())
+        for s in sacks:
+            try:
+                try:
+                    m_count += self.chef.process_new_measures_for_sack(s)
+                except chef.SackAlreadyLocked:
+                    continue
+                s_count += 1
+                self.incoming.finish_sack_processing(s)
+                self.sacks_with_measures_to_process.discard(s)
+            except Exception:
+                LOG.error("Unexpected error processing assigned job",
+                          exc_info=True)
+        LOG.debug("%d metrics processed from %d sacks", m_count, s_count)
+        try:
+            # Update statistics
+            self.coord.update_capabilities(self.GROUP_ID,
+                                           self.store.statistics)
+        except tooz.NotImplemented:
+            pass
+        if sacks == self._get_sacks_to_process():
+            # We just did a full scan of all sacks, reset the timer
+            self._last_full_sack_scan.reset()
+            LOG.debug("Full scan of sacks has been done")
+
+    def close_services(self):
+        self.coord.stop()
+
+
+class MetricJanitor(MetricProcessBase):
+    name = "janitor"
+
+    def __init__(self, worker_id, conf):
+        super(MetricJanitor, self).__init__(
+            worker_id, conf, conf.metricd.metric_cleanup_delay)
+
+    def _run_job(self):
+        self.chef.expunge_metrics()
+        LOG.debug("Metrics marked for deletion removed from backend")
+
+
+class MetricdServiceManager(cotyledon.ServiceManager):
+    def __init__(self, conf):
+        super(MetricdServiceManager, self).__init__()
+        oslo_config_glue.setup(self, conf)
+
+        self.conf = conf
+        self.metric_processor_id = self.add(
+            MetricProcessor, args=(self.conf,),
+            workers=conf.metricd.workers)
+        if self.conf.metricd.metric_reporting_delay >= 0:
+            self.add(MetricReporting, args=(self.conf,))
+        self.add(MetricJanitor, args=(self.conf,))
+
+        self.register_hooks(on_reload=self.on_reload)
+
+    def on_reload(self):
+        # NOTE(sileht): We do not implement reload() in workers, so all
+        # workers will receive SIGHUP and exit gracefully, then they will be
+        # restarted with the new number of workers. This is important because
+        # we use the number of workers to declare the capability in tooz and
+        # to select the block of metrics to process.
+        self.reconfigure(self.metric_processor_id,
+                         workers=self.conf.metricd.workers)
+
+
+def metricd_tester(conf):
+    # NOTE(sileht): This method is designed to be profiled; we want to avoid
+    # issues with the profiler and os.fork(), which is why we don't use the
+    # MetricdServiceManager.
+    index = indexer.get_driver(conf)
+    s = storage.get_driver(conf)
+    inc = incoming.get_driver(conf)
+    c = chef.Chef(None, inc, index, s)
+    metrics_count = 0
+    for sack in inc.iter_sacks():
+        try:
+            metrics_count += c.process_new_measures_for_sack(sack,
+                                                             blocking=True)
+        except chef.SackAlreadyLocked:
+            continue
+        if metrics_count >= conf.stop_after_processing_metrics:
+            break
+
+
+def metricd():
+    conf = cfg.ConfigOpts()
+    conf.register_cli_opts([
+        cfg.IntOpt("stop-after-processing-metrics",
+                   default=0,
+                   min=0,
+                   help="Number of metrics to process without workers, "
+                   "for testing purpose"),
+    ])
+    conf = service.prepare_service(conf=conf)
+
+    if conf.stop_after_processing_metrics:
+        metricd_tester(conf)
+    else:
+        MetricdServiceManager(conf).run()
diff --git a/gnocchi/cli/statsd.py b/gnocchi/cli/statsd.py
new file mode 100644
index 0000000000000000000000000000000000000000..40f2deef405e2a7474649f08b5668a848d050796
--- /dev/null
+++ b/gnocchi/cli/statsd.py
@@ -0,0 +1,20 @@
+# Copyright (c) 2013 Mirantis Inc.
+# Copyright (c) 2015-2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from gnocchi import statsd as statsd_service
+
+
+def statsd():
+    statsd_service.start()
diff --git a/gnocchi/common/__init__.py b/gnocchi/common/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/common/ceph.py b/gnocchi/common/ceph.py
new file mode 100644
index 0000000000000000000000000000000000000000..407aa44ac850ddacbc99d9c389575a3d432015a1
--- /dev/null
+++ b/gnocchi/common/ceph.py
@@ -0,0 +1,104 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import errno
+
+import daiquiri
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+for RADOS_MODULE_NAME in ('cradox', 'rados'):
+    try:
+        rados = __import__(RADOS_MODULE_NAME)
+    except ImportError:
+        pass
+    else:
+        break
+else:
+    RADOS_MODULE_NAME = None
+    rados = None
+
+if rados is not None and hasattr(rados, 'run_in_thread'):
+    rados.run_in_thread = lambda target, args, timeout=None: target(*args)
+    LOG.info("rados.run_in_thread is monkeypatched.")
+
+
+def create_rados_connection(conf):
+    options = {}
+    if conf.ceph_keyring:
+        options['keyring'] = conf.ceph_keyring
+    if conf.ceph_secret:
+        options['key'] = conf.ceph_secret
+    if conf.ceph_timeout:
+        options['rados_osd_op_timeout'] = conf.ceph_timeout
+        options['rados_mon_op_timeout'] = conf.ceph_timeout
+        options['client_mount_timeout'] = conf.ceph_timeout
+
+    if not rados:
+        raise ImportError("No module named 'rados' nor 'cradox'")
+
+    if not hasattr(rados, 'OmapIterator'):
+        raise ImportError("Your rados python module does not support "
+                          "omap feature. Install 'cradox' (recommended) "
+                          "or upgrade 'python-rados' >= 9.1.0 ")
+
+    LOG.info("Ceph storage backend use '%s' python library",
+             RADOS_MODULE_NAME)
+
+    # NOTE(sileht): librados handles reconnection itself: by default, if a
+    # call times out (30s), it raises a rados.Timeout exception, and librados
+    # still tries to reconnect on the next call
+    conn = rados.Rados(conffile=conf.ceph_conffile,
+                       rados_id=conf.ceph_username,
+                       conf=options)
+    try:
+        conn.connect()
+    except rados.InvalidArgumentError:
+        raise Exception("Unable to connect to ceph, check the configuration")
+    ioctx = conn.open_ioctx(conf.ceph_pool)
+    return conn, ioctx
+
+
+def close_rados_connection(conn, ioctx):
+    ioctx.aio_flush()
+    ioctx.close()
+    conn.shutdown()
+
+
+# NOTE(sileht): The mapping is not part of the rados public API, so we copy
+# it here.
+EXCEPTION_NAMES = {
+    errno.EPERM: 'PermissionError',
+    errno.ENOENT: 'ObjectNotFound',
+    errno.EIO: 'IOError',
+    errno.ENOSPC: 'NoSpace',
+    errno.EEXIST: 'ObjectExists',
+    errno.EBUSY: 'ObjectBusy',
+    errno.ENODATA: 'NoData',
+    errno.EINTR: 'InterruptedOrTimeoutError',
+    errno.ETIMEDOUT: 'TimedOut',
+    errno.EACCES: 'PermissionDeniedError'
+}
+
+
+def errno_to_exception(ret):
+    if ret < 0:
+        name = EXCEPTION_NAMES.get(abs(ret))
+        if name is None:
+            raise rados.Error("Unhandled error '%s'" % ret)
+        else:
+            raise getattr(rados, name)
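+
+
+# For example, errno_to_exception(-errno.ENOENT) raises rados.ObjectNotFound,
+# while a non-negative return code passes through silently.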
diff --git a/gnocchi/common/redis.py b/gnocchi/common/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..8607aab85278afd80503ed36e3ab114412a848c5
--- /dev/null
+++ b/gnocchi/common/redis.py
@@ -0,0 +1,169 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017-2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+
+from oslo_config import cfg
+import six
+from six.moves.urllib import parse
+
+try:
+    import redis
+    from redis import sentinel
+except ImportError:
+    redis = None
+    sentinel = None
+
+from gnocchi import utils
+
+
+SEP_S = ':'
+SEP = b':'
+
+CLIENT_ARGS = frozenset([
+    'db',
+    'encoding',
+    'retry_on_timeout',
+    'socket_keepalive',
+    'socket_timeout',
+    'ssl',
+    'ssl_certfile',
+    'ssl_keyfile',
+    'sentinel',
+    'sentinel_fallback',
+])
+"""
+"""
+
+#: Client arguments that are expected/allowed to be lists.
+CLIENT_LIST_ARGS = frozenset([
+    'sentinel_fallback',
+])
+
+#: Client arguments that are expected to be boolean convertible.
+CLIENT_BOOL_ARGS = frozenset([
+    'retry_on_timeout',
+    'ssl',
+])
+
+#: Client arguments that are expected to be int convertible.
+CLIENT_INT_ARGS = frozenset([
+    'db',
+    'socket_keepalive',
+    'socket_timeout',
+])
+
+OPTS = [
+    cfg.StrOpt('redis_url',
+               default='redis://localhost:6379/',
+               help="""Redis URL
+
+  For example::
+
+    redis://[:password]@localhost:6379?db=0
+
+  We proxy some options to the redis client (used to configure the redis client
+  internals so that it works as you expect/want it to):  `%s`
+
+  Further resources/links:
+
+   - http://redis-py.readthedocs.org/en/latest/#redis.Redis
+   - https://github.com/andymccurdy/redis-py/blob/2.10.3/redis/client.py
+
+  To use a `sentinel`_ the connection URI must point to the sentinel server.
+  At connection time the sentinel will be asked for the current IP and port
+  of the master and then connect there. The connection URI for sentinel
+  should be written as follows::
+
+    redis://<sentinel host>:<sentinel port>?sentinel=<master name>
+
+  Additional sentinel hosts are listed with multiple ``sentinel_fallback``
+  parameters as follows::
+
+      redis://<sentinel host>:<sentinel port>?sentinel=<master name>&
+        sentinel_fallback=<other sentinel host>:<sentinel port>&
+        sentinel_fallback=<other sentinel host>:<sentinel port>&
+        sentinel_fallback=<other sentinel host>:<sentinel port>
+
+  Further resources/links:
+
+  - http://redis.io/
+  - http://redis.io/topics/sentinel
+  - http://redis.io/topics/cluster-spec
+
+""" % "`, `".join(sorted(CLIENT_ARGS))),
+]
+
+
+def get_client(conf, scripts=None):
+    if redis is None:
+        raise RuntimeError("Redis Python module is unavailable")
+    parsed_url = parse.urlparse(conf.redis_url)
+    options = parse.parse_qs(parsed_url.query)
+
+    kwargs = {}
+    if parsed_url.hostname:
+        kwargs['host'] = parsed_url.hostname
+        if parsed_url.port:
+            kwargs['port'] = parsed_url.port
+    else:
+        if not parsed_url.path:
+            raise ValueError("Expected socket path in parsed urls path")
+        kwargs['unix_socket_path'] = parsed_url.path
+    if parsed_url.password:
+        kwargs['password'] = parsed_url.password
+
+    for a in CLIENT_ARGS:
+        if a not in options:
+            continue
+        if a in CLIENT_BOOL_ARGS:
+            v = utils.strtobool(options[a][-1])
+        elif a in CLIENT_LIST_ARGS:
+            v = options[a]
+        elif a in CLIENT_INT_ARGS:
+            v = int(options[a][-1])
+        else:
+            v = options[a][-1]
+        kwargs[a] = v
+
+    # Ask the sentinel for the current master if there is a
+    # sentinel arg.
+    if 'sentinel' in kwargs:
+        sentinel_hosts = [
+            tuple(fallback.split(':'))
+            for fallback in kwargs.get('sentinel_fallback', [])
+        ]
+        sentinel_hosts.insert(0, (kwargs['host'], kwargs['port']))
+        sentinel_server = sentinel.Sentinel(
+            sentinel_hosts,
+            socket_timeout=kwargs.get('socket_timeout'))
+        sentinel_name = kwargs['sentinel']
+        del kwargs['sentinel']
+        if 'sentinel_fallback' in kwargs:
+            del kwargs['sentinel_fallback']
+        # The client is a redis.StrictRedis using a
+        # Sentinel managed connection pool.
+        client = sentinel_server.master_for(sentinel_name, **kwargs)
+    else:
+        client = redis.StrictRedis(**kwargs)
+
+    if scripts is not None:
+        scripts = {
+            name: client.register_script(code)
+            for name, code in six.iteritems(scripts)
+        }
+
+    return client, scripts
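+
+
+# A minimal usage sketch (hypothetical conf object exposing ``redis_url``):
+#
+#   conf.redis_url = "redis://localhost:6379?db=0"
+#   client, _ = get_client(conf)
+#   client.ping()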
diff --git a/gnocchi/common/s3.py b/gnocchi/common/s3.py
new file mode 100644
index 0000000000000000000000000000000000000000..4981a8a3fe14e3bc7b4ba5d6640cde0345013311
--- /dev/null
+++ b/gnocchi/common/s3.py
@@ -0,0 +1,84 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import daiquiri
+
+import tenacity
+try:
+    import boto3
+    import botocore.config as boto_config
+    import botocore.exceptions
+except ImportError:
+    boto3 = None
+    botocore = None
+
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def retry_if_operationaborted(exception):
+    return (isinstance(exception, botocore.exceptions.ClientError)
+            and exception.response['Error'].get('Code') == "OperationAborted")
+
+
+def get_connection(conf):
+    if boto3 is None:
+        raise RuntimeError("boto3 unavailable")
+    conn = boto3.client(
+        's3',
+        endpoint_url=conf.s3_endpoint_url,
+        region_name=conf.s3_region_name,
+        aws_access_key_id=conf.s3_access_key_id,
+        aws_secret_access_key=conf.s3_secret_access_key,
+        config=boto_config.Config(
+            max_pool_connections=conf.s3_max_pool_connections))
+    return conn, conf.s3_region_name, conf.s3_bucket_prefix
+
+
+# NOTE(jd) OperationAborted might be raised if we try to create the bucket
+# for the first time at the same time
+@tenacity.retry(
+    stop=tenacity.stop_after_attempt(10),
+    wait=tenacity.wait_fixed(0.5),
+    retry=tenacity.retry_if_exception(retry_if_operationaborted)
+)
+def create_bucket(conn, name, region_name):
+    if region_name:
+        kwargs = dict(CreateBucketConfiguration={
+            "LocationConstraint": region_name,
+        })
+    else:
+        kwargs = {}
+    return conn.create_bucket(Bucket=name, **kwargs)
+
+
+def bulk_delete(conn, bucket, objects):
+    # NOTE(jd) The maximum object to delete at once is 1000
+    # TODO(jd) Parallelize?
+    deleted = 0
+    for obj_slice in utils.grouper(objects, 1000):
+        d = {
+            'Objects': [{'Key': o} for o in obj_slice],
+            # FIXME(jd) Use Quiet mode, but s3rver does not seem to
+            # support it
+            # 'Quiet': True,
+        }
+        response = conn.delete_objects(
+            Bucket=bucket,
+            Delete=d)
+        deleted += len(response['Deleted'])
+    LOG.debug('%s objects deleted, %s objects skipped',
+              deleted, len(objects) - deleted)
diff --git a/gnocchi/common/swift.py b/gnocchi/common/swift.py
new file mode 100644
index 0000000000000000000000000000000000000000..f961ba441817162021e755afa854f1eba44268c0
--- /dev/null
+++ b/gnocchi/common/swift.py
@@ -0,0 +1,73 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import daiquiri
+from six.moves.urllib.parse import quote
+
+try:
+    from swiftclient import client as swclient
+    from swiftclient import utils as swift_utils
+except ImportError:
+    swclient = None
+    swift_utils = None
+
+from gnocchi import storage
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def get_connection(conf):
+    if swclient is None:
+        raise RuntimeError("python-swiftclient unavailable")
+
+    os_options = {
+        'endpoint_type': conf.swift_endpoint_type,
+        'service_type':  conf.swift_service_type,
+        'user_domain_name': conf.swift_user_domain_name,
+        'project_domain_name': conf.swift_project_domain_name,
+    }
+    if conf.swift_region:
+        os_options['region_name'] = conf.swift_region
+
+    return swclient.Connection(
+        preauthurl=conf.swift_url,
+        auth_version=conf.swift_auth_version,
+        authurl=conf.swift_authurl,
+        preauthtoken=conf.swift_preauthtoken,
+        user=conf.swift_user,
+        key=conf.swift_key,
+        tenant_name=conf.swift_project_name,
+        timeout=conf.swift_timeout,
+        insecure=conf.swift_auth_insecure,
+        os_options=os_options,
+        cacert=conf.swift_cacert,
+        retries=0)
+
+
+POST_HEADERS = {'Accept': 'application/json', 'Content-Type': 'text/plain'}
+
+
+def bulk_delete(conn, container, objects):
+    objects = [quote(('/%s/%s' % (container, obj['name'])).encode('utf-8'))
+               for obj in objects]
+    resp = {}
+    headers, body = conn.post_account(
+        headers=POST_HEADERS, query_string='bulk-delete',
+        data=b''.join(obj.encode('utf-8') + b'\n' for obj in objects),
+        response_dict=resp)
+    if resp['status'] != 200:
+        raise storage.StorageError(
+            "Unable to bulk-delete, is bulk-delete enabled in Swift?")
+    resp = swift_utils.parse_api_response(headers, body)
+    LOG.debug('# of objects deleted: %s, # of objects skipped: %s',
+              resp['Number Deleted'], resp['Number Not Found'])
diff --git a/gnocchi/exceptions.py b/gnocchi/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..81b484bf3013b1a9beb44572ed21898bbacf0002
--- /dev/null
+++ b/gnocchi/exceptions.py
@@ -0,0 +1,19 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
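+# Shadows the builtin on purpose: drivers raise this to signal an optional
+# feature they do not implement, so callers can catch it specifically.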
+class NotImplementedError(NotImplementedError):
+    pass
diff --git a/gnocchi/gendoc.py b/gnocchi/gendoc.py
new file mode 100644
index 0000000000000000000000000000000000000000..ccdc4d794be25f24043c7d7d4aedfc74ba163d42
--- /dev/null
+++ b/gnocchi/gendoc.py
@@ -0,0 +1,258 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import
+import json
+import os
+import subprocess
+import sys
+import tempfile
+
+import jinja2
+from oslo_config import generator
+import six
+import six.moves
+from sphinx.util import logging
+import webob.request
+import yaml
+
+from gnocchi.tests import test_rest
+
+
+LOG = logging.getLogger(__name__)
+
+# HACK(jd) Not sure why, but Sphinx runs this setup multiple times, so we
+# just avoid repeating the requests by using this global variable :(
+_RUN = False
+
+
+def _format_json(txt):
+    return json.dumps(json.loads(txt),
+                      sort_keys=True,
+                      indent=2)
+
+
+def _extract_body(req_or_resp):
+    # TODO(jd) Make this a Sphinx option
+    if not req_or_resp.text:
+        return ""
+
+    if six.PY2:
+        body = req_or_resp.body
+    else:
+        body = req_or_resp.text
+    if req_or_resp.content_type.startswith("application/json"):
+        body = _format_json(body)
+    return "\n      ".join(body.split("\n"))
+
+
+def _format_headers(headers):
+    return "\n".join(
+        "      %s: %s" % (k, v)
+        for k, v in six.iteritems(headers))
+
+
+def _response_to_httpdomain(response):
+    return """
+   .. sourcecode:: http
+
+      HTTP/1.1 %(status)s
+%(headers)s
+
+      %(body)s""" % {
+        'status': response.status,
+        'body': _extract_body(response),
+        'headers': _format_headers(response.headers),
+    }
+
+
+def _request_to_httpdomain(request):
+    return """
+   .. sourcecode:: http
+
+      %(method)s %(path)s %(http_version)s
+%(headers)s
+
+      %(body)s""" % {
+        'body': _extract_body(request),
+        'method': request.method,
+        'path': request.path_qs,
+        'http_version': request.http_version,
+        'headers': _format_headers(request.headers),
+    }
+
+
+def _format_request_reply(request, response):
+    return (_request_to_httpdomain(request)
+            + "\n"
+            + _response_to_httpdomain(response))
+
+
+class ScenarioList(list):
+    def __getitem__(self, key):
+        for scenario in self:
+            if scenario['name'] == key:
+                return scenario
+        return super(ScenarioList, self).__getitem__(key)
+
+
+multiversion_hack = """
+import shutil
+import sys
+import os
+
+local_branch_path = os.getcwd()
+srcdir = os.path.join("%s", "..", "..")
+os.chdir(srcdir)
+sys.path.insert(0, srcdir)
+
+version = sys.argv[1]
+
+if version not in ["<local>", "master"]:
+    # NOTE(sileht): Update _static files (mainly logos)
+    if not os.path.exists("doc/source/_static"):
+        os.makedirs("doc/source/_static")
+    for f in ("doc/source/_static/gnocchi-icon.ico",
+              "doc/source/_static/gnocchi-logo.png"):
+        if os.path.exists(f):
+            os.remove(f)
+        shutil.copy(local_branch_path + "/" + f, f)
+
+    # NOTE(sileht): We delete releasenotes from old documentation
+    # only master will have it.
+    if (os.path.exists("releasenotes")
+            and os.path.exists("doc/source/releasenotes/unreleased.rst")):
+        shutil.rmtree("releasenotes")
+        shutil.move("doc/source/releasenotes", "backup")
+        os.makedirs("doc/source/releasenotes")
+        with open("doc/source/releasenotes/index.rst", "w") as f:
+            f.write(\"\"\"
+Release Notes
+=============
+
+Release notes can be found `here </releasenotes/index.html>`_
+
+.. raw:: html
+
+    <meta http-equiv="refresh" content="0; url=/releasenotes/index.html">
+
+
+\"\"\")
+
+
+
+# NOTE(sileht): entry points mix old and new locations. We create a
+# symlink to fool pkg_resources so it finds them even when the code
+# lives at the new location.
+try:
+    os.symlink("storage/incoming", "gnocchi/incoming")
+except OSError:
+    pass
+
+class FakeApp(object):
+    def info(self, *args, **kwargs):
+        pass
+
+import gnocchi.gendoc
+gnocchi.gendoc.setup(FakeApp())
+"""
+
+
+def setup(app):
+    global _RUN
+    if _RUN:
+        return
+
+    # NOTE(sileht): On gnocchi.xyz, we build a multi-version documentation;
+    # all versions are built with the master gnocchi.gendoc Sphinx
+    # extension. So the hack here runs another Python script to generate
+    # the rest.rst file for the old versions of the module.
+    # It also drops the database before each run.
+    if sys.argv[0].endswith("sphinx-versioning"):
+        subprocess.check_call(["dropdb", os.environ['PGDATABASE']])
+        subprocess.check_call(["createdb", os.environ['PGDATABASE']])
+        from sphinxcontrib.versioning import sphinx_
+        version = sphinx_.EventHandlers.CURRENT_VERSION
+        # NOTE: open in text mode so writing a str works on Python 3 too
+        with tempfile.NamedTemporaryFile(mode="w") as f:
+            f.write(multiversion_hack % app.confdir)
+            f.flush()
+            subprocess.check_call(['python', f.name, version])
+        _RUN = True
+        return
+
+    # TODO(jd) Do not hardcode doc/source
+    with open("doc/source/rest.yaml") as f:
+        scenarios = ScenarioList(yaml.safe_load(f))
+
+    test = test_rest.RestTest()
+    test.auth_mode = "basic"
+    test.setUpClass()
+    test.setUp()
+    webapp = test.app
+
+    try:
+        for entry in scenarios:
+            if 'filter' in entry:
+                entry['filter'] = jinja2.Template(entry['filter']).render(
+                    scenarios=scenarios)
+
+            template = jinja2.Template(entry['request'])
+            fake_file = six.moves.cStringIO()
+            content = template.render(scenarios=scenarios)
+            if six.PY2:
+                content = content.encode('utf-8')
+            fake_file.write(content)
+            fake_file.seek(0)
+            request = webapp.RequestClass.from_file(fake_file)
+
+            # TODO(jd) Fix this lame bug in webob < 1.7
+            if (hasattr(webob.request, "http_method_probably_has_body")
+               and request.method == "DELETE"):
+                # WebOb has a bug: it does not read the body for DELETE
+                # requests, so feed it manually
+                clen = request.content_length
+                if clen is None:
+                    request.body = fake_file.read()
+                else:
+                    request.body = fake_file.read(clen)
+
+            LOG.info("Doing request %s: %s",
+                     entry['name'], six.text_type(request))
+            with webapp.use_admin_user():
+                response = webapp.request(request)
+            entry['response'] = response
+            entry['doc'] = _format_request_reply(request, response)
+    finally:
+        test.tearDown()
+        test.tearDownClass()
+    with open("doc/source/rest.j2", "r") as f:
+        content = f.read()
+        if six.PY2:
+            content = content.decode("utf-8")
+        template = jinja2.Template(content)
+    with open("doc/source/rest.rst", "w") as f:
+        content = template.render(scenarios=scenarios)
+        if six.PY2:
+            content = content.encode("utf-8")
+        f.write(content)
+
+    config_output_file = 'doc/source/gnocchi.conf.sample'
+    LOG.info("Generating %s", config_output_file)
+    generator.main([
+        '--config-file',
+        '%s/gnocchi-config-generator.conf' % os.path.dirname(__file__),
+        '--output-file', config_output_file,
+    ])
+
+    _RUN = True
diff --git a/gnocchi/gnocchi-config-generator.conf b/gnocchi/gnocchi-config-generator.conf
new file mode 100644
index 0000000000000000000000000000000000000000..5d0c49321092579d8792ecd2a859da6b4d95b34a
--- /dev/null
+++ b/gnocchi/gnocchi-config-generator.conf
@@ -0,0 +1,8 @@
+[DEFAULT]
+wrap_width = 79
+namespace = gnocchi
+namespace = oslo.middleware.cors
+namespace = oslo.middleware.healthcheck
+namespace = oslo.policy
+namespace = cotyledon
+namespace = keystonemiddleware.auth_token
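+# NOTE: illustrative, assumed-equivalent CLI invocation of what
+# gnocchi.gendoc runs programmatically via oslo_config.generator.main:
+#   oslo-config-generator --config-file gnocchi/gnocchi-config-generator.conf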
diff --git a/gnocchi/incoming/__init__.py b/gnocchi/incoming/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d3620e99138534a18ee6bdac1654093ca26bf4fa
--- /dev/null
+++ b/gnocchi/incoming/__init__.py
@@ -0,0 +1,268 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017-2018 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+import functools
+import itertools
+import operator
+
+import daiquiri
+import numpy
+import six
+
+from gnocchi.carbonara import TIMESERIES_ARRAY_DTYPE
+from gnocchi import exceptions
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+Measure = collections.namedtuple("Measure", ['timestamp', 'value'])
+
+
+ITEMGETTER_1 = operator.itemgetter(1)
+
+
+class ReportGenerationError(Exception):
+    pass
+
+
+class SackDetectionError(Exception):
+    pass
+
+
+@functools.total_ordering
+class Sack(object):
+    """A sack is a recipient that contains measures for a group of metrics.
+
+    It is identified by a positive integer called `number`.
+    """
+
+    # Use slots to make them as small as possible since we can create a ton of
+    # those.
+    __slots__ = [
+        "number",
+        "total",
+        "name",
+    ]
+
+    def __init__(self, number, total, name):
+        """Create a new sack.
+
+        :param number: The sack number, identifying it.
+        :param total: The total number of sacks.
+        :param name: The sack name.
+        """
+        self.number = number
+        self.total = total
+        self.name = name
+
+    def __str__(self):
+        return self.name
+
+    def __repr__(self):
+        return "<%s(%d/%d) %s>" % (
+            self.__class__.__name__, self.number, self.total, str(self),
+        )
+
+    def _compare(self, op, other):
+        if isinstance(other, Sack):
+            if self.total != other.total:
+                raise TypeError(
+                    "Cannot compare %s with different total number" %
+                    self.__class__.__name__)
+            return op(self.number, other.number)
+        raise TypeError("Cannot compare %r with %r" % (self, other))
+
+    def __lt__(self, other):
+        return self._compare(operator.lt, other)
+
+    def __eq__(self, other):
+        return self._compare(operator.eq, other)
+
+    def __ne__(self, other):
+        # neither total_ordering nor Python 2 sets __ne__ as the
+        # opposite of __eq__
+        return self._compare(operator.ne, other)
+
+    def __hash__(self):
+        return hash(self.name)
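+
+
+# NOTE: a minimal illustrative sketch of the ordering semantics above; the
+# sack names follow SACK_NAME_FORMAT below, everything else is assumed:
+#
+#     Sack(1, 8, "incoming8-1") < Sack(2, 8, "incoming8-2")    # True
+#     Sack(1, 8, "incoming8-1") == Sack(1, 8, "incoming8-1")   # True
+#     Sack(1, 8, "a") < Sack(1, 16, "b")  # TypeError: totals differ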
+
+
+class IncomingDriver(object):
+    MEASURE_PREFIX = "measure"
+    SACK_NAME_FORMAT = "incoming{total}-{number}"
+    CFG_PREFIX = 'gnocchi-config'
+    CFG_SACKS = 'sacks'
+    # NOTE(sileht): By default we use threads, but some drivers disable
+    # them by setting this to utils.sequencial_map
+    MAP_METHOD = staticmethod(utils.parallel_map)
+
+    @property
+    def NUM_SACKS(self):
+        if not hasattr(self, '_num_sacks'):
+            try:
+                self._num_sacks = int(self._get_storage_sacks())
+            except Exception as e:
+                raise SackDetectionError(e)
+        return self._num_sacks
+
+    def __init__(self, conf, greedy=True):
+        self._sacks = None
+
+    def upgrade(self, num_sacks):
+        try:
+            self.NUM_SACKS
+        except SackDetectionError:
+            self.set_storage_settings(num_sacks)
+
+    @staticmethod
+    def set_storage_settings(num_sacks):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def remove_sack_group(num_sacks):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def _get_storage_sacks():
+        """Return the number of sacks in storage. None if not set."""
+        raise exceptions.NotImplementedError
+
+    def _make_measures_array(self):
+        return numpy.array([], dtype=TIMESERIES_ARRAY_DTYPE)
+
+    @staticmethod
+    def _array_concatenate(arrays):
+        if arrays:
+            return numpy.concatenate(arrays)
+        return arrays
+
+    def _unserialize_measures(self, measure_id, data):
+        try:
+            return numpy.frombuffer(data, dtype=TIMESERIES_ARRAY_DTYPE)
+        except ValueError:
+            LOG.error(
+                "Unable to decode measure %s, possible data corruption",
+                measure_id)
+            raise
+
+    def _encode_measures(self, measures):
+        return numpy.fromiter(measures,
+                              dtype=TIMESERIES_ARRAY_DTYPE).tobytes()
+
+    def group_metrics_by_sack(self, metrics):
+        """Iterate on a list of metrics, grouping them by sack.
+
+        :param metrics: A list of metric uuid.
+        :return: An iterator yield (group, metrics).
+        """
+        metrics_and_sacks = sorted(
+            ((m, self.sack_for_metric(m)) for m in metrics),
+            key=ITEMGETTER_1)
+        for sack, metrics in itertools.groupby(metrics_and_sacks,
+                                               key=ITEMGETTER_1):
+            yield sack, [m[0] for m in metrics]
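+
+    # Usage sketch (illustrative; ``handle`` is a hypothetical consumer):
+    #
+    #     for sack, ms in driver.group_metrics_by_sack(metric_ids):
+    #         handle(sack, ms)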
+
+    def add_measures(self, metric_id, measures):
+        """Add a measure to a metric.
+
+        :param metric_id: The metric measured.
+        :param measures: The actual measures.
+        """
+        self.add_measures_batch({metric_id: measures})
+
+    def add_measures_batch(self, metrics_and_measures):
+        """Add a batch of measures for some metrics.
+
+        :param metrics_and_measures: A dict where keys are metric ids
+                                     and values are a list of
+                                     :py:class:`gnocchi.incoming.Measure`.
+        """
+        self.MAP_METHOD(self._store_new_measures,
+                        ((metric_id, self._encode_measures(measures))
+                         for metric_id, measures
+                         in six.iteritems(metrics_and_measures)))
+
+    @staticmethod
+    def _store_new_measures(metric_id, data):
+        raise exceptions.NotImplementedError
+
+    def measures_report(self, details=True):
+        """Return a report of pending to process measures.
+
+        Only useful for drivers that process measurements in background
+
+        :return: {'summary': {'metrics': count, 'measures': count},
+                  'details': {metric_id: pending_measures_count}}
+        """
+        metrics, measures, full_details = self._build_report(details)
+        report = {'summary': {'metrics': metrics, 'measures': measures}}
+        if full_details is not None:
+            report['details'] = full_details
+        return report
+
+    @staticmethod
+    def _build_report(details):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_unprocessed_measures_for_metric(metric_id):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def process_measure_for_metrics(metric_id):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def process_measures_for_sack(sack):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def has_unprocessed(metric_id):
+        raise exceptions.NotImplementedError
+
+    def _get_sack_name(self, number):
+        return self.SACK_NAME_FORMAT.format(
+            total=self.NUM_SACKS, number=number)
+
+    def _make_sack(self, i):
+        return Sack(i, self.NUM_SACKS, self._get_sack_name(i))
+
+    def sack_for_metric(self, metric_id):
+        return self._make_sack(metric_id.int % self.NUM_SACKS)
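+
+    # NOTE (illustrative): a metric is pinned to a sack by taking its
+    # UUID's integer value modulo NUM_SACKS, so the assignment is
+    # deterministic and stable as long as the sack count is unchanged.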
+
+    def iter_sacks(self):
+        return (self._make_sack(i) for i in six.moves.range(self.NUM_SACKS))
+
+    @staticmethod
+    def iter_on_sacks_to_process():
+        """Return an iterable of sack that got new measures to process."""
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def finish_sack_processing(sack):
+        """Mark sack processing has finished."""
+        pass
+
+
+@utils.retry_on_exception_and_log("Unable to initialize incoming driver")
+def get_driver(conf):
+    """Return configured incoming driver only
+
+    :param conf: incoming configuration only (not global)
+    """
+    return utils.get_driver_class('gnocchi.incoming', conf.incoming)(
+        conf.incoming, conf.metricd.greedy)
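+
+
+# Usage sketch (illustrative; assumes a loaded oslo.config object with
+# [incoming] and [metricd] sections, and hypothetical values below):
+#
+#     driver = get_driver(conf)
+#     driver.upgrade(num_sacks=128)
+#     driver.add_measures(metric_id, [Measure(timestamp, value)])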
diff --git a/gnocchi/incoming/ceph.py b/gnocchi/incoming/ceph.py
new file mode 100644
index 0000000000000000000000000000000000000000..8a39fa8e4a38ad3e8bf6a5eba114439d1bcf40f3
--- /dev/null
+++ b/gnocchi/incoming/ceph.py
@@ -0,0 +1,234 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from collections import defaultdict
+import contextlib
+import daiquiri
+import datetime
+import json
+import uuid
+
+import numpy
+import six
+
+from gnocchi.common import ceph
+from gnocchi import incoming
+
+rados = ceph.rados
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class CephStorage(incoming.IncomingDriver):
+
+    Q_LIMIT = 1000
+
+    def __init__(self, conf, greedy=True):
+        super(CephStorage, self).__init__(conf)
+        self.rados, self.ioctx = ceph.create_rados_connection(conf)
+        # NOTE(sileht): constants can't be class attributes because
+        # they rely on the presence of the rados module
+
+        # NOTE(sileht): We allow reading the measure objects from outdated
+        # replicas; that is safe for us, as we will pick up the new data on
+        # the next metricd pass.
+        self.OMAP_READ_FLAGS = (rados.LIBRADOS_OPERATION_BALANCE_READS |
+                                rados.LIBRADOS_OPERATION_SKIPRWLOCKS)
+
+        # NOTE(sileht): It should be safe to manipulate the omap keys from
+        # several OSDs at the same time: each OSD replicates a new key to
+        # the others, and the same goes for deletions. I wonder how Ceph
+        # handles rm_omap and set_omap running at the same time on the same
+        # key. I assume the operations are timestamped, so the outcome will
+        # be consistent. If not, there is still one acceptable race here: an
+        # rm_omap can finish before all replicas of the set_omap are done,
+        # but we don't care. If that occurs, the next metricd run will just
+        # remove the key again; the measures it pointed to have already been
+        # deleted by the previous run, so we are safe.
+        self.OMAP_WRITE_FLAGS = rados.LIBRADOS_OPERATION_SKIPRWLOCKS
+
+    def __str__(self):
+        # Use cluster ID for now
+        return "%s: %s" % (self.__class__.__name__, self.rados.get_fsid())
+
+    def stop(self):
+        ceph.close_rados_connection(self.rados, self.ioctx)
+        super(CephStorage, self).stop()
+
+    def _get_storage_sacks(self):
+        return json.loads(
+            self.ioctx.read(self.CFG_PREFIX).decode())[self.CFG_SACKS]
+
+    def set_storage_settings(self, num_sacks):
+        self.ioctx.write_full(self.CFG_PREFIX,
+                              json.dumps({self.CFG_SACKS: num_sacks}).encode())
+
+    def remove_sacks(self):
+        for sack in self.iter_sacks():
+            try:
+                self.ioctx.remove_object(str(sack))
+            except rados.ObjectNotFound:
+                pass
+
+    def add_measures_batch(self, metrics_and_measures):
+        data_by_sack = defaultdict(lambda: defaultdict(list))
+        for metric_id, measures in six.iteritems(metrics_and_measures):
+            name = "_".join((
+                self.MEASURE_PREFIX,
+                str(metric_id),
+                str(uuid.uuid4()),
+                datetime.datetime.utcnow().strftime("%Y%m%d_%H:%M:%S")))
+            sack = self.sack_for_metric(metric_id)
+            data_by_sack[sack]['names'].append(name)
+            data_by_sack[sack]['measures'].append(
+                self._encode_measures(measures))
+
+        ops = []
+        for sack, data in data_by_sack.items():
+            with rados.WriteOpCtx() as op:
+                # NOTE(sileht): listing all objects in a pool is too slow
+                # with many objects (2 minutes for 20000 objects on a
+                # 50-OSD cluster) and forces us to iterate over all of
+                # them. So we keep one object per sack whose omap holds the
+                # list of measure objects to process (not xattrs, because
+                # they don't allow configuring the locking behavior).
+                self.ioctx.set_omap(op, tuple(data['names']),
+                                    tuple(data['measures']))
+                ops.append(self.ioctx.operate_aio_write_op(
+                    op, str(sack), flags=self.OMAP_WRITE_FLAGS))
+        while ops:
+            op = ops.pop()
+            op.wait_for_complete()
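+
+    # NOTE: illustrative omap key format, as built in add_measures_batch:
+    #     measure_<metric uuid>_<random uuid>_<YYYYMMDD>_<HH:MM:SS>
+    # _build_report() below recovers the metric id via name.split("_")[1].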
+
+    def _build_report(self, details):
+        metrics = set()
+        count = 0
+        metric_details = defaultdict(int)
+        for sack in self.iter_sacks():
+            marker = ""
+            while True:
+                names = list(self._list_keys_to_process(
+                    sack, marker=marker, limit=self.Q_LIMIT))
+                if names and names[0] < marker:
+                    raise incoming.ReportGenerationError(
+                        "Unable to cleanly compute backlog.")
+                for name in names:
+                    count += 1
+                    metric = name.split("_")[1]
+                    metrics.add(metric)
+                    if details:
+                        metric_details[metric] += 1
+                if len(names) < self.Q_LIMIT:
+                    break
+                else:
+                    marker = name
+
+        return len(metrics), count, metric_details if details else None
+
+    def _list_keys_to_process(self, sack, prefix="", marker="", limit=-1):
+        with rados.ReadOpCtx() as op:
+            omaps, ret = self.ioctx.get_omap_vals(op, marker, prefix, limit)
+            try:
+                self.ioctx.operate_read_op(
+                    op, str(sack), flag=self.OMAP_READ_FLAGS)
+            except rados.ObjectNotFound:
+                # The API has not written anything yet
+                return {}
+            # NOTE(sileht): after reading libradospy, I'm not sure that ret
+            # will have the correct value: get_omap_vals converts the C int
+            # to a Python int before operate_read_op is called, and I don't
+            # know whether the int's content is copied during that
+            # conversion or whether it is a pointer to the C int. I think
+            # it's copied...
+            try:
+                ceph.errno_to_exception(ret)
+            except rados.ObjectNotFound:
+                return {}
+
+            return dict(omaps)
+
+    def delete_unprocessed_measures_for_metric(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        key_prefix = self.MEASURE_PREFIX + "_" + str(metric_id)
+        keys = tuple(self._list_keys_to_process(sack, key_prefix).keys())
+
+        if not keys:
+            return
+
+        # Now clean objects and omap
+        with rados.WriteOpCtx() as op:
+            # NOTE(sileht): come on Ceph, no return code
+            # for this operation ?!!
+            self.ioctx.remove_omap_keys(op, keys)
+            self.ioctx.operate_write_op(op, str(sack),
+                                        flags=self.OMAP_WRITE_FLAGS)
+
+    def has_unprocessed(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        object_prefix = self.MEASURE_PREFIX + "_" + str(metric_id)
+        return bool(self._list_keys_to_process(sack, object_prefix))
+
+    @contextlib.contextmanager
+    def process_measure_for_metrics(self, metric_ids):
+        measures = {}
+        processed_keys = {}
+        with rados.ReadOpCtx() as op:
+            for metric_id in metric_ids:
+                sack = self.sack_for_metric(metric_id)
+                processed_keys[sack] = self._list_keys_to_process(
+                    sack, prefix=self.MEASURE_PREFIX + "_" + str(metric_id))
+                m = self._make_measures_array()
+                for k, v in six.iteritems(processed_keys[sack]):
+                    m = numpy.concatenate(
+                        (m, self._unserialize_measures(k, v)))
+
+                measures[metric_id] = m
+
+        yield measures
+
+        # Now clean omap
+        with rados.WriteOpCtx() as op:
+            for sack, keys in six.iteritems(processed_keys):
+                # NOTE(sileht): come on Ceph, no return code
+                # for this operation ?!!
+                self.ioctx.remove_omap_keys(op, tuple(keys.keys()))
+                self.ioctx.operate_write_op(op, str(sack),
+                                            flags=self.OMAP_WRITE_FLAGS)
+
+    @contextlib.contextmanager
+    def process_measures_for_sack(self, sack):
+        measures = defaultdict(self._make_measures_array)
+        omaps = self._list_keys_to_process(
+            sack, prefix=self.MEASURE_PREFIX + "_")
+        for k, v in six.iteritems(omaps):
+            try:
+                metric_id = uuid.UUID(k.split("_")[1])
+            except (ValueError, IndexError):
+                LOG.warning("Unable to parse measure object name %s",
+                            k)
+                continue
+            measures[metric_id] = numpy.concatenate(
+                (measures[metric_id], self._unserialize_measures(k, v))
+            )
+
+        yield measures
+
+        # Now clean omap
+        processed_keys = tuple(omaps.keys())
+        if processed_keys:
+            with rados.WriteOpCtx() as op:
+                # NOTE(sileht): come on Ceph, no return code
+                # for this operation ?!!
+                self.ioctx.remove_omap_keys(op, tuple(processed_keys))
+                self.ioctx.operate_write_op(op, str(sack),
+                                            flags=self.OMAP_WRITE_FLAGS)
diff --git a/gnocchi/incoming/file.py b/gnocchi/incoming/file.py
new file mode 100644
index 0000000000000000000000000000000000000000..446807e83b9e71e730807ac905da7e8d68b12653
--- /dev/null
+++ b/gnocchi/incoming/file.py
@@ -0,0 +1,208 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import contextlib
+import datetime
+import errno
+import json
+import os
+import shutil
+import tempfile
+import uuid
+
+import daiquiri
+import numpy
+import six
+
+from gnocchi import incoming
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class FileStorage(incoming.IncomingDriver):
+    def __init__(self, conf, greedy=True):
+        super(FileStorage, self).__init__(conf)
+        self.basepath = conf.file_basepath
+        self.basepath_tmp = os.path.join(self.basepath, 'tmp')
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, str(self.basepath))
+
+    def upgrade(self, num_sacks):
+        super(FileStorage, self).upgrade(num_sacks)
+        utils.ensure_paths([self.basepath_tmp])
+
+    def _get_storage_sacks(self):
+        with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX),
+                  'r') as f:
+            return json.load(f)[self.CFG_SACKS]
+
+    def set_storage_settings(self, num_sacks):
+        data = {self.CFG_SACKS: num_sacks}
+        with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX), 'w') as f:
+            json.dump(data, f)
+        utils.ensure_paths((self._sack_path(s) for s in self.iter_sacks()))
+
+    def remove_sacks(self):
+        for sack in self.iter_sacks():
+            shutil.rmtree(os.path.join(self.basepath, str(sack)))
+
+    def _sack_path(self, sack):
+        return os.path.join(self.basepath, str(sack))
+
+    def _measure_path(self, sack, metric_id):
+        return os.path.join(self._sack_path(sack), six.text_type(metric_id))
+
+    def _build_measure_path(self, metric_id, random_id=None):
+        sack = self.sack_for_metric(metric_id)
+        path = self._measure_path(sack, metric_id)
+        if random_id:
+            if random_id is True:
+                now = datetime.datetime.utcnow().strftime("_%Y%m%d_%H:%M:%S")
+                random_id = six.text_type(uuid.uuid4()) + now
+            return os.path.join(path, random_id)
+        return path
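+
+    # Illustrative on-disk layout produced by the path helpers above:
+    #     <basepath>/<sack>/<metric uuid>/<random uuid>_<YYYYMMDD>_<HH:MM:SS>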
+
+    def _store_new_measures(self, metric_id, data):
+        tmpfile = tempfile.NamedTemporaryFile(
+            prefix='gnocchi', dir=self.basepath_tmp,
+            delete=False)
+        tmpfile.write(data)
+        tmpfile.close()
+        path = self._build_measure_path(metric_id, True)
+        while True:
+            try:
+                os.rename(tmpfile.name, path)
+                break
+            except OSError as e:
+                if e.errno != errno.ENOENT:
+                    raise
+                try:
+                    os.mkdir(self._build_measure_path(metric_id))
+                except OSError as e:
+                    # NOTE(jd) It's possible that another process created the
+                    # path just before us! In this case, good for us, let's do
+                    # nothing then! (see bug #1475684)
+                    if e.errno != errno.EEXIST:
+                        raise
+
+    def _build_report(self, details):
+        report_vars = {'metrics': 0, 'measures': 0, 'metric_details': {}}
+        if details:
+            def build_metric_report(metric, sack):
+                report_vars['metric_details'][metric] = len(
+                    self._list_measures_container_for_metric_str(sack, metric))
+        else:
+            def build_metric_report(metric, sack):
+                report_vars['metrics'] += 1
+                report_vars['measures'] += len(
+                    self._list_measures_container_for_metric_str(sack, metric))
+
+        for sack in self.iter_sacks():
+            for metric in set(self._list_target(self._sack_path(sack))):
+                build_metric_report(metric, sack)
+        return (report_vars['metrics'] or
+                len(report_vars['metric_details'].keys()),
+                report_vars['measures'] or
+                sum(report_vars['metric_details'].values()),
+                report_vars['metric_details'] if details else None)
+
+    def _list_measures_container_for_metric_str(self, sack, metric_id):
+        return self._list_target(self._measure_path(sack, metric_id))
+
+    def _list_measures_container_for_metric(self, metric_id):
+        return self._list_target(self._build_measure_path(metric_id))
+
+    @staticmethod
+    def _list_target(target):
+        try:
+            return os.listdir(target)
+        except OSError as e:
+            # Some other process already handled this one, so do nothing
+            if e.errno == errno.ENOENT:
+                return []
+            raise
+
+    def _delete_measures_files_for_metric(self, metric_id, files):
+        for f in files:
+            try:
+                os.unlink(self._build_measure_path(metric_id, f))
+            except OSError as e:
+                # Another process deleted it in the meantime, no problem
+                if e.errno != errno.ENOENT:
+                    raise
+        try:
+            os.rmdir(self._build_measure_path(metric_id))
+        except OSError as e:
+            # ENOENT: ok, it has been removed at almost the same time
+            #         by another process
+            # ENOTEMPTY: ok, someone pushed measure in the meantime,
+            #            we'll delete the measures and directory later
+            # EEXIST: some systems use this instead of ENOTEMPTY
+            if e.errno not in (errno.ENOENT, errno.ENOTEMPTY, errno.EEXIST):
+                raise
+
+    def delete_unprocessed_measures_for_metric(self, metric_id):
+        files = self._list_measures_container_for_metric(metric_id)
+        self._delete_measures_files_for_metric(metric_id, files)
+
+    def has_unprocessed(self, metric_id):
+        return os.path.isdir(self._build_measure_path(metric_id))
+
+    @contextlib.contextmanager
+    def process_measure_for_metrics(self, metric_ids):
+        measures = {}
+        processed_files = {}
+        for metric_id in metric_ids:
+            files = self._list_measures_container_for_metric(metric_id)
+            processed_files[metric_id] = files
+            m = self._make_measures_array()
+            for f in files:
+                abspath = self._build_measure_path(metric_id, f)
+                with open(abspath, "rb") as e:
+                    m = numpy.concatenate((
+                        m, self._unserialize_measures(f, e.read())))
+            measures[metric_id] = m
+
+        yield measures
+
+        for metric_id, files in six.iteritems(processed_files):
+            self._delete_measures_files_for_metric(metric_id, files)
+
+    @contextlib.contextmanager
+    def process_measures_for_sack(self, sack):
+        measures = {}
+        processed_files = {}
+        for metric_id in self._list_target(self._sack_path(sack)):
+            try:
+                metric_id = uuid.UUID(metric_id)
+            except ValueError:
+                LOG.error("Unable to parse %s as an UUID, ignoring metric",
+                          metric_id)
+                continue
+            files = self._list_measures_container_for_metric_str(
+                sack, metric_id)
+            processed_files[metric_id] = files
+            m = self._make_measures_array()
+            for f in files:
+                abspath = self._build_measure_path(metric_id, f)
+                with open(abspath, "rb") as e:
+                    m = numpy.concatenate((
+                        m, self._unserialize_measures(f, e.read())))
+            measures[metric_id] = m
+
+        yield measures
+
+        for metric_id, files in six.iteritems(processed_files):
+            self._delete_measures_files_for_metric(metric_id, files)
diff --git a/gnocchi/incoming/redis.py b/gnocchi/incoming/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..2a5191b418571af2bad0c4d141baf439797447ae
--- /dev/null
+++ b/gnocchi/incoming/redis.py
@@ -0,0 +1,195 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017-2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import contextlib
+import uuid
+
+import daiquiri
+import six
+
+from gnocchi.common import redis
+from gnocchi import incoming
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class RedisStorage(incoming.IncomingDriver):
+
+    _SCRIPTS = {
+        "process_measure_for_metric": """
+local llen = redis.call("LLEN", KEYS[1])
+-- lrange is inclusive on both ends, decrease to grab exactly n items
+if llen > 0 then llen = llen - 1 end
+return {llen, table.concat(redis.call("LRANGE", KEYS[1], 0, llen), "")}
+""",
+        "process_measures_for_sack": """
+local results = {}
+local metric_id_extractor = "[^%s]*%s([^%s]*)"
+local metric_with_measures = redis.call("KEYS", KEYS[1] .. "%s*")
+for i, sack_metric in ipairs(metric_with_measures) do
+    local llen = redis.call("LLEN", sack_metric)
+    local metric_id = sack_metric:gmatch(metric_id_extractor)()
+    -- lrange is inclusive on both ends, decrease to grab exactly n items
+    if llen > 0 then llen = llen - 1 end
+    results[#results + 1] = {
+        metric_id,
+        llen,
+        table.concat(redis.call("LRANGE", sack_metric, 0, llen), "")
+    }
+end
+return results
+""" % (redis.SEP_S, redis.SEP_S, redis.SEP_S, redis.SEP_S),
+    }
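+
+    # NOTE: illustrative key layout (see _build_measure_path_with_sack
+    # below): b"incoming128-17" + SEP + b"<metric uuid>", i.e. one Redis
+    # list per metric holding serialized measure batches. The Lua scripts
+    # above drain those lists with LLEN/LRANGE; callers trim with LTRIM.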
+
+    def __init__(self, conf, greedy=True):
+        super(RedisStorage, self).__init__(conf)
+        self._client, self._scripts = redis.get_client(conf, self._SCRIPTS)
+        self.greedy = greedy
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, self._client)
+
+    def _get_storage_sacks(self):
+        return self._client.hget(self.CFG_PREFIX, self.CFG_SACKS)
+
+    def set_storage_settings(self, num_sacks):
+        self._client.hset(self.CFG_PREFIX, self.CFG_SACKS, num_sacks)
+
+    @staticmethod
+    def remove_sacks():
+        # NOTE(gordc): redis doesn't maintain keys with empty values
+        pass
+
+    def _build_measure_path_with_sack(self, metric_id, sack_name):
+        return redis.SEP.join([sack_name.encode(), str(metric_id).encode()])
+
+    def _build_measure_path(self, metric_id):
+        return self._build_measure_path_with_sack(
+            metric_id, str(self.sack_for_metric(metric_id)))
+
+    def add_measures_batch(self, metrics_and_measures):
+        notified_sacks = set()
+        pipe = self._client.pipeline(transaction=False)
+        for metric_id, measures in six.iteritems(metrics_and_measures):
+            sack_name = str(self.sack_for_metric(metric_id))
+            path = self._build_measure_path_with_sack(metric_id, sack_name)
+            pipe.rpush(path, self._encode_measures(measures))
+            if self.greedy and sack_name not in notified_sacks:
+                # value has no meaning, we just use this for notification
+                pipe.setnx(sack_name, 1)
+                notified_sacks.add(sack_name)
+        pipe.execute()
+
+    def _build_report(self, details):
+        report_vars = {'measures': 0, 'metric_details': {}}
+
+        def update_report(results, m_list):
+            report_vars['measures'] += sum(results)
+            if details:
+                report_vars['metric_details'].update(
+                    dict(six.moves.zip(m_list, results)))
+
+        match = redis.SEP.join([self._get_sack_name("*").encode(), b"*"])
+        metrics = 0
+        m_list = []
+        pipe = self._client.pipeline()
+        for key in self._client.scan_iter(match=match, count=1000):
+            metrics += 1
+            pipe.llen(key)
+            if details:
+                m_list.append(key.split(redis.SEP)[1].decode("utf8"))
+            # group 100 commands/call
+            if metrics % 100 == 0:
+                results = pipe.execute()
+                update_report(results, m_list)
+                m_list = []
+                pipe = self._client.pipeline()
+        # flush the final, partial batch of commands
+        results = pipe.execute()
+        update_report(results, m_list)
+        return (metrics, report_vars['measures'],
+                report_vars['metric_details'] if details else None)
+
+    def delete_unprocessed_measures_for_metric(self, metric_id):
+        self._client.delete(self._build_measure_path(metric_id))
+
+    def has_unprocessed(self, metric_id):
+        return bool(self._client.exists(self._build_measure_path(metric_id)))
+
+    @contextlib.contextmanager
+    def process_measure_for_metrics(self, metric_ids):
+        measures = {}
+        pipe = self._client.pipeline(transaction=False)
+        for metric_id in metric_ids:
+            key = self._build_measure_path(metric_id)
+            self._scripts['process_measure_for_metric'](
+                keys=[key],
+                client=pipe,
+            )
+
+        results = pipe.execute()
+        for metric_id, (item_len, data) in six.moves.zip(metric_ids, results):
+            measures[metric_id] = self._unserialize_measures(metric_id, data)
+
+        yield measures
+
+        for metric_id, (item_len, data) in six.moves.zip(metric_ids, results):
+            key = self._build_measure_path(metric_id)
+            # ltrim is inclusive, bump 1 to remove up to and including nth item
+            pipe.ltrim(key, item_len + 1, -1)
+        pipe.execute()
+
+    @contextlib.contextmanager
+    def process_measures_for_sack(self, sack):
+        results = self._scripts['process_measures_for_sack'](keys=[str(sack)])
+
+        measures = {}
+        for metric_id, item_len, data in results:
+            try:
+                metric_id = uuid.UUID(metric_id.decode())
+            except ValueError:
+                LOG.error("Unable to parse metric id %s, ignoring",
+                          metric_id)
+                continue
+            measures[metric_id] = self._unserialize_measures(metric_id, data)
+
+        yield measures
+
+        pipe = self._client.pipeline()
+        for metric_id, item_len, data in results:
+            key = self._build_measure_path_with_sack(
+                metric_id.decode(), str(sack))
+            # ltrim is inclusive, bump 1 to remove up to and including nth item
+            pipe.ltrim(key, item_len + 1, -1)
+        pipe.execute()
+
+    def iter_on_sacks_to_process(self):
+        self._client.config_set("notify-keyspace-events", "K$")
+        p = self._client.pubsub()
+        db = self._client.connection_pool.connection_kwargs['db']
+        keyspace = b"__keyspace@" + str(db).encode() + b"__:"
+        pattern = keyspace + self._get_sack_name("*").encode()
+        p.psubscribe(pattern)
+        for message in p.listen():
+            if message['type'] == 'pmessage' and message['pattern'] == pattern:
+                # FIXME(jd) This is awful; we need a better way to extract
+                # this. The format is defined by _get_sack_name:
+                # incoming128-17
+                yield self._make_sack(int(message['channel'].split(b"-")[-1]))
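+
+    # For example (illustrative): a pmessage on channel
+    # b"__keyspace@0__:incoming128-17" yields the sack numbered 17.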
+
+    def finish_sack_processing(self, sack):
+        # Delete the sack key, which holds no data but is used to get a SET
+        # notification in iter_on_sacks_to_process
+        self._client.delete(str(sack))
diff --git a/gnocchi/incoming/s3.py b/gnocchi/incoming/s3.py
new file mode 100644
index 0000000000000000000000000000000000000000..c4f7b45d835d70dccd673dc908f21287da4f4127
--- /dev/null
+++ b/gnocchi/incoming/s3.py
@@ -0,0 +1,190 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2018 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from collections import defaultdict
+import contextlib
+import daiquiri
+import datetime
+import json
+import uuid
+
+import numpy
+
+from gnocchi.common import s3
+from gnocchi import incoming
+
+boto3 = s3.boto3
+botocore = s3.botocore
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class S3Storage(incoming.IncomingDriver):
+
+    # NOTE(gordc): override to follow s3 partitioning logic
+    SACK_NAME_FORMAT = "{number}-{total}"
+
+    def __init__(self, conf, greedy=True):
+        super(S3Storage, self).__init__(conf)
+        self.s3, self._region_name, self._bucket_prefix = (
+            s3.get_connection(conf)
+        )
+
+        self._bucket_name_measures = (
+            self._bucket_prefix + "-" + self.MEASURE_PREFIX
+        )
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, self._bucket_name_measures)
+
+    def _get_storage_sacks(self):
+        response = self.s3.get_object(Bucket=self._bucket_name_measures,
+                                      Key=self.CFG_PREFIX)
+        return json.loads(response['Body'].read().decode())[self.CFG_SACKS]
+
+    def set_storage_settings(self, num_sacks):
+        data = {self.CFG_SACKS: num_sacks}
+        self.s3.put_object(Bucket=self._bucket_name_measures,
+                           Key=self.CFG_PREFIX,
+                           Body=json.dumps(data).encode())
+
+    @staticmethod
+    def remove_sacks(num_sacks):
+        # nothing to clean up since sacks are part of the path
+        pass
+
+    def upgrade(self, num_sacks):
+        try:
+            s3.create_bucket(self.s3, self._bucket_name_measures,
+                             self._region_name)
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error'].get('Code') not in (
+                    "BucketAlreadyExists", "BucketAlreadyOwnedByYou"
+            ):
+                raise
+        # need to create bucket first to store storage settings object
+        super(S3Storage, self).upgrade(num_sacks)
+
+    def _store_new_measures(self, metric_id, data):
+        now = datetime.datetime.utcnow().strftime("_%Y%m%d_%H:%M:%S")
+        self.s3.put_object(
+            Bucket=self._bucket_name_measures,
+            Key="/".join((str(self.sack_for_metric(metric_id)),
+                          str(metric_id),
+                          str(uuid.uuid4()) + now)),
+            Body=data)
+
+    def _build_report(self, details):
+        metric_details = defaultdict(int)
+        response = {}
+        while response.get('IsTruncated', True):
+            if 'NextContinuationToken' in response:
+                kwargs = {
+                    'ContinuationToken': response['NextContinuationToken']
+                }
+            else:
+                kwargs = {}
+            response = self.s3.list_objects_v2(
+                Bucket=self._bucket_name_measures,
+                **kwargs)
+            for c in response.get('Contents', ()):
+                if c['Key'] != self.CFG_PREFIX:
+                    __, metric, metric_file = c['Key'].split("/", 2)
+                    metric_details[metric] += 1
+        return (len(metric_details), sum(metric_details.values()),
+                metric_details if details else None)
+
+    def _list_files(self, path_items, **kwargs):
+        response = {}
+        # Handle pagination
+        while response.get('IsTruncated', True):
+            if 'NextContinuationToken' in response:
+                kwargs['ContinuationToken'] = response['NextContinuationToken']
+            else:
+                try:
+                    del kwargs['ContinuationToken']
+                except KeyError:
+                    pass
+            response = self.s3.list_objects_v2(
+                Bucket=self._bucket_name_measures,
+                Prefix="/".join(path_items) + "/",
+                **kwargs)
+            yield response
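+
+    # Usage sketch (illustrative): each yielded response is one
+    # list_objects_v2 page; iteration stops once IsTruncated is False.
+    #
+    #     for page in self._list_files((str(sack), str(metric_id))):
+    #         for obj in page.get('Contents', ()):
+    #             ...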
+
+    def _list_measure_files(self, path_items):
+        files = set()
+        for response in self._list_files(path_items):
+            for c in response.get('Contents', ()):
+                files.add(c['Key'])
+        return files
+
+    def _list_measure_files_for_metric(self, sack, metric_id):
+        return self._list_measure_files((str(sack), str(metric_id)))
+
+    def delete_unprocessed_measures_for_metric(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        files = self._list_measure_files_for_metric(sack, metric_id)
+        s3.bulk_delete(self.s3, self._bucket_name_measures, files)
+
+    def has_unprocessed(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        return bool(self._list_measure_files_for_metric(sack, metric_id))
+
+    @contextlib.contextmanager
+    def process_measure_for_metrics(self, metric_ids):
+        measures = defaultdict(self._make_measures_array)
+        all_files = []
+        for metric_id in metric_ids:
+            sack = self.sack_for_metric(metric_id)
+            files = self._list_measure_files_for_metric(sack, metric_id)
+            all_files.extend(files)
+            for f in files:
+                response = self.s3.get_object(
+                    Bucket=self._bucket_name_measures,
+                    Key=f)
+                measures[metric_id] = numpy.concatenate((
+                    measures[metric_id],
+                    self._unserialize_measures(f, response['Body'].read())
+                ))
+
+        yield measures
+
+        # Now clean objects
+        s3.bulk_delete(self.s3, self._bucket_name_measures, all_files)
+
+    @contextlib.contextmanager
+    def process_measures_for_sack(self, sack):
+        measures = defaultdict(self._make_measures_array)
+        files = self._list_measure_files((str(sack),))
+        for f in files:
+            try:
+                sack, metric_id, measure_id = f.split("/")
+                metric_id = uuid.UUID(metric_id)
+            except ValueError:
+                LOG.warning("Unable to parse measure file name %s", f)
+                continue
+
+            response = self.s3.get_object(
+                Bucket=self._bucket_name_measures,
+                Key=f)
+            measures[metric_id] = numpy.concatenate((
+                measures[metric_id],
+                self._unserialize_measures(f, response['Body'].read())
+            ))
+
+        yield measures
+
+        # Now clean objects
+        s3.bulk_delete(self.s3, self._bucket_name_measures, files)
diff --git a/gnocchi/incoming/swift.py b/gnocchi/incoming/swift.py
new file mode 100644
index 0000000000000000000000000000000000000000..b232bfd72de28fb28ee778d15413bdbd71ccfb38
--- /dev/null
+++ b/gnocchi/incoming/swift.py
@@ -0,0 +1,146 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from collections import defaultdict
+import contextlib
+import daiquiri
+import datetime
+import json
+import uuid
+
+import six
+
+from gnocchi.common import swift
+from gnocchi import incoming
+from gnocchi import utils
+
+swclient = swift.swclient
+swift_utils = swift.swift_utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class SwiftStorage(incoming.IncomingDriver):
+    # NOTE(sileht): Using threads with swiftclient doesn't work
+    # as expected, so we disable them
+    MAP_METHOD = staticmethod(utils.sequencial_map)
+
+    def __init__(self, conf, greedy=True):
+        super(SwiftStorage, self).__init__(conf)
+        self.swift = swift.get_connection(conf)
+
+    def __str__(self):
+        return self.__class__.__name__
+
+    def _get_storage_sacks(self):
+        __, data = self.swift.get_object(self.CFG_PREFIX, self.CFG_PREFIX)
+        return json.loads(data)[self.CFG_SACKS]
+
+    def set_storage_settings(self, num_sacks):
+        self.swift.put_container(self.CFG_PREFIX)
+        self.swift.put_object(self.CFG_PREFIX, self.CFG_PREFIX,
+                              json.dumps({self.CFG_SACKS: num_sacks}))
+        for sack in self.iter_sacks():
+            self.swift.put_container(str(sack))
+
+    def remove_sacks(self):
+        for sack in self.iter_sacks():
+            self.swift.delete_container(str(sack))
+
+    def _store_new_measures(self, metric_id, data):
+        now = datetime.datetime.utcnow().strftime("_%Y%m%d_%H:%M:%S")
+        self.swift.put_object(
+            str(self.sack_for_metric(metric_id)),
+            str(metric_id) + "/" + str(uuid.uuid4()) + now,
+            data)
+
+    def _build_report(self, details):
+        metric_details = defaultdict(int)
+        nb_metrics = 0
+        measures = 0
+        for sack in self.iter_sacks():
+            if details:
+                headers, files = self.swift.get_container(
+                    str(sack), full_listing=True)
+                for f in files:
+                    metric, __ = f['name'].split("/", 1)
+                    metric_details[metric] += 1
+            else:
+                headers, files = self.swift.get_container(
+                    str(sack), delimiter='/', full_listing=True)
+                nb_metrics += len([f for f in files if 'subdir' in f])
+            measures += int(headers.get('x-container-object-count'))
+        return (nb_metrics or len(metric_details), measures,
+                metric_details if details else None)
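+
+    # NOTE: with delimiter='/' (the no-details path above), Swift returns
+    # one 'subdir' entry per distinct metric prefix, so counting those
+    # entries gives the number of metrics without listing every measure.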
+
+    def _list_measure_files_for_metric(self, sack, metric_id):
+        headers, files = self.swift.get_container(
+            str(sack), path=six.text_type(metric_id),
+            full_listing=True)
+        return files
+
+    def delete_unprocessed_measures_for_metric(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        files = self._list_measure_files_for_metric(sack, metric_id)
+        swift.bulk_delete(self.swift, str(sack), files)
+
+    def has_unprocessed(self, metric_id):
+        sack = self.sack_for_metric(metric_id)
+        return bool(self._list_measure_files_for_metric(sack, metric_id))
+
+    @contextlib.contextmanager
+    def process_measure_for_metrics(self, metric_ids):
+        measures = {}
+        all_files = defaultdict(list)
+        for metric_id in metric_ids:
+            sack = self.sack_for_metric(metric_id)
+            sack_name = str(sack)
+            files = self._list_measure_files_for_metric(sack, metric_id)
+            all_files[sack_name].extend(files)
+            measures[metric_id] = self._array_concatenate([
+                self._unserialize_measures(
+                    f['name'],
+                    self.swift.get_object(sack_name, f['name'])[1],
+                )
+                for f in files
+            ])
+
+        yield measures
+
+        # Now clean objects
+        for sack_name, files in six.iteritems(all_files):
+            swift.bulk_delete(self.swift, sack_name, files)
+
+    @contextlib.contextmanager
+    def process_measures_for_sack(self, sack):
+        measures = defaultdict(self._make_measures_array)
+        sack_name = str(sack)
+        headers, files = self.swift.get_container(sack_name, full_listing=True)
+        for f in files:
+            try:
+                metric_id, random_id = f['name'].split("/")
+                metric_id = uuid.UUID(metric_id)
+            except ValueError:
+                LOG.warning("Unable to parse measure file name %s", f)
+                continue
+            measures[metric_id] = self._array_concatenate([
+                measures[metric_id],
+                self._unserialize_measures(
+                    metric_id,
+                    self.swift.get_object(sack_name, f['name'])[1],
+                )
+            ])
+
+        yield measures
+
+        swift.bulk_delete(self.swift, sack_name, files)
diff --git a/gnocchi/indexer/__init__.py b/gnocchi/indexer/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..d8fd88540c5eb74353ace6b1d7831c04d319142c
--- /dev/null
+++ b/gnocchi/indexer/__init__.py
@@ -0,0 +1,479 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import fnmatch
+import hashlib
+import os
+
+import iso8601
+from oslo_config import cfg
+import six
+from six.moves.urllib import parse
+from stevedore import driver
+
+from gnocchi import exceptions
+from gnocchi import utils
+
+OPTS = [
+    cfg.StrOpt('url',
+               secret=True,
+               required=True,
+               default=os.getenv("GNOCCHI_INDEXER_URL"),
+               help='Indexer driver to use'),
+]
+
+
+_marker = object()
+
+
+class Resource(object):
+    def get_metric(self, metric_name):
+        for m in self.metrics:
+            if m.name == metric_name:
+                return m
+
+    def __eq__(self, other):
+        return (self.id == other.id
+                and self.type == other.type
+                and self.revision == other.revision
+                and self.revision_start == other.revision_start
+                and self.revision_end == other.revision_end
+                and self.creator == other.creator
+                and self.user_id == other.user_id
+                and self.project_id == other.project_id
+                and self.started_at == other.started_at
+                and self.ended_at == other.ended_at)
+
+    @property
+    def etag(self):
+        etag = hashlib.sha1()
+        etag.update(six.text_type(self.id).encode('utf-8'))
+        etag.update(six.text_type(
+            self.revision_start.isoformat()).encode('utf-8'))
+        return etag.hexdigest()
+
+    @property
+    def lastmodified(self):
+        # less precise revision start for Last-Modified http header
+        return self.revision_start.replace(microsecond=0,
+                                           tzinfo=iso8601.iso8601.UTC)
+
+    __hash__ = object.__hash__
+
+
+class Metric(object):
+    def __init__(self, id, archive_policy, creator=None,
+                 name=None, resource_id=None):
+        self.id = id
+        self.archive_policy = archive_policy
+        self.creator = creator
+        self.name = name
+        self.resource_id = resource_id
+
+    def __repr__(self):
+        return '<%s %s>' % (self.__class__.__name__, self.id)
+
+    def __str__(self):
+        return str(self.id)
+
+    def __eq__(self, other):
+        return (isinstance(other, Metric)
+                and self.id == other.id
+                and self.archive_policy == other.archive_policy
+                and self.creator == other.creator
+                and self.name == other.name
+                and self.resource_id == other.resource_id)
+
+    __hash__ = object.__hash__
+
+
+@utils.retry_on_exception_and_log("Unable to initialize indexer driver")
+def get_driver(conf):
+    """Return the configured driver."""
+    split = parse.urlsplit(conf.indexer.url)
+    d = driver.DriverManager('gnocchi.indexer',
+                             split.scheme).driver
+    return d(conf)
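+
+
+# For example (illustrative): an indexer URL such as
+# "postgresql://user:password@localhost/gnocchi" makes stevedore load the
+# plugin registered as "postgresql" in the "gnocchi.indexer" namespace.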
+
+
+class IndexerException(Exception):
+    """Base class for all exceptions raised by an indexer."""
+
+
+class NoSuchResourceType(IndexerException):
+    """Error raised when the resource type is unknown."""
+    def __init__(self, type):
+        super(NoSuchResourceType, self).__init__(
+            "Resource type %s does not exist" % type)
+        self.type = type
+
+    def jsonify(self):
+        return {
+            "cause": "Resource type does not exist",
+            "detail": self.type,
+        }
+
+
+class NoSuchMetric(IndexerException):
+    """Error raised when a metric does not exist."""
+    def __init__(self, metric):
+        super(NoSuchMetric, self).__init__("Metric %s does not exist" %
+                                           metric)
+        self.metric = metric
+
+    def jsonify(self):
+        return {
+            "cause": "Metrics not found",
+            "detail": self.metric,
+        }
+
+
+class NoSuchResource(IndexerException):
+    """Error raised when a resource does not exist."""
+    def __init__(self, resource):
+        super(NoSuchResource, self).__init__("Resource %s does not exist" %
+                                             resource)
+        self.resource = resource
+
+
+class NoSuchArchivePolicy(IndexerException):
+    """Error raised when an archive policy does not exist."""
+    def __init__(self, archive_policy):
+        super(NoSuchArchivePolicy, self).__init__(
+            "Archive policy %s does not exist" % archive_policy)
+        self.archive_policy = archive_policy
+
+    def jsonify(self):
+        return {
+            "cause": "Archive policy does not exist",
+            "detail": self.archive_policy,
+        }
+
+
+class UnsupportedArchivePolicyChange(IndexerException):
+    """Error raised when modifying archive policy if not supported."""
+    def __init__(self, archive_policy, message):
+        super(UnsupportedArchivePolicyChange, self).__init__(
+            "Archive policy %s does not support change: %s" %
+            (archive_policy, message))
+        self.archive_policy = archive_policy
+        self.message = message
+
+
+class ArchivePolicyInUse(IndexerException):
+    """Error raised when an archive policy is still being used."""
+    def __init__(self, archive_policy):
+        super(ArchivePolicyInUse, self).__init__(
+            "Archive policy %s is still in use" % archive_policy)
+        self.archive_policy = archive_policy
+
+
+class ResourceTypeInUse(IndexerException):
+    """Error raised when an resource type is still being used."""
+    def __init__(self, resource_type):
+        super(ResourceTypeInUse, self).__init__(
+            "Resource type %s is still in use" % resource_type)
+        self.resource_type = resource_type
+
+
+class UnexpectedResourceTypeState(IndexerException):
+    """Error raised when an resource type state is not expected."""
+    def __init__(self, resource_type, expected_state, state):
+        super(UnexpectedResourceTypeState, self).__init__(
+            "Resource type %s state is %s (expected: %s)" % (
+                resource_type, state, expected_state))
+        self.resource_type = resource_type
+        self.expected_state = expected_state
+        self.state = state
+
+
+class NoSuchArchivePolicyRule(IndexerException):
+    """Error raised when an archive policy rule does not exist."""
+    def __init__(self, archive_policy_rule):
+        super(NoSuchArchivePolicyRule, self).__init__(
+            "Archive policy rule %s does not exist" %
+            archive_policy_rule)
+        self.archive_policy_rule = archive_policy_rule
+
+
+class NoArchivePolicyRuleMatch(IndexerException):
+    """Error raised when no archive policy rule found for metric."""
+    def __init__(self, metric_name):
+        super(NoArchivePolicyRuleMatch, self).__init__(
+            "No Archive policy rule found for metric %s" %
+            metric_name)
+        self.metric_name = metric_name
+
+
+class UnsupportedArchivePolicyRuleChange(IndexerException):
+    """Error raised when modifying archive policy rule if not supported."""
+    def __init__(self, archive_policy_rule, message):
+        super(UnsupportedArchivePolicyRuleChange, self).__init__(
+            "Archive policy rule %s does not support change: %s" %
+            (archive_policy_rule, message))
+        self.archive_policy_rule = archive_policy_rule
+        self.message = message
+
+
+class NamedMetricAlreadyExists(IndexerException):
+    """Error raised when a named metric already exists."""
+    def __init__(self, metric_name):
+        super(NamedMetricAlreadyExists, self).__init__(
+            "Named metric %s already exists" % metric_name)
+        self.metric_name = metric_name
+
+    def jsonify(self):
+        return {"cause": "Named metric already exists",
+                "detail": self.metric_name}
+
+
+class ResourceAlreadyExists(IndexerException):
+    """Error raised when a resource already exists."""
+    def __init__(self, resource):
+        super(ResourceAlreadyExists, self).__init__(
+            "Resource %s already exists" % resource)
+        self.resource = resource
+
+    def jsonify(self):
+        return {"cause": "Resource already exists",
+                "detail": self.resource}
+
+
+class ResourceTypeAlreadyExists(IndexerException):
+    """Error raised when a resource type already exists."""
+    def __init__(self, resource_type):
+        super(ResourceTypeAlreadyExists, self).__init__(
+            "Resource type %s already exists" % resource_type)
+        self.resource_type = resource_type
+
+
+class ResourceAttributeError(IndexerException, AttributeError):
+    """Error raised when an attribute does not exist for a resource type."""
+    def __init__(self, resource, attribute):
+        super(ResourceAttributeError, self).__init__(
+            "Resource type %s has no %s attribute" % (resource, attribute))
+        self.resource = resource
+        self.attribute = attribute
+
+
+class ResourceValueError(IndexerException, ValueError):
+    """Error raised when an attribute value is invalid for a resource type."""
+    def __init__(self, resource_type, attribute, value):
+        super(ResourceValueError, self).__init__(
+            "Value %s for attribute %s on resource type %s is invalid"
+            % (value, attribute, resource_type))
+        self.resource_type = resource_type
+        self.attribute = attribute
+        self.value = value
+
+
+class ArchivePolicyAlreadyExists(IndexerException):
+    """Error raised when an archive policy already exists."""
+    def __init__(self, name):
+        super(ArchivePolicyAlreadyExists, self).__init__(
+            "Archive policy %s already exists" % name)
+        self.name = name
+
+
+class ArchivePolicyRuleAlreadyExists(IndexerException):
+    """Error raised when an archive policy rule already exists."""
+    def __init__(self, name):
+        super(ArchivePolicyRuleAlreadyExists, self).__init__(
+            "Archive policy rule %s already exists" % name)
+        self.name = name
+
+
+class QueryError(IndexerException):
+    def __init__(self):
+        super(QueryError, self).__init__("Unable to parse this query")
+
+
+class QueryValueError(QueryError, ValueError):
+    def __init__(self, v, f):
+        # Skip QueryError.__init__, which sets a generic message, and call
+        # the IndexerException constructor with a specific one instead.
+        super(QueryError, self).__init__("Invalid value: `%s' for field `%s'"
+                                         % (v, f))
+
+
+class QueryInvalidOperator(QueryError):
+    def __init__(self, op):
+        self.op = op
+        super(QueryError, self).__init__("Unknown operator `%s'" % op)
+
+
+class QueryAttributeError(QueryError, ResourceAttributeError):
+    def __init__(self, resource, attribute):
+        ResourceAttributeError.__init__(self, resource, attribute)
+
+
+class InvalidPagination(IndexerException):
+    """Error raised when a resource does not exist."""
+    def __init__(self, reason):
+        self.reason = reason
+        super(InvalidPagination, self).__init__(
+            "Invalid pagination: `%s'" % reason)
+
+
+class IndexerDriver(object):
+    @staticmethod
+    def __init__(conf):
+        pass
+
+    @staticmethod
+    def disconnect():
+        pass
+
+    @staticmethod
+    def upgrade(nocreate=False):
+        pass
+
+    @staticmethod
+    def get_resource(resource_type, resource_id, with_metrics=False):
+        """Get a resource from the indexer.
+
+        :param resource_type: The type of the resource to look for.
+        :param resource_id: The UUID of the resource.
+        :param with_metrics: Whether to include metrics information.
+        """
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def list_resources(resource_type='generic',
+                       attribute_filter=None,
+                       details=False,
+                       history=False,
+                       limit=None,
+                       marker=None,
+                       sorts=None):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def list_archive_policies():
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def get_archive_policy(name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def update_archive_policy(name, ap_items):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_archive_policy(name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def get_archive_policy_rule(name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def list_archive_policy_rules():
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def create_archive_policy_rule(name, metric_pattern, archive_policy_name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def update_archive_policy_rule(name, new_name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_archive_policy_rule(name):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def create_metric(id, creator,
+                      archive_policy_name, name=None, unit=None,
+                      resource_id=None):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def list_metrics(details=False, status='active',
+                     limit=None, marker=None, sorts=None,
+                     attribute_filter=None, policy_filter=None,
+                     resource_policy_filter=None):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def create_archive_policy(archive_policy):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def create_resource(resource_type, id, creator,
+                        user_id=None, project_id=None,
+                        started_at=None, ended_at=None, metrics=None,
+                        **kwargs):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def update_resource(resource_type, resource_id, ended_at=_marker,
+                        metrics=_marker,
+                        append_metrics=False,
+                        create_revision=True,
+                        **kwargs):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_resource(uuid):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_resources(resource_type='generic',
+                         attribute_filter=None):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def delete_metric(id):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def expunge_metric(id):
+        raise exceptions.NotImplementedError
+
+    def get_archive_policy_for_metric(self, metric_name):
+        """Helper to get the archive policy according archive policy rules."""
+        rules = self.list_archive_policy_rules()
+        for rule in rules:
+            if fnmatch.fnmatch(metric_name or "", rule.metric_pattern):
+                return self.get_archive_policy(rule.archive_policy_name)
+        raise NoArchivePolicyRuleMatch(metric_name)
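+    # Worked example (illustrative): given a rule whose metric_pattern is
+    # "cpu*" and whose archive_policy_name is "low", calling
+    # get_archive_policy_for_metric("cpu_util") matches through fnmatch and
+    # returns the "low" policy; a name matching no rule raises
+    # NoArchivePolicyRuleMatch.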
+
+    @staticmethod
+    def create_resource_type(resource_type):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def get_resource_type(name):
+        """Get a resource type from the indexer.
+
+        :param name: name of the resource type
+        """
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def list_resource_types(attribute_filter=None,
+                            limit=None,
+                            marker=None,
+                            sorts=None):
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def get_resource_attributes_schemas():
+        raise exceptions.NotImplementedError
+
+    @staticmethod
+    def get_resource_type_schema():
+        raise exceptions.NotImplementedError
diff --git a/gnocchi/indexer/alembic/alembic.ini b/gnocchi/indexer/alembic/alembic.ini
new file mode 100644
index 0000000000000000000000000000000000000000..db7340acd9cfc2cd1ae794c6006ca739297e4c25
--- /dev/null
+++ b/gnocchi/indexer/alembic/alembic.ini
@@ -0,0 +1,3 @@
+[alembic]
+script_location = gnocchi.indexer:alembic
+sqlalchemy.url = postgresql://localhost/gnocchi
diff --git a/gnocchi/indexer/alembic/env.py b/gnocchi/indexer/alembic/env.py
new file mode 100644
index 0000000000000000000000000000000000000000..c7d7f9e13a7c27db2912e4031fb5f771f80db753
--- /dev/null
+++ b/gnocchi/indexer/alembic/env.py
@@ -0,0 +1,89 @@
+#
+# Copyright 2015 Red Hat. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A test module to exercise the Gnocchi API with gabbi."""
+
+from alembic import context
+
+from gnocchi.indexer import sqlalchemy
+from gnocchi.indexer import sqlalchemy_base
+
+# this is the Alembic Config object, which provides
+# access to the values within the .ini file in use.
+config = context.config
+
+# add your model's MetaData object here
+# for 'autogenerate' support
+# from myapp import mymodel
+# target_metadata = mymodel.Base.metadata
+target_metadata = sqlalchemy_base.Base.metadata
+
+# other values from the config, defined by the needs of env.py,
+# can be acquired:
+# my_important_option = config.get_main_option("my_important_option")
+# ... etc.
+
+
+def run_migrations_offline():
+    """Run migrations in 'offline' mode.
+
+    This configures the context with just a URL
+    and not an Engine, though an Engine is acceptable
+    here as well.  By skipping the Engine creation
+    we don't even need a DBAPI to be available.
+
+    Calls to context.execute() here emit the given string to the
+    script output.
+
+    """
+    conf = config.conf
+    context.configure(url=conf.indexer.url,
+                      target_metadata=target_metadata)
+
+    with context.begin_transaction():
+        context.run_migrations()
+
+
+def run_migrations_online():
+    """Run migrations in 'online' mode.
+
+    In this scenario we need to create an Engine
+    and associate a connection with the context.
+
+    """
+    conf = config.conf
+    indexer = sqlalchemy.SQLAlchemyIndexer(conf)
+    with indexer.facade.writer_connection() as connectable:
+
+        with connectable.connect() as connection:
+            context.configure(
+                connection=connection,
+                target_metadata=target_metadata
+            )
+
+            with context.begin_transaction():
+                context.run_migrations()
+
+    indexer.disconnect()
+
+# If `alembic' was used directly from the CLI
+if not hasattr(config, "conf"):
+    from gnocchi import service
+    config.conf = service.prepare_service([])
+
+if context.is_offline_mode():
+    run_migrations_offline()
+else:
+    run_migrations_online()
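+
+# Illustrative usage: with the ini file shipped alongside this module, the
+# migrations can also be driven straight from the Alembic CLI (the branch
+# above then builds the configuration itself), e.g.:
+#
+#   alembic -c gnocchi/indexer/alembic/alembic.ini upgrade head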
diff --git a/gnocchi/indexer/alembic/script.py.mako b/gnocchi/indexer/alembic/script.py.mako
new file mode 100644
index 0000000000000000000000000000000000000000..66e2be40d329644a7d4709d078e658324fa5c83c
--- /dev/null
+++ b/gnocchi/indexer/alembic/script.py.mako
@@ -0,0 +1,36 @@
+# Copyright ${create_date.year} The Gnocchi Developers
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""${message}
+
+Revision ID: ${up_revision}
+Revises: ${down_revision | comma,n}
+Create Date: ${create_date}
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+${imports if imports else ""}
+
+# revision identifiers, used by Alembic.
+revision = ${repr(up_revision)}
+down_revision = ${repr(down_revision)}
+branch_labels = ${repr(branch_labels)}
+depends_on = ${repr(depends_on)}
+
+
+def upgrade():
+    ${upgrades if upgrades else "pass"}
diff --git a/gnocchi/indexer/alembic/versions/0718ed97e5b3_add_tablename_to_resource_type.py b/gnocchi/indexer/alembic/versions/0718ed97e5b3_add_tablename_to_resource_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..8662b1146c2c57fb4eee733b1ae8a587dbc278d8
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/0718ed97e5b3_add_tablename_to_resource_type.py
@@ -0,0 +1,54 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Add tablename to resource_type
+
+Revision ID: 0718ed97e5b3
+Revises: 828c16f70cce
+Create Date: 2016-01-20 08:14:04.893783
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '0718ed97e5b3'
+down_revision = '828c16f70cce'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.add_column("resource_type", sa.Column('tablename', sa.String(18),
+                                             nullable=True))
+
+    resource_type = sa.Table(
+        'resource_type', sa.MetaData(),
+        sa.Column('name', sa.String(255), nullable=False),
+        sa.Column('tablename', sa.String(18), nullable=True)
+    )
+    op.execute(resource_type.update().where(
+        resource_type.c.name == "instance_network_interface"
+    ).values({'tablename': op.inline_literal("instance_net_int")}))
+    op.execute(resource_type.update().where(
+        resource_type.c.name != "instance_network_interface"
+    ).values({'tablename': resource_type.c.name}))
+
+    op.alter_column("resource_type", "tablename", type_=sa.String(18),
+                    nullable=False)
+    op.create_unique_constraint("uniq_resource_type0tablename",
+                                "resource_type", ["tablename"])
diff --git a/gnocchi/indexer/alembic/versions/1c2c61ac1f4c_add_original_resource_id_column.py b/gnocchi/indexer/alembic/versions/1c2c61ac1f4c_add_original_resource_id_column.py
new file mode 100644
index 0000000000000000000000000000000000000000..59632635ffeefe257a1216cbdba6e67646f9dc69
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/1c2c61ac1f4c_add_original_resource_id_column.py
@@ -0,0 +1,40 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""add original resource id column
+
+Revision ID: 1c2c61ac1f4c
+Revises: 62a8dfb139bb
+Create Date: 2016-01-27 05:57:48.909012
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = '1c2c61ac1f4c'
+down_revision = '62a8dfb139bb'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.add_column('resource', sa.Column('original_resource_id',
+                                        sa.String(length=255),
+                                        nullable=True))
+    op.add_column('resource_history', sa.Column('original_resource_id',
+                                                sa.String(length=255),
+                                                nullable=True))
diff --git a/gnocchi/indexer/alembic/versions/1c98ac614015_initial_base.py b/gnocchi/indexer/alembic/versions/1c98ac614015_initial_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..f22def6412bc7cdb62d2d157817fb2f2313432bd
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/1c98ac614015_initial_base.py
@@ -0,0 +1,268 @@
+# flake8: noqa
+# Copyright 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""Initial base for Gnocchi 1.0.0
+
+Revision ID: 1c98ac614015
+Revises: 
+Create Date: 2015-04-27 16:05:13.530625
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '1c98ac614015'
+down_revision = None
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils
+
+import gnocchi.indexer.sqlalchemy_base
+import gnocchi.indexer.sqlalchemy_types
+
+
+def upgrade():
+    op.create_table('resource',
+    sa.Column('type', sa.Enum('generic', 'instance', 'swift_account', 'volume', 'ceph_account', 'network', 'identity', 'ipmi', 'stack', 'image', name='resource_type_enum'), nullable=False),
+    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('started_at', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=False),
+    sa.Column('revision_start', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=False),
+    sa.Column('ended_at', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=True),
+    sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_resource_id', 'resource', ['id'], unique=False)
+    op.create_table('archive_policy',
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('back_window', sa.Integer(), nullable=False),
+    sa.Column('definition', gnocchi.indexer.sqlalchemy_base.ArchivePolicyDefinitionType(), nullable=False),
+    sa.Column('aggregation_methods', gnocchi.indexer.sqlalchemy_base.SetType(), nullable=False),
+    sa.PrimaryKeyConstraint('name'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_archive_policy_name', 'archive_policy', ['name'], unique=False)
+    op.create_table('volume',
+    sa.Column('display_name', sa.String(length=255), nullable=False),
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_volume_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_volume_id', 'volume', ['id'], unique=False)
+    op.create_table('instance',
+    sa.Column('flavor_id', sa.Integer(), nullable=False),
+    sa.Column('image_ref', sa.String(length=255), nullable=False),
+    sa.Column('host', sa.String(length=255), nullable=False),
+    sa.Column('display_name', sa.String(length=255), nullable=False),
+    sa.Column('server_group', sa.String(length=255), nullable=True),
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_instance_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_instance_id', 'instance', ['id'], unique=False)
+    op.create_table('stack',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_stack_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_stack_id', 'stack', ['id'], unique=False)
+    op.create_table('archive_policy_rule',
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('archive_policy_name', sa.String(length=255), nullable=False),
+    sa.Column('metric_pattern', sa.String(length=255), nullable=False),
+    sa.ForeignKeyConstraint(['archive_policy_name'], ['archive_policy.name'], name="fk_archive_policy_rule_archive_policy_name_archive_policy_name", ondelete='RESTRICT'),
+    sa.PrimaryKeyConstraint('name'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_archive_policy_rule_name', 'archive_policy_rule', ['name'], unique=False)
+    op.create_table('swift_account',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_swift_account_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_swift_account_id', 'swift_account', ['id'], unique=False)
+    op.create_table('ceph_account',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_ceph_account_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_ceph_account_id', 'ceph_account', ['id'], unique=False)
+    op.create_table('ipmi',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_ipmi_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_ipmi_id', 'ipmi', ['id'], unique=False)
+    op.create_table('image',
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('container_format', sa.String(length=255), nullable=False),
+    sa.Column('disk_format', sa.String(length=255), nullable=False),
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_image_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_image_id', 'image', ['id'], unique=False)
+    op.create_table('resource_history',
+    sa.Column('type', sa.Enum('generic', 'instance', 'swift_account', 'volume', 'ceph_account', 'network', 'identity', 'ipmi', 'stack', 'image', name='resource_type_enum'), nullable=False),
+    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('started_at', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=False),
+    sa.Column('revision_start', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=False),
+    sa.Column('ended_at', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=True),
+    sa.Column('user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.Column('revision_end', gnocchi.indexer.sqlalchemy_types.PreciseTimestamp(), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_resource_history_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_resource_history_id', 'resource_history', ['id'], unique=False)
+    op.create_table('identity',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_identity_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_identity_id', 'identity', ['id'], unique=False)
+    op.create_table('network',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.ForeignKeyConstraint(['id'], ['resource.id'], name="fk_network_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_network_id', 'network', ['id'], unique=False)
+    op.create_table('metric',
+    sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=False),
+    sa.Column('archive_policy_name', sa.String(length=255), nullable=False),
+    sa.Column('created_by_user_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('created_by_project_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('resource_id', sqlalchemy_utils.types.uuid.UUIDType(binary=False), nullable=True),
+    sa.Column('name', sa.String(length=255), nullable=True),
+    sa.ForeignKeyConstraint(['archive_policy_name'], ['archive_policy.name'], name="fk_metric_archive_policy_name_archive_policy_name", ondelete='RESTRICT'),
+    sa.ForeignKeyConstraint(['resource_id'], ['resource.id'], name="fk_metric_resource_id_resource_id", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('id'),
+    sa.UniqueConstraint('resource_id', 'name', name='uniq_metric0resource_id0name'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_metric_id', 'metric', ['id'], unique=False)
+    op.create_table('identity_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_identity_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_identity_history_revision', 'identity_history', ['revision'], unique=False)
+    op.create_table('instance_history',
+    sa.Column('flavor_id', sa.Integer(), nullable=False),
+    sa.Column('image_ref', sa.String(length=255), nullable=False),
+    sa.Column('host', sa.String(length=255), nullable=False),
+    sa.Column('display_name', sa.String(length=255), nullable=False),
+    sa.Column('server_group', sa.String(length=255), nullable=True),
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_instance_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_instance_history_revision', 'instance_history', ['revision'], unique=False)
+    op.create_table('network_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_network_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_network_history_revision', 'network_history', ['revision'], unique=False)
+    op.create_table('swift_account_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_swift_account_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_swift_account_history_revision', 'swift_account_history', ['revision'], unique=False)
+    op.create_table('ceph_account_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_ceph_account_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_ceph_account_history_revision', 'ceph_account_history', ['revision'], unique=False)
+    op.create_table('ipmi_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_ipmi_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_ipmi_history_revision', 'ipmi_history', ['revision'], unique=False)
+    op.create_table('image_history',
+    sa.Column('name', sa.String(length=255), nullable=False),
+    sa.Column('container_format', sa.String(length=255), nullable=False),
+    sa.Column('disk_format', sa.String(length=255), nullable=False),
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_image_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_image_history_revision', 'image_history', ['revision'], unique=False)
+    op.create_table('stack_history',
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_stack_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_stack_history_revision', 'stack_history', ['revision'], unique=False)
+    op.create_table('volume_history',
+    sa.Column('display_name', sa.String(length=255), nullable=False),
+    sa.Column('revision', sa.Integer(), nullable=False),
+    sa.ForeignKeyConstraint(['revision'], ['resource_history.revision'], name="fk_volume_history_resource_history_revision", ondelete='CASCADE'),
+    sa.PrimaryKeyConstraint('revision'),
+    mysql_charset='utf8',
+    mysql_engine='InnoDB'
+    )
+    op.create_index('ix_volume_history_revision', 'volume_history', ['revision'], unique=False)
diff --git a/gnocchi/indexer/alembic/versions/1e1a63d3d186_original_resource_id_not_null.py b/gnocchi/indexer/alembic/versions/1e1a63d3d186_original_resource_id_not_null.py
new file mode 100644
index 0000000000000000000000000000000000000000..bd73b12b01e5f5eeb5bcd39b25a0e7c1013f327f
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/1e1a63d3d186_original_resource_id_not_null.py
@@ -0,0 +1,66 @@
+# Copyright 2017 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Make sure resource.original_resource_id is NOT NULL
+
+Revision ID: 1e1a63d3d186
+Revises: 397987e38570
+Create Date: 2017-01-26 19:33:35.209688
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy import func
+import sqlalchemy_utils
+
+
+# revision identifiers, used by Alembic.
+revision = '1e1a63d3d186'
+down_revision = '397987e38570'
+branch_labels = None
+depends_on = None
+
+
+def clean_substr(col, start, length):
+    return func.lower(func.substr(func.hex(col), start, length))
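+
+# Worked example (illustrative): for a row whose binary id stores the UUID
+# 550e8400-e29b-41d4-a716-446655440000, HEX(id) yields
+# '550E8400E29B41D4A716446655440000'; the substrings (1, 8), (9, 4),
+# (13, 4), (17, 4) and (21, 12), lowercased and joined with '-', rebuild the
+# canonical dashed form stored into original_resource_id below.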
+
+
+def upgrade():
+    bind = op.get_bind()
+    for table_name in ('resource', 'resource_history'):
+        table = sa.Table(table_name, sa.MetaData(),
+                         sa.Column('id',
+                                   sqlalchemy_utils.types.uuid.UUIDType(),
+                                   nullable=False),
+                         sa.Column('original_resource_id', sa.String(255)))
+
+        # NOTE(gordc): mysql stores the id as binary, so we need to rebuild
+        # it back into a string uuid.
+        if bind and bind.engine.name == "mysql":
+            vals = {'original_resource_id':
+                    clean_substr(table.c.id, 1, 8) + '-' +
+                    clean_substr(table.c.id, 9, 4) + '-' +
+                    clean_substr(table.c.id, 13, 4) + '-' +
+                    clean_substr(table.c.id, 17, 4) + '-' +
+                    clean_substr(table.c.id, 21, 12)}
+        else:
+            vals = {'original_resource_id': table.c.id}
+
+        op.execute(table.update().where(
+            table.c.original_resource_id.is_(None)).values(vals))
+        op.alter_column(table_name, "original_resource_id", nullable=False,
+                        existing_type=sa.String(255),
+                        existing_nullable=True)
diff --git a/gnocchi/indexer/alembic/versions/1f21cbdd6bc2_allow_volume_display_name_to_be_null.py b/gnocchi/indexer/alembic/versions/1f21cbdd6bc2_allow_volume_display_name_to_be_null.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2e48d9b4d3af1711a04c05f931dfdf15a499cf2
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/1f21cbdd6bc2_allow_volume_display_name_to_be_null.py
@@ -0,0 +1,41 @@
+# Copyright 2015 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""allow volume display name to be null
+
+Revision ID: 1f21cbdd6bc2
+Revises: 469b308577a9
+Create Date: 2015-12-08 02:12:20.273880
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '1f21cbdd6bc2'
+down_revision = '469b308577a9'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.alter_column('volume', 'display_name',
+                    existing_type=sa.String(length=255),
+                    nullable=True)
+    op.alter_column('volume_history', 'display_name',
+                    existing_type=sa.String(length=255),
+                    nullable=True)
diff --git a/gnocchi/indexer/alembic/versions/27d2a1d205ff_add_updating_resource_type_states.py b/gnocchi/indexer/alembic/versions/27d2a1d205ff_add_updating_resource_type_states.py
new file mode 100644
index 0000000000000000000000000000000000000000..33d60f475cb32560bc38b9bc91916d998a3ee284
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/27d2a1d205ff_add_updating_resource_type_states.py
@@ -0,0 +1,89 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Add updating resource type states
+
+Revision ID: 27d2a1d205ff
+Revises: 7e6f9d542f8b
+Create Date: 2016-08-31 14:05:34.316496
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+from gnocchi.indexer import sqlalchemy_types
+from gnocchi import utils
+
+# revision identifiers, used by Alembic.
+revision = '27d2a1d205ff'
+down_revision = '7e6f9d542f8b'
+branch_labels = None
+depends_on = None
+
+
+resource_type = sa.sql.table(
+    'resource_type',
+    sa.sql.column('updated_at', sqlalchemy_types.PreciseTimestamp()))
+
+state_enum = sa.Enum("active", "creating",
+                     "creation_error", "deleting",
+                     "deletion_error", "updating",
+                     "updating_error",
+                     name="resource_type_state_enum")
+
+
+def upgrade():
+
+    op.alter_column('resource_type', 'state',
+                    type_=state_enum,
+                    nullable=False,
+                    server_default=None)
+
+    # NOTE(sileht): postgresql has a builtin ENUM type, so
+    # just altering the column won't work.
+    # https://bitbucket.org/zzzeek/alembic/issues/270/altering-enum-type
+    # Does it break offline migration because we use get_bind()?
+
+    # NOTE(luogangyi): since we cannot use 'ALTER TYPE' in transaction,
+    # we split the 'ALTER TYPE' operation into several steps.
+    bind = op.get_bind()
+    if bind and bind.engine.name == "postgresql":
+        op.execute("ALTER TYPE resource_type_state_enum RENAME TO \
+                    old_resource_type_state_enum")
+        op.execute("CREATE TYPE resource_type_state_enum AS ENUM \
+                       ('active', 'creating', 'creation_error', \
+                        'deleting', 'deletion_error', 'updating', \
+                        'updating_error')")
+        op.execute("ALTER TABLE resource_type ALTER COLUMN state TYPE \
+                   resource_type_state_enum USING \
+                   state::text::resource_type_state_enum")
+        op.execute("DROP TYPE old_resource_type_state_enum")
+
+    # NOTE(sileht): we can't alter type with server_default set on
+    # postgresql...
+    op.alter_column('resource_type', 'state',
+                    type_=state_enum,
+                    nullable=False,
+                    server_default="creating")
+    op.add_column("resource_type",
+                  sa.Column("updated_at",
+                            sqlalchemy_types.PreciseTimestamp(),
+                            nullable=True))
+
+    op.execute(resource_type.update().values({'updated_at': utils.utcnow()}))
+    op.alter_column("resource_type", "updated_at",
+                    type_=sqlalchemy_types.PreciseTimestamp(),
+                    nullable=False)
diff --git a/gnocchi/indexer/alembic/versions/2e0b912062d1_drop_useless_enum.py b/gnocchi/indexer/alembic/versions/2e0b912062d1_drop_useless_enum.py
new file mode 100644
index 0000000000000000000000000000000000000000..5215da09486f9a65bbeb31d55265a279b807e25b
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/2e0b912062d1_drop_useless_enum.py
@@ -0,0 +1,39 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""drop_useless_enum
+
+Revision ID: 2e0b912062d1
+Revises: 34c517bcc2dd
+Create Date: 2016-04-15 07:29:38.492237
+
+"""
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = '2e0b912062d1'
+down_revision = '34c517bcc2dd'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    bind = op.get_bind()
+    if bind and bind.engine.name == "postgresql":
+        # NOTE(sileht): we use IF EXISTS because if the database has
+        # been created from scratch with 2.1, the enum doesn't exist
+        op.execute("DROP TYPE IF EXISTS resource_type_enum")
diff --git a/gnocchi/indexer/alembic/versions/34c517bcc2dd_shorter_foreign_key.py b/gnocchi/indexer/alembic/versions/34c517bcc2dd_shorter_foreign_key.py
new file mode 100644
index 0000000000000000000000000000000000000000..f7a4a61a41f96a237634454e22a0d793ad18ab7f
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/34c517bcc2dd_shorter_foreign_key.py
@@ -0,0 +1,91 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""shorter_foreign_key
+
+Revision ID: 34c517bcc2dd
+Revises: ed9c6ddc5c35
+Create Date: 2016-04-13 16:58:42.536431
+
+"""
+
+from alembic import op
+import sqlalchemy
+
+# revision identifiers, used by Alembic.
+revision = '34c517bcc2dd'
+down_revision = 'ed9c6ddc5c35'
+branch_labels = None
+depends_on = None
+
+
+resource_type_helper = sqlalchemy.Table(
+    'resource_type',
+    sqlalchemy.MetaData(),
+    sqlalchemy.Column('tablename', sqlalchemy.String(18), nullable=False)
+)
+
+to_rename = [
+    ('fk_metric_archive_policy_name_archive_policy_name',
+     'fk_metric_ap_name_ap_name',
+     'archive_policy', 'name',
+     'metric', 'archive_policy_name',
+     "RESTRICT"),
+    ('fk_resource_history_resource_type_name',
+     'fk_rh_resource_type_name',
+     'resource_type', 'name', 'resource_history', 'type',
+     "RESTRICT"),
+    ('fk_resource_history_id_resource_id',
+     'fk_rh_id_resource_id',
+     'resource', 'id', 'resource_history', 'id',
+     "CASCADE"),
+    ('fk_archive_policy_rule_archive_policy_name_archive_policy_name',
+     'fk_apr_ap_name_ap_name',
+     'archive_policy', 'name', 'archive_policy_rule', 'archive_policy_name',
+     "RESTRICT")
+]
+
+
+def upgrade():
+    connection = op.get_bind()
+
+    insp = sqlalchemy.inspect(connection)
+
+    op.alter_column("resource_type", "tablename",
+                    type_=sqlalchemy.String(35),
+                    existing_type=sqlalchemy.String(18), nullable=False)
+
+    for rt in connection.execute(resource_type_helper.select()):
+        if rt.tablename == "generic":
+            continue
+
+        fk_names = [fk['name'] for fk in insp.get_foreign_keys("%s_history" %
+                                                               rt.tablename)]
+        fk_old = ("fk_%s_history_resource_history_revision" %
+                  rt.tablename)
+        if fk_old not in fk_names:
+            # The table has been created from scratch recently
+            fk_old = ("fk_%s_history_revision_resource_history_revision" %
+                      rt.tablename)
+
+        fk_new = "fk_%s_h_revision_rh_revision" % rt.tablename
+        to_rename.append((fk_old, fk_new, 'resource_history', 'revision',
+                          "%s_history" % rt.tablename, 'revision', 'CASCADE'))
+
+    for (fk_old, fk_new, src_table, src_col, dst_table, dst_col, ondelete
+         ) in to_rename:
+        op.drop_constraint(fk_old, dst_table, type_="foreignkey")
+        op.create_foreign_key(fk_new, dst_table, src_table,
+                              [dst_col], [src_col], ondelete=ondelete)
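+
+# Design note: there is no portable ALTER statement to rename a foreign key
+# across the supported backends, so each constraint is dropped and recreated
+# under its shorter name with the same columns and ON DELETE behaviour.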
diff --git a/gnocchi/indexer/alembic/versions/3901f5ea2b8e_create_instance_disk_and_instance_.py b/gnocchi/indexer/alembic/versions/3901f5ea2b8e_create_instance_disk_and_instance_.py
new file mode 100644
index 0000000000000000000000000000000000000000..2c221f70cb28861f453a3cae8bbd70ea8bac6b0d
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/3901f5ea2b8e_create_instance_disk_and_instance_.py
@@ -0,0 +1,103 @@
+# Copyright 2015 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""create instance_disk and instance_net_int tables
+
+Revision ID: 3901f5ea2b8e
+Revises: 42ee7f3e25f8
+Create Date: 2015-08-27 17:00:25.092891
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '3901f5ea2b8e'
+down_revision = '42ee7f3e25f8'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils
+
+
+def upgrade():
+    for table in ["resource", "resource_history"]:
+        op.alter_column(table, "type",
+                        type_=sa.Enum('generic', 'instance', 'swift_account',
+                                      'volume', 'ceph_account', 'network',
+                                      'identity', 'ipmi', 'stack', 'image',
+                                      'instance_network_interface',
+                                      'instance_disk',
+                                      name='resource_type_enum'),
+                        nullable=False)
+
+    # NOTE(sileht): postgresql has a builtin ENUM type, so
+    # just altering the column won't work.
+    # https://bitbucket.org/zzzeek/alembic/issues/270/altering-enum-type
+    # Does it break offline migration because we use get_bind()?
+
+    # NOTE(luogangyi): since we cannot use 'ALTER TYPE' in transaction,
+    # we split the 'ALTER TYPE' operation into several steps.
+    bind = op.get_bind()
+    if bind and bind.engine.name == "postgresql":
+        op.execute("ALTER TYPE resource_type_enum RENAME TO \
+                    old_resource_type_enum")
+        op.execute("CREATE TYPE resource_type_enum AS ENUM \
+                       ('generic', 'instance', 'swift_account', \
+                        'volume', 'ceph_account', 'network', \
+                        'identity', 'ipmi', 'stack', 'image', \
+                        'instance_network_interface', 'instance_disk')")
+        for table in ["resource", "resource_history"]:
+            op.execute("ALTER TABLE %s ALTER COLUMN type TYPE \
+                        resource_type_enum USING \
+                        type::text::resource_type_enum" % table)
+        op.execute("DROP TYPE old_resource_type_enum")
+
+    for table in ['instance_disk', 'instance_net_int']:
+        op.create_table(
+            table,
+            sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                      nullable=False),
+            sa.Column('instance_id',
+                      sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                      nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Index('ix_%s_id' % table, 'id', unique=False),
+            sa.ForeignKeyConstraint(['id'], ['resource.id'],
+                                    name="fk_%s_id_resource_id" % table,
+                                    ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('id'),
+            mysql_charset='utf8',
+            mysql_engine='InnoDB'
+        )
+
+        op.create_table(
+            '%s_history' % table,
+            sa.Column('instance_id',
+                      sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                      nullable=False),
+            sa.Column('name', sa.String(length=255), nullable=False),
+            sa.Column('revision', sa.Integer(), nullable=False),
+            sa.Index('ix_%s_history_revision' % table, 'revision',
+                     unique=False),
+            sa.ForeignKeyConstraint(['revision'],
+                                    ['resource_history.revision'],
+                                    name=("fk_%s_history_"
+                                          "resource_history_revision") % table,
+                                    ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('revision'),
+            mysql_charset='utf8',
+            mysql_engine='InnoDB'
+        )
diff --git a/gnocchi/indexer/alembic/versions/397987e38570_no_more_slash_and_reencode.py b/gnocchi/indexer/alembic/versions/397987e38570_no_more_slash_and_reencode.py
new file mode 100644
index 0000000000000000000000000000000000000000..a671fc1db066ff297cab378cd1e2f9ea252b6ecd
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/397987e38570_no_more_slash_and_reencode.py
@@ -0,0 +1,189 @@
+# Copyright 2017 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Remove slashes from original resource IDs, recompute their id with creator
+
+Revision ID: 397987e38570
+Revises: aba5a217ca9b
+Create Date: 2017-01-11 16:32:40.421758
+
+"""
+import uuid
+
+from alembic import op
+import six
+import sqlalchemy as sa
+import sqlalchemy_utils
+
+from gnocchi import utils
+
+# revision identifiers, used by Alembic.
+revision = '397987e38570'
+down_revision = 'aba5a217ca9b'
+branch_labels = None
+depends_on = None
+
+resource_type_table = sa.Table(
+    'resource_type',
+    sa.MetaData(),
+    sa.Column('name', sa.String(255), nullable=False),
+    sa.Column('tablename', sa.String(35), nullable=False)
+)
+
+resource_table = sa.Table(
+    'resource',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(),
+              nullable=False),
+    sa.Column('original_resource_id', sa.String(255)),
+    sa.Column('type', sa.String(255)),
+    sa.Column('creator', sa.String(255))
+)
+
+resourcehistory_table = sa.Table(
+    'resource_history',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(),
+              nullable=False),
+    sa.Column('original_resource_id', sa.String(255))
+)
+
+metric_table = sa.Table(
+    'metric',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(),
+              nullable=False),
+    sa.Column('name', sa.String(255)),
+    sa.Column('resource_id', sqlalchemy_utils.types.uuid.UUIDType())
+)
+
+
+uuidtype = sqlalchemy_utils.types.uuid.UUIDType()
+
+
+def upgrade():
+    connection = op.get_bind()
+
+    resource_type_tables = {}
+    resource_type_tablenames = dict(
+        (rt.name, rt.tablename)
+        for rt in connection.execute(resource_type_table.select())
+        if rt.tablename != "generic"
+    )
+
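+    # Drop every foreign key that points at resource.id so the IDs can be
+    # rewritten in place; the constraints are re-created at the end of
+    # this migration.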
+    op.drop_constraint("fk_rh_id_resource_id", "resource_history",
+                       type_="foreignkey")
+    op.drop_constraint("fk_metric_resource_id_resource_id", "metric",
+                       type_="foreignkey")
+    for name, table in resource_type_tablenames.items():
+        op.drop_constraint("fk_%s_id_resource_id" % table, table,
+                           type_="foreignkey")
+
+        resource_type_tables[name] = sa.Table(
+            table,
+            sa.MetaData(),
+            sa.Column('id',
+                      sqlalchemy_utils.types.uuid.UUIDType(),
+                      nullable=False),
+        )
+
+    for resource in connection.execute(resource_table.select()):
+
+        if resource.original_resource_id is None:
+            # The statsd resource has no original_resource_id (it is NULL)
+            continue
+
+        try:
+            orig_as_uuid = uuid.UUID(str(resource.original_resource_id))
+        except ValueError:
+            pass
+        else:
+            if orig_as_uuid == resource.id:
+                continue
+
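+        # The original ID either is not a UUID or no longer matches the
+        # stored id: replace the slashes and recompute the resource UUID
+        # from the sanitized ID and its creator.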
+        new_original_resource_id = resource.original_resource_id.replace(
+            '/', '_')
+        if six.PY2:
+            new_original_resource_id = new_original_resource_id.encode('utf-8')
+        new_id = sa.literal(uuidtype.process_bind_param(
+            str(utils.ResourceUUID(
+                new_original_resource_id, resource.creator)),
+            connection.dialect))
+
+        # resource table
+        connection.execute(
+            resource_table.update().where(
+                resource_table.c.id == resource.id
+            ).values(
+                id=new_id,
+                original_resource_id=new_original_resource_id
+            )
+        )
+        # resource history table
+        connection.execute(
+            resourcehistory_table.update().where(
+                resourcehistory_table.c.id == resource.id
+            ).values(
+                id=new_id,
+                original_resource_id=new_original_resource_id
+            )
+        )
+
+        if resource.type != "generic":
+            rtable = resource_type_tables[resource.type]
+
+            # resource table (type)
+            connection.execute(
+                rtable.update().where(
+                    rtable.c.id == resource.id
+                ).values(id=new_id)
+            )
+
+        # Metric
+        connection.execute(
+            metric_table.update().where(
+                metric_table.c.resource_id == resource.id
+            ).values(
+                resource_id=new_id
+            )
+        )
+
+    for (name, table) in resource_type_tablenames.items():
+        op.create_foreign_key("fk_%s_id_resource_id" % table,
+                              table, "resource",
+                              ("id",), ("id",),
+                              ondelete="CASCADE")
+
+    op.create_foreign_key("fk_metric_resource_id_resource_id",
+                          "metric", "resource",
+                          ("resource_id",), ("id",),
+                          ondelete="SET NULL")
+    op.create_foreign_key("fk_rh_id_resource_id",
+                          "resource_history", "resource",
+                          ("id",), ("id",), ondelete="CASCADE")
+
+    for metric in connection.execute(metric_table.select().where(
+            metric_table.c.name.like("%/%"))):
+        connection.execute(
+            metric_table.update().where(
+                metric_table.c.id == metric.id
+            ).values(
+                name=metric.name.replace('/', '_'),
+            )
+        )
diff --git a/gnocchi/indexer/alembic/versions/39b7d449d46a_create_metric_status_column.py b/gnocchi/indexer/alembic/versions/39b7d449d46a_create_metric_status_column.py
new file mode 100644
index 0000000000000000000000000000000000000000..c3d7be9964b1337de6ec91042dcf8180f449028a
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/39b7d449d46a_create_metric_status_column.py
@@ -0,0 +1,49 @@
+# Copyright 2015 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""create metric status column
+
+Revision ID: 39b7d449d46a
+Revises: 3901f5ea2b8e
+Create Date: 2015-09-16 13:25:34.249237
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '39b7d449d46a'
+down_revision = '3901f5ea2b8e'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    enum = sa.Enum("active", "delete", name="metric_status_enum")
+    enum.create(op.get_bind(), checkfirst=False)
+    op.add_column("metric",
+                  sa.Column('status', enum,
+                            nullable=False,
+                            server_default="active"))
+    op.create_index('ix_metric_status', 'metric', ['status'], unique=False)
+
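+    # Recreate the resource FK with ON DELETE SET NULL so that deleting a
+    # resource detaches its metrics instead of removing them; they can
+    # then be cleaned up via the new status column.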
+    op.drop_constraint("fk_metric_resource_id_resource_id",
+                       "metric", type_="foreignkey")
+    op.create_foreign_key("fk_metric_resource_id_resource_id",
+                          "metric", "resource",
+                          ("resource_id",), ("id",),
+                          ondelete="SET NULL")
diff --git a/gnocchi/indexer/alembic/versions/40c6aae14c3f_ck_started_before_ended.py b/gnocchi/indexer/alembic/versions/40c6aae14c3f_ck_started_before_ended.py
new file mode 100644
index 0000000000000000000000000000000000000000..cf6922c9b8695dc78f0182357a40edd7f1d7e3d2
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/40c6aae14c3f_ck_started_before_ended.py
@@ -0,0 +1,39 @@
+#
+# Copyright 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""ck_started_before_ended
+
+Revision ID: 40c6aae14c3f
+Revises: 1c98ac614015
+Create Date: 2015-04-28 16:35:11.999144
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '40c6aae14c3f'
+down_revision = '1c98ac614015'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+
+
+def upgrade():
+    op.create_check_constraint("ck_started_before_ended",
+                               "resource",
+                               "started_at <= ended_at")
+    op.create_check_constraint("ck_started_before_ended",
+                               "resource_history",
+                               "started_at <= ended_at")
diff --git a/gnocchi/indexer/alembic/versions/42ee7f3e25f8_alter_flavorid_from_int_to_string.py b/gnocchi/indexer/alembic/versions/42ee7f3e25f8_alter_flavorid_from_int_to_string.py
new file mode 100644
index 0000000000000000000000000000000000000000..e8d10d44fab79e8ddca9a1d937039e85862b99e8
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/42ee7f3e25f8_alter_flavorid_from_int_to_string.py
@@ -0,0 +1,38 @@
+#
+# Copyright 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""alter flavorid from int to string
+
+Revision ID: 42ee7f3e25f8
+Revises: f7d44b47928
+Create Date: 2015-05-10 21:20:24.941263
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '42ee7f3e25f8'
+down_revision = 'f7d44b47928'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+
+
+def upgrade():
+    for table in ('instance', 'instance_history'):
+        op.alter_column(table, "flavor_id",
+                        type_=sa.String(length=255),
+                        nullable=False)
diff --git a/gnocchi/indexer/alembic/versions/469b308577a9_allow_image_ref_to_be_null.py b/gnocchi/indexer/alembic/versions/469b308577a9_allow_image_ref_to_be_null.py
new file mode 100644
index 0000000000000000000000000000000000000000..5ac8dfcf77b63a1faabdabf4e97d72fff39cadea
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/469b308577a9_allow_image_ref_to_be_null.py
@@ -0,0 +1,41 @@
+# Copyright 2015 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""allow image_ref to be null
+
+Revision ID: 469b308577a9
+Revises: 39b7d449d46a
+Create Date: 2015-11-29 00:23:39.998256
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '469b308577a9'
+down_revision = '39b7d449d46a'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.alter_column('instance', 'image_ref',
+                    existing_type=sa.String(length=255),
+                    nullable=True)
+    op.alter_column('instance_history', 'image_ref',
+                    existing_type=sa.String(length=255),
+                    nullable=True)
diff --git a/gnocchi/indexer/alembic/versions/5c4f93e5bb4_mysql_float_to_timestamp.py b/gnocchi/indexer/alembic/versions/5c4f93e5bb4_mysql_float_to_timestamp.py
new file mode 100644
index 0000000000000000000000000000000000000000..6c73dd73ef1e3458e32c3a4dc626b509d80bfd3b
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/5c4f93e5bb4_mysql_float_to_timestamp.py
@@ -0,0 +1,77 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""mysql_float_to_timestamp
+
+Revision ID: 5c4f93e5bb4
+Revises: 7e6f9d542f8b
+Create Date: 2016-07-25 15:36:36.469847
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+from sqlalchemy.sql import func
+
+from gnocchi.indexer import sqlalchemy_types
+
+# revision identifiers, used by Alembic.
+revision = '5c4f93e5bb4'
+down_revision = '27d2a1d205ff'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    bind = op.get_bind()
+    if bind and bind.engine.name == "mysql":
+        op.execute("SET time_zone = '+00:00'")
+        # NOTE(jd) MySQL does not support "ALTER TABLE … USING …", so we
+        # need to copy each column to a temporary one and convert it.
+        for table_name, column_name in (("resource", "started_at"),
+                                        ("resource", "ended_at"),
+                                        ("resource", "revision_start"),
+                                        ("resource_history", "started_at"),
+                                        ("resource_history", "ended_at"),
+                                        ("resource_history", "revision_start"),
+                                        ("resource_history", "revision_end"),
+                                        ("resource_type", "updated_at")):
+
+            nullable = column_name == "ended_at"
+
+            existing_type = sa.types.DECIMAL(
+                precision=20, scale=6, asdecimal=True)
+            existing_col = sa.Column(
+                column_name,
+                existing_type,
+                nullable=nullable)
+            temp_col = sa.Column(
+                column_name + "_ts",
+                sqlalchemy_types.TimestampUTC(),
+                nullable=True)
+            op.add_column(table_name, temp_col)
+            t = sa.sql.table(table_name, existing_col, temp_col)
+            op.execute(t.update().values(
+                **{column_name + "_ts": func.from_unixtime(existing_col)}))
+            op.drop_column(table_name, column_name)
+            op.alter_column(table_name,
+                            column_name + "_ts",
+                            nullable=nullable,
+                            type_=sqlalchemy_types.TimestampUTC(),
+                            existing_nullable=nullable,
+                            existing_type=existing_type,
+                            new_column_name=column_name)
diff --git a/gnocchi/indexer/alembic/versions/62a8dfb139bb_change_uuid_to_string.py b/gnocchi/indexer/alembic/versions/62a8dfb139bb_change_uuid_to_string.py
new file mode 100644
index 0000000000000000000000000000000000000000..9dbb437c18ab7ec7d33b9ce172e3d0e621f3a542
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/62a8dfb139bb_change_uuid_to_string.py
@@ -0,0 +1,249 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Change uuid to string
+
+Revision ID: 62a8dfb139bb
+Revises: 1f21cbdd6bc2
+Create Date: 2016-01-20 11:57:45.954607
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils
+
+
+# revision identifiers, used by Alembic.
+revision = '62a8dfb139bb'
+down_revision = '1f21cbdd6bc2'
+branch_labels = None
+depends_on = None
+
+resourcehelper = sa.Table(
+    'resource',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=False),
+    sa.Column('tmp_created_by_user_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_created_by_project_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_user_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_project_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('created_by_user_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('created_by_project_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('user_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('project_id',
+              sa.String(length=255),
+              nullable=True),
+)
+
+resourcehistoryhelper = sa.Table(
+    'resource_history',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=False),
+    sa.Column('tmp_created_by_user_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_created_by_project_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_user_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_project_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('created_by_user_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('created_by_project_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('user_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('project_id',
+              sa.String(length=255),
+              nullable=True),
+)
+
+metrichelper = sa.Table(
+    'metric',
+    sa.MetaData(),
+    sa.Column('id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=False),
+    sa.Column('tmp_created_by_user_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('tmp_created_by_project_id',
+              sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+              nullable=True),
+    sa.Column('created_by_user_id',
+              sa.String(length=255),
+              nullable=True),
+    sa.Column('created_by_project_id',
+              sa.String(length=255),
+              nullable=True),
+)
+
+
+def upgrade():
+    connection = op.get_bind()
+
+    # Rename user/project fields to tmp_*
+    op.alter_column('metric', 'created_by_project_id',
+                    new_column_name='tmp_created_by_project_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('metric', 'created_by_user_id',
+                    new_column_name='tmp_created_by_user_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource', 'created_by_project_id',
+                    new_column_name='tmp_created_by_project_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource', 'created_by_user_id',
+                    new_column_name='tmp_created_by_user_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource', 'project_id',
+                    new_column_name='tmp_project_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource', 'user_id',
+                    new_column_name='tmp_user_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource_history', 'created_by_project_id',
+                    new_column_name='tmp_created_by_project_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource_history', 'created_by_user_id',
+                    new_column_name='tmp_created_by_user_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource_history', 'project_id',
+                    new_column_name='tmp_project_id',
+                    existing_type=sa.BINARY(length=16))
+    op.alter_column('resource_history', 'user_id',
+                    new_column_name='tmp_user_id',
+                    existing_type=sa.BINARY(length=16))
+
+    # Add new user/project fields as strings
+    op.add_column('metric',
+                  sa.Column('created_by_project_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('metric',
+                  sa.Column('created_by_user_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource',
+                  sa.Column('created_by_project_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource',
+                  sa.Column('created_by_user_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource',
+                  sa.Column('project_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource',
+                  sa.Column('user_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource_history',
+                  sa.Column('created_by_project_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource_history',
+                  sa.Column('created_by_user_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource_history',
+                  sa.Column('project_id',
+                            sa.String(length=255), nullable=True))
+    op.add_column('resource_history',
+                  sa.Column('user_id',
+                            sa.String(length=255), nullable=True))
+
+    # Migrate data
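+    # The tmp_* columns contain UUIDs; store them in the new string
+    # columns as 32-character hex strings with the dashes stripped.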
+    for tablehelper in [resourcehelper, resourcehistoryhelper]:
+        for resource in connection.execute(tablehelper.select()):
+            if resource.tmp_created_by_project_id:
+                created_by_project_id = \
+                    str(resource.tmp_created_by_project_id).replace('-', '')
+            else:
+                created_by_project_id = None
+            if resource.tmp_created_by_user_id:
+                created_by_user_id = \
+                    str(resource.tmp_created_by_user_id).replace('-', '')
+            else:
+                created_by_user_id = None
+            if resource.tmp_project_id:
+                project_id = str(resource.tmp_project_id).replace('-', '')
+            else:
+                project_id = None
+            if resource.tmp_user_id:
+                user_id = str(resource.tmp_user_id).replace('-', '')
+            else:
+                user_id = None
+
+            connection.execute(
+                tablehelper.update().where(
+                    tablehelper.c.id == resource.id
+                ).values(
+                    created_by_project_id=created_by_project_id,
+                    created_by_user_id=created_by_user_id,
+                    project_id=project_id,
+                    user_id=user_id,
+                )
+            )
+    for metric in connection.execute(metrichelper.select()):
+        if metric.tmp_created_by_project_id:
+            created_by_project_id = \
+                str(metric.tmp_created_by_project_id).replace('-', '')
+        else:
+            created_by_project_id = None
+        if metric.tmp_created_by_user_id:
+            created_by_user_id = \
+                str(metric.tmp_created_by_user_id).replace('-', '')
+        else:
+            created_by_user_id = None
+        connection.execute(
+            metrichelper.update().where(
+                metrichelper.c.id == metric.id
+            ).values(
+                created_by_project_id=created_by_project_id,
+                created_by_user_id=created_by_user_id,
+            )
+        )
+
+    # Delete temp fields
+    op.drop_column('metric', 'tmp_created_by_project_id')
+    op.drop_column('metric', 'tmp_created_by_user_id')
+    op.drop_column('resource', 'tmp_created_by_project_id')
+    op.drop_column('resource', 'tmp_created_by_user_id')
+    op.drop_column('resource', 'tmp_project_id')
+    op.drop_column('resource', 'tmp_user_id')
+    op.drop_column('resource_history', 'tmp_created_by_project_id')
+    op.drop_column('resource_history', 'tmp_created_by_user_id')
+    op.drop_column('resource_history', 'tmp_project_id')
+    op.drop_column('resource_history', 'tmp_user_id')
diff --git a/gnocchi/indexer/alembic/versions/7e6f9d542f8b_resource_type_state_column.py b/gnocchi/indexer/alembic/versions/7e6f9d542f8b_resource_type_state_column.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b3a88ff93969117c6f7e6f8af499c0b1114f5ce
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/7e6f9d542f8b_resource_type_state_column.py
@@ -0,0 +1,43 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""resource_type state column
+
+Revision ID: 7e6f9d542f8b
+Revises: c62df18bf4ee
+Create Date: 2016-05-19 16:52:58.939088
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+# revision identifiers, used by Alembic.
+revision = '7e6f9d542f8b'
+down_revision = 'c62df18bf4ee'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    states = ("active", "creating", "creation_error", "deleting",
+              "deletion_error")
+    enum = sa.Enum(*states, name="resource_type_state_enum")
+    enum.create(op.get_bind(), checkfirst=False)
+    op.add_column("resource_type",
+                  sa.Column('state', enum, nullable=False,
+                            server_default="creating"))
+    rt = sa.sql.table('resource_type', sa.sql.column('state', enum))
+    op.execute(rt.update().values(state="active"))
diff --git a/gnocchi/indexer/alembic/versions/828c16f70cce_create_resource_type_table.py b/gnocchi/indexer/alembic/versions/828c16f70cce_create_resource_type_table.py
new file mode 100644
index 0000000000000000000000000000000000000000..c95d2684b82f1f1e3659e84b95ab68dfc9cf8922
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/828c16f70cce_create_resource_type_table.py
@@ -0,0 +1,85 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""create resource_type table
+
+Revision ID: 828c16f70cce
+Revises: 9901e5ea4b6e
+Create Date: 2016-01-19 12:47:19.384127
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = '828c16f70cce'
+down_revision = '9901e5ea4b6e'
+branch_labels = None
+depends_on = None
+
+
+type_string = sa.String(255)
+type_enum = sa.Enum('generic', 'instance',
+                    'swift_account', 'volume',
+                    'ceph_account', 'network',
+                    'identity', 'ipmi', 'stack',
+                    'image', 'instance_disk',
+                    'instance_network_interface',
+                    'host', 'host_disk',
+                    'host_network_interface',
+                    name="resource_type_enum")
+
+
+def type_string_col(name, table):
+    return sa.Column(
+        name, type_string,
+        sa.ForeignKey('resource_type.name',
+                      ondelete="RESTRICT",
+                      name="fk_%s_resource_type_name" % table))
+
+
+def type_enum_col(name):
+    return sa.Column(name, type_enum,
+                     nullable=False, default='generic')
+
+
+def upgrade():
+    resource_type = op.create_table(
+        'resource_type',
+        sa.Column('name', sa.String(length=255), nullable=False),
+        sa.PrimaryKeyConstraint('name'),
+        mysql_charset='utf8',
+        mysql_engine='InnoDB'
+    )
+
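+    # Seed the new resource_type table with every type currently present
+    # in the resource table.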
+    resource = sa.Table('resource', sa.MetaData(),
+                        type_string_col("type", "resource"))
+    op.execute(resource_type.insert().from_select(
+        ['name'], sa.select([resource.c.type]).distinct()))
+
+    for table in ["resource", "resource_history"]:
+        op.alter_column(table, "type", new_column_name="old_type",
+                        existing_type=type_enum)
+        op.add_column(table, type_string_col("type", table))
+        sa_table = sa.Table(table, sa.MetaData(),
+                            type_string_col("type", table),
+                            type_enum_col('old_type'))
+        op.execute(sa_table.update().values(
+            {sa_table.c.type: sa_table.c.old_type}))
+        op.drop_column(table, "old_type")
+        op.alter_column(table, "type", nullable=False,
+                        existing_type=type_string)
diff --git a/gnocchi/indexer/alembic/versions/8f376189b9eb_migrate_legacy_resources_to_db.py b/gnocchi/indexer/alembic/versions/8f376189b9eb_migrate_legacy_resources_to_db.py
new file mode 100644
index 0000000000000000000000000000000000000000..f1a83bd40a1ba1067c08c40ec2d83c3574ef6dd9
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/8f376189b9eb_migrate_legacy_resources_to_db.py
@@ -0,0 +1,48 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Migrate legacy resources to DB
+
+Revision ID: 8f376189b9eb
+Revises: d24877c22ab0
+Create Date: 2016-01-20 15:03:28.115656
+
+"""
+import json
+
+from alembic import op
+import sqlalchemy as sa
+
+from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
+
+# revision identifiers, used by Alembic.
+revision = '8f376189b9eb'
+down_revision = 'd24877c22ab0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    resource_type = sa.Table(
+        'resource_type', sa.MetaData(),
+        sa.Column('name', sa.String(255), nullable=False),
+        sa.Column('attributes', sa.Text, nullable=False)
+    )
+
+    for name, attributes in legacy.ceilometer_resources.items():
+        text_attributes = json.dumps(attributes)
+        op.execute(resource_type.update().where(
+            resource_type.c.name == name
+        ).values({resource_type.c.attributes: text_attributes}))
diff --git a/gnocchi/indexer/alembic/versions/9901e5ea4b6e_create_host.py b/gnocchi/indexer/alembic/versions/9901e5ea4b6e_create_host.py
new file mode 100644
index 0000000000000000000000000000000000000000..901e6f8f921710f0d5713c5fe795fdd207e581ca
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/9901e5ea4b6e_create_host.py
@@ -0,0 +1,127 @@
+# Copyright 2015 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""create host tables
+
+Revision ID: 9901e5ea4b6e
+Revises: a54c57ada3f5
+Create Date: 2015-12-15 17:20:25.092891
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = '9901e5ea4b6e'
+down_revision = 'a54c57ada3f5'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils
+
+
+def upgrade():
+    for table in ["resource", "resource_history"]:
+        op.alter_column(table, "type",
+                        type_=sa.Enum('generic', 'instance', 'swift_account',
+                                      'volume', 'ceph_account', 'network',
+                                      'identity', 'ipmi', 'stack', 'image',
+                                      'instance_network_interface',
+                                      'instance_disk',
+                                      'host', 'host_disk',
+                                      'host_network_interface',
+                                      name='resource_type_enum'),
+                        nullable=False)
+
+    # NOTE(sileht): PostgreSQL has a builtin ENUM type, so just altering
+    # the column won't work.
+    # https://bitbucket.org/zzzeek/alembic/issues/270/altering-enum-type
+    # Does this break offline migration, since we use get_bind()?
+
+    # NOTE(luogangyi): since we cannot use 'ALTER TYPE' in transaction,
+    # we split the 'ALTER TYPE' operation into several steps.
+    bind = op.get_bind()
+    if bind and bind.engine.name == "postgresql":
+        op.execute("ALTER TYPE resource_type_enum RENAME TO \
+                    old_resource_type_enum")
+        op.execute("CREATE TYPE resource_type_enum AS ENUM \
+                       ('generic', 'instance', 'swift_account', \
+                        'volume', 'ceph_account', 'network', \
+                        'identity', 'ipmi', 'stack', 'image', \
+                        'instance_network_interface', 'instance_disk', \
+                        'host', 'host_disk', \
+                        'host_network_interface')")
+        for table in ["resource", "resource_history"]:
+            op.execute("ALTER TABLE %s ALTER COLUMN type TYPE \
+                        resource_type_enum USING \
+                        type::text::resource_type_enum" % table)
+        op.execute("DROP TYPE old_resource_type_enum")
+
+    op.create_table(
+        'host',
+        sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                  nullable=False),
+        sa.Column('host_name', sa.String(length=255), nullable=False),
+        sa.ForeignKeyConstraint(['id'], ['resource.id'],
+                                name="fk_hypervisor_id_resource_id",
+                                ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('id'),
+        mysql_charset='utf8',
+        mysql_engine='InnoDB'
+    )
+
+    op.create_table(
+        'host_history',
+        sa.Column('host_name', sa.String(length=255), nullable=False),
+        sa.Column('revision', sa.Integer(), nullable=False),
+        sa.ForeignKeyConstraint(['revision'],
+                                ['resource_history.revision'],
+                                name=("fk_hypervisor_history_"
+                                      "resource_history_revision"),
+                                ondelete='CASCADE'),
+        sa.PrimaryKeyConstraint('revision'),
+        mysql_charset='utf8',
+        mysql_engine='InnoDB'
+    )
+
+    for table in ['host_disk', 'host_net_int']:
+        op.create_table(
+            table,
+            sa.Column('id', sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                      nullable=False),
+            sa.Column('host_name', sa.String(length=255), nullable=False),
+            sa.Column('device_name', sa.String(length=255), nullable=True),
+            sa.ForeignKeyConstraint(['id'], ['resource.id'],
+                                    name="fk_%s_id_resource_id" % table,
+                                    ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('id'),
+            mysql_charset='utf8',
+            mysql_engine='InnoDB'
+        )
+
+        op.create_table(
+            '%s_history' % table,
+            sa.Column('host_name', sa.String(length=255), nullable=False),
+            sa.Column('device_name', sa.String(length=255), nullable=True),
+            sa.Column('revision', sa.Integer(), nullable=False),
+            sa.ForeignKeyConstraint(['revision'],
+                                    ['resource_history.revision'],
+                                    name=("fk_%s_history_"
+                                          "resource_history_revision") % table,
+                                    ondelete='CASCADE'),
+            sa.PrimaryKeyConstraint('revision'),
+            mysql_charset='utf8',
+            mysql_engine='InnoDB'
+        )
diff --git a/gnocchi/indexer/alembic/versions/a54c57ada3f5_removes_useless_indexes.py b/gnocchi/indexer/alembic/versions/a54c57ada3f5_removes_useless_indexes.py
new file mode 100644
index 0000000000000000000000000000000000000000..b979857ade29fa16c7081eb096adcc248242ebf6
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/a54c57ada3f5_removes_useless_indexes.py
@@ -0,0 +1,72 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""merges primarykey and indexes
+
+Revision ID: a54c57ada3f5
+Revises: 1c2c61ac1f4c
+Create Date: 2016-02-04 09:09:23.180955
+
+"""
+
+from alembic import op
+
+
+# revision identifiers, used by Alembic.
+revision = 'a54c57ada3f5'
+down_revision = '1c2c61ac1f4c'
+branch_labels = None
+depends_on = None
+
+resource_tables = [(t, "id") for t in [
+    "instance",
+    "instance_disk",
+    "instance_net_int",
+    "swift_account",
+    "volume",
+    "ceph_account",
+    "network",
+    "identity",
+    "ipmi",
+    "stack",
+    "image"
+]]
+history_tables = [("%s_history" % t, "revision")
+                  for t, c in resource_tables]
+other_tables = [("metric", "id"), ("archive_policy", "name"),
+                ("archive_policy_rule", "name"),
+                ("resource", "id"),
+                ("resource_history", "id")]
+
+
+def upgrade():
+    bind = op.get_bind()
+    # NOTE(sileht): MySQL can't drop an index on a foreign key column,
+    # even when it is not the index used by the foreign key itself.
+    # In our case we have two indexes, fk_resource_history_id_resource_id
+    # and ix_resource_history_id; we want to delete only the second, but
+    # MySQL can't do that with a simple DROP INDEX ix_resource_history_id,
+    # so we have to remove the constraint and put it back afterwards.
+    if bind.engine.name == "mysql":
+        op.drop_constraint("fk_resource_history_id_resource_id",
+                           type_="foreignkey", table_name="resource_history")
+
+    for table, colname in resource_tables + history_tables + other_tables:
+        op.drop_index("ix_%s_%s" % (table, colname), table_name=table)
+
+    if bind.engine.name == "mysql":
+        op.create_foreign_key("fk_resource_history_id_resource_id",
+                              "resource_history", "resource", ["id"], ["id"],
+                              ondelete="CASCADE")
diff --git a/gnocchi/indexer/alembic/versions/aba5a217ca9b_merge_created_in_creator.py b/gnocchi/indexer/alembic/versions/aba5a217ca9b_merge_created_in_creator.py
new file mode 100644
index 0000000000000000000000000000000000000000..72339057b00d3702f3ff1e86bb089ebfdb3f3f34
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/aba5a217ca9b_merge_created_in_creator.py
@@ -0,0 +1,53 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""merge_created_in_creator
+
+Revision ID: aba5a217ca9b
+Revises: 5c4f93e5bb4
+Create Date: 2016-12-06 17:40:25.344578
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'aba5a217ca9b'
+down_revision = '5c4f93e5bb4'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    for table_name in ("resource", "resource_history", "metric"):
+        creator_col = sa.Column("creator", sa.String(255))
+        created_by_user_id_col = sa.Column("created_by_user_id",
+                                           sa.String(255))
+        created_by_project_id_col = sa.Column("created_by_project_id",
+                                              sa.String(255))
+        op.add_column(table_name, creator_col)
+        t = sa.sql.table(
+            table_name, creator_col,
+            created_by_user_id_col, created_by_project_id_col)
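+        # Build the new creator value as "<user>:<project>" from the two
+        # legacy columns.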
+        op.execute(
+            t.update().values(
+                creator=(
+                    created_by_user_id_col + ":" + created_by_project_id_col
+                )).where(created_by_user_id_col.isnot(None)
+                         | created_by_project_id_col.isnot(None)))
+        op.drop_column(table_name, "created_by_user_id")
+        op.drop_column(table_name, "created_by_project_id")
diff --git a/gnocchi/indexer/alembic/versions/c62df18bf4ee_add_unit_column_for_metric.py b/gnocchi/indexer/alembic/versions/c62df18bf4ee_add_unit_column_for_metric.py
new file mode 100644
index 0000000000000000000000000000000000000000..7d4deef595fc513f2078d92684345916769ba3d1
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/c62df18bf4ee_add_unit_column_for_metric.py
@@ -0,0 +1,38 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""add unit column for metric
+
+Revision ID: c62df18bf4ee
+Revises: 2e0b912062d1
+Create Date: 2016-05-04 12:31:25.350190
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+
+
+# revision identifiers, used by Alembic.
+revision = 'c62df18bf4ee'
+down_revision = '2e0b912062d1'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.add_column('metric', sa.Column('unit',
+                                      sa.String(length=31),
+                                      nullable=True))
diff --git a/gnocchi/indexer/alembic/versions/d24877c22ab0_add_attributes_to_resource_type.py b/gnocchi/indexer/alembic/versions/d24877c22ab0_add_attributes_to_resource_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..dda81e504552392b8f055e890fa0f8ac55453296
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/d24877c22ab0_add_attributes_to_resource_type.py
@@ -0,0 +1,38 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""Add attributes to resource_type
+
+Revision ID: d24877c22ab0
+Revises: 0718ed97e5b3
+Create Date: 2016-01-19 22:45:06.431190
+
+"""
+
+from alembic import op
+import sqlalchemy as sa
+import sqlalchemy_utils as sa_utils
+
+
+# revision identifiers, used by Alembic.
+revision = 'd24877c22ab0'
+down_revision = '0718ed97e5b3'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    op.add_column("resource_type",
+                  sa.Column('attributes', sa_utils.JSONType(),))
diff --git a/gnocchi/indexer/alembic/versions/ed9c6ddc5c35_fix_host_foreign_key.py b/gnocchi/indexer/alembic/versions/ed9c6ddc5c35_fix_host_foreign_key.py
new file mode 100644
index 0000000000000000000000000000000000000000..e5cfdd02bafb687e21434aafa92a27fac6ffa45d
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/ed9c6ddc5c35_fix_host_foreign_key.py
@@ -0,0 +1,53 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""fix_host_foreign_key
+
+Revision ID: ed9c6ddc5c35
+Revises: ffc7bbeec0b0
+Create Date: 2016-04-15 06:25:34.649934
+
+"""
+
+from alembic import op
+from sqlalchemy import inspect
+
+# revision identifiers, used by Alembic.
+revision = 'ed9c6ddc5c35'
+down_revision = 'ffc7bbeec0b0'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    conn = op.get_bind()
+
+    insp = inspect(conn)
+    fk_names = [fk['name'] for fk in insp.get_foreign_keys('host')]
+    if ("fk_hypervisor_id_resource_id" not in fk_names and
+            "fk_host_id_resource_id" in fk_names):
+        # NOTE(sileht): we are already good, the DB has been created from
+        # scratch after "a54c57ada3f5"
+        return
+
+    op.drop_constraint("fk_hypervisor_id_resource_id", "host",
+                       type_="foreignkey")
+    op.drop_constraint("fk_hypervisor_history_resource_history_revision",
+                       "host_history", type_="foreignkey")
+    op.create_foreign_key("fk_host_id_resource_id", "host", "resource",
+                          ["id"], ["id"], ondelete="CASCADE")
+    op.create_foreign_key("fk_host_history_resource_history_revision",
+                          "host_history", "resource_history",
+                          ["revision"], ["revision"], ondelete="CASCADE")
diff --git a/gnocchi/indexer/alembic/versions/f7d44b47928_uuid_to_binary.py b/gnocchi/indexer/alembic/versions/f7d44b47928_uuid_to_binary.py
new file mode 100644
index 0000000000000000000000000000000000000000..c53c725df4486cc0c6fbe00e0aed7a957f81b919
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/f7d44b47928_uuid_to_binary.py
@@ -0,0 +1,89 @@
+#
+# Copyright 2015 OpenStack Foundation
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""uuid_to_binary
+
+Revision ID: f7d44b47928
+Revises: 40c6aae14c3f
+Create Date: 2015-04-30 13:29:29.074794
+
+"""
+
+# revision identifiers, used by Alembic.
+revision = 'f7d44b47928'
+down_revision = '40c6aae14c3f'
+branch_labels = None
+depends_on = None
+
+from alembic import op
+import sqlalchemy_utils.types.uuid
+
+
+def upgrade():
+    op.alter_column("metric", "id",
+                    type_=sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                    nullable=False)
+
+    for table in ('resource', 'resource_history', 'metric'):
+        op.alter_column(table, "created_by_user_id",
+                        type_=sqlalchemy_utils.types.uuid.UUIDType(
+                            binary=True))
+        op.alter_column(table, "created_by_project_id",
+                        type_=sqlalchemy_utils.types.uuid.UUIDType(
+                            binary=True))
+    for table in ('resource', 'resource_history'):
+        op.alter_column(table, "user_id",
+                        type_=sqlalchemy_utils.types.uuid.UUIDType(
+                            binary=True))
+        op.alter_column(table, "project_id",
+                        type_=sqlalchemy_utils.types.uuid.UUIDType(
+                            binary=True))
+
+    # Drop all foreign keys linking to resource.id
+    for table in ('ceph_account', 'identity', 'volume', 'swift_account',
+                  'ipmi', 'image', 'network', 'stack', 'instance',
+                  'resource_history'):
+        op.drop_constraint("fk_%s_id_resource_id" % table, table,
+                           type_="foreignkey")
+
+    op.drop_constraint("fk_metric_resource_id_resource_id", "metric",
+                       type_="foreignkey")
+
+    # Now change the type of resource.id
+    op.alter_column("resource", "id",
+                    type_=sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+                    nullable=False)
+
+    # Now change all the types of $table.id and re-add the FK
+    for table in ('ceph_account', 'identity', 'volume', 'swift_account',
+                  'ipmi', 'image', 'network', 'stack', 'instance',
+                  'resource_history'):
+        op.alter_column(
+            table, "id",
+            type_=sqlalchemy_utils.types.uuid.UUIDType(binary=True),
+            nullable=False)
+
+        op.create_foreign_key("fk_%s_id_resource_id" % table,
+                              table, "resource",
+                              ("id",), ("id",),
+                              ondelete="CASCADE")
+
+    op.alter_column("metric", "resource_id",
+                    type_=sqlalchemy_utils.types.uuid.UUIDType(binary=True))
+
+    op.create_foreign_key("fk_metric_resource_id_resource_id",
+                          "metric", "resource",
+                          ("resource_id",), ("id",),
+                          ondelete="CASCADE")
diff --git a/gnocchi/indexer/alembic/versions/ffc7bbeec0b0_migrate_legacy_resources_to_db2.py b/gnocchi/indexer/alembic/versions/ffc7bbeec0b0_migrate_legacy_resources_to_db2.py
new file mode 100644
index 0000000000000000000000000000000000000000..1be98151d0439ba7791e632756ed55bfbde6b772
--- /dev/null
+++ b/gnocchi/indexer/alembic/versions/ffc7bbeec0b0_migrate_legacy_resources_to_db2.py
@@ -0,0 +1,65 @@
+# Copyright 2016 OpenStack Foundation
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+#
+
+"""migrate_legacy_resources_to_db2
+
+Revision ID: ffc7bbeec0b0
+Revises: 8f376189b9eb
+Create Date: 2016-04-14 15:57:13.072128
+
+"""
+import json
+
+from alembic import op
+import sqlalchemy as sa
+
+from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
+
+# revision identifiers, used by Alembic.
+revision = 'ffc7bbeec0b0'
+down_revision = '8f376189b9eb'
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    bind = op.get_bind()
+
+    resource_type = sa.Table(
+        'resource_type', sa.MetaData(),
+        sa.Column('name', sa.String(255), nullable=False),
+        sa.Column('tablename', sa.String(18), nullable=False),
+        sa.Column('attributes', sa.Text, nullable=False)
+    )
+
+    # NOTE(gordc): fix for incorrect migration:
+    # 0718ed97e5b3_add_tablename_to_resource_type.py#L46
+    op.execute(resource_type.update().where(
+        resource_type.c.name == "instance_network_interface"
+    ).values({'tablename': 'instance_net_int'}))
+
+    resource_type_names = [rt.name for rt in
+                           list(bind.execute(resource_type.select()))]
+
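+    # Insert only the legacy resource types that are still missing.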
+    for name, attributes in legacy.ceilometer_resources.items():
+        if name in resource_type_names:
+            continue
+        tablename = legacy.ceilometer_tablenames.get(name, name)
+        text_attributes = json.dumps(attributes)
+        op.execute(resource_type.insert().values({
+            resource_type.c.attributes: text_attributes,
+            resource_type.c.name: name,
+            resource_type.c.tablename: tablename,
+        }))
diff --git a/gnocchi/indexer/sqlalchemy.py b/gnocchi/indexer/sqlalchemy.py
new file mode 100644
index 0000000000000000000000000000000000000000..f9c0dbc0d39bc1e0455a9b2a38348efdaab4013f
--- /dev/null
+++ b/gnocchi/indexer/sqlalchemy.py
@@ -0,0 +1,1329 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import
+import datetime
+import itertools
+import operator
+import os.path
+import threading
+import uuid
+
+from alembic import migration
+from alembic import operations
+import daiquiri
+import oslo_db.api
+from oslo_db import exception
+from oslo_db.sqlalchemy import enginefacade
+from oslo_db.sqlalchemy import utils as oslo_db_utils
+try:
+    import psycopg2
+except ImportError:
+    psycopg2 = None
+try:
+    import pymysql.constants.ER
+    import pymysql.err
+except ImportError:
+    pymysql = None
+import six
+from six.moves.urllib import parse as urlparse
+import sqlalchemy
+from sqlalchemy.engine import url as sqlalchemy_url
+import sqlalchemy.exc
+from sqlalchemy import types as sa_types
+import sqlalchemy_utils
+
+from gnocchi import exceptions
+from gnocchi import indexer
+from gnocchi.indexer import sqlalchemy_base as base
+from gnocchi.indexer import sqlalchemy_types as types
+from gnocchi import resource_type
+from gnocchi import utils
+
+Base = base.Base
+Metric = base.Metric
+ArchivePolicy = base.ArchivePolicy
+ArchivePolicyRule = base.ArchivePolicyRule
+Resource = base.Resource
+ResourceHistory = base.ResourceHistory
+ResourceType = base.ResourceType
+
+_marker = indexer._marker
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def _retry_on_exceptions(exc):
+    if not isinstance(exc, exception.DBError):
+        return False
+    inn_e = exc.inner_exception
+    if not isinstance(inn_e, sqlalchemy.exc.InternalError):
+        return False
+    return ((
+        pymysql and
+        isinstance(inn_e.orig, pymysql.err.InternalError) and
+        (inn_e.orig.args[0] == pymysql.constants.ER.TABLE_DEF_CHANGED)
+    ) or (
+        # HACK(jd) Sometimes, PostgreSQL raises an error such as "current
+        # transaction is aborted, commands ignored until end of transaction
+        # block" on its own catalog, so we need to retry, but this is not
+        # caught by oslo.db as a deadlock. This is likely because when we use
+        # Base.metadata.create_all(), sqlalchemy itself gets an error it does
+        # not catch. This function exists to paper over that case.
+        psycopg2
+        and isinstance(inn_e.orig, psycopg2.InternalError)
+        # current transaction is aborted
+        and inn_e.orig.pgcode == '25P02'
+    ))
+
+
+def retry_on_deadlock(f):
+    return oslo_db.api.wrap_db_retry(retry_on_deadlock=True,
+                                     max_retries=20,
+                                     retry_interval=0.1,
+                                     max_retry_interval=2,
+                                     exception_checker=_retry_on_exceptions)(f)
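+
+# Illustrative usage sketch (the decorated function below is hypothetical):
+#
+#     @retry_on_deadlock
+#     def _purge(session):
+#         session.query(Metric).delete()
+#
+# oslo.db retries the call (up to 20 times, backing off from 0.1s to at most
+# 2s) whenever the error is a DB deadlock or _retry_on_exceptions() matches.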
+
+
+class PerInstanceFacade(object):
+    def __init__(self, conf):
+        self.trans = enginefacade.transaction_context()
+        self.trans.configure(
+            **dict(conf.database.items())
+        )
+        self._context = threading.local()
+
+    def independent_writer(self):
+        return self.trans.independent.writer.using(self._context)
+
+    def independent_reader(self):
+        return self.trans.independent.reader.using(self._context)
+
+    def writer_connection(self):
+        return self.trans.connection.writer.using(self._context)
+
+    def reader_connection(self):
+        return self.trans.connection.reader.using(self._context)
+
+    def writer(self):
+        return self.trans.writer.using(self._context)
+
+    def reader(self):
+        return self.trans.reader.using(self._context)
+
+    def get_engine(self):
+        # TODO(mbayer): add get_engine() to enginefacade
+        if not self.trans._factory._started:
+            self.trans._factory._start()
+        return self.trans._factory._writer_engine
+
+    def dispose(self):
+        # TODO(mbayer): add dispose() to enginefacade
+        if self.trans._factory._started:
+            self.trans._factory._writer_engine.dispose()
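+
+# Illustrative usage sketch of the facade (model names are hypothetical):
+#
+#     facade = PerInstanceFacade(conf)
+#     with facade.writer() as session:
+#         session.add(obj)          # committed when the block exits
+#     with facade.reader_connection() as connection:
+#         connection.execute(select_stmt)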
+
+
+class ResourceClassMapper(object):
+    def __init__(self):
+        # FIXME(sileht): 3 attributes, perhaps we need a better structure.
+        self._cache = {'generic': {'resource': base.Resource,
+                                   'history': base.ResourceHistory,
+                                   'updated_at': utils.utcnow()}}
+
+    @staticmethod
+    def _build_class_mappers(resource_type, baseclass=None):
+        tablename = resource_type.tablename
+        tables_args = {"extend_existing": True}
+        tables_args.update(base.COMMON_TABLES_ARGS)
+        # TODO(sileht): Add columns
+        if not baseclass:
+            baseclass = resource_type.to_baseclass()
+        resource_ext = type(
+            str("%s_resource" % tablename),
+            (baseclass, base.ResourceExtMixin, base.Resource),
+            {"__tablename__": tablename, "__table_args__": tables_args})
+        resource_history_ext = type(
+            str("%s_history" % tablename),
+            (baseclass, base.ResourceHistoryExtMixin, base.ResourceHistory),
+            {"__tablename__": ("%s_history" % tablename),
+             "__table_args__": tables_args})
+        return {'resource': resource_ext,
+                'history': resource_history_ext,
+                'updated_at': resource_type.updated_at}
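+
+    # For a resource type whose tablename is "rt_<hex>", the mapper above
+    # yields a class "rt_<hex>_resource" mapped to table "rt_<hex>" and a
+    # class "rt_<hex>_history" mapped to table "rt_<hex>_history".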
+
+    def get_classes(self, resource_type):
+        # NOTE(sileht): We don't care about concurrency here because we let
+        # sqlalchemy override its global object with extend_existing=True;
+        # this is safe because the class name and table name are uuid-based.
+        try:
+            mappers = self._cache[resource_type.tablename]
+            # Cache is outdated
+            if (resource_type.name != "generic"
+                    and resource_type.updated_at > mappers['updated_at']):
+                for table_purpose in ['resource', 'history']:
+                    Base.metadata.remove(Base.metadata.tables[
+                        mappers[table_purpose].__tablename__])
+                del self._cache[resource_type.tablename]
+                raise KeyError
+            return mappers
+        except KeyError:
+            mapper = self._build_class_mappers(resource_type)
+            self._cache[resource_type.tablename] = mapper
+            return mapper
+
+    @retry_on_deadlock
+    def map_and_create_tables(self, resource_type, facade):
+        if resource_type.state != "creating":
+            raise RuntimeError("map_and_create_tables must be called in state "
+                               "creating")
+
+        mappers = self.get_classes(resource_type)
+        tables = [Base.metadata.tables[mappers["resource"].__tablename__],
+                  Base.metadata.tables[mappers["history"].__tablename__]]
+
+        with facade.writer_connection() as connection:
+            Base.metadata.create_all(connection, tables=tables)
+
+        # NOTE(sileht): no need to protect the _cache with a lock
+        # get_classes cannot be called in state creating
+        self._cache[resource_type.tablename] = mappers
+
+    @retry_on_deadlock
+    def unmap_and_delete_tables(self, resource_type, facade):
+        if resource_type.state != "deleting":
+            raise RuntimeError("unmap_and_delete_tables must be called in "
+                               "state deleting")
+
+        mappers = self.get_classes(resource_type)
+        del self._cache[resource_type.tablename]
+
+        tables = [Base.metadata.tables[mappers['resource'].__tablename__],
+                  Base.metadata.tables[mappers['history'].__tablename__]]
+
+        # NOTE(sileht): Base.metadata.drop_all doesn't issue CASCADE
+        # correctly, at least on PostgreSQL. We drop the foreign keys
+        # manually so we don't lock the destination table for too long
+        # during the DROP TABLE. It's safe not to use a transaction here:
+        # the resource_type row is already deleted and committed, so this
+        # code cannot be triggered again for this resource_type.
+        with facade.writer_connection() as connection:
+            for table in tables:
+                for fk in table.foreign_key_constraints:
+                    try:
+                        self._safe_execute(
+                            connection,
+                            sqlalchemy.schema.DropConstraint(fk))
+                    except exception.DBNonExistentConstraint:
+                        pass
+            for table in tables:
+                try:
+                    self._safe_execute(connection,
+                                       sqlalchemy.schema.DropTable(table))
+                except exception.DBNonExistentTable:
+                    pass
+
+            # NOTE(sileht): If something goes wrong here, we cannot recover
+            # automatically; that's why we expose the state to the superuser.
+            # But we do allow deleting a resource type in an error state, so
+            # the superuser can clean up the mess manually and let gnocchi
+            # check and finish the cleanup.
+
+        # TODO(sileht): Remove this resource on other workers
+        # by using expiration on cache ?
+        for table in tables:
+            Base.metadata.remove(table)
+
+    @retry_on_deadlock
+    def _safe_execute(self, connection, works):
+        # NOTE(sileht): we wrap the execution in an explicit transaction to
+        # ensure MySQL takes locks that block other transactions.
+        trans = connection.begin()
+        connection.execute(works)
+        trans.commit()
+
+
+class SQLAlchemyIndexer(indexer.IndexerDriver):
+    _RESOURCE_TYPE_MANAGER = ResourceClassMapper()
+
+    @classmethod
+    def _create_new_database(cls, url):
+        """Used by testing to create a new database."""
+        purl = sqlalchemy_url.make_url(cls.dress_url(url))
+        purl.database = purl.database + str(uuid.uuid4()).replace('-', '')
+        new_url = str(purl)
+        sqlalchemy_utils.create_database(new_url)
+        return new_url
+
+    @staticmethod
+    def dress_url(url):
+        # If no explicit driver has been set, we default to pymysql
+        if url.startswith("mysql://"):
+            url = sqlalchemy_url.make_url(url)
+            url.drivername = "mysql+pymysql"
+            return str(url)
+        if url.startswith("postgresql://"):
+            url = sqlalchemy_url.make_url(url)
+            url.drivername = "postgresql+psycopg2"
+            return str(url)
+        return url
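+
+    # For example:
+    #     dress_url("mysql://u:p@host/db")
+    #         -> "mysql+pymysql://u:p@host/db"
+    #     dress_url("postgresql://u:p@host/db")
+    #         -> "postgresql+psycopg2://u:p@host/db"
+    # Any other URL is returned unchanged.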
+
+    def __init__(self, conf):
+        conf.set_override("connection",
+                          self.dress_url(conf.indexer.url),
+                          "database")
+        self.conf = conf
+        self.facade = PerInstanceFacade(conf)
+
+    def __str__(self):
+        parsed = urlparse.urlparse(self.conf.indexer.url)
+        url = urlparse.urlunparse((
+            parsed.scheme,
+            "***:***@%s%s" % (parsed.hostname,
+                              ":%s" % parsed.port if parsed.port else ""),
+            parsed.path,
+            parsed.params,
+            parsed.query,
+            parsed.fragment))
+        return "%s: %s" % (self.__class__.__name__, url)
+
+    def disconnect(self):
+        self.facade.dispose()
+
+    def _get_alembic_config(self):
+        from alembic import config
+
+        cfg = config.Config(
+            "%s/alembic/alembic.ini" % os.path.dirname(__file__))
+        cfg.set_main_option('sqlalchemy.url',
+                            self.conf.database.connection.replace('%', '%%'))
+        return cfg
+
+    def get_engine(self):
+        return self.facade.get_engine()
+
+    def upgrade(self, nocreate=False):
+        from alembic import command
+        from alembic import migration
+
+        cfg = self._get_alembic_config()
+        cfg.conf = self.conf
+        if nocreate:
+            command.upgrade(cfg, "head")
+        else:
+            with self.facade.writer_connection() as connection:
+                ctxt = migration.MigrationContext.configure(connection)
+                current_version = ctxt.get_current_revision()
+                if current_version is None:
+                    Base.metadata.create_all(connection)
+                    command.stamp(cfg, "head")
+                else:
+                    command.upgrade(cfg, "head")
+
+        try:
+            with self.facade.writer() as session:
+                session.add(
+                    ResourceType(
+                        name="generic",
+                        tablename="generic",
+                        state="active",
+                        attributes=resource_type.ResourceTypeAttributes()))
+        except exception.DBDuplicateEntry:
+            pass
+
+    # NOTE(jd) We can have deadlock errors either here or later in
+    # map_and_create_tables(). We can't decorate create_resource_type()
+    # directly or each part might retry later on its own and cause a
+    # duplicate. And it seems there's no way to use the same session for
+    # both adding the resource_type in our table and calling
+    # map_and_create_tables() :-(
+    @retry_on_deadlock
+    def _add_resource_type(self, resource_type):
+        try:
+            with self.facade.writer() as session:
+                session.add(resource_type)
+        except exception.DBDuplicateEntry:
+            raise indexer.ResourceTypeAlreadyExists(resource_type.name)
+
+    def create_resource_type(self, resource_type):
+        # NOTE(sileht): MySQL has a small length limitation (64 chars) on
+        # foreign key and index names, so we can't use the resource type name
+        # as the tablename. The longest name we generate is
+        # fk_<tablename>_h_revision_rh_revision: 64 - 26 = 38 chars left,
+        # minus 3 for the "rt_" prefix leaves 35, and a uuid hex is 32 chars,
+        # so it fits.
+        tablename = "rt_%s" % uuid.uuid4().hex
+        resource_type = ResourceType(name=resource_type.name,
+                                     tablename=tablename,
+                                     attributes=resource_type.attributes,
+                                     state="creating")
+
+        # NOTE(sileht): ensure the driver is able to store the requested
+        # resource_type
+        resource_type.to_baseclass()
+
+        self._add_resource_type(resource_type)
+
+        try:
+            self._RESOURCE_TYPE_MANAGER.map_and_create_tables(resource_type,
+                                                              self.facade)
+        except Exception:
+            # NOTE(sileht): The DDL failed; we have no way to recover
+            # automatically, so just set a dedicated error state
+            self._set_resource_type_state(resource_type.name, "creation_error")
+            raise
+
+        self._set_resource_type_state(resource_type.name, "active")
+        resource_type.state = "active"
+        return resource_type
+
+    def update_resource_type(self, name, add_attributes=None,
+                             del_attributes=None):
+        if not add_attributes and not del_attributes:
+            return
+        add_attributes = add_attributes or []
+        del_attributes = del_attributes or []
+
+        self._set_resource_type_state(name, "updating", "active")
+
+        try:
+            with self.facade.independent_writer() as session:
+                engine = session.connection()
+                rt = self._get_resource_type(session, name)
+
+                with self.facade.writer_connection() as connection:
+                    ctx = migration.MigrationContext.configure(connection)
+                    op = operations.Operations(ctx)
+                    for table in [rt.tablename, '%s_history' % rt.tablename]:
+                        with op.batch_alter_table(table) as batch_op:
+                            for attr in del_attributes:
+                                batch_op.drop_column(attr)
+                            for attr in add_attributes:
+                                server_default = attr.for_filling(
+                                    engine.dialect)
+                                batch_op.add_column(sqlalchemy.Column(
+                                    attr.name, attr.satype,
+                                    nullable=not attr.required,
+                                    server_default=server_default))
+
+                                # We have all rows filled now, we can remove
+                                # the server_default
+                                if server_default is not None:
+                                    batch_op.alter_column(
+                                        column_name=attr.name,
+                                        existing_type=attr.satype,
+                                        existing_server_default=server_default,
+                                        existing_nullable=not attr.required,
+                                        server_default=None)
+
+                rt.state = "active"
+                rt.updated_at = utils.utcnow()
+                rt.attributes.extend(add_attributes)
+                for attr in list(rt.attributes):
+                    if attr.name in del_attributes:
+                        rt.attributes.remove(attr)
+                # FIXME(sileht): yeah, that's weird, but attributes is a
+                # custom JSON column and 'extend' doesn't trigger an SQL
+                # UPDATE; this forces the update. I wonder if sqlalchemy
+                # provides something on the column description side.
+                sqlalchemy.orm.attributes.flag_modified(rt, 'attributes')
+
+        except Exception:
+            # NOTE(sileht): The DDL failed; we have no way to recover
+            # automatically, so just set a dedicated error state.
+            # TODO(sileht): Create a repair REST endpoint that deletes
+            # columns present in the resource type description but missing
+            # from the database. This would allow moving a resource type from
+            # updating_error back to active, which is currently not possible.
+            self._set_resource_type_state(name, "updating_error")
+            raise
+
+        return rt
+
+    def get_resource_type(self, name):
+        with self.facade.independent_reader() as session:
+            return self._get_resource_type(session, name)
+
+    def _get_resource_type(self, session, name):
+        resource_type = session.query(ResourceType).get(name)
+        if not resource_type:
+            raise indexer.NoSuchResourceType(name)
+        return resource_type
+
+    @retry_on_deadlock
+    def _set_resource_type_state(self, name, state,
+                                 expected_previous_state=None):
+        with self.facade.writer() as session:
+            q = session.query(ResourceType)
+            q = q.filter(ResourceType.name == name)
+            if expected_previous_state is not None:
+                q = q.filter(ResourceType.state == expected_previous_state)
+            update = q.update({'state': state})
+            if update == 0:
+                if expected_previous_state is not None:
+                    rt = session.query(ResourceType).get(name)
+                    if rt:
+                        raise indexer.UnexpectedResourceTypeState(
+                            name, expected_previous_state, rt.state)
+                raise indexer.IndexerException(
+                    "Fail to set resource type state of %s to %s" %
+                    (name, state))
+
+    @staticmethod
+    def get_resource_type_schema():
+        return base.RESOURCE_TYPE_SCHEMA_MANAGER
+
+    @staticmethod
+    def get_resource_attributes_schemas():
+        return [ext.plugin.schema() for ext in ResourceType.RESOURCE_SCHEMAS]
+
+    def list_resource_types(self):
+        with self.facade.independent_reader() as session:
+            return list(session.query(ResourceType).order_by(
+                ResourceType.name.asc()).all())
+
+    # NOTE(jd) We can have deadlock errors either here or later in
+    # map_and_create_tables(). We can't decorate delete_resource_type()
+    # directly or each part might retry later on its own and cause a
+    # duplicate. And it seems there's no way to use the same session for
+    # both adding the resource_type in our table and calling
+    # map_and_create_tables() :-(
+    @retry_on_deadlock
+    def _mark_as_deleting_resource_type(self, name):
+        try:
+            with self.facade.writer() as session:
+                rt = self._get_resource_type(session, name)
+                if rt.state not in ["active", "deletion_error",
+                                    "creation_error", "updating_error"]:
+                    raise indexer.UnexpectedResourceTypeState(
+                        name,
+                        "active/deletion_error/creation_error/updating_error",
+                        rt.state)
+                session.delete(rt)
+
+                # FIXME(sileht): Why do I need to flush here?! I want the
+                # remove and the add to happen in the same transaction!
+                session.flush()
+
+                # NOTE(sileht): delete and recreate to:
+                # * raise duplicate constraints
+                # * ensure we do not create a new resource type
+                #   with the same name while we destroy the tables next
+                rt = ResourceType(name=rt.name,
+                                  tablename=rt.tablename,
+                                  state="deleting",
+                                  attributes=rt.attributes)
+                session.add(rt)
+        except exception.DBReferenceError as e:
+            if (e.constraint in [
+                    'fk_resource_resource_type_name',
+                    'fk_resource_history_resource_type_name',
+                    'fk_rh_resource_type_name']):
+                raise indexer.ResourceTypeInUse(name)
+            raise
+        return rt
+
+    @retry_on_deadlock
+    def _delete_resource_type(self, name):
+        # Really delete the resource type; no resource can be linked to it,
+        # because we cannot add a resource to a resource_type that is not in
+        # the 'active' state
+        with self.facade.writer() as session:
+            resource_type = self._get_resource_type(session, name)
+            session.delete(resource_type)
+
+    def delete_resource_type(self, name):
+        if name == "generic":
+            raise indexer.ResourceTypeInUse(name)
+
+        rt = self._mark_as_deleting_resource_type(name)
+
+        try:
+            self._RESOURCE_TYPE_MANAGER.unmap_and_delete_tables(
+                rt, self.facade)
+        except Exception:
+            # NOTE(sileht): The DDL failed; we have no way to recover
+            # automatically, so just set a dedicated error state
+            self._set_resource_type_state(rt.name, "deletion_error")
+            raise
+
+        self._delete_resource_type(name)
+
+    def _resource_type_to_mappers(self, session, name):
+        resource_type = self._get_resource_type(session, name)
+        if resource_type.state != "active":
+            raise indexer.UnexpectedResourceTypeState(
+                name, "active", resource_type.state)
+        return self._RESOURCE_TYPE_MANAGER.get_classes(resource_type)
+
+    def list_archive_policies(self):
+        with self.facade.independent_reader() as session:
+            return list(session.query(ArchivePolicy).all())
+
+    def get_archive_policy(self, name):
+        with self.facade.independent_reader() as session:
+            return session.query(ArchivePolicy).get(name)
+
+    def update_archive_policy(self, name, ap_items):
+        with self.facade.independent_writer() as session:
+            ap = session.query(ArchivePolicy).get(name)
+            if not ap:
+                raise indexer.NoSuchArchivePolicy(name)
+            current = sorted(ap.definition,
+                             key=operator.attrgetter('granularity'))
+            new = sorted(ap_items, key=operator.attrgetter('granularity'))
+            if len(current) != len(new):
+                raise indexer.UnsupportedArchivePolicyChange(
+                    name, 'Cannot add or drop granularities')
+            for c, n in zip(current, new):
+                if c.granularity != n.granularity:
+                    raise indexer.UnsupportedArchivePolicyChange(
+                        name, '%s granularity interval was changed'
+                        % utils.timespan_total_seconds(c.granularity))
+            # NOTE(gordc): the ORM doesn't update a JSON column unless a new
+            # object is assigned
+            ap.definition = ap_items
+            return ap
+
+    def delete_archive_policy(self, name):
+        constraints = [
+            "fk_metric_ap_name_ap_name",
+            "fk_apr_ap_name_ap_name"]
+        with self.facade.writer() as session:
+            try:
+                if session.query(ArchivePolicy).filter(
+                        ArchivePolicy.name == name).delete() == 0:
+                    raise indexer.NoSuchArchivePolicy(name)
+            except exception.DBReferenceError as e:
+                if e.constraint in constraints:
+                    raise indexer.ArchivePolicyInUse(name)
+                raise
+
+    def create_archive_policy(self, archive_policy):
+        ap = ArchivePolicy(
+            name=archive_policy.name,
+            back_window=archive_policy.back_window,
+            definition=archive_policy.definition,
+            aggregation_methods=list(archive_policy.aggregation_methods),
+        )
+        try:
+            with self.facade.writer() as session:
+                session.add(ap)
+        except exception.DBDuplicateEntry:
+            raise indexer.ArchivePolicyAlreadyExists(archive_policy.name)
+        return ap
+
+    def list_archive_policy_rules(self):
+        with self.facade.independent_reader() as session:
+            return session.query(ArchivePolicyRule).order_by(
+                ArchivePolicyRule.metric_pattern.desc(),
+                ArchivePolicyRule.name.asc()
+            ).all()
+
+    def get_archive_policy_rule(self, name):
+        with self.facade.independent_reader() as session:
+            return session.query(ArchivePolicyRule).get(name)
+
+    def delete_archive_policy_rule(self, name):
+        with self.facade.writer() as session:
+            if session.query(ArchivePolicyRule).filter(
+                    ArchivePolicyRule.name == name).delete() == 0:
+                raise indexer.NoSuchArchivePolicyRule(name)
+
+    def create_archive_policy_rule(self, name, metric_pattern,
+                                   archive_policy_name):
+        apr = ArchivePolicyRule(
+            name=name,
+            archive_policy_name=archive_policy_name,
+            metric_pattern=metric_pattern
+        )
+        try:
+            with self.facade.writer() as session:
+                session.add(apr)
+        except exception.DBReferenceError as e:
+            if e.constraint == 'fk_apr_ap_name_ap_name':
+                raise indexer.NoSuchArchivePolicy(archive_policy_name)
+            raise
+        except exception.DBDuplicateEntry:
+            raise indexer.ArchivePolicyRuleAlreadyExists(name)
+        return apr
+
+    def update_archive_policy_rule(self, name, new_name):
+        apr = self.get_archive_policy_rule(name)
+        if not apr:
+            raise indexer.NoSuchArchivePolicyRule(name)
+        apr.name = new_name
+        try:
+            with self.facade.writer() as session:
+                session.add(apr)
+        except exception.DBDuplicateEntry:
+            raise indexer.UnsupportedArchivePolicyRuleChange(
+                name,
+                'Archive policy rule %s already exists.'
+                % new_name)
+        return apr
+
+    @retry_on_deadlock
+    def create_metric(self, id, creator, archive_policy_name,
+                      name=None, unit=None, resource_id=None):
+        m = Metric(id=id,
+                   creator=creator,
+                   archive_policy_name=archive_policy_name,
+                   name=name,
+                   unit=unit,
+                   resource_id=resource_id)
+        try:
+            with self.facade.writer() as session:
+                session.add(m)
+        except exception.DBDuplicateEntry:
+            raise indexer.NamedMetricAlreadyExists(name)
+        except exception.DBReferenceError as e:
+            if (e.constraint ==
+               'fk_metric_ap_name_ap_name'):
+                raise indexer.NoSuchArchivePolicy(archive_policy_name)
+            if e.constraint == 'fk_metric_resource_id_resource_id':
+                raise indexer.NoSuchResource(resource_id)
+            raise
+        return m
+
+    @retry_on_deadlock
+    def list_metrics(self, details=False, status='active',
+                     limit=None, marker=None, sorts=None,
+                     policy_filter=None, resource_policy_filter=None,
+                     attribute_filter=None):
+        sorts = sorts or []
+        with self.facade.independent_reader() as session:
+            q = session.query(Metric).filter(
+                Metric.status == status)
+            if details:
+                q = q.options(sqlalchemy.orm.joinedload('resource'))
+            if policy_filter or resource_policy_filter or attribute_filter:
+                engine = session.connection()
+                if attribute_filter:
+                    # We don't catch the indexer.QueryAttributeError error
+                    # here since we don't expect any user input on this
+                    # function. If the caller screws it up, that's its
+                    # problem: no need to convert the exception to another
+                    # type.
+                    attribute_f = QueryTransformer.build_filter(
+                        engine.dialect.name,
+                        Metric, attribute_filter)
+                    q = q.filter(attribute_f)
+                if policy_filter:
+                    # We don't catch the indexer.QueryAttributeError error
+                    # here since we don't expect any user input on this
+                    # function. If the caller screws it up, that's its
+                    # problem: no need to convert the exception to another
+                    # type.
+                    policy_f = QueryTransformer.build_filter(
+                        engine.dialect.name,
+                        Metric, policy_filter)
+                else:
+                    policy_f = None
+                if resource_policy_filter:
+                    q = q.join(Metric.resource)
+                    try:
+                        resource_policy_f = QueryTransformer.build_filter(
+                            engine.dialect.name,
+                            Resource,
+                            resource_policy_filter)
+                    except indexer.QueryAttributeError as e:
+                        # NOTE(jd) The QueryAttributeError does not know about
+                        # resource_type, so convert it
+                        raise indexer.ResourceAttributeError("generic",
+                                                             e.attribute)
+                else:
+                    resource_policy_f = None
+
+                if policy_filter or resource_policy_filter:
+                    q = q.filter(sqlalchemy.or_(policy_f, resource_policy_f))
+
+            sort_keys, sort_dirs = self._build_sort_keys(sorts, ['id'])
+
+            if marker:
+                metric_marker = self.list_metrics(
+                    attribute_filter={"in": {"id": [marker]}})
+                if metric_marker:
+                    metric_marker = metric_marker[0]
+                else:
+                    raise indexer.InvalidPagination(
+                        "Invalid marker: `%s'" % marker)
+            else:
+                metric_marker = None
+
+            try:
+                q = oslo_db_utils.paginate_query(q, Metric, limit=limit,
+                                                 sort_keys=sort_keys,
+                                                 marker=metric_marker,
+                                                 sort_dirs=sort_dirs)
+            except ValueError as e:
+                raise indexer.InvalidPagination(e)
+            except exception.InvalidSortKey as e:
+                raise indexer.InvalidPagination(e)
+
+            return list(q.all())
+
+    @retry_on_deadlock
+    def create_resource(self, resource_type, id,
+                        creator, user_id=None, project_id=None,
+                        started_at=None, ended_at=None, metrics=None,
+                        original_resource_id=None,
+                        **kwargs):
+        if (started_at is not None
+           and ended_at is not None
+           and started_at > ended_at):
+            raise ValueError(
+                "Start timestamp cannot be after end timestamp")
+        if original_resource_id is None:
+            original_resource_id = str(id)
+        with self.facade.writer() as session:
+            resource_cls = self._resource_type_to_mappers(
+                session, resource_type)['resource']
+            r = resource_cls(
+                id=id,
+                original_resource_id=original_resource_id,
+                type=resource_type,
+                creator=creator,
+                user_id=user_id,
+                project_id=project_id,
+                started_at=started_at,
+                ended_at=ended_at,
+                **kwargs)
+            session.add(r)
+            try:
+                session.flush()
+            except exception.DBDuplicateEntry:
+                raise indexer.ResourceAlreadyExists(id)
+            except exception.DBReferenceError as ex:
+                raise indexer.ResourceValueError(r.type,
+                                                 ex.key,
+                                                 getattr(r, ex.key))
+            if metrics is not None:
+                self._set_metrics_for_resource(session, r, metrics)
+
+            # NOTE(jd) Force load of metrics :)
+            r.metrics
+
+            return r
+
+    @retry_on_deadlock
+    def update_resource(self, resource_type,
+                        resource_id, ended_at=_marker, metrics=_marker,
+                        append_metrics=False,
+                        create_revision=True,
+                        **kwargs):
+        with self.facade.writer() as session:
+            mappers = self._resource_type_to_mappers(session, resource_type)
+            resource_cls = mappers["resource"]
+            resource_history_cls = mappers["history"]
+
+            try:
+                # NOTE(sileht): We use FOR UPDATE, which is not Galera
+                # friendly, but there is no other way to cleanly patch a
+                # resource and store its history safely when two concurrent
+                # calls are made.
+                q = session.query(resource_cls).filter(
+                    resource_cls.id == resource_id).with_for_update()
+
+                r = q.first()
+                if r is None:
+                    raise indexer.NoSuchResource(resource_id)
+
+                if create_revision:
+                    # Build history
+                    rh = resource_history_cls()
+                    for col in sqlalchemy.inspect(resource_cls).columns:
+                        setattr(rh, col.name, getattr(r, col.name))
+                    now = utils.utcnow()
+                    rh.revision_end = now
+                    session.add(rh)
+                    r.revision_start = now
+
+                # Update the resource
+                if ended_at is not _marker:
+                    # NOTE(jd) MySQL does not honor checks. I hate it.
+                    engine = session.connection()
+                    if engine.dialect.name == "mysql":
+                        if r.started_at is not None and ended_at is not None:
+                            if r.started_at > ended_at:
+                                raise indexer.ResourceValueError(
+                                    resource_type, "ended_at", ended_at)
+                    r.ended_at = ended_at
+
+                if kwargs:
+                    for attribute, value in six.iteritems(kwargs):
+                        if hasattr(r, attribute):
+                            setattr(r, attribute, value)
+                        else:
+                            raise indexer.ResourceAttributeError(
+                                r.type, attribute)
+
+                if metrics is not _marker:
+                    if not append_metrics:
+                        session.query(Metric).filter(
+                            Metric.resource_id == resource_id,
+                            Metric.status == 'active').update(
+                                {"resource_id": None})
+                    self._set_metrics_for_resource(session, r, metrics)
+
+                session.flush()
+            except exception.DBConstraintError as e:
+                if e.check_name == "ck_started_before_ended":
+                    raise indexer.ResourceValueError(
+                        resource_type, "ended_at", ended_at)
+                raise
+
+            # NOTE(jd) Force load of metrics :)
+            r.metrics
+
+            return r
+
+    @staticmethod
+    def _set_metrics_for_resource(session, r, metrics):
+        for name, value in six.iteritems(metrics):
+            if isinstance(value, uuid.UUID):
+                try:
+                    update = session.query(Metric).filter(
+                        Metric.id == value,
+                        Metric.status == 'active',
+                        Metric.creator == r.creator,
+                    ).update({"resource_id": r.id, "name": name})
+                except exception.DBDuplicateEntry:
+                    raise indexer.NamedMetricAlreadyExists(name)
+                if update == 0:
+                    raise indexer.NoSuchMetric(value)
+            else:
+                unit = value.get('unit')
+                ap_name = value['archive_policy_name']
+                m = Metric(id=uuid.uuid4(),
+                           creator=r.creator,
+                           archive_policy_name=ap_name,
+                           name=name,
+                           unit=unit,
+                           resource_id=r.id)
+                session.add(m)
+                try:
+                    session.flush()
+                except exception.DBDuplicateEntry:
+                    raise indexer.NamedMetricAlreadyExists(name)
+                except exception.DBReferenceError as e:
+                    if (e.constraint ==
+                       'fk_metric_ap_name_ap_name'):
+                        raise indexer.NoSuchArchivePolicy(ap_name)
+                    raise
+
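+        # Expire the cached relationship so the next access to r.metrics
+        # reloads the fresh list from the database.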
+        session.expire(r, ['metrics'])
+
+    @retry_on_deadlock
+    def delete_resource(self, resource_id):
+        with self.facade.writer() as session:
+            # We are going to delete the resource; the ON DELETE will set the
+            # resource_id of the attached metrics to NULL, so we just have to
+            # mark their status as 'delete'
+            session.query(Metric).filter(
+                Metric.resource_id == resource_id).update(
+                    {"status": "delete"})
+            if session.query(Resource).filter(
+                    Resource.id == resource_id).delete() == 0:
+                raise indexer.NoSuchResource(resource_id)
+
+    @retry_on_deadlock
+    def delete_resources(self, resource_type='generic',
+                         attribute_filter=None):
+        if not attribute_filter:
+            raise ValueError("attribute_filter must be set")
+
+        with self.facade.writer() as session:
+            target_cls = self._resource_type_to_mappers(
+                session, resource_type)["resource"]
+
+            q = session.query(target_cls.id)
+
+            engine = session.connection()
+            try:
+                f = QueryTransformer.build_filter(engine.dialect.name,
+                                                  target_cls,
+                                                  attribute_filter)
+            except indexer.QueryAttributeError as e:
+                # NOTE(jd) The QueryAttributeError does not know about
+                # resource_type, so convert it
+                raise indexer.ResourceAttributeError(resource_type,
+                                                     e.attribute)
+
+            q = q.filter(f)
+
+            session.query(Metric).filter(
+                Metric.resource_id.in_(q)
+            ).update({"status": "delete"},
+                     synchronize_session=False)
+            return q.delete(synchronize_session=False)
+
+    @retry_on_deadlock
+    def get_resource(self, resource_type, resource_id, with_metrics=False):
+        with self.facade.independent_reader() as session:
+            resource_cls = self._resource_type_to_mappers(
+                session, resource_type)['resource']
+            q = session.query(resource_cls).filter(
+                resource_cls.id == resource_id)
+            if with_metrics:
+                q = q.options(sqlalchemy.orm.joinedload('metrics'))
+            return q.first()
+
+    def _get_history_result_mapper(self, session, resource_type):
+        mappers = self._resource_type_to_mappers(session, resource_type)
+        resource_cls = mappers['resource']
+        history_cls = mappers['history']
+
+        resource_cols = {}
+        history_cols = {}
+        for col in sqlalchemy.inspect(history_cls).columns:
+            history_cols[col.name] = col
+            if col.name in ["revision", "revision_end"]:
+                value = None if col.name == "revision_end" else -1
+                resource_cols[col.name] = sqlalchemy.bindparam(
+                    col.name, value, col.type).label(col.name)
+            else:
+                resource_cols[col.name] = getattr(resource_cls, col.name)
+        s1 = sqlalchemy.select(history_cols.values())
+        s2 = sqlalchemy.select(resource_cols.values())
+        if resource_type != "generic":
+            s1 = s1.where(history_cls.revision == ResourceHistory.revision)
+            s2 = s2.where(resource_cls.id == Resource.id)
+        union_stmt = sqlalchemy.union(s1, s2)
+        stmt = union_stmt.alias("result")
+
+        class Result(base.ResourceJsonifier, base.GnocchiBase):
+            def __iter__(self):
+                return iter((key, getattr(self, key)) for key in stmt.c.keys())
+
+        sqlalchemy.orm.mapper(
+            Result, stmt, primary_key=[stmt.c.id, stmt.c.revision],
+            properties={
+                'metrics': sqlalchemy.orm.relationship(
+                    Metric,
+                    primaryjoin=sqlalchemy.and_(
+                        Metric.resource_id == stmt.c.id,
+                        Metric.status == 'active'),
+                    foreign_keys=Metric.resource_id)
+            })
+
+        return Result
+
+    @retry_on_deadlock
+    def list_resources(self, resource_type='generic',
+                       attribute_filter=None,
+                       details=False,
+                       history=False,
+                       limit=None,
+                       marker=None,
+                       sorts=None):
+        sorts = sorts or []
+
+        with self.facade.independent_reader() as session:
+            if history:
+                target_cls = self._get_history_result_mapper(
+                    session, resource_type)
+                unique_keys = ["id", "revision"]
+            else:
+                target_cls = self._resource_type_to_mappers(
+                    session, resource_type)["resource"]
+                unique_keys = ["id"]
+
+            q = session.query(target_cls)
+
+            if attribute_filter:
+                engine = session.connection()
+                try:
+                    f = QueryTransformer.build_filter(engine.dialect.name,
+                                                      target_cls,
+                                                      attribute_filter)
+                except indexer.QueryAttributeError as e:
+                    # NOTE(jd) The QueryAttributeError does not know about
+                    # resource_type, so convert it
+                    raise indexer.ResourceAttributeError(resource_type,
+                                                         e.attribute)
+
+                q = q.filter(f)
+
+            sort_keys, sort_dirs = self._build_sort_keys(sorts, unique_keys)
+
+            if marker:
+                marker_q = session.query(target_cls)
+                if history:
+                    try:
+                        rid, rrev = marker.split("@")
+                        rrev = int(rrev)
+                    except ValueError:
+                        resource_marker = None
+                    else:
+                        resource_marker = marker_q.filter(
+                            target_cls.id == rid,
+                            target_cls.revision == rrev).first()
+                else:
+                    resource_marker = marker_q.filter(
+                        target_cls.id == marker).first()
+
+                if resource_marker is None:
+                    raise indexer.InvalidPagination(
+                        "Invalid marker: `%s'" % marker)
+            else:
+                resource_marker = None
+
+            try:
+                q = oslo_db_utils.paginate_query(q, target_cls, limit=limit,
+                                                 sort_keys=sort_keys,
+                                                 marker=resource_marker,
+                                                 sort_dirs=sort_dirs)
+            except ValueError as e:
+                raise indexer.InvalidPagination(e)
+            except exception.InvalidSortKey as e:
+                raise indexer.InvalidPagination(e)
+
+            # Always include metrics
+            q = q.options(sqlalchemy.orm.joinedload("metrics"))
+            all_resources = q.all()
+
+            if details:
+                grouped_by_type = itertools.groupby(
+                    all_resources, lambda r: (r.revision != -1, r.type))
+                all_resources = []
+                for (is_history, type), resources in grouped_by_type:
+                    if type == 'generic':
+                        # No need for a second query
+                        all_resources.extend(resources)
+                    else:
+                        try:
+                            target_cls = self._resource_type_to_mappers(
+                                session, type)['history' if is_history else
+                                               'resource']
+                        except (indexer.UnexpectedResourceTypeState,
+                                indexer.NoSuchResourceType):
+                            # NOTE(sileht): This resource_type have been
+                            # removed in the meantime.
+                            continue
+                        if is_history:
+                            f = target_cls.revision.in_([r.revision
+                                                         for r in resources])
+                        else:
+                            f = target_cls.id.in_([r.id for r in resources])
+
+                        q = session.query(target_cls).filter(f)
+                        # Always include metrics
+                        q = q.options(sqlalchemy.orm.joinedload('metrics'))
+                        try:
+                            all_resources.extend(q.all())
+                        except sqlalchemy.exc.ProgrammingError as e:
+                            # NOTE(jd) This exception can happen when the
+                            # resources and their resource type have been
+                            # deleted in the meantime:
+                            #  sqlalchemy.exc.ProgrammingError:
+                            #    (pymysql.err.ProgrammingError)
+                            #    (1146, "Table \'test.rt_f00\' doesn\'t exist")
+                            # In that case, just ignore those resources.
+                            if (not pymysql
+                               or not isinstance(
+                                   e, sqlalchemy.exc.ProgrammingError)
+                               or not isinstance(
+                                   e.orig, pymysql.err.ProgrammingError)
+                               or (e.orig.args[0]
+                                   != pymysql.constants.ER.NO_SUCH_TABLE)):
+                                raise
+
+            return all_resources
+
+    def expunge_metric(self, id):
+        with self.facade.writer() as session:
+            if session.query(Metric).filter(Metric.id == id).delete() == 0:
+                raise indexer.NoSuchMetric(id)
+
+    def delete_metric(self, id):
+        with self.facade.writer() as session:
+            if session.query(Metric).filter(
+                Metric.id == id, Metric.status == 'active').update(
+                    {"status": "delete", "resource_id": None}) == 0:
+                raise indexer.NoSuchMetric(id)
+
+    @staticmethod
+    def _build_sort_keys(sorts, unique_keys):
+        # transform the api-wg representation to the oslo.db one
+        sort_keys = []
+        sort_dirs = []
+        for sort in sorts:
+            sort_key, __, sort_dir = sort.partition(":")
+            sort_keys.append(sort_key.strip())
+            sort_dirs.append(sort_dir or 'asc')
+
+        # paginate_query requires at least one unique column
+        for key in unique_keys:
+            if key not in sort_keys:
+                sort_keys.append(key)
+                sort_dirs.append('asc')
+
+        return sort_keys, sort_dirs
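+
+    # For example, sorts=["started_at:desc", "type"] with unique_keys=["id"]
+    # yields (["started_at", "type", "id"], ["desc", "asc", "asc"]).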
+
+
+def _operator_in(field_name, value):
+    # Do not generate empty IN comparison
+    # https://github.com/gnocchixyz/gnocchi/issues/530
+    if len(value):
+        return field_name.in_(value)
+
+
+class QueryTransformer(object):
+
+    unary_operators = {
+        u"not": sqlalchemy.not_,
+    }
+
+    binary_operators = {
+        u"=": operator.eq,
+        u"==": operator.eq,
+        u"eq": operator.eq,
+
+        u"<": operator.lt,
+        u"lt": operator.lt,
+
+        u">": operator.gt,
+        u"gt": operator.gt,
+
+        u"<=": operator.le,
+        u"≤": operator.le,
+        u"le": operator.le,
+
+        u">=": operator.ge,
+        u"≥": operator.ge,
+        u"ge": operator.ge,
+
+        u"!=": operator.ne,
+        u"≠": operator.ne,
+        u"ne": operator.ne,
+
+        u"in": _operator_in,
+
+        u"like": lambda field, value: field.like(value),
+    }
+
+    multiple_operators = {
+        u"or": sqlalchemy.or_,
+        u"∨": sqlalchemy.or_,
+
+        u"and": sqlalchemy.and_,
+        u"∧": sqlalchemy.and_,
+    }
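+
+    # A filter tree is a nested dict built from these operators, e.g.
+    # (illustrative):
+    #     {"and": [{"=": {"type": "instance"}},
+    #              {"not": {"=": {"ended_at": None}}}]}
+    # build_filter() recursively turns such a tree into a SQLAlchemy clause.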
+
+    converters = (
+        (types.TimestampUTC, utils.to_datetime),
+        (sa_types.String, six.text_type),
+        (sa_types.Integer, int),
+        (sa_types.Numeric, float),
+    )
+
+    @classmethod
+    def _handle_multiple_op(cls, engine, table, op, nodes):
+        return op(*[
+            cls.build_filter(engine, table, node)
+            for node in nodes
+        ])
+
+    @classmethod
+    def _handle_unary_op(cls, engine, table, op, node):
+        return op(cls.build_filter(engine, table, node))
+
+    @classmethod
+    def _handle_binary_op(cls, engine, table, op, nodes):
+        try:
+            field_name, value = list(nodes.items())[0]
+        except Exception:
+            raise indexer.QueryError()
+
+        if field_name == "lifespan":
+            attr = getattr(table, "ended_at") - getattr(table, "started_at")
+            value = datetime.timedelta(
+                seconds=utils.timespan_total_seconds(
+                    utils.to_timespan(value)))
+            if engine == "mysql":
+                # NOTE(jd) Subtracting two timestamps in MySQL produces weird
+                # results based on string comparison; it does not work at all
+                # with seconds or anything finer. Just skip it.
+                raise exceptions.NotImplementedError
+        elif field_name == "created_by_user_id":
+            creator = getattr(table, "creator")
+            if op == operator.eq:
+                return creator.like("%s:%%" % value)
+            elif op == operator.ne:
+                return sqlalchemy.not_(creator.like("%s:%%" % value))
+            elif op == cls.binary_operators[u"like"]:
+                return creator.like("%s:%%" % value)
+            raise indexer.QueryValueError(value, field_name)
+        elif field_name == "created_by_project_id":
+            creator = getattr(table, "creator")
+            if op == operator.eq:
+                return creator.like("%%:%s" % value)
+            elif op == operator.ne:
+                return sqlalchemy.not_(creator.like("%%:%s" % value))
+            elif op == cls.binary_operators[u"like"]:
+                return creator.like("%%:%s" % value)
+            raise indexer.QueryValueError(value, field_name)
+        else:
+            try:
+                attr = getattr(table, field_name)
+            except AttributeError:
+                raise indexer.QueryAttributeError(table, field_name)
+
+            if not hasattr(attr, "type"):
+                # This is not a column
+                raise indexer.QueryAttributeError(table, field_name)
+
+            # Convert value to the right type
+            if value is not None:
+                for klass, converter in cls.converters:
+                    if isinstance(attr.type, klass):
+                        try:
+                            if isinstance(value, list):
+                                # we got a list for in_ operator
+                                value = [converter(v) for v in value]
+                            else:
+                                value = converter(value)
+                        except Exception:
+                            raise indexer.QueryValueError(value, field_name)
+                        break
+
+        if op == operator.ne and value is not None:
+            return operator.or_(operator.eq(attr, None),
+                                op(attr, value))
+        else:
+            return op(attr, value)
+
+    @classmethod
+    def build_filter(cls, engine, table, tree):
+        try:
+            operator, nodes = list(tree.items())[0]
+        except Exception:
+            raise indexer.QueryError()
+
+        try:
+            op = cls.multiple_operators[operator]
+        except KeyError:
+            try:
+                op = cls.binary_operators[operator]
+            except KeyError:
+                try:
+                    op = cls.unary_operators[operator]
+                except KeyError:
+                    raise indexer.QueryInvalidOperator(operator)
+                return cls._handle_unary_op(engine, table, op, nodes)
+            return cls._handle_binary_op(engine, table, op, nodes)
+        return cls._handle_multiple_op(engine, table, op, nodes)
diff --git a/gnocchi/indexer/sqlalchemy_base.py b/gnocchi/indexer/sqlalchemy_base.py
new file mode 100644
index 0000000000000000000000000000000000000000..7def0cf43cbfb0166234aa91aacd7e5b6430f0f5
--- /dev/null
+++ b/gnocchi/indexer/sqlalchemy_base.py
@@ -0,0 +1,365 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import
+
+from oslo_db.sqlalchemy import models
+import six
+import sqlalchemy
+from sqlalchemy.ext import declarative
+import sqlalchemy_utils
+
+from gnocchi import archive_policy
+from gnocchi import indexer
+from gnocchi.indexer import sqlalchemy_types as types
+from gnocchi import resource_type
+from gnocchi import utils
+
+Base = declarative.declarative_base()
+
+COMMON_TABLES_ARGS = {'mysql_charset': "utf8",
+                      'mysql_engine': "InnoDB"}
+
+
+class GnocchiBase(models.ModelBase):
+    __table_args__ = (
+        COMMON_TABLES_ARGS,
+    )
+
+
+class ArchivePolicyDefinitionType(sqlalchemy_utils.JSONType):
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            return super(
+                ArchivePolicyDefinitionType, self).process_bind_param(
+                    [v.serialize() for v in value],
+                    dialect)
+
+    def process_result_value(self, value, dialect):
+        values = super(ArchivePolicyDefinitionType,
+                       self).process_result_value(value, dialect)
+        return [archive_policy.ArchivePolicyItem(**v) for v in values]
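+
+# The column is stored as JSON; e.g. a one-definition policy might be
+# serialized as (illustrative):
+#     [{"granularity": 300.0, "points": 12, "timespan": 3600.0}]
+# and is rebuilt into ArchivePolicyItem objects when loaded.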
+
+
+class SetType(sqlalchemy_utils.JSONType):
+    def process_result_value(self, value, dialect):
+        return set(super(SetType,
+                         self).process_result_value(value, dialect))
+
+
+class ArchivePolicy(Base, GnocchiBase, archive_policy.ArchivePolicy):
+    __tablename__ = 'archive_policy'
+
+    name = sqlalchemy.Column(sqlalchemy.String(255), primary_key=True)
+    back_window = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
+    definition = sqlalchemy.Column(ArchivePolicyDefinitionType, nullable=False)
+    # TODO(jd) Use an array of string instead, PostgreSQL can do that
+    aggregation_methods = sqlalchemy.Column(SetType,
+                                            nullable=False)
+
+
+class Metric(Base, GnocchiBase, indexer.Metric):
+    __tablename__ = 'metric'
+    __table_args__ = (
+        sqlalchemy.Index('ix_metric_status', 'status'),
+        sqlalchemy.UniqueConstraint("resource_id", "name",
+                                    name="uniq_metric0resource_id0name"),
+        COMMON_TABLES_ARGS,
+    )
+
+    id = sqlalchemy.Column(sqlalchemy_utils.UUIDType(),
+                           primary_key=True)
+    archive_policy_name = sqlalchemy.Column(
+        sqlalchemy.String(255),
+        sqlalchemy.ForeignKey(
+            'archive_policy.name',
+            ondelete="RESTRICT",
+            name="fk_metric_ap_name_ap_name"),
+        nullable=False)
+    archive_policy = sqlalchemy.orm.relationship(ArchivePolicy, lazy="joined")
+    creator = sqlalchemy.Column(sqlalchemy.String(255))
+    resource_id = sqlalchemy.Column(
+        sqlalchemy_utils.UUIDType(),
+        sqlalchemy.ForeignKey('resource.id',
+                              ondelete="SET NULL",
+                              name="fk_metric_resource_id_resource_id"))
+    name = sqlalchemy.Column(sqlalchemy.String(255))
+    unit = sqlalchemy.Column(sqlalchemy.String(31))
+    status = sqlalchemy.Column(sqlalchemy.Enum('active', 'delete',
+                                               name="metric_status_enum"),
+                               nullable=False,
+                               server_default='active')
+
+    def jsonify(self):
+        d = {
+            "id": self.id,
+            "creator": self.creator,
+            "name": self.name,
+            "unit": self.unit,
+        }
+        unloaded = sqlalchemy.inspect(self).unloaded
+        if 'resource' in unloaded:
+            d['resource_id'] = self.resource_id
+        else:
+            d['resource'] = self.resource
+        if 'archive_policy' in unloaded:
+            d['archive_policy_name'] = self.archive_policy_name
+        else:
+            d['archive_policy'] = self.archive_policy
+
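+        # The creator is stored as "<user_id>:<project_id>"; split it back
+        # into the backward-compatible created_by_* fields.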
+        if self.creator is None:
+            d['created_by_user_id'] = d['created_by_project_id'] = None
+        else:
+            d['created_by_user_id'], _, d['created_by_project_id'] = (
+                self.creator.partition(":")
+            )
+
+        return d
+
+    def __eq__(self, other):
+        # NOTE(jd) If `other` is a SQL Metric, we only compare
+        # archive_policy_name, and we don't compare archive_policy that might
+        # not be loaded. Otherwise we fall back to the original comparison
+        # for indexer.Metric.
+        return ((isinstance(other, Metric)
+                 and self.id == other.id
+                 and self.archive_policy_name == other.archive_policy_name
+                 and self.creator == other.creator
+                 and self.name == other.name
+                 and self.unit == other.unit
+                 and self.resource_id == other.resource_id)
+                or (indexer.Metric.__eq__(self, other)))
+
+    __hash__ = indexer.Metric.__hash__
+
+
+RESOURCE_TYPE_SCHEMA_MANAGER = resource_type.ResourceTypeSchemaManager(
+    "gnocchi.indexer.sqlalchemy.resource_type_attribute")
+
+
+class ResourceTypeAttributes(sqlalchemy_utils.JSONType):
+    def process_bind_param(self, attributes, dialect):
+        return super(ResourceTypeAttributes, self).process_bind_param(
+            attributes.jsonify(), dialect)
+
+    def process_result_value(self, value, dialect):
+        attributes = super(ResourceTypeAttributes, self).process_result_value(
+            value, dialect)
+        return RESOURCE_TYPE_SCHEMA_MANAGER.attributes_from_dict(attributes)
+
+
+class ResourceType(Base, GnocchiBase, resource_type.ResourceType):
+    __tablename__ = 'resource_type'
+    __table_args__ = (
+        sqlalchemy.UniqueConstraint("tablename",
+                                    name="uniq_resource_type0tablename"),
+        COMMON_TABLES_ARGS,
+    )
+
+    name = sqlalchemy.Column(sqlalchemy.String(255), primary_key=True,
+                             nullable=False)
+    tablename = sqlalchemy.Column(sqlalchemy.String(35), nullable=False)
+    attributes = sqlalchemy.Column(ResourceTypeAttributes)
+    state = sqlalchemy.Column(sqlalchemy.Enum("active", "creating",
+                                              "creation_error", "deleting",
+                                              "deletion_error", "updating",
+                                              "updating_error",
+                                              name="resource_type_state_enum"),
+                              nullable=False,
+                              server_default="creating")
+    updated_at = sqlalchemy.Column(types.TimestampUTC, nullable=False,
+                                   # NOTE(jd): We would like to use
+                                   # sqlalchemy.func.now, but we can't
+                                   # because the type of PreciseTimestamp in
+                                   # MySQL is not a Timestamp, so it would
+                                   # not store a timestamp but a date as an
+                                   # integer.
+                                   default=lambda: utils.utcnow())
+
+    def to_baseclass(self):
+        cols = {}
+        for attr in self.attributes:
+            cols[attr.name] = sqlalchemy.Column(attr.satype,
+                                                nullable=not attr.required)
+        return type(str("%s_base" % self.tablename), (object, ), cols)
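+
+    # Example (illustrative): a resource type with tablename "instance" and
+    # a required string attribute "host" produces a dynamic "instance_base"
+    # class with host = sqlalchemy.Column(String(255), nullable=False).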
+
+
+class ResourceJsonifier(indexer.Resource):
+    def jsonify(self, attrs=None):
+        d = dict(self)
+        del d['revision']
+        if 'metrics' not in sqlalchemy.inspect(self).unloaded:
+            d['metrics'] = dict((m.name, six.text_type(m.id))
+                                for m in self.metrics)
+
+        if self.creator is None:
+            d['created_by_user_id'] = d['created_by_project_id'] = None
+        else:
+            d['created_by_user_id'], _, d['created_by_project_id'] = (
+                self.creator.partition(":")
+            )
+
+        if attrs:
+            return {key: val for key, val in d.items() if key in attrs}
+        else:
+            return d
+
+
+class ResourceMixin(ResourceJsonifier):
+    @declarative.declared_attr
+    def __table_args__(cls):
+        return (sqlalchemy.CheckConstraint('started_at <= ended_at',
+                                           name="ck_started_before_ended"),
+                COMMON_TABLES_ARGS)
+
+    @declarative.declared_attr
+    def type(cls):
+        return sqlalchemy.Column(
+            sqlalchemy.String(255),
+            sqlalchemy.ForeignKey('resource_type.name',
+                                  ondelete="RESTRICT",
+                                  name="fk_%s_resource_type_name" %
+                                  cls.__tablename__),
+            nullable=False)
+
+    creator = sqlalchemy.Column(sqlalchemy.String(255))
+    started_at = sqlalchemy.Column(types.TimestampUTC, nullable=False,
+                                   default=lambda: utils.utcnow())
+    revision_start = sqlalchemy.Column(types.TimestampUTC, nullable=False,
+                                       default=lambda: utils.utcnow())
+    ended_at = sqlalchemy.Column(types.TimestampUTC)
+    user_id = sqlalchemy.Column(sqlalchemy.String(255))
+    project_id = sqlalchemy.Column(sqlalchemy.String(255))
+    original_resource_id = sqlalchemy.Column(sqlalchemy.String(255),
+                                             nullable=False)
+
+
+class Resource(ResourceMixin, Base, GnocchiBase):
+    __tablename__ = 'resource'
+    _extra_keys = ['revision', 'revision_end']
+    revision = -1
+    id = sqlalchemy.Column(sqlalchemy_utils.UUIDType(),
+                           primary_key=True)
+    revision_end = None
+    metrics = sqlalchemy.orm.relationship(
+        Metric, backref="resource",
+        primaryjoin="and_(Resource.id == Metric.resource_id, "
+        "Metric.status == 'active')")
+
+    def get_metric(self, metric_name):
+        m = super(Resource, self).get_metric(metric_name)
+        if m:
+            if sqlalchemy.orm.session.object_session(self):
+                # NOTE(jd) The resource is already loaded so that should not
+                # trigger a SELECT
+                m.resource
+            return m
+
+
+class ResourceHistory(ResourceMixin, Base, GnocchiBase):
+    __tablename__ = 'resource_history'
+
+    revision = sqlalchemy.Column(sqlalchemy.Integer, autoincrement=True,
+                                 primary_key=True)
+    id = sqlalchemy.Column(sqlalchemy_utils.UUIDType(),
+                           sqlalchemy.ForeignKey(
+                               'resource.id',
+                               ondelete="CASCADE",
+                               name="fk_rh_id_resource_id"),
+                           nullable=False)
+    revision_end = sqlalchemy.Column(types.TimestampUTC, nullable=False,
+                                     default=lambda: utils.utcnow())
+    metrics = sqlalchemy.orm.relationship(
+        Metric, primaryjoin="Metric.resource_id == ResourceHistory.id",
+        foreign_keys='Metric.resource_id')
+
+
+class ResourceExt(object):
+    """Default extension class for plugin
+
+    Used for plugin that doesn't need additional columns
+    """
+
+
+class ResourceExtMixin(object):
+    @declarative.declared_attr
+    def __table_args__(cls):
+        return (COMMON_TABLES_ARGS, )
+
+    @declarative.declared_attr
+    def id(cls):
+        tablename_compact = cls.__tablename__
+        if tablename_compact.endswith("_history"):
+            tablename_compact = tablename_compact[:-6]
+        return sqlalchemy.Column(
+            sqlalchemy_utils.UUIDType(),
+            sqlalchemy.ForeignKey(
+                'resource.id',
+                ondelete="CASCADE",
+                name="fk_%s_id_resource_id" % tablename_compact,
+                # NOTE(sileht): We use use_alter to ensure that postgresql
+                # does not use an AccessExclusiveLock on the destination
+                # table
+                use_alter=True),
+            primary_key=True
+        )
+
+
+class ResourceHistoryExtMixin(object):
+    @declarative.declared_attr
+    def __table_args__(cls):
+        return (COMMON_TABLES_ARGS, )
+
+    @declarative.declared_attr
+    def revision(cls):
+        tablename_compact = cls.__tablename__
+        if tablename_compact.endswith("_history"):
+            tablename_compact = tablename_compact[:-6]
+        return sqlalchemy.Column(
+            sqlalchemy.Integer,
+            sqlalchemy.ForeignKey(
+                'resource_history.revision',
+                ondelete="CASCADE",
+                name="fk_%s_revision_rh_revision"
+                % tablename_compact,
+                # NOTE(sileht): We use use_alter to ensure that postgresql
+                # does not use an AccessExclusiveLock on the destination
+                # table
+                use_alter=True),
+            primary_key=True
+        )
+
+
+class HistoryModelIterator(models.ModelIterator):
+    def __next__(self):
+        # NOTE(sileht): Our custom resource attribute columns don't
+        # have the same name in the database as in the sqlalchemy model,
+        # so strip the "f_" prefix to recover the model attribute name
+        n = six.advance_iterator(self.i)
+        model_attr = n[2:] if n[:2] == "f_" else n
+        return model_attr, getattr(self.model, n)
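+
+    # Example (illustrative): a mapped column named "f_flavor_id" is
+    # yielded as ("flavor_id", value), matching the REST attribute name.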
+
+
+class ArchivePolicyRule(Base, GnocchiBase):
+    __tablename__ = 'archive_policy_rule'
+
+    name = sqlalchemy.Column(sqlalchemy.String(255), primary_key=True)
+    archive_policy_name = sqlalchemy.Column(
+        sqlalchemy.String(255),
+        sqlalchemy.ForeignKey(
+            'archive_policy.name',
+            ondelete="RESTRICT",
+            name="fk_apr_ap_name_ap_name"),
+        nullable=False)
+    metric_pattern = sqlalchemy.Column(sqlalchemy.String(255), nullable=False)
diff --git a/gnocchi/indexer/sqlalchemy_extension.py b/gnocchi/indexer/sqlalchemy_extension.py
new file mode 100644
index 0000000000000000000000000000000000000000..eaba6163e58e018ad7dcee0c15b349545a2f035e
--- /dev/null
+++ b/gnocchi/indexer/sqlalchemy_extension.py
@@ -0,0 +1,66 @@
+# -*- encoding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import absolute_import
+
+import sqlalchemy
+import sqlalchemy_utils
+
+from gnocchi.indexer import sqlalchemy_types
+from gnocchi import resource_type
+
+
+class SchemaMixin(object):
+    def for_filling(self, dialect):
+        # NOTE(sileht): This must be used only when patching a resource type
+        # to fill all rows with a default value, before switching the
+        # server_default back to None
+        if self.fill is None:
+            return None
+
+        # NOTE(sileht): server_default must be converted into a SQL element
+        return sqlalchemy.literal(self.fill)
+
+
+class StringSchema(resource_type.StringSchema, SchemaMixin):
+    @property
+    def satype(self):
+        return sqlalchemy.String(self.max_length)
+
+
+class UUIDSchema(resource_type.UUIDSchema, SchemaMixin):
+    satype = sqlalchemy_utils.UUIDType()
+
+    def for_filling(self, dialect):
+        if self.fill is None:
+            return None
+        return sqlalchemy.literal(
+            self.satype.process_bind_param(self.fill, dialect))
+
+
+class NumberSchema(resource_type.NumberSchema, SchemaMixin):
+    satype = sqlalchemy.Float(53)
+
+
+class BoolSchema(resource_type.BoolSchema, SchemaMixin):
+    satype = sqlalchemy.Boolean
+
+
+class DatetimeSchema(resource_type.DatetimeSchema, SchemaMixin):
+    satype = sqlalchemy_types.TimestampUTC()
+
+    def for_filling(self, dialect):
+        if self.fill is None:
+            return None
+        return self.satype.process_bind_param(self.fill, dialect).isoformat()
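+
+
+# Example (illustrative): when a required attribute is added with a "fill"
+# option, for_filling() supplies the temporary server_default; a
+# StringSchema with fill="unknown" would yield sqlalchemy.literal("unknown").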
diff --git a/gnocchi/indexer/sqlalchemy_legacy_resources.py b/gnocchi/indexer/sqlalchemy_legacy_resources.py
new file mode 100644
index 0000000000000000000000000000000000000000..8390476bb9cce29ad9ec2a9c941897cc0656d950
--- /dev/null
+++ b/gnocchi/indexer/sqlalchemy_legacy_resources.py
@@ -0,0 +1,78 @@
+# -*- encoding: utf-8 -*-
+
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+# NOTE(sileht): this code is also in alembic migration
+ceilometer_tablenames = {
+    "instance_network_interface": "instance_net_int",
+    "host_network_interface": "host_net_int",
+}
+ceilometer_resources = {
+    "generic": {},
+    "image": {
+        "name": {"type": "string", "min_length": 0, "max_length": 255,
+                 "required": True},
+        "container_format": {"type": "string", "min_length": 0,
+                             "max_length": 255, "required": True},
+        "disk_format": {"type": "string", "min_length": 0, "max_length": 255,
+                        "required": True},
+    },
+    "instance": {
+        "flavor_id": {"type": "string", "min_length": 0, "max_length": 255,
+                      "required": True},
+        "image_ref": {"type": "string", "min_length": 0, "max_length": 255,
+                      "required": False},
+        "host": {"type": "string", "min_length": 0, "max_length": 255,
+                 "required": True},
+        "display_name": {"type": "string", "min_length": 0, "max_length": 255,
+                         "required": True},
+        "server_group": {"type": "string", "min_length": 0, "max_length": 255,
+                         "required": False},
+    },
+    "instance_disk": {
+        "name": {"type": "string", "min_length": 0, "max_length": 255,
+                 "required": True},
+        "instance_id": {"type": "uuid", "required": True},
+    },
+    "instance_network_interface": {
+        "name": {"type": "string", "min_length": 0, "max_length": 255,
+                 "required": True},
+        "instance_id": {"type": "uuid", "required": True},
+    },
+    "volume": {
+        "display_name": {"type": "string", "min_length": 0, "max_length": 255,
+                         "required": False},
+    },
+    "swift_account": {},
+    "ceph_account": {},
+    "network": {},
+    "identity": {},
+    "ipmi": {},
+    "stack": {},
+    "host": {
+        "host_name": {"type": "string", "min_length": 0, "max_length": 255,
+                      "required": True},
+    },
+    "host_network_interface": {
+        "host_name": {"type": "string", "min_length": 0, "max_length": 255,
+                      "required": True},
+        "device_name": {"type": "string", "min_length": 0, "max_length": 255,
+                        "required": False},
+    },
+    "host_disk": {
+        "host_name": {"type": "string", "min_length": 0, "max_length": 255,
+                      "required": True},
+        "device_name": {"type": "string", "min_length": 0, "max_length": 255,
+                        "required": False},
+    },
+}
diff --git a/gnocchi/indexer/sqlalchemy_types.py b/gnocchi/indexer/sqlalchemy_types.py
new file mode 100644
index 0000000000000000000000000000000000000000..b566465aaa7d7b6e4e31012dd0680aed1172f8ae
--- /dev/null
+++ b/gnocchi/indexer/sqlalchemy_types.py
@@ -0,0 +1,110 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from __future__ import absolute_import
+
+import calendar
+import datetime
+import decimal
+
+import iso8601
+import sqlalchemy
+from sqlalchemy.dialects import mysql
+from sqlalchemy import types
+
+from gnocchi import utils
+
+
+class PreciseTimestamp(types.TypeDecorator):
+    """Represents a timestamp precise to the microsecond.
+
+    Deprecated in favor of TimestampUTC.
+    Still used in alembic migrations.
+    """
+
+    impl = sqlalchemy.DateTime
+
+    @staticmethod
+    def _decimal_to_dt(dec):
+        """Return a datetime from Decimal unixtime format."""
+        if dec is None:
+            return None
+
+        integer = int(dec)
+        micro = (dec - decimal.Decimal(integer)) * decimal.Decimal(1000000)
+        dt = datetime.datetime.utcfromtimestamp(integer)
+        return dt.replace(microsecond=int(round(micro)))
+
+    @staticmethod
+    def _dt_to_decimal(utc):
+        """Datetime to Decimal.
+
+        Some databases don't store microseconds in datetime
+        so we always store as Decimal unixtime.
+        """
+        if utc is None:
+            return None
+
+        decimal.getcontext().prec = 30
+        return (decimal.Decimal(str(calendar.timegm(utc.utctimetuple()))) +
+                (decimal.Decimal(str(utc.microsecond)) /
+                 decimal.Decimal("1000000.0")))
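+
+    # Example (illustrative): datetime(2017, 1, 1, 0, 0, 0, 123456) maps to
+    # Decimal('1483228800.123456'); _decimal_to_dt() reverses the conversion.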
+
+    def load_dialect_impl(self, dialect):
+        if dialect.name == 'mysql':
+            return dialect.type_descriptor(
+                types.DECIMAL(precision=20,
+                              scale=6,
+                              asdecimal=True))
+        return dialect.type_descriptor(self.impl)
+
+    def compare_against_backend(self, dialect, conn_type):
+        if dialect.name == 'mysql':
+            return issubclass(type(conn_type), types.DECIMAL)
+        return issubclass(type(conn_type), type(self.impl))
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            value = utils.normalize_time(value)
+        if dialect.name == 'mysql':
+            return self._dt_to_decimal(value)
+        return value
+
+    def process_result_value(self, value, dialect):
+        if dialect.name == 'mysql':
+            value = self._decimal_to_dt(value)
+        if value is not None:
+            return utils.normalize_time(value).replace(
+                tzinfo=iso8601.iso8601.UTC)
+
+
+class TimestampUTC(types.TypeDecorator):
+    """Represents a timestamp precise to the microsecond."""
+
+    impl = sqlalchemy.DateTime
+
+    def load_dialect_impl(self, dialect):
+        if dialect.name == 'mysql':
+            return dialect.type_descriptor(mysql.DATETIME(fsp=6))
+        return self.impl
+
+    def process_bind_param(self, value, dialect):
+        if value is not None:
+            return utils.normalize_time(value)
+
+    def process_result_value(self, value, dialect):
+        if value is not None:
+            return value.replace(tzinfo=iso8601.iso8601.UTC)
diff --git a/gnocchi/json.py b/gnocchi/json.py
new file mode 100644
index 0000000000000000000000000000000000000000..3147d38db38b5960dbbd6aa2c1b7b88f46d19853
--- /dev/null
+++ b/gnocchi/json.py
@@ -0,0 +1,62 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2015-2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import uuid
+
+import numpy
+import six
+import ujson
+
+
+def to_primitive(obj):
+    if isinstance(obj, ((six.text_type,)
+                        + six.integer_types
+                        + (type(None), bool, float))):
+        return obj
+    if isinstance(obj, uuid.UUID):
+        return six.text_type(obj)
+    if isinstance(obj, datetime.datetime):
+        return obj.isoformat()
+    if isinstance(obj, numpy.datetime64):
+        # Do not include the nanoseconds part when it is zero
+        return str(obj).rpartition(".000000000")[0] + "+00:00"
+    if isinstance(obj, numpy.timedelta64):
+        return obj / numpy.timedelta64(1, 's')
+    if isinstance(obj, datetime.timedelta):
+        return obj.total_seconds()
+    # This mimics what Pecan implements in its default JSON encoder
+    if hasattr(obj, "jsonify"):
+        return to_primitive(obj.jsonify())
+    if isinstance(obj, dict):
+        return {to_primitive(k): to_primitive(v)
+                for k, v in obj.items()}
+    if hasattr(obj, 'iteritems'):
+        return to_primitive(dict(obj.iteritems()))
+    # Python 3 does not have iteritems
+    if hasattr(obj, 'items'):
+        return to_primitive(dict(obj.items()))
+    if hasattr(obj, '__iter__'):
+        return list(map(to_primitive, obj))
+    return obj
+
+
+def dumps(obj):
+    return ujson.dumps(to_primitive(obj))
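+
+
+# Example (illustrative):
+#   dumps({"id": uuid.UUID(int=0), "at": datetime.datetime(2017, 1, 1)})
+#   -> '{"id":"00000000-0000-0000-0000-000000000000","at":"2017-01-01T00:00:00"}'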
+
+
+# For convenience
+loads = ujson.loads
+load = ujson.load
diff --git a/gnocchi/opts.py b/gnocchi/opts.py
new file mode 100644
index 0000000000000000000000000000000000000000..e2d8f9c6f1381ee35282ff96bf565af618c4a79d
--- /dev/null
+++ b/gnocchi/opts.py
@@ -0,0 +1,252 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import copy
+import itertools
+import operator
+import pkg_resources
+import uuid
+
+from oslo_config import cfg
+
+import gnocchi.archive_policy
+import gnocchi.common.redis
+import gnocchi.indexer
+import gnocchi.rest.http_proxy_to_wsgi
+import gnocchi.storage
+import gnocchi.storage.ceph
+import gnocchi.storage.file
+import gnocchi.storage.s3
+import gnocchi.storage.swift
+
+
+# NOTE(sileht): The oslo.config interpolation is buggy when the value
+# is None; this replaces it with the expected empty string.
+# This will perhaps be fixed by https://review.openstack.org/#/c/417496/
+# but it seems some projects are relying on the bug...
+class CustomStrSubWrapper(cfg.ConfigOpts.StrSubWrapper):
+    def __getitem__(self, key):
+        value = super(CustomStrSubWrapper, self).__getitem__(key)
+        if value is None:
+            return ''
+        return value
+
+
+cfg.ConfigOpts.StrSubWrapper = CustomStrSubWrapper
+
+
+_STORAGE_OPTS = list(itertools.chain(gnocchi.storage.OPTS,
+                                     gnocchi.storage.ceph.OPTS,
+                                     gnocchi.storage.file.OPTS,
+                                     gnocchi.storage.swift.OPTS,
+                                     gnocchi.common.redis.OPTS,
+                                     gnocchi.storage.s3.OPTS))
+
+
+_INCOMING_OPTS = copy.deepcopy(_STORAGE_OPTS)
+for opt in _INCOMING_OPTS:
+    opt.default = '${storage.%s}' % opt.name
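+# For instance, assuming a "driver" option in the storage group, the
+# [incoming] "driver" option defaults to "${storage.driver}", so incoming
+# falls back to the [storage] settings unless explicitly overridden.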
+
+
+API_OPTS = (
+    cfg.HostAddressOpt('host',
+                       default="0.0.0.0",
+                       help="Host to listen on"),
+    cfg.PortOpt('port',
+                default=8041,
+                help="Port to listen on"),
+    cfg.StrOpt('uwsgi-mode',
+               default='http',
+               choices=["http", "http-socket", "socket"],
+               help="""Socket type to use for uWSGI:
+* http: support HTTP/1.1 and keepalive,
+  but not chunked encoding (InfluxDB)
+* http-socket/socket: support chunked encoding, but require an upstream HTTP
+  server for HTTP/1.1, keepalive and HTTP protocol correctness.
+""")
+)
+
+
+_cli_options = (
+    cfg.BoolOpt(
+        'debug',
+        short='d',
+        default=False,
+        help='If set to true, the logging level will be set to DEBUG.'),
+    cfg.BoolOpt(
+        'verbose',
+        short='v',
+        default=True,
+        help='If set to true, the logging level will be set to INFO.'),
+    cfg.StrOpt(
+        "log-dir",
+        help="Base directory for log files. "
+        "If not set, logging will go to stderr."),
+    cfg.StrOpt(
+        'log-file',
+        metavar='PATH',
+        help='(Optional) Name of log file to send logging output to. '
+        'If no default is set, logging will go to stderr as '
+        'defined by use_stderr.'),
+)
+
+
+def list_opts():
+    return [
+        ("DEFAULT", _cli_options + (
+            cfg.StrOpt(
+                'coordination_url',
+                secret=True,
+                deprecated_group="storage",
+                help='Coordination driver URL'),
+            cfg.IntOpt(
+                'parallel_operations',
+                min=1,
+                deprecated_name='aggregation_workers_number',
+                deprecated_group='storage',
+                help='Number of threads to use to parallelize '
+                'some operations. '
+                'Default is set to the number of CPUs available.'),
+            cfg.BoolOpt(
+                'use-syslog',
+                default=False,
+                help='Use syslog for logging.'),
+            cfg.BoolOpt(
+                'use-journal',
+                default=False,
+                help='Enable journald for logging. '
+                'If running in a systemd environment you may wish '
+                'to enable journal support. Doing so will use the '
+                'journal native protocol which includes structured '
+                'metadata in addition to log messages.'),
+            cfg.StrOpt(
+                'syslog-log-facility',
+                default='user',
+                help='Syslog facility to receive log lines.')
+        )),
+        ("indexer", gnocchi.indexer.OPTS),
+        ("metricd", (
+            cfg.IntOpt('workers', min=1,
+                       required=True,
+                       help='Number of workers for Gnocchi metric daemons. '
+                       'By default the available number of CPUs is used.'),
+            cfg.IntOpt('metric_processing_delay',
+                       default=60,
+                       required=True,
+                       deprecated_group='storage',
+                       help="How many seconds to wait between "
+                       "scheduling new metrics to process"),
+            cfg.BoolOpt(
+                'greedy', default=True,
+                help="Allow to bypass `metric_processing_delay` if metricd "
+                "is notified that measures are ready to be processed."
+            ),
+            cfg.IntOpt('metric_reporting_delay',
+                       deprecated_group='storage',
+                       default=120,
+                       min=-1,
+                       required=True,
+                       help="How many seconds to wait between "
+                       "metric ingestion reporting. Set value to -1 to "
+                       "disable reporting"),
+            cfg.IntOpt('metric_cleanup_delay',
+                       deprecated_group='storage',
+                       default=300,
+                       required=True,
+                       help="How many seconds to wait between "
+                       "cleaning of expired data"),
+            cfg.IntOpt('processing_replicas',
+                       default=3,
+                       min=1,
+                       help="Number of workers that share a task. A higher "
+                       "value may improve worker utilization but may also "
+                       "increase load on coordination backend. Value is "
+                       "capped by number of workers globally."),
+        )),
+        ("api", (
+            cfg.StrOpt('paste_config',
+                       default="api-paste.ini",
+                       help='Path to API Paste configuration.'),
+            cfg.StrOpt('auth_mode',
+                       default="basic",
+                       choices=list(map(operator.attrgetter("name"),
+                                    pkg_resources.iter_entry_points(
+                                        "gnocchi.rest.auth_helper"))),
+                       help='Authentication mode to use.'),
+            cfg.IntOpt('max_limit',
+                       default=1000,
+                       required=True,
+                       help=('The maximum number of items returned in a '
+                             'single response from a collection resource')),
+            cfg.IntOpt('operation_timeout',
+                       deprecated_name="refresh_timeout",
+                       default=10, min=0,
+                       help='Number of seconds before timeout when attempting '
+                            'to do some operations.'),
+        ) + API_OPTS + gnocchi.rest.http_proxy_to_wsgi.OPTS,
+        ),
+        ("storage", _STORAGE_OPTS),
+        ("incoming", _INCOMING_OPTS),
+        ("statsd", (
+            cfg.HostAddressOpt('host',
+                               default='0.0.0.0',
+                               help='The listen IP for statsd'),
+            cfg.PortOpt('port',
+                        default=8125,
+                        help='The port for statsd'),
+            cfg.Opt(
+                'resource_id',
+                type=uuid.UUID,
+                help='Resource UUID to use to identify statsd in Gnocchi'),
+            cfg.StrOpt(
+                'creator',
+                help='Creator value to use to identify statsd in Gnocchi'),
+            cfg.StrOpt(
+                'archive_policy_name',
+                help='Archive policy name to use when creating metrics'),
+            cfg.FloatOpt(
+                'flush_delay',
+                default=10,
+                help='Delay between flushes'),
+        )),
+        ("amqp1d", (
+            cfg.StrOpt('url',
+                       default='localhost:5672/u/collectd/telemetry',
+                       help='AMQP 1.0 URL to listen to'),
+            cfg.StrOpt('data_source',
+                       default='collectd',
+                       choices=['collectd'],
+                       help='Data source for amqp1d'),
+            cfg.StrOpt('resource_type',
+                       default='collectd_amqp1d',
+                       help='Resource type name to use to identify metrics'),
+            cfg.StrOpt('creator',
+                       help='Creator value to use to identify amqp1d '
+                       'in Gnocchi'),
+            cfg.FloatOpt('flush_delay',
+                         default=10,
+                         help='Delay between flushes in seconds'),
+        )),
+        ("archive_policy", gnocchi.archive_policy.OPTS),
+    ]
+
+
+def set_defaults():
+    from oslo_middleware import cors
+    cfg.set_defaults(cors.CORS_OPTS,
+                     allow_headers=[
+                         'Authorization',
+                         'X-Auth-Token',
+                         'X-Subject-Token',
+                         'X-User-Id',
+                         'X-Domain-Id',
+                         'X-Project-Id',
+                         'X-Roles'])
diff --git a/gnocchi/resource_type.py b/gnocchi/resource_type.py
new file mode 100644
index 0000000000000000000000000000000000000000..9daec8e6ebecac99ae668ef51778ca52975937cc
--- /dev/null
+++ b/gnocchi/resource_type.py
@@ -0,0 +1,283 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import numbers
+import re
+import six
+import stevedore
+import voluptuous
+
+from gnocchi import utils
+
+
+INVALID_NAMES = [
+    "id", "type", "metrics",
+    "revision", "revision_start", "revision_end",
+    "started_at", "ended_at",
+    "user_id", "project_id",
+    "created_by_user_id", "created_by_project_id", "get_metric",
+    "creator",
+]
+
+VALID_CHARS = re.compile("[a-zA-Z0-9][a-zA-Z0-9_]*")
+
+
+class InvalidResourceAttribute(ValueError):
+    pass
+
+
+class InvalidResourceAttributeName(InvalidResourceAttribute):
+    """Error raised when the resource attribute name is invalid."""
+    def __init__(self, name):
+        super(InvalidResourceAttributeName, self).__init__(
+            "Resource attribute name %s is invalid" % str(name))
+        self.name = name
+
+
+class InvalidResourceAttributeValue(InvalidResourceAttribute):
+    """Error raised when the resource attribute min is greater than max"""
+    def __init__(self, min, max):
+        super(InvalidResourceAttributeValue, self).__init__(
+            "Resource attribute value min (or min_length) %s must be less  "
+            "than or equal to max (or max_length) %s!" % (str(min), str(max)))
+        self.min = min
+        self.max = max
+
+
+class InvalidResourceAttributeOption(InvalidResourceAttribute):
+    """Error raised when the resource attribute name is invalid."""
+    def __init__(self, name, option, reason):
+        super(InvalidResourceAttributeOption, self).__init__(
+            "Option '%s' of resource attribute %s is invalid: %s" %
+            (option, str(name), str(reason)))
+        self.name = name
+        self.option = option
+        self.reason = reason
+
+
+# NOTE(sileht): This stores the behavior of some operations:
+#  * fill: set a default value on all existing resources of the type
+#
+# In the future we could, for example, allow changing the length of a
+# string attribute; if the new one is shorter, we could add an option
+# to define the behavior, like:
+#  * resize = trunc or reject
+OperationOptions = {
+    voluptuous.Optional('fill'): object
+}
+
+
+class CommonAttributeSchema(object):
+    meta_schema_ext = {}
+    schema_ext = None
+
+    def __init__(self, type, name, required, options=None):
+        if (len(name) > 63 or name in INVALID_NAMES
+                or not VALID_CHARS.match(name)):
+            raise InvalidResourceAttributeName(name)
+
+        self.name = name
+        self.required = required
+        self.fill = None
+
+        # options is set only when we update a resource type
+        if options is not None:
+            fill = options.get("fill")
+            if fill is None and required:
+                raise InvalidResourceAttributeOption(
+                    name, "fill", "must not be empty if required=True")
+            elif fill is not None:
+                # Ensure fill have the correct attribute type
+                try:
+                    self.fill = voluptuous.Schema(self.schema_ext)(fill)
+                except voluptuous.Error as e:
+                    raise InvalidResourceAttributeOption(name, "fill", e)
+
+    @classmethod
+    def meta_schema(cls, for_update=False):
+        d = {
+            voluptuous.Required('type'): cls.typename,
+            voluptuous.Required('required', default=True): bool
+        }
+        if for_update:
+            d[voluptuous.Required('options', default={})] = OperationOptions
+        if callable(cls.meta_schema_ext):
+            d.update(cls.meta_schema_ext())
+        else:
+            d.update(cls.meta_schema_ext)
+        return d
+
+    def schema(self):
+        if self.required:
+            return {self.name: self.schema_ext}
+        else:
+            return {voluptuous.Optional(self.name): self.schema_ext}
+
+    def jsonify(self):
+        return {"type": self.typename,
+                "required": self.required}
+
+
+class StringSchema(CommonAttributeSchema):
+    typename = "string"
+
+    def __init__(self, min_length, max_length, *args, **kwargs):
+        if min_length > max_length:
+            raise InvalidResourceAttributeValue(min_length, max_length)
+
+        self.min_length = min_length
+        self.max_length = max_length
+        super(StringSchema, self).__init__(*args, **kwargs)
+
+    meta_schema_ext = {
+        voluptuous.Required('min_length', default=0):
+        voluptuous.All(int, voluptuous.Range(min=0, max=255)),
+        voluptuous.Required('max_length', default=255):
+        voluptuous.All(int, voluptuous.Range(min=1, max=255))
+    }
+
+    @property
+    def schema_ext(self):
+        return voluptuous.All(six.text_type,
+                              voluptuous.Length(
+                                  min=self.min_length,
+                                  max=self.max_length))
+
+    def jsonify(self):
+        d = super(StringSchema, self).jsonify()
+        d.update({"max_length": self.max_length,
+                  "min_length": self.min_length})
+        return d
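+
+    # Example (illustrative): StringSchema(0, 8, type="string", name="host",
+    # required=True).schema() returns {"host": <validator>}; voluptuous then
+    # rejects values longer than 8 characters.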
+
+
+class UUIDSchema(CommonAttributeSchema):
+    typename = "uuid"
+
+    @staticmethod
+    def schema_ext(value):
+        try:
+            return utils.UUID(value)
+        except ValueError as e:
+            raise voluptuous.Invalid(e)
+
+
+class DatetimeSchema(CommonAttributeSchema):
+    typename = "datetime"
+
+    @staticmethod
+    def schema_ext(value):
+        try:
+            return utils.to_datetime(value)
+        except ValueError as e:
+            raise voluptuous.Invalid(e)
+
+
+class NumberSchema(CommonAttributeSchema):
+    typename = "number"
+
+    def __init__(self, min, max, *args, **kwargs):
+        if max is not None and min is not None and min > max:
+            raise InvalidResourceAttributeValue(min, max)
+        self.min = min
+        self.max = max
+        super(NumberSchema, self).__init__(*args, **kwargs)
+
+    meta_schema_ext = {
+        voluptuous.Required('min', default=None): voluptuous.Any(
+            None, numbers.Real),
+        voluptuous.Required('max', default=None): voluptuous.Any(
+            None, numbers.Real)
+    }
+
+    @property
+    def schema_ext(self):
+        return voluptuous.All(numbers.Real,
+                              voluptuous.Range(min=self.min,
+                                               max=self.max))
+
+    def jsonify(self):
+        d = super(NumberSchema, self).jsonify()
+        d.update({"min": self.min, "max": self.max})
+        return d
+
+
+class BoolSchema(CommonAttributeSchema):
+    typename = "bool"
+    schema_ext = bool
+
+
+class ResourceTypeAttributes(list):
+    def jsonify(self):
+        d = {}
+        for attr in self:
+            d[attr.name] = attr.jsonify()
+        return d
+
+
+class ResourceTypeSchemaManager(stevedore.ExtensionManager):
+    def __init__(self, *args, **kwargs):
+        super(ResourceTypeSchemaManager, self).__init__(*args, **kwargs)
+        type_schemas = tuple([ext.plugin.meta_schema()
+                              for ext in self.extensions])
+        self._schema = voluptuous.Schema({
+            "name": six.text_type,
+            voluptuous.Required("attributes", default={}): {
+                six.text_type: voluptuous.Any(*tuple(type_schemas))
+            }
+        })
+
+        type_schemas = tuple([ext.plugin.meta_schema(for_update=True)
+                              for ext in self.extensions])
+        self._schema_for_update = voluptuous.Schema({
+            "name": six.text_type,
+            voluptuous.Required("attributes", default={}): {
+                six.text_type: voluptuous.Any(*tuple(type_schemas))
+            }
+        })
+
+    def __call__(self, definition):
+        return self._schema(definition)
+
+    def for_update(self, definition):
+        return self._schema_for_update(definition)
+
+    def attributes_from_dict(self, attributes):
+        return ResourceTypeAttributes(
+            self[attr["type"]].plugin(name=name, **attr)
+            for name, attr in attributes.items())
+
+    def resource_type_from_dict(self, name, attributes, state):
+        return ResourceType(name, self.attributes_from_dict(attributes), state)
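+
+    # Example (illustrative), using the entry-point namespace of the
+    # sqlalchemy indexer:
+    #
+    #   mgr = ResourceTypeSchemaManager(
+    #       "gnocchi.indexer.sqlalchemy.resource_type_attribute")
+    #   rt = mgr.resource_type_from_dict(
+    #       "instance",
+    #       {"host": {"type": "string", "min_length": 0,
+    #                 "max_length": 255, "required": True}},
+    #       "creating")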
+
+
+class ResourceType(object):
+    def __init__(self, name, attributes, state):
+        self.name = name
+        self.attributes = attributes
+        self.state = state
+
+    @property
+    def schema(self):
+        schema = {}
+        for attr in self.attributes:
+            schema.update(attr.schema())
+        return schema
+
+    def __eq__(self, other):
+        return self.name == other.name
+
+    def jsonify(self):
+        return {"name": self.name,
+                "attributes": self.attributes.jsonify(),
+                "state": self.state}
diff --git a/gnocchi/rest/__init__.py b/gnocchi/rest/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/rest/aggregates/__init__.py b/gnocchi/rest/aggregates/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/rest/aggregates/api.py b/gnocchi/rest/aggregates/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..ca6acfb6a2e7f0dc1f837504b616f2f90712bad6
--- /dev/null
+++ b/gnocchi/rest/aggregates/api.py
@@ -0,0 +1,339 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import fnmatch
+import itertools
+
+import pecan
+from pecan import rest
+import pyparsing
+import six
+import voluptuous
+
+from gnocchi import indexer
+from gnocchi.rest.aggregates import exceptions
+from gnocchi.rest.aggregates import operations as agg_operations
+from gnocchi.rest.aggregates import processor
+from gnocchi.rest import api
+from gnocchi import storage
+from gnocchi import utils
+
+
+def _OperationsSubNodeSchema(v):
+    return OperationsSubNodeSchema(v)
+
+
+def MetricSchema(v):
+    """metric keyword schema
+
+    It could be:
+
+    ["metric", "metric-ref", "aggregation"]
+
+    or
+
+    ["metric, ["metric-ref", "aggregation"], ["metric-ref", "aggregation"]]
+    """
+    if not isinstance(v, (list, tuple)):
+        raise voluptuous.Invalid("Expected a tuple/list, got a %s" % type(v))
+    elif not v:
+        raise voluptuous.Invalid("Operation must not be empty")
+    elif len(v) < 2:
+        raise voluptuous.Invalid("Operation need at least one argument")
+    elif v[0] != u"metric":
+        # NOTE(sileht): this error message doesn't look related to "metric",
+        # but because this is the last schema validated by voluptuous, there
+        # is a good chance (voluptuous.Any is not predictable) that this
+        # message gets printed even when another operation is invalid.
+        raise voluptuous.Invalid("'%s' operation invalid" % v[0])
+
+    return [u"metric"] + voluptuous.Schema(voluptuous.Any(
+        voluptuous.ExactSequence([six.text_type, six.text_type]),
+        voluptuous.All(
+            voluptuous.Length(min=1),
+            [voluptuous.ExactSequence([six.text_type, six.text_type])],
+        )), required=True)(v[1:])
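+
+# Example (illustrative), with hypothetical metric names; both forms
+# validate:
+#   ["metric", "cpu.util", "mean"]
+#   ["metric", ["cpu.util", "mean"], ["memory.usage", "max"]]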
+
+
+OperationsSchemaBase = [
+    MetricSchema,
+    voluptuous.ExactSequence(
+        [voluptuous.Any(*list(
+            agg_operations.ternary_operators.keys())),
+         _OperationsSubNodeSchema, _OperationsSubNodeSchema,
+         _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [voluptuous.Any(*list(
+            agg_operations.binary_operators.keys())),
+         _OperationsSubNodeSchema, _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [voluptuous.Any(*list(
+            agg_operations.ternary_operators.keys())),
+         _OperationsSubNodeSchema, _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [voluptuous.Any(*list(
+            agg_operations.unary_operators.keys())),
+         _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [voluptuous.Any(*list(
+            agg_operations.unary_operators_with_timestamps.keys())),
+         _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [u"aggregate",
+         voluptuous.Any(*list(agg_operations.AGG_MAP.keys())),
+         _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [u"resample",
+         voluptuous.Any(*list(agg_operations.AGG_MAP.keys())),
+         utils.to_timespan, _OperationsSubNodeSchema]
+    ),
+    voluptuous.ExactSequence(
+        [u"rolling",
+         voluptuous.Any(*list(agg_operations.AGG_MAP.keys())),
+         voluptuous.All(
+             voluptuous.Coerce(int),
+             voluptuous.Range(min=1),
+         ),
+         _OperationsSubNodeSchema]
+    )
+]
+
+
+OperationsSubNodeSchema = voluptuous.Schema(voluptuous.Any(*tuple(
+    OperationsSchemaBase + [voluptuous.Coerce(float)]
+)), required=True)
+
+
+def OperationsSchema(v):
+    if isinstance(v, six.text_type):
+        try:
+            v = pyparsing.OneOrMore(
+                pyparsing.nestedExpr()).parseString(v).asList()[0]
+        except pyparsing.ParseException as e:
+            api.abort(400, {"cause": "Invalid operations",
+                            "reason": "Fail to parse the operations string",
+                            "detail": six.text_type(e)})
+    return voluptuous.Schema(voluptuous.Any(*OperationsSchemaBase),
+                             required=True)(v)
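+
+# Example (illustrative): the string form
+#   "(aggregate mean (metric cpu.util mean))"
+# is parsed by pyparsing into the nested list
+#   ["aggregate", "mean", ["metric", "cpu.util", "mean"]]
+# before being validated against OperationsSchemaBase.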
+
+
+class ReferencesList(list):
+    "A very simplified OrderedSet with list interface"
+
+    def append(self, ref):
+        if ref not in self:
+            super(ReferencesList, self).append(ref)
+
+    def extend(self, refs):
+        for ref in refs:
+            self.append(ref)
+
+
+def extract_references(nodes):
+    references = ReferencesList()
+    if nodes[0] == "metric":
+        if isinstance(nodes[1], list):
+            for subnodes in nodes[1:]:
+                references.append(tuple(subnodes))
+        else:
+            references.append(tuple(nodes[1:]))
+    else:
+        for subnodes in nodes[1:]:
+            if isinstance(subnodes, list):
+                references.extend(extract_references(subnodes))
+    return references
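+
+# Example (illustrative):
+#   extract_references(["/", ["metric", "a", "mean"], ["metric", "b", "max"]])
+#   -> [("a", "mean"), ("b", "max")]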
+
+
+def get_measures_or_abort(references, operations, start,
+                          stop, granularity, needed_overlap, fill):
+    try:
+        return processor.get_measures(
+            pecan.request.storage,
+            references,
+            operations,
+            start, stop,
+            granularity, needed_overlap, fill)
+    except exceptions.UnAggregableTimeseries as e:
+        api.abort(400, e)
+    # TODO(sileht): We currently get only one metric for these exceptions,
+    # but we could improve the processor to return all missing metrics at
+    # once, so we return a list for the future
+    except storage.MetricDoesNotExist as e:
+        api.abort(404, {"cause": "Unknown metrics",
+                        "detail": [str(e.metric.id)]})
+    except storage.AggregationDoesNotExist as e:
+        api.abort(404, {"cause": "Metrics with unknown aggregation",
+                        "detail": [(str(e.metric.id), e.method)]})
+
+
+def ResourceTypeSchema(resource_type):
+    try:
+        pecan.request.indexer.get_resource_type(resource_type)
+    except indexer.NoSuchResourceType as e:
+        api.abort(400, e)
+    return resource_type
+
+
+class AggregatesController(rest.RestController):
+
+    FetchSchema = voluptuous.Any({
+        "operations": OperationsSchema
+    }, {
+        "operations": OperationsSchema,
+        "resource_type": ResourceTypeSchema,
+        "search": voluptuous.Any(api.ResourceSearchSchema,
+                                 api.QueryStringSearchAttrFilter.parse),
+    })
+
+    @pecan.expose("json")
+    def post(self, start=None, stop=None, granularity=None,
+             needed_overlap=None, fill=None, groupby=None, **kwargs):
+        details = api.get_bool_param('details', kwargs)
+
+        if fill is None and needed_overlap is None:
+            fill = "dropna"
+        start, stop, granularity, needed_overlap, fill = api.validate_qs(
+            start, stop, granularity, needed_overlap, fill)
+
+        body = api.deserialize_and_validate(self.FetchSchema)
+
+        references = extract_references(body["operations"])
+        if not references:
+            api.abort(400, {"cause": "Operations is invalid",
+                            "reason": "At least one 'metric' is required",
+                            "detail": body["operations"]})
+
+        if "resource_type" in body:
+            attr_filter = body["search"]
+            policy_filter = (
+                pecan.request.auth_helper.get_resource_policy_filter(
+                    pecan.request, "search resource", body["resource_type"]))
+            if policy_filter:
+                if attr_filter:
+                    attr_filter = {"and": [
+                        policy_filter,
+                        attr_filter
+                    ]}
+                else:
+                    attr_filter = policy_filter
+
+            groupby = sorted(set(api.arg_to_list(groupby)))
+            sorts = groupby if groupby else api.RESOURCE_DEFAULT_PAGINATION
+            try:
+                resources = pecan.request.indexer.list_resources(
+                    body["resource_type"],
+                    attribute_filter=attr_filter,
+                    sorts=sorts)
+            except indexer.IndexerException as e:
+                api.abort(400, six.text_type(e))
+            if not groupby:
+                try:
+                    return self._get_measures_by_name(
+                        resources, references, body["operations"], start, stop,
+                        granularity, needed_overlap, fill, details=details)
+                except indexer.NoSuchMetric as e:
+                    api.abort(400, e)
+
+            def grouper(r):
+                return tuple((attr, r[attr]) for attr in groupby)
+
+            results = []
+            for key, resources in itertools.groupby(resources, grouper):
+                try:
+                    results.append({
+                        "group": dict(key),
+                        "measures": self._get_measures_by_name(
+                            resources, references, body["operations"],
+                            start, stop, granularity, needed_overlap, fill,
+                            details=details)
+                    })
+                except indexer.NoSuchMetric:
+                    pass
+            if not results:
+                api.abort(
+                    400,
+                    indexer.NoSuchMetric(set((m for (m, a) in references))))
+            return results
+
+        else:
+            try:
+                metric_ids = set(six.text_type(utils.UUID(m))
+                                 for (m, a) in references)
+            except ValueError as e:
+                api.abort(400, {"cause": "Invalid metric references",
+                                "reason": six.text_type(e),
+                                "detail": references})
+
+            metrics = pecan.request.indexer.list_metrics(
+                attribute_filter={"in": {"id": metric_ids}})
+            missing_metric_ids = (set(metric_ids)
+                                  - set(six.text_type(m.id) for m in metrics))
+            if missing_metric_ids:
+                api.abort(404, {"cause": "Unknown metrics",
+                                "reason": "Provided metrics don't exists",
+                                "detail": missing_metric_ids})
+
+            number_of_metrics = len(metrics)
+            if number_of_metrics == 0:
+                return []
+
+            for metric in metrics:
+                api.enforce("get metric", metric)
+
+            metrics_by_ids = dict((six.text_type(m.id), m) for m in metrics)
+            references = [processor.MetricReference(metrics_by_ids[m], a)
+                          for (m, a) in references]
+
+            response = {
+                "measures": get_measures_or_abort(
+                    references, body["operations"],
+                    start, stop, granularity, needed_overlap, fill)
+            }
+            if details:
+                response["references"] = metrics
+
+            return response
+
+    @staticmethod
+    def _get_measures_by_name(resources, metric_wildcards, operations,
+                              start, stop, granularity, needed_overlap, fill,
+                              details):
+
+        references = []
+        for r in resources:
+            references.extend([
+                processor.MetricReference(m, agg, r, wildcard)
+                for wildcard, agg in metric_wildcards
+                for m in r.metrics if fnmatch.fnmatch(m.name, wildcard)
+            ])
+
+        if not references:
+            raise indexer.NoSuchMetric(set((m for (m, a) in metric_wildcards)))
+
+        response = {
+            "measures": get_measures_or_abort(
+                references, operations, start, stop, granularity,
+                needed_overlap, fill)
+        }
+        if details:
+            response["references"] = set((r.resource for r in references))
+        return response
diff --git a/gnocchi/rest/aggregates/exceptions.py b/gnocchi/rest/aggregates/exceptions.py
new file mode 100644
index 0000000000000000000000000000000000000000..00387d7fedeec8752cf3c44fac37a8876f69624f
--- /dev/null
+++ b/gnocchi/rest/aggregates/exceptions.py
@@ -0,0 +1,30 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+
+class UnAggregableTimeseries(Exception):
+    """Error raised when timeseries cannot be aggregated."""
+    def __init__(self, references, reason):
+        self.references = references
+        self.reason = reason
+        super(UnAggregableTimeseries, self).__init__(reason)
+
+    def jsonify(self):
+        return {
+            "cause": "Metrics can't being aggregated",
+            "reason": self.reason,
+            "detail": self.references
+        }
diff --git a/gnocchi/rest/aggregates/operations.py b/gnocchi/rest/aggregates/operations.py
new file mode 100644
index 0000000000000000000000000000000000000000..5a54a720fd17d07027cf48f3b84ef5a22d6138f9
--- /dev/null
+++ b/gnocchi/rest/aggregates/operations.py
@@ -0,0 +1,347 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+import numbers
+
+import numpy
+from numpy.lib.stride_tricks import as_strided
+
+from gnocchi import carbonara
+from gnocchi.rest.aggregates import exceptions
+
+
+AGG_MAP = {
+    'mean': numpy.nanmean,
+    'median': numpy.nanmedian,
+    'std': numpy.nanstd,
+    'min': numpy.nanmin,
+    'max': numpy.nanmax,
+    'sum': numpy.nansum,
+    'var': numpy.nanvar,
+    'count': lambda values, axis: numpy.count_nonzero(
+        ~numpy.isnan(values), axis=axis),
+}
+
+
+def rated_agg(agg):
+    def _inner_rated_agg(values, axis):
+        values = AGG_MAP[agg](values, axis)
+        values = numpy.diff(values)
+        return values
+
+    return _inner_rated_agg
+
+
+for agg in list(AGG_MAP):
+    AGG_MAP["rate:%s" % agg] = rated_agg(agg)
+
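+# A hypothetical sanity check of the derived "rate:" aggregations built
+# above (input values are made up): the base aggregation runs first, then
+# numpy.diff turns the result into per-step deltas.
+#     >>> values = numpy.array([[1., 2.], [3., 5.], [6., 9.]])
+#     >>> AGG_MAP["rate:sum"](values, axis=1)   # row sums are [3, 8, 15]
+#     array([5., 7.])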
+
+# TODO(sileht): expose all operators in capability API
+binary_operators = {
+    u"=": numpy.equal,
+    u"==": numpy.equal,
+    u"eq": numpy.equal,
+
+    u"<": numpy.less,
+    u"lt": numpy.less,
+
+    u">": numpy.greater,
+    u"gt": numpy.greater,
+
+    u"<=": numpy.less_equal,
+    u"≤": numpy.less_equal,
+    u"le": numpy.less_equal,
+
+    u">=": numpy.greater_equal,
+    u"≥": numpy.greater_equal,
+    u"ge": numpy.greater_equal,
+
+    u"!=": numpy.not_equal,
+    u"≠": numpy.not_equal,
+    u"ne": numpy.not_equal,
+
+    u"%": numpy.mod,
+    u"mod": numpy.mod,
+
+    u"+": numpy.add,
+    u"add": numpy.add,
+
+    u"-": numpy.subtract,
+    u"sub": numpy.subtract,
+
+    u"*": numpy.multiply,
+    u"×": numpy.multiply,
+    u"mul": numpy.multiply,
+
+    u"/": numpy.true_divide,
+    u"÷": numpy.true_divide,
+    u"div": numpy.true_divide,
+
+    u"**": numpy.power,
+    u"^": numpy.power,
+    u"pow": numpy.power,
+
+    u"clip_min": lambda array, value: numpy.clip(array, value, None),
+    u"clip_max": lambda array, value: numpy.clip(array, None, value),
+
+}
+
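+# A quick illustrative check of the lambda-based operators above
+# (hypothetical input): "clip_min" floors every element at the given value.
+#     >>> binary_operators[u"clip_min"](numpy.array([1, 5, 9]), 4)
+#     array([4, 5, 9])
+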
+ternary_operators = {
+    u"clip": numpy.clip,
+}
+
+# TODO(sileht): add numpy.around, but it takes a decimals argument to handle
+unary_operators = {
+    u"abs": numpy.absolute,
+    u"absolute": numpy.absolute,
+
+    u"neg": numpy.negative,
+    u"negative": numpy.negative,
+
+    u"cos": numpy.cos,
+    u"sin": numpy.sin,
+    u"tan": numpy.tan,
+    u"floor": numpy.floor,
+    u"ceil": numpy.ceil,
+}
+
+
+unary_operators_with_timestamps = {
+    u"rateofchange": lambda t, v: (t[1:], numpy.diff(v.T).T)
+}
+
+
+def handle_unary_operator(nodes, granularity, timestamps, initial_values,
+                          is_aggregated, references):
+    op = nodes[0]
+    granularity, timestamps, values, is_aggregated = evaluate(
+        nodes[1], granularity, timestamps, initial_values,
+        is_aggregated, references)
+
+    if op in unary_operators:
+        values = unary_operators[op](values)
+    else:
+        timestamps, values = unary_operators_with_timestamps[op](
+            timestamps, values)
+    return granularity, timestamps, values, is_aggregated
+
+
+def handle_binary_operator(nodes, granularity, timestamps,
+                           initial_values, is_aggregated, references):
+    op = nodes[0]
+    g1, t1, v1, is_a1 = evaluate(nodes[1], granularity, timestamps,
+                                 initial_values, is_aggregated, references)
+    g2, t2, v2, is_a2 = evaluate(nodes[2], granularity, timestamps,
+                                 initial_values, is_aggregated, references)
+
+    is_aggregated = is_a1 or is_a2
+    # We keep the computed timeseries
+    if isinstance(v1, numpy.ndarray) and isinstance(v2, numpy.ndarray):
+        if not numpy.array_equal(t1, t2) or g1 != g2:
+            raise exceptions.UnAggregableTimeseries(
+                references,
+                "Can't compute timeseries with different "
+                "granularity %s <> %s" % (nodes[1], nodes[2]))
+        timestamps = t1
+        granularity = g1
+        is_aggregated = True
+
+    elif isinstance(v2, numpy.ndarray):
+        timestamps = t2
+        granularity = g2
+    else:
+        timestamps = t1
+        granularity = g1
+
+    values = binary_operators[op](v1, v2)
+    return granularity, timestamps, values, is_aggregated
+
+
+def handle_ternary_operator(nodes, granularity, timestamps,
+                            initial_values, is_aggregated, references):
+    op = nodes[0]
+    g1, t1, v1, is_a1 = evaluate(nodes[1], granularity, timestamps,
+                                 initial_values, is_aggregated, references)
+    g2, t2, v2, is_a2 = evaluate(nodes[2], granularity, timestamps,
+                                 initial_values, is_aggregated, references)
+    if len(nodes) > 3:
+        g3, t3, v3, is_a3 = evaluate(nodes[3], granularity, timestamps,
+                                     initial_values, is_aggregated, references)
+    else:
+        g3, t3, v3, is_a3 = g2, t2, None, is_a2
+
+    is_aggregated = is_a1 or is_a2 or is_a3
+    if isinstance(v1, numpy.ndarray) and isinstance(v2, numpy.ndarray)\
+            and isinstance(v3, numpy.ndarray):
+        if not numpy.array_equal(t1, t2) or g1 != g2:
+            if not numpy.array_equal(t2, t3) or g2 != g3:
+                raise exceptions.UnAggregableTimeseries(
+                    references,
+                    "Can't compute timeseries with different "
+                    "granularity %s <> %s <> %s"
+                    % (nodes[1], nodes[2], nodes[3]))
+        timestamps = t1
+        granularity = g1
+        is_aggregated = True
+
+    elif isinstance(v2, numpy.ndarray):
+        timestamps = t2
+        granularity = g2
+    elif isinstance(v3, numpy.ndarray):
+        timestamps = t3
+        granularity = g3
+    else:
+        timestamps = t1
+        granularity = g1
+
+    values = ternary_operators[op](v1, v2, v3)
+    return granularity, timestamps, values, is_aggregated
+
+
+def handle_aggregate(agg, granularity, timestamps, values, is_aggregated,
+                     references):
+    values = numpy.array([AGG_MAP[agg](values, axis=1)]).T
+    if values.shape[1] != 1:
+        raise RuntimeError("Unexpected resulting aggregated array shape: %s" %
+                           values)
+    if agg.startswith("rate:"):
+        timestamps = timestamps[1:]
+    return (granularity, timestamps, values, True)
+
+
+def handle_rolling(agg, granularity, timestamps, values, is_aggregated,
+                   references, window):
+    if window > len(values):
+        raise exceptions.UnAggregableTimeseries(
+            references,
+            "Rolling window '%d' is greater than serie length '%d'" %
+            (window, len(values))
+        )
+
+    timestamps = timestamps[window - 1:]
+    values = values.T
+    # rigtorp.se/2011/01/01/rolling-statistics-numpy.html
+    shape = values.shape[:-1] + (values.shape[-1] - window + 1, window)
+    strides = values.strides + (values.strides[-1],)
+    new_values = AGG_MAP[agg](as_strided(values, shape=shape, strides=strides),
+                              axis=-1)
+    if agg.startswith("rate:"):
+        timestamps = timestamps[1:]
+    return granularity, timestamps, new_values.T, is_aggregated
+
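+# Illustrative sketch of the rolling computation above (made-up input):
+# with agg="mean" and window=2, a single series [1, 2, 3, 4] is viewed as
+# [[1, 2], [2, 3], [3, 4]], producing [1.5, 2.5, 3.5] aligned on
+# timestamps[1:].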
+
+def handle_resample(agg, granularity, timestamps, values, is_aggregated,
+                    references, sampling):
+    # TODO(sileht): make a more optimised version that
+    # compute the data across the whole matrix
+    new_values = None
+    result_timestamps = timestamps
+    for ts in values.T:
+        ts = carbonara.AggregatedTimeSerie.from_data(
+            carbonara.Aggregation(agg, None, None),
+            timestamps, ts)
+        ts = ts.resample(sampling)
+        result_timestamps = ts["timestamps"]
+        if new_values is None:
+            new_values = numpy.array([ts["values"]])
+        else:
+            new_values = numpy.concatenate((new_values, [ts["values"]]))
+    return sampling, result_timestamps, new_values.T, is_aggregated
+
+
+def handle_aggregation_operator(nodes, granularity, timestamps, initial_values,
+                                is_aggregated, references):
+    op = aggregation_operators[nodes[0]]
+    agg = nodes[1]
+    subnodes = nodes[-1]
+    args = nodes[2:-1]
+    granularity, timestamps, values, is_aggregated = evaluate(
+        subnodes, granularity, timestamps, initial_values,
+        is_aggregated, references)
+    return op(agg, granularity, timestamps, values, is_aggregated,
+              references, *args)
+
+
+aggregation_operators = {
+    u"aggregate": handle_aggregate,
+    u"rolling": handle_rolling,
+    u"resample": handle_resample,
+}
+
+
+def sanity_check(method):
+    # NOTE(sileht): These are important checks: the caller may have used
+    # zip() and built an incomplete timeseries without noticing, and the
+    # result would be unexpected.
+
+    def inner(*args, **kwargs):
+        granularity, timestamps, values, is_aggregated = method(
+            *args, **kwargs)
+
+        t_len = len(timestamps)
+        if t_len > 2 and not ((timestamps[1] - timestamps[0]) /
+                              granularity).is_integer():
+            # NOTE(sileht): numpy.mod is not possible with timedelta64, and
+            # we don't really care about the remainder value anyway; we just
+            # check there is no remainder by dividing and checking that the
+            # result is an integer.
+            raise RuntimeError("timestamps and granularity don't match: "
+                               "%s vs %s" % (timestamps[1] - timestamps[0],
+                                             granularity))
+
+        elif isinstance(values, numpy.ndarray) and t_len != len(values):
+            raise RuntimeError("timestamps and values length are different: "
+                               "%s vs %s" % (t_len, len(values)))
+
+        return granularity, timestamps, values, is_aggregated
+    return inner
+
+
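+# An "operations" node tree, as evaluated below, is a nested list; a
+# hypothetical example (metric names are made up):
+#     ["*", 4, ["aggregate", "mean",
+#               ["metric", ["cpu", "mean"], ["mem", "mean"]]]]
+# Plain numbers evaluate to scalars, "metric" nodes select the matching
+# columns of initial_values, and every other head is looked up in the
+# operator tables above.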
+@sanity_check
+def evaluate(nodes, granularity, timestamps, initial_values, is_aggregated,
+             references):
+    if isinstance(nodes, numbers.Number):
+        return granularity, timestamps, nodes, is_aggregated
+    elif nodes[0] in aggregation_operators:
+        return handle_aggregation_operator(nodes, granularity, timestamps,
+                                           initial_values, is_aggregated,
+                                           references)
+    elif nodes[0] in ternary_operators:
+        return handle_ternary_operator(nodes, granularity, timestamps,
+                                       initial_values, is_aggregated,
+                                       references)
+
+    elif nodes[0] in binary_operators:
+        return handle_binary_operator(nodes, granularity, timestamps,
+                                      initial_values, is_aggregated,
+                                      references)
+
+    elif (nodes[0] in unary_operators or
+          nodes[0] in unary_operators_with_timestamps):
+        return handle_unary_operator(nodes, granularity, timestamps,
+                                     initial_values, is_aggregated,
+                                     references)
+    elif nodes[0] == "metric":
+        if isinstance(nodes[1], list):
+            predicat = lambda r: r in nodes[1:]
+        else:
+            predicat = lambda r: r == nodes[1:]
+        indexes = [i for i, r in enumerate(references) if predicat(r)]
+        return (granularity, timestamps, initial_values.T[indexes].T,
+                is_aggregated)
+
+    else:
+        raise RuntimeError("Operation node tree is malformed: %s" % nodes)
diff --git a/gnocchi/rest/aggregates/processor.py b/gnocchi/rest/aggregates/processor.py
new file mode 100644
index 0000000000000000000000000000000000000000..9bf3957e80aae0bbb60710cbbaa32b06546c1f45
--- /dev/null
+++ b/gnocchi/rest/aggregates/processor.py
@@ -0,0 +1,239 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2017 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Timeseries cross-aggregation."""
+import collections
+
+import daiquiri
+import numpy
+import six
+
+from gnocchi import carbonara
+from gnocchi.rest.aggregates import exceptions
+from gnocchi.rest.aggregates import operations as agg_operations
+from gnocchi import storage as gnocchi_storage
+from gnocchi import utils
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class MetricReference(object):
+    def __init__(self, metric, aggregation, resource=None, wildcard=None):
+        self.metric = metric
+        self.aggregation = aggregation
+        self.resource = resource
+        self.timeseries = {}
+
+        if self.resource is None:
+            self.name = str(self.metric.id)
+        else:
+            self.name = self.metric.name
+
+        self.lookup_key = [wildcard or self.name, self.aggregation]
+
+    def __eq__(self, other):
+        return (self.metric == other.metric and
+                self.resource == other.resource and
+                self.aggregation == other.aggregation)
+
+
+def _get_measures_timeserie(storage, ref, granularity, *args, **kwargs):
+    agg = ref.metric.archive_policy.get_aggregation(
+        ref.aggregation, granularity)
+    try:
+        data = storage.get_aggregated_measures(
+            {ref.metric: [agg]},
+            *args, **kwargs)[ref.metric][agg]
+    except gnocchi_storage.MetricDoesNotExist:
+        data = carbonara.AggregatedTimeSerie(
+            carbonara.Aggregation(ref.aggregation, granularity, None))
+    return (ref, data)
+
+
+def get_measures(storage, references, operations,
+                 from_timestamp=None, to_timestamp=None,
+                 granularities=None, needed_overlap=100.0,
+                 fill=None):
+    """Get aggregated measures of multiple entities.
+
+    :param storage: The storage driver.
+    :param references: The list of MetricReference to aggregate.
+    :param operations: The aggregation operations to apply.
+    :param from_timestamp: The timestamp to get the measures from.
+    :param to_timestamp: The timestamp to get the measures to.
+    :param granularities: The granularities to retrieve.
+    :param needed_overlap: The percentage of timestamps that must overlap
+                           between the series.
+    :param fill: The value to use to fill in missing data in series.
+    """
+
+    if granularities is None:
+        all_granularities = (
+            definition.granularity
+            for ref in references
+            for definition in ref.metric.archive_policy.definition
+        )
+        # granularities_in_common
+        granularities = [
+            g
+            for g, occurrence in six.iteritems(
+                collections.Counter(all_granularities))
+            if occurrence == len(references)
+        ]
+
+        if not granularities:
+            raise exceptions.UnAggregableTimeseries(
+                list((ref.name, ref.aggregation)
+                     for ref in references),
+                'No granularity match')
+
+    references_with_missing_granularity = []
+    for ref in references:
+        if (ref.aggregation not in
+                ref.metric.archive_policy.aggregation_methods):
+            raise gnocchi_storage.AggregationDoesNotExist(
+                ref.metric, ref.aggregation,
+                # Use the first granularity, that should be good enough since
+                # they are all missing anyway
+                ref.metric.archive_policy.definition[0].granularity)
+
+        available_granularities = [
+            d.granularity
+            for d in ref.metric.archive_policy.definition
+        ]
+        for g in granularities:
+            if g not in available_granularities:
+                references_with_missing_granularity.append(
+                    (ref.name, ref.aggregation, g))
+                break
+
+    if references_with_missing_granularity:
+        raise exceptions.UnAggregableTimeseries(
+            references_with_missing_granularity,
+            "Granularities are missing")
+
+    tss = utils.parallel_map(_get_measures_timeserie,
+                             [(storage, ref, g, from_timestamp, to_timestamp)
+                              for ref in references
+                              for g in granularities])
+
+    return aggregated(tss, operations, from_timestamp, to_timestamp,
+                      needed_overlap, fill)
+
+
+def aggregated(refs_and_timeseries, operations, from_timestamp=None,
+               to_timestamp=None, needed_percent_of_overlap=100.0, fill=None):
+
+    series = collections.defaultdict(list)
+    references = collections.defaultdict(list)
+    lookup_keys = collections.defaultdict(list)
+    for (ref, timeserie) in refs_and_timeseries:
+        from_ = (None if from_timestamp is None else
+                 carbonara.round_timestamp(
+                     from_timestamp, timeserie.aggregation.granularity))
+        references[timeserie.aggregation.granularity].append(ref)
+        lookup_keys[timeserie.aggregation.granularity].append(ref.lookup_key)
+        series[timeserie.aggregation.granularity].append(
+            timeserie[from_:to_timestamp])
+
+    is_aggregated = False
+    result = {}
+    for sampling in sorted(series, reverse=True):
+        combine = numpy.concatenate(series[sampling])
+        # np.unique sorts results for us
+        times, indices = numpy.unique(combine['timestamps'],
+                                      return_inverse=True)
+
+        # create nd-array (unique series x unique times) and fill
+        filler = (numpy.NaN if fill in [None, 'null', 'dropna']
+                  else fill)
+        val_grid = numpy.full((len(series[sampling]), len(times)), filler)
+        start = 0
+        for i, split in enumerate(series[sampling]):
+            size = len(split)
+            val_grid[i][indices[start:start + size]] = split['values']
+            start += size
+        values = val_grid.T
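+        # Illustrative example: two series sampled at [t0, t1] and [t1, t2]
+        # combine into times [t0, t1, t2] and, after the transposition, a
+        # (times x series) grid:
+        #     [[v0,  NaN],
+        #      [v1,  w1 ],
+        #      [NaN, w2 ]]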
+
+        if fill is None:
+            overlap = numpy.flatnonzero(~numpy.any(numpy.isnan(values),
+                                                   axis=1))
+            if overlap.size == 0 and needed_percent_of_overlap > 0:
+                raise exceptions.UnAggregableTimeseries(lookup_keys[sampling],
+                                                        'No overlap')
+            if times.size:
+                # if no boundary set, use first/last timestamp which overlap
+                if to_timestamp is None and overlap.size:
+                    times = times[:overlap[-1] + 1]
+                    values = values[:overlap[-1] + 1]
+                if from_timestamp is None and overlap.size:
+                    times = times[overlap[0]:]
+                    values = values[overlap[0]:]
+                percent_of_overlap = overlap.size * 100.0 / times.size
+                if percent_of_overlap < needed_percent_of_overlap:
+                    raise exceptions.UnAggregableTimeseries(
+                        lookup_keys[sampling],
+                        'Less than %f%% of datapoints overlap in this '
+                        'timespan (%.2f%%)' % (needed_percent_of_overlap,
+                                               percent_of_overlap))
+
+        granularity, times, values, is_aggregated = (
+            agg_operations.evaluate(operations, sampling, times, values,
+                                    False, lookup_keys[sampling]))
+
+        values = values.T
+        result[sampling] = (granularity, times, values, references[sampling])
+
+    if is_aggregated:
+        output = {"aggregated": []}
+        for sampling in sorted(result, reverse=True):
+            granularity, times, values, references = result[sampling]
+            if fill == "dropna":
+                pos = ~numpy.logical_or(numpy.isnan(values[0]),
+                                        numpy.isinf(values[0]))
+                v = values[0][pos]
+                t = times[pos]
+            else:
+                v = values[0]
+                t = times
+            g = [granularity] * len(t)
+            output["aggregated"].extend(six.moves.zip(t, g, v))
+        return output
+    else:
+        r_output = collections.defaultdict(
+            lambda: collections.defaultdict(
+                lambda: collections.defaultdict(list)))
+        m_output = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+        for sampling in sorted(result, reverse=True):
+            granularity, times, values, references = result[sampling]
+            for i, ref in enumerate(references):
+                if fill == "dropna":
+                    pos = ~numpy.logical_or(numpy.isnan(values[i]),
+                                            numpy.isinf(values[i]))
+                    v = values[i][pos]
+                    t = times[pos]
+                else:
+                    v = values[i]
+                    t = times
+                g = [granularity] * len(t)
+                measures = six.moves.zip(t, g, v)
+                if ref.resource is None:
+                    m_output[ref.name][ref.aggregation].extend(measures)
+                else:
+                    r_output[str(ref.resource.id)][
+                        ref.metric.name][ref.aggregation].extend(measures)
+        return r_output if r_output else m_output
diff --git a/gnocchi/rest/api-paste.ini b/gnocchi/rest/api-paste.ini
new file mode 100644
index 0000000000000000000000000000000000000000..aa40553b0ed86385b2f05f61683810ee89509066
--- /dev/null
+++ b/gnocchi/rest/api-paste.ini
@@ -0,0 +1,42 @@
+[composite:gnocchi+basic]
+use = egg:Paste#urlmap
+/ = gnocchiversions_pipeline
+/v1 = gnocchiv1+noauth
+/healthcheck = healthcheck
+
+[composite:gnocchi+keystone]
+use = egg:Paste#urlmap
+/ = gnocchiversions_pipeline
+/v1 = gnocchiv1+keystone
+/healthcheck = healthcheck
+
+[composite:gnocchi+remoteuser]
+use = egg:Paste#urlmap
+/ = gnocchiversions_pipeline
+/v1 = gnocchiv1+noauth
+/healthcheck = healthcheck
+
+[pipeline:gnocchiv1+noauth]
+pipeline = gnocchiv1
+
+[pipeline:gnocchiv1+keystone]
+pipeline = keystone_authtoken gnocchiv1
+
+[pipeline:gnocchiversions_pipeline]
+pipeline = gnocchiversions
+
+[app:gnocchiversions]
+paste.app_factory = gnocchi.rest.app:app_factory
+root = gnocchi.rest.api.VersionsController
+
+[app:gnocchiv1]
+paste.app_factory = gnocchi.rest.app:app_factory
+root = gnocchi.rest.api.V1Controller
+
+[filter:keystone_authtoken]
+use = egg:keystonemiddleware#auth_token
+oslo_config_project = gnocchi
+
+[app:healthcheck]
+use = egg:oslo.middleware#healthcheck
+oslo_config_project = gnocchi
diff --git a/gnocchi/rest/api.py b/gnocchi/rest/api.py
new file mode 100644
index 0000000000000000000000000000000000000000..23d1fc3c6f8352a2c96a2bddfd6c3c93978b9353
--- /dev/null
+++ b/gnocchi/rest/api.py
@@ -0,0 +1,2335 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2018 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+import functools
+import itertools
+import operator
+import uuid
+
+import jsonpatch
+import pecan
+from pecan import rest
+import pyparsing
+import six
+from six.moves.urllib import parse as urllib_parse
+import tenacity
+import tooz
+import voluptuous
+import werkzeug.http
+
+import gnocchi
+from gnocchi import archive_policy
+from gnocchi import calendar
+from gnocchi import chef
+from gnocchi.cli import metricd
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import json
+from gnocchi import resource_type
+from gnocchi.rest.aggregates import exceptions
+from gnocchi.rest.aggregates import processor
+from gnocchi import storage
+from gnocchi import utils
+
+try:
+    from gnocchi.rest.prometheus import remote_pb2
+    import snappy
+    PROMETHEUS_SUPPORTED = True
+except ImportError:
+    PROMETHEUS_SUPPORTED = False
+
+
+ATTRGETTER_GRANULARITY = operator.attrgetter("granularity")
+
+
+def arg_to_list(value):
+    if isinstance(value, list):
+        return value
+    elif value:
+        return [value]
+    return []
+
+
+def abort(status_code, detail=''):
+    """Like pecan.abort, but make sure detail is a string."""
+    if status_code == 404 and not detail:
+        raise RuntimeError("http code 404 must have 'detail' set")
+
+    if isinstance(detail, voluptuous.Invalid):
+        detail = {
+            'cause': 'Invalid input',
+            'reason': six.text_type(detail),
+            'detail': [six.text_type(path) for path in detail.path],
+        }
+    elif isinstance(detail, Exception):
+        detail = detail.jsonify()
+    return pecan.abort(status_code, detail)
+
+
+def flatten_dict_to_keypairs(d, separator=':'):
+    """Generator that produces sequence of keypairs for nested dictionaries.
+
+    :param d: dictionaries which may be nested
+    :param separator: symbol between names
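+
+    For example (illustrative)::
+
+        >>> list(flatten_dict_to_keypairs({"a": {"b": 1}, "c": 2}))
+        [('a:b', 1), ('c', 2)]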
+    """
+    for name, value in sorted(six.iteritems(d)):
+        if isinstance(value, dict):
+            for subname, subvalue in flatten_dict_to_keypairs(value,
+                                                              separator):
+                yield ('%s%s%s' % (name, separator, subname), subvalue)
+        else:
+            yield name, value
+
+
+def enforce(rule, target):
+    """Return the user and project the request should be limited to.
+
+    :param rule: The rule name
+    :param target: The target to enforce on.
+
+    """
+    creds = pecan.request.auth_helper.get_auth_info(pecan.request)
+
+    if not isinstance(target, dict):
+        if hasattr(target, "jsonify"):
+            target = target.jsonify()
+        else:
+            target = target.__dict__
+
+    # Flatten dict
+    target = dict(flatten_dict_to_keypairs(d=target, separator='.'))
+
+    if not pecan.request.policy_enforcer.enforce(rule, target, creds):
+        abort(403)
+
+
+def set_resp_location_hdr(location):
+    location = '%s%s' % (pecan.request.script_name, location)
+    # NOTE(sileht): according to PEP 3333, headers must be str in both py2
+    # and py3, even though str is not the same type in the two versions;
+    # see: http://legacy.python.org/dev/peps/pep-3333/#unicode-issues
+    if six.PY2 and isinstance(location, six.text_type):
+        location = location.encode('utf-8')
+    location = urllib_parse.quote(location)
+    pecan.response.headers['Location'] = location
+
+
+def set_resp_link_hdr(marker, *args):
+    # NOTE(sileht): This comes from rfc5988.
+    # Setting prev, last is too costly/complicated, so just set next for now.
+    options = {}
+    for arg in args:
+        options.update(arg)
+    if "sorts" in options:
+        options["sort"] = options["sorts"]
+        del options["sorts"]
+    options["marker"] = marker
+    # NOTE(sileht): To always have the same orders
+    options = sorted(options.items())
+    params = urllib_parse.urlencode(options, doseq=True)
+    pecan.response.headers.add("Link", '<%s?%s>; rel="next"' %
+                               (pecan.request.path_url, params))
+
+
+def deserialize(expected_content_types=None):
+    if expected_content_types is None:
+        expected_content_types = ("application/json", )
+
+    mime_type, options = werkzeug.http.parse_options_header(
+        pecan.request.headers.get('Content-Type'))
+    if mime_type not in expected_content_types:
+        abort(415)
+    try:
+        params = json.load(pecan.request.body_file)
+    except Exception as e:
+        abort(400, "Unable to decode body: " + six.text_type(e))
+    return params
+
+
+def validate(schema, data, required=True):
+    try:
+        return voluptuous.Schema(schema, required=required)(data)
+    except voluptuous.Invalid as e:
+        abort(400, e)
+
+
+def deserialize_and_validate(schema, required=True,
+                             expected_content_types=None):
+    return validate(schema,
+                    deserialize(expected_content_types=expected_content_types),
+                    required)
+
+
+def Timespan(value):
+    try:
+        return utils.to_timespan(value)
+    except ValueError as e:
+        raise voluptuous.Invalid(e)
+
+
+def get_bool_param(name, params, default='false'):
+    return strtobool(name, params.get(name, default))
+
+
+def strtobool(varname, v):
+    """Convert a string to a boolean."""
+    try:
+        return utils.strtobool(v)
+    except ValueError as e:
+        abort(400, "Unable to parse `%s': %s" % (varname, six.text_type(e)))
+
+
+RESOURCE_DEFAULT_PAGINATION = [u'revision_start:asc',
+                               u'started_at:asc']
+
+METRIC_DEFAULT_PAGINATION = [u'id:asc']
+
+
+def get_pagination_options(params, default):
+    try:
+        opts = voluptuous.Schema({
+            voluptuous.Required(
+                "limit", default=pecan.request.conf.api.max_limit):
+            voluptuous.All(voluptuous.Coerce(int),
+                           voluptuous.Range(min=1),
+                           voluptuous.Clamp(
+                               min=1, max=pecan.request.conf.api.max_limit)),
+            "marker": six.text_type,
+            voluptuous.Required("sort", default=default):
+            voluptuous.All(
+                voluptuous.Coerce(arg_to_list),
+                [six.text_type]),
+        }, extra=voluptuous.REMOVE_EXTRA)(params)
+    except voluptuous.Invalid as e:
+        abort(400, {"cause": "Argument value error",
+                    "reason": str(e)})
+    opts['sorts'] = opts['sort']
+    del opts['sort']
+    return opts
+
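+# Illustrative example for get_pagination_options: ?limit=10&sort=name:asc
+# yields, assuming the configured api.max_limit allows it:
+#     {"limit": 10, "sorts": ["name:asc"]}
+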
+
+ArchivePolicyDefinitionSchema = voluptuous.Schema(
+    voluptuous.All([{
+        "granularity": Timespan,
+        "points": voluptuous.All(
+            voluptuous.Coerce(int),
+            voluptuous.Range(min=1),
+        ),
+        "timespan": Timespan,
+    }], voluptuous.Length(min=1)),
+)
+
+
+class ArchivePolicyController(rest.RestController):
+    def __init__(self, archive_policy):
+        self.archive_policy = archive_policy
+
+    @pecan.expose('json')
+    def get(self):
+        ap = pecan.request.indexer.get_archive_policy(self.archive_policy)
+        if ap:
+            enforce("get archive policy", ap)
+            return ap
+        abort(404, six.text_type(
+            indexer.NoSuchArchivePolicy(self.archive_policy)))
+
+    @pecan.expose('json')
+    def patch(self):
+        ap = pecan.request.indexer.get_archive_policy(self.archive_policy)
+        if not ap:
+            abort(404, six.text_type(
+                indexer.NoSuchArchivePolicy(self.archive_policy)))
+        enforce("update archive policy", ap)
+
+        body = deserialize_and_validate(voluptuous.Schema({
+            voluptuous.Required("definition"): ArchivePolicyDefinitionSchema,
+        }))
+        # Validate the data
+        try:
+            ap_items = [archive_policy.ArchivePolicyItem(**item) for item in
+                        body['definition']]
+        except ValueError as e:
+            abort(400, six.text_type(e))
+
+        try:
+            return pecan.request.indexer.update_archive_policy(
+                self.archive_policy, ap_items)
+        except indexer.UnsupportedArchivePolicyChange as e:
+            abort(400, six.text_type(e))
+
+    @pecan.expose()
+    def delete(self):
+        # NOTE(jd) I don't think there's any point in fetching and passing the
+        # archive policy here, as the rule is probably checking the actual role
+        # of the user, not the content of the AP.
+        enforce("delete archive policy", {})
+        try:
+            pecan.request.indexer.delete_archive_policy(self.archive_policy)
+        except indexer.NoSuchArchivePolicy as e:
+            abort(404, six.text_type(e))
+        except indexer.ArchivePolicyInUse as e:
+            abort(400, six.text_type(e))
+
+
+class ArchivePoliciesController(rest.RestController):
+    @pecan.expose()
+    def _lookup(self, archive_policy, *remainder):
+        return ArchivePolicyController(archive_policy), remainder
+
+    @pecan.expose('json')
+    def post(self):
+        enforce("create archive policy", {})
+        # NOTE(jd): Initialize this one at run-time because we rely on conf
+        conf = pecan.request.conf
+        valid_agg_methods = list(
+            archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS_VALUES
+        )
+        ArchivePolicySchema = voluptuous.Schema({
+            voluptuous.Required("name"): six.text_type,
+            voluptuous.Required("back_window", default=0): voluptuous.All(
+                voluptuous.Coerce(int),
+                voluptuous.Range(min=0),
+            ),
+            voluptuous.Required(
+                "aggregation_methods",
+                default=list(conf.archive_policy.default_aggregation_methods)):
+            valid_agg_methods,
+            voluptuous.Required("definition"): ArchivePolicyDefinitionSchema,
+        })
+
+        body = deserialize_and_validate(ArchivePolicySchema)
+        # Validate the data
+        try:
+            ap = archive_policy.ArchivePolicy.from_dict(body)
+        except ValueError as e:
+            abort(400, six.text_type(e))
+        enforce("create archive policy", ap)
+        try:
+            ap = pecan.request.indexer.create_archive_policy(ap)
+        except indexer.ArchivePolicyAlreadyExists as e:
+            abort(409, six.text_type(e))
+
+        location = "/archive_policy/" + ap.name
+        set_resp_location_hdr(location)
+        pecan.response.status = 201
+        return ap
+
+    @pecan.expose('json')
+    def get_all(self):
+        enforce("list archive policy", {})
+        return pecan.request.indexer.list_archive_policies()
+
+
+class ArchivePolicyRulesController(rest.RestController):
+    @pecan.expose()
+    def _lookup(self, archive_policy_rule, *remainder):
+        apr = pecan.request.indexer.get_archive_policy_rule(
+            archive_policy_rule
+        )
+        if apr:
+            return ArchivePolicyRuleController(apr), remainder
+        abort(404, six.text_type(
+            indexer.NoSuchArchivePolicyRule(archive_policy_rule)))
+
+    @pecan.expose('json')
+    def post(self):
+        enforce("create archive policy rule", {})
+        ArchivePolicyRuleSchema = voluptuous.Schema({
+            voluptuous.Required("name"): six.text_type,
+            voluptuous.Required("metric_pattern"): six.text_type,
+            voluptuous.Required("archive_policy_name"): six.text_type,
+            })
+
+        body = deserialize_and_validate(ArchivePolicyRuleSchema)
+        enforce("create archive policy rule", body)
+        try:
+            ap = pecan.request.indexer.create_archive_policy_rule(
+                body['name'], body['metric_pattern'],
+                body['archive_policy_name']
+            )
+        except indexer.ArchivePolicyRuleAlreadyExists as e:
+            abort(409, six.text_type(e))
+        except indexer.NoSuchArchivePolicy as e:
+            abort(400, e)
+
+        location = "/archive_policy_rule/" + ap.name
+        set_resp_location_hdr(location)
+        pecan.response.status = 201
+        return ap
+
+    @pecan.expose('json')
+    def get_all(self):
+        enforce("list archive policy rule", {})
+        return pecan.request.indexer.list_archive_policy_rules()
+
+
+class ArchivePolicyRuleController(rest.RestController):
+    def __init__(self, archive_policy_rule):
+        self.archive_policy_rule = archive_policy_rule
+
+    @pecan.expose('json')
+    def get(self):
+        enforce("get archive policy rule", self.archive_policy_rule)
+        return self.archive_policy_rule
+
+    @pecan.expose('json')
+    def patch(self):
+        ArchivePolicyRuleSchema = voluptuous.Schema({
+            voluptuous.Required("name"): six.text_type,
+            })
+        body = deserialize_and_validate(ArchivePolicyRuleSchema)
+        enforce("update archive policy rule", {})
+        try:
+            return pecan.request.indexer.update_archive_policy_rule(
+                self.archive_policy_rule.name, body["name"])
+        except indexer.UnsupportedArchivePolicyRuleChange as e:
+            abort(400, six.text_type(e))
+
+    @pecan.expose()
+    def delete(self):
+        # NOTE(jd) I don't think there's any point in fetching and passing the
+        # archive policy rule here, as the rule is probably checking the actual
+        # role of the user, not the content of the AP rule.
+        enforce("delete archive policy rule", {})
+        try:
+            pecan.request.indexer.delete_archive_policy_rule(
+                self.archive_policy_rule.name
+            )
+        except indexer.NoSuchArchivePolicyRule as e:
+            abort(404, six.text_type(e))
+
+
+def MeasuresListSchema(measures):
+    try:
+        times = utils.to_timestamps([m['timestamp'] for m in measures])
+    except TypeError:
+        raise voluptuous.Invalid("unexpected measures format")
+    except ValueError as e:
+        raise voluptuous.Invalid("unexpected timestamp '%s'" % e)
+
+    try:
+        values = [float(i['value']) for i in measures]
+    except Exception:
+        raise voluptuous.Invalid("unexpected measures value")
+
+    return (incoming.Measure(t, v) for t, v in six.moves.zip(times, values))
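+
+
+# For reference, a measures payload accepted by this schema looks like
+# (illustrative values):
+#     [{"timestamp": "2017-01-01T00:00:00", "value": 4.2},
+#      {"timestamp": "2017-01-01T00:05:00", "value": 5.0}]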
+
+
+class MetricController(rest.RestController):
+    _custom_actions = {
+        'measures': ['POST', 'GET']
+    }
+
+    def __init__(self, metric):
+        self.metric = metric
+
+    def enforce_metric(self, rule):
+        enforce(rule, json.to_primitive(self.metric))
+
+    @pecan.expose('json')
+    def get_all(self):
+        self.enforce_metric("get metric")
+        return self.metric
+
+    @pecan.expose('json')
+    def post_measures(self):
+        self.enforce_metric("post measures")
+        measures = deserialize_and_validate(MeasuresListSchema)
+        if measures:
+            pecan.request.incoming.add_measures(self.metric.id, measures)
+        pecan.response.status = 202
+
+    @pecan.expose('json')
+    def get_measures(self, start=None, stop=None, aggregation='mean',
+                     granularity=None, resample=None, refresh=False,
+                     **param):
+        self.enforce_metric("get measures")
+
+        if resample:
+            if not granularity:
+                abort(400, 'A granularity must be specified to resample')
+            try:
+                resample = (resample if calendar.GROUPINGS.get(resample) else
+                            utils.to_timespan(resample))
+            except ValueError as e:
+                abort(400, six.text_type(e))
+
+        if granularity is None:
+            granularity = [d.granularity
+                           for d in self.metric.archive_policy.definition]
+            start, stop, _, _, _ = validate_qs(
+                start=start, stop=stop)
+        else:
+            start, stop, granularity, _, _ = validate_qs(
+                start=start, stop=stop, granularity=granularity)
+
+        if aggregation not in self.metric.archive_policy.aggregation_methods:
+            abort(404, {
+                "cause": "Aggregation method does not exist for this metric",
+                "detail": {
+                    "metric": self.metric.id,
+                    "aggregation_method": aggregation,
+                },
+            })
+
+        aggregations = []
+        for g in sorted(granularity, reverse=True):
+            agg = self.metric.archive_policy.get_aggregation(
+                aggregation, g)
+            if agg is None:
+                abort(404, six.text_type(
+                    storage.AggregationDoesNotExist(
+                        self.metric, aggregation, g)
+                ))
+            aggregations.append(agg)
+
+        if (strtobool("refresh", refresh) and
+                pecan.request.incoming.has_unprocessed(self.metric.id)):
+            try:
+                pecan.request.chef.refresh_metrics(
+                    [self.metric],
+                    pecan.request.conf.api.operation_timeout)
+            except chef.SackAlreadyLocked:
+                abort(503, 'Unable to refresh metric: %s. Metric is locked. '
+                      'Please try again.' % self.metric.id)
+        try:
+            results = pecan.request.storage.get_aggregated_measures(
+                {self.metric: aggregations},
+                start, stop, resample)[self.metric]
+            return [(timestamp, results[key].aggregation.granularity, value)
+                    for key in sorted(results.keys(),
+                                      reverse=True)
+                    for timestamp, value in results[key]]
+        except storage.AggregationDoesNotExist as e:
+            abort(404, six.text_type(e))
+        except storage.MetricDoesNotExist:
+            return []
+
+    @pecan.expose()
+    def delete(self):
+        self.enforce_metric("delete metric")
+        try:
+            pecan.request.indexer.delete_metric(self.metric.id)
+        except indexer.NoSuchMetric as e:
+            abort(404, six.text_type(e))
+
+
+class MetricsController(rest.RestController):
+
+    @pecan.expose()
+    def _lookup(self, id, *remainder):
+        try:
+            metric_id = uuid.UUID(id)
+        except ValueError:
+            abort(404, six.text_type(indexer.NoSuchMetric(id)))
+
+        # Load details for ACL
+        metrics = pecan.request.indexer.list_metrics(
+            attribute_filter={"=": {"id": metric_id}}, details=True)
+        if not metrics:
+            abort(404, six.text_type(indexer.NoSuchMetric(id)))
+        return MetricController(metrics[0]), remainder
+
+    # NOTE(jd) Define this method as if it were a voluptuous schema – it's
+    # just a smarter version of a voluptuous schema, no?
+    @staticmethod
+    def MetricSchema(definition):
+        creator = pecan.request.auth_helper.get_current_user(
+            pecan.request)
+
+        # First basic validation
+        schema = voluptuous.Schema({
+            "archive_policy_name": six.text_type,
+            "resource_id": functools.partial(ResourceID, creator=creator),
+            "name": six.text_type,
+            voluptuous.Optional("unit"):
+            voluptuous.All(six.text_type, voluptuous.Length(max=31)),
+        })
+        definition = schema(definition)
+        archive_policy_name = definition.get('archive_policy_name')
+
+        name = definition.get('name')
+        if name and '/' in name:
+            abort(400, "'/' is not supported in metric name")
+        if archive_policy_name is None:
+            try:
+                ap = pecan.request.indexer.get_archive_policy_for_metric(name)
+            except indexer.NoArchivePolicyRuleMatch:
+                # NOTE(jd) Since this is a schema-like function, we
+                # should/could raise ValueError, but if we do so, voluptuous
+                # just returns a "invalid value" with no useful message – so we
+                # prefer to use abort() to make sure the user has the right
+                # error message
+                abort(400, "No archive policy name specified "
+                      "and no archive policy rule found matching "
+                      "the metric name %s" % name)
+            else:
+                definition['archive_policy_name'] = ap.name
+
+        resource_id = definition.get('resource_id')
+        if resource_id is None:
+            original_resource_id = None
+        else:
+            if name is None:
+                abort(400,
+                      {"cause": "Attribute value error",
+                       "detail": "name",
+                       "reason": "Name cannot be null "
+                       "if resource_id is not null"})
+            original_resource_id, resource_id = resource_id
+
+        enforce("create metric", {
+            "creator": creator,
+            "archive_policy_name": archive_policy_name,
+            "resource_id": resource_id,
+            "original_resource_id": original_resource_id,
+            "name": name,
+            "unit": definition.get('unit'),
+        })
+
+        return definition
+
+    @pecan.expose('json')
+    def post(self):
+        creator = pecan.request.auth_helper.get_current_user(
+            pecan.request)
+        body = deserialize_and_validate(self.MetricSchema)
+
+        resource_id = body.get('resource_id')
+        if resource_id is not None:
+            resource_id = resource_id[1]
+
+        try:
+            m = pecan.request.indexer.create_metric(
+                uuid.uuid4(),
+                creator,
+                resource_id=resource_id,
+                name=body.get('name'),
+                unit=body.get('unit'),
+                archive_policy_name=body['archive_policy_name'])
+        except indexer.NoSuchArchivePolicy as e:
+            abort(400, six.text_type(e))
+        except indexer.NamedMetricAlreadyExists as e:
+            abort(400, e)
+        set_resp_location_hdr("/metric/" + str(m.id))
+        pecan.response.status = 201
+        return m
+
+    MetricListSchema = voluptuous.Schema({
+        "user_id": six.text_type,
+        "project_id": six.text_type,
+        "creator": six.text_type,
+        "name": six.text_type,
+        "id": six.text_type,
+        "unit": six.text_type,
+        "archive_policy_name": six.text_type,
+        "status": voluptuous.Any("active", "delete"),
+    }, extra=voluptuous.REMOVE_EXTRA)
+
+    @classmethod
+    @pecan.expose('json')
+    def get_all(cls, **kwargs):
+        filtering = cls.MetricListSchema(kwargs)
+
+        # Compat with old user/project API
+        provided_user_id = filtering.pop('user_id', None)
+        provided_project_id = filtering.pop('project_id', None)
+        if provided_user_id is None and provided_project_id is None:
+            provided_creator = filtering.pop('creator', None)
+        else:
+            provided_creator = (
+                (provided_user_id or "")
+                + ":"
+                + (provided_project_id or "")
+            )
+
+        pagination_opts = get_pagination_options(kwargs,
+                                                 METRIC_DEFAULT_PAGINATION)
+        attr_filters = []
+        if provided_creator is not None:
+            attr_filters.append({"=": {"creator": provided_creator}})
+
+        for k, v in six.iteritems(filtering):
+            attr_filters.append({"=": {k: v}})
+
+        policy_filter = pecan.request.auth_helper.get_metric_policy_filter(
+            pecan.request, "list metric")
+        resource_policy_filter = (
+            pecan.request.auth_helper.get_resource_policy_filter(
+                pecan.request, "list metric", resource_type=None,
+                prefix="resource")
+        )
+
+        try:
+            metrics = pecan.request.indexer.list_metrics(
+                attribute_filter={"and": attr_filters},
+                policy_filter=policy_filter,
+                resource_policy_filter=resource_policy_filter,
+                **pagination_opts)
+            if metrics and len(metrics) >= pagination_opts['limit']:
+                set_resp_link_hdr(str(metrics[-1].id), kwargs, pagination_opts)
+            return metrics
+        except indexer.InvalidPagination as e:
+            abort(400, six.text_type(e))
+
+
+_MetricsSchema = voluptuous.Schema({
+    six.text_type: voluptuous.Any(utils.UUID,
+                                  MetricsController.MetricSchema),
+})
+
+
+def MetricsSchema(data):
+    # NOTE(jd) Before doing any kind of validation, copy the metric name
+    # into the metric definition. This is required so we have the name
+    # available when doing the metric validation with its own MetricSchema,
+    # and so we can do things such as applying archive policy rules.
+    if isinstance(data, dict):
+        for metric_name, metric_def in six.iteritems(data):
+            if isinstance(metric_def, dict):
+                metric_def['name'] = metric_name
+    return _MetricsSchema(data)
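+
+
+# Both value forms are accepted by MetricsSchema, e.g. (illustrative, the
+# UUID is made up):
+#     {"cpu.util": "5f3f9bba-6a26-4e0b-b1a3-2f0a7b9c0d1e"}  # existing metric
+#     {"cpu.util": {"archive_policy_name": "low"}}          # create a metric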
+
+
+class NamedMetricController(rest.RestController):
+    def __init__(self, resource_id, resource_type):
+        self.resource_id = resource_id
+        self.resource_type = resource_type
+
+    @pecan.expose()
+    def _lookup(self, name, *remainder):
+        m = pecan.request.indexer.list_metrics(
+            details=True,
+            attribute_filter={"and": [
+                {"=": {"name": name}},
+                {"=": {"resource_id": self.resource_id}},
+            ]})
+        if m:
+            return MetricController(m[0]), remainder
+
+        resource = pecan.request.indexer.get_resource(self.resource_type,
+                                                      self.resource_id)
+        if resource:
+            abort(404, six.text_type(indexer.NoSuchMetric(name)))
+        else:
+            abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
+
+    @pecan.expose('json')
+    def post(self):
+        resource = pecan.request.indexer.get_resource(
+            self.resource_type, self.resource_id)
+        if not resource:
+            abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
+        enforce("update resource", resource)
+        metrics = deserialize_and_validate(MetricsSchema)
+        try:
+            r = pecan.request.indexer.update_resource(
+                self.resource_type,
+                self.resource_id,
+                metrics=metrics,
+                append_metrics=True,
+                create_revision=False)
+        except (indexer.NoSuchMetric,
+                indexer.NoSuchArchivePolicy,
+                ValueError) as e:
+            abort(400, six.text_type(e))
+        except indexer.NamedMetricAlreadyExists as e:
+            abort(409, six.text_type(e))
+        except indexer.NoSuchResource as e:
+            abort(404, six.text_type(e))
+
+        return r.metrics
+
+    @pecan.expose('json')
+    def get_all(self):
+        resource = pecan.request.indexer.get_resource(
+            self.resource_type, self.resource_id)
+        if not resource:
+            abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
+        enforce("get resource", resource)
+        return pecan.request.indexer.list_metrics(
+            attribute_filter={"=": {"resource_id": self.resource_id}})
+
+
+class ResourceHistoryController(rest.RestController):
+    def __init__(self, resource_id, resource_type):
+        self.resource_id = resource_id
+        self.resource_type = resource_type
+
+    @pecan.expose('json')
+    def get(self, **kwargs):
+        details = get_bool_param('details', kwargs)
+        pagination_opts = get_pagination_options(
+            kwargs, RESOURCE_DEFAULT_PAGINATION)
+
+        resource = pecan.request.indexer.get_resource(
+            self.resource_type, self.resource_id)
+        if not resource:
+            abort(404, six.text_type(indexer.NoSuchResource(self.resource_id)))
+
+        enforce("get resource", resource)
+
+        try:
+            resources = pecan.request.indexer.list_resources(
+                self.resource_type,
+                attribute_filter={"=": {"id": self.resource_id}},
+                details=details,
+                history=True,
+                **pagination_opts
+            )
+            if resources and len(resources) >= pagination_opts['limit']:
+                marker = "%s@%s" % (resources[-1].id, resources[-1].revision)
+                set_resp_link_hdr(marker, kwargs, pagination_opts)
+            return resources
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+
+def etag_precondition_check(obj):
+    etag, lastmodified = obj.etag, obj.lastmodified
+    # NOTE(sileht): Checks and their order come from rfc7232.
+    # In webob, the '*' value and the absence of the header are both handled
+    # by if_match.__contains__() and if_none_match.__contains__(), and they
+    # behave identically.
+    if etag not in pecan.request.if_match:
+        abort(412)
+    elif (not pecan.request.environ.get("HTTP_IF_MATCH")
+          and pecan.request.if_unmodified_since
+          and pecan.request.if_unmodified_since < lastmodified):
+        abort(412)
+
+    if etag in pecan.request.if_none_match:
+        if pecan.request.method in ['GET', 'HEAD']:
+            abort(304)
+        else:
+            abort(412)
+    elif (not pecan.request.environ.get("HTTP_IF_NONE_MATCH")
+          and pecan.request.if_modified_since
+          and (pecan.request.if_modified_since >=
+               lastmodified)
+          and pecan.request.method in ['GET', 'HEAD']):
+        abort(304)
+
+
+def etag_set_headers(obj):
+    pecan.response.etag = obj.etag
+    pecan.response.last_modified = obj.lastmodified
+
+
+def AttributesPath(value):
+    if value.startswith("/attributes"):
+        return value
+    raise ValueError("Only attributes can be modified")
+
+
+ResourceTypeJsonPatchSchema = voluptuous.Schema([{
+    "op": voluptuous.Any("add", "remove"),
+    "path": AttributesPath,
+    voluptuous.Optional("value"): dict,
+}])
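+
+
+# A request body matching this schema looks like (illustrative):
+#     [{"op": "add", "path": "/attributes/owner",
+#       "value": {"type": "string", "required": false}}]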
+
+
+class ResourceTypeController(rest.RestController):
+    def __init__(self, name):
+        self._name = name
+
+    @pecan.expose('json')
+    def get(self):
+        try:
+            rt = pecan.request.indexer.get_resource_type(self._name)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        enforce("get resource type", rt)
+        return rt
+
+    @pecan.expose('json')
+    def patch(self):
+        # NOTE(sileht): should we check for the
+        # "application/json-patch+json" Content-Type?
+
+        try:
+            rt = pecan.request.indexer.get_resource_type(self._name)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        enforce("update resource type", rt)
+
+        # Ensure this is a valid jsonpatch dict
+        patch = deserialize_and_validate(
+            ResourceTypeJsonPatchSchema,
+            expected_content_types=["application/json-patch+json"])
+
+        # Add new attributes to the resource type
+        rt_json_current = rt.jsonify()
+        try:
+            rt_json_next = jsonpatch.apply_patch(rt_json_current, patch)
+        except jsonpatch.JsonPatchException as e:
+            abort(400, six.text_type(e))
+        del rt_json_next['state']
+
+        # Validate that the whole new resource_type is valid
+        schema = pecan.request.indexer.get_resource_type_schema()
+        try:
+            rt_json_next = voluptuous.Schema(schema.for_update, required=True)(
+                rt_json_next)
+        except voluptuous.Error as e:
+            abort(400, "Invalid input: %s" % e)
+
+        # Get only the newly added and the deleted attributes
+        add_attrs = {k: v for k, v in rt_json_next["attributes"].items()
+                     if k not in rt_json_current["attributes"]}
+        del_attrs = [k for k in rt_json_current["attributes"]
+                     if k not in rt_json_next["attributes"]]
+
+        if not add_attrs and not del_attrs:
+            # NOTE(sileht): just return the resource; the requested
+            # changes are a no-op
+            return rt
+
+        try:
+            add_attrs = schema.attributes_from_dict(add_attrs)
+        except resource_type.InvalidResourceAttribute as e:
+            abort(400, "Invalid input: %s" % e)
+
+        try:
+            return pecan.request.indexer.update_resource_type(
+                self._name, add_attributes=add_attrs,
+                del_attributes=del_attrs)
+        except indexer.NoSuchResourceType as e:
+            abort(400, six.text_type(e))
+
+    @pecan.expose()
+    def delete(self):
+        try:
+            rt = pecan.request.indexer.get_resource_type(self._name)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        enforce("delete resource type", rt)
+        try:
+            pecan.request.indexer.delete_resource_type(self._name)
+        except (indexer.NoSuchResourceType,
+                indexer.ResourceTypeInUse) as e:
+            abort(400, six.text_type(e))
+
+
+class ResourceTypesController(rest.RestController):
+
+    @pecan.expose()
+    def _lookup(self, name, *remainder):
+        return ResourceTypeController(name), remainder
+
+    @pecan.expose('json')
+    def post(self):
+        schema = pecan.request.indexer.get_resource_type_schema()
+        body = deserialize_and_validate(schema)
+        body["state"] = "creating"
+
+        try:
+            rt = schema.resource_type_from_dict(**body)
+        except resource_type.InvalidResourceAttribute as e:
+            abort(400, "Invalid input: %s" % e)
+
+        enforce("create resource type", body)
+        try:
+            rt = pecan.request.indexer.create_resource_type(rt)
+        except indexer.ResourceTypeAlreadyExists as e:
+            abort(409, six.text_type(e))
+        set_resp_location_hdr("/resource_type/" + rt.name)
+        pecan.response.status = 201
+        return rt
+
+    @pecan.expose('json')
+    def get_all(self, **kwargs):
+        enforce("list resource type", {})
+        try:
+            return pecan.request.indexer.list_resource_types()
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+
+def ResourceSchema(schema):
+    base_schema = {
+        voluptuous.Optional('started_at'): utils.to_datetime,
+        voluptuous.Optional('ended_at'): utils.to_datetime,
+        voluptuous.Optional('user_id'): voluptuous.Any(None, six.text_type),
+        voluptuous.Optional('project_id'): voluptuous.Any(None, six.text_type),
+        voluptuous.Optional('metrics'): MetricsSchema,
+    }
+    base_schema.update(schema)
+    return base_schema
+
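+# Illustration (assumption, not from the original code): a POST body that
+# validates once ResourcesController.post() below has added the "id" key to
+# this schema:
+#   {"id": "my-server",
+#    "user_id": "some-user", "project_id": "some-project",
+#    "started_at": "2017-01-01T00:00:00",
+#    "metrics": {"cpu.util": {"archive_policy_name": "low"}}}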
+
+class ResourceController(rest.RestController):
+
+    def __init__(self, resource_type, id):
+        self._resource_type = resource_type
+        creator = pecan.request.auth_helper.get_current_user(
+            pecan.request)
+        try:
+            self.id = utils.ResourceUUID(id, creator)
+        except ValueError:
+            abort(404, six.text_type(indexer.NoSuchResource(id)))
+        self.metric = NamedMetricController(str(self.id), self._resource_type)
+        self.history = ResourceHistoryController(str(self.id),
+                                                 self._resource_type)
+
+    @pecan.expose('json')
+    def get(self):
+        resource = pecan.request.indexer.get_resource(
+            self._resource_type, self.id, with_metrics=True)
+        if resource:
+            enforce("get resource", resource)
+            etag_precondition_check(resource)
+            etag_set_headers(resource)
+            return resource
+        abort(404, six.text_type(indexer.NoSuchResource(self.id)))
+
+    @pecan.expose('json')
+    def patch(self):
+        resource = pecan.request.indexer.get_resource(
+            self._resource_type, self.id, with_metrics=True)
+        if not resource:
+            abort(404, six.text_type(indexer.NoSuchResource(self.id)))
+        enforce("update resource", resource)
+        etag_precondition_check(resource)
+
+        body = deserialize_and_validate(
+            schema_for(self._resource_type),
+            required=False)
+
+        if len(body) == 0:
+            etag_set_headers(resource)
+            return resource
+
+        for k, v in six.iteritems(body):
+            if k != 'metrics' and getattr(resource, k) != v:
+                create_revision = True
+                break
+        else:
+            if 'metrics' not in body:
+                # No need to go further, we assume the db resource
+                # doesn't change between the get and update
+                return resource
+            create_revision = False
+
+        try:
+            resource = pecan.request.indexer.update_resource(
+                self._resource_type,
+                self.id,
+                create_revision=create_revision,
+                **body)
+        except (indexer.NoSuchMetric,
+                indexer.NoSuchArchivePolicy,
+                ValueError) as e:
+            abort(400, six.text_type(e))
+        except indexer.NoSuchResource as e:
+            abort(404, six.text_type(e))
+        etag_set_headers(resource)
+        return resource
+
+    @pecan.expose()
+    def delete(self):
+        resource = pecan.request.indexer.get_resource(
+            self._resource_type, self.id)
+        if not resource:
+            abort(404, six.text_type(indexer.NoSuchResource(self.id)))
+        enforce("delete resource", resource)
+        etag_precondition_check(resource)
+        try:
+            pecan.request.indexer.delete_resource(self.id)
+        except indexer.NoSuchResource as e:
+            abort(404, six.text_type(e))
+
+
+def schema_for(resource_type):
+    resource_type = pecan.request.indexer.get_resource_type(resource_type)
+    return ResourceSchema(resource_type.schema)
+
+
+def ResourceUUID(value, creator):
+    try:
+        return utils.ResourceUUID(value, creator)
+    except ValueError as e:
+        raise voluptuous.Invalid(e)
+
+
+def ResourceID(value, creator):
+    """Convert value to a resource ID.
+
+    :return: A tuple (original_resource_id, resource_id)
+    """
+    return (six.text_type(value), ResourceUUID(value, creator))
+
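+# Illustration (assumption, not from the original code): a value that is not
+# already a UUID is mapped to a deterministic UUID derived from the value and
+# the creator, so ResourceID("my-server", "admin") returns a tuple of the
+# form ("my-server", <derived UUID>), while a UUID string is kept as-is for
+# the second element.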
+
+class ResourcesController(rest.RestController):
+    def __init__(self, resource_type):
+        self._resource_type = resource_type
+
+    @pecan.expose()
+    def _lookup(self, id, *remainder):
+        return ResourceController(self._resource_type, id), remainder
+
+    @pecan.expose('json')
+    def post(self):
+        # NOTE(sileht): we need to copy the dict because we modify it
+        # below, and we don't want later calls to see the extra "id" key
+        schema = dict(schema_for(self._resource_type))
+        creator = pecan.request.auth_helper.get_current_user(
+            pecan.request)
+        schema["id"] = functools.partial(ResourceID, creator=creator)
+
+        body = deserialize_and_validate(schema)
+        body["original_resource_id"], body["id"] = body["id"]
+
+        target = {
+            "resource_type": self._resource_type,
+        }
+        target.update(body)
+        enforce("create resource", target)
+        rid = body['id']
+        del body['id']
+        try:
+            resource = pecan.request.indexer.create_resource(
+                self._resource_type, rid, creator,
+                **body)
+        except (ValueError,
+                indexer.NoSuchMetric,
+                indexer.NoSuchArchivePolicy) as e:
+            abort(400, six.text_type(e))
+        except indexer.ResourceAlreadyExists as e:
+            abort(409, six.text_type(e))
+        set_resp_location_hdr("/resource/"
+                              + self._resource_type + "/"
+                              + six.text_type(resource.id))
+        etag_set_headers(resource)
+        pecan.response.status = 201
+        return resource
+
+    @pecan.expose('json')
+    def get_all(self, **kwargs):
+        details = get_bool_param('details', kwargs)
+        history = get_bool_param('history', kwargs)
+        pagination_opts = get_pagination_options(
+            kwargs, RESOURCE_DEFAULT_PAGINATION)
+        json_attrs = arg_to_list(kwargs.get('attrs', None))
+        policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
+            pecan.request, "list resource", self._resource_type)
+
+        try:
+            # FIXME(sileht): the next API version should return
+            # {'resources': [...], 'links': [ ... pagination rel ...]}
+            resources = pecan.request.indexer.list_resources(
+                self._resource_type,
+                attribute_filter=policy_filter,
+                details=details,
+                history=history,
+                **pagination_opts
+            )
+            if resources and len(resources) >= pagination_opts['limit']:
+                if history:
+                    marker = "%s@%s" % (resources[-1].id,
+                                        resources[-1].revision)
+                else:
+                    marker = str(resources[-1].id)
+                set_resp_link_hdr(marker, kwargs, pagination_opts)
+            return [r.jsonify(json_attrs) for r in resources]
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+    @pecan.expose('json')
+    def delete(self, **kwargs):
+        # NOTE(sileht): don't allow an empty filter, as it would delete
+        # the entire database.
+        if pecan.request.body:
+            attr_filter = deserialize_and_validate(ResourceSearchSchema)
+        elif kwargs.get("filter"):
+            attr_filter = QueryStringSearchAttrFilter.parse(kwargs["filter"])
+        else:
+            attr_filter = None
+
+        # voluptuous checks everything, but it is better to have this
+        # safeguard here as well.
+        if not attr_filter:
+            abort(400, "caution: the query cannot be empty, or it would "
+                       "delete the entire database")
+
+        policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
+            pecan.request,
+            "delete resources", self._resource_type)
+
+        if policy_filter:
+            attr_filter = {"and": [policy_filter, attr_filter]}
+
+        try:
+            delete_num = pecan.request.indexer.delete_resources(
+                self._resource_type, attribute_filter=attr_filter)
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+        return {"deleted": delete_num}
+
+
+class ResourcesByTypeController(rest.RestController):
+    @pecan.expose('json')
+    def get_all(self):
+        return dict(
+            (rt.name,
+             pecan.request.application_url + '/resource/' + rt.name)
+            for rt in pecan.request.indexer.list_resource_types())
+
+    @pecan.expose()
+    def _lookup(self, resource_type, *remainder):
+        try:
+            pecan.request.indexer.get_resource_type(resource_type)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        return ResourcesController(resource_type), remainder
+
+
+class QueryStringSearchAttrFilter(object):
+    unary_operators = ("not", )
+    binary_operator = (u">=", u"<=", u"!=", u">", u"<", u"=", u"==", u"eq",
+                       u"ne", u"lt", u"gt", u"ge", u"le", u"in", u"like", u"≠",
+                       u"≥", u"≤")
+    multiple_operators = (u"and", u"or", u"∧", u"∨")
+
+    operator = pyparsing.Regex(u"|".join(binary_operator))
+    null = pyparsing.Regex("None|none|null").setParseAction(
+        pyparsing.replaceWith(None))
+    boolean = "False|True|false|true"
+    boolean = pyparsing.Regex(boolean).setParseAction(
+        lambda t: t[0].lower() == "true")
+    hex_string = lambda n: pyparsing.Word(pyparsing.hexnums, exact=n)
+    uuid_string = pyparsing.Combine(
+        hex_string(8) + (pyparsing.Optional("-") + hex_string(4)) * 3 +
+        pyparsing.Optional("-") + hex_string(12))
+    number = r"[+-]?\d+(:?\.\d*)?(:?[eE][+-]?\d+)?"
+    number = pyparsing.Regex(number).setParseAction(lambda t: float(t[0]))
+    identifier = pyparsing.Word(pyparsing.alphas, pyparsing.alphanums + "_")
+    quoted_string = pyparsing.QuotedString('"') | pyparsing.QuotedString("'")
+    comparison_term = pyparsing.Forward()
+    in_list = pyparsing.Group(
+        pyparsing.Suppress('[') +
+        pyparsing.Optional(pyparsing.delimitedList(comparison_term)) +
+        pyparsing.Suppress(']'))("list")
+    comparison_term << (null | boolean | uuid_string | identifier | number |
+                        quoted_string | in_list)
+    condition = pyparsing.Group(comparison_term + operator + comparison_term)
+
+    expr = pyparsing.infixNotation(condition, [
+        ("not", 1, pyparsing.opAssoc.RIGHT, ),
+        ("and", 2, pyparsing.opAssoc.LEFT, ),
+        ("∧", 2, pyparsing.opAssoc.LEFT, ),
+        ("or", 2, pyparsing.opAssoc.LEFT, ),
+        ("∨", 2, pyparsing.opAssoc.LEFT, ),
+    ])
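+
+    # Illustration (not part of the original code): a filter string such as
+    #   server_group='frontend' and (flavor_id!=1 or memory>=1024)
+    # is parsed by the grammar above and converted by _parsed_query2dict()
+    # below into a nested dict roughly like this (numbers are parsed as
+    # floats and operand order is not guaranteed):
+    #   {"and": [{"=": {"server_group": "frontend"}},
+    #            {"or": [{"!=": {"flavor_id": 1.0}},
+    #                    {">=": {"memory": 1024.0}}]}]}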
+
+    @classmethod
+    def _parsed_query2dict(cls, parsed_query):
+        result = None
+        while parsed_query:
+            part = parsed_query.pop()
+            if part in cls.binary_operator:
+                result = {part: {parsed_query.pop(): result}}
+
+            elif part in cls.multiple_operators:
+                if result.get(part):
+                    result[part].append(
+                        cls._parsed_query2dict(parsed_query.pop()))
+                else:
+                    result = {part: [result]}
+
+            elif part in cls.unary_operators:
+                result = {part: result}
+            elif isinstance(part, pyparsing.ParseResults):
+                kind = part.getName()
+                if kind == "list":
+                    res = part.asList()
+                else:
+                    res = cls._parsed_query2dict(part)
+                if result is None:
+                    result = res
+                elif isinstance(result, dict):
+                    list(result.values())[0].append(res)
+            else:
+                result = part
+        return result
+
+    @classmethod
+    def _parse(cls, query):
+        try:
+            parsed_query = cls.expr.parseString(query, parseAll=True)[0]
+        except pyparsing.ParseException as e:
+            raise abort(400, "Invalid filter: %s" % str(e))
+        return cls._parsed_query2dict(parsed_query)
+
+    @classmethod
+    def parse(cls, query):
+        attr_filter = cls._parse(query)
+        return validate(ResourceSearchSchema, attr_filter, required=True)
+
+
+def ResourceSearchSchema(v):
+    return _ResourceSearchSchema()(v)
+
+
+# NOTE(sileht): the indexer will cast this value to the real attribute
+# type; here we just want to be sure it is not a dict or a list
+ResourceSearchSchemaAttributeValue = voluptuous.Any(
+    six.text_type, float, int, bool, None)
+
+
+NotIDKey = voluptuous.All(six.text_type, voluptuous.NotIn(["id"]))
+
+
+def _ResourceSearchSchema():
+    user = pecan.request.auth_helper.get_current_user(
+        pecan.request)
+    _ResourceUUID = functools.partial(ResourceUUID, creator=user)
+
+    return voluptuous.Schema(
+        voluptuous.All(
+            voluptuous.Length(min=0, max=1),
+            {
+                voluptuous.Any(
+                    u"=", u"==", u"eq",
+                    u"<", u"lt",
+                    u">", u"gt",
+                    u"<=", u"≤", u"le",
+                    u">=", u"≥", u"ge",
+                    u"!=", u"≠", u"ne",
+                ): voluptuous.All(
+                    voluptuous.Length(min=1, max=1),
+                    {"id": _ResourceUUID,
+                     NotIDKey: ResourceSearchSchemaAttributeValue},
+                ),
+                u"like": voluptuous.All(
+                    voluptuous.Length(min=1, max=1),
+                    {NotIDKey: ResourceSearchSchemaAttributeValue},
+                ),
+                u"in": voluptuous.All(
+                    voluptuous.Length(min=1, max=1),
+                    {"id": voluptuous.All(
+                        [_ResourceUUID],
+                        voluptuous.Length(min=1)),
+                     NotIDKey: voluptuous.All(
+                         [ResourceSearchSchemaAttributeValue],
+                         voluptuous.Length(min=1))}
+                ),
+                voluptuous.Any(
+                    u"and", u"∨",
+                    u"or", u"∧",
+                ): voluptuous.All(
+                    [ResourceSearchSchema], voluptuous.Length(min=1)
+                ),
+                u"not": ResourceSearchSchema,
+            }
+        )
+    )
+
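+# Illustration (assumption, not from the original code): documents accepted
+# by the search schema above include:
+#   {"=": {"server_group": "frontend"}}
+#   {"and": [{">=": {"memory": 1024}},
+#            {"like": {"host": "compute%"}}]}
+#   {"not": {"=": {"ended_at": None}}}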
+
+class SearchResourceTypeController(rest.RestController):
+    def __init__(self, resource_type):
+        self._resource_type = resource_type
+
+    def _search(self, **kwargs):
+        if pecan.request.body:
+            attr_filter = deserialize_and_validate(ResourceSearchSchema)
+        elif kwargs.get("filter"):
+            attr_filter = QueryStringSearchAttrFilter.parse(kwargs["filter"])
+        else:
+            attr_filter = None
+
+        details = get_bool_param('details', kwargs)
+        history = get_bool_param('history', kwargs)
+        pagination_opts = get_pagination_options(
+            kwargs, RESOURCE_DEFAULT_PAGINATION)
+
+        policy_filter = pecan.request.auth_helper.get_resource_policy_filter(
+            pecan.request, "search resource", self._resource_type)
+        if policy_filter:
+            if attr_filter:
+                attr_filter = {"and": [
+                    policy_filter,
+                    attr_filter
+                ]}
+            else:
+                attr_filter = policy_filter
+
+        resources = pecan.request.indexer.list_resources(
+            self._resource_type,
+            attribute_filter=attr_filter,
+            details=details,
+            history=history,
+            **pagination_opts)
+        if resources and len(resources) >= pagination_opts['limit']:
+            if history:
+                marker = "%s@%s" % (resources[-1].id,
+                                    resources[-1].revision)
+            else:
+                marker = str(resources[-1].id)
+            set_resp_link_hdr(marker, kwargs, pagination_opts)
+        return resources
+
+    @pecan.expose('json')
+    def post(self, **kwargs):
+        json_attrs = arg_to_list(kwargs.get('attrs', None))
+        try:
+            return [r.jsonify(json_attrs) for r in self._search(**kwargs)]
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+
+class SearchResourceController(rest.RestController):
+    @pecan.expose()
+    def _lookup(self, resource_type, *remainder):
+        try:
+            pecan.request.indexer.get_resource_type(resource_type)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        return SearchResourceTypeController(resource_type), remainder
+
+
+def _MetricSearchSchema(v):
+    """Helper method to indirect the recursivity of the search schema"""
+    return SearchMetricController.MetricSearchSchema(v)
+
+
+def _MetricSearchOperationSchema(v):
+    """Helper method to indirect the recursivity of the search schema"""
+    return SearchMetricController.MetricSearchOperationSchema(v)
+
+
+class SearchMetricController(rest.RestController):
+
+    MetricSearchOperationSchema = voluptuous.Schema(
+        voluptuous.All(
+            voluptuous.Length(min=1, max=1),
+            {
+                voluptuous.Any(
+                    u"=", u"==", u"eq",
+                    u"<", u"lt",
+                    u">", u"gt",
+                    u"<=", u"≤", u"le",
+                    u">=", u"≥", u"ge",
+                    u"!=", u"≠", u"ne",
+                    u"%", u"mod",
+                    u"+", u"add",
+                    u"-", u"sub",
+                    u"*", u"×", u"mul",
+                    u"/", u"÷", u"div",
+                    u"**", u"^", u"pow",
+                ): voluptuous.Any(
+                    float, int,
+                    voluptuous.All(
+                        [float, int,
+                         voluptuous.Any(_MetricSearchOperationSchema)],
+                        voluptuous.Length(min=2, max=2),
+                    ),
+                ),
+            },
+        )
+    )
+
+    MetricSearchSchema = voluptuous.Schema(
+        voluptuous.Any(
+            MetricSearchOperationSchema,
+            voluptuous.All(
+                voluptuous.Length(min=1, max=1),
+                {
+                    voluptuous.Any(
+                        u"and", u"∨",
+                        u"or", u"∧",
+                        u"not",
+                    ): [_MetricSearchSchema],
+                }
+            )
+        )
+    )
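+
+    # Illustration (not part of the original code): queries accepted by
+    # MetricSearchSchema include:
+    #   {">=": 1000}
+    #   {"and": [{">=": 1000}, {"<=": 2000}]}
+    #   {"=": [{"%": 5}, 0]}   # the measured value modulo 5 equals 0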
+
+    class MeasureQuery(object):
+        binary_operators = {
+            u"=": operator.eq,
+            u"==": operator.eq,
+            u"eq": operator.eq,
+
+            u"<": operator.lt,
+            u"lt": operator.lt,
+
+            u">": operator.gt,
+            u"gt": operator.gt,
+
+            u"<=": operator.le,
+            u"≤": operator.le,
+            u"le": operator.le,
+
+            u">=": operator.ge,
+            u"≥": operator.ge,
+            u"ge": operator.ge,
+
+            u"!=": operator.ne,
+            u"≠": operator.ne,
+            u"ne": operator.ne,
+
+            u"%": operator.mod,
+            u"mod": operator.mod,
+
+            u"+": operator.add,
+            u"add": operator.add,
+
+            u"-": operator.sub,
+            u"sub": operator.sub,
+
+            u"*": operator.mul,
+            u"×": operator.mul,
+            u"mul": operator.mul,
+
+            u"/": operator.truediv,
+            u"÷": operator.truediv,
+            u"div": operator.truediv,
+
+            u"**": operator.pow,
+            u"^": operator.pow,
+            u"pow": operator.pow,
+        }
+
+        multiple_operators = {
+            u"or": any,
+            u"∨": any,
+            u"and": all,
+            u"∧": all,
+        }
+
+        def __init__(self, tree):
+            self._eval = self.build_evaluator(tree)
+
+        def __call__(self, value):
+            return self._eval(value)
+
+        def build_evaluator(self, tree):
+            try:
+                operator, nodes = list(tree.items())[0]
+            except Exception:
+                return lambda value: tree
+            try:
+                op = self.multiple_operators[operator]
+            except KeyError:
+                try:
+                    op = self.binary_operators[operator]
+                except KeyError:
+                    raise self.InvalidQuery("Unknown operator %s" % operator)
+                return self._handle_binary_op(op, nodes)
+            return self._handle_multiple_op(op, nodes)
+
+        def _handle_multiple_op(self, op, nodes):
+            elements = [self.build_evaluator(node) for node in nodes]
+            return lambda value: op((e(value) for e in elements))
+
+        def _handle_binary_op(self, op, node):
+            try:
+                iterator = iter(node)
+            except Exception:
+                return lambda value: op(value, node)
+            nodes = list(iterator)
+            if len(nodes) != 2:
+                raise self.InvalidQuery(
+                    "Binary operator %s needs 2 arguments, %d given" %
+                    (op, len(nodes)))
+            node0 = self.build_evaluator(node[0])
+            node1 = self.build_evaluator(node[1])
+            return lambda value: op(node0(value), node1(value))
+
+        class InvalidQuery(Exception):
+            pass
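+
+        # Illustration (not part of the original code): evaluating measures
+        # against a compiled query:
+        #   MeasureQuery({"=": [{"%": 5}, 0]})(10)  evaluates to True
+        #   MeasureQuery({"=": [{"%": 5}, 0]})(7)   evaluates to False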
+
+    @pecan.expose('json')
+    def post(self, metric_id, start=None, stop=None, aggregation='mean',
+             granularity=None):
+        metrics = pecan.request.indexer.list_metrics(
+            attribute_filter={"in": {"id": arg_to_list(metric_id)}})
+
+        for metric in metrics:
+            enforce("search metric", metric)
+
+        if not pecan.request.body:
+            abort(400, "No query specified in body")
+
+        query = deserialize_and_validate(self.MetricSearchSchema)
+
+        if start is not None:
+            try:
+                start = utils.to_timestamp(start)
+            except Exception:
+                abort(400, "Invalid value for start")
+
+        if stop is not None:
+            try:
+                stop = utils.to_timestamp(stop)
+            except Exception:
+                abort(400, "Invalid value for stop")
+
+        try:
+            predicate = self.MeasureQuery(query)
+        except self.MeasureQuery.InvalidQuery as e:
+            abort(400, six.text_type(e))
+
+        if granularity is not None:
+            granularity = sorted(
+                map(utils.to_timespan, arg_to_list(granularity)),
+                reverse=True)
+
+        metrics_and_aggregations = collections.defaultdict(list)
+
+        for metric in metrics:
+            if granularity is None:
+                granularity = sorted((
+                    d.granularity
+                    for d in metric.archive_policy.definition),
+                    reverse=True)
+            for gr in granularity:
+                agg = metric.archive_policy.get_aggregation(
+                    aggregation, gr)
+                if agg is None:
+                    abort(400,
+                          storage.AggregationDoesNotExist(
+                              metric, aggregation, gr))
+                metrics_and_aggregations[metric].append(agg)
+
+        try:
+            timeseries = pecan.request.storage.get_aggregated_measures(
+                metrics_and_aggregations, start, stop)
+        except storage.MetricDoesNotExist as e:
+            # This can happen if all the metrics have been created but one
+            # doesn't have any measures yet.
+            abort(400, e)
+
+        return {
+            str(metric.id): [
+                (timestamp, aggregation.granularity, value)
+                for aggregation, ts in six.iteritems(aggregations_and_ts)
+                for timestamp, value in ts
+                if predicate(value)
+            ]
+            for metric, aggregations_and_ts in six.iteritems(timeseries)
+        }
+
+
+class ResourcesMetricsMeasuresBatchController(rest.RestController):
+
+    @staticmethod
+    def BackwardCompatibleMeasuresList(v):
+        v = voluptuous.Schema(
+            voluptuous.Any(MeasuresListSchema,
+                           {voluptuous.Optional("archive_policy_name"):
+                            six.text_type,
+                            voluptuous.Optional("unit"):
+                            six.text_type,
+                            "measures": MeasuresListSchema}),
+            required=True)(v)
+        if isinstance(v, dict):
+            return v
+        else:
+            # Old format
+            return {"measures": v}
+
+    @pecan.expose('json')
+    def post(self, create_metrics=False):
+        creator = pecan.request.auth_helper.get_current_user(
+            pecan.request)
+        MeasuresBatchSchema = voluptuous.Schema(
+            {functools.partial(ResourceID, creator=creator):
+             {six.text_type: self.BackwardCompatibleMeasuresList}})
+        body = deserialize_and_validate(MeasuresBatchSchema)
+
+        known_metrics = []
+        unknown_metrics = []
+        unknown_resources = []
+        body_by_rid = {}
+
+        attribute_filter = {"or": []}
+        for original_resource_id, resource_id in body:
+            names = list(body[(original_resource_id, resource_id)].keys())
+            if names:
+                attribute_filter["or"].append({"and": [
+                    {"=": {"resource_id": resource_id}},
+                    {"in": {"name": names}}]})
+
+        if not attribute_filter["or"]:
+            pecan.response.status = 202
+            return
+
+        all_metrics = collections.defaultdict(list)
+        for metric in pecan.request.indexer.list_metrics(
+                attribute_filter=attribute_filter):
+            all_metrics[metric.resource_id].append(metric)
+
+        for original_resource_id, resource_id in body:
+            r = body[(original_resource_id, resource_id)]
+            body_by_rid[resource_id] = r
+            names = list(r.keys())
+            metrics = all_metrics[resource_id]
+
+            known_names = [m.name for m in metrics]
+            if strtobool("create_metrics", create_metrics):
+                already_exists_names = []
+                for name in names:
+                    if name not in known_names:
+                        metric_data = {"name": name}
+                        for attr in ["archive_policy_name", "unit"]:
+                            if attr in r[name]:
+                                metric_data[attr] = r[name][attr]
+                        metric = MetricsController.MetricSchema(metric_data)
+                        try:
+                            m = pecan.request.indexer.create_metric(
+                                uuid.uuid4(),
+                                creator=creator,
+                                resource_id=resource_id,
+                                name=metric.get('name'),
+                                unit=metric.get('unit'),
+                                archive_policy_name=metric[
+                                    'archive_policy_name'])
+                        except indexer.NamedMetricAlreadyExists as e:
+                            already_exists_names.append(e.metric_name)
+                        except indexer.NoSuchResource:
+                            unknown_resources.append({
+                                'resource_id': six.text_type(resource_id),
+                                'original_resource_id': original_resource_id})
+                            break
+                        except indexer.IndexerException as e:
+                            # This catches NoSuchArchivePolicy, which is
+                            # unlikely but still possible
+                            abort(400, six.text_type(e))
+                        else:
+                            known_metrics.append(m)
+
+                if already_exists_names:
+                    # Add metrics created in the meantime
+                    known_names.extend(already_exists_names)
+                    known_metrics.extend(
+                        pecan.request.indexer.list_metrics(
+                            attribute_filter={"and": [
+                                {"=": {"resource_id": resource_id}},
+                                {"in": {"name": already_exists_names}},
+                            ]}))
+
+            elif len(names) != len(metrics):
+                unknown_metrics.extend(
+                    ["%s/%s" % (six.text_type(resource_id), m)
+                     for m in names if m not in known_names])
+
+            known_metrics.extend(metrics)
+
+        if unknown_resources:
+            abort(400, {"cause": "Unknown resources",
+                        "detail": unknown_resources})
+
+        if unknown_metrics:
+            abort(400, "Unknown metrics: %s" % ", ".join(
+                sorted(unknown_metrics)))
+
+        for metric in known_metrics:
+            enforce("post measures", metric)
+
+        pecan.request.incoming.add_measures_batch(
+            dict((metric.id,
+                 body_by_rid[metric.resource_id][metric.name]["measures"])
+                 for metric in known_metrics))
+
+        pecan.response.status = 202
+
+
+class MetricsMeasuresBatchController(rest.RestController):
+    # NOTE(sileht): we don't allow mixing both formats, to avoid having
+    # to deal with id collisions that can occur between a metric_id and
+    # a resource_id. While JSON allows duplicate keys in a dict payload,
+    # the Python json module only retains the last one when building
+    # the dict.
+    MeasuresBatchSchema = voluptuous.Schema(
+        {utils.UUID: MeasuresListSchema}
+    )
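+
+    # Illustration (assumption, not from the original source): an accepted
+    # batch payload maps metric UUIDs to lists of measures, e.g.
+    #   {"5a301761-aaaa-46e2-8900-8b4f6fe6675a":
+    #        [{"timestamp": "2017-01-01T00:00:00", "value": 4.2}]}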
+
+    @pecan.expose("json")
+    def post(self):
+        body = deserialize_and_validate(self.MeasuresBatchSchema)
+        metrics = pecan.request.indexer.list_metrics(
+            attribute_filter={"in": {"id": list(body.keys())}})
+
+        if len(metrics) != len(body):
+            missing_metrics = sorted(set(body) - set(m.id for m in metrics))
+            abort(400, "Unknown metrics: %s" % ", ".join(
+                six.moves.map(str, missing_metrics)))
+
+        for metric in metrics:
+            enforce("post measures", metric)
+
+        pecan.request.incoming.add_measures_batch(
+            dict((metric.id, body[metric.id]) for metric in
+                 metrics))
+
+        pecan.response.status = 202
+
+
+class SearchController(object):
+    resource = SearchResourceController()
+    metric = SearchMetricController()
+
+
+class AggregationResourceController(rest.RestController):
+    def __init__(self, resource_type, metric_name):
+        self.resource_type = resource_type
+        self.metric_name = metric_name
+
+    @pecan.expose('json')
+    def post(self, start=None, stop=None, aggregation='mean',
+             reaggregation=None, granularity=None, needed_overlap=100.0,
+             groupby=None, fill=None, refresh=False, resample=None,
+             **kwargs):
+        # First, set groupby in the right format: a sorted list of unique
+        # strings.
+        groupby = sorted(set(arg_to_list(groupby)))
+
+        # NOTE(jd) Sort by groupby so we are sure we do not return multiple
+        # groups when using itertools.groupby later.
+        try:
+            resources = SearchResourceTypeController(
+                self.resource_type)._search(sort=groupby,
+                                            filter=kwargs.get("filter"))
+        except indexer.InvalidPagination:
+            abort(400, "Invalid groupby attribute")
+        except indexer.IndexerException as e:
+            abort(400, six.text_type(e))
+
+        if resources is None:
+            return []
+
+        if not groupby:
+            metrics = list(filter(None,
+                                  (r.get_metric(self.metric_name)
+                                   for r in resources)))
+            return AggregationController.get_cross_metric_measures_from_objs(
+                metrics, start, stop, aggregation, reaggregation,
+                granularity, needed_overlap, fill, refresh, resample)
+
+        def grouper(r):
+            return tuple((attr, r[attr]) for attr in groupby)
+
+        results = []
+        for key, resources in itertools.groupby(resources, grouper):
+            metrics = list(filter(None,
+                                  (r.get_metric(self.metric_name)
+                                   for r in resources)))
+            results.append({
+                "group": dict(key),
+                "measures": AggregationController.get_cross_metric_measures_from_objs(  # noqa
+                    metrics, start, stop, aggregation, reaggregation,
+                    granularity, needed_overlap, fill, refresh, resample)
+            })
+
+        return results
+
+
+FillSchema = voluptuous.Schema(
+    voluptuous.Any(voluptuous.Coerce(float), "null", "dropna",
+                   msg="Must be a float, 'dropna' or 'null'"))
+
+
+def validate_qs(start=None, stop=None, granularity=None,
+                needed_overlap=None, fill=None):
+    if needed_overlap is not None:
+        try:
+            needed_overlap = float(needed_overlap)
+        except ValueError:
+            abort(400, {"cause": "Argument value error",
+                        "detail": "needed_overlap",
+                        "reason": "Must be a number"})
+
+    if start is not None:
+        try:
+            start = utils.to_timestamp(start)
+        except Exception:
+            abort(400, {"cause": "Argument value error",
+                        "detail": "start",
+                        "reason": "Must be a datetime or a timestamp"})
+
+    if stop is not None:
+        try:
+            stop = utils.to_timestamp(stop)
+        except Exception:
+            abort(400, {"cause": "Argument value error",
+                        "detail": "stop",
+                        "reason": "Must be a datetime or a timestamp"})
+
+    if granularity is not None:
+        try:
+            granularity = [utils.to_timespan(granularity)]
+        except ValueError as e:
+            abort(400, {"cause": "Argument value error",
+                        "detail": "granularity",
+                        "reason": six.text_type(e)})
+
+    if fill is not None:
+        try:
+            fill = FillSchema(fill)
+        except voluptuous.Error as e:
+            abort(400, {"cause": "Argument value error",
+                        "detail": "fill",
+                        "reason": str(e)})
+
+    return start, stop, granularity, needed_overlap, fill
+
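+# Illustration (assumption, not from the original code): validate_qs()
+# normalizes query-string values and aborts with a structured 400 error on
+# bad input; e.g. validate_qs(start="2017-01-01", granularity="3600")
+# returns the parsed start timestamp, None for stop, a one-element list
+# holding a one-hour timespan, and None for needed_overlap and fill.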
+
+class AggregationController(rest.RestController):
+    _custom_actions = {
+        'metric': ['POST', 'GET'],
+    }
+
+    @pecan.expose()
+    def _lookup(self, object_type, resource_type, key, metric_name,
+                *remainder):
+        if object_type != "resource" or key != "metric":
+            # NOTE(sileht): we want the raw 404 message here,
+            # so use pecan directly
+            pecan.abort(404)
+        try:
+            pecan.request.indexer.get_resource_type(resource_type)
+        except indexer.NoSuchResourceType as e:
+            abort(404, six.text_type(e))
+        return AggregationResourceController(resource_type,
+                                             metric_name), remainder
+
+    @staticmethod
+    def get_cross_metric_measures_from_objs(metrics, start=None, stop=None,
+                                            aggregation='mean',
+                                            reaggregation=None,
+                                            granularity=None,
+                                            needed_overlap=100.0, fill=None,
+                                            refresh=False, resample=None):
+        start, stop, granularity, needed_overlap, fill = validate_qs(
+            start, stop, granularity, needed_overlap, fill)
+
+        if reaggregation is None:
+            reaggregation = aggregation
+
+        for metric in metrics:
+            enforce("get metric", metric)
+
+        number_of_metrics = len(metrics)
+        if number_of_metrics == 0:
+            return []
+
+        if resample:
+            if not granularity:
+                abort(400, 'A granularity must be specified to resample')
+            try:
+                resample = (resample if calendar.GROUPINGS.get(resample) else
+                            utils.to_timespan(resample))
+            except ValueError as e:
+                abort(400, six.text_type(e))
+
+        if granularity is None:
+            granularities = (
+                definition.granularity
+                for m in metrics
+                for definition in m.archive_policy.definition
+            )
+            # granularities_in_common
+            granularity = [
+                g
+                for g, occurrence in six.iteritems(
+                    collections.Counter(granularities))
+                if occurrence == len(metrics)
+            ]
+
+            if not granularity:
+                abort(400, exceptions.UnAggregableTimeseries(
+                    list((metric.id, aggregation)
+                         for metric in metrics),
+                    'No granularity match'))
+
+        aggregations = set()
+        for metric in metrics:
+            for g in granularity:
+                agg = metric.archive_policy.get_aggregation(
+                    aggregation, g)
+                if agg is None:
+                    abort(404, six.text_type(
+                        storage.AggregationDoesNotExist(metric, aggregation, g)
+                    ))
+                aggregations.add(agg)
+        aggregations = sorted(aggregations, key=ATTRGETTER_GRANULARITY,
+                              reverse=True)
+
+        operations = ["aggregate", reaggregation, []]
+        if resample:
+            operations[2].extend(
+                ["resample", aggregation, resample,
+                 ["metric"] + [[str(m.id), aggregation]
+                               for m in metrics]]
+            )
+        else:
+            operations[2].extend(
+                ["metric"] + [[str(m.id), aggregation]
+                              for m in metrics]
+            )
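+
+        # Illustration (not part of the original code): with two metrics and
+        # aggregation "mean" (no resampling), the operations list built
+        # above looks like:
+        #   ["aggregate", "mean",
+        #    ["metric", [<id of m1>, "mean"], [<id of m2>, "mean"]]]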
+
+        try:
+            if strtobool("refresh", refresh):
+                metrics_to_update = [
+                    m for m in metrics
+                    if pecan.request.incoming.has_unprocessed(m.id)]
+                for m in metrics_to_update:
+                    try:
+                        pecan.request.chef.refresh_metrics(
+                            [m], pecan.request.conf.api.operation_timeout)
+                    except chef.SackAlreadyLocked:
+                        abort(503, 'Unable to refresh metric: %s. '
+                              'Metric is locked. '
+                              'Please try again.' % m.id)
+            if number_of_metrics == 1:
+                # NOTE(sileht): don't do the aggregation if we only have one
+                # metric
+                metric = metrics[0]
+                if (aggregation
+                   not in metric.archive_policy.aggregation_methods):
+                    abort(404, {
+                        "cause":
+                        "Aggregation method does not exist for this metric",
+                        "detail": {
+                            "metric": str(metric.id),
+                            "aggregation_method": aggregation,
+                        },
+                    })
+                try:
+                    results = pecan.request.storage.get_aggregated_measures(
+                        {metric: aggregations}, start, stop, resample)[metric]
+                    return [(timestamp, results[key].aggregation.granularity,
+                             value)
+                            for key in sorted(results.keys(),
+                                              reverse=True)
+                            for timestamp, value in results[key]]
+                except storage.MetricDoesNotExist:
+                    return []
+            return processor.get_measures(
+                pecan.request.storage,
+                [processor.MetricReference(m, aggregation) for m in metrics],
+                operations, start, stop,
+                granularity, needed_overlap, fill)["aggregated"]
+        except exceptions.UnAggregableTimeseries as e:
+            abort(400, e)
+        except storage.AggregationDoesNotExist as e:
+            abort(404, six.text_type(e))
+
+    MetricIDsSchema = [utils.UUID]
+
+    @pecan.expose('json')
+    def get_metric(self, metric=None, start=None, stop=None,
+                   aggregation='mean', reaggregation=None, granularity=None,
+                   needed_overlap=100.0, fill=None,
+                   refresh=False, resample=None):
+        if pecan.request.method == 'GET':
+            try:
+                metric_ids = voluptuous.Schema(
+                    self.MetricIDsSchema, required=True)(arg_to_list(metric))
+            except voluptuous.Error as e:
+                abort(400, "Invalid input: %s" % e)
+        else:
+            self._workaround_pecan_issue_88()
+            metric_ids = deserialize_and_validate(self.MetricIDsSchema)
+
+        metric_ids = [six.text_type(m) for m in metric_ids]
+        # Check RBAC policy
+        metrics = pecan.request.indexer.list_metrics(
+            attribute_filter={"in": {"id": metric_ids}})
+        missing_metric_ids = (set(metric_ids)
+                              - set(six.text_type(m.id) for m in metrics))
+        if missing_metric_ids:
+            # Return one of the missing one in the error
+            abort(404, six.text_type(storage.MetricDoesNotExist(
+                missing_metric_ids.pop())))
+        return self.get_cross_metric_measures_from_objs(
+            metrics, start, stop, aggregation, reaggregation,
+            granularity, needed_overlap, fill, refresh, resample)
+
+    post_metric = get_metric
+
+    def _workaround_pecan_issue_88(self):
+        # FIXME(sileht): https://github.com/pecan/pecan/pull/88
+        if pecan.request.path_info.startswith("/aggregation/resource"):
+            pecan.abort(405)
+
+
+class CapabilityController(rest.RestController):
+    @staticmethod
+    @pecan.expose('json')
+    def get():
+        return dict(aggregation_methods=set(
+            archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS))
+
+
+class StatusController(rest.RestController):
+    @staticmethod
+    @pecan.expose('json')
+    def get(details=True):
+        enforce("get status", {})
+        try:
+            members_req = pecan.request.coordinator.get_members(
+                metricd.MetricProcessor.GROUP_ID)
+        except tooz.NotImplemented:
+            members_req = None
+        try:
+            report = pecan.request.incoming.measures_report(
+                strtobool("details", details))
+        except incoming.ReportGenerationError:
+            abort(503, 'Unable to generate status. Please retry.')
+        report_dict = {"storage": {"summary": report['summary']}}
+        if 'details' in report:
+            report_dict["storage"]["measures_to_process"] = report['details']
+        report_dict['metricd'] = {}
+        if members_req:
+            members = members_req.get()
+            caps = [
+                pecan.request.coordinator.get_member_capabilities(
+                    metricd.MetricProcessor.GROUP_ID, member)
+                for member in members
+            ]
+            report_dict['metricd']['processors'] = members
+            report_dict['metricd']['statistics'] = {
+                member: cap.get()
+                for member, cap in six.moves.zip(members, caps)
+            }
+        else:
+            report_dict['metricd']['processors'] = None
+            report_dict['metricd']['statistics'] = {}
+        return report_dict
+
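+# Illustration (assumption, not from the original source): a typical payload
+# returned by this status endpoint:
+#   {"storage": {"summary": {"metrics": 2, "measures": 10}},
+#    "metricd": {"processors": ["host-1.a4fe3b"],
+#                "statistics": {"host-1.a4fe3b": {}}}}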
+
+class MetricsBatchController(object):
+    measures = MetricsMeasuresBatchController()
+
+
+class ResourcesMetricsBatchController(object):
+    measures = ResourcesMetricsMeasuresBatchController()
+
+
+class ResourcesBatchController(object):
+    metrics = ResourcesMetricsBatchController()
+
+
+class BatchController(object):
+    metrics = MetricsBatchController()
+    resources = ResourcesBatchController()
+
+
+# Retry with exponential backoff, waiting at most 60 seconds between attempts
+@tenacity.retry(
+    wait=tenacity.wait_exponential(multiplier=0.5, max=60),
+    retry=tenacity.retry_if_exception_type(
+        (indexer.NoSuchResource, indexer.ResourceAlreadyExists,
+         indexer.ResourceTypeAlreadyExists,
+         indexer.NamedMetricAlreadyExists)))
+def get_or_create_resource_and_metrics(
+        creator, rid, original_resource_id, metric_names,
+        resource_attributes,
+        resource_type, resource_type_attributes=None):
+    try:
+        r = pecan.request.indexer.get_resource(resource_type, rid,
+                                               with_metrics=True)
+    except indexer.NoSuchResourceType:
+        if resource_type_attributes:
+            enforce("create resource type", {
+                'name': resource_type,
+                'state': 'creating',
+                'attributes': resource_type_attributes,
+            })
+
+            schema = pecan.request.indexer.get_resource_type_schema()
+            rt = schema.resource_type_from_dict(
+                resource_type, resource_type_attributes, 'creating')
+            pecan.request.indexer.create_resource_type(rt)
+            raise tenacity.TryAgain
+        else:
+            raise
+    except indexer.UnexpectedResourceTypeState as e:
+        # NOTE(sileht): currently being created by another thread
+        if not e.state.endswith("_error"):
+            raise tenacity.TryAgain
+
+    if r:
+        enforce("update resource", r)
+        exists_metric_names = [m.name for m in r.metrics]
+        metrics = MetricsSchema(dict(
+            (m, {}) for m in metric_names
+            if m not in exists_metric_names
+        ))
+        if metrics:
+            return pecan.request.indexer.update_resource(
+                resource_type, rid,
+                metrics=metrics,
+                append_metrics=True,
+                create_revision=False
+            ).metrics
+        else:
+            return r.metrics
+    else:
+        metrics = MetricsSchema(dict((m, {}) for m in metric_names))
+        target = {
+            "id": rid,
+            "resource_type": resource_type,
+            "creator": creator,
+            "original_resource_id": original_resource_id,
+            "metrics": metrics,
+        }
+        target.update(resource_attributes)
+        enforce("create resource", target)
+
+        kwargs = resource_attributes  # no copy needed, not used afterwards
+        kwargs['metrics'] = metrics
+        kwargs['original_resource_id'] = original_resource_id
+
+        try:
+            return pecan.request.indexer.create_resource(
+                resource_type, rid, creator, **kwargs
+            ).metrics
+        except indexer.ResourceAlreadyExists as e:
+            # NOTE(sileht): ensure the rid is not registered within
+            # another resource type.
+            r = pecan.request.indexer.get_resource('generic', rid)
+            if r.type != resource_type:
+                abort(409, e)
+            raise
+
+
+class PrometheusWriteController(rest.RestController):
+
+    PROMETHEUS_RESOURCE_TYPE = {
+        "instance": {"type": "string",
+                     "min_length": 1,
+                     "max_length": 512,
+                     "required": True},
+        "job": {"type": "string",
+                "min_length": 1,
+                "max_length": 512,
+                "required": True}
+    }
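+
+    # Illustration (not part of the original source): a remote-write series
+    # labelled {__name__="up", job="node", instance="host1:9100"} is stored
+    # by post() below as a metric named "up" on a "prometheus" resource
+    # whose original resource id is "node@host1:9100".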
+
+    @pecan.expose()
+    def post(self):
+        buf = snappy.uncompress(pecan.request.body)
+        f = remote_pb2.WriteRequest()
+        f.ParseFromString(buf)
+        measures_by_rid = collections.defaultdict(dict)
+        for ts in f.timeseries:
+            attrs = dict((l.name, l.value) for l in ts.labels)
+            original_rid = (attrs.get("job", "none"),
+                            attrs.get("instance", "none"))
+            name = attrs['__name__']
+            if ts.samples:
+                data = [{'timestamp': s.timestamp_ms / 1000.0,
+                         'value': s.value} for s in ts.samples]
+                measures_by_rid[original_rid][name] = validate(
+                    MeasuresListSchema, data)
+
+        creator = pecan.request.auth_helper.get_current_user(pecan.request)
+
+        measures_to_batch = {}
+        for (job, instance), measures in measures_by_rid.items():
+            original_rid = '%s@%s' % (job, instance)
+            rid = ResourceUUID(original_rid, creator=creator)
+            metric_names = list(measures.keys())
+            timeout = pecan.request.conf.api.operation_timeout
+            metrics = get_or_create_resource_and_metrics.retry_with(
+                stop=tenacity.stop_after_delay(timeout))(
+                    creator, rid, original_rid, metric_names,
+                    dict(job=job, instance=instance),
+                    "prometheus", self.PROMETHEUS_RESOURCE_TYPE)
+
+            for metric in metrics:
+                enforce("post measures", metric)
+
+            measures_to_batch.update(
+                dict((metric.id, measures[metric.name]) for metric in
+                     metrics if metric.name in measures))
+
+        pecan.request.incoming.add_measures_batch(measures_to_batch)
+        pecan.response.status = 202
+
+
+class PrometheusController(object):
+    write = PrometheusWriteController()
+
+
+class V1Controller(object):
+
+    def __init__(self):
+        # FIXME(sileht): split controllers to avoid lazy loading
+        from gnocchi.rest.aggregates import api as agg_api
+        from gnocchi.rest import influxdb
+
+        self.sub_controllers = {
+            "search": SearchController(),
+            "archive_policy": ArchivePoliciesController(),
+            "archive_policy_rule": ArchivePolicyRulesController(),
+            "metric": MetricsController(),
+            "batch": BatchController(),
+            "resource": ResourcesByTypeController(),
+            "resource_type": ResourceTypesController(),
+            "aggregation": AggregationController(),
+            "capabilities": CapabilityController(),
+            "status": StatusController(),
+            "aggregates": agg_api.AggregatesController(),
+            "influxdb": influxdb.InfluxDBController(),
+        }
+        for name, ctrl in self.sub_controllers.items():
+            setattr(self, name, ctrl)
+        if PROMETHEUS_SUPPORTED:
+            setattr(self, "prometheus", PrometheusController())
+
+    @pecan.expose('json')
+    def index(self):
+        return {
+            "version": "1.0",
+            "links": [
+                {"rel": "self",
+                 "href": pecan.request.application_url}
+            ] + [
+                {"rel": name,
+                 "href": pecan.request.application_url + "/" + name}
+                for name in sorted(self.sub_controllers)
+            ]
+        }
+
+
+class VersionsController(object):
+    @staticmethod
+    @pecan.expose('json')
+    def index():
+        return {
+            "build": gnocchi.__version__,
+            "versions": [
+                {
+                    "status": "CURRENT",
+                    "links": [
+                        {
+                            "rel": "self",
+                            "href": pecan.request.application_url + "/v1/"
+                            }
+                        ],
+                    "id": "v1.0",
+                    "updated": "2015-03-19"
+                    }
+                ]
+            }
diff --git a/gnocchi/rest/app.py b/gnocchi/rest/app.py
new file mode 100644
index 0000000000000000000000000000000000000000..983846623f7136b75bb9583198a6c52a71001a99
--- /dev/null
+++ b/gnocchi/rest/app.py
@@ -0,0 +1,203 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2018 Red Hat
+# Copyright © 2014-2016 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import os
+import pkg_resources
+import threading
+import uuid
+
+import daiquiri
+from oslo_middleware import cors
+from oslo_policy import policy
+from paste import deploy
+import pecan
+from pecan import jsonify
+from pecan import templating
+from stevedore import driver
+import webob.exc
+
+from gnocchi import chef
+from gnocchi.cli import metricd
+from gnocchi import exceptions
+from gnocchi import incoming as gnocchi_incoming
+from gnocchi import indexer as gnocchi_indexer
+from gnocchi import json
+from gnocchi.rest import http_proxy_to_wsgi
+from gnocchi import storage as gnocchi_storage
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+# Register our encoder by default for everything
+jsonify.jsonify.register(object)(json.to_primitive)
+
+
+class GnocchiHook(pecan.hooks.PecanHook):
+
+    def __init__(self, conf):
+        self.backends = {}
+        self.conf = conf
+        self.policy_enforcer = policy.Enforcer(conf)
+        self.auth_helper = driver.DriverManager("gnocchi.rest.auth_helper",
+                                                conf.api.auth_mode,
+                                                invoke_on_load=True).driver
+
+    def on_route(self, state):
+        state.request.coordinator = self._lazy_load('coordinator')
+        state.request.storage = self._lazy_load('storage')
+        state.request.indexer = self._lazy_load('indexer')
+        state.request.incoming = self._lazy_load('incoming')
+        state.request.chef = chef.Chef(
+            state.request.coordinator,
+            state.request.incoming,
+            state.request.indexer,
+            state.request.storage,
+        )
+        state.request.conf = self.conf
+        state.request.policy_enforcer = self.policy_enforcer
+        state.request.auth_helper = self.auth_helper
+
+    @staticmethod
+    def after(state):
+        # NOTE(sileht): uwsgi expects the application to consume the
+        # wsgi.input fd. Otherwise the connection with the application
+        # freezes. In our case, the freeze can occur if we raise an error
+        # before we read request.body_file, or if json.load(body_file)
+        # doesn't read the whole file. This ensures we always read the
+        # full body_file.
+        if state.request.content_length is not None:
+            state.request.body_file.read()
+
+    BACKEND_LOCKS = {
+        'coordinator': threading.Lock(),
+        'storage': threading.Lock(),
+        'incoming': threading.Lock(),
+        'indexer': threading.Lock(),
+    }
+
+    def _lazy_load(self, name):
+        # NOTE(sileht): We don't care about raising errors here; if
+        # something fails, this will just return a 500 until the backend
+        # is ready.
+        if name not in self.backends:
+            with self.BACKEND_LOCKS[name]:
+                # Recheck; maybe it has been created in the meantime.
+                if name not in self.backends:
+                    if name == "coordinator":
+                        # NOTE(jd) This coordinator is never stopped. I don't
+                        # think it's a real problem since the Web app can
+                        # never really be stopped anyway, except by quitting
+                        # it entirely.
+                        self.backends[name] = (
+                            metricd.get_coordinator_and_start(
+                                str(uuid.uuid4()),
+                                self.conf.coordination_url)
+                        )
+                    elif name == "storage":
+                        self.backends[name] = (
+                            gnocchi_storage.get_driver(self.conf)
+                        )
+                    elif name == "incoming":
+                        self.backends[name] = (
+                            gnocchi_incoming.get_driver(self.conf)
+                        )
+                    elif name == "indexer":
+                        self.backends[name] = (
+                            gnocchi_indexer.get_driver(self.conf)
+                        )
+                    else:
+                        raise RuntimeError("Unknown driver %s" % name)
+
+        return self.backends[name]
+
+
+class NotImplementedMiddleware(object):
+    def __init__(self, app):
+        self.app = app
+
+    def __call__(self, environ, start_response):
+        try:
+            return self.app(environ, start_response)
+        except exceptions.NotImplementedError:
+            raise webob.exc.HTTPNotImplemented(
+                "Sorry, this Gnocchi server does "
+                "not implement this feature 😞")
+
+
+class JsonRenderer(templating.JsonRenderer):
+    def render(self, template_path, namespace):
+        # NOTE(sileht): Unlike the builtin renderer of pecan,
+        # we don't want to return "null" for None. Our API
+        # only returns empty strings, lists or dicts.
+        if namespace is None:
+            return ""
+        return super(JsonRenderer, self).render(template_path, namespace)
+
+
+# NOTE(sileht): pastedeploy uses ConfigParser to handle
+# global_conf. Since the Python 3 ConfigParser doesn't allow
+# storing objects as config values and only permits strings, we
+# store the objects created before paste loads the app in a global
+# var so we can pass them along. Each loaded app stores its
+# configuration under a unique key to be concurrency safe.
+global APPCONFIGS
+APPCONFIGS = {}
+
+
+def load_app(conf, not_implemented_middleware=True):
+    global APPCONFIGS
+
+    # Build the WSGI app
+    cfg_path = conf.api.paste_config
+    if not os.path.isabs(cfg_path):
+        cfg_path = conf.find_file(cfg_path)
+
+    if cfg_path is None or not os.path.exists(cfg_path):
+        LOG.debug("No api-paste configuration file found! Using default.")
+        cfg_path = os.path.abspath(pkg_resources.resource_filename(
+            __name__, "api-paste.ini"))
+
+    config = dict(conf=conf,
+                  not_implemented_middleware=not_implemented_middleware)
+    configkey = str(uuid.uuid4())
+    APPCONFIGS[configkey] = config
+
+    LOG.info("WSGI config used: %s", cfg_path)
+
+    appname = "gnocchi+" + conf.api.auth_mode
+    app = deploy.loadapp("config:" + cfg_path, name=appname,
+                         global_conf={'configkey': configkey})
+    return http_proxy_to_wsgi.HTTPProxyToWSGI(
+        cors.CORS(app, conf=conf), conf=conf)
+
+
+def _setup_app(root, conf, not_implemented_middleware):
+    app = pecan.make_app(
+        root,
+        hooks=(GnocchiHook(conf),),
+        guess_content_type_from_ext=False,
+        custom_renderers={"json": JsonRenderer}
+    )
+
+    if not_implemented_middleware:
+        app = webob.exc.HTTPExceptionMiddleware(NotImplementedMiddleware(app))
+
+    return app
+
+
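+# For example (an illustrative sketch; the shipped api-paste.ini is the
+# authoritative version), the paste pipeline ends up calling app_factory()
+# below with the configkey that load_app() put in global_conf, so the conf
+# object can be recovered:
+#
+#   [app:gnocchiv1]
+#   paste.app_factory = gnocchi.rest.app:app_factory
+#   root = gnocchi.rest.api.V1Controller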
+def app_factory(global_config, **local_conf):
+    global APPCONFIGS
+    appconfig = APPCONFIGS.get(global_config.get('configkey'))
+    return _setup_app(root=local_conf.get('root'), **appconfig)
diff --git a/gnocchi/rest/auth_helper.py b/gnocchi/rest/auth_helper.py
new file mode 100644
index 0000000000000000000000000000000000000000..1362f83ae10b0ed025a0ce3a29f832d156630c24
--- /dev/null
+++ b/gnocchi/rest/auth_helper.py
@@ -0,0 +1,170 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import webob
+import werkzeug.http
+
+from gnocchi.rest import api
+
+
+class KeystoneAuthHelper(object):
+    @staticmethod
+    def get_current_user(request):
+        # FIXME(jd) should have domain but should not break existing :(
+        user_id = request.headers.get("X-User-Id", "")
+        project_id = request.headers.get("X-Project-Id", "")
+        return user_id + ":" + project_id
+
+    @staticmethod
+    def get_auth_info(request):
+        user_id = request.headers.get("X-User-Id")
+        project_id = request.headers.get("X-Project-Id")
+        return {
+            "user": (user_id or "") + ":" + (project_id or ""),
+            "user_id": user_id,
+            "project_id": project_id,
+            'domain_id': request.headers.get("X-Domain-Id"),
+            'roles': request.headers.get("X-Roles", "").split(","),
+        }
+
+    @staticmethod
+    def get_resource_policy_filter(request, rule, resource_type, prefix=None):
+        try:
+            # Check if the policy allows the user to list any resource
+            api.enforce(rule, {
+                "resource_type": resource_type,
+            })
+        except webob.exc.HTTPForbidden:
+            policy_filter = []
+            project_id = request.headers.get("X-Project-Id")
+            target = {}
+            if prefix:
+                resource = target[prefix] = {}
+            else:
+                resource = target
+
+            resource["resource_type"] = resource_type
+            resource["project_id"] = project_id
+            try:
+                # Check if the policy allows the user to list resources linked
+                # to their project
+                api.enforce(rule, target)
+            except webob.exc.HTTPForbidden:
+                pass
+            else:
+                policy_filter.append({"=": {"project_id": project_id}})
+
+            del resource["project_id"]
+            resource["created_by_project_id"] = project_id
+            try:
+                # Check if the policy allows the user to list resources linked
+                # to their created_by_project
+                api.enforce(rule, target)
+            except webob.exc.HTTPForbidden:
+                pass
+            else:
+                if project_id:
+                    policy_filter.append(
+                        {"like": {"creator": "%:" + project_id}})
+                else:
+                    policy_filter.append({"=": {"creator": None}})
+
+            if not policy_filter:
+                # We need to have at least one policy filter in place
+                api.abort(403, "Insufficient privileges")
+
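+            # The returned filter is e.g. (an illustrative sketch):
+            #   {"or": [{"=": {"project_id": "<id>"}},
+            #           {"like": {"creator": "%:<id>"}}]}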
+            return {"or": policy_filter}
+
+    @staticmethod
+    def get_metric_policy_filter(request, rule):
+        try:
+            # Check if the policy allows the user to list any metric
+            api.enforce(rule, {})
+        except webob.exc.HTTPForbidden:
+            policy_filter = []
+            project_id = request.headers.get("X-Project-Id")
+            try:
+                # Check if the policy allows the user to list metrics linked
+                # to their created_by_project
+                api.enforce(rule, {
+                    "created_by_project_id": project_id,
+                })
+            except webob.exc.HTTPForbidden:
+                pass
+            else:
+                policy_filter.append(
+                    {"like": {"creator": "%:" + project_id}})
+
+            if not policy_filter:
+                # We need to have at least one policy filter in place
+                api.abort(403, "Insufficient privileges")
+
+            return {"or": policy_filter}
+
+
+class BasicAuthHelper(object):
+    @staticmethod
+    def get_current_user(request):
+        auth = werkzeug.http.parse_authorization_header(
+            request.headers.get("Authorization"))
+        if auth is None:
+            api.abort(401)
+        return auth.username
+
+    def get_auth_info(self, request):
+        user = self.get_current_user(request)
+        roles = []
+        if user == "admin":
+            roles.append("admin")
+        return {
+            "user": user,
+            "roles": roles
+        }
+
+    @staticmethod
+    def get_resource_policy_filter(request, rule, resource_type, prefix=None):
+        return None
+
+    @staticmethod
+    def get_metric_policy_filter(request, rule):
+        return None
+
+
+class RemoteUserAuthHelper(object):
+    @staticmethod
+    def get_current_user(request):
+        user = request.remote_user
+        if user is None:
+            api.abort(401)
+        return user.decode('iso-8859-1')
+
+    def get_auth_info(self, request):
+        user = self.get_current_user(request)
+        roles = []
+        if user == "admin":
+            roles.append("admin")
+        return {
+            "user": user,
+            "roles": roles
+        }
+
+    @staticmethod
+    def get_resource_policy_filter(request, rule, resource_type, prefix=None):
+        return None
+
+    @staticmethod
+    def get_metric_policy_filter(request, rule):
+        return None
diff --git a/gnocchi/rest/http_proxy_to_wsgi.py b/gnocchi/rest/http_proxy_to_wsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..9b86360e243f6151dbb5e14a363d6d57a2630cc8
--- /dev/null
+++ b/gnocchi/rest/http_proxy_to_wsgi.py
@@ -0,0 +1,116 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied. See the License for the specific language governing permissions and
+# limitations under the License.
+from oslo_config import cfg
+
+import webob.dec
+import webob.request
+import webob.response
+
+
+OPTS = (
+    cfg.BoolOpt('enable_proxy_headers_parsing',
+                deprecated_group="oslo_middleware",
+                default=False,
+                help="Whether the application is behind a proxy or not. "
+                     "This determines if the middleware should parse the "
+                     "headers or not."),
+)
+
+
+class NoContentTypeResponse(webob.response.Response):
+
+    default_content_type = None  # prevents webob from assigning a content type
+
+
+class NoContentTypeRequest(webob.request.Request):
+
+    ResponseClass = NoContentTypeResponse
+
+
+class HTTPProxyToWSGI(object):
+    """HTTP proxy to WSGI termination middleware.
+
+    This middleware overloads WSGI environment variables with the ones
+    provided by the remote HTTP reverse proxy.
+
+    """
+
+    def __init__(self, application, conf=None):
+        """Base middleware constructor
+
+        :param  conf: a cfg.ConfigOpts object
+        """
+        self.application = application
+        self.oslo_conf = conf
+
+    @webob.dec.wsgify(RequestClass=NoContentTypeRequest)
+    def __call__(self, req):
+        self.process_request(req)
+        return req.get_response(self.application)
+
+    @staticmethod
+    def _parse_rfc7239_header(header):
+        """Parses RFC7239 Forward headers.
+
+        e.g. for=192.0.2.60;proto=http, for=192.0.2.60;by=203.0.113.43
+
+        """
+        result = []
+        for proxy in header.split(","):
+            entry = {}
+            for d in proxy.split(";"):
+                key, _, value = d.partition("=")
+                entry[key.lower().strip()] = value.strip()
+            result.append(entry)
+        return result
+
+    def process_request(self, req):
+        if not self.oslo_conf.api.enable_proxy_headers_parsing:
+            return
+        fwd_hdr = req.environ.get("HTTP_FORWARDED")
+        if fwd_hdr:
+            proxies = self._parse_rfc7239_header(fwd_hdr)
+            # Let's use the value from the first proxy
+            if proxies:
+                proxy = proxies[0]
+
+                forwarded_proto = proxy.get("proto")
+                if forwarded_proto:
+                    req.environ['wsgi.url_scheme'] = forwarded_proto
+
+                forwarded_host = proxy.get("host")
+                if forwarded_host:
+                    req.environ['HTTP_HOST'] = forwarded_host
+
+                forwarded_for = proxy.get("for")
+                if forwarded_for:
+                    req.environ['REMOTE_ADDR'] = forwarded_for
+
+        else:
+            # World before RFC7239
+            forwarded_proto = req.environ.get("HTTP_X_FORWARDED_PROTO")
+            if forwarded_proto:
+                req.environ['wsgi.url_scheme'] = forwarded_proto
+
+            forwarded_host = req.environ.get("HTTP_X_FORWARDED_HOST")
+            if forwarded_host:
+                req.environ['HTTP_HOST'] = forwarded_host
+
+            forwarded_for = req.environ.get("HTTP_X_FORWARDED_FOR")
+            if forwarded_for:
+                req.environ['REMOTE_ADDR'] = forwarded_for
+
+        v = req.environ.get("HTTP_X_FORWARDED_PREFIX")
+        if v:
+            req.environ['SCRIPT_NAME'] = v + req.environ['SCRIPT_NAME']
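+        # e.g. an X-Forwarded-Prefix of "/telemetry" turns a SCRIPT_NAME of
+        # "" into "/telemetry", so generated URLs include the proxy prefix.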
diff --git a/gnocchi/rest/influxdb.py b/gnocchi/rest/influxdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..58524f54dae5f71414404b06543af059b1cda0a8
--- /dev/null
+++ b/gnocchi/rest/influxdb.py
@@ -0,0 +1,261 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+import time
+
+import gnocchi
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi.rest import api
+from gnocchi import utils
+
+import daiquiri
+import numpy
+import pecan
+from pecan import rest
+import pyparsing
+import six
+import tenacity
+try:
+    import uwsgi
+except ImportError:
+    uwsgi = None
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+boolean = "False|True|false|true|FALSE|TRUE|F|T|f|t"
+boolean = pyparsing.Regex(boolean).setParseAction(
+    lambda t: t[0].lower()[0] == "t")
+
+quoted_string = pyparsing.QuotedString('"', escChar="\\")
+unquoted_string = pyparsing.OneOrMore(
+    pyparsing.CharsNotIn(" ,=\\") +
+    pyparsing.Optional(
+        pyparsing.OneOrMore(
+            (pyparsing.Literal("\\ ") |
+             pyparsing.Literal("\\,") |
+             pyparsing.Literal("\\=") |
+             pyparsing.Literal("\\")).setParseAction(
+                 lambda s, loc, tok: tok[0][-1])))).setParseAction(
+                     lambda s, loc, tok: "".join(list(tok)))
+measurement = tag_key = tag_value = field_key = quoted_string | unquoted_string
+number = r"[+-]?\d+(?:\.\d*)?(?:[eE][+-]?\d+)?"
+number = pyparsing.Regex(number).setParseAction(
+    lambda s, loc, tok: float(tok[0]))
+integer = (
+    pyparsing.Word(pyparsing.nums).setParseAction(
+        lambda s, loc, tok: int(tok[0])) +
+    pyparsing.Suppress("i")
+    )
+field_value = integer | number | quoted_string
+timestamp = pyparsing.Word(pyparsing.nums).setParseAction(
+    lambda s, loc, tok: numpy.datetime64(int(tok[0]), 'ns'))
+
+line_protocol = (
+    measurement +
+    # Tags
+    pyparsing.Optional(pyparsing.Suppress(",") +
+                       pyparsing.delimitedList(
+                           pyparsing.OneOrMore(
+                               pyparsing.Group(
+                                   tag_key +
+                                   pyparsing.Suppress("=") +
+                                   tag_value), ",")).setParseAction(
+                                       lambda s, loc, tok: dict(list(tok))),
+                       default={}) +
+    pyparsing.Suppress(" ") +
+    # Fields
+    pyparsing.delimitedList(
+        pyparsing.OneOrMore(
+            pyparsing.Group(field_key +
+                            pyparsing.Suppress("=") +
+                            field_value), ",")).setParseAction(
+                                lambda s, loc, tok: dict(list(tok))) +
+    # Timestamp
+    pyparsing.Optional(pyparsing.Suppress(" ") + timestamp, default=None)
+).leaveWhitespace()
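+# A minimal parsing sketch (values are illustrative): a line such as
+#   cpu,host=web01 usage=0.5,idle=99i 1434055562000000000
+# parses to the measurement "cpu", the tag dict {"host": "web01"}, the
+# field dict {"usage": 0.5, "idle": 99} and a numpy.datetime64 timestamp
+# in nanoseconds; the timestamp defaults to None when absent.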
+
+
+query_parser = (
+    pyparsing.Suppress(pyparsing.CaselessLiteral("create")) +
+    pyparsing.Suppress(pyparsing.CaselessLiteral("database")) +
+    pyparsing.Suppress(pyparsing.White()) +
+    (pyparsing.QuotedString('"', escChar="\\") |
+     pyparsing.Word(pyparsing.alphas + "_",
+                    pyparsing.alphanums + "_")) +
+    pyparsing.Suppress(
+        pyparsing.Optional(pyparsing.Optional(pyparsing.White()) +
+                           pyparsing.Optional(pyparsing.Literal(";"))))
+)
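+# Only "CREATE DATABASE <name>" is recognized; e.g. parsing
+# 'CREATE DATABASE "mydb";' yields ["mydb"] (a sketch; any other query
+# fails to parse and is answered with HTTP 501 in post_query below).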
+
+
+class InfluxDBController(rest.RestController):
+    _custom_actions = {
+        'ping': ['HEAD', 'GET'],
+        'query': ['POST'],
+        'write': ['POST'],
+    }
+
+    DEFAULT_TAG_RESOURCE_ID = "host"
+
+    @pecan.expose()
+    def ping(self):
+        pecan.response.headers['X-Influxdb-Version'] = (
+            "Gnocchi " + gnocchi.__version__
+        )
+
+    @pecan.expose('json')
+    def post_query(self, q=None):
+        if q is not None:
+            try:
+                query = query_parser.parseString(q)
+            except pyparsing.ParseException:
+                api.abort(501, {"cause": "Not implemented error",
+                                "detail": "q",
+                                "reason": "Query not implemented"})
+            resource_type = query[0]
+            api.enforce("create resource type", {"name": resource_type})
+            schema = pecan.request.indexer.get_resource_type_schema()
+            rt = schema.resource_type_from_dict(resource_type, {}, 'creating')
+            try:
+                pecan.request.indexer.create_resource_type(rt)
+            except indexer.ResourceTypeAlreadyExists:
+                pass
+            pecan.response.status = 204
+
+    @staticmethod
+    def _write_get_lines():
+        encoding = pecan.request.headers.get('Transfer-Encoding', "").lower()
+        if encoding == "chunked":
+            # TODO(sileht): Support reading chunks without uwsgi when
+            # pecan.request.environ['wsgi.input_terminated'] is set.
+            # https://github.com/unbit/uwsgi/issues/1428
+            if uwsgi is None:
+                api.abort(
+                    501, {"cause": "Not implemented error",
+                          "reason": "This server is not running with uwsgi"})
+            return encoding, uwsgi.chunked_read()
+        return None, pecan.request.body
+
+    @pecan.expose('json')
+    def post_write(self, db="influxdb"):
+
+        creator = pecan.request.auth_helper.get_current_user(pecan.request)
+        tag_to_rid = pecan.request.headers.get(
+            "X-Gnocchi-InfluxDB-Tag-Resource-ID",
+            self.DEFAULT_TAG_RESOURCE_ID)
+
+        while True:
+            encoding, chunk = self._write_get_lines()
+
+            # If chunk is empty then this is over.
+            if not chunk:
+                break
+
+            # Compute now on a per-chunk basis
+            now = numpy.datetime64(int(time.time() * 10e8), 'ns')
+
+            # resources = { resource_id: {
+            #     metric_name: [ incoming.Measure(t, v), …], …
+            #   }, …
+            # }
+            resources = collections.defaultdict(
+                lambda: collections.defaultdict(list))
+            for line_number, line in enumerate(chunk.split(b"\n")):
+                # Ignore empty lines
+                if not line:
+                    continue
+
+                try:
+                    measurement, tags, fields, timestamp = (
+                        line_protocol.parseString(line.decode())
+                    )
+                except (UnicodeDecodeError, SyntaxError,
+                        pyparsing.ParseException):
+                    api.abort(400, {
+                        "cause": "Value error",
+                        "detail": "line",
+                        "reason": "Unable to parse line %d" % (
+                            line_number + 1),
+                    })
+
+                if timestamp is None:
+                    timestamp = now
+
+                try:
+                    resource_id = tags.pop(tag_to_rid)
+                except KeyError:
+                    api.abort(400, {
+                        "cause": "Value error",
+                        "detail": "key",
+                        "reason": "Unable to find key `%s' in tags" % (
+                            tag_to_rid),
+                    })
+
+                tags_str = (("@" if tags else "") +
+                            ",".join(("%s=%s" % (k, tags[k]))
+                                     for k in sorted(tags)))
+
+                for field_name, field_value in six.iteritems(fields):
+                    if isinstance(field_value, str):
+                        # We do not support field value that are not numerical
+                        continue
+
+                    # The metric name is:
+                    # <measurement>.<field_key>@<tag_key>=<tag_value>,…
+                    # with tags sorted by key. "/" is replaced with "_"
+                    # because Gnocchi does not support "/" in metric names.
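+                    # e.g. measurement "cpu", field "usage" and tags
+                    # {"region": "eu"} yield "cpu.usage@region=eu".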
+                    metric_name = (
+                        measurement + "." + field_name + tags_str
+                    ).replace("/", "_")
+
+                    resources[resource_id][metric_name].append(
+                        incoming.Measure(timestamp, field_value))
+
+            measures_to_batch = {}
+            for resource_name, metrics_and_measures in six.iteritems(
+                    resources):
+                resource_id = utils.ResourceUUID(
+                    resource_name, creator=creator)
+                LOG.debug("Getting metrics from resource `%s'", resource_name)
+                timeout = pecan.request.conf.api.operation_timeout
+                metrics = (
+                    api.get_or_create_resource_and_metrics.retry_with(
+                        stop=tenacity.stop_after_delay(timeout))(
+                            creator, resource_id, resource_name,
+                            metrics_and_measures.keys(),
+                            {}, db)
+                )
+
+                for metric in metrics:
+                    api.enforce("post measures", metric)
+
+                measures_to_batch.update(
+                    dict((metric.id, metrics_and_measures[metric.name])
+                         for metric in metrics
+                         if metric.name in metrics_and_measures))
+
+            LOG.debug("Add measures batch for %d metrics",
+                      len(measures_to_batch))
+            pecan.request.incoming.add_measures_batch(measures_to_batch)
+            pecan.response.status = 204
+
+            if encoding != "chunked":
+                return
diff --git a/gnocchi/rest/policy.json b/gnocchi/rest/policy.json
new file mode 100644
index 0000000000000000000000000000000000000000..b747354ddabac138588f2d72876f0f3f3cd6b81b
--- /dev/null
+++ b/gnocchi/rest/policy.json
@@ -0,0 +1,42 @@
+{
+    "admin_or_creator": "role:admin or user:%(creator)s or project_id:%(created_by_project_id)s",
+    "resource_owner": "project_id:%(project_id)s",
+    "metric_owner": "project_id:%(resource.project_id)s",
+
+    "get status": "role:admin",
+
+    "create resource": "",
+    "get resource": "rule:admin_or_creator or rule:resource_owner",
+    "update resource": "rule:admin_or_creator",
+    "delete resource": "rule:admin_or_creator",
+    "delete resources": "rule:admin_or_creator",
+    "list resource": "rule:admin_or_creator or rule:resource_owner",
+    "search resource": "rule:admin_or_creator or rule:resource_owner",
+
+    "create resource type": "role:admin",
+    "delete resource type": "role:admin",
+    "update resource type": "role:admin",
+    "list resource type": "",
+    "get resource type": "",
+
+    "get archive policy": "",
+    "list archive policy": "",
+    "create archive policy": "role:admin",
+    "update archive policy": "role:admin",
+    "delete archive policy": "role:admin",
+
+    "create archive policy rule": "role:admin",
+    "get archive policy rule": "",
+    "list archive policy rule": "",
+    "update archive policy rule": "role:admin",
+    "delete archive policy rule": "role:admin",
+
+    "create metric": "",
+    "delete metric": "rule:admin_or_creator",
+    "get metric": "rule:admin_or_creator or rule:metric_owner",
+    "search metric": "rule:admin_or_creator or rule:metric_owner",
+    "list metric": "rule:admin_or_creator or rule:metric_owner",
+
+    "get measures":  "rule:admin_or_creator or rule:metric_owner",
+    "post measures":  "rule:admin_or_creator"
+}
diff --git a/gnocchi/rest/prometheus/README b/gnocchi/rest/prometheus/README
new file mode 100644
index 0000000000000000000000000000000000000000..0c79cb4bd68b6f4ee63cd0d58b38f087841df82f
--- /dev/null
+++ b/gnocchi/rest/prometheus/README
@@ -0,0 +1,6 @@
+# remote.proto comes from
+
+https://raw.githubusercontent.com/prometheus/prometheus/master/storage/remote/remote.proto
+
+# remote_pb2.py is generated with:
+protoc --proto_path=. --python_out=. remote.proto
diff --git a/gnocchi/rest/prometheus/__init__.py b/gnocchi/rest/prometheus/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/rest/prometheus/remote.proto b/gnocchi/rest/prometheus/remote.proto
new file mode 100644
index 0000000000000000000000000000000000000000..6f09c9efcb9528ea0d238f9d0a529c7e6a67d2d2
--- /dev/null
+++ b/gnocchi/rest/prometheus/remote.proto
@@ -0,0 +1,68 @@
+// Copyright 2016 Prometheus Team
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+syntax = "proto3";
+
+package remote;
+
+message Sample {
+  double value       = 1;
+  int64 timestamp_ms = 2;
+}
+
+message LabelPair {
+  string name  = 1;
+  string value = 2;
+}
+
+message TimeSeries {
+  repeated LabelPair labels = 1;
+  // Sorted by time, oldest sample first.
+  repeated Sample samples   = 2;
+}
+
+message WriteRequest {
+  repeated TimeSeries timeseries = 1;
+}
+
+message ReadRequest {
+  repeated Query queries = 1;
+}
+
+message ReadResponse {
+  // In same order as the request's queries.
+  repeated QueryResult results = 1;
+}
+
+message Query {
+  int64 start_timestamp_ms = 1;
+  int64 end_timestamp_ms = 2;
+  repeated LabelMatcher matchers = 3;
+}
+
+enum MatchType {
+  EQUAL = 0;
+  NOT_EQUAL = 1;
+  REGEX_MATCH = 2;
+  REGEX_NO_MATCH = 3;
+}
+
+message LabelMatcher {
+  MatchType type = 1;
+  string name = 2;
+  string value = 3;
+}
+
+message QueryResult {
+  repeated TimeSeries timeseries = 1;
+}
diff --git a/gnocchi/rest/prometheus/remote_pb2.py b/gnocchi/rest/prometheus/remote_pb2.py
new file mode 100644
index 0000000000000000000000000000000000000000..7eeae9470eb388263da9715d16e95bded74b0490
--- /dev/null
+++ b/gnocchi/rest/prometheus/remote_pb2.py
@@ -0,0 +1,475 @@
+# Generated by the protocol buffer compiler.  DO NOT EDIT!
+# source: remote.proto
+
+import sys
+_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
+from google.protobuf.internal import enum_type_wrapper
+from google.protobuf import descriptor as _descriptor
+from google.protobuf import message as _message
+from google.protobuf import reflection as _reflection
+from google.protobuf import symbol_database as _symbol_database
+from google.protobuf import descriptor_pb2
+# @@protoc_insertion_point(imports)
+
+_sym_db = _symbol_database.Default()
+
+
+
+
+DESCRIPTOR = _descriptor.FileDescriptor(
+  name='remote.proto',
+  package='remote',
+  syntax='proto3',
+  serialized_pb=_b('\n\x0cremote.proto\x12\x06remote\"-\n\x06Sample\x12\r\n\x05value\x18\x01 \x01(\x01\x12\x14\n\x0ctimestamp_ms\x18\x02 \x01(\x03\"(\n\tLabelPair\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\t\"P\n\nTimeSeries\x12!\n\x06labels\x18\x01 \x03(\x0b\x32\x11.remote.LabelPair\x12\x1f\n\x07samples\x18\x02 \x03(\x0b\x32\x0e.remote.Sample\"6\n\x0cWriteRequest\x12&\n\ntimeseries\x18\x01 \x03(\x0b\x32\x12.remote.TimeSeries\"-\n\x0bReadRequest\x12\x1e\n\x07queries\x18\x01 \x03(\x0b\x32\r.remote.Query\"4\n\x0cReadResponse\x12$\n\x07results\x18\x01 \x03(\x0b\x32\x13.remote.QueryResult\"e\n\x05Query\x12\x1a\n\x12start_timestamp_ms\x18\x01 \x01(\x03\x12\x18\n\x10\x65nd_timestamp_ms\x18\x02 \x01(\x03\x12&\n\x08matchers\x18\x03 \x03(\x0b\x32\x14.remote.LabelMatcher\"L\n\x0cLabelMatcher\x12\x1f\n\x04type\x18\x01 \x01(\x0e\x32\x11.remote.MatchType\x12\x0c\n\x04name\x18\x02 \x01(\t\x12\r\n\x05value\x18\x03 \x01(\t\"5\n\x0bQueryResult\x12&\n\ntimeseries\x18\x01 \x03(\x0b\x32\x12.remote.TimeSeries*J\n\tMatchType\x12\t\n\x05\x45QUAL\x10\x00\x12\r\n\tNOT_EQUAL\x10\x01\x12\x0f\n\x0bREGEX_MATCH\x10\x02\x12\x12\n\x0eREGEX_NO_MATCH\x10\x03\x62\x06proto3')
+)
+_sym_db.RegisterFileDescriptor(DESCRIPTOR)
+
+_MATCHTYPE = _descriptor.EnumDescriptor(
+  name='MatchType',
+  full_name='remote.MatchType',
+  filename=None,
+  file=DESCRIPTOR,
+  values=[
+    _descriptor.EnumValueDescriptor(
+      name='EQUAL', index=0, number=0,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='NOT_EQUAL', index=1, number=1,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REGEX_MATCH', index=2, number=2,
+      options=None,
+      type=None),
+    _descriptor.EnumValueDescriptor(
+      name='REGEX_NO_MATCH', index=3, number=3,
+      options=None,
+      type=None),
+  ],
+  containing_type=None,
+  options=None,
+  serialized_start=588,
+  serialized_end=662,
+)
+_sym_db.RegisterEnumDescriptor(_MATCHTYPE)
+
+MatchType = enum_type_wrapper.EnumTypeWrapper(_MATCHTYPE)
+EQUAL = 0
+NOT_EQUAL = 1
+REGEX_MATCH = 2
+REGEX_NO_MATCH = 3
+
+
+
+_SAMPLE = _descriptor.Descriptor(
+  name='Sample',
+  full_name='remote.Sample',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='value', full_name='remote.Sample.value', index=0,
+      number=1, type=1, cpp_type=5, label=1,
+      has_default_value=False, default_value=float(0),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='timestamp_ms', full_name='remote.Sample.timestamp_ms', index=1,
+      number=2, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=24,
+  serialized_end=69,
+)
+
+
+_LABELPAIR = _descriptor.Descriptor(
+  name='LabelPair',
+  full_name='remote.LabelPair',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='name', full_name='remote.LabelPair.name', index=0,
+      number=1, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='remote.LabelPair.value', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=71,
+  serialized_end=111,
+)
+
+
+_TIMESERIES = _descriptor.Descriptor(
+  name='TimeSeries',
+  full_name='remote.TimeSeries',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='labels', full_name='remote.TimeSeries.labels', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='samples', full_name='remote.TimeSeries.samples', index=1,
+      number=2, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=113,
+  serialized_end=193,
+)
+
+
+_WRITEREQUEST = _descriptor.Descriptor(
+  name='WriteRequest',
+  full_name='remote.WriteRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timeseries', full_name='remote.WriteRequest.timeseries', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=195,
+  serialized_end=249,
+)
+
+
+_READREQUEST = _descriptor.Descriptor(
+  name='ReadRequest',
+  full_name='remote.ReadRequest',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='queries', full_name='remote.ReadRequest.queries', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=251,
+  serialized_end=296,
+)
+
+
+_READRESPONSE = _descriptor.Descriptor(
+  name='ReadResponse',
+  full_name='remote.ReadResponse',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='results', full_name='remote.ReadResponse.results', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=298,
+  serialized_end=350,
+)
+
+
+_QUERY = _descriptor.Descriptor(
+  name='Query',
+  full_name='remote.Query',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='start_timestamp_ms', full_name='remote.Query.start_timestamp_ms', index=0,
+      number=1, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='end_timestamp_ms', full_name='remote.Query.end_timestamp_ms', index=1,
+      number=2, type=3, cpp_type=2, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='matchers', full_name='remote.Query.matchers', index=2,
+      number=3, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=352,
+  serialized_end=453,
+)
+
+
+_LABELMATCHER = _descriptor.Descriptor(
+  name='LabelMatcher',
+  full_name='remote.LabelMatcher',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='type', full_name='remote.LabelMatcher.type', index=0,
+      number=1, type=14, cpp_type=8, label=1,
+      has_default_value=False, default_value=0,
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='name', full_name='remote.LabelMatcher.name', index=1,
+      number=2, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+    _descriptor.FieldDescriptor(
+      name='value', full_name='remote.LabelMatcher.value', index=2,
+      number=3, type=9, cpp_type=9, label=1,
+      has_default_value=False, default_value=_b("").decode('utf-8'),
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=455,
+  serialized_end=531,
+)
+
+
+_QUERYRESULT = _descriptor.Descriptor(
+  name='QueryResult',
+  full_name='remote.QueryResult',
+  filename=None,
+  file=DESCRIPTOR,
+  containing_type=None,
+  fields=[
+    _descriptor.FieldDescriptor(
+      name='timeseries', full_name='remote.QueryResult.timeseries', index=0,
+      number=1, type=11, cpp_type=10, label=3,
+      has_default_value=False, default_value=[],
+      message_type=None, enum_type=None, containing_type=None,
+      is_extension=False, extension_scope=None,
+      options=None),
+  ],
+  extensions=[
+  ],
+  nested_types=[],
+  enum_types=[
+  ],
+  options=None,
+  is_extendable=False,
+  syntax='proto3',
+  extension_ranges=[],
+  oneofs=[
+  ],
+  serialized_start=533,
+  serialized_end=586,
+)
+
+_TIMESERIES.fields_by_name['labels'].message_type = _LABELPAIR
+_TIMESERIES.fields_by_name['samples'].message_type = _SAMPLE
+_WRITEREQUEST.fields_by_name['timeseries'].message_type = _TIMESERIES
+_READREQUEST.fields_by_name['queries'].message_type = _QUERY
+_READRESPONSE.fields_by_name['results'].message_type = _QUERYRESULT
+_QUERY.fields_by_name['matchers'].message_type = _LABELMATCHER
+_LABELMATCHER.fields_by_name['type'].enum_type = _MATCHTYPE
+_QUERYRESULT.fields_by_name['timeseries'].message_type = _TIMESERIES
+DESCRIPTOR.message_types_by_name['Sample'] = _SAMPLE
+DESCRIPTOR.message_types_by_name['LabelPair'] = _LABELPAIR
+DESCRIPTOR.message_types_by_name['TimeSeries'] = _TIMESERIES
+DESCRIPTOR.message_types_by_name['WriteRequest'] = _WRITEREQUEST
+DESCRIPTOR.message_types_by_name['ReadRequest'] = _READREQUEST
+DESCRIPTOR.message_types_by_name['ReadResponse'] = _READRESPONSE
+DESCRIPTOR.message_types_by_name['Query'] = _QUERY
+DESCRIPTOR.message_types_by_name['LabelMatcher'] = _LABELMATCHER
+DESCRIPTOR.message_types_by_name['QueryResult'] = _QUERYRESULT
+DESCRIPTOR.enum_types_by_name['MatchType'] = _MATCHTYPE
+
+Sample = _reflection.GeneratedProtocolMessageType('Sample', (_message.Message,), dict(
+  DESCRIPTOR = _SAMPLE,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.Sample)
+  ))
+_sym_db.RegisterMessage(Sample)
+
+LabelPair = _reflection.GeneratedProtocolMessageType('LabelPair', (_message.Message,), dict(
+  DESCRIPTOR = _LABELPAIR,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.LabelPair)
+  ))
+_sym_db.RegisterMessage(LabelPair)
+
+TimeSeries = _reflection.GeneratedProtocolMessageType('TimeSeries', (_message.Message,), dict(
+  DESCRIPTOR = _TIMESERIES,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.TimeSeries)
+  ))
+_sym_db.RegisterMessage(TimeSeries)
+
+WriteRequest = _reflection.GeneratedProtocolMessageType('WriteRequest', (_message.Message,), dict(
+  DESCRIPTOR = _WRITEREQUEST,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.WriteRequest)
+  ))
+_sym_db.RegisterMessage(WriteRequest)
+
+ReadRequest = _reflection.GeneratedProtocolMessageType('ReadRequest', (_message.Message,), dict(
+  DESCRIPTOR = _READREQUEST,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.ReadRequest)
+  ))
+_sym_db.RegisterMessage(ReadRequest)
+
+ReadResponse = _reflection.GeneratedProtocolMessageType('ReadResponse', (_message.Message,), dict(
+  DESCRIPTOR = _READRESPONSE,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.ReadResponse)
+  ))
+_sym_db.RegisterMessage(ReadResponse)
+
+Query = _reflection.GeneratedProtocolMessageType('Query', (_message.Message,), dict(
+  DESCRIPTOR = _QUERY,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.Query)
+  ))
+_sym_db.RegisterMessage(Query)
+
+LabelMatcher = _reflection.GeneratedProtocolMessageType('LabelMatcher', (_message.Message,), dict(
+  DESCRIPTOR = _LABELMATCHER,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.LabelMatcher)
+  ))
+_sym_db.RegisterMessage(LabelMatcher)
+
+QueryResult = _reflection.GeneratedProtocolMessageType('QueryResult', (_message.Message,), dict(
+  DESCRIPTOR = _QUERYRESULT,
+  __module__ = 'remote_pb2'
+  # @@protoc_insertion_point(class_scope:remote.QueryResult)
+  ))
+_sym_db.RegisterMessage(QueryResult)
+
+
+# @@protoc_insertion_point(module_scope)
diff --git a/gnocchi/rest/wsgi.py b/gnocchi/rest/wsgi.py
new file mode 100644
index 0000000000000000000000000000000000000000..0ebe7533717d89c0efa1e758c44aa14226f78faa
--- /dev/null
+++ b/gnocchi/rest/wsgi.py
@@ -0,0 +1,16 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""This file is loaded by gnocchi-api when executing uwsgi"""
+from gnocchi.cli import api
+from gnocchi.rest import app
+application = app.load_app(api.prepare_service())
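+# e.g. (an illustrative sketch; gnocchi-api builds the real command line):
+#   uwsgi --http :8041 --wsgi-file gnocchi/rest/wsgi.py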
diff --git a/gnocchi/service.py b/gnocchi/service.py
new file mode 100644
index 0000000000000000000000000000000000000000..cdcc9ec06b955ae3c5c311cf1bf00c47e5abbde4
--- /dev/null
+++ b/gnocchi/service.py
@@ -0,0 +1,108 @@
+# Copyright (c) 2016-2017 Red Hat, Inc.
+# Copyright (c) 2015 eNovance
+# Copyright (c) 2013 Mirantis Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import logging
+
+import daiquiri
+from oslo_config import cfg
+from oslo_db import options as db_options
+from six.moves.urllib import parse as urlparse
+
+import gnocchi
+from gnocchi import archive_policy
+from gnocchi import opts
+from gnocchi import utils
+
+LOG = daiquiri.getLogger(__name__)
+
+
+def prepare_service(args=None, conf=None,
+                    default_config_files=None,
+                    log_to_std=False, logging_level=None,
+                    skip_log_opts=False):
+    if conf is None:
+        conf = cfg.ConfigOpts()
+    # FIXME(jd) Use the pkg_entry info to register the options of these libs
+    db_options.set_defaults(conf)
+
+    # Register our own Gnocchi options
+    for group, options in opts.list_opts():
+        conf.register_opts(list(options),
+                           group=None if group == "DEFAULT" else group)
+
+    conf.register_cli_opts(opts._cli_options)
+
+    workers = utils.get_default_workers()
+    conf.set_default("workers", workers, group="metricd")
+    conf.set_default("parallel_operations", workers)
+
+    conf(args, project='gnocchi', validate_default_values=True,
+         default_config_files=default_config_files,
+         version=gnocchi.__version__)
+
+    utils.parallel_map.MAX_WORKERS = conf.parallel_operations
+
+    if not log_to_std and (conf.log_dir or conf.log_file):
+        outputs = [daiquiri.output.File(filename=conf.log_file,
+                                        directory=conf.log_dir)]
+    else:
+        outputs = [daiquiri.output.STDERR]
+
+    if conf.use_syslog:
+        outputs.append(
+            daiquiri.output.Syslog(facility=conf.syslog_log_facility))
+
+    if conf.use_journal:
+        outputs.append(daiquiri.output.Journal())
+
+    daiquiri.setup(outputs=outputs)
+    if logging_level is None:
+        if conf.debug:
+            logging_level = logging.DEBUG
+        elif conf.verbose:
+            logging_level = logging.INFO
+        else:
+            logging_level = logging.WARNING
+    logging.getLogger("gnocchi").setLevel(logging_level)
+
+    # HACK(jd) I'm not happy about that, fix AP class to handle a conf object?
+    archive_policy.ArchivePolicy.DEFAULT_AGGREGATION_METHODS = (
+        conf.archive_policy.default_aggregation_methods
+    )
+
+    # If no coordination URL is provided, default to using the indexer as
+    # coordinator
+    if conf.coordination_url is None:
+        if conf.storage.driver == "redis":
+            conf.set_default("coordination_url",
+                             conf.storage.redis_url)
+        elif conf.incoming.driver == "redis":
+            conf.set_default("coordination_url",
+                             conf.incoming.redis_url)
+        else:
+            parsed = urlparse.urlparse(conf.indexer.url)
+            proto, _, _ = parsed.scheme.partition("+")
+            parsed = list(parsed)
+            # Set proto without the + part
+            parsed[0] = proto
+            conf.set_default("coordination_url",
+                             urlparse.urlunparse(parsed))
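+            # e.g. an indexer URL of postgresql+psycopg2://u@h/gnocchi
+            # yields postgresql://u@h/gnocchi (an illustrative sketch).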
+
+    if not skip_log_opts:
+        LOG.info("Gnocchi version %s", gnocchi.__version__)
+        conf.log_opt_values(LOG, logging.DEBUG)
+
+    return conf
diff --git a/gnocchi/setuptools.py b/gnocchi/setuptools.py
new file mode 100644
index 0000000000000000000000000000000000000000..67c1c858e52989af2cb2134d670794092151c1fe
--- /dev/null
+++ b/gnocchi/setuptools.py
@@ -0,0 +1,137 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+from __future__ import absolute_import
+
+import os
+import subprocess
+import sys
+
+from distutils import version
+from setuptools.command import develop
+from setuptools.command import easy_install
+from setuptools.command import egg_info
+from setuptools.command import install_scripts
+
+# NOTE(sileht): We use a template to set the right
+# python version in the shebang
+SCRIPT_TMPL = """
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import sys
+from gnocchi.cli import api
+
+if __name__ == '__main__':
+    sys.exit(api.api())
+else:
+    application = api.wsgi()
+"""
+
+
+def git(*args):
+    p = subprocess.Popen(["git"] + list(args),
+                         stdout=subprocess.PIPE,
+                         stderr=subprocess.PIPE)
+    out, _ = p.communicate()
+    return out.strip().decode('utf-8', 'replace')
+
+
+class local_egg_info(egg_info.egg_info):
+    def run(self):
+        if os.path.exists(".git"):
+            self._gen_changelog_and_authors()
+        egg_info.egg_info.run(self)
+
+    @staticmethod
+    def _gen_changelog_and_authors():
+        with open("AUTHORS", 'wb') as f:
+            authors = git('log', '--format=%aN <%aE>')
+            authors = sorted(set(authors.split("\n")))
+            f.writelines([b"%s\n" % author.encode('utf8')
+                          for author in authors])
+
+        with open("ChangeLog", "wb") as f:
+            f.write(b"CHANGES\n")
+            f.write(b"=======\n\n")
+            changelog = git('log', '--decorate=full', '--format=%s%x00%d')
+            for line in changelog.split('\n'):
+                msg, refname = line.split("\x00")
+
+                if "refs/tags/" in refname:
+                    refname = refname.strip()[1:-1]  # remove wrapping ()'s
+                    # If we start with "tag: refs/tags/1.2b1, tag:
+                    # refs/tags/1.2", the first split gives us
+                    # ['tag: ', '1.2b1, tag: ', '1.2'], which is why we do
+                    # the second split below on the comma.
+                    for tag_string in refname.split("refs/tags/")[1:]:
+                        # git tag does not allow : or " " in tag names, so we
+                        # split on ", " which is the separator between elements
+                        candidate = tag_string.split(", ")[0]
+                        try:
+                            version.StrictVersion(candidate)
+                        except ValueError:
+                            pass
+                        else:
+                            f.write(b"\n%s\n" % candidate.encode('utf8'))
+                            f.write(b"%s\n\n" % (b"-" * len(candidate)))
+
+                if msg.startswith("Merge "):
+                    continue
+                if msg.endswith("."):
+                    msg = msg[:-1]
+                msg = msg.replace('*', '\\*')
+                msg = msg.replace('_', '\\_')
+                msg = msg.replace('`', '\\`')
+                f.write(b"* %s\n" % msg.encode("utf8"))
+
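+    # A hypothetical run of the generator above could produce a ChangeLog
+    # such as:
+    #
+    #     CHANGES
+    #     =======
+    #
+    #     4.3.0
+    #     -----
+    #
+    #     * Add statsd daemon
+    #
+    # (tag headers come from StrictVersion-parseable tags, bullet lines from
+    # commit subjects, with merge commits skipped; the entries shown are
+    # illustrative.)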
+
+# Can't use six in this file; it's too early in the bootstrap process
+PY3 = sys.version_info >= (3,)
+
+
+class local_install_scripts(install_scripts.install_scripts):
+    def run(self):
+        install_scripts.install_scripts.run(self)
+        # NOTE(sileht): Built wheels embed custom scripts as data and bake
+        # the shebang of the build machine into them. To work around that,
+        # build_scripts under bdist_wheel emits '#!python', which is replaced
+        # by the correct interpreter when the wheel is installed. We do the
+        # same here.
+        bs_cmd = self.get_finalized_command('build_scripts')
+        executable = getattr(bs_cmd, 'executable', easy_install.sys_executable)
+        script = easy_install.get_script_header("", executable) + SCRIPT_TMPL
+        if PY3:
+            script = script.encode('ascii')
+        self.write_script("gnocchi-api", script, 'b')
+
+
+class local_develop(develop.develop):
+    def install_wrapper_scripts(self, dist):
+        develop.develop.install_wrapper_scripts(self, dist)
+        if self.exclude_scripts:
+            return
+        script = easy_install.get_script_header("") + SCRIPT_TMPL
+        if PY3:
+            script = script.encode('ascii')
+        self.write_script("gnocchi-api", script, 'b')
diff --git a/gnocchi/statsd.py b/gnocchi/statsd.py
new file mode 100644
index 0000000000000000000000000000000000000000..95bb3cc8a6057c5587f248570517ffa879212370
--- /dev/null
+++ b/gnocchi/statsd.py
@@ -0,0 +1,198 @@
+# Copyright (c) 2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import itertools
+import uuid
+
+try:
+    import asyncio
+except ImportError:
+    import trollius as asyncio
+import daiquiri
+from oslo_config import cfg
+import six
+
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import utils
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+class Stats(object):
+    def __init__(self, conf):
+        self.conf = conf
+        self.incoming = incoming.get_driver(self.conf)
+        self.indexer = indexer.get_driver(self.conf)
+        try:
+            self.indexer.create_resource('generic',
+                                         self.conf.statsd.resource_id,
+                                         self.conf.statsd.creator)
+        except indexer.ResourceAlreadyExists:
+            LOG.debug("Resource %s already exists",
+                      self.conf.statsd.resource_id)
+        else:
+            LOG.info("Created resource %s", self.conf.statsd.resource_id)
+        self.gauges = {}
+        self.counters = {}
+        self.times = {}
+        self.metrics = {
+            metric.name: metric
+            for metric
+            in self.indexer.get_resource('generic',
+                                         self.conf.statsd.resource_id,
+                                         with_metrics=True).metrics
+        }
+
+    def reset(self):
+        self.gauges.clear()
+        self.counters.clear()
+        self.times.clear()
+
+    def treat_metric(self, metric_name, metric_type, value, sampling):
+        metric_name += "|" + metric_type
+        if metric_type == "ms":
+            if sampling is not None:
+                raise ValueError(
+                    "Invalid sampling for ms: `%d`, should be none"
+                    % sampling)
+            self.times[metric_name] = incoming.Measure(
+                utils.dt_in_unix_ns(utils.utcnow()), value)
+        elif metric_type == "g":
+            if sampling is not None:
+                raise ValueError(
+                    "Invalid sampling for g: `%d`, should be none"
+                    % sampling)
+            self.gauges[metric_name] = incoming.Measure(
+                utils.dt_in_unix_ns(utils.utcnow()), value)
+        elif metric_type == "c":
+            sampling = 1 if sampling is None else sampling
+            if metric_name in self.counters:
+                current_value = self.counters[metric_name].value
+            else:
+                current_value = 0
+            self.counters[metric_name] = incoming.Measure(
+                utils.dt_in_unix_ns(utils.utcnow()),
+                current_value + (value * (1 / sampling)))
+        # TODO(jd) Support "set" type
+        # elif metric_type == "s":
+        #     pass
+        else:
+            raise ValueError("Unknown metric type `%s'" % metric_type)
+
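+    # Counter arithmetic sketch: a sampled counter is scaled back up before
+    # being accumulated. For instance, with a hypothetical sample rate of
+    # 0.1, receiving `value = 5` adds 5 * (1 / 0.1) == 50 to the current
+    # counter value, i.e. the estimated real count.
+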
+    def flush(self):
+        for metric_name, measure in itertools.chain(
+                six.iteritems(self.gauges),
+                six.iteritems(self.counters),
+                six.iteritems(self.times)):
+            try:
+                # NOTE(jd) We avoid considering any concurrency here as statsd
+                # is not designed to run in parallel and we do not envision
+                # operators manipulating the resource/metrics using the Gnocchi
+                # API at the same time.
+                metric = self.metrics.get(metric_name)
+                if not metric:
+                    ap_name = self._get_archive_policy_name(metric_name)
+                    metric = self.indexer.create_metric(
+                        uuid.uuid4(),
+                        self.conf.statsd.creator,
+                        archive_policy_name=ap_name,
+                        name=metric_name,
+                        resource_id=self.conf.statsd.resource_id)
+                    self.metrics[metric_name] = metric
+                self.incoming.add_measures(metric.id, (measure,))
+            except Exception as e:
+                LOG.error("Unable to add measure %s: %s",
+                          metric_name, e)
+
+        self.reset()
+
+    def _get_archive_policy_name(self, metric_name):
+        if self.conf.statsd.archive_policy_name:
+            return self.conf.statsd.archive_policy_name
+        # NOTE(sileht): We deliberately don't catch NoArchivePolicyRuleMatch
+        # here so that it propagates and gets logged
+        ap = self.indexer.get_archive_policy_for_metric(metric_name)
+        return ap.name
+
+
+class StatsdServer(asyncio.DatagramProtocol):
+    def __init__(self, stats):
+        self.stats = stats
+
+    @staticmethod
+    def connection_made(transport):
+        pass
+
+    def datagram_received(self, data, addr):
+        LOG.debug("Received data `%r' from %s", data, addr)
+        try:
+            messages = [m for m in data.decode().split("\n") if m]
+        except Exception as e:
+            LOG.error("Unable to decode datagram: %s", e)
+            return
+        for message in messages:
+            metric = message.split("|")
+            if len(metric) == 2:
+                metric_name, metric_type = metric
+                sampling = None
+            elif len(metric) == 3:
+                metric_name, metric_type, sampling = metric
+            else:
+                LOG.error("Invalid number of | in `%s'", message)
+                continue
+            sampling = float(sampling[1:]) if sampling is not None else None
+            metric_name, metric_str_val = metric_name.split(':')
+            # NOTE(jd): We do not support +/- gauge, and we delete gauge on
+            # each flush.
+            value = float(metric_str_val)
+            try:
+                self.stats.treat_metric(metric_name, metric_type,
+                                        value, sampling)
+            except Exception as e:
+                LOG.error("Unable to treat metric %s: %s", message, str(e))
+
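+# Wire format handled above, for reference: each UDP datagram carries one or
+# more newline-separated messages of the form
+#
+#     <metric name>:<value>|<type>[|@<sample rate>]
+#
+# e.g. `api.requests:1|c|@0.5` is a counter sampled at 50% and
+# `db.query:27.5|ms` is a timing value (the names are illustrative).
+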
+
+def start():
+    conf = service.prepare_service()
+
+    if conf.statsd.resource_id is None:
+        raise cfg.RequiredOptError("resource_id", cfg.OptGroup("statsd"))
+
+    stats = Stats(conf)
+
+    loop = asyncio.get_event_loop()
+    # TODO(jd) Add TCP support
+    listen = loop.create_datagram_endpoint(
+        lambda: StatsdServer(stats),
+        local_addr=(conf.statsd.host, conf.statsd.port))
+
+    def _flush():
+        loop.call_later(conf.statsd.flush_delay, _flush)
+        stats.flush()
+
+    loop.call_later(conf.statsd.flush_delay, _flush)
+    transport, protocol = loop.run_until_complete(listen)
+
+    LOG.info("Started on %s:%d", conf.statsd.host, conf.statsd.port)
+    LOG.info("Flush delay: %d seconds", conf.statsd.flush_delay)
+
+    try:
+        loop.run_forever()
+    except KeyboardInterrupt:
+        pass
+
+    transport.close()
+    loop.close()
diff --git a/gnocchi/storage/__init__.py b/gnocchi/storage/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..936710a73f8626a0a8e436a0662975762eb84760
--- /dev/null
+++ b/gnocchi/storage/__init__.py
@@ -0,0 +1,694 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2018 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+import itertools
+import operator
+
+import daiquiri
+import numpy
+from oslo_config import cfg
+import six
+
+from gnocchi import carbonara
+from gnocchi import utils
+
+
+OPTS = [
+    cfg.StrOpt('driver',
+               default='file',
+               help='Storage driver to use'),
+]
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+ATTRGETTER_METHOD = operator.attrgetter("method")
+ATTRGETTER_GRANULARITY = operator.attrgetter("granularity")
+
+
+class StorageError(Exception):
+    pass
+
+
+class MetricDoesNotExist(StorageError):
+    """Error raised when this metric does not exist."""
+
+    def __init__(self, metric):
+        self.metric = metric
+        super(MetricDoesNotExist, self).__init__(
+            "Metric %s does not exist" % metric)
+
+    def jsonify(self):
+        return {
+            "cause": "Metric does not exist",
+            "detail": {
+                "metric": self.metric,
+            },
+        }
+
+
+class AggregationDoesNotExist(StorageError):
+    """Error raised when the aggregation method doesn't exists for a metric."""
+
+    def __init__(self, metric, method, granularity):
+        self.metric = metric
+        self.method = method
+        self.granularity = granularity
+        super(AggregationDoesNotExist, self).__init__(
+            "Aggregation method '%s' at granularity '%s' "
+            "for metric %s does not exist" %
+            (method, utils.timespan_total_seconds(granularity), metric))
+
+    def jsonify(self):
+        return {
+            "cause": "Aggregation does not exist",
+            "detail": {
+                # FIXME(jd) Pecan does not use our JSON renderer for errors
+                # So we need to convert this
+                "granularity": utils.timespan_total_seconds(self.granularity),
+                "aggregation_method": self.method,
+            },
+        }
+
+
+class MetricAlreadyExists(StorageError):
+    """Error raised when this metric already exists."""
+
+    def __init__(self, metric):
+        self.metric = metric
+        super(MetricAlreadyExists, self).__init__(
+            "Metric %s already exists" % metric)
+
+
+@utils.retry_on_exception_and_log("Unable to initialize storage driver")
+def get_driver(conf):
+    """Return the configured driver."""
+    return utils.get_driver_class('gnocchi.storage', conf.storage)(
+        conf.storage)
+
+
+class Statistics(collections.defaultdict):
+    class StatisticsTimeContext(object):
+        def __init__(self, stats, name):
+            self.stats = stats
+            self.name = name + " time"
+
+        def __enter__(self):
+            self.sw = utils.StopWatch()
+            self.sw.start()
+            return self
+
+        def __exit__(self, type, value, traceback):
+            self.stats[self.name] += self.sw.elapsed()
+
+    def __init__(self):
+        super(Statistics, self).__init__(lambda: 0)
+
+    def time(self, name):
+        return self.StatisticsTimeContext(self, name)
+
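+# Usage sketch for Statistics: drivers both time code sections and count
+# events on the same object; timed sections get a " time" suffix, so the two
+# kinds of entries never collide (do_update is a stand-in):
+#
+#     stats = Statistics()
+#     with stats.time("splits update"):
+#         do_update()                  # adds to stats["splits update time"]
+#     stats["splits update"] += 1      # counts the event itself
+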
+
+class StorageDriver(object):
+
+    # NOTE(sileht): By default we use threads, but some drivers can disable
+    # threads by setting this to utils.sequencial_map
+    MAP_METHOD = staticmethod(utils.parallel_map)
+
+    def __init__(self, conf):
+        self.statistics = Statistics()
+
+    @staticmethod
+    def upgrade():
+        pass
+
+    def stop(self):
+        pass
+
+    def _get_splits(self, metrics_aggregations_keys, version=3):
+        results = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+        for metric, aggregation, split in self.MAP_METHOD(
+                lambda m, k, a, v: (m, a, self._get_splits_unbatched(m, k, a, v)),  # noqa
+                ((metric, key, aggregation, version)
+                 for metric, aggregations_and_keys
+                 in six.iteritems(metrics_aggregations_keys)
+                 for aggregation, keys
+                 in six.iteritems(aggregations_and_keys)
+                 for key in keys)):
+            results[metric][aggregation].append(split)
+        return results
+
+    @staticmethod
+    def _get_splits_unbatched(metric, timestamp_key, aggregation, version=3):
+        raise NotImplementedError
+
+    @staticmethod
+    def _get_or_create_unaggregated_timeseries_unbatched(metric, version=3):
+        """Get the unaggregated timeserie of metrics.
+
+        If the metrics does not exist, it is created.
+
+        :param metric: A metric.
+        :param version: The storage format version number.
+        """
+        raise NotImplementedError
+
+    def _get_or_create_unaggregated_timeseries(self, metrics, version=3):
+        """Get the unaggregated timeserie of metrics.
+
+        If the metrics do not exist, they are created.
+
+        :param metrics: A list of metrics.
+        :param version: The storage format version number.
+        """
+        return dict(
+            six.moves.zip(
+                metrics,
+                self.MAP_METHOD(
+                    utils.return_none_on_failure(
+                        self._get_or_create_unaggregated_timeseries_unbatched),
+                    ((metric, version) for metric in metrics))))
+
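+    # Batching sketch: the default batched implementation above fans out to
+    # the per-metric unbatched method through MAP_METHOD (a thread pool
+    # unless the driver overrides it) and is roughly equivalent to:
+    #
+    #     {metric: self._get_or_create_unaggregated_timeseries_unbatched(
+    #          metric, version)
+    #      for metric in metrics}
+    #
+    # except that individual failures are logged and mapped to None by
+    # utils.return_none_on_failure.
+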
+    @staticmethod
+    def _store_unaggregated_timeseries_unbatched(metric, data, version=3):
+        """Store unaggregated timeseries.
+
+        :param metric: A metric.
+        :param data: The data to store.
+        :param version: Storage engine data format version
+        """
+        raise NotImplementedError
+
+    def _store_unaggregated_timeseries(self, metrics_and_data, version=3):
+        """Store unaggregated timeseries.
+
+        :param metrics_and_data: A list of (metric, serialized_data) tuples
+        :param version: Storage engine data format version
+        """
+        self.MAP_METHOD(
+            utils.return_none_on_failure(
+                self._store_unaggregated_timeseries_unbatched),
+            ((metric, data, version) for metric, data in metrics_and_data))
+
+    @staticmethod
+    def _store_metric_splits_unbatched(metric, key, aggregation, data, offset,
+                                       version=3):
+        """Store a metric split.
+
+        :param metric: A metric.
+        :param key: The `carbonara.SplitKey`.
+        :param aggregation: The `carbonara.Aggregation`.
+        :param data: The actual data to write.
+        :param offset: The offset to write to.
+        :param version: Storage engine format version.
+        """
+        raise NotImplementedError
+
+    def _store_metric_splits(self, metrics_keys_aggregations_data_offset,
+                             version=3):
+        """Store metric splits.
+
+        Store a bunch of splits for some metrics.
+
+        :param metrics_keys_aggregations_data_offset: A dict where keys are
+                                                      `storage.Metric` and
+                                                      values are a list of
+                                                      (key, aggregation,
+                                                       data, offset) tuples.
+        :param version: Storage engine format version.
+        """
+        self.MAP_METHOD(
+            self._store_metric_splits_unbatched,
+            ((metric, key, aggregation, data, offset, version)
+             for metric, keys_aggregations_data_offset
+             in six.iteritems(metrics_keys_aggregations_data_offset)
+             for key, aggregation, data, offset
+             in keys_aggregations_data_offset))
+
+    @staticmethod
+    def _list_split_keys_unbatched(metric, aggregations, version=3):
+        """List split keys for a metric.
+
+        :param metric: The metric to look keys for.
+        :param aggregations: List of Aggregations to look for.
+        :param version: Storage engine format version.
+        :return: A dict where keys are Aggregation objects and values are
+                 a set of SplitKey objects.
+        """
+        raise NotImplementedError
+
+    def _list_split_keys(self, metrics_and_aggregations, version=3):
+        """List split keys for metrics.
+
+        :param metrics_and_aggregations: Dict of
+                                         {`storage.Metric`:
+                                          [`carbonara.Aggregation`]}
+                                         to look for.
+        :param version: Storage engine format version.
+        :return: A dict where keys are `storage.Metric` and values are dicts
+                 where keys are `carbonara.Aggregation` objects and values are
+                 a set of `carbonara.SplitKey` objects.
+        """
+        metrics = list(metrics_and_aggregations.keys())
+        r = self.MAP_METHOD(
+            self._list_split_keys_unbatched,
+            ((metric, metrics_and_aggregations[metric], version)
+             for metric in metrics))
+        return {
+            metric: results
+            for metric, results in six.moves.zip(metrics, r)
+        }
+
+    @staticmethod
+    def _version_check(name, v):
+        """Validate object matches expected version.
+
+        Version should be last attribute and start with 'v'
+        """
+        return name.split("_")[-1] == 'v%s' % v
+
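+    # For example, with the object naming used by the drivers below (names
+    # are illustrative):
+    #
+    #     _version_check("gnocchi_<uuid>_none_v3", 3)  # -> True
+    #     _version_check("gnocchi_<uuid>_none", 3)     # -> False
+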
+    def get_aggregated_measures(self, metrics_and_aggregations,
+                                from_timestamp=None, to_timestamp=None,
+                                resample=None):
+        """Get aggregated measures from a metric.
+
+        :param metrics_and_aggregations: The metrics and aggregations to
+                                         retrieve in format
+                                         {metric: [aggregation, …]}.
+        :param from timestamp: The timestamp to get the measure from.
+        :param to timestamp: The timestamp to get the measure to.
+        """
+        metrics_aggs_keys = self._list_split_keys(metrics_and_aggregations)
+
+        for metric, aggregations_keys in six.iteritems(metrics_aggs_keys):
+            for aggregation, keys in six.iteritems(aggregations_keys):
+                start = (
+                    carbonara.SplitKey.from_timestamp_and_sampling(
+                        from_timestamp, aggregation.granularity)
+                ) if from_timestamp else None
+
+                stop = (
+                    carbonara.SplitKey.from_timestamp_and_sampling(
+                        to_timestamp, aggregation.granularity)
+                ) if to_timestamp else None
+
+                # Replace keys with filtered version
+                metrics_aggs_keys[metric][aggregation] = [
+                    key for key in sorted(keys)
+                    if ((not start or key >= start)
+                        and (not stop or key <= stop))
+                ]
+
+        metrics_aggregations_splits = self._get_splits_and_unserialize(
+            metrics_aggs_keys)
+
+        results = collections.defaultdict(dict)
+        for metric, aggregations in six.iteritems(metrics_and_aggregations):
+            for aggregation in aggregations:
+                ts = carbonara.AggregatedTimeSerie.from_timeseries(
+                    metrics_aggregations_splits[metric][aggregation],
+                    aggregation)
+                # We need to truncate because:
+                # - If the driver is not in WRITE_FULL mode, then it might read
+                # too much data that will be deleted once the split is
+                # rewritten. Just truncate so we don't return it.
+                # - If the driver is in WRITE_FULL but the archive policy has
+                # been resized, we might still have too many points stored,
+                # which will be deleted later when new points are processed.
+                # Truncate to be sure we don't return them.
+                if aggregation.timespan is not None:
+                    ts.truncate(aggregation.timespan)
+                if resample:
+                    ts = ts.resample(resample)
+                results[metric][aggregation] = ts.fetch(
+                    from_timestamp, to_timestamp)
+        return results
+
+    def _get_splits_and_unserialize(self, metrics_aggregations_keys):
+        """Get splits and unserialize them
+
+        :param metrics_aggregations_keys: A dict where keys are
+                                         `storage.Metric` and values are dict
+                                          of {Aggregation: [SplitKey]} to
+                                          retrieve.
+        :return: A dict where keys are `storage.Metric` and values are dict
+                 {aggregation: [`carbonara.AggregatedTimeSerie`]}.
+        """
+        raw_measures = self._get_splits(metrics_aggregations_keys)
+        results = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+        for metric, aggregations_and_raws in six.iteritems(raw_measures):
+            for aggregation, raws in six.iteritems(aggregations_and_raws):
+                for key, raw in six.moves.zip(
+                        metrics_aggregations_keys[metric][aggregation], raws):
+                    try:
+                        ts = carbonara.AggregatedTimeSerie.unserialize(
+                            raw, key, aggregation)
+                    except carbonara.InvalidData:
+                        LOG.error("Data corruption detected for %s "
+                                  "aggregated `%s' timeserie, granularity "
+                                  "`%s' around time `%s', ignoring.",
+                                  metric.id, aggregation.method, key.sampling,
+                                  key)
+                        ts = carbonara.AggregatedTimeSerie(aggregation)
+                    results[metric][aggregation].append(ts)
+        return results
+
+    def _update_metric_splits(self, metrics_keys_aggregations_splits):
+        """Store splits of `carbonara.`AggregatedTimeSerie` for a metric.
+
+        This reads the existing split and merge it with the new one give as
+        argument, then writing it to the storage.
+
+        :param metrics_keys_aggregations_splits: A dict where keys are
+                                                 `storage.Metric` and values
+                                                 are tuples of the form
+                                                 ({(key, aggregation): split},
+                                                  oldest_mutable_timestamp)
+        """
+        metrics_splits_to_store = {}
+        keys_to_get = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+        splits_to_rewrite = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+
+        for metric, (keys_and_aggregations_and_splits,
+                     oldest_mutable_timestamp) in six.iteritems(
+                         metrics_keys_aggregations_splits):
+            for (key, aggregation), split in six.iteritems(
+                    keys_and_aggregations_and_splits):
+                # NOTE(jd) We write the full split only if the driver works
+                # that way (self.WRITE_FULL) or if the oldest_mutable_timestamp
+                # is out of range.
+                if self.WRITE_FULL or next(key) <= oldest_mutable_timestamp:
+                    # Update the splits that were passed as argument with the
+                    # data already stored in the case that we need to rewrite
+                    # them fully. First, fetch all those existing splits.
+                    keys_to_get[metric][aggregation].append(key)
+                    splits_to_rewrite[metric][aggregation].append(split)
+
+        existing_data = self._get_splits_and_unserialize(keys_to_get)
+
+        for metric, (keys_and_aggregations_and_splits,
+                     oldest_mutable_timestamp) in six.iteritems(
+                         metrics_keys_aggregations_splits):
+            for aggregation, existing_list in six.iteritems(
+                    existing_data[metric]):
+                for key, split, existing in six.moves.zip(
+                        keys_to_get[metric][aggregation],
+                        splits_to_rewrite[metric][aggregation],
+                        existing_list):
+                    existing.merge(split)
+                    keys_and_aggregations_and_splits[
+                        (key, split.aggregation)] = existing
+
+            keys_aggregations_data_offset = []
+            for (key, aggregation), split in six.iteritems(
+                    keys_and_aggregations_and_splits):
+                # Do not store the split if it's empty.
+                if split:
+                    offset, data = split.serialize(
+                        key,
+                        compressed=key in keys_to_get[metric][aggregation])
+                    keys_aggregations_data_offset.append(
+                        (key, split.aggregation, data, offset))
+            metrics_splits_to_store[metric] = keys_aggregations_data_offset
+
+        return self._store_metric_splits(metrics_splits_to_store)
+
+    def _compute_split_operations(self, metric, aggregations_and_timeseries,
+                                  previous_oldest_mutable_timestamp,
+                                  oldest_mutable_timestamp):
+        """Compute changes to a metric and return operations to be done.
+
+        Based on an aggregations list and a grouped timeseries, this computes
+        what needs to be deleted and stored for a metric and returns it.
+
+        :param metric: The metric
+        :param aggregations_and_timeseries: A dictionary of timeseries of the
+                                            form {aggregation: timeseries}.
+        :param previous_oldest_mutable_timestamp: The previous oldest storable
+                                                  timestamp from the previous
+                                                  backwindow.
+        :param oldest_mutable_timestamp: The current oldest storable timestamp
+                                         from the current backwindow.
+        :return: A tuple (keys_to_delete, keys_to_store) where keys_to_delete
+                 is a set of `carbonara.SplitKey` to delete and where
+                 keys_to_store is a dictionary of the form {key: aggts}
+                 where key is a `carbonara.SplitKey` and aggts a
+                 `carbonara.AggregatedTimeSerie` to be serialized.
+        """
+        # We only need to check for rewrite if driver is not in WRITE_FULL mode
+        # and if we already stored splits once
+        need_rewrite = (
+            not self.WRITE_FULL
+            and previous_oldest_mutable_timestamp is not None
+        )
+
+        aggregations_needing_list_of_keys = set()
+        oldest_values = {}
+
+        for aggregation, ts in six.iteritems(aggregations_and_timeseries):
+            # Don't do anything if the timeseries is empty
+            if not ts:
+                continue
+
+            agg_oldest_values = {
+                'oldest_point_to_keep': ts.truncate(aggregation.timespan)
+                if aggregation.timespan else None,
+                'prev_oldest_mutable_key': None, 'oldest_mutable_key': None}
+
+            if previous_oldest_mutable_timestamp and (aggregation.timespan or
+                                                      need_rewrite):
+                previous_oldest_mutable_key = ts.get_split_key(
+                    previous_oldest_mutable_timestamp)
+                oldest_mutable_key = ts.get_split_key(oldest_mutable_timestamp)
+
+                # only clean up if there is a new object, as there must be a
+                # new object for an old object to be cleaned up
+                if previous_oldest_mutable_key != oldest_mutable_key:
+                    aggregations_needing_list_of_keys.add(aggregation)
+                    agg_oldest_values['prev_oldest_mutable_key'] = (
+                        previous_oldest_mutable_key)
+                    agg_oldest_values['oldest_mutable_key'] = (
+                        oldest_mutable_key)
+
+            oldest_values[aggregation.granularity] = agg_oldest_values
+
+        all_existing_keys = self._list_split_keys(
+            {metric: aggregations_needing_list_of_keys})[metric]
+
+        # NOTE(jd) This dict uses (key, aggregation) tuples as keys because
+        # using just (key) would not carry the aggregation method and therefore
+        # would not be unique per aggregation!
+        keys_and_split_to_store = {}
+        deleted_keys = set()
+
+        for aggregation, ts in six.iteritems(aggregations_and_timeseries):
+            # Don't do anything if the timeseries is empty
+            if not ts:
+                continue
+
+            agg_oldest_values = oldest_values[aggregation.granularity]
+
+            oldest_key_to_keep = ts.get_split_key(
+                agg_oldest_values['oldest_point_to_keep'])
+
+            # If we listed the keys for the aggregation, that's because we need
+            # to check for cleanup and/or rewrite
+            if aggregation in all_existing_keys:
+                # FIXME(jd) The driver should return this sorted and tests
+                # should assert it. It's likely backends already sort anyway.
+                existing_keys = sorted(all_existing_keys[aggregation])
+                # First, check for old splits to delete
+                if aggregation.timespan:
+                    for key in list(existing_keys):
+                        # NOTE(jd) Only delete if the key is strictly
+                        # inferior to the timestamp; we don't delete any
+                        # timeserie split that contains our timestamp, as
+                        # we prefer to keep a bit too much rather than
+                        # delete too much
+                        if key >= oldest_key_to_keep:
+                            break
+                        deleted_keys.add((key, aggregation))
+                        existing_keys.remove(key)
+
+                # Rewrite all read-only splits just for fun (and
+                # compression). This only happens if
+                # `previous_oldest_mutable_timestamp' exists, which means
+                # we already wrote some splits at some point – so this is
+                # not the first time we treat this timeserie.
+                if need_rewrite:
+                    for key in existing_keys:
+                        if agg_oldest_values['prev_oldest_mutable_key'] <= key:
+                            if key >= agg_oldest_values['oldest_mutable_key']:
+                                break
+                            LOG.debug(
+                                "Compressing previous split %s (%s) for "
+                                "metric %s", key, aggregation.method,
+                                metric)
+                            # NOTE(jd) Rewrite it entirely for fun (and
+                            # later for compression). For that, we just
+                            # pass an empty split.
+                            keys_and_split_to_store[
+                                (key, aggregation)] = (
+                                carbonara.AggregatedTimeSerie(
+                                    aggregation)
+                            )
+
+            for key, split in ts.split():
+                if key >= oldest_key_to_keep:
+                    LOG.debug(
+                        "Storing split %s (%s) for metric %s",
+                        key, aggregation.method, metric)
+                    keys_and_split_to_store[(key, aggregation)] = split
+
+        return (deleted_keys, keys_and_split_to_store)
+
+    @staticmethod
+    def _delete_metric(metric):
+        raise NotImplementedError
+
+    @staticmethod
+    def _delete_metric_splits_unbatched(metric, keys, aggregation, version=3):
+        raise NotImplementedError
+
+    def _delete_metric_splits(self, metrics_keys_aggregations, version=3):
+        """Delete splits of metrics.
+
+        :param metrics_keys_aggregations: A dict where keys are
+                                          `storage.Metric` and values are
+                                          lists of (key, aggregation) tuples.
+        """
+        self.MAP_METHOD(
+            utils.return_none_on_failure(self._delete_metric_splits_unbatched),
+            ((metric, key, aggregation)
+             for metric, keys_and_aggregations
+             in six.iteritems(metrics_keys_aggregations)
+             for key, aggregation in keys_and_aggregations))
+
+    def add_measures_to_metrics(self, metrics_and_measures):
+        """Update a metric with a new measures, computing new aggregations.
+
+        :param metrics_and_measures: A dict there keys are `storage.Metric`
+                                     objects and values are timeseries array of
+                                     the new measures.
+        """
+        with self.statistics.time("raw measures fetch"):
+            raw_measures = self._get_or_create_unaggregated_timeseries(
+                metrics_and_measures.keys())
+        self.statistics["raw measures fetch"] += len(metrics_and_measures)
+        self.statistics["processed measures"] += sum(
+            map(len, metrics_and_measures.values()))
+
+        new_boundts = []
+        splits_to_delete = {}
+        splits_to_update = {}
+
+        for metric, measures in six.iteritems(metrics_and_measures):
+            measures = numpy.sort(measures, order='timestamps')
+
+            agg_methods = list(metric.archive_policy.aggregation_methods)
+            block_size = metric.archive_policy.max_block_size
+            back_window = metric.archive_policy.back_window
+            # NOTE(sileht): We keep one more block to calculate the rate of
+            # change correctly
+            if any(filter(lambda x: x.startswith("rate:"), agg_methods)):
+                back_window += 1
+
+            if raw_measures[metric] is None:
+                ts = None
+            else:
+                try:
+                    ts = carbonara.BoundTimeSerie.unserialize(
+                        raw_measures[metric], block_size, back_window)
+                except carbonara.InvalidData:
+                    LOG.error("Data corruption detected for %s "
+                              "unaggregated timeserie, creating a new one",
+                              metric.id)
+                    ts = None
+
+            if ts is None:
+                # This is the first time we treat measures for this
+                # metric, or data are corrupted, create a new one
+                ts = carbonara.BoundTimeSerie(block_size=block_size,
+                                              back_window=back_window)
+                current_first_block_timestamp = None
+            else:
+                current_first_block_timestamp = ts.first_block_timestamp()
+
+            def _map_compute_splits_operations(bound_timeserie):
+                # NOTE(gordc): bound_timeserie is the entire set of
+                # unaggregated measures matching the largest granularity.
+                # The following takes only the points affected by the new
+                # measures for each specific granularity.
+                tstamp = max(bound_timeserie.first, measures['timestamps'][0])
+                new_first_block_timestamp = (
+                    bound_timeserie.first_block_timestamp()
+                )
+                aggregations = metric.archive_policy.aggregations
+
+                grouped_timeseries = {
+                    granularity: bound_timeserie.group_serie(
+                        granularity,
+                        carbonara.round_timestamp(tstamp, granularity))
+                    for granularity, aggregations
+                    # No need to sort the aggregations, they are already sorted
+                    in itertools.groupby(aggregations, ATTRGETTER_GRANULARITY)
+                }
+
+                aggregations_and_timeseries = {
+                    aggregation:
+                    carbonara.AggregatedTimeSerie.from_grouped_serie(
+                        grouped_timeseries[aggregation.granularity],
+                        aggregation)
+                    for aggregation in aggregations
+                }
+
+                deleted_keys, keys_and_split_to_store = (
+                    self._compute_split_operations(
+                        metric, aggregations_and_timeseries,
+                        current_first_block_timestamp,
+                        new_first_block_timestamp)
+                )
+
+                return (new_first_block_timestamp,
+                        deleted_keys,
+                        keys_and_split_to_store)
+
+            with self.statistics.time("aggregated measures compute"):
+                (new_first_block_timestamp,
+                 deleted_keys,
+                 keys_and_splits_to_store) = ts.set_values(
+                     measures,
+                     before_truncate_callback=_map_compute_splits_operations,
+                )
+
+            splits_to_delete[metric] = deleted_keys
+            splits_to_update[metric] = (keys_and_splits_to_store,
+                                        new_first_block_timestamp)
+
+            new_boundts.append((metric, ts.serialize()))
+
+        with self.statistics.time("splits delete"):
+            self._delete_metric_splits(splits_to_delete)
+        self.statistics["splits delete"] += len(splits_to_delete)
+        with self.statistics.time("splits update"):
+            self._update_metric_splits(splits_to_update)
+        self.statistics["splits update"] += len(splits_to_update)
+        with self.statistics.time("raw measures store"):
+            self._store_unaggregated_timeseries(new_boundts)
+        self.statistics["raw measures store"] += len(new_boundts)
diff --git a/gnocchi/storage/ceph.py b/gnocchi/storage/ceph.py
new file mode 100644
index 0000000000000000000000000000000000000000..91793cd760afc77276a2be677e74912571a7de20
--- /dev/null
+++ b/gnocchi/storage/ceph.py
@@ -0,0 +1,234 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2018 Red Hat
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+
+from oslo_config import cfg
+import six
+
+from gnocchi import carbonara
+from gnocchi.common import ceph
+from gnocchi import storage
+from gnocchi import utils
+
+
+OPTS = [
+    cfg.StrOpt('ceph_pool',
+               default='gnocchi',
+               help='Ceph pool name to use.'),
+    cfg.StrOpt('ceph_username',
+               help='Ceph username (ie: admin without "client." prefix).'),
+    cfg.StrOpt('ceph_secret', help='Ceph key', secret=True),
+    cfg.StrOpt('ceph_keyring', help='Ceph keyring path.'),
+    cfg.StrOpt('ceph_timeout',
+               default="30",
+               help='Ceph connection timeout in seconds'),
+    cfg.StrOpt('ceph_conffile',
+               default='/etc/ceph/ceph.conf',
+               help='Ceph configuration file.'),
+]
+
+rados = ceph.rados
+
+
+class CephStorage(storage.StorageDriver):
+    WRITE_FULL = False
+
+    def __init__(self, conf):
+        super(CephStorage, self).__init__(conf)
+        self.rados, self.ioctx = ceph.create_rados_connection(conf)
+
+    def __str__(self):
+        # Use cluster ID for now
+        return "%s: %s" % (self.__class__.__name__, self.rados.get_fsid())
+
+    def stop(self):
+        ceph.close_rados_connection(self.rados, self.ioctx)
+        super(CephStorage, self).stop()
+
+    @staticmethod
+    def _get_object_name(metric, key, aggregation, version=3):
+        name = str("gnocchi_%s_%s_%s_%s" % (
+            metric.id, key, aggregation,
+            utils.timespan_total_seconds(key.sampling)),
+        )
+        return name + '_v%s' % version if version else name
+
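+    # Naming sketch: for a hypothetical metric id, a split key at epoch
+    # 1500000000.0 and a mean aggregation with a 300-second granularity,
+    # this yields something like:
+    #
+    #     gnocchi_<uuid>_1500000000.0_mean_300.0_v3
+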
+    def _object_exists(self, name):
+        try:
+            self.ioctx.stat(name)
+            return True
+        except rados.ObjectNotFound:
+            return False
+
+    def _create_metric(self, metric):
+        name = self._build_unaggregated_timeserie_path(metric, 3)
+        if self._object_exists(name):
+            raise storage.MetricAlreadyExists(metric)
+        else:
+            self.ioctx.write_full(name, b"")
+
+    def _store_metric_splits(self, metrics_keys_aggregations_data_offset,
+                             version=3):
+        with rados.WriteOpCtx() as op:
+            for metric, keys_aggregations_data_offset in six.iteritems(
+                    metrics_keys_aggregations_data_offset):
+                for key, agg, data, offset in keys_aggregations_data_offset:
+                    name = self._get_object_name(
+                        metric, key, agg.method, version)
+                    if offset is None:
+                        self.ioctx.write_full(name, data)
+                    else:
+                        self.ioctx.write(name, data, offset=offset)
+                    self.ioctx.set_omap(op, (name,), (b"",))
+                self.ioctx.operate_write_op(
+                    op, self._build_unaggregated_timeserie_path(metric, 3))
+
+    def _delete_metric_splits(self, metrics_keys_aggregations, version=3):
+        with rados.WriteOpCtx() as op:
+            for metric, keys_and_aggregations in six.iteritems(
+                    metrics_keys_aggregations):
+                names = tuple(
+                    self._get_object_name(
+                        metric, key, aggregation.method, version)
+                    for key, aggregation in keys_and_aggregations
+                )
+                for name in names:
+                    try:
+                        self.ioctx.remove_object(name)
+                    except rados.ObjectNotFound:
+                        # It's possible that we already removed that object
+                        # and then crashed before removing it from the OMAP
+                        # key list; no big deal anyway.
+                        pass
+                self.ioctx.remove_omap_keys(op, names)
+                self.ioctx.operate_write_op(
+                    op, self._build_unaggregated_timeserie_path(metric, 3))
+
+    def _delete_metric(self, metric):
+        with rados.ReadOpCtx() as op:
+            omaps, ret = self.ioctx.get_omap_vals(op, "", "", -1)
+            try:
+                self.ioctx.operate_read_op(
+                    op, self._build_unaggregated_timeserie_path(metric, 3))
+            except rados.ObjectNotFound:
+                return
+
+            # NOTE(sileht): after reading the libradospy code, I'm not sure
+            # that ret will have the correct value: get_omap_vals transforms
+            # the C int into a Python int before operate_read_op is called,
+            # and I don't know whether the int content is copied during this
+            # transformation or whether this is a pointer to the C int; I
+            # think it's copied...
+            try:
+                ceph.errno_to_exception(ret)
+            except rados.ObjectNotFound:
+                return
+
+        ops = [self.ioctx.aio_remove(name) for name, _ in omaps]
+
+        for op in ops:
+            op.wait_for_complete_and_cb()
+
+        try:
+            self.ioctx.remove_object(
+                self._build_unaggregated_timeserie_path(metric, 3))
+        except rados.ObjectNotFound:
+            # It's possible that the object does not exist
+            pass
+
+    def _get_splits_unbatched(self, metric, key, aggregation, version=3):
+        try:
+            name = self._get_object_name(
+                metric, key, aggregation.method, version)
+            return self._get_object_content(name)
+        except rados.ObjectNotFound:
+            return
+
+    def _list_split_keys_unbatched(self, metric, aggregations, version=3):
+        with rados.ReadOpCtx() as op:
+            omaps, ret = self.ioctx.get_omap_vals(op, "", "", -1)
+            try:
+                self.ioctx.operate_read_op(
+                    op, self._build_unaggregated_timeserie_path(metric, 3))
+            except rados.ObjectNotFound:
+                raise storage.MetricDoesNotExist(metric)
+
+            # NOTE(sileht): after reading the libradospy code, I'm not sure
+            # that ret will have the correct value: get_omap_vals transforms
+            # the C int into a Python int before operate_read_op is called,
+            # and I don't know whether the int content is copied during this
+            # transformation or whether this is a pointer to the C int; I
+            # think it's copied...
+            try:
+                ceph.errno_to_exception(ret)
+            except rados.ObjectNotFound:
+                raise storage.MetricDoesNotExist(metric)
+
+            raw_keys = [name.split("_")
+                        for name, value in omaps
+                        if self._version_check(name, version)]
+            keys = collections.defaultdict(set)
+            if not raw_keys:
+                return keys
+            zipped = list(zip(*raw_keys))
+            k_timestamps = utils.to_timestamps(zipped[2])
+            k_methods = zipped[3]
+            k_granularities = list(map(utils.to_timespan, zipped[4]))
+
+            for timestamp, method, granularity in six.moves.zip(
+                    k_timestamps, k_methods, k_granularities):
+                for aggregation in aggregations:
+                    if (aggregation.method == method
+                       and aggregation.granularity == granularity):
+                        keys[aggregation].add(carbonara.SplitKey(
+                            timestamp,
+                            sampling=granularity))
+                        break
+            return keys
+
+    @staticmethod
+    def _build_unaggregated_timeserie_path(metric, version):
+        return (('gnocchi_%s_none' % metric.id)
+                + ("_v%s" % version if version else ""))
+
+    def _get_or_create_unaggregated_timeseries_unbatched(
+            self, metric, version=3):
+        try:
+            contents = self._get_object_content(
+                self._build_unaggregated_timeserie_path(metric, version))
+        except rados.ObjectNotFound:
+            self._create_metric(metric)
+        else:
+            # _create_metric writes "" so replace it by None to indicate
+            # emptiness instead.
+            return contents or None
+
+    def _store_unaggregated_timeseries_unbatched(
+            self, metric, data, version=3):
+        self.ioctx.write_full(
+            self._build_unaggregated_timeserie_path(metric, version), data)
+
+    def _get_object_content(self, name):
+        offset = 0
+        content = b''
+        while True:
+            data = self.ioctx.read(name, offset=offset)
+            if not data:
+                break
+            content += data
+            offset += len(data)
+        return content
diff --git a/gnocchi/storage/file.py b/gnocchi/storage/file.py
new file mode 100644
index 0000000000000000000000000000000000000000..3383320e05c23dec7d2b5978dcfb70a7f42dd786
--- /dev/null
+++ b/gnocchi/storage/file.py
@@ -0,0 +1,249 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014 Objectif Libre
+# Copyright © 2015-2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+import errno
+import itertools
+import json
+import operator
+import os
+import shutil
+import tempfile
+import uuid
+
+import daiquiri
+from oslo_config import cfg
+import six
+
+from gnocchi import carbonara
+from gnocchi import storage
+from gnocchi import utils
+
+
+OPTS = [
+    cfg.StrOpt('file_basepath',
+               default='/var/lib/gnocchi',
+               help='Path used to store gnocchi data files.'),
+    cfg.IntOpt('file_subdir_len',
+               default=2, min=0, max=32,
+               help='If > 0, create a subdirectory for every N bytes '
+                    'of the metric uuid.')
+]
+
+ATTRGETTER_METHOD = operator.attrgetter("method")
+
+LOG = daiquiri.getLogger(__name__)
+
+# Python 2 compatibility
+try:
+    FileNotFoundError
+except NameError:
+    FileNotFoundError = None
+
+
+class FileStorage(storage.StorageDriver):
+    WRITE_FULL = True
+    CFG_PREFIX = 'gnocchi-storage-config'
+    CFG_SUBDIR_LEN = 'subdir_len'
+
+    def __init__(self, conf):
+        super(FileStorage, self).__init__(conf)
+        self.basepath = conf.file_basepath
+        self.basepath_tmp = os.path.join(self.basepath, 'tmp')
+        self.conf = conf
+        self._file_subdir_len = None
+
+    @property
+    def SUBDIR_LEN(self):
+        if self._file_subdir_len is None:
+            config_path = os.path.join(self.basepath_tmp, self.CFG_PREFIX)
+            if os.path.exists(config_path):
+                with open(config_path, 'r') as f:
+                    self._file_subdir_len = json.load(f)[self.CFG_SUBDIR_LEN]
+            elif self.is_old_directory_structure():
+                self._file_subdir_len = 0
+            else:
+                # Fresh install
+                self._file_subdir_len = self.conf.file_subdir_len
+
+            if self._file_subdir_len != self.conf.file_subdir_len:
+                LOG.warning("Changing file_subdir_len is not supported, using "
+                            "the stored value: %d", self._file_subdir_len)
+        return self._file_subdir_len
+
+    def set_subdir_len(self, subdir_len):
+        data = {self.CFG_SUBDIR_LEN: subdir_len}
+        with open(os.path.join(self.basepath_tmp, self.CFG_PREFIX), 'w') as f:
+            json.dump(data, f)
+
+    def upgrade(self):
+        utils.ensure_paths([self.basepath_tmp])
+        self.set_subdir_len(self.SUBDIR_LEN)
+
+    def is_old_directory_structure(self):
+        # NOTE(sileht): We look for at least one metric directory
+        for p in os.listdir(self.basepath):
+            if os.path.isdir(os.path.join(self.basepath, p)) and '-' in p:
+                try:
+                    uuid.UUID(p)
+                except ValueError:
+                    pass
+                else:
+                    return True
+        return False
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, str(self.basepath))
+
+    def _atomic_file_store(self, dest, data):
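+        # Write into a temporary file under basepath_tmp, then rename it
+        # into place: os.rename() is atomic on POSIX when source and
+        # destination live on the same filesystem, so readers never observe
+        # a partially written file.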
+        tmpfile = tempfile.NamedTemporaryFile(
+            prefix='gnocchi', dir=self.basepath_tmp,
+            delete=False)
+        tmpfile.write(data)
+        tmpfile.close()
+        os.rename(tmpfile.name, dest)
+
+    def _build_metric_dir(self, metric):
+        path_parts = [self.basepath]
+        if self.SUBDIR_LEN > 0:
+            metric_id = metric.id.hex
+            path_parts.extend(
+                [metric_id[start:start+self.SUBDIR_LEN]
+                 for start in range(0, 32, self.SUBDIR_LEN)
+                 ])
+        path_parts.append(str(metric.id))
+        return os.path.join(*path_parts)
+
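+    # Layout sketch: with the default file_subdir_len of 2, the 32 hex
+    # characters of the metric id are spread over 16 two-character directory
+    # levels, ending with the dashed uuid itself, e.g. (hypothetical id):
+    #
+    #     /var/lib/gnocchi/18/7d/.../<full dashed uuid>
+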
+    def _build_unaggregated_timeserie_path(self, metric, version=3):
+        return os.path.join(
+            self._build_metric_dir(metric),
+            'none' + ("_v%s" % version if version else ""))
+
+    def _build_metric_path(self, metric, aggregation):
+        return os.path.join(self._build_metric_dir(metric),
+                            "agg_" + aggregation)
+
+    def _build_metric_path_for_split(self, metric, aggregation,
+                                     key, version=3):
+        path = os.path.join(
+            self._build_metric_path(metric, aggregation),
+            str(key)
+            + "_"
+            + str(utils.timespan_total_seconds(key.sampling)))
+        return path + '_v%s' % version if version else path
+
+    def _create_metric(self, metric):
+        path = self._build_metric_dir(metric)
+        try:
+            os.makedirs(path, 0o750)
+        except OSError as e:
+            if e.errno == errno.EEXIST:
+                raise storage.MetricAlreadyExists(metric)
+            raise
+        for agg in metric.archive_policy.aggregation_methods:
+            try:
+                os.mkdir(self._build_metric_path(metric, agg), 0o750)
+            except OSError as e:
+                if e.errno != errno.EEXIST:
+                    raise
+
+    def _store_unaggregated_timeseries_unbatched(
+            self, metric, data, version=3):
+        dest = self._build_unaggregated_timeserie_path(metric, version)
+        with open(dest, "wb") as f:
+            f.write(data)
+
+    def _get_or_create_unaggregated_timeseries_unbatched(
+            self, metric, version=3):
+        path = self._build_unaggregated_timeserie_path(metric, version)
+        try:
+            with open(path, 'rb') as f:
+                return f.read()
+        except FileNotFoundError:
+            pass
+        except IOError as e:
+            if e.errno != errno.ENOENT:
+                raise
+        try:
+            self._create_metric(metric)
+        except storage.MetricAlreadyExists:
+            pass
+
+    def _list_split_keys_unbatched(self, metric, aggregations, version=3):
+        keys = collections.defaultdict(set)
+        for method, grouped_aggregations in itertools.groupby(
+                sorted(aggregations, key=ATTRGETTER_METHOD),
+                ATTRGETTER_METHOD):
+            try:
+                files = os.listdir(
+                    self._build_metric_path(metric, method))
+            except OSError as e:
+                if e.errno == errno.ENOENT:
+                    raise storage.MetricDoesNotExist(metric)
+                raise
+            raw_keys = list(map(
+                lambda k: k.split("_"),
+                filter(
+                    lambda f: self._version_check(f, version),
+                    files)))
+            if not raw_keys:
+                continue
+            zipped = list(zip(*raw_keys))
+            k_timestamps = utils.to_timestamps(zipped[0])
+            k_granularities = list(map(utils.to_timespan, zipped[1]))
+            grouped_aggregations = list(grouped_aggregations)
+            for timestamp, granularity in six.moves.zip(
+                    k_timestamps, k_granularities):
+                for agg in grouped_aggregations:
+                    if granularity == agg.granularity:
+                        keys[agg].add(carbonara.SplitKey(
+                            timestamp,
+                            sampling=granularity))
+                        break
+        return keys
+
+    def _delete_metric_splits_unbatched(
+            self, metric, key, aggregation, version=3):
+        os.unlink(self._build_metric_path_for_split(
+            metric, aggregation.method, key, version))
+
+    def _store_metric_splits_unbatched(self, metric, key, aggregation, data,
+                                       offset, version):
+        self._atomic_file_store(
+            self._build_metric_path_for_split(
+                metric, aggregation.method, key, version),
+            data)
+
+    def _delete_metric(self, metric):
+        path = self._build_metric_dir(metric)
+        try:
+            shutil.rmtree(path)
+        except OSError as e:
+            if e.errno != errno.ENOENT:
+                # NOTE(jd) Maybe the metric has never been created (no
+                # measures)
+                raise
+
+    def _get_splits_unbatched(self, metric, key, aggregation, version=3):
+        path = self._build_metric_path_for_split(
+            metric, aggregation.method, key, version)
+        try:
+            with open(path, 'rb') as aggregation_file:
+                return aggregation_file.read()
+        except IOError as e:
+            if e.errno == errno.ENOENT:
+                return
+            raise
diff --git a/gnocchi/storage/redis.py b/gnocchi/storage/redis.py
new file mode 100644
index 0000000000000000000000000000000000000000..d65027af3461c0cb578da257eb8d78a0a6eee696
--- /dev/null
+++ b/gnocchi/storage/redis.py
@@ -0,0 +1,195 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017-2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+
+import six
+
+from gnocchi import carbonara
+from gnocchi.common import redis
+from gnocchi import storage
+from gnocchi import utils
+
+
+class RedisStorage(storage.StorageDriver):
+    WRITE_FULL = True
+
+    STORAGE_PREFIX = b"timeseries"
+    FIELD_SEP = '_'
+    FIELD_SEP_B = b'_'
+
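+    # Server-side Lua script: HSCAN the metric hash for fields matching the
+    # aggregation pattern and return each matching field name split into
+    # (timestamp, method, granularity).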
+    _SCRIPTS = {
+        "list_split_keys": """
+local metric_key = KEYS[1]
+local ids = {}
+local cursor = 0
+local substring = "([^%s]*)%s([^%s]*)%s([^%s]*)"
+repeat
+    local result = redis.call("HSCAN", metric_key, cursor, "MATCH", ARGV[1])
+    cursor = tonumber(result[1])
+    for i, v in ipairs(result[2]) do
+        -- Only return keys, not values
+        if i %% 2 ~= 0 then
+            local timestamp, method, granularity = v:gmatch(substring)()
+            ids[#ids + 1] = {timestamp, method, granularity}
+        end
+    end
+until cursor == 0
+return ids
+""" % (FIELD_SEP, FIELD_SEP, FIELD_SEP, FIELD_SEP, FIELD_SEP),
+    }
+
+    def __init__(self, conf):
+        super(RedisStorage, self).__init__(conf)
+        self._client, self._scripts = redis.get_client(conf, self._SCRIPTS)
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, self._client)
+
+    def _metric_key(self, metric):
+        return redis.SEP.join([self.STORAGE_PREFIX, str(metric.id).encode()])
+
+    @staticmethod
+    def _unaggregated_field(version=3):
+        return 'none' + ("_v%s" % version if version else "")
+
+    @classmethod
+    def _aggregated_field_for_split(cls, aggregation, key, version=3,
+                                    granularity=None):
+        path = cls.FIELD_SEP.join([
+            str(key), aggregation,
+            str(utils.timespan_total_seconds(granularity or key.sampling))])
+        return path + '_v%s' % version if version else path
+
+    def _store_unaggregated_timeseries(self, metrics_and_data, version=3):
+        pipe = self._client.pipeline(transaction=False)
+        unagg_key = self._unaggregated_field(version)
+        for metric, data in metrics_and_data:
+            pipe.hset(self._metric_key(metric), unagg_key, data)
+        pipe.execute()
+
+    def _get_or_create_unaggregated_timeseries(self, metrics, version=3):
+        pipe = self._client.pipeline(transaction=False)
+        for metric in metrics:
+            metric_key = self._metric_key(metric)
+            unagg_key = self._unaggregated_field(version)
+            # Create the metric if it was not created
+            pipe.hsetnx(metric_key, unagg_key, "")
+            # Get the data
+            pipe.hget(metric_key, unagg_key)
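+        # pipe.execute() returns results in call order, i.e. one
+        # (hsetnx, hget) pair per metric, hence grouper(..., 2) below.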
+        ts = {
+            # Replace "" by None
+            metric: data or None
+            for metric, (created, data)
+            in six.moves.zip(metrics, utils.grouper(pipe.execute(), 2))
+        }
+        return ts
+
+    def _list_split_keys(self, metrics_and_aggregations, version=3):
+        pipe = self._client.pipeline(transaction=False)
+        # Keep an ordered list of metrics
+        metrics = list(metrics_and_aggregations.keys())
+        for metric in metrics:
+            key = self._metric_key(metric)
+            pipe.exists(key)
+            aggregations = metrics_and_aggregations[metric]
+            for aggregation in aggregations:
+                self._scripts["list_split_keys"](
+                    keys=[key], args=[self._aggregated_field_for_split(
+                        aggregation.method, "*",
+                        version, aggregation.granularity)],
+                    client=pipe,
+                )
+        results = pipe.execute()
+        keys = collections.defaultdict(dict)
+        start = 0
+        for metric in metrics:
+            metric_exists_p = results[start]
+            if not metric_exists_p:
+                raise storage.MetricDoesNotExist(metric)
+            aggregations = metrics_and_aggregations[metric]
+            number_of_aggregations = len(aggregations)
+            keys_for_aggregations = results[
+                start + 1:start + 1 + number_of_aggregations
+            ]
+            start += 1 + number_of_aggregations  # 1 for metric_exists_p
+            for aggregation, k in six.moves.zip(
+                    aggregations, keys_for_aggregations):
+                if not k:
+                    keys[metric][aggregation] = set()
+                    continue
+                timestamps, methods, granularities = list(zip(*k))
+                timestamps = utils.to_timestamps(timestamps)
+                granularities = map(utils.to_timespan, granularities)
+                keys[metric][aggregation] = {
+                    carbonara.SplitKey(timestamp,
+                                       sampling=granularity)
+                    for timestamp, granularity
+                    in six.moves.zip(timestamps, granularities)
+                }
+        return keys
+
+    def _delete_metric_splits(self, metrics_keys_aggregations, version=3):
+        pipe = self._client.pipeline(transaction=False)
+        for metric, keys_and_aggregations in six.iteritems(
+                metrics_keys_aggregations):
+            metric_key = self._metric_key(metric)
+            for key, aggregation in keys_and_aggregations:
+                pipe.hdel(metric_key, self._aggregated_field_for_split(
+                    aggregation.method, key, version))
+        pipe.execute()
+
+    def _store_metric_splits(self, metrics_keys_aggregations_data_offset,
+                             version=3):
+        pipe = self._client.pipeline(transaction=False)
+        for metric, keys_aggs_data_offset in six.iteritems(
+                metrics_keys_aggregations_data_offset):
+            metric_key = self._metric_key(metric)
+            for key, aggregation, data, offset in keys_aggs_data_offset:
+                key = self._aggregated_field_for_split(
+                    aggregation.method, key, version)
+                pipe.hset(metric_key, key, data)
+        pipe.execute()
+
+    def _delete_metric(self, metric):
+        self._client.delete(self._metric_key(metric))
+
+    def _get_splits(self, metrics_aggregations_keys, version=3):
+        # Use a list of (metric, aggregation) pairs with a stable ordering
+        metrics_aggregations = [
+            (metric, aggregation)
+            for metric, aggregation_and_keys in six.iteritems(
+                metrics_aggregations_keys)
+            for aggregation, keys in six.iteritems(aggregation_and_keys)
+            # Do not send any fetch request if keys is empty
+            if keys
+        ]
+
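+        # One HMGET per (metric, aggregation) pair; pipeline results come
+        # back in the order the commands were queued, so they can be zipped
+        # with metrics_aggregations afterwards.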
+        pipe = self._client.pipeline(transaction=False)
+        for metric, aggregation in metrics_aggregations:
+            pipe.hmget(
+                self._metric_key(metric),
+                [self._aggregated_field_for_split(aggregation.method,
+                                                  key, version)
+                 for key in metrics_aggregations_keys[metric][aggregation]])
+
+        results = collections.defaultdict(
+            lambda: collections.defaultdict(list))
+
+        for (metric, aggregation), result in six.moves.zip(
+                metrics_aggregations, pipe.execute()):
+            results[metric][aggregation] = result
+
+        return results
diff --git a/gnocchi/storage/s3.py b/gnocchi/storage/s3.py
new file mode 100644
index 0000000000000000000000000000000000000000..16b73e5cc897f4f0db400404accb67e5a4523949
--- /dev/null
+++ b/gnocchi/storage/s3.py
@@ -0,0 +1,244 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2018 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import os
+
+from oslo_config import cfg
+import tenacity
+
+from gnocchi import carbonara
+from gnocchi.common import s3
+from gnocchi import storage
+from gnocchi import utils
+
+boto3 = s3.boto3
+botocore = s3.botocore
+
+OPTS = [
+    cfg.StrOpt('s3_endpoint_url',
+               help='S3 endpoint URL'),
+    cfg.StrOpt('s3_region_name',
+               default=os.getenv("AWS_DEFAULT_REGION"),
+               help='S3 region name'),
+    cfg.StrOpt('s3_access_key_id',
+               default=os.getenv("AWS_ACCESS_KEY_ID"),
+               help='S3 access key id'),
+    cfg.StrOpt('s3_secret_access_key',
+               default=os.getenv("AWS_SECRET_ACCESS_KEY"),
+               help='S3 secret access key'),
+    cfg.StrOpt('s3_bucket_prefix',
+               # Max bucket length is 63 and we use "-" as separator
+               # 63 - 1 - len(uuid) = 26
+               max_length=26,
+               default='gnocchi',
+               help='Prefix to namespace metric bucket.'),
+    cfg.FloatOpt('s3_check_consistency_timeout',
+                 min=0,
+                 default=60,
+                 help="Maximum time to wait checking data consistency when "
+                 "writing to S3. Set to 0 to disable data consistency "
+                 "validation."),
+    cfg.IntOpt('s3_max_pool_connections',
+               min=1,
+               default=50,
+               help="The maximum number of connections to keep in a "
+               "connection pool."),
+]
+
+
+def retry_if_operationaborted(exception):
+    return (isinstance(exception, botocore.exceptions.ClientError)
+            and exception.response['Error'].get('Code') == "OperationAborted")
+
+
+class S3Storage(storage.StorageDriver):
+
+    WRITE_FULL = True
+
+    _consistency_wait = tenacity.wait_exponential(multiplier=0.1)
+
+    def __init__(self, conf):
+        super(S3Storage, self).__init__(conf)
+        self.s3, self._region_name, self._bucket_prefix = (
+            s3.get_connection(conf)
+        )
+        self._bucket_name = '%s-aggregates' % self._bucket_prefix
+        if conf.s3_check_consistency_timeout > 0:
+            self._consistency_stop = tenacity.stop_after_delay(
+                conf.s3_check_consistency_timeout)
+        else:
+            self._consistency_stop = None
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, self._bucket_name)
+
+    def upgrade(self):
+        super(S3Storage, self).upgrade()
+        try:
+            s3.create_bucket(self.s3, self._bucket_name, self._region_name)
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error'].get('Code') != "BucketAlreadyExists":
+                raise
+
+    @staticmethod
+    def _object_name(split_key, aggregation, version=3):
+        name = '%s_%s_%s' % (
+            aggregation,
+            utils.timespan_total_seconds(split_key.sampling),
+            split_key,
+        )
+        return name + '_v%s' % version if version else name
+
+    @staticmethod
+    def _prefix(metric):
+        return str(metric.id) + '/'
+
+    def _put_object_safe(self, Bucket, Key, Body):
+        put = self.s3.put_object(Bucket=Bucket, Key=Key, Body=Body)
+
+        if self._consistency_stop:
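+            # S3 overwrite PUTs have historically been only eventually
+            # consistent; poll with HEAD until the stored ETag matches the
+            # one returned by put_object().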
+
+            def _head():
+                return self.s3.head_object(Bucket=Bucket,
+                                           Key=Key, IfMatch=put['ETag'])
+
+            tenacity.Retrying(
+                retry=tenacity.retry_if_result(
+                    lambda r: r['ETag'] != put['ETag']),
+                wait=self._consistency_wait,
+                stop=self._consistency_stop)(_head)
+
+    def _store_metric_splits_unbatched(self, metric, key, aggregation, data,
+                                       offset, version):
+        self._put_object_safe(
+            Bucket=self._bucket_name,
+            Key=self._prefix(metric) + self._object_name(
+                key, aggregation.method, version),
+            Body=data)
+
+    def _delete_metric_splits_unbatched(self, metric, key, aggregation,
+                                        version=3):
+        self.s3.delete_object(
+            Bucket=self._bucket_name,
+            Key=self._prefix(metric) + self._object_name(
+                key, aggregation.method, version))
+
+    def _delete_metric(self, metric):
+        bucket = self._bucket_name
+        response = {}
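+        # list_objects_v2() returns at most 1000 keys per call; page through
+        # with the continuation token until 'IsTruncated' is false, bulk
+        # deleting each page as we go.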
+        while response.get('IsTruncated', True):
+            if 'NextContinuationToken' in response:
+                kwargs = {
+                    'ContinuationToken': response['NextContinuationToken']
+                }
+            else:
+                kwargs = {}
+            try:
+                response = self.s3.list_objects_v2(
+                    Bucket=bucket, Prefix=self._prefix(metric), **kwargs)
+            except botocore.exceptions.ClientError as e:
+                if e.response['Error'].get('Code') == "NoSuchKey":
+                    # Maybe it has never been created (no measures)
+                    return
+                raise
+            s3.bulk_delete(self.s3, bucket,
+                           [c['Key'] for c in response.get('Contents', ())])
+
+    def _get_splits_unbatched(self, metric, key, aggregation, version=3):
+        try:
+            response = self.s3.get_object(
+                Bucket=self._bucket_name,
+                Key=self._prefix(metric) + self._object_name(
+                    key, aggregation.method, version))
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error'].get('Code') == 'NoSuchKey':
+                return
+            raise
+        return response['Body'].read()
+
+    def _metric_exists_p(self, metric, version):
+        unaggkey = self._build_unaggregated_timeserie_path(metric, version)
+        try:
+            self.s3.head_object(Bucket=self._bucket_name, Key=unaggkey)
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error'].get('Code') == "404":
+                return False
+            raise
+        return True
+
+    def _list_split_keys_unbatched(self, metric, aggregations, version=3):
+        bucket = self._bucket_name
+        keys = {}
+        for aggregation in aggregations:
+            keys[aggregation] = set()
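+            # Objects are named '<method>_<granularity>_<timestamp>_v<version>',
+            # so listing on the '<method>_<granularity>' prefix narrows the
+            # results to this aggregation.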
+            response = {}
+            while response.get('IsTruncated', True):
+                if 'NextContinuationToken' in response:
+                    kwargs = {
+                        'ContinuationToken': response['NextContinuationToken']
+                    }
+                else:
+                    kwargs = {}
+                response = self.s3.list_objects_v2(
+                    Bucket=bucket,
+                    Prefix=self._prefix(metric) + '%s_%s' % (
+                        aggregation.method,
+                        utils.timespan_total_seconds(
+                            aggregation.granularity),
+                    ),
+                    **kwargs)
+                # If response is empty then check that the metric exists
+                contents = response.get('Contents', ())
+                if (not contents
+                        and not self._metric_exists_p(metric, version)):
+                    raise storage.MetricDoesNotExist(metric)
+                for f in contents:
+                    try:
+                        if self._version_check(f['Key'], version):
+                            meta = f['Key'].split('_')
+                            keys[aggregation].add(carbonara.SplitKey(
+                                utils.to_timestamp(meta[2]),
+                                sampling=aggregation.granularity))
+                    except (ValueError, IndexError):
+                        # Might be "none", or any other file. Be resilient.
+                        continue
+        return keys
+
+    @staticmethod
+    def _build_unaggregated_timeserie_path(metric, version):
+        return S3Storage._prefix(metric) + 'none' + ("_v%s" % version
+                                                     if version else "")
+
+    def _get_or_create_unaggregated_timeseries_unbatched(
+            self, metric, version=3):
+        key = self._build_unaggregated_timeserie_path(metric, version)
+        try:
+            response = self.s3.get_object(
+                Bucket=self._bucket_name, Key=key)
+        except botocore.exceptions.ClientError as e:
+            if e.response['Error'].get('Code') == "NoSuchKey":
+                # Create the metric with empty data
+                self._put_object_safe(
+                    Bucket=self._bucket_name, Key=key, Body="")
+            else:
+                raise
+        else:
+            return response['Body'].read() or None
+
+    def _store_unaggregated_timeseries_unbatched(
+            self, metric, data, version=3):
+        self._put_object_safe(
+            Bucket=self._bucket_name,
+            Key=self._build_unaggregated_timeserie_path(metric, version),
+            Body=data)
diff --git a/gnocchi/storage/swift.py b/gnocchi/storage/swift.py
new file mode 100644
index 0000000000000000000000000000000000000000..509113950746026dd00ecf8cf57eddd9420414fb
--- /dev/null
+++ b/gnocchi/storage/swift.py
@@ -0,0 +1,223 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2018 Red Hat
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import collections
+
+from oslo_config import cfg
+import six
+
+from gnocchi import carbonara
+from gnocchi.common import swift
+from gnocchi import storage
+from gnocchi import utils
+
+swclient = swift.swclient
+swift_utils = swift.swift_utils
+
+OPTS = [
+    cfg.StrOpt('swift_auth_version',
+               default='1',
+               help='Swift authentication version to use.'),
+    cfg.BoolOpt('swift_auth_insecure',
+                default=False,
+                help='If True, swiftclient won\'t check for a valid SSL '
+                     'certificate when authenticating.'),
+    cfg.StrOpt('swift_url',
+               help='Swift URL. '
+               'If unset, it is obtained from the auth service.'),
+    cfg.StrOpt('swift_authurl',
+               default="http://localhost:8080/auth/v1.0",
+               help='Swift auth URL.'),
+    cfg.StrOpt('swift_preauthtoken',
+               secret=True,
+               help='Swift token to use to authenticate.'),
+    cfg.StrOpt('swift_cacert',
+               help='A string giving the CA certificate file to use in '
+                    'SSL connections for verifying certs.'),
+    cfg.StrOpt('swift_region',
+               help='Swift region.'),
+    cfg.StrOpt('swift_user',
+               default="admin:admin",
+               help='Swift user.'),
+    cfg.StrOpt('swift_user_domain_name',
+               default='Default',
+               help='Swift user domain name.'),
+    cfg.StrOpt('swift_key',
+               secret=True,
+               default="admin",
+               help='Swift key/password.'),
+    cfg.StrOpt('swift_project_name',
+               help='Swift tenant name, only used in v2/v3 auth.',
+               deprecated_name="swift_tenant_name"),
+    cfg.StrOpt('swift_project_domain_name',
+               default='Default',
+               help='Swift project domain name.'),
+    cfg.StrOpt('swift_container_prefix',
+               default='gnocchi',
+               help='Prefix to namespace metric containers.'),
+    cfg.StrOpt('swift_endpoint_type',
+               default='publicURL',
+               help='Endpoint type to connect to Swift',),
+    cfg.StrOpt('swift_service_type',
+               default='object-store',
+               help='A string giving the service type of the swift service '
+                    'to use. This setting is only used if '
+                    'swift_auth_version is 2.'),
+    cfg.IntOpt('swift_timeout',
+               min=0,
+               default=300,
+               help='Connection timeout in seconds.'),
+]
+
+
+class SwiftStorage(storage.StorageDriver):
+
+    WRITE_FULL = True
+    # NOTE(sileht): Using threads with swiftclient doesn't work
+    # as expected, so disable it
+    MAP_METHOD = staticmethod(utils.sequencial_map)
+
+    def __init__(self, conf):
+        super(SwiftStorage, self).__init__(conf)
+        self.swift = swift.get_connection(conf)
+        self._container_prefix = conf.swift_container_prefix
+
+    def __str__(self):
+        return "%s: %s" % (self.__class__.__name__, self._container_prefix)
+
+    def _container_name(self, metric):
+        return '%s.%s' % (self._container_prefix, str(metric.id))
+
+    @staticmethod
+    def _object_name(split_key, aggregation, version=3):
+        name = '%s_%s_%s' % (
+            split_key, aggregation,
+            utils.timespan_total_seconds(split_key.sampling),
+        )
+        return name + '_v%s' % version if version else name
+
+    def _create_metric(self, metric):
+        # TODO(jd) A container per user in their account?
+        resp = {}
+        self.swift.put_container(self._container_name(metric),
+                                 response_dict=resp)
+        # put_container() should return 201 Created; if it returns 204, that
+        # means the metric was already created!
+        if resp['status'] == 204:
+            raise storage.MetricAlreadyExists(metric)
+
+    def _store_metric_splits_unbatched(self, metric, key, aggregation, data,
+                                       offset, version):
+        self.swift.put_object(
+            self._container_name(metric),
+            self._object_name(key, aggregation.method, version),
+            data)
+
+    def _delete_metric_splits_unbatched(
+            self, metric, key, aggregation, version=3):
+        self.swift.delete_object(
+            self._container_name(metric),
+            self._object_name(key, aggregation.method, version))
+
+    def _delete_metric(self, metric):
+        container = self._container_name(metric)
+        try:
+            headers, files = self.swift.get_container(
+                container, full_listing=True)
+        except swclient.ClientException as e:
+            if e.http_status != 404:
+                # Maybe it has never been created (no measures)
+                raise
+        else:
+            swift.bulk_delete(self.swift, container, files)
+            try:
+                self.swift.delete_container(container)
+            except swclient.ClientException as e:
+                if e.http_status != 404:
+                    # Deleted in the meantime? Whatever.
+                    raise
+
+    def _get_splits_unbatched(self, metric, key, aggregation, version=3):
+        try:
+            headers, contents = self.swift.get_object(
+                self._container_name(metric), self._object_name(
+                    key, aggregation.method, version))
+        except swclient.ClientException as e:
+            if e.http_status == 404:
+                return
+            raise
+        return contents
+
+    def _list_split_keys_unbatched(self, metric, aggregations, version=3):
+        container = self._container_name(metric)
+        try:
+            headers, files = self.swift.get_container(
+                container, full_listing=True)
+        except swclient.ClientException as e:
+            if e.http_status == 404:
+                raise storage.MetricDoesNotExist(metric)
+            raise
+
+        raw_keys = list(map(
+            lambda k: k.split("_"),
+            (f['name'] for f in files
+             if self._version_check(f['name'], version)
+             and not f['name'].startswith('none'))))
+        keys = collections.defaultdict(set)
+        if not raw_keys:
+            return keys
+        zipped = list(zip(*raw_keys))
+        k_timestamps = utils.to_timestamps(zipped[0])
+        k_methods = zipped[1]
+        k_granularities = list(map(utils.to_timespan, zipped[2]))
+
+        for timestamp, method, granularity in six.moves.zip(
+                k_timestamps, k_methods, k_granularities):
+            for aggregation in aggregations:
+                if (aggregation.method == method
+                   and aggregation.granularity == granularity):
+                    keys[aggregation].add(carbonara.SplitKey(
+                        timestamp,
+                        sampling=granularity))
+                    break
+        return keys
+
+    @staticmethod
+    def _build_unaggregated_timeserie_path(version):
+        return 'none' + ("_v%s" % version if version else "")
+
+    def _get_or_create_unaggregated_timeseries_unbatched(
+            self, metric, version=3):
+        try:
+            headers, contents = self.swift.get_object(
+                self._container_name(metric),
+                self._build_unaggregated_timeserie_path(version))
+        except swclient.ClientException as e:
+            if e.http_status != 404:
+                raise
+            try:
+                self._create_metric(metric)
+            except storage.MetricAlreadyExists:
+                pass
+        else:
+            return contents
+
+    def _store_unaggregated_timeseries_unbatched(
+            self, metric, data, version=3):
+        self.swift.put_object(
+            self._container_name(metric),
+            self._build_unaggregated_timeserie_path(version),
+            data)
diff --git a/gnocchi/tests/__init__.py b/gnocchi/tests/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/tests/base.py b/gnocchi/tests/base.py
new file mode 100644
index 0000000000000000000000000000000000000000..9a7e8396155b3dab38888976a505ec4a9109381e
--- /dev/null
+++ b/gnocchi/tests/base.py
@@ -0,0 +1,396 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2016 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import functools
+import json
+import logging
+import os
+import subprocess
+import threading
+import uuid
+
+import daiquiri
+import fixtures
+import numpy
+import six
+from six.moves.urllib.parse import unquote
+try:
+    from swiftclient import exceptions as swexc
+except ImportError:
+    swexc = None
+from testtools import testcase
+
+from gnocchi import archive_policy
+from gnocchi import chef
+from gnocchi.cli import metricd
+from gnocchi import exceptions
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import storage
+from gnocchi.tests import utils
+
+
+class SkipNotImplementedMeta(type):
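+    # Metaclass that wraps setUp and every test_* method so that a driver
+    # raising gnocchi.exceptions.NotImplementedError skips the test instead
+    # of failing it.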
+    def __new__(cls, name, bases, local):
+        for attr in local:
+            value = local[attr]
+            if callable(value) and (
+                    attr.startswith('test_') or attr == 'setUp'):
+                local[attr] = _skip_decorator(value)
+        return type.__new__(cls, name, bases, local)
+
+
+def _skip_decorator(func):
+    @functools.wraps(func)
+    def skip_if_not_implemented(*args, **kwargs):
+        try:
+            return func(*args, **kwargs)
+        except exceptions.NotImplementedError as e:
+            raise testcase.TestSkipped(six.text_type(e))
+    return skip_if_not_implemented
+
+
+class FakeSwiftClient(object):
+    def __init__(self, *args, **kwargs):
+        self.kvs = {}
+
+    def put_container(self, container, response_dict=None):
+        if response_dict is not None:
+            if container in self.kvs:
+                response_dict['status'] = 204
+            else:
+                response_dict['status'] = 201
+        self.kvs[container] = {}
+
+    def get_container(self, container, delimiter=None,
+                      path=None, full_listing=False, limit=None):
+        try:
+            container = self.kvs[container]
+        except KeyError:
+            raise swexc.ClientException("No such container",
+                                        http_status=404)
+
+        files = []
+        directories = set()
+        for k, v in six.iteritems(container.copy()):
+            if path and not k.startswith(path):
+                continue
+
+            if delimiter is not None and delimiter in k:
+                dirname = k.split(delimiter, 1)[0]
+                if dirname not in directories:
+                    directories.add(dirname)
+                    files.append({'subdir': dirname + delimiter})
+            else:
+                files.append({'bytes': len(v),
+                              'last_modified': None,
+                              'hash': None,
+                              'name': k,
+                              'content_type': None})
+
+        if full_listing:
+            end = None
+        elif limit:
+            end = limit
+        else:
+            # The real default limit is 10000, but returning a single entry
+            # here is enough to make sure a test that forgets full_listing
+            # fails.
+            end = 1
+
+        return ({'x-container-object-count': len(container.keys())},
+                (files + list(directories))[:end])
+
+    def put_object(self, container, key, obj):
+        if hasattr(obj, "seek"):
+            obj.seek(0)
+            obj = obj.read()
+            # TODO(jd) Maybe we should reset the seek(), but well…
+        try:
+            self.kvs[container][key] = obj
+        except KeyError:
+            raise swexc.ClientException("No such container",
+                                        http_status=404)
+
+    def get_object(self, container, key):
+        try:
+            return {}, self.kvs[container][key]
+        except KeyError:
+            raise swexc.ClientException("No such container/object",
+                                        http_status=404)
+
+    def delete_object(self, container, obj):
+        try:
+            del self.kvs[container][obj]
+        except KeyError:
+            raise swexc.ClientException("No such container/object",
+                                        http_status=404)
+
+    def delete_container(self, container):
+        if container not in self.kvs:
+            raise swexc.ClientException("No such container",
+                                        http_status=404)
+        if self.kvs[container]:
+            raise swexc.ClientException("Container not empty",
+                                        http_status=409)
+        del self.kvs[container]
+
+    def head_container(self, container):
+        if container not in self.kvs:
+            raise swexc.ClientException("No such container",
+                                        http_status=404)
+
+    def post_account(self, headers, query_string=None, data=None,
+                     response_dict=None):
+        if query_string == 'bulk-delete':
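+            # Emulate the Swift bulk-delete middleware: each line of the
+            # request body is a quoted '/<container>/<object>' path to delete.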
+            resp = {'Response Status': '200 OK',
+                    'Response Body': '',
+                    'Number Deleted': 0,
+                    'Number Not Found': 0}
+            if response_dict is not None:
+                response_dict['status'] = 200
+            if data:
+                for path in data.splitlines():
+                    try:
+                        __, container, obj = (unquote(path.decode('utf8'))
+                                              .split('/', 2))
+                        del self.kvs[container][obj]
+                        resp['Number Deleted'] += 1
+                    except KeyError:
+                        resp['Number Not Found'] += 1
+            return {}, json.dumps(resp).encode('utf-8')
+
+        if response_dict is not None:
+            response_dict['status'] = 204
+
+        return {}, None
+
+
+class CaptureOutput(fixtures.Fixture):
+    """Optionally capture the output streams.
+
+    .. py:attribute:: stdout
+
+       The ``stream`` attribute from a :class:`StringStream` instance
+       replacing stdout.
+
+    .. py:attribute:: stderr
+
+       The ``stream`` attribute from a :class:`StringStream` instance
+       replacing stderr.
+
+    """
+
+    def setUp(self):
+        super(CaptureOutput, self).setUp()
+        self._stdout_fixture = fixtures.StringStream('stdout')
+        self.stdout = self.useFixture(self._stdout_fixture).stream
+        self.useFixture(fixtures.MonkeyPatch('sys.stdout', self.stdout))
+        self._stderr_fixture = fixtures.StringStream('stderr')
+        self.stderr = self.useFixture(self._stderr_fixture).stream
+        self.useFixture(fixtures.MonkeyPatch('sys.stderr', self.stderr))
+
+        self._logs_fixture = fixtures.StringStream('logs')
+        self.logs = self.useFixture(self._logs_fixture).stream
+        self.useFixture(fixtures.MonkeyPatch(
+            'daiquiri.output.STDERR', daiquiri.output.Stream(self.logs)))
+
+    @property
+    def output(self):
+        self.logs.seek(0)
+        return self.logs.read()
+
+
+class BaseTestCase(testcase.TestCase):
+    def setUp(self):
+        super(BaseTestCase, self).setUp()
+        if not os.getenv("GNOCCHI_TEST_DEBUG"):
+            self.useFixture(CaptureOutput())
+
+
+@six.add_metaclass(SkipNotImplementedMeta)
+class TestCase(BaseTestCase):
+
+    REDIS_DB_INDEX = 0
+    REDIS_DB_LOCK = threading.Lock()
+
+    ARCHIVE_POLICIES = {
+        'no_granularity_match': archive_policy.ArchivePolicy(
+            "no_granularity_match",
+            0, [
+                # 2 second resolution for a day
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(2, 's'),
+                    timespan=numpy.timedelta64(1, 'D'),
+                ),
+            ],
+        ),
+        'low': archive_policy.ArchivePolicy(
+            "low", 0, [
+                # 5 minutes resolution for an hour
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(5, 'm'), points=12),
+                # 1 hour resolution for a day
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'h'), points=24),
+                # 1 day resolution for a month
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'D'), points=30),
+            ],
+        ),
+        'medium': archive_policy.ArchivePolicy(
+            "medium", 0, [
+                # 1 minute resolution for a day
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'm'), points=60 * 24),
+                # 1 hour resolution for a week
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'h'), points=7 * 24),
+                # 1 day resolution for a year
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'D'), points=365),
+            ],
+        ),
+        'high': archive_policy.ArchivePolicy(
+            "high", 0, [
+                # 1 second resolution for an hour
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 's'), points=3600),
+                # 1 minute resolution for a week
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'm'), points=60 * 24 * 7),
+                # 1 hour resolution for a year
+                archive_policy.ArchivePolicyItem(
+                    granularity=numpy.timedelta64(1, 'h'), points=365 * 24),
+            ],
+        ),
+    }
+
+    def setUp(self):
+        super(TestCase, self).setUp()
+
+        self.conf = service.prepare_service(
+            [], conf=utils.prepare_conf(),
+            default_config_files=[],
+            logging_level=logging.DEBUG,
+            skip_log_opts=True)
+
+        self.index = indexer.get_driver(self.conf)
+
+        self.coord = metricd.get_coordinator_and_start(
+            str(uuid.uuid4()),
+            self.conf.coordination_url)
+
+        # NOTE(jd) Some drivers, at least SQLAlchemy, can't create all
+        # their tables in a single transaction even with checkfirst=True,
+        # so we force the upgrade code path to be sequential to avoid race
+        # conditions while the tests run in parallel.
+        with self.coord.get_lock(b"gnocchi-tests-db-lock"):
+            self.index.upgrade()
+
+        self.archive_policies = self.ARCHIVE_POLICIES.copy()
+        for name, ap in six.iteritems(self.archive_policies):
+            # Create basic archive policies
+            try:
+                self.index.create_archive_policy(ap)
+            except indexer.ArchivePolicyAlreadyExists:
+                pass
+
+        py_root = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                               '..',))
+        self.conf.set_override('paste_config',
+                               os.path.join(py_root, 'rest', 'api-paste.ini'),
+                               group="api")
+        self.conf.set_override('policy_file',
+                               os.path.join(py_root, 'rest', 'policy.json'),
+                               group="oslo_policy")
+
+        # NOTE(jd) This allows testing S3 on AWS
+        if not os.getenv("AWS_ACCESS_KEY_ID"):
+            self.conf.set_override('s3_endpoint_url',
+                                   os.getenv("GNOCCHI_STORAGE_HTTP_URL"),
+                                   group="storage")
+            self.conf.set_override('s3_access_key_id', "gnocchi",
+                                   group="storage")
+            self.conf.set_override('s3_secret_access_key', "anythingworks",
+                                   group="storage")
+
+        storage_driver = os.getenv("GNOCCHI_TEST_STORAGE_DRIVER", "file")
+        self.conf.set_override('driver', storage_driver, 'storage')
+
+        if swexc:
+            self.useFixture(fixtures.MockPatch(
+                'swiftclient.client.Connection',
+                FakeSwiftClient))
+
+        if self.conf.storage.driver == 'file':
+            tempdir = self.useFixture(fixtures.TempDir())
+            self.conf.set_override('file_basepath',
+                                   tempdir.path,
+                                   'storage')
+        elif self.conf.storage.driver == 'ceph':
+            self.conf.set_override('ceph_conffile',
+                                   os.getenv("CEPH_CONF"),
+                                   'storage')
+            pool_name = uuid.uuid4().hex
+            with open(os.devnull, 'w') as f:
+                subprocess.call("rados -c %s mkpool %s" % (
+                    os.getenv("CEPH_CONF"), pool_name), shell=True,
+                    stdout=f, stderr=subprocess.STDOUT)
+            self.conf.set_override('ceph_pool', pool_name, 'storage')
+
+        # Override the bucket prefix to be unique to avoid concurrent access
+        # with any other test
+        self.conf.set_override("s3_bucket_prefix", str(uuid.uuid4())[:26],
+                               "storage")
+
+        self.storage = storage.get_driver(self.conf)
+        self.incoming = incoming.get_driver(self.conf)
+
+        if self.conf.storage.driver == 'redis':
+            # Create one prefix per test
+            self.storage.STORAGE_PREFIX = str(uuid.uuid4()).encode()
+
+        if self.conf.incoming.driver == 'redis':
+            self.incoming.SACK_NAME_FORMAT = (
+                str(uuid.uuid4()) + incoming.IncomingDriver.SACK_NAME_FORMAT
+            )
+
+        self.storage.upgrade()
+        self.incoming.upgrade(3)
+        self.chef = chef.Chef(
+            self.coord, self.incoming, self.index, self.storage)
+
+    def tearDown(self):
+        self.index.disconnect()
+        self.coord.stop()
+        super(TestCase, self).tearDown()
+
+    def _create_metric(self, archive_policy_name="low"):
+        """Create a metric and return it"""
+        m = indexer.Metric(uuid.uuid4(),
+                           self.archive_policies[archive_policy_name])
+        m_sql = self.index.create_metric(m.id, str(uuid.uuid4()),
+                                         archive_policy_name)
+        return m, m_sql
+
+    def trigger_processing(self, metrics=None):
+        if metrics is None:
+            self.chef.process_new_measures_for_sack(
+                self.incoming.sack_for_metric(self.metric.id),
+                blocking=True, sync=True)
+        else:
+            self.chef.refresh_metrics(metrics, timeout=True, sync=True)
diff --git a/gnocchi/tests/functional/__init__.py b/gnocchi/tests/functional/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/tests/functional/fixtures.py b/gnocchi/tests/functional/fixtures.py
new file mode 100644
index 0000000000000000000000000000000000000000..dc0146d963b33912375bf6de28729e748f11d09b
--- /dev/null
+++ b/gnocchi/tests/functional/fixtures.py
@@ -0,0 +1,267 @@
+#
+# Copyright 2015-2017 Red Hat. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+"""Fixtures for use with gabbi tests."""
+
+from __future__ import absolute_import
+
+import logging
+import os
+import shutil
+import subprocess
+import tempfile
+import threading
+import time
+from unittest import case
+import uuid
+import warnings
+
+import fixtures
+from gabbi import fixture
+import numpy
+from oslo_config import cfg
+from oslo_middleware import cors
+import sqlalchemy_utils
+import yaml
+
+from gnocchi import chef
+from gnocchi.cli import metricd
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi.indexer import sqlalchemy
+from gnocchi.rest import app
+from gnocchi import service
+from gnocchi import storage
+from gnocchi.tests import base
+from gnocchi.tests import utils
+
+# NOTE(chdent): Hack to restore a semblance of global configuration to
+# pass to the WSGI app used per test suite. LOAD_APP_KWARGS are the oslo
+# configuration and the pecan application configuration, of which the
+# critical part is a reference to the current indexer.
+LOAD_APP_KWARGS = None
+
+
+def setup_app():
+    global LOAD_APP_KWARGS
+    return app.load_app(**LOAD_APP_KWARGS)
+
+
+class AssertNAN(yaml.YAMLObject):
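+    # YAML tag !AssertNAN compares equal to NaN values, which normal
+    # equality (NaN != NaN) cannot express in gabbi response assertions.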
+    def __eq__(self, other):
+        try:
+            return numpy.isnan(other)
+        except TypeError:
+            return False
+
+
+yaml.add_constructor(u'!AssertNAN', lambda loader, node: AssertNAN())
+
+
+class ConfigFixture(fixture.GabbiFixture):
+    """Establish the relevant configuration fixture, per test file.
+
+    Each test file gets its own oslo config and its own indexer and storage
+    instance. The indexer is based on the current database url. The storage
+    uses a temporary directory.
+
+    To use this fixture in a gabbit add::
+
+        fixtures:
+            - ConfigFixture
+    """
+
+    def __init__(self):
+        self.conf = None
+        self.tmp_dir = None
+
+    def start_fixture(self):
+        """Create necessary temp files and do the config dance."""
+        global LOAD_APP_KWARGS
+
+        if not os.getenv("GNOCCHI_TEST_DEBUG"):
+            self.output = base.CaptureOutput()
+            self.output.setUp()
+
+        data_tmp_dir = tempfile.mkdtemp(prefix='gnocchi')
+
+        if os.getenv("GABBI_LIVE"):
+            dcf = None
+        else:
+            dcf = []
+        conf = service.prepare_service([], conf=utils.prepare_conf(),
+                                       default_config_files=dcf,
+                                       logging_level=logging.DEBUG,
+                                       skip_log_opts=True)
+
+        py_root = os.path.abspath(os.path.join(os.path.dirname(__file__),
+                                               '..', '..',))
+        conf.set_override('paste_config',
+                          os.path.join(py_root, 'rest', 'api-paste.ini'),
+                          group="api")
+        conf.set_override('policy_file',
+                          os.path.join(py_root, 'rest', 'policy.json'),
+                          group="oslo_policy")
+
+        # NOTE(sileht): This is not concurrency safe, but only this test
+        # file deals with CORS, so we are fine. set_override doesn't work
+        # because the cors group doesn't exist yet; when the CORS middleware
+        # is created, it registers the options and directly copies the
+        # values of all configuration options, making it impossible to
+        # override them properly...
+        cfg.set_defaults(cors.CORS_OPTS, allowed_origin="http://foobar.com")
+
+        self.conf = conf
+        self.tmp_dir = data_tmp_dir
+
+        if conf.indexer.url is None:
+            raise case.SkipTest("No indexer configured")
+
+        storage_driver = os.getenv("GNOCCHI_TEST_STORAGE_DRIVER", "file")
+
+        conf.set_override('driver', storage_driver, 'storage')
+        if conf.storage.driver == 'file':
+            conf.set_override('file_basepath', data_tmp_dir, 'storage')
+        elif conf.storage.driver == 'ceph':
+            conf.set_override('ceph_conffile', os.getenv("CEPH_CONF"),
+                              'storage')
+            pool_name = uuid.uuid4().hex
+            with open(os.devnull, 'w') as f:
+                subprocess.call("rados -c %s mkpool %s" % (
+                    os.getenv("CEPH_CONF"), pool_name), shell=True,
+                    stdout=f, stderr=subprocess.STDOUT)
+            conf.set_override('ceph_pool', pool_name, 'storage')
+        elif conf.storage.driver == "s3":
+            conf.set_override('s3_endpoint_url',
+                              os.getenv("GNOCCHI_STORAGE_HTTP_URL"),
+                              group="storage")
+            conf.set_override('s3_access_key_id', "gnocchi", group="storage")
+            conf.set_override('s3_secret_access_key', "anythingworks",
+                              group="storage")
+            conf.set_override("s3_bucket_prefix", str(uuid.uuid4())[:26],
+                              "storage")
+        elif conf.storage.driver == "swift":
+            # NOTE(sileht): This fixture must start before any driver stuff
+            swift_fixture = fixtures.MockPatch(
+                'swiftclient.client.Connection',
+                base.FakeSwiftClient)
+            swift_fixture.setUp()
+
+        # NOTE(jd) All of that is still very SQL centric but we only support
+        # SQL for now so let's say it's good enough.
+        conf.set_override(
+            'url',
+            sqlalchemy.SQLAlchemyIndexer._create_new_database(
+                conf.indexer.url),
+            'indexer')
+
+        index = indexer.get_driver(conf)
+        index.upgrade()
+
+        # Set pagination to a testable value
+        conf.set_override('max_limit', 7, 'api')
+
+        conf.set_override('enable_proxy_headers_parsing', True, group="api")
+
+        self.index = index
+
+        self.coord = metricd.get_coordinator_and_start(str(uuid.uuid4()),
+                                                       conf.coordination_url)
+        s = storage.get_driver(conf)
+        i = incoming.get_driver(conf)
+
+        if conf.storage.driver == 'redis':
+            # Create one prefix per test
+            s.STORAGE_PREFIX = str(uuid.uuid4()).encode()
+
+        if conf.incoming.driver == 'redis':
+            i.SACK_NAME_FORMAT = (
+                str(uuid.uuid4()) + incoming.IncomingDriver.SACK_NAME_FORMAT
+            )
+
+        self.fixtures = [
+            fixtures.MockPatch("gnocchi.storage.get_driver",
+                               return_value=s),
+            fixtures.MockPatch("gnocchi.incoming.get_driver",
+                               return_value=i),
+            fixtures.MockPatch("gnocchi.indexer.get_driver",
+                               return_value=self.index),
+            fixtures.MockPatch(
+                "gnocchi.cli.metricd.get_coordinator_and_start",
+                return_value=self.coord),
+        ]
+        for f in self.fixtures:
+            f.setUp()
+
+        if conf.storage.driver == 'swift':
+            self.fixtures.append(swift_fixture)
+
+        LOAD_APP_KWARGS = {
+            'conf': conf,
+        }
+
+        s.upgrade()
+        i.upgrade(128)
+
+        # Start up a thread to process measures asynchronously.
+        self.metricd_thread = MetricdThread(chef.Chef(self.coord, i, index, s))
+        self.metricd_thread.start()
+
+    def stop_fixture(self):
+        """Clean up the config fixture and storage artifacts."""
+
+        if hasattr(self, 'metricd_thread'):
+            self.metricd_thread.stop()
+            self.metricd_thread.join()
+
+        if hasattr(self, 'fixtures'):
+            for f in reversed(self.fixtures):
+                f.cleanUp()
+
+        if hasattr(self, 'index'):
+            self.index.disconnect()
+
+        # Swallow noise from missing tables when dropping
+        # database.
+        with warnings.catch_warnings():
+            warnings.filterwarnings('ignore',
+                                    module='sqlalchemy.engine.default')
+            sqlalchemy_utils.drop_database(self.conf.indexer.url)
+
+        if self.tmp_dir:
+            shutil.rmtree(self.tmp_dir)
+
+        if hasattr(self, 'coord'):
+            self.coord.stop()
+
+        self.conf.reset()
+        if not os.getenv("GNOCCHI_TEST_DEBUG"):
+            self.output.cleanUp()
+
+
+class MetricdThread(threading.Thread):
+    """Run metricd in a naive thread to process measures."""
+
+    def __init__(self, chef, name='metricd'):
+        super(MetricdThread, self).__init__(name=name)
+        self.chef = chef
+        self.flag = True
+
+    def run(self):
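+        # Sweep every sack, processing any new measures, and keep doing so
+        # until stop() clears the flag.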
+        while self.flag:
+            for sack in self.chef.incoming.iter_sacks():
+                self.chef.process_new_measures_for_sack(sack, blocking=True)
+            time.sleep(0.1)
+
+    def stop(self):
+        self.flag = False
diff --git a/gnocchi/tests/functional/gabbits/aggregates-with-metric-ids.yaml b/gnocchi/tests/functional/gabbits/aggregates-with-metric-ids.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ac1826ae583d86811c6af831f2af2e2b83febb45
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/aggregates-with-metric-ids.yaml
@@ -0,0 +1,861 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cookies
+          definition:
+              - granularity: 1 second
+              - granularity: 60 second
+      status: 201
+
+    - name: create second archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cake
+          definition:
+              - granularity: 5 second
+      status: 201
+
+    - name: create metric1
+      POST: /v1/metric
+      data:
+          name: metric1
+          archive_policy_name: cookies
+      status: 201
+
+    - name: create metric2
+      POST: /v1/metric
+      data:
+          name: metric2
+          archive_policy_name: cookies
+      status: 201
+
+    - name: create metric3
+      POST: /v1/metric
+      data:
+          name: metric3
+          archive_policy_name: cake
+      status: 201
+
+    - name: create metric4
+      POST: /v1/metric
+      data:
+          name: metric4
+          archive_policy_name: cookies
+      status: 201
+
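+    # $HISTORY['<test name>'].$RESPONSE['$.id'] is gabbi's way of reusing a
+    # value from a previous test's response, here the generated metric id.
+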
+    - name: push measurements to metric1
+      POST: /v1/metric/$HISTORY['create metric1'].$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+          - timestamp: "2015-03-06T14:34:15"
+            value: -16
+          - timestamp: "2015-03-06T14:35:12"
+            value: 9
+          - timestamp: "2015-03-06T14:35:15"
+            value: 11
+      status: 202
+
+    - name: push measurements to metric2
+      POST: /v1/metric/$HISTORY['create metric2'].$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 2
+          - timestamp: "2015-03-06T14:34:12"
+            value: 4
+          - timestamp: "2015-03-06T14:34:15"
+            value: 5
+          - timestamp: "2015-03-06T14:35:12"
+            value: 10
+          - timestamp: "2015-03-06T14:35:15"
+            value: 15
+      status: 202
+
+    - name: push measurements to metric4
+      POST: /v1/metric/$HISTORY['create metric4'].$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2017-04-06T14:33:57"
+            value: 20
+          - timestamp: "2017-04-06T14:34:12"
+            value: 10
+      status: 202
+
+    - name: get measurements from metric1
+      GET: /v1/metric/$HISTORY['create metric1'].$RESPONSE['$.id']/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+
+
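
Each expected row is [timestamp, granularity in seconds, value], with the
60-second aggregates listed before the 1-second ones. The 60-second means
follow directly from the measures pushed above; a standalone check that
assumes nothing beyond that data:

    from collections import defaultdict

    measures = [("14:33:57", 43.1), ("14:34:12", 12), ("14:34:15", -16),
                ("14:35:12", 9), ("14:35:15", 11)]
    buckets = defaultdict(list)
    for ts, value in measures:
        buckets[ts[:5]].append(value)  # truncate to the minute
    means = {minute: sum(vals) / len(vals) for minute, vals in buckets.items()}
    assert means == {"14:33": 43.1, "14:34": -2.0, "14:35": 10.0}
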
+    - name: get measurements from metric2
+      GET: /v1/metric/$HISTORY['create metric2'].$RESPONSE['$.id']/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 2.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 5.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 10.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 15.0]
+
+    - name: get measurements from metric3
+      GET: /v1/metric/$HISTORY['create metric3'].$RESPONSE['$.id']/measures?refresh=true
+      response_json_paths:
+        $: []
+
+    - name: get measurements from metric4
+      GET: /v1/metric/$HISTORY['create metric4'].$RESPONSE['$.id']/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2017-04-06T14:33:00+00:00", 60.0, 20.0]
+          - ["2017-04-06T14:34:00+00:00", 60.0, 10.0]
+          - ["2017-04-06T14:33:57+00:00", 1.0, 20.0]
+          - ["2017-04-06T14:34:12+00:00", 1.0, 10.0]
+
+    - name: get aggregates, no references
+      POST: /v1/aggregates
+      data:
+        operations: ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.`len`: 1
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 2.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 5.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 10.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 15.0]
+
+    - name: get aggregates with references
+      desc: we pass metric2 twice to ensure it is retrieved only once
+      POST: /v1/aggregates?details=true
+      data:
+        operations: ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"],  ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.`len`: 2
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 2.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 5.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 10.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 15.0]
+
+    - name: get aggregates start and stop
+      POST: /v1/aggregates
+      query_parameters:
+        details: true
+        start: "2015-03-06T14:34:00"
+        stop: "2015-03-06T14:35:13"
+      data:
+        operations: ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 5.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 10.0]
+
+    - name: get aggregates granularity
+      POST: /v1/aggregates?granularity=60&details=true
+      data:
+        operations: ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "max"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "min"]]
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".max:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 12.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 11.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".min:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+
+    - name: get aggregates simple with array
+      POST: /v1/aggregates?details=true
+      data:
+        operations: ["+", ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]], 2.0]
+      response_json_paths:
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 45.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 0.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 45.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 14.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -14.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 11.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 13.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 4.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 6.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 14.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 6.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 7.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 17.0]
+
+    - name: get aggregates resample
+      POST: /v1/aggregates?granularity=1&details=true
+      data:
+        operations:
+          - resample
+          - mean
+          - 60
+          - ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+
+    - name: get aggregates rolling
+      POST: /v1/aggregates?granularity=1&details=true
+      data:
+        operations:
+          - rolling
+          - mean
+          - 2
+          - ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.references[/name][0].archive_policy.name: cookies
+        $.references[/name][1].archive_policy.name: cookies
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 27.55]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -2.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, -3.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 10.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 3.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 4.5]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 7.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 12.5]
+
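
The window-2 rolling mean averages each point with its predecessor, so the
first output lands on the second input timestamp. Recomputing metric1's
expected 1-second values by hand:

    series = [43.1, 12.0, -16.0, 9.0, 11.0]  # metric1 raw values
    rolled = [(prev + cur) / 2 for prev, cur in zip(series, series[1:])]
    expected = [27.55, -2.0, -3.5, 10.0]
    assert all(abs(r - e) < 1e-9 for r, e in zip(rolled, expected))
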
+    - name: get one metric
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean)"
+      response_json_paths:
+        $.references.`len`: 1
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+
+    - name: get aggregates mean
+      POST: /v1/aggregates
+      data:
+        operations:
+          - aggregate
+          - mean
+          - ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.measures.aggregated:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 22.55]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 1.25]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 11.25]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 22.55]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 8.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -5.5]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 13.0]
+
+    - name: get aggregates rate:mean
+      POST: /v1/aggregates
+      data:
+        operations:
+          - aggregate
+          - rate:mean
+          - ["metric", ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"], ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]]
+      response_json_paths:
+        $.measures.aggregated:
+          - ["2015-03-06T14:34:00+00:00", 60.0, -21.30]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, -14.55]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -13.5]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 15.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 3.5]
+
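
rate:mean reaggregates in two steps: take each metric's first difference,
then average those differences across metrics, which is why the first
timestamp of each granularity disappears from the output. Verifying the
60-second rows:

    def rate(series):
        return [cur - prev for prev, cur in zip(series, series[1:])]

    m1 = [43.1, -2.0, 10.0]  # metric1 60s means from the tests above
    m2 = [2.0, 4.5, 12.5]    # metric2 60s means from the tests above
    agg = [(a + b) / 2 for a, b in zip(rate(m1), rate(m2))]
    assert all(abs(v - e) < 1e-9 for v, e in zip(agg, [-21.3, 10.0]))
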
+    - name: get aggregates one metric
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(aggregate mean (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 1
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.measures.aggregated:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+
+    - name: get aggregates one metric rateofchange
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(rateofchange (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 1
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:34:00+00:00", 60.0, -45.1]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, -31.1]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -28.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 25.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 2.0]
+
+    - name: get aggregates math with string
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(+ (metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric2'].$RESPONSE['$.id'] mean)) 2.0)"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 45.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 0.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 45.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 14.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -14.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 11.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 13.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 4.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 6.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 14.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 6.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 7.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 17.0]
+
+    - name: get aggregates subtract
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(- (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean) (metric $HISTORY['create metric2'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.measures.aggregated:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 41.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -6.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, -2.5]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 41.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 8.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -21.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, -1.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, -4.0]
+
+    - name: get aggregates mean aggregate
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(aggregate mean (metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric2'].$RESPONSE['$.id'] mean)))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.measures.aggregated:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 22.55]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 1.25]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 11.25]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 22.55]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 8.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -5.5]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 13.0]
+
+    - name: get aggregates negative absolute
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(negative (absolute (aggregate mean (metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric2'].$RESPONSE['$.id'] mean)))))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.measures.aggregated:
+          - ["2015-03-06T14:33:00+00:00", 60.0, -22.55]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -1.25]
+          - ["2015-03-06T14:35:00+00:00", 60.0, -11.25]
+          - ["2015-03-06T14:33:57+00:00", 1.0, -22.55]
+          - ["2015-03-06T14:34:12+00:00", 1.0, -8.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -5.5]
+          - ["2015-03-06T14:35:12+00:00", 1.0, -9.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, -13.0]
+
+
+    - name: push new measurements to metric1
+      POST: /v1/metric/$HISTORY['create metric1'].$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:37:00"
+            value: 15
+          - timestamp: "2015-03-06T14:38:00"
+            value: 15
+      status: 202
+
+    - name: refresh metric1
+      GET: /v1/metric/$HISTORY['create metric1'].$RESPONSE['$.id']/measures?refresh=true
+
+    - name: fill and no granularity
+      POST: /v1/aggregates?fill=123&details=true
+      data:
+        operations: "(metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric2'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric2'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ['2015-03-06T14:37:00+00:00', 60.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 60.0, 15.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+          - ['2015-03-06T14:37:00+00:00', 1.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 1.0, 15.0]
+        $.measures."$HISTORY['create metric2'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 2.0]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 4.5]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 12.5]
+          - ['2015-03-06T14:37:00+00:00', 60.0, 123.0]
+          - ['2015-03-06T14:38:00+00:00', 60.0, 123.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 2.0]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 4.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 5.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 10.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 15.0]
+          - ['2015-03-06T14:37:00+00:00', 1.0, 123.0]
+          - ['2015-03-06T14:38:00+00:00', 1.0, 123.0]
+
+    - name: no overlap dropna
+      POST: /v1/aggregates?details=true
+      data:
+        operations: "(metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric4'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric4'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ['2015-03-06T14:37:00+00:00', 60.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 60.0, 15.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+          - ['2015-03-06T14:37:00+00:00', 1.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 1.0, 15.0]
+        $.measures."$HISTORY['create metric4'].$RESPONSE['$.id']".mean:
+          - ["2017-04-06T14:33:00+00:00", 60.0, 20.0]
+          - ["2017-04-06T14:34:00+00:00", 60.0, 10.0]
+          - ["2017-04-06T14:33:57+00:00", 1.0, 20.0]
+          - ["2017-04-06T14:34:12+00:00", 1.0, 10.0]
+
+    - name: no overlap null
+      POST: /v1/aggregates?fill=null&details=true
+      data:
+        operations: "(metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric4'].$RESPONSE['$.id'] mean))"
+      response_json_paths:
+        $.references.`len`: 2
+        $.references[/name][0].id: $HISTORY['create metric1'].$RESPONSE['$.id']
+        $.references[/name][1].id: $HISTORY['create metric4'].$RESPONSE['$.id']
+        $.measures."$HISTORY['create metric1'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -2.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+          - ['2015-03-06T14:37:00+00:00', 60.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 60.0, 15.0]
+          - ["2017-04-06T14:33:00+00:00", 60.0, !AssertNAN ]
+          - ["2017-04-06T14:34:00+00:00", 60.0, !AssertNAN ]
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+          - ['2015-03-06T14:37:00+00:00', 1.0, 15.0]
+          - ['2015-03-06T14:38:00+00:00', 1.0, 15.0]
+          - ["2017-04-06T14:33:57+00:00", 1.0, !AssertNAN ]
+          - ["2017-04-06T14:34:12+00:00", 1.0, !AssertNAN ]
+        $.measures."$HISTORY['create metric4'].$RESPONSE['$.id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, !AssertNAN ]
+          - ["2015-03-06T14:34:00+00:00", 60.0, !AssertNAN ]
+          - ["2015-03-06T14:35:00+00:00", 60.0, !AssertNAN ]
+          - ['2015-03-06T14:37:00+00:00', 60.0, !AssertNAN ]
+          - ['2015-03-06T14:38:00+00:00', 60.0, !AssertNAN ]
+          - ["2017-04-06T14:33:00+00:00", 60.0, 20.0]
+          - ["2017-04-06T14:34:00+00:00", 60.0, 10.0]
+          - ["2015-03-06T14:33:57+00:00", 1.0, !AssertNAN ]
+          - ["2015-03-06T14:34:12+00:00", 1.0, !AssertNAN ]
+          - ["2015-03-06T14:34:15+00:00", 1.0, !AssertNAN ]
+          - ["2015-03-06T14:35:12+00:00", 1.0, !AssertNAN ]
+          - ["2015-03-06T14:35:15+00:00", 1.0, !AssertNAN ]
+          - ['2015-03-06T14:37:00+00:00', 1.0, !AssertNAN ]
+          - ['2015-03-06T14:38:00+00:00', 1.0, !AssertNAN ]
+          - ["2017-04-06T14:33:57+00:00", 1.0, 20.0]
+          - ["2017-04-06T14:34:12+00:00", 1.0, 10.0]
+
+# Negative tests
+
+    - name: get no operations
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: []
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.detail: ['operations']
+        $.description.reason: "/^Operation must not be empty/"
+
+    - name: get operations that are not a list
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations:
+          foo: bar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.detail: ['operations']
+        $.description.reason: "/^Expected a tuple/list, got a/"
+
+    - name: invalid operations string
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: "(metroc foo bar"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Invalid operations"
+        $.description.reason: "/^Fail to parse the operations string/"
+        $.description.detail: "Expected \")\" (at char 15), (line:1, col:16)"
+
+    - name: get invalid metric operations
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: ["metric"]
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.detail: ["operations"]
+        $.description.reason: "/^Operation need at least one argument for dictionary value/"
+
+    - name: get unknown metrics
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations:
+          - metric
+          - ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"]
+          - ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]
+          - ["8c062a7e-9f9f-4b1c-9996-9d0328512ab7", "mean"]
+          - ["e4864464-1b27-4622-9fbb-dc900e06c192", "mean"]
+      status: 404
+      response_json_paths:
+        $.code: 404
+        $.description.cause: "Unknown metrics"
+        $.description.detail.`sorted`:
+            - "8c062a7e-9f9f-4b1c-9996-9d0328512ab7"
+            - "e4864464-1b27-4622-9fbb-dc900e06c192"
+
+    - name: get not matching granularity
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations:
+          - metric
+          - ["$HISTORY['create metric1'].$RESPONSE['$.id']", "mean"]
+          - ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]
+          - ["$HISTORY['create metric3'].$RESPONSE['$.id']", "mean"]
+
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Metrics can't being aggregated"
+        $.description.reason: "No granularity match"
+        $.description.detail.`len`: 3
+
+    - name: get unknown granularity
+      POST: /v1/aggregates?granularity=123
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations:
+          - metric
+          - "$HISTORY['create metric1'].$RESPONSE['$.id']"
+          - "mean"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Metrics can't being aggregated"
+        $.description.reason: "Granularities are missing"
+        $.description.detail:
+        - ["$HISTORY['create metric1'].$RESPONSE['$.id']", mean, 123]
+
+    - name: get unknown aggregation
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations:
+          - metric
+          - ["$HISTORY['create metric1'].$RESPONSE['$.id']", "what?"]
+          - ["$HISTORY['create metric2'].$RESPONSE['$.id']", "mean"]
+      status: 404
+      response_json_paths:
+        $.code: 404
+        $.description.cause: "Metrics with unknown aggregation"
+        $.description.detail:
+        - ["$HISTORY['create metric1'].$RESPONSE['$.id']", "what?"]
+
+    - name: invalid start
+      POST: /v1/aggregates?start=notadate
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Argument value error"
+        $.description.detail: "start"
+        $.description.reason: "Must be a datetime or a timestamp"
+
+    - name: invalid stop
+      POST: /v1/aggregates?stop=notadate
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Argument value error"
+        $.description.detail: "stop"
+        $.description.reason: "Must be a datetime or a timestamp"
+
+    - name: invalid needed_overlap
+      POST: /v1/aggregates?needed_overlap=notnumber
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Argument value error"
+        $.description.detail: "needed_overlap"
+        $.description.reason: "Must be a number"
+
+    - name: invalid granularity
+      POST: /v1/aggregates?granularity=foobar
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Argument value error"
+        $.description.detail: "granularity"
+        $.description.reason: "Unable to parse timespan"
+
+    - name: invalid fill
+      POST: /v1/aggregates?fill=foobar&granularity=5
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Argument value error"
+        $.description.detail: "fill"
+        $.description.reason: "Must be a float, 'dropna' or 'null'"
+
+    - name: get rolling bad aggregate
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: "(rolling blah 2 (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean))"
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^'rolling' operation invalid for dictionary value/"
+        $.description.detail: ["operations"]
+
+    - name: get rolling-mean missing window
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: "(rolling mean (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean))"
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^'rolling' operation invalid for dictionary value/"
+        $.description.detail: ["operations"]
+
+    - name: get measurements from metric with an invalid operation
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: "(notexist (absolute (metric $HISTORY['create metric1'].$RESPONSE['$.id'] mean)))"
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^'notexist' operation invalid for dictionary value/"
+        $.description.detail: ["operations"]
+
+    - name: invalid resample
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        operations: "(resample mean invalid (metric ($HISTORY['create metric1'].$RESPONSE['$.id'] mean) ($HISTORY['create metric2'].$RESPONSE['$.id'] mean)))"
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^'resample' operation invalid for dictionary value/"
+        $.description.detail: ["operations"]
diff --git a/gnocchi/tests/functional/gabbits/aggregates-with-resources.yaml b/gnocchi/tests/functional/gabbits/aggregates-with-resources.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c5bcf0418e0b92174b8f531334db9e8c08427768
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/aggregates-with-resources.yaml
@@ -0,0 +1,445 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: low
+        definition:
+          - granularity: 1 second
+          - granularity: 300 seconds
+      status: 201
+
+    - name: create another archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: unrelated
+        definition:
+          - granularity: 5 second
+      status: 201
+
+    - name: create resource 1
+      POST: /v1/resource/generic
+      data:
+        id: 1ed9c196-4c9f-4ba8-a5be-c9a71a82aac4
+        user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+        project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+          cpu.idle:
+            archive_policy_name: low
+          noway:
+            archive_policy_name: low
+      status: 201
+
+    - name: post cpuutil measures 1
+      POST: /v1/resource/generic/1ed9c196-4c9f-4ba8-a5be-c9a71a82aac4/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 43.1
+        - timestamp: "2015-03-06T14:34:12"
+          value: 12
+      status: 202
+
+    - name: create resource 2
+      POST: /v1/resource/generic
+      data:
+        id: 2447CD7E-48A6-4C50-A991-6677CC0D00E6
+        user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+        project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+          unique.stuff:
+            archive_policy_name: low
+      status: 201
+
+    - name: post unique.stuff measures 1
+      POST: /v1/resource/generic/2447CD7E-48A6-4C50-A991-6677CC0D00E6/metric/unique.stuff/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 23
+        - timestamp: "2015-03-06T14:34:12"
+          value: 8
+      status: 202
+
+    - name: post cpuutil measures 2
+      POST: /v1/resource/generic/2447CD7E-48A6-4C50-A991-6677CC0D00E6/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 23
+        - timestamp: "2015-03-06T14:34:12"
+          value: 8
+      status: 202
+
+    - name: create resource 3
+      POST: /v1/resource/generic
+      data:
+        id: 33333BC5-5948-4F29-B7DF-7DE607660452
+        user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+        project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+      status: 201
+
+    - name: post cpuutil measures 3
+      POST: /v1/resource/generic/33333BC5-5948-4F29-B7DF-7DE607660452/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 230
+        - timestamp: "2015-03-06T14:34:12"
+          value: 45.41
+      status: 202
+
+    - name: create resource 4
+      POST: /v1/resource/generic
+      data:
+        id: 41409ec6-3909-4b37-bbff-f9a5448fe328
+        user_id: 70b5b732-9d81-4dfb-a8a1-a424ef3eae6b
+        project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+        metrics:
+          cpu.util:
+            archive_policy_name: unrelated
+      status: 201
+
+    - name: post cpuutil measures 4
+      POST: /v1/resource/generic/41409ec6-3909-4b37-bbff-f9a5448fe328/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 230
+        - timestamp: "2015-03-06T14:34:12"
+          value: 45.41
+      status: 202
+
+    - name: list resources
+      GET: /v1/resource/generic
+
+    - name: aggregate metric
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(aggregate mean (metric cpu.util mean))"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures.aggregated:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 60.251666666666665]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 98.7]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 21.80333333333333]
+
+    - name: batch get list
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(metric (cpu.util mean) (cpu.idle mean))"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.idle".mean: []
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 27.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[1].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.5]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 8.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[2].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+
+    - name: batch get solo
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(metric cpu.util mean)"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 27.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[1].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.5]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 8.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[2].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+
+    - name: batch get wildcard list
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(metric (cpu.* mean) (*way mean))"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 27.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.idle".mean: []
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."noway".mean: []
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[1].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.5]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 8.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[2].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+
+    - name: batch get wildcard solo
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(metric cpu.* mean)"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 27.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.idle".mean: []
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[1].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.5]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 8.0]
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[2].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+
+    - name: stupid but valid batch get
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "id = '1ed9c196-4c9f-4ba8-a5be-c9a71a82aac4'"
+        operations: "(metric (cpu.util mean) (cpu.util mean))"
+      poll:
+        count: 10
+        delay: 1
+      response_json_paths:
+        $.references.`len`: 1
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.measures."$HISTORY['list resources'].$RESPONSE['$[0].id']"."cpu.util".mean:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 27.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+
+    - name: aggregate metric with groupby on project_id and user_id with aggregates API
+      POST: /v1/aggregates?groupby=project_id&groupby=user_id&details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(aggregate mean (metric cpu.util mean))"
+      response_json_paths:
+        $.`len`: 2
+        $[0].measures.references.`len`: 2
+        $[0].measures.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $[0].measures.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $[0].measures.measures.aggregated:
+              - ['2015-03-06T14:30:00+00:00', 300.0, 21.525]
+              - ['2015-03-06T14:33:57+00:00', 1.0, 33.05]
+              - ['2015-03-06T14:34:12+00:00', 1.0, 10.0]
+        $[0].group:
+              user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+              project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        $[1].measures.references.`len`: 1
+        $[1].measures.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $[1].measures.measures.aggregated:
+              - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+              - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+              - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+        $[1].group:
+              user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+              project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+
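
The first group's numbers can be recomputed from the measures posted above:
resources 1 and 2 share the same project, and the aggregate is the pointwise
mean of their cpu.util series (a standalone arithmetic check):

    r1 = [43.1, 12.0]  # resource 1 cpu.util at 14:33:57 and 14:34:12
    r2 = [23.0, 8.0]   # resource 2 cpu.util at the same timestamps
    per_second = [(a + b) / 2 for a, b in zip(r1, r2)]    # 1s rows
    coarse = (sum(r1) / len(r1) + sum(r2) / len(r2)) / 2  # 300s row
    assert all(abs(v - e) < 1e-9 for v, e in zip(per_second, [33.05, 10.0]))
    assert abs(coarse - 21.525) < 1e-9
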
+    - name: aggregate metric with groupby on id with aggregates API
+      POST: /v1/aggregates?groupby=id&details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(aggregate mean (metric unique.stuff mean))"
+      response_json_paths:
+        $.`len`: 1
+        $[0].measures.references.`len`: 1
+        $[0].measures.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $[0].measures.measures.aggregated:
+              - ['2015-03-06T14:30:00+00:00', 300.0, 15.5]
+              - ['2015-03-06T14:33:57+00:00', 1.0, 23.0]
+              - ['2015-03-06T14:34:12+00:00', 1.0, 8.0]
+        $[0].group:
+              id: 2447cd7e-48a6-4c50-a991-6677cc0d00e6
+
+    - name: aggregate and drop infinity from divide by zero
+      POST: /v1/aggregates?details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(/ (* 100 (aggregate mean (metric cpu.util mean))) 0 )"
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures.aggregated: []
+
+    - name: aggregate and return infinity from divide by zero
+      POST: /v1/aggregates?details=true&fill=null
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(/ (* 100 (aggregate mean (metric cpu.util mean))) 0 )"
+      response_json_paths:
+        $.references.`len`: 3
+        $.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $.references[/id].[2]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $.measures.aggregated:
+          - ['2015-03-06T14:30:00+00:00', 300.0, .inf]
+          - ['2015-03-06T14:33:57+00:00', 1.0, .inf]
+          - ['2015-03-06T14:34:12+00:00', 1.0, .inf]
+
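
The two divide-by-zero tests above differ only in fill: numpy, which Gnocchi
uses for its computations, turns x/0 into infinity instead of raising, and
without a fill value those non-finite points are dropped while fill=null
keeps them. A toy illustration of the underlying numpy behaviour:

    import numpy as np

    with np.errstate(divide="ignore"):  # silence the divide-by-zero warning
        result = np.float64(100.0) * np.float64(60.25) / np.float64(0.0)
    assert np.isinf(result)
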
+    - name: aggregate metric with groupby on project_id and user_id drop infinity
+      POST: /v1/aggregates?groupby=project_id&groupby=user_id&details=true
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(/ (* 100 (aggregate mean (metric cpu.util mean))) 0 )"
+      response_json_paths:
+        $.`len`: 2
+        $[0].measures.references.`len`: 2
+        $[0].measures.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[0]']
+        $[0].measures.references[/id].[1]: $HISTORY['list resources'].$RESPONSE['$[1]']
+        $[0].measures.measures.aggregated: []
+        $[0].group:
+              user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+              project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        $[1].measures.references.`len`: 1
+        $[1].measures.references[/id].[0]: $HISTORY['list resources'].$RESPONSE['$[2]']
+        $[1].measures.measures.aggregated: []
+        $[1].group:
+              user_id: A50F549C-1F1C-4888-A71A-2C5473CCCEC1
+              project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+
+# Negative tests
+
+    - name: not matching granularity
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        resource_type: generic
+        search: {}
+        operations: "(aggregate mean (metric cpu.util mean))"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Metrics can't being aggregated"
+        $.description.detail.`len`: 4
+
+    - name: not matching metrics
+      POST: /v1/aggregates
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(aggregate mean (metric (notexists mean) (foobar mean)))"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Metrics not found"
+        $.description.detail.`sorted`:
+          - foobar
+          - notexists
+
+    - name: not matching metrics in any group
+      POST: /v1/aggregates?groupby=id
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        resource_type: generic
+        search: "user_id = 'A50F549C-1F1C-4888-A71A-2C5473CCCEC1'"
+        operations: "(aggregate mean (metric (notexists mean) (foobar mean)))"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Metrics not found"
+        $.description.detail.`sorted`:
+          - foobar
+          - notexists
+
+    - name: invalid groupby attribute
+      POST: /v1/aggregates?groupby=unit
+      request_headers:
+        accept: application/json
+        content-type: application/json
+        authorization: "basic Zm9vYmFyOg=="
+      data:
+        resource_type: generic
+        search: "id = '1ed9c196-4c9f-4ba8-a5be-c9a71a82aac4'"
+        operations: "(metric (cpu.util mean) (cpu.util mean))"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description: "Invalid pagination: `Sort key supplied is invalid: unit'"
diff --git a/gnocchi/tests/functional/gabbits/aggregation.yaml b/gnocchi/tests/functional/gabbits/aggregation.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d85e85de91dcbf486f7c7e189afcbd75a54175ef
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/aggregation.yaml
@@ -0,0 +1,387 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: low
+          definition:
+              - granularity: 1 second
+              - granularity: 300 seconds
+      status: 201
+
+# Aggregation by metric ids
+
+    - name: create metric 1
+      POST: /v1/metric
+      request_headers:
+          content-type: application/json
+      data:
+        archive_policy_name: low
+      status: 201
+
+    - name: create metric 2
+      POST: /v1/metric
+      request_headers:
+          content-type: application/json
+      data:
+        archive_policy_name: low
+      status: 201
+
+    - name: get metric list
+      GET: /v1/metric
+
+    - name: push measurements to metric 1
+      POST: /v1/metric/$RESPONSE['$[0].id']/measures
+      request_headers:
+           content-type: application/json
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+
+    - name: push measurements to metric 2
+      POST: /v1/metric/$HISTORY['get metric list'].$RESPONSE['$[1].id']/measures
+      request_headers:
+           content-type: application/json
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 3.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 2
+          - timestamp: "2015-03-06T14:35:12"
+            value: 5
+      status: 202
+
+    - name: get measure aggregates with a non-float granularity
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=foobar
+      status: 400
+
+    - name: get measure aggregates with invalid uuids
+      GET: /v1/aggregation/metric?metric=foobar
+      status: 400
+
+    - name: GET measure aggregates by granularity with refresh
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+
+    - name: POST measure aggregates by granularity with refresh
+      POST: /v1/aggregation/metric?granularity=1&refresh=true
+      request_headers:
+          content-type: application/json
+      data:
+        - $HISTORY['get metric list'].$RESPONSE['$[0].id']
+        - $HISTORY['get metric list'].$RESPONSE['$[1].id']
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+
+    - name: get measure aggregates by granularity
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+
+    - name: get measure aggregates by granularity with timestamps
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&start=2015-03-06T15:33:57%2B01:00&stop=2015-03-06T15:34:00%2B01:00&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.05]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+
+    - name: get measure aggregates with invalid aggregation method
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&aggregation=wtf
+      request_headers:
+        accept: application/json
+      status: 404
+      response_json_paths:
+        $.description: Aggregation method 'wtf' at granularity '1.0' for metric $HISTORY['get metric list'].$RESPONSE['$[0].id'] does not exist
+
+    - name: get measure aggregates and reaggregate
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&reaggregation=min&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 2.55]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 3.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 2.0]
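+# reaggregation still computes each metric's own mean first, then applies
+# "min" across metrics instead of a mean: min(27.55, 2.55) = 2.55 for the
+# 300s point and min(43.1, 3.1) = 3.1, min(12, 2) = 2.0 for the 1s points.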
+
+    - name: get measure aggregates and resample
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&resample=60
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:00+00:00', 60.0, 23.1]
+          - ['2015-03-06T14:34:00+00:00', 60.0, 7.0]
+
+    - name: get measure aggregates and operations
+      POST: /v1/aggregates?granularity=1
+      data:
+        operations: "(aggregate mean (resample mean 60 (metric ($HISTORY['get metric list'].$RESPONSE['$[0].id'] mean) ($HISTORY['get metric list'].$RESPONSE['$[1].id'] mean))))"
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:33:00+00:00', 60.0, 23.1]
+          - ['2015-03-06T14:34:00+00:00', 60.0, 7.0]
+          - ['2015-03-06T14:35:00+00:00', 60.0, 5.0]
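+# The "operations" string is a Lisp-like expression tree: (metric <id> mean)
+# selects a metric's mean series, (resample mean 60 ...) rebuckets it into
+# 60s windows, and (aggregate mean ...) averages the rebucketed series
+# together. Unlike the GET endpoint above, this keeps the 14:35 point from
+# metric 2, hence the extra 5.0 row.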
+
+    - name: get measure aggregates with fill zero
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&fill=0
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 2.5]
+
+    - name: get measure aggregates with fill null
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&fill=null
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 5.0]
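+# fill=0 substitutes 0.0 for the metric 1 gap at 14:35:12, giving
+# (0 + 5) / 2 = 2.5 in the previous test, while fill=null keeps the gap as
+# null so only metric 2 contributes and the mean stays 5.0.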
+
+    - name: get measure aggregates with fill all granularities
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&fill=0
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.05]
+          - ['2015-03-06T14:35:00+00:00', 300.0, 2.5]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 2.5]
+
+    - name: get measure aggregates with bad fill
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&metric=$HISTORY['get metric list'].$RESPONSE['$[1].id']&granularity=1&fill=asdf
+      status: 400
+
+    - name: get measure aggregates non existing granularity
+      desc: https://github.com/gnocchixyz/gnocchi/issues/148
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list'].$RESPONSE['$[0].id']&granularity=42
+      status: 404
+      response_strings:
+        - Aggregation method 'mean' at granularity '42.0' for metric
+
+# Aggregation by resource and metric_name
+
+    - name: post a resource
+      POST: /v1/resource/generic
+      data:
+          id: bcd3441c-b5aa-4d1b-af9a-5a72322bb269
+          metrics:
+            agg_meter:
+                archive_policy_name: low
+      status: 201
+
+    - name: post another resource
+      POST: /v1/resource/generic
+      data:
+          id: 1b0a8345-b279-4cb8-bd7a-2cb83193624f
+          metrics:
+            agg_meter:
+                archive_policy_name: low
+      status: 201
+
+    - name: push measurements to resource 1
+      POST: /v1/resource/generic/bcd3441c-b5aa-4d1b-af9a-5a72322bb269/metric/agg_meter/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+
+    - name: push measurements to resource 2
+      POST: /v1/resource/generic/1b0a8345-b279-4cb8-bd7a-2cb83193624f/metric/agg_meter/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 3.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 2
+          - timestamp: "2015-03-06T14:35:12"
+            value: 5
+      status: 202
+
+    - name: get measure aggregates by granularity from resources with refresh
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?granularity=1&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+
+    - name: get measure aggregates by granularity from resources
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?granularity=1&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+
+    - name: get measure aggregates by granularity from aggregates API
+      POST: /v1/aggregates?granularity=1
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(aggregate mean (metric agg_meter mean))'
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 5.0]
+
+    - name: get measure aggregates by granularity from resources and resample
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?granularity=1&resample=60
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:00+00:00', 60.0, 23.1]
+          - ['2015-03-06T14:34:00+00:00', 60.0, 7.0]
+
+    - name: get measure aggregates by granularity from aggregates API and resample
+      POST: /v1/aggregates?granularity=1
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(aggregate mean (resample mean 60 (metric agg_meter mean)))'
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:33:00+00:00', 60.0, 23.1]
+          - ['2015-03-06T14:34:00+00:00', 60.0, 7.0]
+          - ['2015-03-06T14:35:00+00:00', 60.0, 5.0]
+
+    - name: get measure aggregates by granularity from resources and operations
+      POST: /v1/aggregates?granularity=1
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(aggregate mean (resample mean 60 (metric agg_meter mean)))'
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:33:00+00:00', 60.0, 23.1]
+          - ['2015-03-06T14:34:00+00:00', 60.0, 7.0]
+          - ['2015-03-06T14:35:00+00:00', 60.0, 5.0]
+
+    - name: get measure aggregates by granularity from resources and bad resample
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?resample=abc
+      status: 400
+
+    - name: get measure aggregates by granularity from resources and resample no granularity
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?resample=60
+      status: 400
+      response_strings:
+        - A granularity must be specified to resample
+
+    - name: get measure aggregates by granularity with timestamps from resources
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?start=2015-03-06T15:33:57%2B01:00&stop=2015-03-06T15:34:00%2B01:00&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.05]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+
+    - name: get measure aggregates by granularity with timestamps from aggregates API
+      POST: /v1/aggregates?start=2015-03-06T15:33:57%2B01:00&stop=2015-03-06T15:34:00%2B01:00
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(aggregate mean (metric agg_meter mean))'
+      poll:
+          count: 10
+          delay: 1
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:30:00+00:00', 300.0, 15.05]
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+
+    - name: get measure aggregates by granularity from resources and reaggregate
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?granularity=1&reaggregation=min&refresh=true
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 3.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 2.0]
+
+    - name: get measure aggregates from resources with fill zero
+      POST: /v1/aggregation/resource/generic/metric/agg_meter?granularity=1&fill=0
+      response_json_paths:
+        $:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 2.5]
+
+    - name: get measure aggregates from aggregates API with fill zero
+      POST: /v1/aggregates?granularity=1&fill=0
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(aggregate mean (metric agg_meter mean))'
+      response_json_paths:
+        $.measures.aggregated:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 23.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 7.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 2.5]
+
+# Some negative tests
+
+    - name: get measure aggregates with wrong GET
+      GET: /v1/aggregation/resource/generic/metric/agg_meter
+      status: 405
+
+    - name: get measure aggregates with wrong metric_name
+      POST: /v1/aggregation/resource/generic/metric/notexists
+      status: 200
+      response_json_paths:
+        $.`len`: 0
+
+    - name: get measure aggregates with wrong resource
+      POST: /v1/aggregation/resource/notexits/metric/agg_meter
+      status: 404
+      response_strings:
+          - Resource type notexits does not exist
+
+    - name: get measure aggregates with wrong path
+      POST: /v1/aggregation/re/generic/metric/agg_meter
+      status: 404
+
+    - name: get measure aggregates with wrong path 2
+      POST: /v1/aggregation/resource/generic/notexists/agg_meter
+      status: 404
+
+    - name: get measure aggregates with no resource name
+      POST: /v1/aggregation/resource/generic/metric
+      status: 405
+
+    - name: get measure aggregates with None as min in clip operation
+      POST: /v1/aggregates?fill=0&granularity=1
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(clip_min (metric agg_meter mean) 10)'
+      response_json_paths:
+        $.measures.bcd3441c-b5aa-4d1b-af9a-5a72322bb269.agg_meter.mean:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 43.1]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 12.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 10.0]
+
+    - name: get measure aggregates with None as max in clip operation
+      POST: /v1/aggregates?fill=0&granularity=1
+      data:
+        resource_type: generic
+        search: {}
+        operations: '(clip_max (metric agg_meter mean) 10)'
+      response_json_paths:
+        $.measures.bcd3441c-b5aa-4d1b-af9a-5a72322bb269.agg_meter.mean:
+          - ['2015-03-06T14:33:57+00:00', 1.0, 10.0]
+          - ['2015-03-06T14:34:12+00:00', 1.0, 10.0]
+          - ['2015-03-06T14:35:12+00:00', 1.0, 0.0]
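+# With fill=0, metric 1 gains a 0.0 point at 14:35:12. clip_min then raises
+# every value below the threshold to 10 (so the filled 0.0 becomes 10.0),
+# while clip_max caps every value above 10 (so 43.1 and 12.0 both become
+# 10.0 and the filled 0.0 passes through unchanged).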
diff --git a/gnocchi/tests/functional/gabbits/archive-rule.yaml b/gnocchi/tests/functional/gabbits/archive-rule.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..3e7b357f4c77988571562e2ff42edb84b4639af8
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/archive-rule.yaml
@@ -0,0 +1,268 @@
+#
+# Test the Archive Policy Rule API to achieve coverage of just the
+# ArchivePolicyRulesController.
+#
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+# create dependent policy
+    - name: create archive policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: low
+          definition:
+              - granularity: 1 hour
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/low
+
+# Attempt to create an archive policy rule
+
+    - name: create archive policy rule1
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule1
+        metric_pattern: "*"
+        archive_policy_name: low
+      status: 201
+      response_json_paths:
+        $.metric_pattern: "*"
+        $.archive_policy_name: low
+        $.name: test_rule1
+
+    - name: create archive policy rule 2
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule2
+        metric_pattern: "disk.foo.*"
+        archive_policy_name: low
+      status: 201
+      response_json_paths:
+        $.metric_pattern: disk.foo.*
+        $.archive_policy_name: low
+        $.name: test_rule2
+
+    - name: create archive policy rule 3
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule3
+        metric_pattern: "disk.*"
+        archive_policy_name: low
+      status: 201
+      response_json_paths:
+        $.metric_pattern: disk.*
+        $.archive_policy_name: low
+        $.name: test_rule3
+
+# Attempt to create an invalid policy rule
+
+    - name: create invalid archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule
+        metric_pattern: "disk.foo.*"
+      status: 400
+
+    - name: create archive policy rule with invalid archive policy
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+        accept: application/json
+        content-type: application/json
+      data:
+        name: test_rule
+        archive_policy_name: not-exists
+        metric_pattern: "disk.foo.*"
+      status: 400
+      response_json_paths:
+        $.code: 400
+        $.description.cause: "Archive policy does not exist"
+        $.description.detail: not-exists
+
+    - name: missing auth archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: application/json
+      data:
+        name: test_rule
+        metric_pattern: "disk.foo.*"
+        archive_policy_name: low
+      status: 403
+
+    - name: wrong content type
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: text/plain
+          # User admin
+          authorization: "basic YWRtaW46"
+      status: 415
+      response_strings:
+          - Unsupported Media Type
+
+    - name: wrong auth create rule
+      POST: /v1/archive_policy_rule
+      data:
+          name: test_rule_wrong_auth
+          metric_pattern: "disk.foo.*"
+          archive_policy_name: low
+      status: 403
+
+    - name: missing auth create rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: application/json
+      data:
+          name: test_rule_miss_auth
+          metric_pattern: "disk.foo.*"
+          archive_policy_name: low
+      status: 403
+
+    - name: bad request body
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        accept: application/json
+        authorization: "basic YWRtaW46"
+      data:
+          whaa: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed @ data/"
+        $.description.detail: ["whaa"]
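+# NOTE: gabbi treats response values wrapped in "/.../" as regular
+# expressions, so the reason above only has to match a prefix of the
+# validation error message rather than the full string.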
+
+# get an archive policy rules
+
+    - name: get archive policy rule
+      GET: /v1/archive_policy_rule
+      status: 200
+      response_json_paths:
+        $.[0].metric_pattern: disk.foo.*
+        $.[1].metric_pattern: disk.*
+        $.[2].metric_pattern: "*"
+
+    - name: get unknown archive policy rule
+      GET: /v1/archive_policy_rule/foo
+      status: 404
+
+    - name: delete used archive policy
+      DELETE: /v1/archive_policy/low
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 400
+
+# rename an archive policy rule
+
+    - name: rename archive policy rule with missing name
+      PATCH: /v1/archive_policy_rule/test_rule3
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 400
+
+    - name: rename archive policy rule
+      PATCH: /v1/archive_policy_rule/test_rule3
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: test_rule3_renamed
+      status: 200
+
+    - name: get renamed archive policy rule
+      GET: /v1/archive_policy_rule/test_rule3_renamed
+      status: 200
+      response_json_paths:
+        $.metric_pattern: disk.*
+        $.archive_policy_name: low
+        $.name: test_rule3_renamed
+
+    - name: old archive policy rule doesn't exist
+      GET: /v1/archive_policy_rule/test_rule3
+      status: 404
+
+    - name: rename archive policy rule with existing name
+      PATCH: /v1/archive_policy_rule/test_rule2
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: test_rule3_renamed
+      status: 400
+      response_strings:
+          - 'Archive policy rule test_rule3_renamed already exists.'
+
+# delete rule as non admin
+
+    - name: delete archive policy rule non admin
+      DELETE: /v1/archive_policy_rule/test_rule1
+      status: 403
+
+# delete rule
+
+    - name: delete archive policy rule1
+      DELETE: /v1/archive_policy_rule/test_rule1
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+    - name: delete archive policy rule2
+      DELETE: /v1/archive_policy_rule/test_rule2
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+    - name: delete archive policy rule3
+      DELETE: /v1/archive_policy_rule/test_rule3_renamed
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+# delete again
+
+    - name: confirm delete archive policy rule
+      DELETE: /v1/archive_policy_rule/test_rule1
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 404
+
+    - name: delete missing archive policy rule utf8
+      DELETE: /v1/archive_policy_rule/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 404
+      response_strings:
+          - Archive policy rule ✔éñ☃ does not exist
diff --git a/gnocchi/tests/functional/gabbits/archive.yaml b/gnocchi/tests/functional/gabbits/archive.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b84da5f61a642b324ecb70115adb1741986f821a
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/archive.yaml
@@ -0,0 +1,640 @@
+#
+# Test the Archive Policy API to achieve coverage of just the
+# ArchivePoliciesController.
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+# Retrieve the empty list when there are no archive policies.
+# NOTE(chdent): This demonstrates what used to be considered a
+# security bug in JSON output:
+# http://flask.pocoo.org/docs/0.10/security/#json-security
+# The version described there is supposed to be fixed in most modern
+# browsers but there is a new version of the problem which is only
+# fixed in some:
+# http://haacked.com/archive/2009/06/25/json-hijacking.aspx/
+# The caveats point out that this is only an issue if your data is
+# sensitive, which in this case...?
+# However, the api-wg has made the recommendation that collections
+# should be returned as an object with a named key whose value is
+# a list, as follows: {"archive_policies": [...]}
+# This allows for extensibility such as future support for pagination.
+# Do we care?
+
+    - name: empty archive policy list
+      GET: /v1/archive_policy
+      response_headers:
+          content-type: /application/json/
+      response_strings:
+          - "[]"
+
+    - name: empty list text
+      GET: /v1/archive_policy
+      request_headers:
+          accept: text/plain
+      status: 406
+
+    - name: empty list html
+      GET: /v1/archive_policy
+      request_headers:
+          accept: text/html
+      status: 406
+
+# Fail to create an archive policy for various reasons.
+
+    - name: wrong content type
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: text/plain
+          # User admin
+          authorization: "basic YWRtaW46"
+      status: 415
+      response_strings:
+          - Unsupported Media Type
+
+    - name: wrong method
+      PUT: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      status: 405
+
+    - name: wrong authZ
+      POST: /v1/archive_policy
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+      status: 403
+
+    - name: missing authZ
+      POST: /v1/archive_policy
+      request_headers:
+        authorization: ""
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+      status: 401
+
+    - name: bad request body
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          cowsay: moo
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed/"
+        $.description.detail: ["cowsay"]
+
+    - name: missing definition
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: medium
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^required key not provided/"
+        $.description.detail: ["definition"]
+
+    - name: empty definition
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: medium
+          definition: []
+      status: 400
+      response_json_paths:
+          $.description.cause: "Invalid input"
+          $.description.reason: "/^length of value must be at least 1/"
+          $.description.detail: ["definition"]
+
+    - name: wrong value definition
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: somename
+          definition: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected a list/"
+        $.description.detail: ["definition"]
+
+    - name: useless definition
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: medium
+          definition:
+              - cowsay: moo
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed/"
+        $.description.detail: ["definition", '0', "cowsay"]
+
+# Create a valid archive policy.
+
+    - name: create archive policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+                points: 20
+              - granularity: 2 second
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/medium
+      status: 201
+
+# Retrieve it correctly and then poorly
+
+    - name: get archive policy
+      GET: $LOCATION
+      response_headers:
+          content-type: /application/json/
+      response_json_paths:
+          $.name: medium
+          $.definition[0].granularity: "0:00:01"
+          $.definition[0].points: 20
+          $.definition[0].timespan: "0:00:20"
+          $.definition[1].granularity: "0:00:02"
+          $.definition[1].points: null
+          $.definition[1].timespan: null
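+# Whichever two of granularity/points/timespan are supplied, the third is
+# derived: 1s × 20 points = 0:00:20 for the first definition, while the
+# second definition gave only a granularity, so points and timespan are
+# null.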
+
+    - name: get wrong accept
+      GET: $LAST_URL
+      request_headers:
+          accept: text/plain
+      status: 406
+
+# Update archive policy
+
+    - name: patch archive policy with bad definition
+      PATCH: $LAST_URL
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          definition:
+              - granularity: 1 second
+                points: 50
+                timespan: 1 hour
+              - granularity: 2 second
+      status: 400
+      response_strings:
+          - timespan ≠ granularity × points
+
+    - name: patch archive policy with missing granularity
+      PATCH: $LAST_URL
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          definition:
+              - granularity: 1 second
+                points: 50
+      status: 400
+      response_strings:
+          - "Archive policy medium does not support change: Cannot add or drop granularities"
+
+    - name: patch archive policy with non-matching granularity
+      PATCH: $LAST_URL
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          definition:
+              - granularity: 5 second
+                points: 20
+              - granularity: 2 second
+      status: 400
+      response_strings:
+          - "Archive policy medium does not support change: 1.0 granularity interval was changed"
+
+    - name: patch archive policy
+      PATCH: $LAST_URL
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          definition:
+              - granularity: 1 second
+                points: 50
+              - granularity: 2 second
+      status: 200
+      response_json_paths:
+          $.name: medium
+          $.definition[0].granularity: "0:00:01"
+          $.definition[0].points: 50
+          $.definition[0].timespan: "0:00:50"
+
+    - name: get patched archive policy
+      GET: $LAST_URL
+      response_headers:
+          content-type: /application/json/
+      response_json_paths:
+          $.name: medium
+          $.definition[0].granularity: "0:00:01"
+          $.definition[0].points: 50
+          $.definition[0].timespan: "0:00:50"
+
+# Unexpected methods
+
+    - name: post single archive
+      POST: $LAST_URL
+      status: 405
+
+    - name: put single archive
+      PUT: $LAST_URL
+      status: 405
+
+# Create another one and then test duplication
+
+    - name: create second policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: large
+          definition:
+              - granularity: 1 hour
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/large
+      status: 201
+
+    - name: create duplicate policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: large
+          definition:
+              - granularity: 1 hour
+      status: 409
+      response_strings:
+          - Archive policy large already exists
+
+# Create a unicode named policy
+
+    - name: post unicode policy name
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: ✔éñ☃
+          definition:
+              - granularity: 1 minute
+                points: 20
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      response_json_paths:
+          name: ✔éñ☃
+
+    - name: retrieve unicode policy name
+      GET: $LOCATION
+      response_json_paths:
+          name: ✔éñ☃
+
+    - name: post small unicode policy name
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: æ
+          definition:
+              - granularity: 1 minute
+                points: 20
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/%C3%A6
+      response_json_paths:
+          name: æ
+
+    - name: retrieve small unicode policy name
+      GET: $LOCATION
+      response_json_paths:
+          name: æ
+
+# List the collection
+
+    - name: get archive policy list
+      GET: /v1/archive_policy
+      response_strings:
+          - '"name": "medium"'
+          - '"name": "large"'
+      response_json_paths:
+          $[?name = "large"].definition[?granularity = "1:00:00"].points: null
+          $[?name = "medium"].definition[?granularity = "0:00:02"].points: null
+
+# Delete one as non-admin
+
+    - name: delete single archive non admin
+      DELETE: /v1/archive_policy/medium
+      status: 403
+
+# Delete one
+
+    - name: delete single archive
+      DELETE: /v1/archive_policy/medium
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+# It really is gone
+
+    - name: confirm delete
+      GET: $LAST_URL
+      status: 404
+
+# Fail to delete one that does not exist
+
+    - name: delete missing archive
+      DELETE: /v1/archive_policy/grandiose
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 404
+      response_strings:
+          - Archive policy grandiose does not exist
+
+    - name: delete archive utf8
+      DELETE: /v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+    - name: delete missing archive utf8 again
+      DELETE: /v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 404
+      response_strings:
+          - Archive policy ✔éñ☃ does not exist
+
+# Add metric using the policy and then be unable to delete policy
+
+    - name: create metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: large
+      status: 201
+
+    - name: delete in use policy
+      DELETE: /v1/archive_policy/large
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 400
+      response_strings:
+          - Archive policy large is still in use
+
+# Attempt to create illogical policies
+
+    - name: create illogical policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: complex
+          definition:
+              - granularity: 1 second
+                points: 60
+                timespan: "0:01:01"
+      status: 400
+      response_strings:
+          - timespan ≠ granularity × points
+
+    - name: create invalid granularity policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: complex
+          definition:
+              - granularity: 0
+                points: 60
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^Timespan must be positive for dictionary value/"
+        $.description.detail: ["definition", '0', "granularity"]
+
+    - name: create invalid points policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          accept: application/json
+      data:
+          name: complex
+          definition:
+              - granularity: 10
+                points: 0
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^value must be at least 1 for dictionary value/"
+        $.description.detail: ["definition", '0', "points"]
+
+    - name: create identical granularities policy
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: complex
+          definition:
+              - granularity: 1 second
+                points: 60
+              - granularity: 1 second
+                points: 120
+      status: 400
+      response_strings:
+          - "More than one archive policy uses granularity `1.0'"
+
+    - name: policy invalid unit
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: 227d0e1f-4295-4e4b-8515-c296c47d71d3
+          definition:
+              - granularity: 1 second
+                timespan: "1 shenanigan"
+      status: 400
+
+    - name: policy invalid aggregation method
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: invalid-agg-method
+          aggregation_methods:
+            - wtf
+          definition:
+              - granularity: 1 second
+                timespan: 1 hour
+      status: 400
+
+    - name: create policy when granularity is larger than timespan
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+        accept: application/json
+      data:
+          name: should-have-failed
+          definition:
+              - granularity: 2 hour
+                timespan: 1 hour
+      status: 400
+
+# Non admin user attempt
+
+    - name: fail to create policy non-admin
+      POST: /v1/archive_policy
+      data:
+          name: f1d150d9-02ad-4fe7-8872-c64b2bcaaa97
+          definition:
+              - granularity: 1 minute
+                points: 20
+      status: 403
+      response_strings:
+          - Access was denied to this resource
+
+# Back windows
+
+    - name: policy with back window
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: 7720a99d-cd3b-4aa4-8a6f-935bf0d46ded
+          back_window: 1
+          definition:
+              - granularity: 10s
+                points: 20
+      status: 201
+      response_json_paths:
+          $.back_window: 1
+          $.definition[0].timespan: "0:03:20"
+
+    - name: policy no back window
+      desc: and default seconds on int granularity
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: 22f2b99f-e629-4170-adc4-09b65635e056
+          back_window: 0
+          definition:
+              - granularity: 10
+                points: 20
+      status: 201
+      response_json_paths:
+          $.back_window: 0
+          $.definition[0].points: 20
+          $.definition[0].timespan: "0:03:20"
+
+# Timespan, points, granularity input tests
+
+    - name: policy float granularity
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: 595228db-ea29-4415-9d5b-ecb5366abb1b
+          definition:
+              - timespan: 1 hour
+                points: 1000
+      status: 201
+      response_json_paths:
+          $.definition[0].points: 1000
+          $.definition[0].granularity: "0:00:03.600000"
+          $.definition[0].timespan: "1:00:00"
+
+    - name: policy float timespan
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: 6bc72791-a27e-4417-a589-afc6d2067a38
+          definition:
+              - timespan: 1 hour
+                granularity: 7s
+      status: 201
+      response_json_paths:
+          $.definition[0].points: 514
+          $.definition[0].granularity: "0:00:07"
+          $.definition[0].timespan: "0:59:58"
+
+    - name: policy rated
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: 595228db-ea29-4415-9d5b-ecb5366abb1c
+          aggregation_methods:
+            - rate:mean
+            - rate:last
+          definition:
+              - timespan: 1 hour
+                points: 1000
+      status: 201
+      response_json_paths:
+          $.aggregation_methods.`sorted`:
+            - rate:last
+            - rate:mean
+          $.definition[0].points: 1000
+          $.definition[0].granularity: "0:00:03.600000"
+          $.definition[0].timespan: "1:00:00"
diff --git a/gnocchi/tests/functional/gabbits/async.yaml b/gnocchi/tests/functional/gabbits/async.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bc2ae3e83a305971040dccecd2505d327742dc95
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/async.yaml
@@ -0,0 +1,56 @@
+#
+# Test async processing of measures.
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+    - name: create archive policy
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: moderate
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: make a generic resource
+      POST: /v1/resource/generic
+      data:
+          id: 41937416-1644-497d-a0ed-b43d55a2b0ea
+          started_at: "2015-06-06T02:02:02.000000"
+          metrics:
+              some.counter:
+                  archive_policy_name: moderate
+      status: 201
+
+    - name: confirm no measures yet
+      GET: /v1/resource/generic/41937416-1644-497d-a0ed-b43d55a2b0ea/metric/some.counter/measures
+      response_json_paths:
+          $: []
+
+    - name: post some measures
+      POST: /v1/resource/generic/41937416-1644-497d-a0ed-b43d55a2b0ea/metric/some.counter/measures
+      data:
+          - timestamp: "2015-06-06T14:33:00"
+            value: 11
+          - timestamp: "2015-06-06T14:35:00"
+            value: 12
+      status: 202
+
+    - name: get some measures
+      GET: /v1/resource/generic/41937416-1644-497d-a0ed-b43d55a2b0ea/metric/some.counter/measures?refresh=true
+      response_strings:
+          - "2015"
+      response_json_paths:
+          $[-1][-1]: 12
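+# Measures come back as [timestamp, granularity, value] triples, so the
+# $[-1][-1] JSONPath picks the value of the most recent point (12).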
diff --git a/gnocchi/tests/functional/gabbits/base.yaml b/gnocchi/tests/functional/gabbits/base.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..fdde5f496a7dd14f9ac312c2eef4aef084643caf
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/base.yaml
@@ -0,0 +1,144 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+- name: get information on APIs
+  desc: Root URL must return information about API versions
+  GET: /
+  response_headers:
+      content-type: /^application\/json/
+  response_json_paths:
+      $.versions.[0].id: "v1.0"
+      $.versions.[0].status: "CURRENT"
+
+- name: archive policy post success
+  POST: /v1/archive_policy
+  request_headers:
+      # User admin
+      authorization: "basic YWRtaW46"
+  data:
+      name: test1
+      definition:
+          - granularity: 1 minute
+            points: 20
+  status: 201
+  response_headers:
+      content-type: /^application\/json/
+      location: $SCHEME://$NETLOC/v1/archive_policy/test1
+  response_json_paths:
+      $.name: test1
+      $.definition.[0].granularity: 0:01:00
+      $.definition.[0].points: 20
+      $.definition.[0].timespan: 0:20:00
+
+- name: post archive policy no auth
+  desc: this confirms that auth handling comes before data validation
+  POST: /v1/archive_policy
+  data:
+      definition:
+          - granularity: 1 second
+            points: 20
+  status: 403
+
+- name: post metric with archive policy
+  POST: /v1/metric
+  data:
+      archive_policy_name: test1
+  status: 201
+  response_headers:
+      content-type: /application\/json/
+  response_json_paths:
+      $.archive_policy_name: test1
+
+- name: retrieve metric info
+  GET: $LOCATION
+  status: 200
+  response_json_paths:
+      $.archive_policy.name: test1
+      $.creator: foobar
+
+- name: list the one metric
+  GET: /v1/metric
+  status: 200
+  response_json_paths:
+      $[0].archive_policy.name: test1
+
+- name: post a single measure
+  desc: post one measure
+  POST: /v1/metric/$RESPONSE['$[0].id']/measures
+  data:
+      - timestamp: "2013-01-01 23:23:20"
+        value: 1234.2
+  status: 202
+
+- name: Get list of resource type and URL
+  desc: Resources index page should return list of type associated with a URL
+  GET: /v1/resource/
+  response_headers:
+      content-type: /^application\/json/
+  status: 200
+  response_json_paths:
+      $.generic: $SCHEME://$NETLOC/v1/resource/generic
+
+- name: post generic resource
+  POST: /v1/resource/generic
+  data:
+    id: 5b7ebe90-4ad2-4c83-ad2c-f6344884ab70
+    started_at: "2014-01-03T02:02:02.000000"
+    user_id: 0fbb231484614b1a80131fc22f6afc9c
+    project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+  status: 201
+  response_headers:
+    location: $SCHEME://$NETLOC/v1/resource/generic/5b7ebe90-4ad2-4c83-ad2c-f6344884ab70
+  response_json_paths:
+    type: generic
+    started_at: "2014-01-03T02:02:02+00:00"
+    project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+    creator: foobar
+
+- name: post generic resource bad id
+  POST: /v1/resource/generic
+  data:
+    id: 1.2.3.4
+    started_at: "2014-01-03T02:02:02.000000"
+    user_id: 0fbb2314-8461-4b1a-8013-1fc22f6afc9c
+    project_id: f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea
+  status: 201
+  response_headers:
+    location: $SCHEME://$NETLOC/v1/resource/generic/a9c729cc-d1b0-5e6b-b5ba-8b5a7f45f1fc
+  response_json_paths:
+    type: generic
+    started_at: "2014-01-03T02:02:02+00:00"
+    project_id: f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea
+    creator: foobar
+    id: a9c729cc-d1b0-5e6b-b5ba-8b5a7f45f1fc
+    original_resource_id: 1.2.3.4
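+# Non-UUID resource ids are accepted but deterministically mapped to a
+# UUIDv5 (apparently derived from the original id and the creator), with
+# the original value preserved in original_resource_id. The same mapping
+# is why "foobar" becomes 2fbfbb20-8d56-5e1e-afb9-b3007da11fdf in later
+# files.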
+
+- name: get status denied
+  GET: /v1/status
+  status: 403
+
+- name: get status
+  GET: /v1/status
+  request_headers:
+    # User admin
+    authorization: "basic YWRtaW46"
+  response_json_paths:
+    $.storage.`len`: 2
+    $.metricd.`len`: 2
+
+- name: get status, no details
+  GET: /v1/status?details=False
+  request_headers:
+    # User admin
+    authorization: "basic YWRtaW46"
+  response_json_paths:
+    $.storage.`len`: 1
+    $.metricd.`len`: 2
diff --git a/gnocchi/tests/functional/gabbits/batch-measures.yaml b/gnocchi/tests/functional/gabbits/batch-measures.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ce91accc403021b43a7cc5865ed929c1427ad7ec
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/batch-measures.yaml
@@ -0,0 +1,326 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: simple
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: simple
+      status: 201
+
+    - name: push measurements to metric
+      POST: /v1/batch/metrics/measures
+      data:
+        $RESPONSE['$.id']:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+      response_headers:
+        content-length: 0
+
+    - name: push measurements to unknown metrics
+      POST: /v1/batch/metrics/measures
+      data:
+        37AEC8B7-C0D9-445B-8AB9-D3C6312DCF5C:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+        37AEC8B7-C0D9-445B-8AB9-D3C6312DCF5D:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 400
+      response_strings:
+        - "Unknown metrics: 37aec8b7-c0d9-445b-8ab9-d3c6312dcf5c, 37aec8b7-c0d9-445b-8ab9-d3c6312dcf5d"
+
+    - name: push measurements to unknown named metrics
+      POST: /v1/batch/resources/metrics/measures
+      data:
+        37AEC8B7-C0D9-445B-8AB9-D3C6312DCF5D:
+          cpu_util:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+        46c9418d-d63b-4cdd-be89-8f57ffc5952e:
+          disk.iops:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+      status: 400
+      response_strings:
+          - "Unknown metrics: 37aec8b7-c0d9-445b-8ab9-d3c6312dcf5d/cpu_util, 46c9418d-d63b-4cdd-be89-8f57ffc5952e/disk.iops"
+
+    - name: create second metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: simple
+      status: 201
+
+    - name: post a resource
+      POST: /v1/resource/generic
+      data:
+          id: 46c9418d-d63b-4cdd-be89-8f57ffc5952e
+          user_id: 0fbb2314-8461-4b1a-8013-1fc22f6afc9c
+          project_id: f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea
+          metrics:
+              disk.iops:
+                  archive_policy_name: simple
+              cpu_util:
+                  archive_policy_name: simple
+      status: 201
+
+    - name: post a second resource
+      POST: /v1/resource/generic
+      data:
+          id: f0f6038f-f82c-4f30-8d81-65db8be249fe
+          user_id: 0fbb2314-8461-4b1a-8013-1fc22f6afc9c
+          project_id: f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea
+          metrics:
+              net.speed:
+                  archive_policy_name: simple
+              mem_usage:
+                  archive_policy_name: simple
+      status: 201
+
+    - name: list metrics
+      GET: /v1/metric
+
+    - name: push measurements to two metrics
+      POST: /v1/batch/metrics/measures
+      data:
+        $RESPONSE['$[0].id']:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+        $RESPONSE['$[1].id']:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+      response_headers:
+        content-length: 0
+
+    - name: push measurements to two named metrics
+      POST: /v1/batch/resources/metrics/measures
+      data:
+        46c9418d-d63b-4cdd-be89-8f57ffc5952e:
+          disk.iops:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+          cpu_util:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+        f0f6038f-f82c-4f30-8d81-65db8be249fe:
+          mem_usage:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+          net.speed:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+      status: 202
+      response_headers:
+        content-length: 0
+
+    - name: create archive policy rule for auto
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: rule_auto
+        metric_pattern: "auto.*"
+        archive_policy_name: simple
+      status: 201
+
+    - name: push measurements to unknown named metrics and create it
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      data:
+        46c9418d-d63b-4cdd-be89-8f57ffc5952e:
+          auto.test:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+      status: 202
+      response_headers:
+        content-length: 0
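+# With ?create_metrics=true the server creates the missing metric on the
+# fly; its archive policy is chosen by the first matching archive policy
+# rule, here "auto.*" -> simple from the rule created just above.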
+
+    - name: push measurements to unknown named metrics and create it with new format
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      data:
+        46c9418d-d63b-4cdd-be89-8f57ffc5952e:
+          newformat:
+            archive_policy_name: simple
+            unit: "ks"
+            measures:
+              - timestamp: "2015-03-06T14:33:57"
+                value: 43.1
+              - timestamp: "2015-03-06T14:34:12"
+                value: 42
+          auto.newformat:
+            measures:
+              - timestamp: "2015-03-06T14:33:57"
+                value: 43.1
+              - timestamp: "2015-03-06T14:34:12"
+                value: 24
+          auto.test:
+            measures:
+              - timestamp: "2015-03-06T14:33:57"
+                value: 43.1
+              - timestamp: "2015-03-06T14:35:12"
+                value: 24
+      status: 202
+      response_headers:
+        content-length: 0
+
+    - name: get created metric to check creation
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/auto.test
+
+    - name: ensure measures have been posted
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/auto.test/measures?refresh=true&start=2015-03-06T14:34
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 24.0]
+
+    - name: get created metric to check creation with newformat
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/newformat
+
+    - name: get created metric to check creation with newformat2
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/auto.newformat
+
+    - name: ensure measures have been posted with newformat
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/newformat/measures?refresh=true&start=2015-03-06T14:34
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 42.0]
+
+    - name: ensure measures have been posted with newformat2
+      GET: /v1/resource/generic/46c9418d-d63b-4cdd-be89-8f57ffc5952e/metric/auto.newformat/measures?refresh=true&start=2015-03-06T14:34
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 24.0]
+
+    - name: push measurements to unknown named metrics and resource with create_metrics with uuid resource id
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      request_headers:
+        accept: application/json
+      data:
+        aaaaaaaa-d63b-4cdd-be89-111111111111:
+          auto.test:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+        bbbbbbbb-d63b-4cdd-be89-111111111111:
+          auto.test:
+            measures:
+              - timestamp: "2015-03-06T14:33:57"
+                value: 43.1
+              - timestamp: "2015-03-06T14:34:12"
+                value: 12
+      status: 400
+      response_json_paths:
+          $.description.cause: "Unknown resources"
+          $.description.detail[/original_resource_id]:
+            - original_resource_id: "aaaaaaaa-d63b-4cdd-be89-111111111111"
+              resource_id: "aaaaaaaa-d63b-4cdd-be89-111111111111"
+            - original_resource_id: "bbbbbbbb-d63b-4cdd-be89-111111111111"
+              resource_id: "bbbbbbbb-d63b-4cdd-be89-111111111111"
+
+    - name: push measurements to unknown named metrics and resource with create_metrics with uuid resource id where resources are listed several times
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      request_headers:
+        accept: application/json
+      data:
+        aaaaaaaa-d63b-4cdd-be89-111111111111:
+          auto.test:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+          auto.test2:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+        bbbbbbbb-d63b-4cdd-be89-111111111111:
+          auto.test:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+      status: 400
+      response_json_paths:
+          $.description.cause: "Unknown resources"
+          $.description.detail[/original_resource_id]:
+            - original_resource_id: "aaaaaaaa-d63b-4cdd-be89-111111111111"
+              resource_id: "aaaaaaaa-d63b-4cdd-be89-111111111111"
+            - original_resource_id: "bbbbbbbb-d63b-4cdd-be89-111111111111"
+              resource_id: "bbbbbbbb-d63b-4cdd-be89-111111111111"
+
+    - name: push measurements to unknown named metrics and resource with create_metrics with non uuid resource id
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      request_headers:
+        accept: application/json
+      data:
+        foobar:
+          auto.test:
+            - timestamp: "2015-03-06T14:33:57"
+              value: 43.1
+            - timestamp: "2015-03-06T14:34:12"
+              value: 12
+      status: 400
+      response_json_paths:
+          $.description.cause: "Unknown resources"
+          $.description.detail:
+            - resource_id: "2fbfbb20-8d56-5e1e-afb9-b3007da11fdf"
+              original_resource_id: "foobar"
+
+    - name: push measurements to named metrics and resource with create_metrics with wrong measure objects
+      POST: /v1/batch/resources/metrics/measures?create_metrics=true
+      request_headers:
+        accept: application/json
+      data:
+        46c9418d-d63b-4cdd-be89-8f57ffc5952e:
+          auto.test:
+            - [ "2015-03-06T14:33:57", 43.1]
+            - [ "2015-03-06T14:34:12", 12]
+      status: 400
+      response_strings:
+        - "unexpected measures format"
diff --git a/gnocchi/tests/functional/gabbits/cors.yaml b/gnocchi/tests/functional/gabbits/cors.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bd2395d55a78906427d378aa0542f098c54dddcd
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/cors.yaml
@@ -0,0 +1,21 @@
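+# Check that CORS preflight requests only get Access-Control-* headers
+# for origins allowed by the configuration (the fixture used here is
+# expected to allow http://foobar.com).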
+fixtures:
+    - ConfigFixture
+
+tests:
+  - name: get CORS headers for a non-allowed origin
+    OPTIONS: /v1/status
+    request_headers:
+      Origin: http://notallowed.com
+      Access-Control-Request-Method: GET
+    response_forbidden_headers:
+      - Access-Control-Allow-Origin
+      - Access-Control-Allow-Methods
+
+  - name: get CORS headers for an allowed origin
+    OPTIONS: /v1/status
+    request_headers:
+      Origin: http://foobar.com
+      Access-Control-Request-Method: GET
+    response_headers:
+      Access-Control-Allow-Origin: http://foobar.com
+      Access-Control-Allow-Methods: GET
diff --git a/gnocchi/tests/functional/gabbits/create-metric-with-resource-id.yaml b/gnocchi/tests/functional/gabbits/create-metric-with-resource-id.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0adb02ef21390efd0c0db3413fdf07a0fbf61b31
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/create-metric-with-resource-id.yaml
@@ -0,0 +1,62 @@
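+# Tests for creating a metric attached to a resource through the
+# resource_id attribute of POST /v1/metric.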
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+    accept: application/json
+
+tests:
+    - name: create archive policy
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: medium
+        definition:
+          - granularity: 1 second
+      status: 201
+
+    - name: create resource
+      POST: /v1/resource/generic
+      data:
+        id: foobar
+      status: 201
+
+    - name: create metric with a resource id
+      POST: /v1/metric
+      data:
+        resource_id: foobar
+        archive_policy_name: medium
+        name: cpu
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: medium
+          $.resource_id: 2fbfbb20-8d56-5e1e-afb9-b3007da11fdf
+          $.creator: foobar
+          $.name: cpu
+
+    - name: create metric with a resource id and an already existing name
+      POST: /v1/metric
+      data:
+        resource_id: foobar
+        archive_policy_name: medium
+        name: cpu
+      status: 400
+      response_json_paths:
+          $.description.cause: Named metric already exists
+          $.description.detail: cpu
+
+    - name: create metric with a resource id but no name
+      POST: /v1/metric
+      data:
+        resource_id: foobar
+        archive_policy_name: medium
+      status: 400
+      response_json_paths:
+          $.description.cause: Attribute value error
+          $.description.detail: name
+          $.description.reason: Name cannot be null if resource_id is not null
diff --git a/gnocchi/tests/functional/gabbits/healthcheck.yaml b/gnocchi/tests/functional/gabbits/healthcheck.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a2cf6fd1c4b5daa7ac607df059c2e371be21954b
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/healthcheck.yaml
@@ -0,0 +1,7 @@
+fixtures:
+    - ConfigFixture
+
+tests:
+    - name: healthcheck
+      GET: /healthcheck
+      status: 200
diff --git a/gnocchi/tests/functional/gabbits/history.yaml b/gnocchi/tests/functional/gabbits/history.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ad2de03ea23756188622600c788a8e4358a577da
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/history.yaml
@@ -0,0 +1,133 @@
+#
+# Test the resource history related API
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: create archive policy
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: low
+          definition:
+              - granularity: 1 hour
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/low
+
+# Try creating a new generic resource
+
+    - name: post generic resource
+      POST: /v1/resource/generic
+      data:
+          id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
+          content-type: /^application\/json/
+      response_json_paths:
+          $.creator: foobar
+          $.user_id: 0fbb231484614b1a80131fc22f6afc9c
+
+# Update it twice
+    - name: patch resource user_id
+      PATCH: /v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
+      data:
+          user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+      status: 200
+      response_json_paths:
+          user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+
+    - name: patch resource project_id
+      PATCH: $LAST_URL
+      data:
+          project_id: fe20a931-1012-4cc6-addc-39556ec60907
+          metrics:
+            mymetric:
+              archive_policy_name: low
+      status: 200
+      response_json_paths:
+          user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          project_id: fe20a931-1012-4cc6-addc-39556ec60907
+
+# List resources
+
+    - name: list all resources without history
+      GET: /v1/resource/generic
+      response_json_paths:
+          $[0].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[0].project_id: fe20a931-1012-4cc6-addc-39556ec60907
+
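+    # The two PATCH requests above each created a revision, so the
+    # history listing returns three entries for the resource.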
+    - name: list all resources with history
+      GET: $LAST_URL?details=true&history=true
+      response_json_paths:
+          $.`len`: 3
+          $[0].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[0].user_id: 0fbb231484614b1a80131fc22f6afc9c
+          $[0].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[1].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[1].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[1].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[2].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[2].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[2].project_id: fe20a931-1012-4cc6-addc-39556ec60907
+
+    - name: patch resource metrics
+      PATCH: /v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
+      data:
+          metrics:
+            foo:
+              archive_policy_name: low
+      status: 200
+
+    - name: list all resources with history no change after metrics update
+      GET: /v1/resource/generic?details=true&history=true
+      response_json_paths:
+          $.`len`: 3
+          $[0].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[0].user_id: 0fbb231484614b1a80131fc22f6afc9c
+          $[0].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[1].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[1].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[1].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[2].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[2].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[2].project_id: fe20a931-1012-4cc6-addc-39556ec60907
+
+    - name: create new metrics
+      POST: /v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1/metric
+      data:
+        foobar:
+          archive_policy_name: low
+      status: 200
+      response_json_paths:
+          $[/name][1].name: foobar
+          $[/name][1].resource_id: f93450f2-d8a5-4d67-9985-02511241e7d1
+
+    - name: list all resources with history no change after metrics creation
+      GET: /v1/resource/generic?history=true&details=true
+      response_json_paths:
+          $.`len`: 3
+          $[0].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[0].user_id: 0fbb231484614b1a80131fc22f6afc9c
+          $[0].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[1].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[1].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[1].project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          $[2].id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          $[2].user_id: f53c58a4-fdea-4c09-aac4-02135900be67
+          $[2].project_id: fe20a931-1012-4cc6-addc-39556ec60907
diff --git a/gnocchi/tests/functional/gabbits/http-proxy-to-wsgi.yaml b/gnocchi/tests/functional/gabbits/http-proxy-to-wsgi.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..368a620be3949a152f3d257a29721863825678e8
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/http-proxy-to-wsgi.yaml
@@ -0,0 +1,16 @@
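+# Check that the API builds links from the RFC 7239 Forwarded header
+# when running behind an HTTP proxy.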
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+  - name: test HTTP proxy headers
+    GET: /
+    request_headers:
+      Forwarded: for=192.0.2.60;proto=http;host=foobar
+    response_json_paths:
+      $.versions[0].links[0].href: http://foobar/gnocchi/v1/
diff --git a/gnocchi/tests/functional/gabbits/influxdb.yaml b/gnocchi/tests/functional/gabbits/influxdb.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c5a2da1698cb64ff9656f2ec843cfe264acfc24d
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/influxdb.yaml
@@ -0,0 +1,130 @@
+# Tests for the InfluxDB compatibility layer
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User admin
+    authorization: "basic YWRtaW46"
+    content-type: application/json
+
+tests:
+    - name: ping influxdb status with head
+      desc: test HEAD on ping; xfails because Pecan does not honor HEAD correctly yet
+      xfail: true
+      HEAD: /v1/influxdb/ping
+      status: 204
+
+    - name: ping influxdb status with get
+      GET: /v1/influxdb/ping
+      status: 204
+
+    - name: create a database
+      POST: /v1/influxdb/query?q=create+database+influxdbtest
+      status: 204
+
+    - name: check the resource type now exists
+      GET: /v1/resource_type/influxdbtest
+      status: 200
+      response_json_paths:
+        $:
+          name: influxdbtest
+          attributes: {}
+          state: active
+
+    - name: do an unrecognized query
+      POST: /v1/influxdb/query?q=select+metrics+plz
+      request_headers:
+        # This is useful to get the error in JSON format
+        accept: application/json
+      status: 501
+      response_json_paths:
+        $.description.cause: Not implemented error
+        $.description.detail: q
+        $.description.reason: Query not implemented
+
+    - name: create archive policy
+      POST: /v1/archive_policy
+      data:
+          name: low
+          definition:
+              - granularity: 1 hour
+      status: 201
+
+    - name: create archive policy rule for influxdb
+      POST: /v1/archive_policy_rule
+      data:
+        name: influxdb
+        metric_pattern: "*"
+        archive_policy_name: low
+      status: 201
+
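+    # InfluxDB line protocol: measurement,tag=value[,...] field=value timestamp.
+    # As the checks below show, each field becomes a metric named
+    # <measurement>.<field>@<other tags> on the resource identified by
+    # the "host" tag.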
+    - name: write a line
+      POST: /v1/influxdb/write?db=influxdbtest
+      request_headers:
+        content-type: text/plain
+      data:
+        "mymetric,host=foobar,mytag=myvalue field=123 1510581804179554816"
+      status: 204
+
+    - name: check resource created
+      GET: /v1/resource/influxdbtest/foobar
+      status: 200
+      response_json_paths:
+        $.original_resource_id: foobar
+        $.id: b4d568e4-7af1-5aec-ac3f-9c09fa3685a9
+        $.type: influxdbtest
+        $.creator: admin
+
+    - name: check metric created
+      GET: /v1/resource/influxdbtest/foobar/metric/mymetric.field@mytag=myvalue
+
+    - name: check measures processed
+      GET: /v1/resource/influxdbtest/foobar/metric/mymetric.field@mytag=myvalue/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2017-11-13T14:00:00+00:00", 3600.0, 123.0]
+
+    - name: write lines with different tag resource id
+      POST: /v1/influxdb/write?db=influxdbtest
+      request_headers:
+        content-type: text/plain
+        X-Gnocchi-InfluxDB-Tag-Resource-ID: mytag
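+        # This header selects the tag used as the Gnocchi resource id
+        # instead of the default "host" tag.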
+      data:
+        "mymetric,host=foobar,mytag=myvalue field=123 1510581804179554816\ncpu,path=/foobar,mytag=myvalue field=43i 1510581804179554816"
+      status: 204
+
+    - name: check resource created with different resource id
+      GET: /v1/resource/influxdbtest/myvalue
+      status: 200
+      response_json_paths:
+        $.original_resource_id: myvalue
+        $.id: 6b9e2039-98d0-5d8d-9153-2d7491cf13e5
+        $.type: influxdbtest
+        $.creator: admin
+
+    - name: check metric created with a different tag resource id
+      GET: /v1/resource/influxdbtest/myvalue/metric/mymetric.field@host=foobar
+
+    - name: check metric created with a different tag resource id and slash replaced
+      GET: /v1/resource/influxdbtest/myvalue/metric/cpu.field@path=_foobar
+
+    - name: create a generic resource for conflict
+      POST: /v1/resource/generic
+      data:
+        id: conflict
+      status: 201
+
+    - name: write lines with conflicting resource
+      POST: /v1/influxdb/write?db=influxdbtest
+      request_headers:
+        content-type: text/plain
+        accept: application/json
+      data:
+        "mymetric,host=conflict,mytag=myvalue field=123 1510581804179554816"
+      status: 409
+      response_json_paths:
+        $.title: "Conflict"
+        $.description.cause: "Resource already exists"
+        $.description.detail: "da19a545-af76-5081-9a88-f370baab66c6"
diff --git a/gnocchi/tests/functional/gabbits/metric-derived.yaml b/gnocchi/tests/functional/gabbits/metric-derived.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1d62c722a8ce47ff3f6e1f793afd807f91e75e7a
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/metric-derived.yaml
@@ -0,0 +1,199 @@
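+# Tests for rate-based aggregation methods: rate:<agg> applies <agg> to
+# the differences between consecutive values instead of the raw values.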
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: carrot-cake
+          aggregation_methods:
+              - rate:mean
+              - rate:max
+              - rate:95pct
+              - max
+          definition:
+              - granularity: 1 minute
+      status: 201
+
+    - name: create valid metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: carrot-cake
+      status: 201
+
+    - name: push measurements to metric
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:00"
+            value: 10
+          - timestamp: "2015-03-06T14:34:10"
+            value: 13
+          - timestamp: "2015-03-06T14:34:20"
+            value: 13
+          - timestamp: "2015-03-06T14:34:30"
+            value: 15
+          - timestamp: "2015-03-06T14:34:40"
+            value: 18
+          - timestamp: "2015-03-06T14:34:50"
+            value: 20
+          - timestamp: "2015-03-06T14:35:00"
+            value: 22
+          - timestamp: "2015-03-06T14:35:10"
+            value: 26
+          - timestamp: "2015-03-06T14:35:20"
+            value: 30
+          - timestamp: "2015-03-06T14:35:30"
+            value: 31
+          - timestamp: "2015-03-06T14:35:40"
+            value: 37
+          - timestamp: "2015-03-06T14:35:50"
+            value: 55
+          - timestamp: "2015-03-06T14:36:00"
+            value: 62
+          - timestamp: "2015-03-06T14:36:10"
+            value: 100
+          - timestamp: "2015-03-06T14:36:20"
+            value: 102
+          - timestamp: "2015-03-06T14:36:30"
+            value: 103
+          - timestamp: "2015-03-06T14:36:40"
+            value: 104
+          - timestamp: "2015-03-06T14:36:50"
+            value: 110
+      status: 202
+
+    - name: get measurements rate:mean
+      GET: /v1/metric/$HISTORY['create valid metric'].$RESPONSE['id']/measures?aggregation=rate:mean&refresh=true
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 2.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 5.833333333333333]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 9.166666666666666]
+
+    - name: get measurements rate:95pct
+      GET: /v1/metric/$HISTORY['create valid metric'].$RESPONSE['id']/measures?aggregation=rate:95pct
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 3.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 15.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 30.25]
+
+    - name: get measurements rate:max
+      GET: /v1/metric/$HISTORY['create valid metric'].$RESPONSE['id']/measures?aggregation=rate:max
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 3.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 18.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 38.0]
+
+    - name: get measurements max
+      GET: /v1/metric/$HISTORY['create valid metric'].$RESPONSE['id']/measures?aggregation=max
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:33:00+00:00', 60.0, 10.0]
+            - ['2015-03-06T14:34:00+00:00', 60.0, 20.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 55.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 110.0]
+
+    - name: create a second metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: carrot-cake
+      status: 201
+
+    - name: push measurements to the second metric
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:00"
+            value: 10
+          - timestamp: "2015-03-06T14:34:10"
+            value: 13
+          - timestamp: "2015-03-06T14:34:20"
+            value: 13
+          - timestamp: "2015-03-06T14:34:30"
+            value: 15
+          - timestamp: "2015-03-06T14:34:40"
+            value: 18
+          - timestamp: "2015-03-06T14:34:50"
+            value: 20
+          - timestamp: "2015-03-06T14:35:00"
+            value: 22
+          - timestamp: "2015-03-06T14:35:10"
+            value: 26
+      status: 202
+
+    - name: push other measurements to the second metric
+      POST: /v1/metric/$HISTORY['create a second metric'].$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:35:20"
+            value: 30
+          - timestamp: "2015-03-06T14:35:30"
+            value: 31
+          - timestamp: "2015-03-06T14:35:40"
+            value: 37
+          - timestamp: "2015-03-06T14:35:50"
+            value: 55
+          - timestamp: "2015-03-06T14:36:00"
+            value: 62
+          - timestamp: "2015-03-06T14:36:10"
+            value: 100
+          - timestamp: "2015-03-06T14:36:20"
+            value: 102
+          - timestamp: "2015-03-06T14:36:30"
+            value: 103
+          - timestamp: "2015-03-06T14:36:40"
+            value: 104
+          - timestamp: "2015-03-06T14:36:50"
+            value: 110
+      status: 202
+
+    - name: get measurements rate:mean second metric
+      GET: /v1/metric/$HISTORY['create a second metric'].$RESPONSE['id']/measures?aggregation=rate:mean&refresh=true
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 2.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 5.833333333333333]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 9.166666666666666]
+
+    - name: get measurements rate:95pct second metric
+      GET: /v1/metric/$HISTORY['create a second metric'].$RESPONSE['id']/measures?aggregation=rate:95pct
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 3.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 15.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 30.25]
+
+    - name: get measurements rate:max second metric
+      GET: /v1/metric/$HISTORY['create a second metric'].$RESPONSE['id']/measures?aggregation=rate:max
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:34:00+00:00', 60.0, 3.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 18.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 38.0]
+
+    - name: get measurements max second metric
+      GET: /v1/metric/$HISTORY['create a second metric'].$RESPONSE['id']/measures?aggregation=max
+      status: 200
+      response_json_paths:
+          $:
+            - ['2015-03-06T14:33:00+00:00', 60.0, 10.0]
+            - ['2015-03-06T14:34:00+00:00', 60.0, 20.0]
+            - ['2015-03-06T14:35:00+00:00', 60.0, 55.0]
+            - ['2015-03-06T14:36:00+00:00', 60.0, 110.0]
diff --git a/gnocchi/tests/functional/gabbits/metric-granularity.yaml b/gnocchi/tests/functional/gabbits/metric-granularity.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..79afba4845369e1009ad22dfd4425ddb02c00f13
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/metric-granularity.yaml
@@ -0,0 +1,54 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cookies
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create valid metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+
+    - name: push measurements to metric
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+
+    - name: get metric list
+      GET: /v1/metric
+      status: 200
+
+    - name: get measurements invalid granularity
+      GET: /v1/metric/$RESPONSE['$[0].id']/measures?granularity=42
+      status: 404
+      response_strings:
+        - Aggregation method 'mean' at granularity '42.0' for metric $RESPONSE['$[0].id'] does not exist
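+      # The "cookies" policy only defines a 1-second granularity, hence the 404.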
+
+    - name: get measurements granularity
+      GET: /v1/metric/$HISTORY['get metric list'].$RESPONSE['$[0].id']/measures?granularity=1&refresh=true
+      status: 200
+      response_json_paths:
+          $:
+            - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+            - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
diff --git a/gnocchi/tests/functional/gabbits/metric-list.yaml b/gnocchi/tests/functional/gabbits/metric-list.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f71b2d10205995326df7f110b7f80a05a3147ab7
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/metric-list.yaml
@@ -0,0 +1,181 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy 1
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: first_archive
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create archive policy 2
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: second_archive
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create metric 1
+      POST: /v1/metric
+      data:
+          name: "disk.io.rate"
+          unit: "B/s"
+          archive_policy_name: first_archive
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: first_archive
+          $.name: disk.io.rate
+          $.unit: B/s
+
+    - name: create metric 2
+      POST: /v1/metric
+      request_headers:
+        # User foobaz
+        authorization: "basic Zm9vYmF6Og=="
+      data:
+          name: "disk.io.rate"
+          unit: "B/s"
+          archive_policy_name: first_archive
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: first_archive
+          $.name: disk.io.rate
+          $.unit: B/s
+
+    - name: create metric 3
+      POST: /v1/metric
+      request_headers:
+        # User jd
+        authorization: "basic amQ6"
+      data:
+          name: "cpu_util"
+          unit: "%"
+          archive_policy_name: first_archive
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: first_archive
+          $.name: cpu_util
+          $.unit: "%"
+
+    - name: create metric 4
+      POST: /v1/metric
+      data:
+          name: "cpu"
+          unit: "ns"
+          archive_policy_name: second_archive
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: second_archive
+          $.name: cpu
+          $.unit: ns
+
+    - name: list metrics
+      GET: /v1/metric
+      response_json_paths:
+          $.`len`: 4
+
+    - name: list metrics by id
+      GET: /v1/metric?id=$HISTORY['create metric 1'].$RESPONSE['id']
+      response_json_paths:
+          $.`len`: 1
+          $[0].name: disk.io.rate
+          $[0].archive_policy.name: first_archive
+
+    - name: list metrics by name
+      GET: /v1/metric?name=disk.io.rate
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_json_paths:
+          $.`len`: 2
+          $[0].name: disk.io.rate
+          $[1].name: disk.io.rate
+          $[0].archive_policy.name: first_archive
+          $[1].archive_policy.name: first_archive
+
+    - name: list metrics by unit
+      GET: /v1/metric?unit=ns
+      response_json_paths:
+          $.`len`: 1
+          $[0].name: cpu
+          $[0].archive_policy.name: second_archive
+
+    - name: list metrics by archive_policy
+      GET: /v1/metric?archive_policy_name=first_archive&sort=name:desc
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_json_paths:
+          $.`len`: 3
+          $[0].name: disk.io.rate
+          $[1].name: disk.io.rate
+          $[2].name: cpu_util
+          $[0].archive_policy.name: first_archive
+          $[1].archive_policy.name: first_archive
+          $[2].archive_policy.name: first_archive
+
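+    # Pagination is marker-based: the Link header points to the next page
+    # using the id of the last metric returned as the marker.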
+    - name: list metrics by archive_policy with limit and pagination links page 1
+      GET: /v1/metric?archive_policy_name=first_archive&sort=name:desc&limit=2
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_headers:
+          Link: "<$SCHEME://$NETLOC/v1/metric?archive_policy_name=first_archive&limit=2&marker=$RESPONSE['$[1].id']&sort=name%3Adesc>; rel=\"next\""
+      response_json_paths:
+          $.`len`: 2
+          $[0].name: disk.io.rate
+          $[1].name: disk.io.rate
+          $[0].archive_policy.name: first_archive
+          $[1].archive_policy.name: first_archive
+
+    - name: list metrics by archive_policy with limit and pagination links page 2
+      GET: /v1/metric?archive_policy_name=first_archive&limit=2&marker=$RESPONSE['$[1].id']&sort=name:desc
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_json_paths:
+          $.`len`: 1
+          $[0].name: cpu_util
+          $[0].archive_policy.name: first_archive
+
+    - name: list metrics ensure no Link header
+      GET: /v1/metric?archive_policy_name=first_archive&sort=name:desc
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      xfail: true
+      response_headers:
+          Link: whatever
+
+    - name: list metrics by creator jd
+      GET: /v1/metric?creator=jd
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_json_paths:
+          $.`len`: 1
+
+    - name: list metrics by creator foobaz
+      GET: /v1/metric?creator=foobaz
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      response_json_paths:
+          $.`len`: 1
diff --git a/gnocchi/tests/functional/gabbits/metric-timestamp-format.yaml b/gnocchi/tests/functional/gabbits/metric-timestamp-format.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e376189d139288daafdc8342a00ba65a471ee0f3
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/metric-timestamp-format.yaml
@@ -0,0 +1,53 @@
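+# Timestamps may be ISO 8601 strings, epoch numbers, or relative offsets
+# such as "-5 minutes"; mixing relative and absolute timestamps in a
+# single request is rejected.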
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cookies
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: push measurements to metric with relative timestamp
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "-5 minutes"
+            value: 43.1
+      status: 202
+
+    - name: create metric 2
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: push measurements to metric with mixed timestamps
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: 1478012832
+            value: 43.1
+          - timestamp: "-5 minutes"
+            value: 43.1
+      status: 400
diff --git a/gnocchi/tests/functional/gabbits/metric.yaml b/gnocchi/tests/functional/gabbits/metric.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2111a185da88dfd899199398ffea6c7c3dd366b4
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/metric.yaml
@@ -0,0 +1,411 @@
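+# Tests for the /v1/metric endpoint: creation and validation, pushing
+# measures, retrieving aggregated measures, and deletion.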
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: wrong metric
+      desc: https://bugs.launchpad.net/gnocchi/+bug/1429949
+      GET: /v1/metric/foobar
+      status: 404
+
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cookies
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule
+        metric_pattern: "disk.io.*"
+        archive_policy_name: cookies
+      status: 201
+
+    - name: create alt archive policy
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cream
+          definition:
+              - granularity: 5 second
+      status: 201
+
+    - name: create alt archive policy rule
+      desc: extra rule that won't be matched
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_ignore_rule
+        metric_pattern: "disk.*"
+        archive_policy_name: cream
+      status: 201
+
+    - name: get metric empty
+      GET: /v1/metric
+      status: 200
+      response_strings:
+          - "[]"
+
+    - name: get metric list with nonexistent sort key
+      GET: /v1/metric?sort=nonexistent_key:asc
+      status: 400
+      response_strings:
+          - "Sort key supplied is invalid: nonexistent_key"
+
+    - name: create metric with name and unit
+      POST: /v1/metric
+      data:
+          name: "disk.io.rate"
+          unit: "B/s"
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+          $.name: disk.io.rate
+          $.unit: B/s
+
+    - name: create metric with invalid name
+      POST: /v1/metric
+      data:
+          name: "disk/io/rate"
+          unit: "B/s"
+      status: 400
+      response_strings:
+        - "'/' is not supported in metric name"
+
+    - name: create metric with name and over length unit
+      POST: /v1/metric
+      request_headers:
+        accept: application/json
+      data:
+          name: "disk.io.rate"
+          unit: "over_length_unit_over_length_unit"
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^length of value must be at most 31 for dictionary value @ data/"
+        $.description.detail: ["unit"]
+
+    - name: create metric with name no rule
+      POST: /v1/metric
+      data:
+          name: "volume.io.rate"
+      status: 400
+      response_strings:
+          - No archive policy name specified and no archive policy rule found matching the metric name volume.io.rate
+
+    - name: create metric bad archive policy
+      POST: /v1/metric
+      data:
+          archive_policy_name: bad-cookie
+      status: 400
+      response_strings:
+          - Archive policy bad-cookie does not exist
+
+    - name: create metric bad content-type
+      POST: /v1/metric
+      request_headers:
+          content-type: plain/text
+      data: '{"archive_policy_name": "cookies"}'
+      status: 415
+
+    - name: create valid metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: get valid metric id
+      GET: /v1/metric/$RESPONSE['$.id']
+      status: 200
+      response_json_paths:
+        $.archive_policy.name: cookies
+
+    - name: push measurements to metric before epoch
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "1915-03-06T14:33:57"
+            value: 43.1
+      status: 400
+      response_strings:
+        - Timestamp must be after Epoch
+
+    - name: list valid metrics
+      GET: /v1/metric
+      response_json_paths:
+          $[0].archive_policy.name: cookies
+
+    - name: push measurements to metric with bad timestamp
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: "1915-100-06T14:33:57"
+            value: 43.1
+      status: 400
+
+    - name: push measurements to metric epoch format
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: 1425652437.0
+            value: 43.1
+      status: 202
+      response_headers:
+        content-length: 0
+
+    - name: push measurements to metric
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+
+    - name: get measurements invalid agg method
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?aggregation=wtf
+      request_headers:
+        accept: application/json
+      status: 404
+      response_json_paths:
+        $.description.cause: Aggregation method does not exist for this metric
+        $.description.detail.metric: $HISTORY['list valid metrics'].$RESPONSE['$[0].id']
+        $.description.detail.aggregation_method: wtf
+
+    - name: get measurements by start
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true&start=2015-03-06T14:34
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+
+    - name: get measurements by start with epoch
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true&start=1425652440
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+
+    - name: get measurements from metric
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+
+    - name: get measurements from metric invalid granularity
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?granularity=foobar
+      request_headers:
+        accept: application/json
+      status: 400
+      response_json_paths:
+        $.description.cause: Argument value error
+        $.description.reason: Unable to parse timespan
+        $.description.detail: granularity
+
+    - name: push measurements to metric again
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: "2015-03-06T14:34:15"
+            value: 16
+          - timestamp: "2015-03-06T14:35:12"
+            value: 9
+          - timestamp: "2015-03-06T14:35:15"
+            value: 11
+      status: 202
+
+    - name: get measurements from metric and resample
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true&resample=60&granularity=1
+      response_json_paths:
+        $:
+          - ["2015-03-06T14:33:00+00:00", 60.0, 43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, 14.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, 10.0]
+
+    - name: get measurements from metric and resample and negative
+      POST: /v1/aggregates?granularity=1
+      data:
+        operations: "(negative (resample mean 60 (metric $HISTORY['list valid metrics'].$RESPONSE['$[0].id'] mean)))"
+      response_json_paths:
+        $.measures."$HISTORY['list valid metrics'].$RESPONSE['$[0].id']".mean:
+          - ["2015-03-06T14:33:00+00:00", 60.0, -43.1]
+          - ["2015-03-06T14:34:00+00:00", 60.0, -14.0]
+          - ["2015-03-06T14:35:00+00:00", 60.0, -10.0]
+
+    - name: push negative measurements to metric again
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: "2015-03-06T14:36:15"
+            value: -16
+          - timestamp: "2015-03-06T14:37:15"
+            value: -23
+      status: 202
+
+    - name: push measurements with wrong measure objects
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      request_headers:
+        accept: application/json
+      data:
+        - [ "2015-03-06T14:33:57", 43.1]
+        - [ "2015-03-06T14:34:12", 12]
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.detail: []
+        $.description.reason: "unexpected measures format"
+
+    - name: refresh metric
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true
+
+    - name: get absolute measurements from metric
+      POST: /v1/aggregates
+      data:
+        operations: "(absolute (metric $HISTORY['list valid metrics'].$RESPONSE['$[0].id'] mean))"
+      response_json_paths:
+        $.measures."$HISTORY['list valid metrics'].$RESPONSE['$[0].id']".mean:
+          - ["2015-03-06T14:33:57+00:00", 1.0, 43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, 12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 11.0]
+          - ["2015-03-06T14:36:15+00:00", 1.0, 16.0]
+          - ["2015-03-06T14:37:15+00:00", 1.0, 23.0]
+
+    - name: rolling-mean
+      POST: /v1/aggregates
+      data:
+        operations: "(rolling mean 2 (metric $HISTORY['list valid metrics'].$RESPONSE['$[0].id'] mean))"
+      status: 200
+      response_json_paths:
+        $.measures."$HISTORY['list valid metrics'].$RESPONSE['$[0].id']".mean:
+          - ["2015-03-06T14:34:12+00:00", 1.0, 27.55]
+          - ["2015-03-06T14:34:15+00:00", 1.0, 14.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, 12.5]
+          - ["2015-03-06T14:35:15+00:00", 1.0, 10.0]
+          - ["2015-03-06T14:36:15+00:00", 1.0, -2.5]
+          - ["2015-03-06T14:37:15+00:00", 1.0, -19.5]
+
+    - name: get measurements from metric and two operations
+      POST: /v1/aggregates
+      data:
+        operations: "(negative (absolute (metric $HISTORY['list valid metrics'].$RESPONSE['$[0].id'] mean)))"
+      response_json_paths:
+        $.measures."$HISTORY['list valid metrics'].$RESPONSE['$[0].id']".mean:
+          - ["2015-03-06T14:33:57+00:00", 1.0, -43.1]
+          - ["2015-03-06T14:34:12+00:00", 1.0, -12.0]
+          - ["2015-03-06T14:34:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:35:12+00:00", 1.0, -9.0]
+          - ["2015-03-06T14:35:15+00:00", 1.0, -11.0]
+          - ["2015-03-06T14:36:15+00:00", 1.0, -16.0]
+          - ["2015-03-06T14:37:15+00:00", 1.0, -23.0]
+
+    - name: get measurements from metric and invalid operations
+      POST: /v1/aggregates
+      data:
+        operations: "(notexist (absolute (metric $HISTORY['list valid metrics'].$RESPONSE['$[0].id'] mean)))"
+      status: 400
+
+    - name: get measurements from metric and resample no granularity
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=60
+      status: 400
+      response_strings:
+        - A granularity must be specified to resample
+
+    - name: get measurements from metric and bad resample
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=abc
+      status: 400
+
+    - name: create valid metric two
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: push invalid measurements to metric
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 12
+          - timestamp: "2015-03-06T14:34:12"
+            value: "foobar"
+      status: 400
+
+    - name: create valid metric three
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: push invalid measurements to metric bis
+      POST: /v1/metric/$RESPONSE['$.id']/measures
+      data: 1
+      status: 400
+
+    - name: add measure unknown metric
+      POST: /v1/metric/fake/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+      status: 404
+
+    - name: get metric list for authenticated user
+      request_headers:
+        # User foobaz
+        authorization: "basic Zm9vYmF6Og=="
+      GET: /v1/metric
+
+    - name: get measures unknown metric
+      GET: /v1/metric/fake/measures
+      status: 404
+
+    - name: get metric list for aggregates
+      GET: /v1/metric
+      status: 200
+      response_json_paths:
+          $[0].archive_policy.name: cookies
+
+    - name: get measure unknown aggregates
+      GET: /v1/aggregation/metric?metric=$HISTORY['get metric list for aggregates'].$RESPONSE['$[0].id']&aggregation=last
+      status: 404
+      request_headers:
+        accept: application/json
+      response_json_paths:
+        $.description.cause: Aggregation method does not exist for this metric
+        $.description.detail.metric: $HISTORY['get metric list for aggregates'].$RESPONSE['$[0].id']
+        $.description.detail.aggregation_method: last
+
+    - name: aggregate measure unknown metric
+      GET: /v1/aggregation/metric?metric=cee6ef1f-52cc-4a16-bbb5-648aedfd1c37
+      status: 404
+      response_strings:
+          - Metric cee6ef1f-52cc-4a16-bbb5-648aedfd1c37 does not exist
+
+    - name: delete metric
+      DELETE: /v1/metric/$HISTORY['get metric list for aggregates'].$RESPONSE['$[0].id']
+      status: 204
+
+    - name: delete metric again
+      DELETE: $LAST_URL
+      status: 404
+
+    - name: delete non existent metric
+      DELETE: /v1/metric/foo
+      status: 404
diff --git a/gnocchi/tests/functional/gabbits/pagination.yaml b/gnocchi/tests/functional/gabbits/pagination.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0e3a7f4cde27b9d2d1e3b99f34c40a580e2c4eb4
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/pagination.yaml
@@ -0,0 +1,369 @@
+#
+# Test the pagination API
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+#
+# Create resources for this scenario
+#
+    - name: post resource 1
+      POST: /v1/resource/generic
+      data:
+          id: 57a9e836-87b8-4a21-9e30-18a474b98fef
+          started_at: "2014-01-01T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 2
+      POST: $LAST_URL
+      data:
+          id: 4facbf7e-a900-406d-a828-82393f7006b3
+          started_at: "2014-01-02T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 3
+      POST: $LAST_URL
+      data:
+          id: 36775172-ebc9-4060-9870-a649361bc3ab
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 4
+      POST: $LAST_URL
+      data:
+          id: 28593168-52bb-43b5-a6db-fc2343aac02a
+          started_at: "2014-01-04T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 5
+      POST: $LAST_URL
+      data:
+          id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          started_at: "2014-01-05T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+#
+# Basic resource limit/ordering tests
+#
+    - name: list first two items default order
+      GET: /v1/resource/generic?limit=2
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: 57a9e836-87b8-4a21-9e30-18a474b98fef
+          $[1].id: 4facbf7e-a900-406d-a828-82393f7006b3
+
+    - name: list next three items default order
+      GET: /v1/resource/generic?limit=4&marker=4facbf7e-a900-406d-a828-82393f7006b3
+      response_json_paths:
+          $.`len`: 3
+          $[0].id: 36775172-ebc9-4060-9870-a649361bc3ab
+          $[1].id: 28593168-52bb-43b5-a6db-fc2343aac02a
+          $[2].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+
+    - name: list first two items order by id without direction
+      GET: /v1/resource/generic?limit=2&sort=id
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[1].id: 28593168-52bb-43b5-a6db-fc2343aac02a
+
+    - name: list first two items order by id
+      GET: /v1/resource/generic?limit=2&sort=id:asc
+      response_headers:
+          link: "<$SCHEME://$NETLOC/v1/resource/generic?limit=2&marker=28593168-52bb-43b5-a6db-fc2343aac02a&sort=id%3Aasc>; rel=\"next\""
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[1].id: 28593168-52bb-43b5-a6db-fc2343aac02a
+
+    - name: list next three items order by id
+      GET: /v1/resource/generic?limit=4&sort=id:asc&marker=28593168-52bb-43b5-a6db-fc2343aac02a
+      response_json_paths:
+          $.`len`: 3
+          $[0].id: 36775172-ebc9-4060-9870-a649361bc3ab
+          $[1].id: 4facbf7e-a900-406d-a828-82393f7006b3
+          $[2].id: 57a9e836-87b8-4a21-9e30-18a474b98fef
+
+    - name: search for some resources with limit, order and marker
+      POST: /v1/search/resource/generic?limit=2&sort=id:asc&marker=36775172-ebc9-4060-9870-a649361bc3ab
+      data:
+        "or": [
+            {"=": {"id": 36775172-ebc9-4060-9870-a649361bc3ab}},
+            {"=": {"id": 4facbf7e-a900-406d-a828-82393f7006b3}},
+            {"=": {"id": 57a9e836-87b8-4a21-9e30-18a474b98fef}},
+        ]
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: 4facbf7e-a900-406d-a828-82393f7006b3
+          $[1].id: 57a9e836-87b8-4a21-9e30-18a474b98fef
+
+#
+# Invalid resource limit/ordering
+#
+    - name: invalid sort_key
+      GET: /v1/resource/generic?sort=invalid:asc
+      status: 400
+
+    - name: invalid sort_dir
+      GET: /v1/resource/generic?sort=id:invalid
+      status: 400
+
+    - name: invalid marker
+      GET: /v1/resource/generic?marker=d44b3f4c-27bc-4ace-b81c-2a8e60026874
+      status: 400
+
+    - name: invalid negative limit
+      GET: /v1/resource/generic?limit=-2
+      status: 400
+
+    - name: invalid limit
+      GET: /v1/resource/generic?limit=invalid
+      status: 400
+
+#
+# Default limit
+#
+
+    - name: post resource 6
+      POST: /v1/resource/generic
+      data:
+          id: 465f87b2-61f7-4118-adec-1d96a78af401
+          started_at: "2014-01-02T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 7
+      POST: $LAST_URL
+      data:
+          id: 9b6af245-57df-4ed6-a8c0-f64b77d8867f
+          started_at: "2014-01-28T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post resource 8
+      POST: $LAST_URL
+      data:
+          id: d787aa85-5743-4443-84f9-204270bc141a
+          started_at: "2014-01-31T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: default limit
+      GET: $LAST_URL
+      response_json_paths:
+          $.`len`: 7
+          $[-1].id: 9b6af245-57df-4ed6-a8c0-f64b77d8867f
+
+
+    - name: update resource 5
+      PATCH: /v1/resource/generic/1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+      data:
+          ended_at: "2014-01-30T02:02:02.000000"
+
+    - name: update resource 5 again
+      PATCH: $LAST_URL
+      data:
+          ended_at: "2014-01-31T02:02:02.000000"
+
+    - name: default limit with history and multiple sort key
+      GET: /v1/resource/generic?history=true&sort=id:asc&sort=ended_at:desc-nullslast
+      response_json_paths:
+          $.`len`: 7
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[0].ended_at: "2014-01-31T02:02:02+00:00"
+          $[1].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[1].ended_at: "2014-01-30T02:02:02+00:00"
+          $[2].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[2].ended_at: null
+
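+    # With history enabled, pagination markers take the form
+    # <resource id>@<revision index>, as the Link headers below show.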
+    - name: limit with history and links page 1
+      GET: /v1/resource/generic?history=true&sort=id:asc&sort=ended_at:asc-nullsfirst&limit=1
+      response_headers:
+          link: "<$SCHEME://$NETLOC/v1/resource/generic?history=true&limit=1&marker=1e3d5702-2cbf-46e0-ba13-0ddaa3c71150%401&sort=id%3Aasc&sort=ended_at%3Aasc-nullsfirst>; rel=\"next\""
+      response_json_paths:
+          $.`len`: 1
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[0].ended_at: null
+
+    - name: limit with history and links page 2
+      GET: /v1/resource/generic?history=true&limit=1&marker=1e3d5702-2cbf-46e0-ba13-0ddaa3c71150@1&sort=id:asc&sort=ended_at:asc-nullsfirst
+      response_headers:
+          link: "<$SCHEME://$NETLOC/v1/resource/generic?history=true&limit=1&marker=1e3d5702-2cbf-46e0-ba13-0ddaa3c71150%402&sort=id%3Aasc&sort=ended_at%3Aasc-nullsfirst>; rel=\"next\""
+      response_json_paths:
+          $.`len`: 1
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[0].ended_at: "2014-01-30T02:02:02+00:00"
+
+    - name: limit with history and links page 3 with no limit
+      GET: /v1/resource/generic?history=true&marker=1e3d5702-2cbf-46e0-ba13-0ddaa3c71150@2&sort=id:asc&sort=ended_at:asc-nullsfirst
+      response_headers:
+          link: "<$SCHEME://$NETLOC/v1/resource/generic?history=true&limit=7&marker=9b6af245-57df-4ed6-a8c0-f64b77d8867f%40-1&sort=id%3Aasc&sort=ended_at%3Aasc-nullsfirst>; rel=\"next\""
+      response_json_paths:
+          $.`len`: 7
+          $[0].id: 1e3d5702-2cbf-46e0-ba13-0ddaa3c71150
+          $[0].ended_at: "2014-01-31T02:02:02+00:00"
+
+#
+# Create metrics
+#
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+      data:
+          name: dummy_policy
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create metric with name1
+      POST: /v1/metric
+      data:
+          name: "dummy1"
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name2
+      POST: /v1/metric
+      data:
+          name: "dummy2"
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name3
+      POST: /v1/metric
+      data:
+          name: "dummy3"
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name4
+      POST: /v1/metric
+      data:
+          name: "dummy4"
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name5
+      POST: /v1/metric
+      data:
+          name: "dummy5"
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: list all default order
+      GET: /v1/metric
+
+    - name: list first two metrics default order
+      GET: /v1/metric?limit=2
+      response_json_paths:
+          $.`len`: 2
+          $[0].name: $RESPONSE['$[0].name']
+          $[1].name: $RESPONSE['$[1].name']
+
+    - name: list next three metrics default order
+      GET: /v1/metric?limit=4&marker=$HISTORY['list all default order'].$RESPONSE['$[1].id']
+      response_json_paths:
+          $.`len`: 3
+          $[0].name: $HISTORY['list all default order'].$RESPONSE['$[2].name']
+          $[1].name: $HISTORY['list all default order'].$RESPONSE['$[3].name']
+          $[2].name: $HISTORY['list all default order'].$RESPONSE['$[4].name']
+
+    - name: list first two metrics order by name without direction
+      GET: /v1/metric?limit=2&sort=name
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+          $[0].name: dummy1
+          $[1].name: dummy2
+
+    - name: list first two metrics order by name
+      GET: /v1/metric?limit=2&sort=name:asc
+      response_json_paths:
+          $.`len`: 2
+          $[0].name: dummy1
+          $[1].name: dummy2
+
+    - name: list next three metrics order by name
+      GET: /v1/metric?limit=4&sort=name:asc&marker=$RESPONSE['$[1].id']
+      response_json_paths:
+          $.`len`: 3
+          $[0].name: dummy3
+          $[1].name: dummy4
+          $[2].name: dummy5
+
+#
+# Default metric limit
+#
+
+    - name: create metric with name6
+      POST: /v1/metric
+      data:
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name7
+      POST: /v1/metric
+      data:
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: create metric with name8
+      POST: /v1/metric
+      data:
+          archive_policy_name: dummy_policy
+      status: 201
+
+    - name: default metric limit
+      GET: /v1/metric
+      response_json_paths:
+          $.`len`: 7
+
+#
+# Invalid metrics limit/ordering
+#
+
+    - name: metric invalid sort_key
+      GET: /v1/metric?sort=invalid:asc
+      status: 400
+
+    - name: metric invalid sort_dir
+      GET: /v1/metric?sort=id:invalid
+      status: 400
+
+    - name: metric invalid marker
+      GET: /v1/metric?marker=d44b3f4c-27bc-4ace-b81c-2a8e60026874
+      status: 400
+
+    - name: metric invalid negative limit
+      GET: /v1/metric?limit=-2
+      status: 400
+
+    - name: metric invalid limit
+      GET: /v1/metric?limit=invalid
+      status: 400
diff --git a/gnocchi/tests/functional/gabbits/prometheus.yaml b/gnocchi/tests/functional/gabbits/prometheus.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ee6bed902d27378ee3b11ef4a6f4f1b678a83a3c
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/prometheus.yaml
@@ -0,0 +1,79 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    authorization: "basic YWRtaW46"
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      data:
+          name: space
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create archive policy rule
+      POST: /v1/archive_policy_rule
+      data:
+        name: test_prom
+        metric_pattern: "*"
+        archive_policy_name: space
+      status: 201
+
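+    # The catch-all "*" pattern means every metric auto-created by the
+    # Prometheus write endpoint below is matched to the "space" policy
+    # without naming a policy explicitly.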
+    - name: post some measures
+      POST: /v1/prometheus/write
+      request_headers:
+        authorization: "basic YWRtaW46"
+        content-type: application/x-protobuf
+        content-encoding: snappy
+      data: <@prometheus_fixtures/031b586e-ebe1-4737-812e-cf0ddf26f5ad.dump
+      status: 202
+
+    - name: ensure resource has been created
+      GET: /v1/resource/prometheus
+      response_json_paths:
+          $.`len`: 1
+          $[0].job: "prometheus"
+          $[0].instance: "localhost:9090"
+
+    - name: ensure the resource has all its metrics created
+      GET: /v1/resource/prometheus/prometheus@localhost:9090
+      response_json_paths:
+          $.metrics.`len`: 56
+
+    - name: check metrics
+      GET: /v1/resource/prometheus/prometheus@localhost:9090/metric
+      response_json_paths:
+          $[\name].[24].name: 'prometheus_sd_marathon_refresh_duration_seconds_sum'
+
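+    # Measures come back as [timestamp, granularity in seconds, value]
+    # triples; the 1.0 granularity matches the "space" policy defined above.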
+    - name: check measures
+      GET: /v1/resource/prometheus/prometheus@localhost:9090/metric/scrape_samples_scraped/measures?refresh=true
+      response_json_paths:
+          $[0]: ['2017-09-23T06:02:58+00:00', 1.0, 558.0]
+
+    - name: post some measures second
+      POST: /v1/prometheus/write
+      request_headers:
+        authorization: "basic YWRtaW46"
+        content-type: application/x-protobuf
+        content-encoding: snappy
+      data: <@prometheus_fixtures/a0c06674-a5ef-4621-883c-e94880a2de02.dump
+      status: 202
+
+    - name: post some measures third
+      POST: /v1/prometheus/write
+      request_headers:
+        authorization: "basic YWRtaW46"
+        content-type: application/x-protobuf
+        content-encoding: snappy
+      data: <@prometheus_fixtures/1ea8f6f7-eebe-49d5-8276-ceb2d56c5ba4.dump
+      status: 202
+
+    - name: check measures second
+      GET: /v1/resource/prometheus/prometheus@localhost:9090/metric/scrape_samples_scraped/measures?refresh=true
+      response_json_paths:
+          $[0]: ['2017-09-23T06:02:58+00:00', 1.0, 558.0]
diff --git a/gnocchi/tests/functional/gabbits/prometheus_fixtures/031b586e-ebe1-4737-812e-cf0ddf26f5ad.dump b/gnocchi/tests/functional/gabbits/prometheus_fixtures/031b586e-ebe1-4737-812e-cf0ddf26f5ad.dump
new file mode 100644
index 0000000000000000000000000000000000000000..01b67dfa6fcc2b6b200259cf06da9f16d3ca7daa
Binary files /dev/null and b/gnocchi/tests/functional/gabbits/prometheus_fixtures/031b586e-ebe1-4737-812e-cf0ddf26f5ad.dump differ
diff --git a/gnocchi/tests/functional/gabbits/prometheus_fixtures/1ea8f6f7-eebe-49d5-8276-ceb2d56c5ba4.dump b/gnocchi/tests/functional/gabbits/prometheus_fixtures/1ea8f6f7-eebe-49d5-8276-ceb2d56c5ba4.dump
new file mode 100644
index 0000000000000000000000000000000000000000..56cb758ed322af0112eb29f5229bf094f9d20787
Binary files /dev/null and b/gnocchi/tests/functional/gabbits/prometheus_fixtures/1ea8f6f7-eebe-49d5-8276-ceb2d56c5ba4.dump differ
diff --git a/gnocchi/tests/functional/gabbits/prometheus_fixtures/a0c06674-a5ef-4621-883c-e94880a2de02.dump b/gnocchi/tests/functional/gabbits/prometheus_fixtures/a0c06674-a5ef-4621-883c-e94880a2de02.dump
new file mode 100644
index 0000000000000000000000000000000000000000..06c2b9750ac766d4a009de5ca213ecfbcbfbe33c
Binary files /dev/null and b/gnocchi/tests/functional/gabbits/prometheus_fixtures/a0c06674-a5ef-4621-883c-e94880a2de02.dump differ
diff --git a/gnocchi/tests/functional/gabbits/resample-calendar.yaml b/gnocchi/tests/functional/gabbits/resample-calendar.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5b6dc05417386e9e079ecceb2ddf6abc8a4a32e3
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/resample-calendar.yaml
@@ -0,0 +1,122 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    content-type: application/json
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: cookies
+          definition:
+              - granularity: 1 day
+      status: 201
+
+    - name: get metric empty
+      GET: /v1/metric
+      status: 200
+      response_strings:
+          - "[]"
+
+    - name: create valid metric
+      POST: /v1/metric
+      data:
+          archive_policy_name: cookies
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: cookies
+
+    - name: get valid metric id
+      GET: /v1/metric/$RESPONSE['$.id']
+      status: 200
+      response_json_paths:
+        $.archive_policy.name: cookies
+
+    - name: list valid metrics
+      GET: /v1/metric
+      response_json_paths:
+          $[0].archive_policy.name: cookies
+
+    - name: push measurements to metric
+      POST: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures
+      data:
+          - timestamp: "2015-03-01T14:34:12"
+            value: 10
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+          - timestamp: "2015-04-01T14:34:12"
+            value: 2
+          - timestamp: "2015-04-06T14:34:12"
+            value: 4
+          - timestamp: "2015-10-06T14:34:12"
+            value: 7
+          - timestamp: "2016-01-06T14:34:12"
+            value: 12
+          - timestamp: "2016-02-06T14:34:12"
+            value: 4
+      status: 202
+
+    - name: get measurements from metric
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?refresh=true
+      response_json_paths:
+        $:
+          - ["2015-03-01T00:00:00+00:00", 86400.0, 10.0]
+          - ["2015-03-06T00:00:00+00:00", 86400.0, 12.0]
+          - ["2015-04-01T00:00:00+00:00", 86400.0, 2.0]
+          - ["2015-04-06T00:00:00+00:00", 86400.0, 4.0]
+          - ["2015-10-06T00:00:00+00:00", 86400.0, 7.0]
+          - ["2016-01-06T00:00:00+00:00", 86400.0, 12.0]
+          - ["2016-02-06T00:00:00+00:00", 86400.0, 4.0]
+
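+    # Calendar resampling: resample=Y/H/Q/M/W regroups the daily points
+    # into calendar years, half-years, quarters, months and weeks. Note
+    # that the granularity column echoes the calendar unit instead of a
+    # number of seconds.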
+    - name: get measurements from metric and resample calendar year
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=Y&granularity=86400
+      response_json_paths:
+        $:
+          - ["2015-01-01T00:00:00+00:00", "Y", 7.0]
+          - ["2016-01-01T00:00:00+00:00", "Y", 8.0]
+
+    - name: get measurements from metric and resample calendar year-half
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=H&granularity=86400
+      response_json_paths:
+        $:
+          - ["2015-01-01T00:00:00+00:00", "H", 7.0]
+          - ["2015-07-01T00:00:00+00:00", "H", 7.0]
+          - ["2016-01-01T00:00:00+00:00", "H", 8.0]
+
+    - name: get measurements from metric and resample calendar year-quarter
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=Q&granularity=86400
+      response_json_paths:
+        $:
+          - ["2015-01-01T00:00:00+00:00", "Q", 11.0]
+          - ["2015-04-01T00:00:00+00:00", "Q", 3.0]
+          - ["2015-10-01T00:00:00+00:00", "Q", 7.0]
+          - ["2016-01-01T00:00:00+00:00", "Q", 8.0]
+
+    - name: get measurements from metric and resample calendar year-month
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=M&granularity=86400
+      response_json_paths:
+        $:
+          - ["2015-03-01T00:00:00+00:00", "M", 11.0]
+          - ["2015-04-01T00:00:00+00:00", "M", 3.0]
+          - ["2015-10-01T00:00:00+00:00", "M", 7.0]
+          - ["2016-01-01T00:00:00+00:00", "M", 12.0]
+          - ["2016-02-01T00:00:00+00:00", "M", 4.0]
+
+    - name: get measurements from metric and resample calendar year-week
+      GET: /v1/metric/$HISTORY['list valid metrics'].$RESPONSE['$[0].id']/measures?resample=W&granularity=86400
+      response_json_paths:
+        $:
+          - ["2015-03-01T00:00:00+00:00", "W", 11.0]
+          - ["2015-03-29T00:00:00+00:00", "W", 2.0]
+          - ["2015-04-05T00:00:00+00:00", "W", 4.0]
+          - ["2015-10-04T00:00:00+00:00", "W", 7.0]
+          - ["2016-01-03T00:00:00+00:00", "W", 12.0]
+          - ["2016-01-31T00:00:00+00:00", "W", 4.0]
diff --git a/gnocchi/tests/functional/gabbits/resource-aggregation.yaml b/gnocchi/tests/functional/gabbits/resource-aggregation.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2796cbc03b1ff60b888cc6485ec07029258566c1
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/resource-aggregation.yaml
@@ -0,0 +1,163 @@
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: low
+        definition:
+          - granularity: 1 second
+          - granularity: 300 seconds
+      status: 201
+
+    - name: create resource 1
+      POST: /v1/resource/generic
+      data:
+        id: 4ed9c196-4c9f-4ba8-a5be-c9a71a82aac4
+        user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+        project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+      status: 201
+
+    - name: post cpuutil measures 1
+      POST: /v1/resource/generic/4ed9c196-4c9f-4ba8-a5be-c9a71a82aac4/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 43.1
+        - timestamp: "2015-03-06T14:34:12"
+          value: 12
+      status: 202
+
+    - name: get aggregation with no data
+      desc: https://github.com/gnocchixyz/gnocchi/issues/69
+      POST: /v1/aggregation/resource/generic/metric/cpu.util?stop=2012-03-06T00:00:00&fill=0&granularity=300&resample=3600&refresh=true
+      request_headers:
+        x-user-id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+        x-project-id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        content-type: application/json
+      data:
+        =:
+          id: 4ed9c196-4c9f-4ba8-a5be-c9a71a82aac4
+      response_json_paths:
+        $: []
+
+    - name: create resource 2
+      POST: /v1/resource/generic
+      data:
+        id: 1447CD7E-48A6-4C50-A991-6677CC0D00E6
+        user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+        project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+      status: 201
+
+    - name: post cpuutil measures 2
+      POST: /v1/resource/generic/1447CD7E-48A6-4C50-A991-6677CC0D00E6/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 23
+        - timestamp: "2015-03-06T14:34:12"
+          value: 8
+      status: 202
+
+    - name: create resource 3
+      POST: /v1/resource/generic
+      data:
+        id: 0FB0B7CD-9A41-4A76-8B2E-BFC02843506A
+        user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+        project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+        metrics:
+          cpu.util:
+            archive_policy_name: low
+      status: 201
+
+    - name: post cpuutil measures 3
+      POST: /v1/resource/generic/0FB0B7CD-9A41-4A76-8B2E-BFC02843506A/metric/cpu.util/measures
+      data:
+        - timestamp: "2015-03-06T14:33:57"
+          value: 230
+        - timestamp: "2015-03-06T14:34:12"
+          value: 45.41
+      status: 202
+
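+    # The cross-metric aggregation below defaults to the mean: for the
+    # c7f32f1f... project, resources 1 and 2 give (43.1 + 23) / 2 == 33.05
+    # at 14:33:57 and (12 + 8) / 2 == 10.0 at 14:34:12.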
+    - name: aggregate metric with groupby on project_id with filter
+      POST: /v1/aggregation/resource/generic/metric/cpu.util?groupby=project_id&filter=user_id%3D%276c865dd0-7945-4e08-8b27-d0d7f1c2b667%27&refresh=true
+      response_json_paths:
+        $:
+          - measures:
+            - ["2015-03-06T14:30:00+00:00", 300.0, 21.525]
+            - ["2015-03-06T14:33:57+00:00", 1.0, 33.05]
+            - ["2015-03-06T14:34:12+00:00", 1.0, 10.0]
+            group:
+              project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+          - measures:
+            - ["2015-03-06T14:30:00+00:00", 300.0, 137.70499999999998]
+            - ["2015-03-06T14:33:57+00:00", 1.0, 230.0]
+            - ["2015-03-06T14:34:12+00:00", 1.0, 45.41]
+            group:
+              project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+
+    - name: aggregate metric with groupby on project_id
+      POST: /v1/aggregation/resource/generic/metric/cpu.util?groupby=project_id&refresh=true
+      data:
+        =:
+          user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+      response_json_paths:
+        $:
+          - measures:
+            - ["2015-03-06T14:30:00+00:00", 300.0, 21.525]
+            - ["2015-03-06T14:33:57+00:00", 1.0, 33.05]
+            - ["2015-03-06T14:34:12+00:00", 1.0, 10.0]
+            group:
+              project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+          - measures:
+            - ["2015-03-06T14:30:00+00:00", 300.0, 137.70499999999998]
+            - ["2015-03-06T14:33:57+00:00", 1.0, 230.0]
+            - ["2015-03-06T14:34:12+00:00", 1.0, 45.41]
+            group:
+              project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
+
+    - name: aggregate metric with groupby on project_id and invalid group
+      POST: /v1/aggregation/resource/generic/metric/cpu.util?groupby=project_id&groupby=thisisdumb
+      data:
+        =:
+          user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+      status: 400
+      response_strings:
+        - Invalid groupby attribute
+
+    - name: aggregate metric with groupby on project_id and user_id
+      POST: /v1/aggregation/resource/generic/metric/cpu.util?groupby=project_id&groupby=user_id&refresh=true
+      data:
+        =:
+          user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+      response_json_paths:
+        $:
+          - measures:
+            - ['2015-03-06T14:30:00+00:00', 300.0, 21.525]
+            - ['2015-03-06T14:33:57+00:00', 1.0, 33.05]
+            - ['2015-03-06T14:34:12+00:00', 1.0, 10.0]
+            group:
+              user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+              project_id: c7f32f1f-c5ef-427a-8ecd-915b219c66e8
+          - measures:
+            - ['2015-03-06T14:30:00+00:00', 300.0, 137.70499999999998]
+            - ['2015-03-06T14:33:57+00:00', 1.0, 230.0]
+            - ['2015-03-06T14:34:12+00:00', 1.0, 45.41]
+            group:
+              user_id: 6c865dd0-7945-4e08-8b27-d0d7f1c2b667
+              project_id: ee4cfc41-1cdc-4d2f-9a08-f94111d80171
diff --git a/gnocchi/tests/functional/gabbits/resource-type.yaml b/gnocchi/tests/functional/gabbits/resource-type.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..babe6dcd978e1cc5bf70f01bc4c6b1460983b4e6
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/resource-type.yaml
@@ -0,0 +1,888 @@
+#
+# Test the resource type API to achieve coverage of just the
+# ResourceTypesController and ResourceTypeController class code.
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+    - name: list resource type
+      desc: only legacy resource types are present
+      GET: /v1/resource_type
+      response_json_paths:
+          $.`len`: 1
+
+# Some bad cases
+
+    - name: post resource type as non-admin
+      POST: $LAST_URL
+      data:
+          name: my_custom_resource
+      status: 403
+
+    - name: post resource type with existing name
+      POST: /v1/resource_type
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: my_custom_resource
+          attributes:
+              project_id:
+                  type: string
+      status: 400
+
+    - name: post resource type bad string
+      POST: $LAST_URL
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+        accept: application/json
+      data:
+          name: my_custom_resource
+          attributes:
+              foo:
+                  type: string
+                  max_length: 32
+                  min_length: 5
+                  noexist: foo
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        # NOTE(sileht): We would prefer a better message, but voluptuous seems a bit
+        # lost when an Any has many dicts with the same key, here "type"
+        # $.description.reason: "/^extra keys not allowed/"
+        # $.description.reason: "/^not a valid value for dictionary value @ data/"
+        # $.description.detail: ['attributes', 'foo', 'type']
+        # $.description.detail: ['attributes', 'foo', 'noexist']
+
+    - name: post resource type bad min_length value
+      POST: $LAST_URL
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: my_custom_resource
+          attributes:
+              name:
+                  type: string
+                  required: true
+                  max_length: 2
+                  min_length: 5
+      status: 400
+
+    - name: post resource type bad min value
+      POST: $LAST_URL
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: my_custom_resource
+          attributes:
+              int:
+                  type: number
+                  required: false
+                  max: 3
+                  min: 8
+      status: 400
+
+# Create a type
+
+    - name: post resource type
+      POST: $LAST_URL
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: my_custom_resource
+          attributes:
+              name:
+                  type: string
+                  required: true
+                  max_length: 5
+                  min_length: 2
+              foobar:
+                  type: string
+                  required: false
+              uuid:
+                  type: uuid
+              int:
+                  type: number
+                  required: false
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: false
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: false
+      status: 201
+      response_json_paths:
+          $.name: my_custom_resource
+          $.state: active
+          $.attributes:
+              name:
+                  type: string
+                  required: True
+                  max_length: 5
+                  min_length: 2
+              foobar:
+                  type: string
+                  required: False
+                  max_length: 255
+                  min_length: 0
+              uuid:
+                  type: uuid
+                  required: True
+              int:
+                  type: number
+                  required: False
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: False
+                  min:
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+                  max:
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: false
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/resource_type/my_custom_resource
+
+# Control the created type
+
+    - name: relist resource types
+      desc: we have a resource type now
+      GET: $LAST_URL
+      response_json_paths:
+          $.`len`: 2
+          $.[1].name: my_custom_resource
+          $.[1].state: active
+
+    - name: get the custom resource type
+      GET: /v1/resource_type/my_custom_resource
+      response_json_paths:
+          $.name: my_custom_resource
+          $.state: active
+          $.attributes:
+              name:
+                  type: string
+                  required: True
+                  min_length: 2
+                  max_length: 5
+              foobar:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              uuid:
+                  type: uuid
+                  required: True
+              int:
+                  type: number
+                  required: False
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: False
+                  min:
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+                  max:
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: false
+
+# Some bad cases on the type
+
+    - name: delete as non-admin
+      DELETE: $LAST_URL
+      status: 403
+
+# Bad resources for this type
+
+    - name: post invalid resource
+      POST: /v1/resource/my_custom_resource
+      request_headers:
+        accept: application/json
+      data:
+          id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          name: toolong!!!
+          foobar: what
+          uuid: 07eb339e-23c0-4be2-be43-cd8247afae3b
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^length of value must be at most 5 for dictionary value @ data/"
+        $.description.detail: ['name']
+
+    - name: post invalid resource uuid
+      POST: $LAST_URL
+      request_headers:
+        accept: application/json
+      data:
+          id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          name: too
+          foobar: what
+          uuid: really!
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^badly formed hexadecimal UUID string for dictionary value @ data/"
+        $.description.detail: ['uuid']
+
+# Good resources for this type
+
+    - name: post custom resource
+      POST: $LAST_URL
+      data:
+          id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          name: bar
+          foobar: what
+          uuid: e495ebad-be64-46c0-81d6-b079beb48df9
+          int: 1
+          datetime: "2017-05-02T11:11:11Z"
+      status: 201
+      response_json_paths:
+          $.id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $.name: bar
+          $.foobar: what
+          $.datetime: "2017-05-02T11:11:11+00:00"
+
+    - name: patch custom resource
+      PATCH: /v1/resource/my_custom_resource/d11edfca-4393-4fda-b94d-b05a3a1b3747
+      data:
+          name: foo
+      status: 200
+      response_json_paths:
+          $.id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $.name: foo
+          $.foobar: what
+          $.uuid: e495ebad-be64-46c0-81d6-b079beb48df9
+          $.int: 1
+
+    - name: get resource
+      GET: $LAST_URL
+      response_json_paths:
+          $.id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $.name: foo
+          $.foobar: what
+          $.uuid: e495ebad-be64-46c0-81d6-b079beb48df9
+          $.int: 1
+
+    - name: post resource with default
+      POST: /v1/resource/my_custom_resource
+      data:
+          id: c4110aec-6e5c-43fa-b8c5-ffdfbca3ce59
+          name: foo
+          uuid: e495ebad-be64-46c0-81d6-b079beb48df9
+      status: 201
+      response_json_paths:
+          $.id: c4110aec-6e5c-43fa-b8c5-ffdfbca3ce59
+          $.name: foo
+          $.foobar:
+          $.uuid: e495ebad-be64-46c0-81d6-b079beb48df9
+          $.int:
+
+    - name: list resource history
+      GET: /v1/resource/my_custom_resource/d11edfca-4393-4fda-b94d-b05a3a1b3747/history?sort=revision_end:asc-nullslast
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $[0].name: bar
+          $[0].foobar: what
+          $[0].datetime: "2017-05-02T11:11:11+00:00"
+          $[1].id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $[1].name: foo
+          $[1].foobar: what
+
+# CRUD resource type attributes
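+#
+# Attributes are added and removed with RFC 6902 JSON patch documents
+# (content-type application/json-patch+json). When a new required attribute
+# is added, options.fill supplies the value that is backfilled into
+# pre-existing resources, as checked further below.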
+
+    - name: post a new resource attribute
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: add
+          path: /attributes/new-optional-bool
+          value:
+            type: bool
+            required: False
+        - op: add
+          path: /attributes/new-optional-int
+          value:
+            type: number
+            required: False
+            min: 0
+            max: 255
+        - op: add
+          path: /attributes/new-optional-uuid
+          value:
+            type: uuid
+            required: False
+        - op: add
+          path: /attributes/new-optional-datetime
+          value:
+            type: datetime
+            required: False
+        - op: add
+          path: /attributes/newstuff
+          value:
+            type: string
+            required: False
+            min_length: 0
+            max_length: 255
+        - op: add
+          path: /attributes/newfilled
+          value:
+            type: string
+            required: False
+            min_length: 0
+            max_length: 255
+            options:
+              fill: "filled"
+        - op: add
+          path: /attributes/newbool
+          value:
+            type: bool
+            required: True
+            options:
+              fill: True
+        - op: add
+          path: /attributes/newint
+          value:
+            type: number
+            required: True
+            min: 0
+            max: 255
+            options:
+              fill: 15
+        - op: add
+          path: /attributes/newstring
+          value:
+            type: string
+            required: True
+            min_length: 0
+            max_length: 255
+            options:
+              fill: "foobar"
+        - op: add
+          path: /attributes/newuuid
+          value:
+            type: uuid
+            required: True
+            options:
+              fill: "00000000-0000-0000-0000-000000000000"
+        - op: add
+          path: /attributes/newdatetime
+          value:
+            type: datetime
+            required: True
+            options:
+              fill: "2017-10-10T10:10:10Z"
+        - op: remove
+          path: /attributes/foobar
+      status: 200
+      response_json_paths:
+          $.name: my_custom_resource
+          $.attributes:
+              name:
+                  type: string
+                  required: True
+                  min_length: 2
+                  max_length: 5
+              uuid:
+                  type: uuid
+                  required: True
+              int:
+                  type: number
+                  required: False
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: False
+                  min:
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+                  max:
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: False
+              new-optional-bool:
+                  type: bool
+                  required: False
+              new-optional-int:
+                  type: number
+                  required: False
+                  min: 0
+                  max: 255
+              new-optional-uuid:
+                  type: uuid
+                  required: False
+              new-optional-datetime:
+                  type: datetime
+                  required: False
+              newstuff:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newfilled:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newstring:
+                  type: string
+                  required: True
+                  min_length: 0
+                  max_length: 255
+              newbool:
+                  type: bool
+                  required: True
+              newint:
+                  type: number
+                  required: True
+                  min: 0
+                  max: 255
+              newuuid:
+                  type: uuid
+                  required: True
+              newdatetime:
+                  type: datetime
+                  required: True
+
+    - name: post a new resource attribute with missing fill
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: add
+          path: /attributes/missing
+          value:
+            type: bool
+            required: True
+            options: {}
+      status: 400
+      response_strings:
+        - "Invalid input: Option 'fill' of resource attribute missing is invalid: must not be empty if required=True"
+
+    - name: post a new resource attribute with incorrect fill
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: add
+          path: /attributes/incorrect
+          value:
+            type: number
+            required: True
+            options:
+              fill: "a-string"
+      status: 400
+      response_strings:
+        - "Invalid input: Option 'fill' of resource attribute incorrect is invalid: expected Real"
+
+    - name: get the new custom resource type
+      GET: /v1/resource_type/my_custom_resource
+      response_json_paths:
+          $.name: my_custom_resource
+          $.attributes:
+              name:
+                  type: string
+                  required: True
+                  min_length: 2
+                  max_length: 5
+              uuid:
+                  type: uuid
+                  required: True
+              int:
+                  type: number
+                  required: False
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: False
+                  min:
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+                  max:
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: False
+              new-optional-bool:
+                  type: bool
+                  required: False
+              new-optional-int:
+                  type: number
+                  required: False
+                  min: 0
+                  max: 255
+              new-optional-uuid:
+                  type: uuid
+                  required: False
+              new-optional-datetime:
+                  type: datetime
+                  required: False
+              newstuff:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newfilled:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newstring:
+                  type: string
+                  required: True
+                  min_length: 0
+                  max_length: 255
+              newbool:
+                  type: bool
+                  required: True
+              newint:
+                  type: number
+                  required: True
+                  min: 0
+                  max: 255
+              newuuid:
+                  type: uuid
+                  required: True
+              newdatetime:
+                  type: datetime
+                  required: True
+
+    - name: control new attributes of existing resource
+      GET: /v1/resource/my_custom_resource/d11edfca-4393-4fda-b94d-b05a3a1b3747
+      status: 200
+      response_json_paths:
+          $.id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $.name: foo
+          $.newstuff: null
+          $.newfilled: "filled"
+          $.newbool: true
+          $.newint: 15
+          $.newstring: foobar
+          $.newuuid: "00000000-0000-0000-0000-000000000000"
+          $.newdatetime: "2017-10-10T10:10:10+00:00"
+          $.new-optional-bool: null
+          $.new-optional-int: null
+          $.new-optional-uuid: null
+          $.new-optional-datetime: null
+
+    - name: control new attributes of existing resource history
+      GET: /v1/resource/my_custom_resource/d11edfca-4393-4fda-b94d-b05a3a1b3747/history?sort=revision_end:asc-nullslast
+      response_json_paths:
+          $.`len`: 2
+          $[0].id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $[0].name: bar
+          $[0].newstuff: null
+          $[0].newfilled: "filled"
+          $[0].newbool: true
+          $[0].newint: 15
+          $[0].newstring: foobar
+          $[0].newuuid: "00000000-0000-0000-0000-000000000000"
+          $[0].newdatetime: "2017-10-10T10:10:10+00:00"
+          $[0].new-optional-bool: null
+          $[0].new-optional-int: null
+          $[0].new-optional-uuid: null
+          $[0].new-optional-datetime: null
+          $[1].id: d11edfca-4393-4fda-b94d-b05a3a1b3747
+          $[1].name: foo
+          $[1].newstuff: null
+          $[1].newfilled: "filled"
+          $[1].newbool: true
+          $[1].newint: 15
+          $[1].newstring: foobar
+          $[1].newuuid: "00000000-0000-0000-0000-000000000000"
+          $[1].newdatetime: "2017-10-10T10:10:10+00:00"
+          $[1].new-optional-bool: null
+          $[1].new-optional-int: null
+          $[1].new-optional-uuid: null
+          $[1].new-optional-datetime: null
+
+# Invalid patch
+
+    - name: add/delete the same resource attribute
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: add
+          path: /attributes/what
+          value:
+            type: string
+            required: False
+            min_length: 0
+            max_length: 255
+        - op: remove
+          path: /attributes/what
+      status: 200
+      response_json_paths:
+          $.name: my_custom_resource
+          $.attributes:
+              name:
+                  type: string
+                  required: True
+                  min_length: 2
+                  max_length: 5
+              uuid:
+                  type: uuid
+                  required: True
+              int:
+                  type: number
+                  required: False
+                  min: -2
+                  max: 3
+              intnomin:
+                  type: number
+                  required: False
+                  min:
+                  max: 3
+              float:
+                  type: number
+                  required: false
+                  min: -2.3
+                  max:
+              bool:
+                  type: bool
+                  required: false
+              datetime:
+                  type: datetime
+                  required: False
+              newstuff:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newfilled:
+                  type: string
+                  required: False
+                  min_length: 0
+                  max_length: 255
+              newstring:
+                  type: string
+                  required: True
+                  min_length: 0
+                  max_length: 255
+              newbool:
+                  type: bool
+                  required: True
+              newint:
+                  type: number
+                  required: True
+                  min: 0
+                  max: 255
+              newuuid:
+                  type: uuid
+                  required: True
+              newdatetime:
+                  type: datetime
+                  required: True
+              new-optional-bool:
+                  type: bool
+                  required: False
+              new-optional-int:
+                  type: number
+                  required: False
+                  min: 0
+                  max: 255
+              new-optional-uuid:
+                  type: uuid
+                  required: False
+              new-optional-datetime:
+                  type: datetime
+                  required: False
+
+    - name: delete/add the same resource attribute
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: remove
+          path: /attributes/what
+        - op: add
+          path: /attributes/what
+          value:
+            type: string
+            required: False
+            min_length: 0
+            max_length: 255
+      status: 400
+      response_strings:
+       - "can't remove non-existent object 'what'"
+
+    - name: patch a resource attribute replace
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+          accept: application/json
+      data:
+        - op: replace
+          path: /attributes/newstuff
+          value:
+            type: string
+            required: False
+            min_length: 0
+            max_length: 255
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.detail: ['0', 'op']
+        $.description.reason: "/^not a valid value for dictionary value @ data/"
+
+    - name: patch a resource attribute type not exist
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: add
+          path: /attributes/newstuff
+          value:
+            type: notexist
+            required: False
+            min_length: 0
+            max_length: 255
+      status: 400
+
+    - name: patch a resource attribute type unknown
+      PATCH: /v1/resource_type/my_custom_resource
+      request_headers:
+          # User admin
+          authorization: "basic YWRtaW46"
+          content-type: application/json-patch+json
+      data:
+        - op: remove
+          path: /attributes/unknown
+      status: 400
+      response_strings:
+       - "can't remove non-existent object 'unknown'"
+
+# Ensure we can't delete the type
+
+    - name: delete in use resource_type
+      DELETE: /v1/resource_type/my_custom_resource
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 400
+      response_strings:
+          - Resource type my_custom_resource is still in use
+
+# Delete associated resources
+
+    - name: delete the resource
+      DELETE: /v1/resource/my_custom_resource/d11edfca-4393-4fda-b94d-b05a3a1b3747
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+    - name: delete the second resource
+      DELETE: /v1/resource/my_custom_resource/c4110aec-6e5c-43fa-b8c5-ffdfbca3ce59
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+# Now we can delete the type
+
+    - name: delete the custom resource type
+      DELETE: /v1/resource_type/my_custom_resource
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      status: 204
+
+    - name: delete non-existing custom resource type
+      DELETE: $LAST_URL
+      request_headers:
+        authorization: "basic YWRtaW46"
+      status: 404
+
+    - name: delete missing custom resource type utf8
+      DELETE: /v1/resource_type/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      request_headers:
+        authorization: "basic YWRtaW46"
+      status: 404
+      response_strings:
+          - Resource type ✔éñ☃ does not exist
+
+# Check that we can re-add and delete the same resource type again
+
+    - name: post resource type again
+      POST: /v1/resource_type
+      request_headers:
+        authorization: "basic YWRtaW46"
+      data:
+          name: my_custom_resource
+      status: 201
+
+    - name: delete the custom resource type again
+      DELETE: /v1/resource_type/my_custom_resource
+      request_headers:
+        authorization: "basic YWRtaW46"
+      status: 204
diff --git a/gnocchi/tests/functional/gabbits/resource.yaml b/gnocchi/tests/functional/gabbits/resource.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2b081f3d7f583c98d43a5f3e244b8dc234867b64
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/resource.yaml
@@ -0,0 +1,872 @@
+#
+# Test the resource API to achieve coverage of just the
+# ResourcesController and ResourceController class code.
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+# We will need an archive policy for use in later tests, so we create it
+# here. This could be done in a fixture, but since the API allows it we
+# may as well use it.
+
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+      status: 201
+
+    - name: create archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: test_rule
+        metric_pattern: "disk.io.*"
+        archive_policy_name: medium
+      status: 201
+
+    - name: root of all
+      GET: /
+      response_headers:
+          content-type: /application/json/
+      response_json_paths:
+          $.versions[0].links[0].href: $SCHEME://$NETLOC/v1/
+
+    - name: root of v1
+      GET: /v1
+      redirects: true
+      response_json_paths:
+          $.version: "1.0"
+          $.links.`len`: 13
+          $.links[0].href: $SCHEME://$NETLOC/v1
+          $.links[9].href: $SCHEME://$NETLOC/v1/resource
+
+    - name: root of resource
+      GET: /v1/resource
+      response_json_paths:
+          $.generic: $SCHEME://$NETLOC/v1/resource/generic
+
+    - name: typo of resource
+      GET: /v1/resoue
+      status: 404
+
+    - name: typo of resource extra
+      GET: /v1/resource/foobar
+      status: 404
+
+# Check that GETting a list of resources demonstrates the expected
+# behaviors, notably with regard to content negotiation.
+
+    - name: generic resource list
+      desc: there are no generic resources yet
+      GET: /v1/resource/generic
+      response_strings:
+          - "[]"
+
+    - name: generic resource bad accept
+      desc: Expect 406 on bad accept type
+      GET: $LAST_URL
+      request_headers:
+          accept: text/plain
+      status: 406
+      response_strings:
+          - 406 Not Acceptable
+
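+    # With "text/plain, application/json; q=0.8" the preferred text/plain
+    # is unavailable, so the server falls back to the lower-weighted but
+    # supported application/json rather than returning 406.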
+    - name: generic resource complex accept
+      desc: failover accept media type appropriately
+      GET: $LAST_URL
+      request_headers:
+          accept: text/plain, application/json; q=0.8
+      response_strings:
+          - "[]"
+
+# Try creating a new generic resource in various ways.
+
+    - name: generic resource
+      desc: there are no generic resources yet
+      GET: /v1/resource/generic
+      response_strings:
+          - "[]"
+
+    - name: post generic resource
+      POST: $LAST_URL
+      data:
+          id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/resource/generic/f93450f2-d8a5-4d67-9985-02511241e7d1
+          content-type: /^application\/json/
+      response_json_paths:
+          $.creator: foobar
+          $.user_id: 0fbb231484614b1a80131fc22f6afc9c
+
+    - name: post same resource refuse
+      desc: We can only post one identified resource once
+      POST: $LAST_URL
+      data:
+          id: f93450f2-d8a5-4d67-9985-02511241e7d1
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 409
+
+    - name: post generic resource bad content type
+      POST: $LAST_URL
+      request_headers:
+          content-type: text/plain
+      data: '{"id": "f93450f2-d8a5-4d67-9985-02511241e7d1", "started_at": "2014-01-03T02:02:02.000000", "user_id": "0fbb231484614b1a80131fc22f6afc9c", "project_id": "f3d41b770cc14f0bb94a1d5be9c0e3ea"}'
+      status: 415
+
+# Create a new generic resource and demonstrate that including no data
+# gets a useful 400 response.
+
+    - name: post generic resource no data
+      POST: /v1/resource/generic
+      status: 400
+
+    - name: post generic with invalid metric name
+      POST: $LAST_URL
+      data:
+          metrics:
+              "disk/iops":
+                  archive_policy_name: medium
+      status: 400
+      response_strings:
+          - "'/' is not supported in metric name"
+
+    - name: post generic resource to modify
+      POST: $LAST_URL
+      data:
+          id: 75C44741-CC60-4033-804E-2D3098C7D2E9
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+      response_json_paths:
+          $.metrics: {} # empty dictionary
+
+# PATCH that generic resource to change its attributes and to
+# associate metrics. If a metric does not exist, there should be a
+# graceful failure.
+    - name: patch generic resource
+      PATCH: $LOCATION
+      data:
+          user_id: foobar
+      status: 200
+      response_json_paths:
+          user_id: foobar
+
+    - name: patch generic resource with same data
+      desc: Ensure no useless revision has been created
+      PATCH: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9
+      data:
+          user_id: foobar
+      status: 200
+      response_json_paths:
+          user_id: foobar
+          revision_start: $RESPONSE['$.revision_start']
+
+    - name: patch generic resource with id
+      PATCH: $LAST_URL
+      request_headers:
+        accept: application/json
+      data:
+          id: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed @ data/"
+        $.description.detail: ["id"]
+
+    - name: patch generic with metrics
+      PATCH: $LAST_URL
+      data:
+          metrics:
+              disk.iops:
+                  archive_policy_name: medium
+      status: 200
+      response_strings:
+        - '"disk.iops": '
+
+    - name: get generic history
+      desc: Ensure we can get the history
+      GET: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9/history?sort=revision_end:asc-nullslast
+      response_json_paths:
+        $.`len`: 2
+        $[1].revision_end: null
+        $[1].metrics.'disk.iops': $RESPONSE["metrics.'disk.iops'"]
+
+    - name: get generic history with links page 1
+      desc: Ensure we can get the history
+      GET: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9/history?sort=revision_end:asc-nullslast&limit=1
+      response_headers:
+        link: "<$SCHEME://$NETLOC/v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9/history?limit=1&marker=75c44741-cc60-4033-804e-2d3098c7d2e9%401&sort=revision_end%3Aasc-nullslast>; rel=\"next\""
+      response_json_paths:
+        $.`len`: 1
+
+    - name: get generic history with links page 2
+      desc: Ensure we can get the history
+      GET: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9/history?limit=1&marker=75c44741-cc60-4033-804e-2d3098c7d2e9@1&sort=revision_end:asc-nullslast
+      response_json_paths:
+        $.`len`: 1
+        $[0].revision_end: null
+
+    - name: patch generic bad metric association
+      PATCH: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9
+      data:
+          metrics:
+              disk.iops: f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea
+      status: 400
+      response_strings:
+          - Metric f3d41b77-0cc1-4f0b-b94a-1d5be9c0e3ea does not exist
+
+    - name: patch generic with bad archive policy
+      PATCH: $LAST_URL
+      data:
+          metrics:
+              disk.iops:
+                  archive_policy_name: noexist
+      status: 400
+      response_strings:
+          - Archive policy noexist does not exist
+
+    - name: patch generic with no archive policy rule
+      PATCH: $LAST_URL
+      data:
+          metrics:
+              disk.iops: {}
+      status: 400
+      response_strings:
+          - No archive policy name specified and no archive policy rule found matching the metric name disk.iops
+
+    - name: patch generic with archive policy rule
+      PATCH: $LAST_URL
+      data:
+          metrics:
+              disk.io.rate: {}
+      status: 200
+
+    - name: get patched resource
+      desc: confirm the patched resource is properly patched
+      GET: $LAST_URL
+      response_json_paths:
+          user_id: foobar
+
+    - name: patch resource empty dict
+      desc: an empty dict in patch is an existence check
+      PATCH: $LAST_URL
+      data: "{}"
+      status: 200
+      response_json_paths:
+          user_id: foobar
+
+    - name: patch resource without change with metrics in response
+      desc: an empty dict in patch is an existence check
+      PATCH: $LAST_URL
+      data: "{}"
+      status: 200
+      response_json_paths:
+          $.metrics.'disk.io.rate': $RESPONSE["$.metrics.'disk.io.rate'"]
+
+    - name: patch generic with invalid metric name
+      PATCH: $LAST_URL
+      data:
+          metrics:
+              "disk/iops":
+                  archive_policy_name: medium
+      status: 400
+      response_strings:
+          - "'/' is not supported in metric name"
+
+# Failure modes for history
+
+    - name: post generic history
+      desc: should not work
+      POST: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9/history
+      status: 405
+
+    - name: delete generic history
+      desc: should not work
+      DELETE: $LAST_URL
+      status: 405
+
+# Failure modes for PATCHing a resource
+
+    - name: patch resource no data
+      desc: providing no data is an error
+      PATCH: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9
+      status: 400
+      response_strings:
+          - "Unable to decode body:"
+
+    - name: patch resource bad data
+      desc: providing data that is not a dict is an error
+      request_headers:
+        accept: application/json
+      PATCH: $LAST_URL
+      status: 400
+      data:
+          - Beer and pickles
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected a dictionary/"
+        $.description.detail: []
+
+    - name: patch noexist resource
+      desc: "patching something that doesn't exist is a 404"
+      PATCH: /v1/resource/generic/77777777-CC60-4033-804E-2D3098C7D2E9
+      status: 404
+
+# GET single resource failure modes
+
+    - name: get noexist resource
+      desc: if a resource does not exist 404
+      GET: $LAST_URL
+      status: 404
+      response_strings:
+          - The resource could not be found.
+
+    - name: get bad resource id
+      desc: https://bugs.launchpad.net/gnocchi/+bug/1425588
+      GET:  /v1/resource/generic/noexist
+      status: 404
+      response_strings:
+          - The resource could not be found.
+
+    - name: get metrics for this non-existent resource
+      GET: /v1/resource/generic/77777777-CC60-4033-804E-2D3098C7D2E9/metric/cpu.util
+      status: 404
+
+# List resources
+
+    - name: list generic resources
+      GET: /v1/resource/generic
+      response_json_paths:
+          $[0].user_id: 0fbb231484614b1a80131fc22f6afc9c
+          $[-1].user_id: foobar
+
+    - name: list generic resources with attrs param
+      GET: /v1/resource/generic?attrs=id&attrs=started_at&attrs=user_id
+      response_json_paths:
+          $[0].`len`: 3
+          $[0].id: $RESPONSE['$[0].id']
+          $[0].started_at: $RESPONSE['$[0].started_at']
+          $[0].user_id: $RESPONSE['$[0].user_id']
+          $[1].`len`: 3
+
+    - name: list generic resources with invalid attrs param
+      GET: /v1/resource/generic?attrs=id&attrs=foo&attrs=bar
+      response_json_paths:
+          $[0].`len`: 1
+          $[0].id: $RESPONSE['$[0].id']
+          $[1].`len`: 1
+
+    - name: list generic resources without attrs param
+      GET: /v1/resource/generic
+      response_json_paths:
+          $[0].`len`: 13
+          $[1].`len`: 13
+
+    - name: list generic resources without attrs header
+      GET: /v1/resource/generic
+      request_headers:
+          Accept: "application/json"
+      response_json_paths:
+          $[0].`len`: 13
+          $[1].`len`: 13
+
+    - name: list all resources
+      GET: /v1/resource/generic
+      response_strings:
+          - '"type": "generic"'
+
+# Metric handling when POSTing resources.
+
+    - name: post new generic with non-existent metrics
+      POST: /v1/resource/generic
+      data:
+          id: 85C44741-CC60-4033-804E-2D3098C7D2E9
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util: 10
+      status: 400
+
+    - name: post new generic with metrics bad policy
+      POST: $LAST_URL
+      data:
+          id: 85C44741-CC60-4033-804E-2D3098C7D2E9
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: noexist
+      status: 400
+
+    - name: post new generic with metrics no policy rule
+      POST: $LAST_URL
+      data:
+          id: 85BABE39-F7F7-455A-877B-62C22E11AA40
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util: {}
+      status: 400
+      response_strings:
+        - No archive policy name specified and no archive policy rule found matching the metric name cpu.util
+
+    - name: post new generic with metrics using policy rule
+      POST: $LAST_URL
+      data:
+          id: 85BABE39-F7F7-455A-877B-62C22E11AA40
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              disk.io.rate: {}
+      status: 201
+
+    - name: post new generic with metrics
+      POST: $LAST_URL
+      data:
+          id: d13982cb-4cce-4f84-a96e-7581be1e599c
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              disk.util:
+                  archive_policy_name: medium
+      status: 201
+      response_json_paths:
+        creator: foobar
+
+    - name: post new generic with metrics and un-normalized user/project id from keystone middleware
+      POST: $LAST_URL
+      data:
+          id: 85C44741-CC60-4033-804E-2D3098C7D2E9
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 201
+      response_json_paths:
+        creator: foobar
+
+    - name: get metrics for this resource
+      desc: with async measure handling this is a null test
+      GET: /v1/resource/generic/$RESPONSE['$.id']/metric/cpu.util/measures
+      response_strings:
+          - "[]"
+
+# Interrogate the NamedMetricController
+
+    - name: list the generics
+      GET: /v1/resource/generic
+
+    - name: request metrics from one of the generics
+      GET: /v1/resource/generic/$RESPONSE['$[-1].id']/metric
+      response_json_paths:
+          $.`len`: 1
+          $[0].name: cpu.util
+          $[0].resource_id: 85c44741-cc60-4033-804e-2d3098c7d2e9
+
+    - name: request metrics from non uuid metrics
+      desc: 404 from GenericResourceController
+      GET: /v1/resource/generic/not.a.uuid/metric
+      status: 404
+
+    - name: request cpuutil metric from generic
+      GET: /v1/resource/generic/85C44741-CC60-4033-804E-2D3098C7D2E9/metric/cpu.util
+      response_json_paths:
+          $.archive_policy.name: medium
+
+    - name: try post cpuutil metric to generic
+      POST: $LAST_URL
+      status: 405
+
+    - name: request cpuutil measures from generic
+      desc: with async measure handling this is a null test
+      GET: /v1/resource/generic/85C44741-CC60-4033-804E-2D3098C7D2E9/metric/cpu.util/measures
+      response_strings:
+          - "[]"
+
+    - name: post cpuutil measures
+      POST: $LAST_URL
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+      response_headers:
+        content-length: 0
+
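+    # refresh=true forces any still-unprocessed measures to be aggregated
+    # before the GET returns, so the values accepted above with a 202 show
+    # up without polling. Each returned datapoint is a
+    # [timestamp, granularity, value] triple.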
+    - name: request cpuutil measures again
+      GET: $LAST_URL?refresh=true
+      response_json_paths:
+          $[0][0]: "2015-03-06T14:33:57+00:00"
+          $[0][1]: 1.0
+          $[0][2]: 43.100000000000001
+
+    - name: post metric at generic
+      POST: /v1/resource/generic/85C44741-CC60-4033-804E-2D3098C7D2E9/metric
+      status: 200
+      data:
+          electron.spin:
+              archive_policy_name: medium
+      response_json_paths:
+          $[/name][1].name: electron.spin
+          $[/name][1].resource_id: 85c44741-cc60-4033-804e-2d3098c7d2e9
+
+    - name: post metric at generic with empty definition
+      POST: $LAST_URL
+      status: 400
+      data:
+          foo.bar: {}
+      response_strings:
+        - No archive policy name specified and no archive policy rule found matching the metric name foo.bar
+
+    - name: post metric at generic using archive policy rule
+      POST: $LAST_URL
+      status: 200
+      data:
+          disk.io.rate: {}
+      response_json_paths:
+          $[/name][1].name: disk.io.rate
+          $[/name][1].resource_id: 85c44741-cc60-4033-804e-2d3098c7d2e9
+
+    - name: duplicate metrics at generic
+      POST: $LAST_URL
+      status: 409
+      data:
+          electron.spin:
+              archive_policy_name: medium
+      response_strings:
+          - Named metric electron.spin already exists
+
+    - name: post metrics at generic bad policy
+      POST: $LAST_URL
+      status: 400
+      data:
+          electron.charge:
+              archive_policy_name: high
+      response_strings:
+          - Archive policy high does not exist
+
+# Check bad timestamps
+
+    - name: post new generic with bad timestamp
+      POST: /v1/resource/generic
+      data:
+          id: 95C44741-CC60-4033-804E-2D3098C7D2E9
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+          ended_at: "2001-12-15T02:59:43"
+          started_at: "2014-12-15T02:59:43"
+      status: 400
+      response_strings:
+          - Start timestamp cannot be after end timestamp
+
+# Post metrics to unknown resource
+
+    - name: post to non uuid metrics
+      desc: 404 from GenericResourceController
+      POST: /v1/resource/generic/not.a.uuid/metric
+      data:
+          some.metric:
+              archive_policy_name: medium
+      status: 404
+
+    - name: post to missing uuid metrics
+      desc: 404 from NamedMetricController
+      POST: /v1/resource/generic/d5a5994e-ee90-11e4-88cf-685b35afa334/metric
+      data:
+          some.metric:
+              archive_policy_name: medium
+      status: 404
+
+# Post measurements on unknown things
+
+    - name: post measure on unknown metric
+      desc: 404 from NamedMetricController with metric error
+      POST: /v1/resource/generic/85C44741-CC60-4033-804E-2D3098C7D2E9/metric/unknown/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+      status: 404
+      response_strings:
+          - Metric unknown does not exist
+
+# DELETE-ing generics
+
+    - name: delete generic
+      DELETE: /v1/resource/generic/75C44741-CC60-4033-804E-2D3098C7D2E9
+      status: 204
+
+    - name: delete noexist generic
+      DELETE: /v1/resource/generic/77777777-CC60-4033-804E-2D3098C7D2E9
+      status: 404
+
+# Delete a batch of resources by attributes filter
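+#
+# The DELETE body is a resource search filter: binary operators such as eq
+# take an {attribute: value} mapping, "in" takes {attribute: [values]}, and
+# boolean operators like "and" combine sub-filters. The same filter may
+# instead be passed URL-encoded in the ?filter= query string, as the last
+# test of this section shows.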
+
+    - name: create resource one
+      desc: before testing batch delete, create some resources, using a float in started_at
+      POST: /v1/resource/generic
+      data:
+          id: f93450f2-aaaa-4d67-9985-02511241e7d1
+          started_at: 1388714522.0
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource two
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: f93450f2-bbbb-4d67-9985-02511241e7d1
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource three
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: f93450f2-cccc-4d67-9985-02511241e7d1
+          started_at: "2014-08-04T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource four
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: f93450f2-dddd-4d67-9985-02511241e7d1
+          started_at: "2014-08-04T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource five
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: f93450f2-eeee-4d67-9985-02511241e7d1
+          started_at: "2015-08-14T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource six
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: f93450f2-ffff-4d67-9985-02511241e7d1
+          started_at: "2015-08-14T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource seven
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: cd09ecce-3e17-4733-ad32-8a6b2034dcfd
+          started_at: "2015-08-14T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: create resource eight
+      desc: before testing batch delete, create some resources
+      POST: $LAST_URL
+      data:
+          id: 05fde895-cf8a-475c-90a7-a4c8598d935d
+          started_at: "2015-08-14T00:00:00.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: get resource one
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-aaaa-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource two
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-bbbb-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource three
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-cccc-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource four
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-dddd-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource five
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-eeee-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource six
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/f93450f2-ffff-4d67-9985-02511241e7d1
+      status: 200
+
+    - name: get resource seven
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/cd09ecce-3e17-4733-ad32-8a6b2034dcfd
+      status: 200
+
+    - name: get resource eight
+      desc: ensure the resource exists
+      GET: /v1/resource/generic/05fde895-cf8a-475c-90a7-a4c8598d935d
+      status: 200
+
+    - name: delete random data structure
+      desc: deleting with an arbitrary, invalid body is a 400
+      DELETE: /v1/resource/generic
+      data:
+          resource_ids:
+              []
+          attrs:
+              test
+      status: 400
+
+    - name: delete something empty
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data: ""
+      status: 400
+
+    - name: delete something empty a
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          in:
+             id: []
+      status: 400
+      response_strings:
+        - length of value must be at least 1
+
+    - name: delete something empty b
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          in: {}
+      status: 400
+
+    - name: delete something empty c
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          in:
+              and: []
+      status: 400
+
+    - name: delete something empty d
+      desc: use empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          in:
+              and:
+                  - or: []
+                  - id:
+                      =: ""
+      status: 400
+
+    - name: delete something empty e
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          and: []
+      status: 400
+
+    - name: delete something empty f
+      desc: use an empty filter for delete
+      DELETE: $LAST_URL
+      data:
+          and:
+              - in:
+                  id: []
+              - started_at: ""
+      status: 400
+
+    - name: delete batch of resources filter by started_at
+      desc: delete the created resources
+      DELETE: /v1/resource/generic
+      data:
+          eq:
+            started_at: "2014-08-04"
+      status: 200
+      response_json_paths:
+        $.deleted: 2
+
+    - name: delete batch of resources filter by multiple ids
+      desc: delete the created resources
+      DELETE: /v1/resource/generic
+      data:
+          in:
+            id:
+              - f93450f2-aaaa-4d67-9985-02511241e7d1
+              - f93450f2-bbbb-4d67-9985-02511241e7d1
+      status: 200
+      response_json_paths:
+        $.deleted: 2
+
+
+    - name: delete both existent and non-existent data
+      desc: delete existing and non-existent data
+      DELETE: $LAST_URL
+      data:
+          in:
+            id:
+              - f93450f2-eeee-4d67-9985-02511241e7d1
+              - f93450f2-ffff-4d67-9985-02511241e7d1
+              - f93450f2-yyyy-4d67-9985-02511241e7d1
+              - f93450f2-xxxx-4d67-9985-02511241e7d1
+      status: 200
+      response_json_paths:
+        $.deleted: 2
+
+    - name: delete multiple non-existent resources
+      desc: delete a batch of non-existent resources
+      DELETE: $LAST_URL
+      data:
+          in:
+            id:
+              - f93450f2-zzzz-4d67-9985-02511241e7d1
+              - f93450f2-kkkk-4d67-9985-02511241e7d1
+      status: 200
+      response_json_paths:
+        $.deleted: 0
+
+    - name: delete multiple with query string
+      DELETE: $LAST_URL?filter=id%20in%20%5Bcd09ecce-3e17-4733-ad32-8a6b2034dcfd%2C%2005fde895-cf8a-475c-90a7-a4c8598d935d%5D
+      response_json_paths:
+        $.deleted: 2
+
diff --git a/gnocchi/tests/functional/gabbits/search-metric.yaml b/gnocchi/tests/functional/gabbits/search-metric.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f4c97d4f3cd0997a24eb45024322b3d127c75bbf
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/search-metric.yaml
@@ -0,0 +1,141 @@
+#
+# Test the search API to achieve coverage of just the
+# SearchController and SearchMetricController class code.
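+#
+# The granularity query parameter accepts both unit-suffixed strings
+# ("1s", "1second") and plain numbers of seconds; requesting a granularity
+# that is not part of the metric's archive policy is a 400.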
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+        name: high
+        definition:
+            - granularity: 1 second
+              timespan: 1 hour
+            - granularity: 2 second
+              timespan: 1 hour
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/high
+      status: 201
+
+    - name: create metric
+      POST: /v1/metric
+      data:
+        archive_policy_name: high
+      status: 201
+
+    - name: post measures
+      desc: for later use
+      POST: /v1/batch/metrics/measures
+      data:
+        $RESPONSE['$.id']:
+            - timestamp: "2014-10-06T14:34:12"
+              value: 12
+            - timestamp: "2014-10-06T14:34:14"
+              value: 12
+            - timestamp: "2014-10-06T14:34:16"
+              value: 12
+            - timestamp: "2014-10-06T14:34:18"
+              value: 12
+            - timestamp: "2014-10-06T14:34:20"
+              value: 12
+            - timestamp: "2014-10-06T14:34:22"
+              value: 12
+            - timestamp: "2014-10-06T14:34:24"
+              value: 12
+            - timestamp: "2014-10-06T14:34:26"
+              value: 12
+            - timestamp: "2014-10-06T14:34:28"
+              value: 12
+            - timestamp: "2014-10-06T14:34:30"
+              value: 12
+            - timestamp: "2014-10-06T14:34:32"
+              value: 12
+            - timestamp: "2014-10-06T14:34:34"
+              value: 12
+      status: 202
+
+    - name: get metric id
+      GET: /v1/metric
+      status: 200
+      response_json_paths:
+          $[0].archive_policy.name: high
+
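+    # $HISTORY['<test name>'].$RESPONSE['<jsonpath>'] lets a later test read
+    # an earlier response; the searches below reuse the metric id captured
+    # by the test above.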
+    - name: search with one correct granularity
+      POST: /v1/search/metric?metric_id=$HISTORY['get metric id'].$RESPONSE['$[0].id']&granularity=1s
+      data:
+          "=": 12
+      status: 200
+      poll:
+        count: 10
+        delay: 1
+
+    - name: search with multiple correct granularities
+      POST: /v1/search/metric?metric_id=$HISTORY['get metric id'].$RESPONSE['$[0].id']&granularity=1second&granularity=2s
+      data:
+          "=": 12
+      status: 200
+      poll:
+        count: 10
+        delay: 1
+
+    - name: search with correct and incorrect granularities
+      POST: /v1/search/metric?metric_id=$HISTORY['get metric id'].$RESPONSE['$[0].id']&granularity=1s&granularity=300
+      data:
+          "=": 12
+      status: 400
+      request_headers:
+          accept: application/json
+      response_json_paths:
+          $.description.cause: Aggregation does not exist
+          $.description.detail.granularity: 300
+          $.description.detail.aggregation_method: mean
+
+
+    - name: search with incorrect granularity
+      POST: /v1/search/metric?metric_id=$HISTORY['get metric id'].$RESPONSE['$[0].id']&granularity=300
+      data:
+          "=": 12
+      status: 400
+      request_headers:
+          accept: application/json
+      response_json_paths:
+          $.description.cause: Aggregation does not exist
+          $.description.detail.granularity: 300
+          $.description.detail.aggregation_method: mean
+
+    - name: search measure with wrong start
+      POST: /v1/search/metric?metric_id=$HISTORY['get metric id'].$RESPONSE['$[0].id']&start=foobar
+      data:
+        ∧:
+          - ≥: 1000
+      status: 400
+      response_strings:
+        - Invalid value for start
+
+    - name: create metric 2
+      POST: /v1/metric
+      data:
+        archive_policy_name: "high"
+      status: 201
+
+    - name: search measure with wrong stop
+      POST: /v1/search/metric?metric_id=$RESPONSE['$.id']&stop=foobar
+      data:
+        ∧:
+          - ≥: 1000
+      status: 400
+      response_strings:
+        - Invalid value for stop
diff --git a/gnocchi/tests/functional/gabbits/search.yaml b/gnocchi/tests/functional/gabbits/search.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d414acf1a48794cc659c65b408a35e6b92bb6d52
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/search.yaml
@@ -0,0 +1,260 @@
+#
+# Test the search API to achieve coverage of just the
+# SearchController and SearchResourceController class code.
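+#
+# The query body is a filter expression: binary operators (=, eq, ne, like,
+# in) map an attribute to a value, while boolean operators (and, or, not)
+# nest sub-filters; the same grammar has a string form accepted through the
+# ?filter= query parameter.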
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+    - name: typo of search
+      GET: /v1/search/notexists
+      status: 404
+
+    - name: typo of search in resource
+      GET: /v1/search/resource/foobar
+      status: 404
+
+    # FIXME(sileht): this test looks wrong, it talks about invalidity
+    # but asserts it returns 200...
+    - name: search with invalid uuid
+      POST: /v1/search/resource/generic
+      data:
+        =:
+          id: "cd9eef"
+
+    - name: search invalid and value
+      request_headers:
+        accept: application/json
+      desc: and should be followed by a list, not a dict
+      POST: /v1/search/resource/generic
+      data:
+        and:
+          project_id: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected a list for dictionary value @ data/"
+        $.description.detail: ["and"]
+
+    - name: search like id
+      request_headers:
+        accept: application/json
+      POST: /v1/search/resource/generic
+      data:
+        like:
+          id: fa%
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed @ data/"
+        $.description.detail: ["like", "id"]
+
+    - name: search like list id
+      request_headers:
+        accept: application/json
+      POST: /v1/search/resource/generic
+      data:
+        like:
+          id:
+            - fa%
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed @ data/"
+        $.description.detail: ["like", "id"]
+
+    - name: search invalid ne value
+      request_headers:
+        accept: application/json
+      desc: attribute value for a binary operator must not be a dict or list
+      POST: /v1/search/resource/generic
+      data:
+        ne:
+          project_id:
+            - foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected (unicode|str) for dictionary value @ data/"
+        $.description.detail: ["ne", "project_id"]
+
+    - name: search invalid not value
+      request_headers:
+        accept: application/json
+      desc: a unary operator must be followed by a dict, not a list
+      POST: /v1/search/resource/generic
+      data:
+        not:
+          - project_id: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected a dictionary for dictionary value @ data/"
+        $.description.detail: ["not"]
+
+    - name: post generic resource
+      POST: /v1/resource/generic
+      data:
+          id: faef212f-0bf4-4030-a461-2186fef79be0
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: post generic resource twice
+      POST: /v1/resource/generic
+      data:
+          id: df7e5e75-6a1d-4ff7-85cb-38eb9d75da7e
+          started_at: "2014-01-03T02:02:02.000000"
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 201
+
+    - name: search in_
+      POST: /v1/search/resource/generic
+      data:
+        in:
+          id:
+            - faef212f-0bf4-4030-a461-2186fef79be0
+            - df7e5e75-6a1d-4ff7-85cb-38eb9d75da7e
+      response_json_paths:
+        $.`len`: 2
+
+    - name: search eq created_by_project_id
+      POST: /v1/search/resource/generic
+      data:
+        eq:
+          created_by_project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      response_json_paths:
+        $.`len`: 0
+
+    - name: search eq creator
+      POST: /v1/search/resource/generic
+      data:
+        eq:
+          creator: "foobar"
+      response_json_paths:
+        $.`len`: 2
+
+    - name: search like
+      POST: /v1/search/resource/generic
+      data:
+        like:
+          creator: foo%
+      response_json_paths:
+        $.`len`: 2
+
+    - name: search invalid query string
+      POST: /v1/search/resource/generic?filter=id%20%3D%3D%20foobar
+      status: 400
+      response_strings:
+        - "Invalid filter: Expected"
+
+    - name: search in_ query string
+      POST: /v1/search/resource/generic?filter=id%20in%20%5Bfaef212f-0bf4-4030-a461-2186fef79be0%2C%20df7e5e75-6a1d-4ff7-85cb-38eb9d75da7e%5D
+      response_json_paths:
+        $.`len`: 2
+
+    - name: search not in_ query string
+      POST: /v1/search/resource/generic?filter=not%20id%20in%20%5Bfaef212f-0bf4-4030-a461-2186fef79be0%2C%20df7e5e75-6a1d-4ff7-85cb-38eb9d75da7e%5D
+      response_json_paths:
+        $.`len`: 0
+
+    - name: search empty in_
+      POST: /v1/search/resource/generic
+      data:
+        in:
+          id: []
+      status: 400
+      response_strings:
+        - length of value must be at least 1
+
+    - name: search empty in_ query string
+      POST: /v1/search/resource/generic?filter=id%20in%20%5B%5D
+      status: 400
+      response_strings:
+        - length of value must be at least 1
+
+    - name: search empty query
+      POST: /v1/search/resource/generic
+      data: {}
+      response_json_paths:
+        $.`len`: 2
+
+    - name: search empty query page 1
+      POST: /v1/search/resource/generic?limit=1
+      data: {}
+      response_headers:
+        link: "<$SCHEME://$NETLOC/v1/search/resource/generic?limit=1&marker=faef212f-0bf4-4030-a461-2186fef79be0&sort=revision_start%3Aasc&sort=started_at%3Aasc>; rel=\"next\""
+      response_json_paths:
+        $.`len`: 1
+
+    - name: search empty query last page
+      POST: /v1/search/resource/generic?marker=faef212f-0bf4-4030-a461-2186fef79be0&sort=revision_start:asc&sort=started_at:asc
+      data: {}
+      response_forbidden_headers:
+        - link
+      response_json_paths:
+        $.`len`: 1
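+
+    # Pagination is marker-based: each page's link header carries the id of
+    # its last resource as the marker, and the final page omits the link
+    # header entirely.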
+
+    - name: post generic resource with project/user
+      POST: /v1/resource/generic
+      data:
+          id: 95573760-b085-4e69-9280-91f66fc3ed3c
+          started_at: "2014-01-03T02:02:02.000000"
+      status: 201
+
+    - name: search empty query again
+      POST: /v1/search/resource/generic
+      data: {}
+      response_json_paths:
+        $.`len`: 3
+
+    - name: search all resource not foobar
+      POST: /v1/search/resource/generic
+      data:
+        ne:
+          project_id: foobar
+      response_json_paths:
+        $.`len`: 3
+
+    - name: search all resource with attrs param
+      POST: /v1/search/resource/generic?attrs=id&attrs=started_at&attrs=user_id
+      data: {}
+      response_json_paths:
+        $[0].`len`: 3
+        $[0].id: $RESPONSE['$[0].id']
+        $[0].started_at: $RESPONSE['$[0].started_at']
+        $[0].user_id: $RESPONSE['$[0].user_id']
+        $[1].`len`: 3
+
+    - name: search all resource with invalid attrs param
+      POST: /v1/search/resource/generic?attrs=id&attrs=foo&attrs=bar
+      data: {}
+      response_json_paths:
+        $[0].`len`: 1
+        $[0].id: $RESPONSE['$[0].id']
+        $[1].`len`: 1
+
+    - name: search all resource without attrs param
+      POST: /v1/search/resource/generic
+      data: {}
+      response_json_paths:
+        $[0].`len`: 13
+        $[1].`len`: 13
+
+    - name: search all resource without attrs header
+      POST: /v1/search/resource/generic
+      data: {}
+      request_headers:
+        Accept: "application/json"
+      response_json_paths:
+        $[0].`len`: 13
+        $[1].`len`: 13
diff --git a/gnocchi/tests/functional/gabbits/transformedids.yaml b/gnocchi/tests/functional/gabbits/transformedids.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d5ae289322461a81bd324f442464e694d2f50e58
--- /dev/null
+++ b/gnocchi/tests/functional/gabbits/transformedids.yaml
@@ -0,0 +1,178 @@
+#
+# Test the resource API's handling of non-UUID (transformed) resource ids
+# in the ResourcesController and ResourceController class code.
+#
+
+fixtures:
+    - ConfigFixture
+
+defaults:
+  request_headers:
+    # User foobar
+    authorization: "basic Zm9vYmFyOg=="
+    content-type: application/json
+
+tests:
+
+# We will need an archive policy for use in later tests, so we create it
+# here. This could be done in a fixture, but since the API allows it we
+# may as well use it.
+
+    - name: create archive policy
+      desc: for later use
+      POST: /v1/archive_policy
+      request_headers:
+        # User admin
+        authorization: "basic YWRtaW46"
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+      status: 201
+
+# Check transformed uuids across the URL hierarchy
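+#
+# A non-UUID id is accepted and deterministically mapped to a UUID on the
+# server side. The mapping is not shown in this file; a plausible sketch
+# (an assumption, not the verified implementation) is a uuid5 hash that
+# also mixes in the creator, which would explain why the same external id
+# posted by a different user below creates a new resource rather than a 409:
+#
+#     import uuid
+#     NAMESPACE = uuid.UUID('00000000-0000-0000-0000-000000000000')  # hypothetical
+#     def transform(resource_id, creator):
+#         return uuid.uuid5(NAMESPACE, "%s\x00%s" % (resource_id, creator))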
+
+    - name: post new resource non uuid for duplication test
+      POST: /v1/resource/generic
+      data:
+          id: generic zero
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 201
+      response_json_paths:
+          creator: foobar
+      response_headers:
+          # is a UUID
+          location: /v1/resource/generic/[a-f0-9-]{36}/
+
+    - name: post new resource non uuid duplication
+      POST: /v1/resource/generic
+      data:
+          id: generic zero
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 409
+
+    - name: post new resource with invalid uuid
+      POST: /v1/resource/generic
+      data:
+          id: 'id-with-/'
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+      status: 400
+      response_strings:
+        - "'/' is not supported in resource id"
+
+
+    - name: post new resource non uuid again different user
+      POST: /v1/resource/generic
+      request_headers:
+        authorization: "basic cGFzdGE6"
+      data:
+          id: generic zero
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 201
+      response_json_paths:
+          creator: pasta
+      response_headers:
+          # is a UUID
+          location: /v1/resource/generic/[a-f0-9-]{36}/
+
+    - name: post new resource non uuid
+      POST: /v1/resource/generic
+      data:
+          id: generic one
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 201
+      response_json_paths:
+        creator: foobar
+      response_headers:
+          # is a UUID
+          location: /v1/resource/generic/[a-f0-9-]{36}/
+
+    - name: get new non uuid resource by external id
+      GET: /v1/resource/generic/generic%20one
+      response_json_paths:
+          $.id: $RESPONSE['$.id']
+
+    - name: get new non uuid resource by internal id
+      GET: /v1/resource/generic/$RESPONSE['$.id']
+      response_json_paths:
+          $.id: $RESPONSE['$.id']
+
+    - name: patch by external id
+      PATCH: /v1/resource/generic/generic%20one
+      data:
+          metrics:
+              cattle:
+                  archive_policy_name: medium
+      status: 200
+      response_strings:
+          - '"cattle"'
+
+    - name: list metric by external resource id
+      GET: /v1/resource/generic/generic%20one/metric
+      response_json_paths:
+          $[0].name: cattle
+
+    - name: list empty measures by external resource id
+      GET: /v1/resource/generic/generic%20one/metric/cattle/measures
+      response_json_paths:
+          $: []
+
+    - name: post measures by external resource id
+      POST: /v1/resource/generic/generic%20one/metric/cattle/measures
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 43.1
+          - timestamp: "2015-03-06T14:34:12"
+            value: 12
+      status: 202
+
+    - name: list two measures by external resource id
+      GET: $LAST_URL?refresh=true
+      response_json_paths:
+          $[0][2]: 43.1
+          $[1][2]: 12
+
+    - name: delete the resource by external id
+      DELETE: /v1/resource/generic/generic%20one
+      status: 204
+
+# Check length handling
+
+    - name: fail to post too long non uuid resource id
+      POST: /v1/resource/generic
+      data:
+          id: four score and seven years ago we the people of the United States of America i have a dream it is the courage to continue that counts four score and seven years ago we the people of the United States of America i have a dream it is the courage to continue that counts four score and seven years ago we the people of the United States of America i have a dream it is the courage to continue that counts
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 400
+      response_strings:
+        - transformable resource id >255 max allowed characters for dictionary value
+
+    - name: post long non uuid resource id
+      POST: $LAST_URL
+      data:
+          # 255 char string
+          id: four score and seven years ago we the people of the United States of America i have a dream it is the courage to continue that counts four score and seven years ago we the people of the United States of America i have a dream it is the courage to continue
+          user_id: 0fbb231484614b1a80131fc22f6afc9c
+          project_id: f3d41b770cc14f0bb94a1d5be9c0e3ea
+          metrics:
+              cpu.util:
+                  archive_policy_name: medium
+      status: 201
diff --git a/gnocchi/tests/functional/test_gabbi.py b/gnocchi/tests/functional/test_gabbi.py
new file mode 100644
index 0000000000000000000000000000000000000000..025256158b7db50b5dc67fa12f44ced932100429
--- /dev/null
+++ b/gnocchi/tests/functional/test_gabbi.py
@@ -0,0 +1,37 @@
+#
+# Copyright 2015 Red Hat. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A test module to exercise the Gnocchi API with gabbi."""
+
+import os
+
+from gabbi import driver
+import wsgi_intercept
+
+from gnocchi.tests.functional import fixtures
+
+
+wsgi_intercept.STRICT_RESPONSE_HEADERS = True
+TESTS_DIR = 'gabbits'
+PREFIX = '/gnocchi'
+
+
+def load_tests(loader, tests, pattern):
+    """Provide a TestSuite to the discovery process."""
+    test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
+    return driver.build_tests(test_dir, loader, host=None, prefix=PREFIX,
+                              intercept=fixtures.setup_app,
+                              fixture_module=fixtures,
+                              safe_yaml=False)
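+
+# build_tests() turns every YAML file under gabbits/ into ordered test cases
+# at discovery time. host=None combined with the intercept argument runs
+# each request against an in-process WSGI app via wsgi-intercept rather
+# than a live server, and safe_yaml=False permits YAML constructs outside
+# the safe-load subset.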
diff --git a/gnocchi/tests/functional_live/__init__.py b/gnocchi/tests/functional_live/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/tests/functional_live/gabbits/live.yaml b/gnocchi/tests/functional_live/gabbits/live.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..858d18c9c28fc2f5aeea7bbc539e2f17e17e0223
--- /dev/null
+++ b/gnocchi/tests/functional_live/gabbits/live.yaml
@@ -0,0 +1,757 @@
+#
+# Confirmation tests to run against a live web server.
+#
+# These act as a very basic sanity check.
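+#
+# gabbi substitutes $ENVIRON values at load time, so GNOCCHI_SERVICE_TOKEN
+# and GNOCCHI_AUTHORIZATION must be exported before these tests run.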
+
+defaults:
+    request_headers:
+        x-auth-token: $ENVIRON['GNOCCHI_SERVICE_TOKEN']
+        authorization: $ENVIRON['GNOCCHI_AUTHORIZATION']
+
+tests:
+    - name: check /
+      GET: /
+
+    # Fail to create archive policy
+    - name: wrong archive policy content type
+      desc: attempt to create archive policy with invalid content-type
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: text/plain
+      status: 415
+      response_strings:
+          - Unsupported Media Type
+
+    - name: wrong method
+      desc: attempt to create archive policy with 'PUT' method
+      PUT: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      status: 405
+
+    - name: invalid authZ
+      desc: x-auth-token is invalid
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+          x-auth-token: 'hello'
+          authorization: 'basic hello:'
+      data:
+          name: medium
+          definition:
+              - granularity: 1 second
+      status: 401
+
+    - name: bad archive policy body
+      desc: archive policy contains invalid key 'cowsay'
+      POST: /v1/archive_policy
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          cowsay: moo
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed @ data/"
+        $.description.detail: ["cowsay"]
+
+    - name: missing definition
+      desc: archive policy is missing 'definition' keyword
+      POST: /v1/archive_policy
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          name: medium
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^required key not provided/"
+        $.description.detail: ["definition"]
+
+    - name: empty definition
+      desc: empty definition for archive policy
+      POST: /v1/archive_policy
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          name: medium
+          definition: []
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^length of value must be at least 1/"
+        $.description.detail: ["definition"]
+
+    - name: wrong value definition
+      desc: invalid type of 'definition' key
+      POST: /v1/archive_policy
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          name: somename
+          definition: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^expected a list/"
+        $.description.detail: ["definition"]
+
+    - name: useless definition
+      desc: invalid archive policy definition
+      POST: /v1/archive_policy
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          name: medium
+          definition:
+              - cowsay: moo
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed/"
+        $.description.detail: ['definition', '0', 'cowsay']
+
+    #
+    # Create archive policy
+    #
+
+    - name: create archive policy
+      desc: create archive policy 'gabbilive' for live tests
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: gabbilive
+          back_window: 0
+          definition:
+              - granularity: 1 second
+                points: 60
+              - granularity: 2 second
+                timespan: 1 minute
+              - points: 5
+                timespan: 5 minute
+          aggregation_methods:
+              - mean
+              - min
+              - max
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/gabbilive
+      status: 201
+
+    # Retrieve it correctly and then poorly
+
+    - name: get archive policy
+      desc: retrieve archive policy 'gabbilive' and assert its values
+      GET: $LOCATION
+      response_headers:
+          content-type: /application/json/
+      response_json_paths:
+          $.name: gabbilive
+          $.back_window: 0
+          $.definition[0].granularity: "0:00:01"
+          $.definition[0].points: 60
+          $.definition[0].timespan: "0:01:00"
+          $.definition[1].granularity: "0:00:02"
+          $.definition[1].points: 30
+          $.definition[1].timespan: "0:01:00"
+          $.definition[2].granularity: "0:01:00"
+          $.definition[2].points: 5
+          $.definition[2].timespan: "0:05:00"
+          $.aggregation_methods.`sorted`: ["max", "mean", "min"]
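+
+    # Note how the server fills in the missing member of each definition:
+    # timespan = granularity × points, so (2 second, 1 minute) yields 30
+    # points and (5 points, 5 minute) yields a one-minute granularity.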
+
+    - name: get wrong accept
+      desc: invalid 'accept' header
+      GET: /v1/archive_policy/medium
+      request_headers:
+          accept: text/plain
+      status: 406
+
+    # Unexpected methods
+
+    - name: post single archive
+      desc: unexpected 'POST' request to archive policy
+      POST: /v1/archive_policy/gabbilive
+      status: 405
+
+    - name: put single archive
+      desc: unexpected 'PUT' request to archive policy
+      PUT: /v1/archive_policy/gabbilive
+      status: 405
+
+    # Duplicate archive policy names aren't allowed
+
+    - name: create duplicate archive policy
+      desc: attempt to create a duplicate archive policy 'gabbilive'
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: gabbilive
+          definition:
+              - granularity: 30 second
+                points: 60
+      status: 409
+      response_strings:
+          - Archive policy gabbilive already exists
+
+    # Create a unicode named policy
+
+    - name: post unicode policy name
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: ✔éñ☃
+          definition:
+              - granularity: 1 minute
+                points: 20
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      response_json_paths:
+          name: ✔éñ☃
+
+    - name: retrieve unicode policy name
+      GET: $LOCATION
+      response_json_paths:
+          name: ✔éñ☃
+
+    - name: delete unicode archive policy
+      DELETE: /v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      status: 204
+
+    # It really is gone
+
+    - name: confirm delete
+      desc: assert deleted unicode policy is not available
+      GET: /v1/archive_policy/%E2%9C%94%C3%A9%C3%B1%E2%98%83
+      status: 404
+
+    # Fail to delete one that does not exist
+
+    - name: delete missing archive
+      desc: delete non-existent archive policy
+      DELETE: /v1/archive_policy/grandiose
+      status: 404
+      response_strings:
+          - Archive policy grandiose does not exist
+
+    # Attempt to create illogical policies
+
+    - name: create illogical policy
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: complex
+          definition:
+              - granularity: 1 second
+                points: 60
+                timespan: "0:01:01"
+      status: 400
+      response_strings:
+          - timespan ≠ granularity × points
+
+    - name: create identical granularities policy
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: complex
+          definition:
+              - granularity: 1 second
+                points: 60
+              - granularity: 1 second
+                points: 120
+      status: 400
+      response_strings:
+          - "More than one archive policy uses granularity `1.0'"
+
+    - name: policy invalid unit
+      desc: invalid unit for archive policy 'timespan' key
+      POST: /v1/archive_policy
+      request_headers:
+          content-type: application/json
+      data:
+          name: 227d0e1f-4295-4e4b-8515-c296c47d71d3
+          definition:
+              - granularity: 1 second
+                timespan: "1 shenanigan"
+      status: 400
+
+    #
+    # Archive policy rules
+    #
+
+    - name: create archive policy rule1
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: application/json
+      data:
+          name: gabbilive_rule
+          metric_pattern: "live.*"
+          archive_policy_name: gabbilive
+      status: 201
+      response_json_paths:
+        $.metric_pattern: "live.*"
+        $.archive_policy_name: gabbilive
+        $.name: gabbilive_rule
+
+    - name: create invalid archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: application/json
+      data:
+        name: test_rule
+        metric_pattern: "disk.foo.*"
+      status: 400
+
+    - name: missing auth archive policy rule
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: application/json
+          x-auth-token: 'hello'
+          authorization: 'basic hello:'
+      data:
+        name: test_rule
+        metric_pattern: "disk.foo.*"
+        archive_policy_name: low
+      status: 401
+
+    - name: wrong archive policy rule content type
+      POST: /v1/archive_policy_rule
+      request_headers:
+          content-type: text/plain
+      status: 415
+      response_strings:
+          - Unsupported Media Type
+
+    - name: bad archive policy rule body
+      POST: /v1/archive_policy_rule
+      request_headers:
+          accept: application/json
+          content-type: application/json
+      data:
+          whaa: foobar
+      status: 400
+      response_json_paths:
+        $.description.cause: "Invalid input"
+        $.description.reason: "/^extra keys not allowed/"
+        $.description.detail: ["whaa"]
+
+    # get archive policy rules
+
+    - name: get all archive policy rules
+      GET: /v1/archive_policy_rule
+      status: 200
+      response_json_paths:
+          $[\name][0].name: "gabbilive_rule"
+          $[\name][0].metric_pattern: "live.*"
+          $[\name][0].archive_policy_name: "gabbilive"
+
+    - name: get unknown archive policy rule
+      GET: /v1/archive_policy_rule/foo
+      status: 404
+
+
+    - name: get archive policy rule
+      GET: /v1/archive_policy_rule/gabbilive_rule
+      status: 200
+      response_json_paths:
+          $.metric_pattern: "live.*"
+          $.archive_policy_name: "gabbilive"
+          $.name: "gabbilive_rule"
+
+    - name: delete archive policy in use
+      desc: fails due to https://bugs.launchpad.net/gnocchi/+bug/1569781
+      DELETE: /v1/archive_policy/gabbilive
+      status: 400
+
+    #
+    # Metrics
+    #
+
+
+    - name: get all metrics
+      GET: /v1/metric
+      status: 200
+
+    - name: create metric with name and rule
+      POST: /v1/metric
+      request_headers:
+          content-type: application/json
+      data:
+          name: "live.io.rate"
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: gabbilive
+          $.name: live.io.rate
+
+    - name: assert metric is present in listing
+      GET: /v1/metric?id=$HISTORY['create metric with name and rule'].$RESPONSE['$.id']
+      response_json_paths:
+          $.`len`: 1
+
+    - name: assert metric is the only one with this policy
+      GET: /v1/metric?archive_policy_name=gabbilive
+      response_json_paths:
+          $.`len`: 1
+
+    - name: delete metric
+      DELETE: /v1/metric/$HISTORY['create metric with name and rule'].$RESPONSE['$.id']
+      status: 204
+
+    - name: assert metric is expunged
+      GET: $HISTORY['assert metric is present in listing'].$URL&status=delete
+      poll:
+          count: 360
+          delay: 1
+      response_json_paths:
+          $.`len`: 0
+
+    - name: create metric with name and policy
+      POST: /v1/metric
+      request_headers:
+          content-type: application/json
+      data:
+          name: "aagabbi.live.metric"
+          archive_policy_name: "gabbilive"
+      status: 201
+      response_json_paths:
+          $.archive_policy_name: gabbilive
+          $.name: "aagabbi.live.metric"
+
+    - name: get valid metric id
+      GET: $LOCATION
+      status: 200
+      response_json_paths:
+        $.archive_policy.name: gabbilive
+
+    - name: delete the metric
+      DELETE: /v1/metric/$RESPONSE['$.id']
+      status: 204
+
+    - name: ensure the metric is deleted
+      GET: /v1/metric/$HISTORY['get valid metric id'].$RESPONSE['$.id']
+      status: 404
+
+    - name: create metric bad archive policy
+      POST: /v1/metric
+      request_headers:
+          content-type: application/json
+      data:
+          archive_policy_name: 2e2675aa-105e-4664-a30d-c407e6a0ea7f
+      status: 400
+      response_strings:
+          - Archive policy 2e2675aa-105e-4664-a30d-c407e6a0ea7f does not exist
+
+    - name: create metric bad content-type
+      POST: /v1/metric
+      request_headers:
+          content-type: plain/text
+      data: '{"archive_policy_name": "cookies"}'
+      status: 415
+
+
+    #
+    # Cleanup
+    #
+
+    - name: delete archive policy rule
+      DELETE: /v1/archive_policy_rule/gabbilive_rule
+      status: 204
+
+    - name: confirm delete archive policy rule
+      DELETE: /v1/archive_policy_rule/gabbilive_rule
+      status: 404
+
+
+    #
+    # Resources section
+    #
+
+    - name: root of resource
+      GET: /v1/resource
+      response_json_paths:
+          $.generic: $SCHEME://$NETLOC/v1/resource/generic
+
+    - name: typo of resource
+      GET: /v1/resoue
+      status: 404
+
+    - name: typo of resource extra
+      GET: /v1/resource/foobar
+      status: 404
+
+    - name: generic resource
+      GET: /v1/resource/generic
+      status: 200
+
+    - name: post resource type
+      POST: /v1/resource_type
+      request_headers:
+          content-type: application/json
+      data:
+          name: myresource
+          attributes:
+              display_name:
+                  type: string
+                  required: true
+                  max_length: 5
+                  min_length: 2
+      status: 201
+      response_headers:
+          location: $SCHEME://$NETLOC/v1/resource_type/myresource
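+
+    # Resource-type attributes are edited with RFC 6902 JSON Patch
+    # documents, hence the application/json-patch+json content type in the
+    # next two tests.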
+
+    - name: add an attribute
+      PATCH: /v1/resource_type/myresource
+      request_headers:
+          content-type: application/json-patch+json
+      data:
+        - op: "add"
+          path: "/attributes/awesome-stuff"
+          value: {"type": "bool", "required": false}
+      status: 200
+      response_json_paths:
+          $.name: myresource
+          $.attributes."awesome-stuff".type: bool
+          $.attributes.[*].`len`: 2
+
+    - name: remove an attribute
+      PATCH: /v1/resource_type/myresource
+      request_headers:
+          content-type: application/json-patch+json
+      data:
+        - op: "remove"
+          path: "/attributes/awesome-stuff"
+      status: 200
+      response_json_paths:
+          $.name: myresource
+          $.attributes.display_name.type: string
+          $.attributes.[*].`len`: 1
+
+    - name: myresource resource bad accept
+      desc: Expect 406 on bad accept type
+      request_headers:
+          accept: text/plain
+      GET: /v1/resource/myresource
+      status: 406
+      response_strings:
+          - 406 Not Acceptable
+
+    - name: myresource resource complex accept
+      desc: failover accept media type appropriately
+      request_headers:
+          accept: text/plain, application/json; q=0.8
+      GET: /v1/resource/myresource
+      status: 200
+
+    - name: post myresource resource
+      POST: /v1/resource/myresource
+      request_headers:
+          content-type: application/json
+      data:
+          id: 2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+          user_id: 126204ef-989a-46fd-999b-ee45c8108f31
+          project_id: 98e785d7-9487-4159-8ab8-8230ec37537a
+          display_name: myvm
+          metrics:
+              vcpus:
+                  archive_policy_name: gabbilive
+      status: 201
+      response_json_paths:
+          $.id: 2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+          $.user_id: 126204ef-989a-46fd-999b-ee45c8108f31
+          $.project_id: 98e785d7-9487-4159-8ab8-8230ec37537a
+          $.display_name: "myvm"
+
+    - name: get myresource resource
+      GET: $LOCATION
+      status: 200
+      response_json_paths:
+          $.id: 2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+          $.user_id: 126204ef-989a-46fd-999b-ee45c8108f31
+          $.project_id: 98e785d7-9487-4159-8ab8-8230ec37537a
+          $.display_name: "myvm"
+
+    - name: get vcpus metric
+      GET: /v1/metric/$HISTORY['get myresource resource'].$RESPONSE['$.metrics.vcpus']
+      status: 200
+      response_json_paths:
+          $.name: vcpus
+          $.resource.id: 2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+
+    - name: search for myresource resource via user_id
+      POST: /v1/search/resource/myresource
+      request_headers:
+        content-type: application/json
+      data:
+        =:
+          user_id: "126204ef-989a-46fd-999b-ee45c8108f31"
+      response_json_paths:
+        $..id: 2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+        $..user_id: 126204ef-989a-46fd-999b-ee45c8108f31
+        $..project_id: 98e785d7-9487-4159-8ab8-8230ec37537a
+        $..display_name: myvm
+
+    - name: search for myresource resource via user_id and 'generic' type
+      POST: /v1/search/resource/generic
+      request_headers:
+        content-type: application/json
+      data:
+        =:
+          id: "2ae35573-7f9f-4bb1-aae8-dad8dff5706e"
+      response_strings:
+          - '"user_id": "126204ef-989a-46fd-999b-ee45c8108f31"'
+
+    - name: search for myresource resource via user_id and project_id
+      POST: /v1/search/resource/generic
+      request_headers:
+        content-type: application/json
+      data:
+        and:
+          - =:
+              user_id: "126204ef-989a-46fd-999b-ee45c8108f31"
+          - =:
+              project_id: "98e785d7-9487-4159-8ab8-8230ec37537a"
+      response_strings:
+          - '"id": "2ae35573-7f9f-4bb1-aae8-dad8dff5706e"'
+
+    - name: patch myresource resource
+      PATCH: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+      request_headers:
+          content-type: application/json
+      data:
+          display_name: myvm2
+      status: 200
+      response_json_paths:
+          display_name: myvm2
+
+    - name: post some measures to the metric on myresource
+      POST: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e/metric/vcpus/measures
+      request_headers:
+          content-type: application/json
+      data:
+          - timestamp: "2015-03-06T14:33:57"
+            value: 2
+          - timestamp: "2015-03-06T14:34:12"
+            value: 2
+      status: 202
+
+    - name: get myresource measures with poll
+      GET: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e/metric/vcpus/measures
+      # wait up to 60 seconds for the measures to be processed
+      poll:
+          count: 60
+          delay: 1
+      response_json_paths:
+          $[0][2]: 2
+          $[1][2]: 2
+
+    - name: post some more measures to the metric on myresource
+      POST: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e/metric/vcpus/measures
+      request_headers:
+          content-type: application/json
+      data:
+          - timestamp: "2015-03-06T14:34:15"
+            value: 5
+          - timestamp: "2015-03-06T14:34:20"
+            value: 5
+      status: 202
+
+    - name: get myresource measures with refresh
+      GET: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e/metric/vcpus/measures?refresh=true
+      response_json_paths:
+          $[0][2]: 2
+          $[1][2]: 4
+          $[2][2]: 2
+          $[3][2]: 2
+          $[4][2]: 5
+          $[5][2]: 5
+
+    #
+    # Search for resources
+    #
+
+    - name: typo of search
+      POST: /v1/search/notexists
+      status: 404
+
+    - name: typo of search in resource
+      POST: /v1/search/resource/foobar
+      status: 404
+
+    - name: search with invalid uuid
+      POST: /v1/search/resource/generic
+      request_headers:
+        content-type: application/json
+      data:
+        =:
+          id: "cd9eef"
+      status: 200
+      response_json_paths:
+          $.`len`: 0
+
+    - name: assert vcpus metric exists in listing
+      GET: /v1/metric?id=$HISTORY['get myresource resource'].$RESPONSE['$.metrics.vcpus']
+      poll:
+          count: 360
+          delay: 1
+      response_json_paths:
+          $.`len`: 1
+
+    - name: delete myresource resource
+      DELETE: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+      status: 204
+
+    # assert resource is really deleted
+    - name: assert myresource resource is deleted
+      GET: /v1/resource/myresource/2ae35573-7f9f-4bb1-aae8-dad8dff5706e
+      status: 404
+
+    - name: assert vcpus metric is really expunged
+      GET: $HISTORY['assert vcpus metric exists in listing'].$URL&status=delete
+      poll:
+          count: 360
+          delay: 1
+      response_json_paths:
+          $.`len`: 0
+
+    - name: post myresource resource no data
+      POST: /v1/resource/myresource
+      request_headers:
+          content-type: application/json
+      status: 400
+
+    - name: assert no metrics have the gabbilive policy
+      GET: $HISTORY['assert metric is the only one with this policy'].$URL
+      response_json_paths:
+          $.`len`: 0
+
+    - name: assert no deleted metrics have the gabbilive policy
+      GET: $HISTORY['assert metric is the only one with this policy'].$URL&status=delete
+      response_json_paths:
+          $.`len`: 0
+
+    - name: delete single archive policy cleanup
+      DELETE: /v1/archive_policy/gabbilive
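+      # polled because the policy cannot be deleted until the metrics
+      # that used it are fully purged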
+      poll:
+          count: 360
+          delay: 1
+      status: 204
+
+    - name: delete our resource type
+      DELETE: /v1/resource_type/myresource
+      status: 204
+
+    # It really is gone
+    - name: confirm gabbilive archive policy is deleted
+      GET: /v1/archive_policy/gabbilive
+      status: 404
diff --git a/gnocchi/tests/functional_live/gabbits/search-resource.yaml b/gnocchi/tests/functional_live/gabbits/search-resource.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..fe2547885ee381ab4e0db7264be48e1caa7df496
--- /dev/null
+++ b/gnocchi/tests/functional_live/gabbits/search-resource.yaml
@@ -0,0 +1,275 @@
+#
+# Tests to confirm resources are searchable. Run against a live setup.
+# URL: http://gnocchi.xyz/rest.html#searching-for-resources
+#
+# Instance-ResourceID-1: a64ca14f-bc7c-45b0-aa85-42cd2179e1e2
+# Instance-ResourceID-2: 7ccccfa0-92ce-4225-80ca-3ac9cb122d6a
+# Instance-ResourceID-3: c442a47c-eb33-46ce-9665-f3aa0bef54e7
+#
+# UserID-1: 33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07
+# UserID-2: 81d82ef3-4deb-499d-9270-9aeb5a3ec5fe
+#
+# ProjectID-1: c9a5f184-c0d0-4daa-83c3-af6fdc0879e6
+# ProjectID-2: 40eba01c-b348-49b8-803f-67123251a00a
+#
+# ImageID-1: 7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d
+# ImageID-2: b01f2588-89dc-46b2-897b-fffae1e10975
+#
+
+defaults:
+    request_headers:
+        x-auth-token: $ENVIRON['GNOCCHI_SERVICE_TOKEN']
+        authorization: $ENVIRON['GNOCCHI_AUTHORIZATION']
+
+tests:
+    #
+    # Set up resource types if they don't exist
+    #
+
+    - name: create new resource type 'instance-like'
+      POST: /v1/resource_type
+      status: 201
+      request_headers:
+          content-type: application/json
+      data:
+          name: instance-like
+          attributes:
+              display_name:
+                  type: string
+                  required: True
+              flavor_id:
+                  type: string
+                  required: True
+              host:
+                  type: string
+                  required: True
+              image_ref:
+                  type: string
+                  required: False
+              server_group:
+                  type: string
+                  required: False
+
+    - name: create new resource type 'image-like'
+      POST: /v1/resource_type
+      status: 201
+      request_headers:
+          content-type: application/json
+      data:
+          name: image-like
+          attributes:
+              name:
+                  type: string
+                  required: True
+              disk_format:
+                  type: string
+                  required: True
+              container_format:
+                  type: string
+                  required: True
+
+    #
+    # Setup test resources
+    #
+    - name: helper. create instance-like resource-1
+      POST: /v1/resource/instance-like
+      request_headers:
+          content-type: application/json
+      data:
+          display_name: vm-gabbi-1
+          id: a64ca14f-bc7c-45b0-aa85-42cd2179e1e2
+          user_id: 33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07
+          flavor_id: "1"
+          image_ref: 7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d
+          host: compute-0-gabbi.localdomain
+          project_id: c9a5f184-c0d0-4daa-83c3-af6fdc0879e6
+      status: 201
+
+    - name: helper. create instance-like resource-2
+      POST: /v1/resource/instance-like
+      request_headers:
+          content-type: application/json
+      data:
+          display_name: vm-gabbi-2
+          id: 7ccccfa0-92ce-4225-80ca-3ac9cb122d6a
+          user_id: 33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07
+          flavor_id: "2"
+          image_ref: b01f2588-89dc-46b2-897b-fffae1e10975
+          host: compute-1-gabbi.localdomain
+          project_id: c9a5f184-c0d0-4daa-83c3-af6fdc0879e6
+      status: 201
+
+    - name: helper. create instance-like resource-3
+      POST: /v1/resource/instance-like
+      request_headers:
+          content-type: application/json
+      data:
+          display_name: vm-gabbi-3
+          id: c442a47c-eb33-46ce-9665-f3aa0bef54e7
+          user_id: 81d82ef3-4deb-499d-9270-9aeb5a3ec5fe
+          flavor_id: "2"
+          image_ref: b01f2588-89dc-46b2-897b-fffae1e10975
+          host: compute-1-gabbi.localdomain
+          project_id: 40eba01c-b348-49b8-803f-67123251a00a
+      status: 201
+
+    - name: helper. create image-like resource-1
+      POST: /v1/resource/image-like
+      request_headers:
+          content-type: application/json
+      data:
+          id: 7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d
+          container_format: bare
+          disk_format: qcow2
+          name: gabbi-image-1
+          user_id: 81d82ef3-4deb-499d-9270-9aeb5a3ec5fe
+          project_id: 40eba01c-b348-49b8-803f-67123251a00a
+      status: 201
+
+    #
+    # Actual tests
+    #
+
+    - name: search for all resources with a specific user_id
+      desc: search through all resource types
+      POST: /v1/search/resource/generic
+      request_headers:
+          content-type: application/json
+      data:
+          =:
+              user_id: 81d82ef3-4deb-499d-9270-9aeb5a3ec5fe
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+          $.[0].type: instance-like
+          $.[1].type: image-like
+          $.[0].id: c442a47c-eb33-46ce-9665-f3aa0bef54e7
+          $.[1].id: 7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d
+
+    - name: search for all resources of instance-like type created by a specific user_id
+      desc: all instances created by a specified user
+      POST: /v1/search/resource/generic
+      request_headers:
+          content-type: application/json
+      data:
+          and:
+              - =:
+                  type: instance-like
+              - =:
+                  user_id: 33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+          $.[0].id: a64ca14f-bc7c-45b0-aa85-42cd2179e1e2
+          $.[1].id: 7ccccfa0-92ce-4225-80ca-3ac9cb122d6a
+          $.[0].type: instance-like
+          $.[1].type: instance-like
+          $.[0].metrics.`len`: 0
+          $.[1].metrics.`len`: 0
+      response_strings:
+          - '"id": "a64ca14f-bc7c-45b0-aa85-42cd2179e1e2"'
+          - '"id": "7ccccfa0-92ce-4225-80ca-3ac9cb122d6a"'
+
+    - name: search for all resources with a specific project_id
+      desc: search for all resources in a specific project
+      POST: /v1/search/resource/generic
+      request_headers:
+          content-type: application/json
+      data:
+          =:
+              project_id: c9a5f184-c0d0-4daa-83c3-af6fdc0879e6
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+
+    - name: search for instances on a specific compute using "like" keyword
+      desc: search for vms hosted on a specific compute node
+      POST: /v1/search/resource/instance-like
+      request_headers:
+          content-type: application/json
+      data:
+          like:
+              host: 'compute-1-gabbi%'
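+      # the "like" operator follows SQL LIKE semantics, so '%' matches
+      # any sequence of characters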
+      response_json_paths:
+          $.`len`: 2
+      response_strings:
+          - '"project_id": "40eba01c-b348-49b8-803f-67123251a00a"'
+          - '"project_id": "c9a5f184-c0d0-4daa-83c3-af6fdc0879e6"'
+          - '"user_id": "33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07"'
+          - '"user_id": "81d82ef3-4deb-499d-9270-9aeb5a3ec5fe"'
+          - '"display_name": "vm-gabbi-2"'
+          - '"display_name": "vm-gabbi-3"'
+
+    - name: search for instances using complex search with "like" keyword and user_id
+      desc: search for vms of specified user hosted on a specific compute node
+      POST: /v1/search/resource/instance-like
+      request_headers:
+          content-type: application/json
+      data:
+          and:
+            - like:
+                  host: 'compute-%-gabbi%'
+            - =:
+                  user_id: 33ba83ca-2f12-4ad6-8fa2-bc8b55d36e07
+      response_json_paths:
+          $.`len`: 2
+      response_strings:
+          - '"display_name": "vm-gabbi-1"'
+          - '"display_name": "vm-gabbi-2"'
+          - '"project_id": "c9a5f184-c0d0-4daa-83c3-af6fdc0879e6"'
+
+    - name: search for resources of instance-like or image-like type with specific user_id
+      desc: search for all image-like or instance-like resources created by a specific user
+      POST: /v1/search/resource/generic
+      request_headers:
+          content-type: application/json
+      data:
+          and:
+              - =:
+                  user_id: 81d82ef3-4deb-499d-9270-9aeb5a3ec5fe
+
+              - or:
+                  - =:
+                      type: instance-like
+
+                  - =:
+                      type: image-like
+      status: 200
+      response_json_paths:
+          $.`len`: 2
+      response_strings:
+          - '"type": "image-like"'
+          - '"type": "instance-like"'
+          - '"id": "7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d"'
+          - '"id": "c442a47c-eb33-46ce-9665-f3aa0bef54e7"'
+
+    #
+    # Tear down resources
+    #
+
+    - name: helper. delete instance-like resource-1
+      DELETE: /v1/resource/instance-like/a64ca14f-bc7c-45b0-aa85-42cd2179e1e2
+      status: 204
+
+    - name: helper. delete instance-like resource-2
+      DELETE: /v1/resource/instance-like/7ccccfa0-92ce-4225-80ca-3ac9cb122d6a
+      status: 204
+
+    - name: helper. delete instance-like resource-3
+      DELETE: /v1/resource/instance-like/c442a47c-eb33-46ce-9665-f3aa0bef54e7
+      status: 204
+
+    - name: helper. delete image-like resource
+      DELETE: /v1/resource/image-like/7ab2f7ae-7af5-4469-bdc8-3c0f6dfab75d
+      status: 204
+
+    - name: helper. delete resource-type instance-like
+      DELETE: /v1/resource_type/instance-like
+      status: 204
+
+    - name: helper. delete resource-type image-like
+      DELETE: /v1/resource_type/image-like
+      status: 204
+
diff --git a/gnocchi/tests/functional_live/test_gabbi_live.py b/gnocchi/tests/functional_live/test_gabbi_live.py
new file mode 100644
index 0000000000000000000000000000000000000000..aeed07a88acd1760bd9ee3073c665dfc1f29076c
--- /dev/null
+++ b/gnocchi/tests/functional_live/test_gabbi_live.py
@@ -0,0 +1,48 @@
+#
+# Copyright 2015 Red Hat. All Rights Reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+"""A test module to exercise the Gnocchi API with gabbi."""
+
+import os
+
+from gabbi import driver
+import six.moves.urllib.parse as urlparse
+
+
+TESTS_DIR = 'gabbits'
+
+
+def load_tests(loader, tests, pattern):
+    """Provide a TestSuite to the discovery process."""
+    gnocchi_url = os.getenv('GNOCCHI_ENDPOINT')
+    if gnocchi_url:
+        parsed_url = urlparse.urlsplit(gnocchi_url)
+        prefix = parsed_url.path.rstrip('/')  # turn it into a prefix
+
+        # NOTE(chdent): gabbi requires a port be passed or it will
+        # default to 8001, so we must dance a little dance to get
+        # the right ports. Probably gabbi needs to change.
+        # https://github.com/cdent/gabbi/issues/50
+        port = 443 if parsed_url.scheme == 'https' else 80
+        if parsed_url.port:
+            port = parsed_url.port
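+        # e.g. (illustrative) GNOCCHI_ENDPOINT=https://gnocchi.example.com/metric
+        # yields hostname='gnocchi.example.com', port=443, prefix='/metric'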
+
+        test_dir = os.path.join(os.path.dirname(__file__), TESTS_DIR)
+        return driver.build_tests(test_dir, loader,
+                                  host=parsed_url.hostname,
+                                  port=port,
+                                  prefix=prefix)
+    elif os.getenv("GABBI_LIVE"):
+        raise RuntimeError('"GNOCCHI_ENDPOINT" is not set')
diff --git a/gnocchi/tests/indexer/__init__.py b/gnocchi/tests/indexer/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/tests/indexer/sqlalchemy/__init__.py b/gnocchi/tests/indexer/sqlalchemy/__init__.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/gnocchi/tests/indexer/sqlalchemy/test_migrations.py b/gnocchi/tests/indexer/sqlalchemy/test_migrations.py
new file mode 100644
index 0000000000000000000000000000000000000000..2371234750d5e483342f6429a487053b304b61d3
--- /dev/null
+++ b/gnocchi/tests/indexer/sqlalchemy/test_migrations.py
@@ -0,0 +1,80 @@
+# Copyright 2015 eNovance
+# All Rights Reserved.
+#
+#    Licensed under the Apache License, Version 2.0 (the "License"); you may
+#    not use this file except in compliance with the License. You may obtain
+#    a copy of the License at
+#
+#         http://www.apache.org/licenses/LICENSE-2.0
+#
+#    Unless required by applicable law or agreed to in writing, software
+#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+#    License for the specific language governing permissions and limitations
+#    under the License.
+import abc
+
+import fixtures
+import mock
+import oslo_db.exception
+from oslo_db.sqlalchemy import test_migrations
+import six
+import sqlalchemy.schema
+import sqlalchemy_utils
+
+from gnocchi import indexer
+from gnocchi.indexer import sqlalchemy
+from gnocchi.indexer import sqlalchemy_base
+from gnocchi.tests import base
+
+
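+# NOTE: combining base.SkipNotImplementedMeta with abc.ABCMeta avoids a
+# metaclass conflict, since test_migrations.ModelsMigrationsSync uses
+# ABCMeta.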
+class ABCSkip(base.SkipNotImplementedMeta, abc.ABCMeta):
+    pass
+
+
+class ModelsMigrationsSync(
+        six.with_metaclass(ABCSkip,
+                           base.TestCase,
+                           test_migrations.ModelsMigrationsSync)):
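+    """Ensure the SQLAlchemy models stay in sync with the migrations.
+
+    oslo.db's ModelsMigrationsSync compares the schema produced by the
+    alembic migrations with the one declared by the models.
+    """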
+
+    def setUp(self):
+        super(ModelsMigrationsSync, self).setUp()
+        self.useFixture(fixtures.Timeout(120, gentle=True))
+        self.db = mock.Mock()
+        self.conf.set_override(
+            'url',
+            sqlalchemy.SQLAlchemyIndexer._create_new_database(
+                self.conf.indexer.url),
+            'indexer')
+        self.index = indexer.get_driver(self.conf)
+        self.index.upgrade(nocreate=True)
+        self.addCleanup(self._drop_database)
+
+        # NOTE(sileht): remove tables dynamically created by other tests
+        valid_resource_type_tables = []
+        for rt in self.index.list_resource_types():
+            valid_resource_type_tables.append(rt.tablename)
+            valid_resource_type_tables.append("%s_history" % rt.tablename)
+            # NOTE(sileht): load it in sqlalchemy metadata
+            self.index._RESOURCE_TYPE_MANAGER.get_classes(rt)
+
+        for table in sqlalchemy_base.Base.metadata.sorted_tables:
+            if (table.name.startswith("rt_") and
+                    table.name not in valid_resource_type_tables):
+                sqlalchemy_base.Base.metadata.remove(table)
+                self.index._RESOURCE_TYPE_MANAGER._cache.pop(
+                    table.name.replace('_history', ''), None)
+
+    def _drop_database(self):
+        try:
+            sqlalchemy_utils.drop_database(self.conf.indexer.url)
+        except oslo_db.exception.DBNonExistentDatabase:
+            # NOTE(sileht): oslo db >= 4.15.0 cleanup this for us
+            pass
+
+    @staticmethod
+    def get_metadata():
+        return sqlalchemy_base.Base.metadata
+
+    def get_engine(self):
+        return self.index.get_engine()
diff --git a/gnocchi/tests/indexer/sqlalchemy/test_utils.py b/gnocchi/tests/indexer/sqlalchemy/test_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..9d251ec40389eb6dc452d87aeded2d0609886c6e
--- /dev/null
+++ b/gnocchi/tests/indexer/sqlalchemy/test_utils.py
@@ -0,0 +1,25 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from gnocchi import indexer
+from gnocchi.tests import base
+
+
+class TestUtils(base.TestCase):
+    def test_percent_in_url(self):
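+        # alembic's config is ConfigParser-based, where '%' is the
+        # interpolation character; a literal '%' in the URL must survive
+        # the round-trip unchanged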
+        url = 'mysql+pymysql://user:pass%word@localhost/foobar'
+        self.conf.set_override('url', url, 'indexer')
+        alembic = indexer.get_driver(self.conf)._get_alembic_config()
+        self.assertEqual(url, alembic.get_main_option("sqlalchemy.url"))
diff --git a/gnocchi/tests/test_aggregates.py b/gnocchi/tests/test_aggregates.py
new file mode 100644
index 0000000000000000000000000000000000000000..47de657e15a1898236c307edaa5018ba54932154
--- /dev/null
+++ b/gnocchi/tests/test_aggregates.py
@@ -0,0 +1,1644 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2016 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import functools
+import uuid
+
+import mock
+import numpy
+
+from gnocchi import carbonara
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi.rest.aggregates import exceptions
+from gnocchi.rest.aggregates import processor
+from gnocchi import storage
+from gnocchi.tests import base
+
+
+class EqNan(object):
+    def __eq__(self, other):
+        return numpy.isnan(other)
+
+
+eq_nan = EqNan()
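+# NaN never compares equal to itself, so a literal NaN cannot be used in
+# assertEqual; eq_nan compares equal to any NaN instead.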
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestAggregatedTimeseries(base.BaseTestCase):
+    @staticmethod
+    def _resample_and_merge(ts, agg_dict):
+        """Helper method that mimics _compute_splits_operations workflow."""
+        grouped = ts.group_serie(agg_dict['sampling'])
+        existing = agg_dict.get('return')
+        name = agg_dict.get("name")
+        resource = None if name is None else mock.Mock(id=str(uuid.uuid4()))
+        metric = mock.Mock(id=str(uuid.uuid4()), name=name)
+        agg_dict['return'] = (
+            processor.MetricReference(metric, "mean", resource),
+            carbonara.AggregatedTimeSerie.from_grouped_serie(
+                grouped,
+                carbonara.Aggregation(agg_dict['agg'],
+                                      agg_dict['sampling'],
+                                      None)))
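+        # agg_dict['return'] holds a (MetricReference, AggregatedTimeSerie)
+        # pair; on later calls the new serie is merged into the existing one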
+        if existing:
+            existing[1].merge(agg_dict['return'][1])
+            agg_dict['return'] = existing
+
+    def test_aggregated_different_archive_no_overlap(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 50, 'agg': 'mean', "name": "all"}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 50, 'agg': 'mean', "name": "all"}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 11, 46, 4), 4)],
+                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                        before_truncate_callback=functools.partial(
+                            self._resample_and_merge, agg_dict=tsc1))
+        tsb2.set_values(numpy.array([(datetime64(2014, 1, 1, 9, 1, 4), 4)],
+                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                        before_truncate_callback=functools.partial(
+                            self._resample_and_merge, agg_dict=tsc2))
+
+        dtfrom = datetime64(2014, 1, 1, 11, 0, 0)
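+        # tsc2's only point (09:01) falls before from_timestamp, so the
+        # two series share no overlapping timestamps and aggregation
+        # must fail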
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.aggregated,
+                          [tsc1['return'], tsc2['return']],
+                          from_timestamp=dtfrom,
+                          operations=["aggregate", "mean", [
+                              "metric", ["all", "mean"]]])
+
+    def test_aggregated_different_archive_no_overlap2(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 50, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = carbonara.AggregatedTimeSerie(
+            carbonara.Aggregation('mean', numpy.timedelta64(60, 's'), None))
+
+        tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 3, 0), 4)],
+                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                        before_truncate_callback=functools.partial(
+                            self._resample_and_merge, agg_dict=tsc1))
+        metric = mock.Mock(id=str(uuid.uuid4()))
+        ref = processor.MetricReference(metric, "mean")
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.aggregated,
+                          [tsc1['return'], (ref, tsc2)],
+                          operations=["aggregate", "mean",
+                                      ["metric", tsc1['return'][0].lookup_key,
+                                       ref.lookup_key]])
+
+    def test_aggregated_different_archive_overlap(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        # NOTE(sileht): minute 8 is missing in both series and minute 7
+        # is missing in tsc2 too, but it looks like we have enough
+        # points to do the aggregation
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 0, 0), 4),
+            (datetime64(2014, 1, 1, 12, 1, 0), 3),
+            (datetime64(2014, 1, 1, 12, 2, 0), 2),
+            (datetime64(2014, 1, 1, 12, 3, 0), 4),
+            (datetime64(2014, 1, 1, 12, 4, 0), 2),
+            (datetime64(2014, 1, 1, 12, 5, 0), 3),
+            (datetime64(2014, 1, 1, 12, 6, 0), 4),
+            (datetime64(2014, 1, 1, 12, 7, 0), 10),
+            (datetime64(2014, 1, 1, 12, 9, 0), 2)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 1, 0), 3),
+            (datetime64(2014, 1, 1, 12, 2, 0), 4),
+            (datetime64(2014, 1, 1, 12, 3, 0), 4),
+            (datetime64(2014, 1, 1, 12, 4, 0), 6),
+            (datetime64(2014, 1, 1, 12, 5, 0), 3),
+            (datetime64(2014, 1, 1, 12, 6, 0), 6),
+            (datetime64(2014, 1, 1, 12, 9, 0), 2),
+            (datetime64(2014, 1, 1, 12, 11, 0), 2),
+            (datetime64(2014, 1, 1, 12, 12, 0), 2)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        dtfrom = datetime64(2014, 1, 1, 12, 0, 0)
+        dtto = datetime64(2014, 1, 1, 12, 10, 0)
+
+        # By default we require that 100% of the points overlap,
+        # so this fails
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.aggregated,
+                          [tsc1['return'], tsc2['return']],
+                          from_timestamp=dtfrom,
+                          to_timestamp=dtto,
+                          operations=["aggregate", "mean", [
+                              "metric",
+                              tsc1['return'][0].lookup_key,
+                              tsc2['return'][0].lookup_key,
+                          ]])
+
+        # Retry with 80% and it works
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            from_timestamp=dtfrom, to_timestamp=dtto,
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key,
+            ]],
+            needed_percent_of_overlap=80.0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60, 's'), 3.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60, 's'), 3.0),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60, 's'), 4.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60, 's'), 4.0),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60, 's'), 3.0),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60, 's'), 5.0),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60, 's'), 10.0),
+            (datetime64(2014, 1, 1, 12, 9, 0),
+             numpy.timedelta64(60, 's'), 2.0),
+        ], list(output))
+
+    def test_aggregated_different_archive_overlap_edge_missing1(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 3, 0), 9),
+            (datetime64(2014, 1, 1, 12, 4, 0), 1),
+            (datetime64(2014, 1, 1, 12, 5, 0), 2),
+            (datetime64(2014, 1, 1, 12, 6, 0), 7),
+            (datetime64(2014, 1, 1, 12, 7, 0), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4),
+            (datetime64(2014, 1, 1, 12, 5, 0), 16),
+            (datetime64(2014, 1, 1, 12, 6, 0), 12)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        # By default we require that 100% of the points overlap, but the
+        # last datapoint of the finest granularity is allowed to be
+        # missing
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]])["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60, 's'), 33.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60, 's'), 5.0),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60, 's'), 18.0),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60, 's'), 19.0),
+        ], list(output))
+
+    def test_aggregated_different_archive_overlap_edge_missing2(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 3, 0), 4)],
+                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                        before_truncate_callback=functools.partial(
+                            self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([(datetime64(2014, 1, 1, 11, 0, 0), 4),
+                                     (datetime64(2014, 1, 1, 12, 3, 0), 4)],
+                                    dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                        before_truncate_callback=functools.partial(
+                            self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]])["aggregated"]
+        self.assertEqual([
+            (datetime64(
+                2014, 1, 1, 12, 3, 0
+            ), numpy.timedelta64(60000000000, 'ns'), 4.0),
+        ], list(output))
+
+    def test_processor_with_random_holes_derived_boundaries(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 1, 0), 9),
+            (datetime64(2014, 1, 1, 12, 2, 0), 1),
+            (datetime64(2014, 1, 1, 12, 4, 0), 5),
+            (datetime64(2014, 1, 1, 12, 6, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], needed_percent_of_overlap=50.0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5.5),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 7.0),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 24.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 4.5),
+        ], list(output))
+
+    def test_processor_derived_missing_boundary(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 1, 0), 9),
+            (datetime64(2014, 1, 1, 12, 2, 0), 1),
+            (datetime64(2014, 1, 1, 12, 4, 0), 5),
+            (datetime64(2014, 1, 1, 12, 6, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]],
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            needed_percent_of_overlap=50.0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), 6.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5.5),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 7.0),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 24.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 4.5),
+        ], list(output))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key,
+            ]],
+            to_timestamp=datetime64(2014, 1, 1, 12, 7, 0),
+            needed_percent_of_overlap=50.0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5.5),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 7.0),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 24.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 4.5),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3.0),
+        ], list(output))
+
+    def test_aggregated_some_overlap_with_fill_zero(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', "name": "foo"}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', "name": "bar"}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 3, 0), 9),
+            (datetime64(2014, 1, 1, 12, 4, 0), 1),
+            (datetime64(2014, 1, 1, 12, 5, 0), 2),
+            (datetime64(2014, 1, 1, 12, 6, 0), 7),
+            (datetime64(2014, 1, 1, 12, 7, 0), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4),
+            (datetime64(2014, 1, 1, 12, 5, 0), 16),
+            (datetime64(2014, 1, 1, 12, 6, 0), 12)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], fill=0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 1.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 6.5),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 16.5),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.5),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60000000000, 'ns'), 9.0),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60000000000, 'ns'), 9.5),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.5),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), 1.5),
+        ], list(output))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["-", ["metric"] + tsc1['return'][0].lookup_key,
+                        ["metric"] + tsc2['return'][0].lookup_key
+                        ], fill=0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), -6.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), -2.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), -13),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), -15),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), -3),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60000000000, 'ns'), -14),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60000000000, 'ns'), -5),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3),
+        ], list(output))
+
+    def test_aggregated_some_overlap_with_fill_null(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', 'name': 'foo'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', 'name': 'bar'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 3, 0), 9),
+            (datetime64(2014, 1, 1, 12, 4, 0), 1),
+            (datetime64(2014, 1, 1, 12, 5, 0), 2),
+            (datetime64(2014, 1, 1, 12, 6, 0), 7),
+            (datetime64(2014, 1, 1, 12, 7, 0), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4),
+            (datetime64(2014, 1, 1, 12, 5, 0), 16),
+            (datetime64(2014, 1, 1, 12, 6, 0), 12)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], fill='null')["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), 6.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 13.0),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 16.5),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.5),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60000000000, 'ns'), 9.0),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60000000000, 'ns'), 9.5),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5.0),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3.0),
+        ], list(output))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["-", ["metric"] + tsc1['return'][0].lookup_key,
+                        ["metric"] + tsc2['return'][0].lookup_key
+                        ], fill='null')["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), eq_nan),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), eq_nan),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), eq_nan),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), -15.0),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), -3.0),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(60000000000, 'ns'), -14.0),
+            (datetime64(2014, 1, 1, 12, 6, 0),
+             numpy.timedelta64(60000000000, 'ns'), -5.0),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), eq_nan),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), eq_nan),
+        ], list(output))
+
+    def test_aggregate_no_points_with_fill_zero(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', 'name': 'foo'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean', 'name': 'bar'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 3, 0), 9),
+            (datetime64(2014, 1, 1, 12, 4, 0), 1),
+            (datetime64(2014, 1, 1, 12, 7, 0), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0), 6),
+            (datetime64(2014, 1, 1, 12, 1, 0), 2),
+            (datetime64(2014, 1, 1, 12, 2, 0), 13),
+            (datetime64(2014, 1, 1, 12, 3, 0), 24),
+            (datetime64(2014, 1, 1, 12, 4, 0), 4)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], fill=0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), 1.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), 6.5),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), 16.5),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.5),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), 2.5),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), 1.5),
+        ], list(output))
+
+        output = processor.aggregated([
+            tsc1['return'], tsc2['return']],
+            operations=["-", ["metric"] + tsc1['return'][0].lookup_key,
+                        ["metric"] + tsc2['return'][0].lookup_key
+                        ], fill=0)["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(60000000000, 'ns'), -6.0),
+            (datetime64(2014, 1, 1, 12, 1, 0),
+             numpy.timedelta64(60000000000, 'ns'), -2.0),
+            (datetime64(2014, 1, 1, 12, 2, 0),
+             numpy.timedelta64(60000000000, 'ns'), -13),
+            (datetime64(2014, 1, 1, 12, 3, 0),
+             numpy.timedelta64(60000000000, 'ns'), -15),
+            (datetime64(2014, 1, 1, 12, 4, 0),
+             numpy.timedelta64(60000000000, 'ns'), -3),
+            (datetime64(2014, 1, 1, 12, 7, 0),
+             numpy.timedelta64(60000000000, 'ns'), 5),
+            (datetime64(2014, 1, 1, 12, 8, 0),
+             numpy.timedelta64(60000000000, 'ns'), 3),
+        ], list(output))
+
+    def test_aggregated_nominal(self):
+        tsc1 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsc12 = {'sampling': numpy.timedelta64(300, 's'),
+                 'size': 6, 'agg': 'mean'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc12['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(60, 's'),
+                'size': 10, 'agg': 'mean'}
+        tsc22 = {'sampling': numpy.timedelta64(300, 's'),
+                 'size': 6, 'agg': 'mean'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc22['sampling'])
+
+        def ts1_update(ts):
+            self._resample_and_merge(ts, tsc1)
+            self._resample_and_merge(ts, tsc12)
+
+        def ts2_update(ts):
+            self._resample_and_merge(ts, tsc2)
+            self._resample_and_merge(ts, tsc22)
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 46, 4), 4),
+            (datetime64(2014, 1, 1, 11, 47, 34), 8),
+            (datetime64(2014, 1, 1, 11, 50, 54), 50),
+            (datetime64(2014, 1, 1, 11, 54, 45), 4),
+            (datetime64(2014, 1, 1, 11, 56, 49), 4),
+            (datetime64(2014, 1, 1, 11, 57, 22), 6),
+            (datetime64(2014, 1, 1, 11, 58, 22), 5),
+            (datetime64(2014, 1, 1, 12, 1, 4), 4),
+            (datetime64(2014, 1, 1, 12, 1, 9), 7),
+            (datetime64(2014, 1, 1, 12, 2, 1), 15),
+            (datetime64(2014, 1, 1, 12, 2, 12), 1),
+            (datetime64(2014, 1, 1, 12, 3, 0), 3),
+            (datetime64(2014, 1, 1, 12, 4, 9), 7),
+            (datetime64(2014, 1, 1, 12, 5, 1), 15),
+            (datetime64(2014, 1, 1, 12, 5, 12), 1),
+            (datetime64(2014, 1, 1, 12, 6, 0), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=ts1_update)
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 46, 4), 6),
+            (datetime64(2014, 1, 1, 11, 47, 34), 5),
+            (datetime64(2014, 1, 1, 11, 50, 54), 51),
+            (datetime64(2014, 1, 1, 11, 54, 45), 5),
+            (datetime64(2014, 1, 1, 11, 56, 49), 5),
+            (datetime64(2014, 1, 1, 11, 57, 22), 7),
+            (datetime64(2014, 1, 1, 11, 58, 22), 5),
+            (datetime64(2014, 1, 1, 12, 1, 4), 5),
+            (datetime64(2014, 1, 1, 12, 1, 9), 8),
+            (datetime64(2014, 1, 1, 12, 2, 1), 10),
+            (datetime64(2014, 1, 1, 12, 2, 12), 2),
+            (datetime64(2014, 1, 1, 12, 3, 0), 6),
+            (datetime64(2014, 1, 1, 12, 4, 9), 4),
+            (datetime64(2014, 1, 1, 12, 5, 1), 10),
+            (datetime64(2014, 1, 1, 12, 5, 12), 1),
+            (datetime64(2014, 1, 1, 12, 6, 0), 1)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=ts2_update)
+        output = processor.aggregated(
+            [tsc1['return'], tsc12['return'], tsc2['return'], tsc22['return']],
+            operations=["aggregate", "mean", [
+                "metric",
+                tsc1['return'][0].lookup_key, tsc12['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key, tsc22['return'][0].lookup_key
+            ]])["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 11, 45),
+             numpy.timedelta64(300, 's'), 5.75),
+            (datetime64(2014, 1, 1, 11, 50),
+             numpy.timedelta64(300, 's'), 27.5),
+            (datetime64(2014, 1, 1, 11, 55),
+             numpy.timedelta64(300, 's'), 5.3333333333333339),
+            (datetime64(2014, 1, 1, 12, 0),
+             numpy.timedelta64(300, 's'), 6.0),
+            (datetime64(2014, 1, 1, 12, 5),
+             numpy.timedelta64(300, 's'), 5.1666666666666661),
+            (datetime64(2014, 1, 1, 11, 46),
+             numpy.timedelta64(60, 's'), 5.0),
+            (datetime64(2014, 1, 1, 11, 47),
+             numpy.timedelta64(60, 's'), 6.5),
+            (datetime64(2014, 1, 1, 11, 50),
+             numpy.timedelta64(60, 's'), 50.5),
+            (datetime64(2014, 1, 1, 11, 54),
+             numpy.timedelta64(60, 's'), 4.5),
+            (datetime64(2014, 1, 1, 11, 56),
+             numpy.timedelta64(60, 's'), 4.5),
+            (datetime64(2014, 1, 1, 11, 57),
+             numpy.timedelta64(60, 's'), 6.5),
+            (datetime64(2014, 1, 1, 11, 58),
+             numpy.timedelta64(60, 's'), 5.0),
+            (datetime64(2014, 1, 1, 12, 1),
+             numpy.timedelta64(60, 's'), 6.0),
+            (datetime64(2014, 1, 1, 12, 2),
+             numpy.timedelta64(60, 's'), 7.0),
+            (datetime64(2014, 1, 1, 12, 3),
+             numpy.timedelta64(60, 's'), 4.5),
+            (datetime64(2014, 1, 1, 12, 4),
+             numpy.timedelta64(60, 's'), 5.5),
+            (datetime64(2014, 1, 1, 12, 5),
+             numpy.timedelta64(60, 's'), 6.75),
+            (datetime64(2014, 1, 1, 12, 6),
+             numpy.timedelta64(60, 's'), 2.0),
+        ], list(output))
+
+    def test_aggregated_partial_overlap(self):
+        tsc1 = {'sampling': numpy.timedelta64(1, 's'),
+                'size': 86400, 'agg': 'mean', 'name': 'foo'}
+        tsb1 = carbonara.BoundTimeSerie(block_size=tsc1['sampling'])
+        tsc2 = {'sampling': numpy.timedelta64(1, 's'),
+                'size': 60, 'agg': 'mean', 'name': 'bar'}
+        tsb2 = carbonara.BoundTimeSerie(block_size=tsc2['sampling'])
+
+        tsb1.set_values(numpy.array([
+            (datetime64(2015, 12, 3, 13, 19, 15), 1),
+            (datetime64(2015, 12, 3, 13, 20, 15), 1),
+            (datetime64(2015, 12, 3, 13, 21, 15), 1),
+            (datetime64(2015, 12, 3, 13, 22, 15), 1)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc1))
+
+        tsb2.set_values(numpy.array([
+            (datetime64(2015, 12, 3, 13, 21, 15), 10),
+            (datetime64(2015, 12, 3, 13, 22, 15), 10),
+            (datetime64(2015, 12, 3, 13, 23, 15), 10),
+            (datetime64(2015, 12, 3, 13, 24, 15), 10)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=tsc2))
+
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']],
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]])["aggregated"]
+
+        self.assertEqual([
+            (datetime64(
+                2015, 12, 3, 13, 21, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 22, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+        ], list(output))
+
+        dtfrom = datetime64(2015, 12, 3, 13, 17, 0)
+        dtto = datetime64(2015, 12, 3, 13, 25, 0)
+
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']],
+            from_timestamp=dtfrom, to_timestamp=dtto,
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], needed_percent_of_overlap=0)["aggregated"]
+        self.assertEqual([
+            (datetime64(
+                2015, 12, 3, 13, 19, 15
+            ), numpy.timedelta64(1, 's'), 1.0),
+            (datetime64(
+                2015, 12, 3, 13, 20, 15
+            ), numpy.timedelta64(1, 's'), 1.0),
+            (datetime64(
+                2015, 12, 3, 13, 21, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 22, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 23, 15
+            ), numpy.timedelta64(1, 's'), 10.0),
+            (datetime64(
+                2015, 12, 3, 13, 24, 15
+            ), numpy.timedelta64(1, 's'), 10.0),
+        ], list(output))
+
+        # Check boundaries are set when overlap=0
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']],
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], needed_percent_of_overlap=0)["aggregated"]
+        self.assertEqual([
+            (datetime64(
+                2015, 12, 3, 13, 21, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 22, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+        ], list(output))
+
+        # By default we require that 100% of the points overlap,
+        # so this fails if from or to is set
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.aggregated,
+                          [tsc1['return'], tsc2['return']],
+                          to_timestamp=dtto,
+                          operations=["aggregate", "sum", [
+                              "metric",
+                              tsc1['return'][0].lookup_key,
+                              tsc2['return'][0].lookup_key
+                          ]])
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.aggregated,
+                          [tsc1['return'], tsc2['return']],
+                          from_timestamp=dtfrom,
+                          operations=["aggregate", "sum", [
+                              "metric",
+                              tsc1['return'][0].lookup_key,
+                              tsc2['return'][0].lookup_key
+                          ]])
+        # Retry with a 50% overlap requirement and it works
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']], from_timestamp=dtfrom,
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], needed_percent_of_overlap=50.0)["aggregated"]
+        self.assertEqual([
+            (datetime64(
+                2015, 12, 3, 13, 19, 15
+            ), numpy.timedelta64(1, 's'), 1.0),
+            (datetime64(
+                2015, 12, 3, 13, 20, 15
+            ), numpy.timedelta64(1, 's'), 1.0),
+            (datetime64(
+                2015, 12, 3, 13, 21, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 22, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+        ], list(output))
+
+        output = processor.aggregated(
+            [tsc1['return'], tsc2['return']], to_timestamp=dtto,
+            operations=["aggregate", "sum", [
+                "metric",
+                tsc1['return'][0].lookup_key,
+                tsc2['return'][0].lookup_key
+            ]], needed_percent_of_overlap=50.0)["aggregated"]
+        self.assertEqual([
+            (datetime64(
+                2015, 12, 3, 13, 21, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 22, 15
+            ), numpy.timedelta64(1, 's'), 11.0),
+            (datetime64(
+                2015, 12, 3, 13, 23, 15
+            ), numpy.timedelta64(1, 's'), 10.0),
+            (datetime64(
+                2015, 12, 3, 13, 24, 15
+            ), numpy.timedelta64(1, 's'), 10.0),
+        ], list(output))
+
+
+class CrossMetricAggregated(base.TestCase):
+    def setUp(self):
+        super(CrossMetricAggregated, self).setUp()
+        # A lot of tests want a metric, so create one
+        self.metric, __ = self._create_metric()
+
+    def test_get_measures_empty_metrics_no_overlap(self):
+        references = [
+            processor.MetricReference(indexer.Metric(
+                uuid.uuid4(), self.archive_policies['low']), 'mean'),
+            processor.MetricReference(indexer.Metric(
+                uuid.uuid4(), self.archive_policies['low']), 'mean'),
+        ]
+        self.assertRaises(
+            exceptions.UnAggregableTimeseries,
+            processor.get_measures, self.storage, references,
+            operations=["aggregate", "mean", [
+                "metric", ["whatever", "mean"], ["everwhat", "mean"],
+            ]])
+
+    def test_get_measures_empty_metric_needed_overlap_zero(self):
+        m_id = str(self.metric.id)
+        result = processor.get_measures(
+            self.storage, [processor.MetricReference(self.metric, "mean")],
+            operations=["metric", m_id, "mean"], needed_overlap=0)
+        self.assertEqual({m_id: {"mean": []}}, result)
+
+    def test_get_measures_unknown_aggregation(self):
+        metric2 = indexer.Metric(uuid.uuid4(),
+                                 self.archive_policies['low'])
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.assertRaises(storage.AggregationDoesNotExist,
+                          processor.get_measures,
+                          self.storage,
+                          [processor.MetricReference(self.metric, 'last'),
+                           processor.MetricReference(metric2, 'last')],
+                          operations=["aggregate", "mean", [
+                              "metric",
+                              [str(self.metric.id), "last"],
+                              [str(metric2.id), "last"],
+                          ]])
+
+    def test_get_measures_unknown_granularity(self):
+        metric2 = indexer.Metric(uuid.uuid4(),
+                                 self.archive_policies['low'])
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.get_measures,
+                          self.storage,
+                          [processor.MetricReference(self.metric, "mean"),
+                           processor.MetricReference(metric2, "mean")],
+                          operations=["aggregate", "mean", [
+                              "metric",
+                              [str(self.metric.id), "mean"],
+                              [str(metric2.id), "mean"],
+                          ]],
+                          granularities=[numpy.timedelta64(12345456, 'ms')])
+
+    def test_add_and_get_measures_different_archives(self):
+        metric2 = indexer.Metric(uuid.uuid4(),
+                                 self.archive_policies['no_granularity_match'])
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+
+        self.assertRaises(exceptions.UnAggregableTimeseries,
+                          processor.get_measures,
+                          self.storage,
+                          [processor.MetricReference(self.metric, "mean"),
+                           processor.MetricReference(metric2, "mean")],
+                          operations=["aggregate", "mean", [
+                              "metric",
+                              [str(self.metric.id), "mean"],
+                              [str(metric2.id), "mean"],
+                          ]])
+
+    def test_add_and_get_measures(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 10, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 13, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]])["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 0, 0, 0),
+             numpy.timedelta64(1, 'D'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 39.0),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(5, 'm'), 12.5),
+            (datetime64(2014, 1, 1, 12, 10, 0),
+             numpy.timedelta64(5, 'm'), 24.0)
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "max", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]])["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 0, 0, 0),
+             numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 69),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(5, 'm'), 23),
+            (datetime64(2014, 1, 1, 12, 10, 0),
+             numpy.timedelta64(5, 'm'), 44)
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]],
+            from_timestamp=datetime64(2014, 1, 1, 12, 10, 0))["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1),
+             numpy.timedelta64(1, 'D'), 22.25),
+            (datetime64(2014, 1, 1, 12),
+             numpy.timedelta64(1, 'h'), 22.25),
+            (datetime64(2014, 1, 1, 12, 10, 0),
+             numpy.timedelta64(5, 'm'), 24.0),
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]],
+            to_timestamp=datetime64(2014, 1, 1, 12, 5, 0))["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 0, 0, 0),
+             numpy.timedelta64(1, 'D'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 39.0),
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]],
+            from_timestamp=datetime64(2014, 1, 1, 12, 10, 10),
+            to_timestamp=datetime64(2014, 1, 1, 12, 10, 10))["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1),
+             numpy.timedelta64(1, 'D'), 22.25),
+            (datetime64(2014, 1, 1, 12),
+             numpy.timedelta64(1, 'h'), 22.25),
+            (datetime64(2014, 1, 1, 12, 10),
+             numpy.timedelta64(5, 'm'), 24.0),
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]],
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 1))["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1),
+             numpy.timedelta64(1, 'D'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 22.25),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 39.0),
+        ], values)
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]],
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 1),
+            granularities=[numpy.timedelta64(5, 'm')])["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 39.0),
+        ], values)
+
+    def test_add_and_get_measures_with_holes(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 5, 31), 8),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 42),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 6),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 13, 10), 2),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
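+        # Per-metric 5-minute means are (69, 18, 42) and (9, 4, 2), so
+        # the cross-metric means are 39.0, 11.0 and 22.0; the daily
+        # means (33 and 4.75) average out to 18.875.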
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, 'mean'),
+             processor.MetricReference(metric2, 'mean')],
+            operations=["aggregate", "mean", [
+                "metric",
+                [str(self.metric.id), "mean"],
+                [str(metric2.id), "mean"],
+            ]])["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 0, 0, 0),
+             numpy.timedelta64(1, 'D'), 18.875),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 18.875),
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(5, 'm'), 39.0),
+            (datetime64(2014, 1, 1, 12, 5, 0),
+             numpy.timedelta64(5, 'm'), 11.0),
+            (datetime64(2014, 1, 1, 12, 10, 0),
+             numpy.timedelta64(5, 'm'), 22.0)
+        ], values)
+
+    def test_resample(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
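+        # Resampling the hourly series to one day with "mean" yields a
+        # single point per metric: mean(69, 42, 4, 44) = 39.75 and
+        # mean(9, 2, 4, 4) = 4.75; no cross-metric aggregation happens.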
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["resample", "mean", numpy.timedelta64(1, 'D'),
+             ["metric",
+              [str(self.metric.id), "mean"],
+              [str(metric2.id), "mean"]]],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({
+            str(self.metric.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), 39.75)]
+            },
+            str(metric2.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), 4.75)]
+            }
+        }, values)
+
+    def test_resample_minus_2_on_right(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
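+        # Same resampling as in test_resample, then the scalar 2 is
+        # subtracted on the right: 39.75 - 2 and 4.75 - 2.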
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["-", ["resample", "mean", numpy.timedelta64(1, 'D'),
+                   ["metric",
+                    [str(self.metric.id), "mean"],
+                    [str(metric2.id), "mean"]]], 2],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({
+            str(self.metric.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), 37.75)]
+            },
+            str(metric2.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), 2.75)]
+            }
+        }, values)
+
+    def test_resample_minus_2_on_left(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
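+        # With the scalar on the left the subtraction is reversed:
+        # 2 - 39.75 = -37.75 and 2 - 4.75 = -2.75.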
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["-",
+             2,
+             ["resample", "mean", numpy.timedelta64(1, 'D'),
+              ["metric",
+               [str(self.metric.id), "mean"],
+               [str(metric2.id), "mean"]]]],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({
+            str(self.metric.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), -37.75)]
+            },
+            str(metric2.id): {
+                "mean": [(datetime64(2014, 1, 1, 0, 0, 0),
+                          numpy.timedelta64(1, 'D'), -2.75)]
+            }
+        }, values)
+
+    def test_rolling(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 5, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 10, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 15, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 5, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 10, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 15, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
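+        # A rolling sum over a window of 2 divided by 2 is a rolling
+        # mean: for self.metric, (69 + 42) / 2 = 55.5, (42 + 4) / 2 = 23
+        # and (4 + 44) / 2 = 24; the first point has no full window.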
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["/", ["rolling", "sum", 2,
+                   ["metric", [str(self.metric.id), "mean"],
+                    [str(metric2.id), "mean"]]], 2],
+            granularities=[numpy.timedelta64(5, 'm')])
+
+        self.assertEqual({
+            str(self.metric.id): {
+                "mean": [(datetime64(2014, 1, 1, 12, 5, 0),
+                          numpy.timedelta64(5, 'm'), 55.5),
+                         (datetime64(2014, 1, 1, 12, 10, 0),
+                          numpy.timedelta64(5, 'm'), 23),
+                         (datetime64(2014, 1, 1, 12, 15, 0),
+                          numpy.timedelta64(5, 'm'), 24)]
+            },
+            str(metric2.id): {
+                "mean": [(datetime64(2014, 1, 1, 12, 5, 0),
+                          numpy.timedelta64(5, 'm'), 5.5),
+                         (datetime64(2014, 1, 1, 12, 10, 0),
+                          numpy.timedelta64(5, 'm'), 3),
+                         (datetime64(2014, 1, 1, 12, 15, 0),
+                          numpy.timedelta64(5, 'm'), 4)]
+            }
+        }, values)
+
+    def test_binary_operator_with_two_references(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["*", ["metric", str(self.metric.id), "mean"],
+                  ["metric", str(metric2.id), "mean"]],
+            granularities=[numpy.timedelta64(1, 'h')])["aggregated"]
+
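+        # "*" multiplies the two hourly means pointwise: 69 * 9 = 621,
+        # 42 * 2 = 84, 4 * 4 = 16 and 44 * 4 = 176.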
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 621),
+            (datetime64(2014, 1, 1, 13, 0, 0),
+             numpy.timedelta64(1, 'h'), 84),
+            (datetime64(2014, 1, 1, 14, 0, 0),
+             numpy.timedelta64(1, 'h'), 16),
+            (datetime64(2014, 1, 1, 15, 0, 0),
+             numpy.timedelta64(1, 'h'), 176),
+        ], values)
+
+    def test_binary_operator_ts_on_left(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.trigger_processing()
+
+        values = processor.get_measures(
+            self.storage, [processor.MetricReference(self.metric, "mean")],
+            ["*", ["metric", str(self.metric.id), "mean"], 2],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({str(self.metric.id): {
+            "mean": [
+                (datetime64(2014, 1, 1, 12, 0, 0),
+                 numpy.timedelta64(1, 'h'), 138),
+                (datetime64(2014, 1, 1, 13, 0, 0),
+                 numpy.timedelta64(1, 'h'), 84),
+                (datetime64(2014, 1, 1, 14, 0, 0),
+                 numpy.timedelta64(1, 'h'), 8),
+                (datetime64(2014, 1, 1, 15, 0, 0),
+                 numpy.timedelta64(1, 'h'), 88)]
+        }}, values)
+
+    def test_ternary_operator_clip_min_max_ts_on_left(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.trigger_processing()
+
+        values = processor.get_measures(
+            self.storage, [processor.MetricReference(self.metric, "mean")],
+            ["clip", ["metric", str(self.metric.id), "mean"], 5, 60],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({str(self.metric.id): {
+            "mean": [
+                (datetime64(2014, 1, 1, 12, 0, 0),
+                 numpy.timedelta64(1, 'h'), 60),
+                (datetime64(2014, 1, 1, 13, 0, 0),
+                 numpy.timedelta64(1, 'h'), 42),
+                (datetime64(2014, 1, 1, 14, 0, 0),
+                 numpy.timedelta64(1, 'h'), 5),
+                (datetime64(2014, 1, 1, 15, 0, 0),
+                 numpy.timedelta64(1, 'h'), 44)]
+        }}, values)
+
+    def test_ternary_operator_clip_min_ts_on_left(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.trigger_processing()
+
+        values = processor.get_measures(
+            self.storage, [processor.MetricReference(self.metric, "mean")],
+            ["clip", ["metric", str(self.metric.id), "mean"], 50],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({str(self.metric.id): {
+            "mean": [
+                (datetime64(2014, 1, 1, 12, 0, 0),
+                 numpy.timedelta64(1, 'h'), 69),
+                (datetime64(2014, 1, 1, 13, 0, 0),
+                 numpy.timedelta64(1, 'h'), 50),
+                (datetime64(2014, 1, 1, 14, 0, 0),
+                 numpy.timedelta64(1, 'h'), 50),
+                (datetime64(2014, 1, 1, 15, 0, 0),
+                 numpy.timedelta64(1, 'h'), 50)]
+        }}, values)
+
+    def test_binary_operator_ts_on_right(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.trigger_processing()
+
+        values = processor.get_measures(
+            self.storage, [processor.MetricReference(self.metric, "mean")],
+            ["*", 2, ["metric", str(self.metric.id), "mean"]],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({str(self.metric.id): {
+            "mean": [(datetime64(2014, 1, 1, 12, 0, 0),
+                      numpy.timedelta64(1, 'h'), 138),
+                     (datetime64(2014, 1, 1, 13, 0, 0),
+                      numpy.timedelta64(1, 'h'), 84),
+                     (datetime64(2014, 1, 1, 14, 0, 0),
+                      numpy.timedelta64(1, 'h'), 8),
+                     (datetime64(2014, 1, 1, 15, 0, 0),
+                      numpy.timedelta64(1, 'h'), 88)]
+        }}, values)
+
+    def test_mix(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            [
+                "rolling",
+                "sum",
+                2,
+                ["*", ["metric", str(self.metric.id), "mean"],
+                      ["metric", str(metric2.id), "mean"]],
+            ],
+            granularities=[numpy.timedelta64(1, 'h')])["aggregated"]
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 13, 0, 0),
+             numpy.timedelta64(1, 'h'), 705),
+            (datetime64(2014, 1, 1, 14, 0, 0),
+             numpy.timedelta64(1, 'h'), 100),
+            (datetime64(2014, 1, 1, 15, 0, 0),
+             numpy.timedelta64(1, 'h'), 192),
+        ], values)
+
+    def test_bool(self):
+        metric2, __ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), 2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), 4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
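+        # "gt" yields 1 where m1 * m2 * 100 / 1000 > 10: 62.1 and 17.6
+        # pass, 8.4 and 1.6 do not.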
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            [
+                "gt",
+                [
+                    "/",
+                    [
+                        "*",
+                        ["*", ["metric", str(self.metric.id), "mean"],
+                              ["metric", str(metric2.id), "mean"]],
+                        100,
+                    ],
+                    1000
+                ],
+                10
+            ],
+            granularities=[numpy.timedelta64(1, 'h')])["aggregated"]
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0),
+             numpy.timedelta64(1, 'h'), 1),
+            (datetime64(2014, 1, 1, 13, 0, 0),
+             numpy.timedelta64(1, 'h'), 0),
+            (datetime64(2014, 1, 1, 14, 0, 0),
+             numpy.timedelta64(1, 'h'), 0),
+            (datetime64(2014, 1, 1, 15, 0, 0),
+             numpy.timedelta64(1, 'h'), 1),
+        ], values)
+
+    def test_unary_operator(self):
+        metric2, _ = self._create_metric()
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), -69),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), -4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 45), 44),
+        ])
+        self.incoming.add_measures(metric2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), -9),
+            incoming.Measure(datetime64(2014, 1, 1, 13, 1, 41), -2),
+            incoming.Measure(datetime64(2014, 1, 1, 14, 2, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 15, 3, 10), -4),
+        ])
+        self.trigger_processing([self.metric, metric2])
+
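+        # "abs" is applied per metric, so each metric keeps its own
+        # series with all values made non-negative.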
+        values = processor.get_measures(
+            self.storage,
+            [processor.MetricReference(self.metric, "mean"),
+             processor.MetricReference(metric2, "mean")],
+            ["abs", ["metric", [str(self.metric.id), "mean"],
+                     [str(metric2.id), "mean"]]],
+            granularities=[numpy.timedelta64(1, 'h')])
+
+        self.assertEqual({
+            str(self.metric.id): {
+                "mean": [(datetime64(2014, 1, 1, 12, 0, 0),
+                          numpy.timedelta64(1, 'h'), 69),
+                         (datetime64(2014, 1, 1, 13, 0, 0),
+                          numpy.timedelta64(1, 'h'), 42),
+                         (datetime64(2014, 1, 1, 14, 0, 0),
+                          numpy.timedelta64(1, 'h'), 4),
+                         (datetime64(2014, 1, 1, 15, 0, 0),
+                          numpy.timedelta64(1, 'h'), 44)]},
+            str(metric2.id): {
+                "mean": [(datetime64(2014, 1, 1, 12, 0, 0),
+                          numpy.timedelta64(1, 'h'), 9),
+                         (datetime64(2014, 1, 1, 13, 0, 0),
+                          numpy.timedelta64(1, 'h'), 2),
+                         (datetime64(2014, 1, 1, 14, 0, 0),
+                          numpy.timedelta64(1, 'h'), 4),
+                         (datetime64(2014, 1, 1, 15, 0, 0),
+                          numpy.timedelta64(1, 'h'), 4),
+                         ]}
+        }, values)
diff --git a/gnocchi/tests/test_amqp1d.py b/gnocchi/tests/test_amqp1d.py
new file mode 100644
index 0000000000000000000000000000000000000000..527e3984463e7fcf0ca6727931a522756e907a1e
--- /dev/null
+++ b/gnocchi/tests/test_amqp1d.py
@@ -0,0 +1,100 @@
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import json
+import uuid
+
+import mock
+import numpy
+
+from gnocchi import amqp1d
+from gnocchi.tests import base as tests_base
+from gnocchi.tests.test_utils import get_measures_list
+from gnocchi import utils
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestAmqp1d(tests_base.TestCase):
+
+    AMQP1D_USER_ID = str(uuid.uuid4())
+    AMQP1D_PROJECT_ID = str(uuid.uuid4())
+
+    def setUp(self):
+        super(TestAmqp1d, self).setUp()
+        self.conf.set_override("resource_type",
+                               "collectd_amqp1d", "amqp1d")
+        self.conf.set_override("creator",
+                               self.AMQP1D_USER_ID, "amqp1d")
+
+        self.index.create_archive_policy_rule("rule-amqp", "*", "medium")
+
+        self.server = amqp1d.AMQP1Server(self.conf)
+        self.server.processor.incoming = self.incoming
+        self.server.processor.indexer = self.index
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_amqp1d(self, utcnow):
+        utcnow.return_value = utils.datetime_utc(2017, 1, 10, 13, 58, 36)
+
+        metrics = json.dumps([
+            {u'dstypes': [u'gauge'], u'plugin': u'memory', u'dsnames':
+             [u'value'], u'interval': 10.0, u'host': u'www.gnocchi.test.com',
+             u'values': [9], u'time': 1506712460.824, u'plugin_instance':
+             u'', u'type_instance': u'free', u'type': u'memory'},
+            {u'dstypes': [u'derive', u'derive'], u'plugin': u'interface',
+             u'dsnames': [u'rx', u'tx'], u'interval': 10.0, u'host':
+             u'www.gnocchi.test.com', u'values': [2, 5], u'time':
+             1506712460.824, u'plugin_instance': u'ens2f1', u'type_instance':
+             u'', u'type': u'if_errors'}
+        ])
+
+        self.server.on_message(mock.Mock(message=mock.Mock(body=metrics)))
+        self.server.processor.flush()
+
+        resources = self.index.list_resources(
+            self.conf.amqp1d.resource_type,
+            attribute_filter={"=": {"host": "www.gnocchi.test.com"}}
+        )
+        self.assertEqual(1, len(resources))
+        self.assertEqual("www.gnocchi.test.com",
+                         resources[0].host)
+
+        metrics = self.index.list_metrics(attribute_filter={
+            '=': {"resource_id": resources[0].id}
+        })
+        self.assertEqual(3, len(metrics))
+
+        self.trigger_processing(metrics)
+
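+        # The metric names appear to follow the collectd convention
+        # plugin[-plugin_instance]@type[-type_instance|-dsname]; the
+        # two-datasource "if_errors" type yields -rx and -tx metrics.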
+        expected_measures = {
+            "memory@memory-free": [
+                (datetime64(2017, 1, 10, 13, 58), numpy.timedelta64(1, 'm'), 9)
+            ],
+            "interface-ens2f1@if_errors-rx": [
+                (datetime64(2017, 1, 10, 13, 58), numpy.timedelta64(1, 'm'), 2)
+            ],
+            "interface-ens2f1@if_errors-tx": [
+                (datetime64(2017, 1, 10, 13, 58), numpy.timedelta64(1, 'm'), 5)
+            ]
+        }
+        for metric in metrics:
+            aggregation = metric.archive_policy.get_aggregation(
+                "mean", numpy.timedelta64(1, 'm'))
+            results = self.storage.get_aggregated_measures(
+                {metric: [aggregation]})[metric]
+            measures = get_measures_list(results)
+            self.assertEqual(expected_measures[metric.name],
+                             measures["mean"])
diff --git a/gnocchi/tests/test_archive_policy.py b/gnocchi/tests/test_archive_policy.py
new file mode 100644
index 0000000000000000000000000000000000000000..1fc95c88dd9e9110730737b928edc42c7f4076f7
--- /dev/null
+++ b/gnocchi/tests/test_archive_policy.py
@@ -0,0 +1,115 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import logging
+
+import numpy
+
+from gnocchi import archive_policy
+from gnocchi import service
+from gnocchi.tests import base
+
+
+class TestArchivePolicy(base.BaseTestCase):
+
+    def test_several_equal_granularities(self):
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicy,
+                          "foobar",
+                          0,
+                          [(10, 12), (20, 30), (20, 30)],
+                          ["*"])
+
+    def test_aggregation_methods(self):
+        conf = service.prepare_service([],
+                                       default_config_files=[],
+                                       logging_level=logging.DEBUG,
+                                       skip_log_opts=True)
+
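+        # "*" selects every valid aggregation method, "-x" removes a
+        # method and "+x" adds one; without "*", the base set is the
+        # configured default_aggregation_methods.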
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["*"])
+        self.assertEqual(
+            archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS,
+            ap.aggregation_methods)
+
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["last"])
+        self.assertEqual(
+            set(["last"]),
+            ap.aggregation_methods)
+
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["*", "-mean"])
+        self.assertEqual(
+            (archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS
+             - set(["mean"])),
+            ap.aggregation_methods)
+
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["-mean", "-last"])
+        self.assertEqual(
+            (set(conf.archive_policy.default_aggregation_methods)
+             - set(["mean", "last"])),
+            ap.aggregation_methods)
+
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["+12pct"])
+        self.assertEqual(
+            (set(conf.archive_policy.default_aggregation_methods)
+             .union(set(["12pct"]))),
+            ap.aggregation_methods)
+
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [],
+                                          ["+rate:last"])
+        self.assertEqual(
+            (set(conf.archive_policy.default_aggregation_methods)
+             .union(set(["rate:last"]))),
+            ap.aggregation_methods)
+
+    def test_max_block_size(self):
+        ap = archive_policy.ArchivePolicy("foobar",
+                                          0,
+                                          [(20, 60), (10, 300), (10, 5)],
+                                          ["-mean", "-last"])
+        self.assertEqual(ap.max_block_size, numpy.timedelta64(300, 's'))
+
+
+class TestArchivePolicyItem(base.BaseTestCase):
+    def test_zero_size(self):
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicyItem,
+                          0, 1)
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicyItem,
+                          1, 0)
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicyItem,
+                          -1, 1)
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicyItem,
+                          1, -1)
+        self.assertRaises(ValueError,
+                          archive_policy.ArchivePolicyItem,
+                          2, None, 1)
diff --git a/gnocchi/tests/test_bin.py b/gnocchi/tests/test_bin.py
new file mode 100644
index 0000000000000000000000000000000000000000..e802f8f01dfd7adb2ba84ce96a8a24fa874ed6aa
--- /dev/null
+++ b/gnocchi/tests/test_bin.py
@@ -0,0 +1,26 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import os
+import subprocess
+
+from gnocchi.tests import base
+
+
+class BinTestCase(base.BaseTestCase):
+    def test_gnocchi_config_generator_run(self):
+        with open(os.devnull, 'w') as f:
+            subp = subprocess.Popen(['gnocchi-config-generator'], stdout=f)
+        self.assertEqual(0, subp.wait())
diff --git a/gnocchi/tests/test_calendar.py b/gnocchi/tests/test_calendar.py
new file mode 100644
index 0000000000000000000000000000000000000000..297468dc29059295cf18811e4a98c1ad31cd1409
--- /dev/null
+++ b/gnocchi/tests/test_calendar.py
@@ -0,0 +1,110 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import numpy
+from numpy.testing import assert_equal
+
+from gnocchi import calendar
+from gnocchi.tests import base as tests_base
+
+
+class TestCalendar(tests_base.TestCase):
+
+    def test_get_year(self):
+        dates = numpy.array(['2018-01-01', '2019-01-01', '2020-01-01'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array(['2018', '2019', '2020'],
+                                 dtype='datetime64[Y]'),
+                     calendar.year(dates))
+
+    def test_half_of_year(self):
+        dates = numpy.arange('2018-01-01', '2018-12-31', dtype='datetime64[D]')
+        assert_equal(numpy.array([1] * 181 + [2] * 183),
+                     calendar.half_of_year(dates))
+
+    def test_half_and_year(self):
+        dates = numpy.arange('2018-01-01', '2018-12-31', dtype='datetime64[D]')
+        assert_equal(numpy.array(['2018-01'] * 181 + ['2018-07'] * 183,
+                                 dtype='datetime64[M]'),
+                     calendar.half_and_year(dates))
+
+    def test_quarter_of_year(self):
+        dates = numpy.arange('2018-01-01', '2018-12-31', dtype='datetime64[D]')
+        assert_equal(numpy.array([1] * 90 + [2] * 91 + [3] * 92 + [4] * 91),
+                     calendar.quarter_of_year(dates))
+
+    def test_quarter_and_year(self):
+        dates = numpy.arange('2018-01-01', '2018-12-31', dtype='datetime64[D]')
+        assert_equal(numpy.array(['2018-01'] * 90 + ['2018-04'] * 91 +
+                                 ['2018-07'] * 92 + ['2018-10'] * 91,
+                                 dtype='datetime64[M]'),
+                     calendar.quarter_and_year(dates))
+
+    def test_get_month_and_year(self):
+        dates = numpy.array(['2018-01-01', '2019-03-01', '2020-05-01'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array(['2018-01', '2019-03', '2020-05'],
+                                 dtype='datetime64[M]'),
+                     calendar.month_and_year(dates))
+
+    def test_day_of_week(self):
+        dates = numpy.arange('2010-01-01', '2020-12-31', dtype='datetime64[D]')
+        expected = numpy.array([i.isocalendar()[2] for i in
+                                dates.astype('datetime64[ms]').astype(object)])
+        # isocalendar sets Sunday to 7; we set it to 0.
+        expected[expected == 7] = 0
+        assert_equal(expected, calendar.day_of_week(dates))
+
+    def test_day_of_month(self):
+        dates = numpy.array(['2017-12-29', '2017-12-30', '2017-12-31',
+                             '2018-01-01', '2018-01-02', '2018-01-03'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array([29, 30, 31, 1, 2, 3]),
+                     calendar.day_of_month(dates))
+
+    def test_day_of_year(self):
+        dates = numpy.array(['2017-12-29', '2017-12-30', '2017-12-31',
+                             '2018-01-01', '2018-01-02', '2018-01-03'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array([362, 363, 364, 0, 1, 2]),
+                     calendar.day_of_year(dates))
+        dates = numpy.array(['2016-12-29', '2016-12-30', '2016-12-31'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array([363, 364, 365]),
+                     calendar.day_of_year(dates))
+
+    def test_iso_week_of_year(self):
+        dates = numpy.arange('2010-01-01', '2020-12-31', dtype='datetime64[D]')
+        expected = numpy.array([i.isocalendar()[1] for i in
+                                dates.astype('datetime64[ms]').astype(object)])
+        assert_equal(expected, calendar.iso_week_of_year(dates))
+
+    def test_week_and_year(self):
+        dates = numpy.array(['2017-12-29', '2017-12-30', '2017-12-31',
+                             '2018-01-01', '2018-01-02', '2018-01-03'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array(['2017-12-24', '2017-12-24', '2017-12-31',
+                                  '2017-12-31', '2017-12-31', '2017-12-31'],
+                                 dtype='datetime64[D]'),
+                     calendar.week_and_year(dates))
+        dates = numpy.array(['2016-02-27', '2016-02-28', '2016-02-29'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array(['2016-02-21', '2016-02-28', '2016-02-28'],
+                                 dtype='datetime64[D]'),
+                     calendar.week_and_year(dates))
+
+    def test_month_of_year(self):
+        dates = numpy.array(['2018-01-01', '2019-03-01', '2020-05-01'],
+                            dtype='datetime64[ns]')
+        assert_equal(numpy.array([1, 3, 5]),
+                     calendar.month_of_year(dates))
diff --git a/gnocchi/tests/test_carbonara.py b/gnocchi/tests/test_carbonara.py
new file mode 100644
index 0000000000000000000000000000000000000000..680140fd7d4e86f81d094405fcd780fc9ba4c381
--- /dev/null
+++ b/gnocchi/tests/test_carbonara.py
@@ -0,0 +1,915 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2016 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import functools
+import math
+import operator
+
+import fixtures
+import iso8601
+import numpy
+import six
+
+from gnocchi import carbonara
+from gnocchi.tests import base
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestBoundTimeSerie(base.BaseTestCase):
+    def test_benchmark(self):
+        self.useFixture(fixtures.Timeout(300, gentle=True))
+        carbonara.BoundTimeSerie.benchmark()
+
+    @staticmethod
+    def test_base():
+        carbonara.BoundTimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6])
+
+    def test_block_size(self):
+        ts = carbonara.BoundTimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 5),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [5, 6],
+            block_size=numpy.timedelta64(5, 's'))
+        self.assertEqual(2, len(ts))
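+        # Adding points in a new 5-second block truncates the previous
+        # block away, so only the two new points remain.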
+        ts.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 10), 3),
+                                   (datetime64(2014, 1, 1, 12, 0, 11), 4)],
+                                  dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
+        self.assertEqual(2, len(ts))
+
+    def test_block_size_back_window(self):
+        ts = carbonara.BoundTimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6],
+            block_size=numpy.timedelta64(5, 's'),
+            back_window=1)
+        self.assertEqual(3, len(ts))
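+        # back_window=1 keeps one extra block, so 12:00:09 survives
+        # alongside the two new points, leaving three in total.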
+        ts.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 10), 3),
+                                   (datetime64(2014, 1, 1, 12, 0, 11), 4)],
+                                  dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
+        self.assertEqual(3, len(ts))
+
+    def test_block_size_unordered(self):
+        ts = carbonara.BoundTimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 5),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [5, 23],
+            block_size=numpy.timedelta64(5, 's'))
+        self.assertEqual(2, len(ts))
+        ts.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 11), 3),
+                                   (datetime64(2014, 1, 1, 12, 0, 10), 4)],
+                                  dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
+        self.assertEqual(2, len(ts))
+
+    def test_duplicate_timestamps(self):
+        ts = carbonara.BoundTimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [10, 23])
+        self.assertEqual(2, len(ts))
+        self.assertEqual(10.0, ts[0][1])
+        self.assertEqual(23.0, ts[1][1])
+
+        ts.set_values(numpy.array([(datetime64(2014, 1, 1, 13, 0, 10), 3),
+                                   (datetime64(2014, 1, 1, 13, 0, 11), 9),
+                                   (datetime64(2014, 1, 1, 13, 0, 11), 8),
+                                   (datetime64(2014, 1, 1, 13, 0, 11), 7),
+                                   (datetime64(2014, 1, 1, 13, 0, 11), 4)],
+                                  dtype=carbonara.TIMESERIES_ARRAY_DTYPE))
+        self.assertEqual(4, len(ts))
+        self.assertEqual(10.0, ts[0][1])
+        self.assertEqual(23.0, ts[1][1])
+        self.assertEqual(3.0, ts[2][1])
+        self.assertEqual(9.0, ts[3][1])
+
+
+class TestAggregatedTimeSerie(base.BaseTestCase):
+    def test_benchmark(self):
+        self.useFixture(fixtures.Timeout(300, gentle=True))
+        carbonara.AggregatedTimeSerie.benchmark()
+
+    def test_fetch_basic(self):
+        ts = carbonara.AggregatedTimeSerie.from_data(
+            timestamps=[datetime64(2014, 1, 1, 12, 0, 0),
+                        datetime64(2014, 1, 1, 12, 0, 4),
+                        datetime64(2014, 1, 1, 12, 0, 9)],
+            values=[3, 5, 6],
+            aggregation=carbonara.Aggregation(
+                "mean", numpy.timedelta64(1, 's'), None))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12), 3),
+             (datetime64(2014, 1, 1, 12, 0, 4), 5),
+             (datetime64(2014, 1, 1, 12, 0, 9), 6)],
+            list(ts.fetch()))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12, 0, 4), 5),
+             (datetime64(2014, 1, 1, 12, 0, 9), 6)],
+            list(ts.fetch(
+                from_timestamp=datetime64(2014, 1, 1, 12, 0, 4))))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12, 0, 4), 5),
+             (datetime64(2014, 1, 1, 12, 0, 9), 6)],
+            list(ts.fetch(
+                from_timestamp=numpy.datetime64(iso8601.parse_date(
+                    "2014-01-01 12:00:04")))))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12, 0, 4), 5),
+             (datetime64(2014, 1, 1, 12, 0, 9), 6)],
+            list(ts.fetch(
+                from_timestamp=numpy.datetime64(iso8601.parse_date(
+                    "2014-01-01 13:00:04+01:00")))))
+
+    def test_before_epoch(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(1950, 1, 1, 12),
+             datetime64(2014, 1, 1, 12),
+             datetime64(2014, 1, 1, 12)],
+            [3, 5, 6])
+
+        self.assertRaises(carbonara.BeforeEpochError,
+                          ts.group_serie, 60)
+
+    @staticmethod
+    def _resample(ts, sampling, agg, derived=False):
+        """Resample a raw serie into an AggregatedTimeSerie.
+
+        Optionally derive it (compute deltas between consecutive points)
+        before aggregating.
+        """
+        aggregation = carbonara.Aggregation(agg, sampling, None)
+        grouped = ts.group_serie(sampling)
+        if derived:
+            grouped = grouped.derived()
+        return carbonara.AggregatedTimeSerie.from_grouped_serie(
+            grouped, aggregation)
+
+    def test_derived_mean(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime.datetime(2014, 1, 1, 12, 0, 0),
+             datetime.datetime(2014, 1, 1, 12, 0, 4),
+             datetime.datetime(2014, 1, 1, 12, 1, 2),
+             datetime.datetime(2014, 1, 1, 12, 1, 14),
+             datetime.datetime(2014, 1, 1, 12, 1, 24),
+             datetime.datetime(2014, 1, 1, 12, 2, 4),
+             datetime.datetime(2014, 1, 1, 12, 2, 35),
+             datetime.datetime(2014, 1, 1, 12, 2, 42),
+             datetime.datetime(2014, 1, 1, 12, 3, 2),
+             datetime.datetime(2014, 1, 1, 12, 3, 22),  # Counter reset
+             datetime.datetime(2014, 1, 1, 12, 3, 42),
+             datetime.datetime(2014, 1, 1, 12, 4, 9)],
+            [50, 55, 65, 66, 70, 83, 92, 103, 105, 5, 7, 23])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'mean',
+                            derived=True)
+
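+        # derived() replaces each point with the delta from its predecessor,
+        # timestamped at the later point; the counter reset at 12:03:22
+        # yields a -100 delta, hence the -32 mean for the 12:03 group.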
+        self.assertEqual(5, len(ts))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12, 0, 0), 5),
+             (datetime64(2014, 1, 1, 12, 1, 0), 5),
+             (datetime64(2014, 1, 1, 12, 2, 0), 11),
+             (datetime64(2014, 1, 1, 12, 3, 0), -32),
+             (datetime64(2014, 1, 1, 12, 4, 0), 16)],
+            list(ts.fetch(
+                from_timestamp=datetime64(2014, 1, 1, 12))))
+
+    def test_derived_hole(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime.datetime(2014, 1, 1, 12, 0, 0),
+             datetime.datetime(2014, 1, 1, 12, 0, 4),
+             datetime.datetime(2014, 1, 1, 12, 1, 2),
+             datetime.datetime(2014, 1, 1, 12, 1, 14),
+             datetime.datetime(2014, 1, 1, 12, 1, 24),
+             datetime.datetime(2014, 1, 1, 12, 3, 2),
+             datetime.datetime(2014, 1, 1, 12, 3, 22),
+             datetime.datetime(2014, 1, 1, 12, 3, 42),
+             datetime.datetime(2014, 1, 1, 12, 4, 9)],
+            [50, 55, 65, 66, 70, 105, 108, 200, 202])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'last',
+                            derived=True)
+
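+        # No delta is timestamped within the 12:02 minute, so that group is
+        # absent; 'last' keeps the final delta of each remaining minute.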
+        self.assertEqual(4, len(ts))
+        self.assertEqual(
+            [(datetime64(2014, 1, 1, 12, 0, 0), 5),
+             (datetime64(2014, 1, 1, 12, 1, 0), 4),
+             (datetime64(2014, 1, 1, 12, 3, 0), 92),
+             (datetime64(2014, 1, 1, 12, 4, 0), 2)],
+            list(ts.fetch(
+                from_timestamp=datetime64(2014, 1, 1, 12))))
+
+    def test_74_percentile_serialized(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), '74pct')
+
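+        # Linear interpolation at index (3 - 1) * 0.74 = 1.48 of the sorted
+        # values [3, 5, 6]: 5 + 0.48 * (6 - 5) = 5.48.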
+        self.assertEqual(1, len(ts))
+        self.assertEqual(5.48, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+
+        # Serialize and unserialize
+        key = ts.get_split_key()
+        o, s = ts.serialize(key)
+        saved_ts = carbonara.AggregatedTimeSerie.unserialize(
+            s, key, ts.aggregation)
+
+        self.assertEqual(ts.aggregation, saved_ts.aggregation)
+
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), '74pct')
+        saved_ts.merge(ts)
+
+        self.assertEqual(1, len(ts))
+        self.assertEqual(5.48, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+
+    def test_95_percentile(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), '95pct')
+
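+        # Linear interpolation at index (3 - 1) * 0.95 = 1.9 of [3, 5, 6]:
+        # 5 + 0.9 * (6 - 5) = 5.9 (modulo float rounding).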
+        self.assertEqual(1, len(ts))
+        self.assertEqual(5.9000000000000004,
+                         ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+
+    def _do_test_aggregation(self, name, v1, v2, v3):
+        # NOTE(gordc): the test data must include a group with an odd number
+        # of points to properly test the 50pct case.
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 10),
+             datetime64(2014, 1, 1, 12, 0, 20),
+             datetime64(2014, 1, 1, 12, 0, 30),
+             datetime64(2014, 1, 1, 12, 0, 40),
+             datetime64(2014, 1, 1, 12, 1, 0),
+             datetime64(2014, 1, 1, 12, 1, 10),
+             datetime64(2014, 1, 1, 12, 1, 20),
+             datetime64(2014, 1, 1, 12, 1, 30),
+             datetime64(2014, 1, 1, 12, 1, 40),
+             datetime64(2014, 1, 1, 12, 1, 50),
+             datetime64(2014, 1, 1, 12, 2, 0),
+             datetime64(2014, 1, 1, 12, 2, 10)],
+            [3, 5, 2, 3, 5, 8, 11, 22, 10, 42, 9, 4, 2])
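+        # The 60 s grouping yields three groups: [3, 5, 2, 3, 5],
+        # [8, 11, 22, 10, 42, 9] and [4, 2].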
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), name)
+
+        self.assertEqual(3, len(ts))
+        self.assertEqual(v1, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+        self.assertEqual(v2, ts[datetime64(2014, 1, 1, 12, 1, 0)][1])
+        self.assertEqual(v3, ts[datetime64(2014, 1, 1, 12, 2, 0)][1])
+
+    def test_aggregation_first(self):
+        self._do_test_aggregation('first', 3, 8, 4)
+
+    def test_aggregation_last(self):
+        self._do_test_aggregation('last', 5, 9, 2)
+
+    def test_aggregation_count(self):
+        self._do_test_aggregation('count', 5, 6, 2)
+
+    def test_aggregation_sum(self):
+        self._do_test_aggregation('sum', 18, 102, 6)
+
+    def test_aggregation_mean(self):
+        self._do_test_aggregation('mean', 3.6, 17, 3)
+
+    def test_aggregation_median(self):
+        self._do_test_aggregation('median', 3.0, 10.5, 3)
+
+    def test_aggregation_50pct(self):
+        self._do_test_aggregation('50pct', 3.0, 10.5, 3)
+
+    def test_aggregation_56pct(self):
+        self._do_test_aggregation('56pct', 3.4800000000000004,
+                                  10.8, 3.120000000000001)
+
+    def test_aggregation_min(self):
+        self._do_test_aggregation('min', 2, 8, 2)
+
+    def test_aggregation_max(self):
+        self._do_test_aggregation('max', 5, 42, 4)
+
+    def test_aggregation_std(self):
+        self._do_test_aggregation('std', 1.3416407864998738,
+                                  13.266499161421599, 1.4142135623730951)
+
+    def test_aggregation_std_with_unique(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0)], [3])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'std')
+        self.assertEqual(0, len(ts), ts.values)
+
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9),
+             datetime64(2014, 1, 1, 12, 1, 6)],
+            [3, 6, 5, 9])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), "std")
+
+        self.assertEqual(1, len(ts))
+        self.assertEqual(1.5275252316519465,
+                         ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+
+    def test_different_length_in_timestamps_and_data(self):
+        self.assertRaises(
+            ValueError,
+            carbonara.AggregatedTimeSerie.from_data,
+            carbonara.Aggregation('mean', numpy.timedelta64(3, 's'), None),
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5])
+
+    def test_truncate(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 6])
+        ts = self._resample(ts, numpy.timedelta64(1, 's'), 'mean')
+
+        ts.truncate(datetime64(2014, 1, 1, 12, 0, 0))
+
+        self.assertEqual(2, len(ts))
+        self.assertEqual(5, ts[0][1])
+        self.assertEqual(6, ts[1][1])
+
+    def test_down_sampling(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9)],
+            [3, 5, 7])
+        ts = self._resample(ts, numpy.timedelta64(300, 's'), 'mean')
+
+        self.assertEqual(1, len(ts))
+        self.assertEqual(5, ts[datetime64(2014, 1, 1, 12, 0, 0)][1])
+
+    def test_down_sampling_and_truncate(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 1, 4),
+             datetime64(2014, 1, 1, 12, 1, 9),
+             datetime64(2014, 1, 1, 12, 2, 12)],
+            [3, 5, 7, 1])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'mean')
+
+        ts.truncate(datetime64(2014, 1, 1, 12, 0, 59))
+
+        self.assertEqual(2, len(ts))
+        self.assertEqual(6, ts[datetime64(2014, 1, 1, 12, 1, 0)][1])
+        self.assertEqual(1, ts[datetime64(2014, 1, 1, 12, 2, 0)][1])
+
+    def test_down_sampling_and_truncate_and_method_max(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 1, 4),
+             datetime64(2014, 1, 1, 12, 1, 9),
+             datetime64(2014, 1, 1, 12, 2, 12)],
+            [3, 5, 70, 1])
+        ts = self._resample(ts, numpy.timedelta64(60, 's'), 'max')
+
+        ts.truncate(datetime64(2014, 1, 1, 12, 0, 59))
+
+        self.assertEqual(2, len(ts))
+        self.assertEqual(70, ts[datetime64(2014, 1, 1, 12, 1, 0)][1])
+        self.assertEqual(1, ts[datetime64(2014, 1, 1, 12, 2, 0)][1])
+
+    @staticmethod
+    def _resample_and_merge(ts, agg_dict):
+        """Helper method that mimics _compute_splits_operations workflow."""
+        grouped = ts.group_serie(agg_dict['sampling'])
+        existing = agg_dict.get('return')
+        agg_dict['return'] = carbonara.AggregatedTimeSerie.from_grouped_serie(
+            grouped, carbonara.Aggregation(
+                agg_dict['agg'], agg_dict['sampling'], None))
+        if existing:
+            existing.merge(agg_dict['return'])
+            agg_dict['return'] = existing
+
+    def test_fetch(self):
+        ts = {'sampling': numpy.timedelta64(60, 's'),
+              'size': 10, 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 46, 4), 4),
+            (datetime64(2014, 1, 1, 11, 47, 34), 8),
+            (datetime64(2014, 1, 1, 11, 50, 54), 50),
+            (datetime64(2014, 1, 1, 11, 54, 45), 4),
+            (datetime64(2014, 1, 1, 11, 56, 49), 4),
+            (datetime64(2014, 1, 1, 11, 57, 22), 6),
+            (datetime64(2014, 1, 1, 11, 58, 22), 5),
+            (datetime64(2014, 1, 1, 12, 1, 4), 4),
+            (datetime64(2014, 1, 1, 12, 1, 9), 7),
+            (datetime64(2014, 1, 1, 12, 2, 1), 15),
+            (datetime64(2014, 1, 1, 12, 2, 12), 1),
+            (datetime64(2014, 1, 1, 12, 3, 0), 3),
+            (datetime64(2014, 1, 1, 12, 4, 9), 7),
+            (datetime64(2014, 1, 1, 12, 5, 1), 15),
+            (datetime64(2014, 1, 1, 12, 5, 12), 1),
+            (datetime64(2014, 1, 1, 12, 6, 0, 2), 3)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 6), 5)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
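+        # For instance the 12:01 bucket holds 4 and 7, hence a mean of 5.5;
+        # the second set_values adds 5 to the 12:06 bucket already holding 3,
+        # bringing its mean to 4.0.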
+        self.assertEqual([
+            (numpy.datetime64('2014-01-01T11:46:00.000000000'), 4.0),
+            (numpy.datetime64('2014-01-01T11:47:00.000000000'), 8.0),
+            (numpy.datetime64('2014-01-01T11:50:00.000000000'), 50.0),
+            (datetime64(2014, 1, 1, 11, 54), 4.0),
+            (datetime64(2014, 1, 1, 11, 56), 4.0),
+            (datetime64(2014, 1, 1, 11, 57), 6.0),
+            (datetime64(2014, 1, 1, 11, 58), 5.0),
+            (datetime64(2014, 1, 1, 12, 1), 5.5),
+            (datetime64(2014, 1, 1, 12, 2), 8.0),
+            (datetime64(2014, 1, 1, 12, 3), 3.0),
+            (datetime64(2014, 1, 1, 12, 4), 7.0),
+            (datetime64(2014, 1, 1, 12, 5), 8.0),
+            (datetime64(2014, 1, 1, 12, 6), 4.0)
+        ], list(ts['return'].fetch()))
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1), 5.5),
+            (datetime64(2014, 1, 1, 12, 2), 8.0),
+            (datetime64(2014, 1, 1, 12, 3), 3.0),
+            (datetime64(2014, 1, 1, 12, 4), 7.0),
+            (datetime64(2014, 1, 1, 12, 5), 8.0),
+            (datetime64(2014, 1, 1, 12, 6), 4.0)
+        ], list(ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))))
+
+    def test_fetch_agg_pct(self):
+        ts = {'sampling': numpy.timedelta64(1, 's'),
+              'size': 3600 * 24, 'agg': '90pct'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 0), 3),
+                                    (datetime64(2014, 1, 1, 12, 0, 0, 123), 4),
+                                    (datetime64(2014, 1, 1, 12, 0, 2), 4)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
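+        # The 12:00:00 bucket holds [3, 4]: 90pct = 3 + 0.9 * (4 - 3) = 3.9.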
+        result = ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))
+        reference = [(datetime64(2014, 1, 1, 12, 0, 0), 3.9),
+                     (datetime64(2014, 1, 1, 12, 0, 2), 4)]
+
+        self.assertEqual(len(reference), len(list(result)))
+
+        for ref, res in zip(reference, result):
+            self.assertEqual(ref[0], res[0])
+            # Rounding \o/
+            self.assertAlmostEqual(ref[1], res[1])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 2, 113), 110)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
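+        # The 12:00:02 bucket now holds [4, 110]:
+        # 90pct = 4 + 0.9 * (110 - 4) = 99.4.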
+        result = ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))
+        reference = [(datetime64(2014, 1, 1, 12, 0, 0), 3.9),
+                     (datetime64(2014, 1, 1, 12, 0, 2), 99.4)]
+
+        self.assertEqual(len(reference), len(list(result)))
+
+        for ref, res in zip(reference, result):
+            self.assertEqual(ref[0], res[0])
+            # Rounding \o/
+            self.assertAlmostEqual(ref[1], res[1])
+
+    def test_fetch_nano(self):
+        ts = {'sampling': numpy.timedelta64(200, 'ms'),
+              'size': 10, 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 46, 0, 200123), 4),
+            (datetime64(2014, 1, 1, 11, 46, 0, 340000), 8),
+            (datetime64(2014, 1, 1, 11, 47, 0, 323154), 50),
+            (datetime64(2014, 1, 1, 11, 48, 0, 590903), 4),
+            (datetime64(2014, 1, 1, 11, 48, 0, 903291), 4)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 11, 48, 0, 821312), 5)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
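+        # 200 ms buckets: 11:46:00.200 holds 4 and 8 (mean 6.0) and
+        # 11:48:00.800 holds 4 plus the later 5 (mean 4.5).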
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 11, 46, 0, 200000), 6.0),
+            (datetime64(2014, 1, 1, 11, 47, 0, 200000), 50.0),
+            (datetime64(2014, 1, 1, 11, 48, 0, 400000), 4.0),
+            (datetime64(2014, 1, 1, 11, 48, 0, 800000), 4.5)
+        ], list(ts['return'].fetch()))
+        self.assertEqual(numpy.timedelta64(200000000, 'ns'),
+                         ts['return'].aggregation.granularity)
+
+    def test_fetch_agg_std(self):
+        # NOTE(gordc): this is a good test to ensure we drop NaN entries;
+        # 2014-01-01 12:00:00 would appear if we didn't dropna()
+        ts = {'sampling': numpy.timedelta64(60, 's'),
+              'size': 60, 'agg': 'std'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 0), 3),
+                                    (datetime64(2014, 1, 1, 12, 1, 4), 4),
+                                    (datetime64(2014, 1, 1, 12, 1, 9), 7),
+                                    (datetime64(2014, 1, 1, 12, 2, 1), 15),
+                                    (datetime64(2014, 1, 1, 12, 2, 12), 1)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1, 0), 2.1213203435596424),
+            (datetime64(2014, 1, 1, 12, 2, 0), 9.8994949366116654),
+        ], list(ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))))
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 2, 13), 110)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 1, 0), 2.1213203435596424),
+            (datetime64(2014, 1, 1, 12, 2, 0), 59.304300012730948),
+        ], list(ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))))
+
+    def test_fetch_agg_max(self):
+        ts = {'sampling': numpy.timedelta64(60, 's'),
+              'size': 60, 'agg': 'max'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 0, 0), 3),
+                                    (datetime64(2014, 1, 1, 12, 1, 4), 4),
+                                    (datetime64(2014, 1, 1, 12, 1, 9), 7),
+                                    (datetime64(2014, 1, 1, 12, 2, 1), 15),
+                                    (datetime64(2014, 1, 1, 12, 2, 12), 1)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0), 3),
+            (datetime64(2014, 1, 1, 12, 1, 0), 7),
+            (datetime64(2014, 1, 1, 12, 2, 0), 15),
+        ], list(ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))))
+
+        tsb.set_values(numpy.array([(datetime64(2014, 1, 1, 12, 2, 13), 110)],
+                                   dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                       before_truncate_callback=functools.partial(
+                           self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual([
+            (datetime64(2014, 1, 1, 12, 0, 0), 3),
+            (datetime64(2014, 1, 1, 12, 1, 0), 7),
+            (datetime64(2014, 1, 1, 12, 2, 0), 110),
+        ], list(ts['return'].fetch(datetime64(2014, 1, 1, 12, 0, 0))))
+
+    def test_serialize(self):
+        ts = {'sampling': numpy.timedelta64(500, 'ms'), 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 0, 1234), 3),
+            (datetime64(2014, 1, 1, 12, 0, 0, 321), 6),
+            (datetime64(2014, 1, 1, 12, 1, 4, 234), 5),
+            (datetime64(2014, 1, 1, 12, 1, 9, 32), 7),
+            (datetime64(2014, 1, 1, 12, 2, 12, 532), 1)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
+        key = ts['return'].get_split_key()
+        o, s = ts['return'].serialize(key)
+        self.assertEqual(ts['return'],
+                         carbonara.AggregatedTimeSerie.unserialize(
+                             s, key, ts['return'].aggregation))
+
+    def test_no_truncation(self):
+        ts = {'sampling': numpy.timedelta64(60, 's'), 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie()
+
+        for i in six.moves.range(1, 11):
+            tsb.set_values(numpy.array([
+                (datetime64(2014, 1, 1, 12, i, i), float(i))],
+                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                before_truncate_callback=functools.partial(
+                    self._resample_and_merge, agg_dict=ts))
+            tsb.set_values(numpy.array([
+                (datetime64(2014, 1, 1, 12, i, i + 1), float(i + 1))],
+                dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+                before_truncate_callback=functools.partial(
+                    self._resample_and_merge, agg_dict=ts))
+            self.assertEqual(i, len(list(ts['return'].fetch())))
+
+    def test_back_window(self):
+        """Back window testing.
+
+        Test the back window on an archive is not longer than the window we
+        aggregate on.
+        """
+        ts = {'sampling': numpy.timedelta64(1, 's'), 'size': 60, 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 1, 2300), 1),
+            (datetime64(2014, 1, 1, 12, 0, 1, 4600), 2),
+            (datetime64(2014, 1, 1, 12, 0, 2, 4500), 3),
+            (datetime64(2014, 1, 1, 12, 0, 2, 7800), 4),
+            (datetime64(2014, 1, 1, 12, 0, 3, 8), 2.5)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
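+        # With a 1 s block size the means are: 12:00:01 → (1 + 2) / 2 = 1.5,
+        # 12:00:02 → (3 + 4) / 2 = 3.5 and 12:00:03 → 2.5.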
+        self.assertEqual(
+            [
+                (datetime64(2014, 1, 1, 12, 0, 1), 1.5),
+                (datetime64(2014, 1, 1, 12, 0, 2), 3.5),
+                (datetime64(2014, 1, 1, 12, 0, 3), 2.5),
+            ],
+            list(ts['return'].fetch()))
+
+    def test_back_window_ignore(self):
+        """Back window testing.
+
+        Test the back window on an archive is not longer than the window we
+        aggregate on.
+        """
+        ts = {'sampling': numpy.timedelta64(1, 's'), 'size': 60, 'agg': 'mean'}
+        tsb = carbonara.BoundTimeSerie(block_size=ts['sampling'])
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 1, 2300), 1),
+            (datetime64(2014, 1, 1, 12, 0, 1, 4600), 2),
+            (datetime64(2014, 1, 1, 12, 0, 2, 4500), 3),
+            (datetime64(2014, 1, 1, 12, 0, 2, 7800), 4),
+            (datetime64(2014, 1, 1, 12, 0, 3, 8), 2.5)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual(
+            [
+                (datetime64(2014, 1, 1, 12, 0, 1), 1.5),
+                (datetime64(2014, 1, 1, 12, 0, 2), 3.5),
+                (datetime64(2014, 1, 1, 12, 0, 3), 2.5),
+            ],
+            list(ts['return'].fetch()))
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 2, 99), 9)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
+        self.assertEqual(
+            [
+                (datetime64(2014, 1, 1, 12, 0, 1), 1.5),
+                (datetime64(2014, 1, 1, 12, 0, 2), 3.5),
+                (datetime64(2014, 1, 1, 12, 0, 3), 2.5),
+            ],
+            list(ts['return'].fetch()))
+
+        tsb.set_values(numpy.array([
+            (datetime64(2014, 1, 1, 12, 0, 2, 99), 9),
+            (datetime64(2014, 1, 1, 12, 0, 3, 9), 4.5)],
+            dtype=carbonara.TIMESERIES_ARRAY_DTYPE),
+            before_truncate_callback=functools.partial(
+                self._resample_and_merge, agg_dict=ts))
+
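+        # 9 at 12:00:02 is ignored again, but 4.5 at 12:00:03 falls inside
+        # the current block and merges with 2.5: mean(2.5, 4.5) = 3.5.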
+        self.assertEqual(
+            [
+                (datetime64(2014, 1, 1, 12, 0, 1), 1.5),
+                (datetime64(2014, 1, 1, 12, 0, 2), 3.5),
+                (datetime64(2014, 1, 1, 12, 0, 3), 3.5),
+            ],
+            list(ts['return'].fetch()))
+
+    def test_split_key(self):
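+        # A split key is aligned on the epoch and covers
+        # SplitKey.POINTS_PER_SPLIT (3600) * sampling seconds: with a
+        # 3600 s sampling that is 150 days, so 2015-01-01T15:03 falls in
+        # the window starting on 2014-10-07.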
+        self.assertEqual(
+            numpy.datetime64("2014-10-07"),
+            carbonara.SplitKey.from_timestamp_and_sampling(
+                numpy.datetime64("2015-01-01T15:03"),
+                numpy.timedelta64(3600, 's')))
+        self.assertEqual(
+            numpy.datetime64("2014-12-31 18:00"),
+            carbonara.SplitKey.from_timestamp_and_sampling(
+                numpy.datetime64("2015-01-01 15:03:58"),
+                numpy.timedelta64(58, 's')))
+
+        key = carbonara.SplitKey.from_timestamp_and_sampling(
+            numpy.datetime64("2015-01-01 15:03"),
+            numpy.timedelta64(3600, 's'))
+
+        self.assertGreater(key, numpy.datetime64("1970"))
+
+        self.assertGreaterEqual(key, numpy.datetime64("1970"))
+
+    def test_split_key_cmp(self):
+        dt1 = numpy.datetime64("2015-01-01T15:03")
+        dt1_1 = numpy.datetime64("2015-01-01T15:03")
+        dt2 = numpy.datetime64("2015-01-05T15:03")
+        td = numpy.timedelta64(60, 's')
+        td2 = numpy.timedelta64(300, 's')
+
+        self.assertEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+        self.assertEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1_1, td))
+        self.assertNotEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+        self.assertNotEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td2))
+
+        self.assertLess(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+        self.assertLessEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+
+        self.assertGreater(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+        self.assertGreaterEqual(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+
+    def test_split_key_cmp_negative(self):
+        dt1 = numpy.datetime64("2015-01-01T15:03")
+        dt1_1 = numpy.datetime64("2015-01-01T15:03")
+        dt2 = numpy.datetime64("2015-01-05T15:03")
+        td = numpy.timedelta64(60, 's')
+        td2 = numpy.timedelta64(300, 's')
+
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) !=
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) !=
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1_1, td))
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) ==
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) ==
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td2))
+        self.assertRaises(
+            TypeError,
+            operator.le,
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td2))
+        self.assertRaises(
+            TypeError,
+            operator.ge,
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td2))
+        self.assertRaises(
+            TypeError,
+            operator.gt,
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td2))
+        self.assertRaises(
+            TypeError,
+            operator.lt,
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td),
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td2))
+
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) >=
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td) >
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td) <=
+            carbonara.SplitKey.from_timestamp_and_sampling(dt1, td))
+        self.assertFalse(
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td) <
+            carbonara.SplitKey.from_timestamp_and_sampling(dt2, td))
+
+    def test_split_key_next(self):
+        self.assertEqual(
+            numpy.datetime64("2015-03-06"),
+            next(carbonara.SplitKey.from_timestamp_and_sampling(
+                numpy.datetime64("2015-01-01 15:03"),
+                numpy.timedelta64(3600, 's'))))
+        self.assertEqual(
+            numpy.datetime64("2015-08-03"),
+            next(next(carbonara.SplitKey.from_timestamp_and_sampling(
+                numpy.datetime64("2015-01-01T15:03"),
+                numpy.timedelta64(3600, 's')))))
+
+    def test_split(self):
+        sampling = numpy.timedelta64(5, 's')
+        points = 100000
+        ts = carbonara.TimeSerie.from_data(
+            timestamps=list(map(datetime.datetime.utcfromtimestamp,
+                                six.moves.range(points))),
+            values=list(six.moves.range(points)))
+        agg = self._resample(ts, sampling, 'mean')
+
+        grouped_points = list(agg.split())
+
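+        # 100000 raw points at 1 s spacing become 20000 5 s means, and with
+        # 3600 points per split that is ceil(20000 / 3600) = 6 splits.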
+        self.assertEqual(
+            math.ceil((points / sampling.astype(float))
+                      / carbonara.SplitKey.POINTS_PER_SPLIT),
+            len(grouped_points))
+        self.assertEqual("0.0",
+                         str(carbonara.SplitKey(grouped_points[0][0], 0)))
+        # 3600 × 5s = 5 hours
+        self.assertEqual(datetime64(1970, 1, 1, 5),
+                         grouped_points[1][0])
+        self.assertEqual(carbonara.SplitKey.POINTS_PER_SPLIT,
+                         len(grouped_points[0][1]))
+
+    def test_from_timeseries(self):
+        sampling = numpy.timedelta64(5, 's')
+        points = 100000
+        ts = carbonara.TimeSerie.from_data(
+            timestamps=list(map(datetime.datetime.utcfromtimestamp,
+                                six.moves.range(points))),
+            values=list(six.moves.range(points)))
+        agg = self._resample(ts, sampling, 'mean')
+
+        split = [t[1] for t in list(agg.split())]
+
+        self.assertEqual(agg,
+                         carbonara.AggregatedTimeSerie.from_timeseries(
+                             split, aggregation=agg.aggregation))
+
+    def test_resample(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 4),
+             datetime64(2014, 1, 1, 12, 0, 9),
+             datetime64(2014, 1, 1, 12, 0, 11),
+             datetime64(2014, 1, 1, 12, 0, 12)],
+            [3, 5, 6, 2, 4])
+        agg_ts = self._resample(ts, numpy.timedelta64(5, 's'), 'mean')
+        self.assertEqual(3, len(agg_ts))
+
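+        # The three 5 s means are [4, 6, 3]; resampling them to 10 s
+        # averages within each new group: mean(4, 6) = 5 and mean(3) = 3.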
+        agg_ts = agg_ts.resample(numpy.timedelta64(10, 's'))
+        self.assertEqual(2, len(agg_ts))
+        self.assertEqual(5, agg_ts[0][1])
+        self.assertEqual(3, agg_ts[1][1])
+
+    def test_iter(self):
+        ts = carbonara.TimeSerie.from_data(
+            [datetime64(2014, 1, 1, 12, 0, 0),
+             datetime64(2014, 1, 1, 12, 0, 11),
+             datetime64(2014, 1, 1, 12, 0, 12)],
+            [3, 5, 6])
+        self.assertEqual([
+            (numpy.datetime64('2014-01-01T12:00:00'), 3.),
+            (numpy.datetime64('2014-01-01T12:00:11'), 5.),
+            (numpy.datetime64('2014-01-01T12:00:12'), 6.),
+        ], list(ts))
diff --git a/gnocchi/tests/test_chef.py b/gnocchi/tests/test_chef.py
new file mode 100644
index 0000000000000000000000000000000000000000..0fd7a483e02de768d427493b483b8a2767fe155b
--- /dev/null
+++ b/gnocchi/tests/test_chef.py
@@ -0,0 +1,55 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2018 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+
+import numpy
+
+from gnocchi import chef
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi.tests import base
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestChef(base.TestCase):
+    def setUp(self):
+        super(TestChef, self).setUp()
+        self.metric, __ = self._create_metric()
+        self.chef = chef.Chef(self.coord, self.incoming,
+                              self.index, self.storage)
+
+    def test_delete_nonempty_metric_unprocessed(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.index.delete_metric(self.metric.id)
+        self.trigger_processing()
+        __, __, details = self.incoming._build_report(True)
+        self.assertNotIn(str(self.metric.id), details)
+        self.chef.expunge_metrics(sync=True)
+
+    def test_delete_expunge_metric(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+        self.index.delete_metric(self.metric.id)
+        self.chef.expunge_metrics(sync=True)
+        self.assertRaises(indexer.NoSuchMetric, self.index.delete_metric,
+                          self.metric.id)
diff --git a/gnocchi/tests/test_incoming.py b/gnocchi/tests/test_incoming.py
new file mode 100644
index 0000000000000000000000000000000000000000..b830df2e03b50dbc25230c99a0261eae5a126f73
--- /dev/null
+++ b/gnocchi/tests/test_incoming.py
@@ -0,0 +1,66 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import threading
+import uuid
+
+import numpy
+
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi.tests import base as tests_base
+
+
+class TestIncomingDriver(tests_base.TestCase):
+    def setUp(self):
+        super(TestIncomingDriver, self).setUp()
+        # A lot of tests want a metric, so create one
+        self.metric = indexer.Metric(
+            uuid.uuid4(),
+            self.archive_policies["low"])
+
+    def test_iter_on_sacks_to_process(self):
+        if (self.incoming.iter_on_sacks_to_process ==
+           incoming.IncomingDriver.iter_on_sacks_to_process):
+            self.skipTest("Incoming driver does not implement "
+                          "iter_on_sacks_to_process")
+
+        found = threading.Event()
+
+        sack_to_find = self.incoming.sack_for_metric(self.metric.id)
+
+        def _iter_on_sacks_to_process():
+            for sack in self.incoming.iter_on_sacks_to_process():
+                self.assertIsInstance(sack, incoming.Sack)
+                if sack == sack_to_find:
+                    found.set()
+                    break
+
+        finder = threading.Thread(target=_iter_on_sacks_to_process)
+        finder.daemon = True
+        finder.start()
+
+        # Try for 30s to get a notification about this sack
+        for _ in range(30):
+            if found.wait(timeout=1):
+                break
+            # NOTE(jd) Retry sending measures. This cannot be done only once,
+            # as there might be a race condition between the threads
+            self.incoming.finish_sack_processing(sack_to_find)
+            self.incoming.add_measures(self.metric.id, [
+                incoming.Measure(numpy.datetime64("2014-01-01 12:00:01"), 69),
+            ])
+        else:
+            self.fail("Notification for metric not received")
diff --git a/gnocchi/tests/test_indexer.py b/gnocchi/tests/test_indexer.py
new file mode 100644
index 0000000000000000000000000000000000000000..1216eebac597db2b15b7a546e58a999f8bed08a0
--- /dev/null
+++ b/gnocchi/tests/test_indexer.py
@@ -0,0 +1,1360 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import operator
+import uuid
+
+import mock
+import numpy
+
+from gnocchi import archive_policy
+from gnocchi import indexer
+from gnocchi.tests import base as tests_base
+from gnocchi import utils
+
+
+class MockException(Exception):
+    pass
+
+
+class TestIndexer(tests_base.TestCase):
+    def test_get_driver(self):
+        driver = indexer.get_driver(self.conf)
+        self.assertIsInstance(driver, indexer.IndexerDriver)
+
+
+class TestIndexerDriver(tests_base.TestCase):
+
+    def test_str(self):
+        self.assertEqual("%s: %s" % (self.index.__class__.__name__,
+                                     self.conf.indexer.url.replace(
+                                         "root@", "").replace(
+                                             "localhost", "***:***@localhost"
+                                         )),
+                         str(self.index))
+
+    def test_create_archive_policy_already_exists(self):
+        # NOTE(jd) This archive policy
+        # is created by gnocchi.tests on setUp() :)
+        self.assertRaises(indexer.ArchivePolicyAlreadyExists,
+                          self.index.create_archive_policy,
+                          archive_policy.ArchivePolicy("high", 0, {}))
+
+    def test_get_archive_policy(self):
+        ap = self.index.get_archive_policy("low")
+        self.assertEqual({
+            'back_window': 0,
+            'aggregation_methods':
+            set(self.conf.archive_policy.default_aggregation_methods),
+            'definition': [
+                {u'granularity': numpy.timedelta64(5, 'm'),
+                 u'points': 12,
+                 u'timespan': numpy.timedelta64(3600, 's')},
+                {u'granularity': numpy.timedelta64(3600, 's'),
+                 u'points': 24,
+                 u'timespan': numpy.timedelta64(86400, 's')},
+                {u'granularity': numpy.timedelta64(86400, 's'),
+                 u'points': 30,
+                 u'timespan': numpy.timedelta64(2592000, 's')}],
+            'name': u'low'}, dict(ap))
+
+    def test_update_archive_policy(self):
+        self.assertRaises(indexer.UnsupportedArchivePolicyChange,
+                          self.index.update_archive_policy, "low",
+                          [archive_policy.ArchivePolicyItem(granularity=300,
+                                                            points=10)])
+        self.assertRaises(indexer.UnsupportedArchivePolicyChange,
+                          self.index.update_archive_policy, "low",
+                          [archive_policy.ArchivePolicyItem(granularity=300,
+                                                            points=12),
+                           archive_policy.ArchivePolicyItem(granularity=3600,
+                                                            points=12),
+                           archive_policy.ArchivePolicyItem(granularity=5,
+                                                            points=6)])
+        apname = str(uuid.uuid4())
+        self.index.create_archive_policy(archive_policy.ArchivePolicy(
+            apname, 0, [(12, 300), (24, 3600), (30, 86400)]))
+        ap = self.index.update_archive_policy(
+            apname, [archive_policy.ArchivePolicyItem(granularity=300,
+                                                      points=6),
+                     archive_policy.ArchivePolicyItem(granularity=3600,
+                                                      points=24),
+                     archive_policy.ArchivePolicyItem(granularity=86400,
+                                                      points=30)])
+        self.assertEqual({
+            'back_window': 0,
+            'aggregation_methods':
+            set(self.conf.archive_policy.default_aggregation_methods),
+            'definition': [
+                {u'granularity': numpy.timedelta64(300, 's'),
+                 u'points': 6,
+                 u'timespan': numpy.timedelta64(1800, 's')},
+                {u'granularity': numpy.timedelta64(3600, 's'),
+                 u'points': 24,
+                 u'timespan': numpy.timedelta64(86400, 's')},
+                {u'granularity': numpy.timedelta64(86400, 's'),
+                 u'points': 30,
+                 u'timespan': numpy.timedelta64(2592000, 's')}],
+            'name': apname}, dict(ap))
+        ap = self.index.update_archive_policy(
+            apname, [archive_policy.ArchivePolicyItem(granularity=300,
+                                                      points=12),
+                     archive_policy.ArchivePolicyItem(granularity=3600,
+                                                      points=24),
+                     archive_policy.ArchivePolicyItem(granularity=86400,
+                                                      points=30)])
+        self.assertEqual({
+            'back_window': 0,
+            'aggregation_methods':
+            set(self.conf.archive_policy.default_aggregation_methods),
+            'definition': [
+                {u'granularity': numpy.timedelta64(300, 's'),
+                 u'points': 12,
+                 u'timespan': numpy.timedelta64(3600, 's')},
+                {u'granularity': numpy.timedelta64(3600, 's'),
+                 u'points': 24,
+                 u'timespan': numpy.timedelta64(86400, 's')},
+                {u'granularity': numpy.timedelta64(86400, 's'),
+                 u'points': 30,
+                 u'timespan': numpy.timedelta64(2592000, 's')}],
+            'name': apname}, dict(ap))
+
+    def test_delete_archive_policy(self):
+        name = str(uuid.uuid4())
+        self.index.create_archive_policy(
+            archive_policy.ArchivePolicy(name, 0, {}))
+        self.index.delete_archive_policy(name)
+        self.assertRaises(indexer.NoSuchArchivePolicy,
+                          self.index.delete_archive_policy,
+                          name)
+        self.assertRaises(indexer.NoSuchArchivePolicy,
+                          self.index.delete_archive_policy,
+                          str(uuid.uuid4()))
+        metric_id = uuid.uuid4()
+        self.index.create_metric(metric_id, str(uuid.uuid4()), "low")
+        self.assertRaises(indexer.ArchivePolicyInUse,
+                          self.index.delete_archive_policy,
+                          "low")
+        self.index.delete_metric(metric_id)
+
+    def test_list_ap_rules_ordered(self):
+        name = str(uuid.uuid4())
+        self.index.create_archive_policy(
+            archive_policy.ArchivePolicy(name, 0, {}))
+        self.index.create_archive_policy_rule('rule1', 'abc.*', name)
+        self.index.create_archive_policy_rule('rule2', 'abc.xyz.*', name)
+        self.index.create_archive_policy_rule('rule3', 'abc.xyz', name)
+        rules = self.index.list_archive_policy_rules()
+        # NOTE(jd) The test is not isolated; there might be more than 3 rules
+        found = 0
+        for r in rules:
+            if r['metric_pattern'] == 'abc.xyz.*':
+                found = 1
+            if found == 1 and r['metric_pattern'] == 'abc.xyz':
+                found = 2
+            if found == 2 and r['metric_pattern'] == 'abc.*':
+                break
+        else:
+            self.fail("Metric patterns are not ordered")
+
+        # Ensure we can't delete the archive policy
+        self.assertRaises(indexer.ArchivePolicyInUse,
+                          self.index.delete_archive_policy, name)
+
+    def test_create_metric(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        m = self.index.create_metric(r1, creator, "low")
+        self.assertEqual(r1, m.id)
+        self.assertEqual(m.creator, creator)
+        self.assertIsNone(m.name)
+        self.assertIsNone(m.unit)
+        self.assertIsNone(m.resource_id)
+        m2 = self.index.list_metrics(attribute_filter={"=": {"id": r1}})
+        self.assertEqual([m], m2)
+
+    def test_create_named_metric_duplicate(self):
+        m1 = uuid.uuid4()
+        r1 = uuid.uuid4()
+        name = "foobar"
+        creator = str(uuid.uuid4())
+        self.index.create_resource('generic', r1, creator)
+        m = self.index.create_metric(m1, creator, "low",
+                                     name=name,
+                                     resource_id=r1)
+        self.assertEqual(m1, m.id)
+        self.assertEqual(m.creator, creator)
+        self.assertEqual(name, m.name)
+        self.assertEqual(r1, m.resource_id)
+        m2 = self.index.list_metrics(attribute_filter={"=": {"id": m1}})
+        self.assertEqual([m], m2)
+
+        self.assertRaises(indexer.NamedMetricAlreadyExists,
+                          self.index.create_metric, m1, creator, "low",
+                          name=name, resource_id=r1)
+
+    def test_expunge_metric(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        m = self.index.create_metric(r1, creator, "low")
+        self.index.delete_metric(m.id)
+        try:
+            self.index.expunge_metric(m.id)
+        except indexer.NoSuchMetric:
+            # It's possible another test process expunged the metric just
+            # before us; in that case we're good, and we just check that the
+            # next call really does raise NoSuchMetric anyway
+            pass
+        self.assertRaises(indexer.NoSuchMetric,
+                          self.index.delete_metric,
+                          m.id)
+        self.assertRaises(indexer.NoSuchMetric,
+                          self.index.expunge_metric,
+                          m.id)
+
+    def test_create_resource(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        rc = self.index.create_resource('generic', r1, creator)
+        self.assertIsNotNone(rc.started_at)
+        self.assertIsNotNone(rc.revision_start)
+        self.assertEqual({"id": r1,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": creator,
+                          "created_by_project_id": "",
+                          "user_id": None,
+                          "project_id": None,
+                          "started_at": rc.started_at,
+                          "ended_at": None,
+                          "original_resource_id": str(r1),
+                          "type": "generic",
+                          "metrics": {}},
+                         rc.jsonify())
+        rg = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, rg)
+        self.assertEqual(rc.metrics, rg.metrics)
+
+    def test_create_resource_with_original_resource_id(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        rc = self.index.create_resource('generic', r1, creator,
+                                        original_resource_id="foobar")
+        self.assertIsNotNone(rc.started_at)
+        self.assertIsNotNone(rc.revision_start)
+        self.assertEqual({"id": r1,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": creator,
+                          "created_by_project_id": "",
+                          "user_id": None,
+                          "project_id": None,
+                          "started_at": rc.started_at,
+                          "ended_at": None,
+                          "original_resource_id": "foobar",
+                          "type": "generic",
+                          "metrics": {}},
+                         rc.jsonify())
+        rg = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, rg)
+        self.assertEqual(rc.metrics, rg.metrics)
+
+    def test_split_user_project_for_legacy_reasons(self):
+        r1 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        rc = self.index.create_resource('generic', r1, creator)
+        self.assertIsNotNone(rc.started_at)
+        self.assertIsNotNone(rc.revision_start)
+        self.assertEqual({"id": r1,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": user,
+                          "created_by_project_id": project,
+                          "user_id": None,
+                          "project_id": None,
+                          "started_at": rc.started_at,
+                          "ended_at": None,
+                          "original_resource_id": str(r1),
+                          "type": "generic",
+                          "metrics": {}},
+                         rc.jsonify())
+        rg = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, rg)
+        self.assertEqual(rc.metrics, rg.metrics)
+
+    def test_create_non_existent_metric(self):
+        e = uuid.uuid4()
+        try:
+            self.index.create_resource(
+                'generic', uuid.uuid4(), str(uuid.uuid4()), str(uuid.uuid4()),
+                metrics={"foo": e})
+        except indexer.NoSuchMetric as ex:
+            self.assertEqual(e, ex.metric)
+        else:
+            self.fail("Exception not raised")
+
+    def test_create_resource_already_exists(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_resource('generic', r1, creator)
+        self.assertRaises(indexer.ResourceAlreadyExists,
+                          self.index.create_resource,
+                          'generic', r1, creator)
+
+    def test_create_resource_with_new_metrics(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        rc = self.index.create_resource(
+            'generic', r1, creator,
+            metrics={"foobar": {"archive_policy_name": "low"}})
+        self.assertEqual(1, len(rc.metrics))
+        m = self.index.list_metrics(
+            attribute_filter={"=": {"id": rc.metrics[0].id}})
+        self.assertEqual(m[0], rc.metrics[0])
+
+    def test_delete_resource(self):
+        r1 = uuid.uuid4()
+        self.index.create_resource('generic', r1, str(uuid.uuid4()),
+                                   str(uuid.uuid4()))
+        self.index.delete_resource(r1)
+        self.assertRaises(indexer.NoSuchResource,
+                          self.index.delete_resource,
+                          r1)
+
+    def test_delete_resource_with_metrics(self):
+        creator = str(uuid.uuid4())
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        self.index.create_metric(e1, creator, archive_policy_name="low")
+        self.index.create_metric(e2, creator, archive_policy_name="low")
+        r1 = uuid.uuid4()
+        self.index.create_resource('generic', r1, creator,
+                                   metrics={'foo': e1, 'bar': e2})
+        self.index.delete_resource(r1)
+        self.assertRaises(indexer.NoSuchResource,
+                          self.index.delete_resource,
+                          r1)
+        metrics = self.index.list_metrics(
+            attribute_filter={"in": {"id": [e1, e2]}})
+        self.assertEqual([], metrics)
+
+    def test_delete_resource_non_existent(self):
+        r1 = uuid.uuid4()
+        self.assertRaises(indexer.NoSuchResource,
+                          self.index.delete_resource,
+                          r1)
+
+    def test_create_resource_with_start_timestamp(self):
+        r1 = uuid.uuid4()
+        ts = utils.datetime_utc(2014, 1, 1, 23, 34, 23, 1234)
+        creator = str(uuid.uuid4())
+        rc = self.index.create_resource('generic', r1, creator, started_at=ts)
+        self.assertEqual({"id": r1,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": creator,
+                          "created_by_project_id": "",
+                          "user_id": None,
+                          "project_id": None,
+                          "started_at": ts,
+                          "ended_at": None,
+                          "original_resource_id": str(r1),
+                          "type": "generic",
+                          "metrics": {}}, rc.jsonify())
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, r)
+
+    def test_create_resource_with_metrics(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(e2, creator,
+                                 archive_policy_name="low")
+        rc = self.index.create_resource('generic', r1, creator,
+                                        metrics={'foo': e1, 'bar': e2})
+        self.assertIsNotNone(rc.started_at)
+        self.assertIsNotNone(rc.revision_start)
+        self.assertEqual({"id": r1,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": creator,
+                          "created_by_project_id": "",
+                          "user_id": None,
+                          "project_id": None,
+                          "started_at": rc.started_at,
+                          "ended_at": None,
+                          "original_resource_id": str(r1),
+                          "type": "generic",
+                          "metrics": {'foo': str(e1), 'bar': str(e2)}},
+                         rc.jsonify())
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertIsNotNone(r.started_at)
+        self.assertEqual({"id": r1,
+                          "revision_start": r.revision_start,
+                          "revision_end": None,
+                          "creator": creator,
+                          "created_by_user_id": creator,
+                          "created_by_project_id": "",
+                          "type": "generic",
+                          "started_at": rc.started_at,
+                          "ended_at": None,
+                          "user_id": None,
+                          "project_id": None,
+                          "original_resource_id": str(r1),
+                          "metrics": {'foo': str(e1), 'bar': str(e2)}},
+                         r.jsonify())
+
+    def test_update_non_existent_resource_end_timestamp(self):
+        r1 = uuid.uuid4()
+        self.assertRaises(
+            indexer.NoSuchResource,
+            self.index.update_resource,
+            'generic',
+            r1,
+            ended_at=datetime.datetime(2014, 1, 1, 2, 3, 4))
+
+    def test_update_resource_end_timestamp(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_resource('generic', r1, creator)
+        self.index.update_resource(
+            'generic',
+            r1,
+            ended_at=utils.datetime_utc(2043, 1, 1, 2, 3, 4))
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertIsNotNone(r.started_at)
+        self.assertIsNone(r.user_id)
+        self.assertIsNone(r.project_id)
+        self.assertIsNone(r.revision_end)
+        self.assertIsNotNone(r.revision_start)
+        self.assertEqual(r1, r.id)
+        self.assertEqual(creator, r.creator)
+        self.assertEqual(utils.datetime_utc(2043, 1, 1, 2, 3, 4), r.ended_at)
+        self.assertEqual("generic", r.type)
+        self.assertEqual(0, len(r.metrics))
+        self.index.update_resource(
+            'generic',
+            r1,
+            ended_at=None)
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertIsNotNone(r.started_at)
+        self.assertIsNotNone(r.revision_start)
+        self.assertEqual({"id": r1,
+                          "revision_start": r.revision_start,
+                          "revision_end": None,
+                          "ended_at": None,
+                          "created_by_project_id": "",
+                          "created_by_user_id": creator,
+                          "creator": creator,
+                          "user_id": None,
+                          "project_id": None,
+                          "type": "generic",
+                          "started_at": r.started_at,
+                          "original_resource_id": str(r1),
+                          "metrics": {}}, r.jsonify())
+
+    def test_update_resource_metrics(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator, archive_policy_name="low")
+        self.index.create_resource('generic', r1, creator, metrics={'foo': e1})
+        self.index.create_metric(e2, creator, archive_policy_name="low")
+        rc = self.index.update_resource('generic', r1, metrics={'bar': e2})
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, r)
+
+    def test_update_resource_metrics_append(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(e2, creator,
+                                 archive_policy_name="low")
+        self.index.create_resource('generic', r1, creator,
+                                   metrics={'foo': e1})
+        rc = self.index.update_resource('generic', r1, metrics={'bar': e2},
+                                        append_metrics=True)
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, r)
+        metric_names = [m.name for m in rc.metrics]
+        self.assertIn('foo', metric_names)
+        self.assertIn('bar', metric_names)
+
+    def test_update_resource_metrics_append_after_delete(self):
+        r1 = uuid.uuid4()
+        m1 = uuid.uuid4()
+        m2 = uuid.uuid4()
+        m3 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(m1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(m2, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(m3, creator,
+                                 archive_policy_name="low")
+        self.index.create_resource('generic', r1, creator,
+                                   metrics={'foo': m1})
+        rc = self.index.update_resource('generic', r1, metrics={'bar': m2},
+                                        append_metrics=True)
+        self.index.delete_metric(m1)
+        rc = self.index.update_resource('generic', r1, metrics={'foo': m3},
+                                        append_metrics=True)
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(rc, r)
+        metric_names = [m.name for m in rc.metrics]
+        self.assertEqual(2, len(metric_names))
+        self.assertIn('foo', metric_names)
+        self.assertIn('bar', metric_names)
+
+    def test_update_resource_metrics_append_fail(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(e2, creator,
+                                 archive_policy_name="low")
+        self.index.create_resource('generic', r1, creator,
+                                   metrics={'foo': e1})
+
+        self.assertRaises(indexer.NamedMetricAlreadyExists,
+                          self.index.update_resource,
+                          'generic', r1, metrics={'foo': e2},
+                          append_metrics=True)
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertEqual(e1, r.metrics[0].id)
+
+    def test_update_resource_attribute(self):
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        rtype = mgr.resource_type_from_dict(resource_type, {
+            "col1": {"type": "string", "required": True,
+                     "min_length": 2, "max_length": 15}
+        }, 'creating')
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        # Create
+        self.index.create_resource_type(rtype)
+
+        rc = self.index.create_resource(resource_type, r1, creator,
+                                        col1="foo")
+        rc = self.index.update_resource(resource_type, r1, col1="foo")
+        r = self.index.get_resource(resource_type, r1, with_metrics=True)
+        self.assertEqual(rc, r)
+
+    def test_update_resource_no_change(self):
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        rtype = mgr.resource_type_from_dict(resource_type, {
+            "col1": {"type": "string", "required": True,
+                     "min_length": 2, "max_length": 15}
+        }, 'creating')
+        self.index.create_resource_type(rtype)
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        rc = self.index.create_resource(resource_type, r1, creator,
+                                        col1="foo")
+        updated = self.index.update_resource(resource_type, r1, col1="foo",
+                                             create_revision=False)
+        r = self.index.list_resources(resource_type,
+                                      {"=": {"id": r1}},
+                                      history=True)
+        self.assertEqual(1, len(r))
+        self.assertEqual(dict(rc), dict(r[0]))
+        self.assertEqual(dict(updated), dict(r[0]))
+
+    def test_update_resource_ended_at_fail(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_resource('generic', r1, creator)
+        self.assertRaises(
+            indexer.ResourceValueError,
+            self.index.update_resource,
+            'generic', r1,
+            ended_at=utils.datetime_utc(2010, 1, 1, 1, 1, 1))
+
+    def test_update_resource_unknown_attribute(self):
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        rtype = mgr.resource_type_from_dict(resource_type, {
+            "col1": {"type": "string", "required": False,
+                     "min_length": 1, "max_length": 2},
+        }, 'creating')
+        self.index.create_resource_type(rtype)
+        r1 = uuid.uuid4()
+        self.index.create_resource(resource_type, r1,
+                                   str(uuid.uuid4()), str(uuid.uuid4()))
+        self.assertRaises(indexer.ResourceAttributeError,
+                          self.index.update_resource,
+                          resource_type,
+                          r1, foo="bar")
+
+    def test_update_non_existent_metric(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        self.index.create_resource('generic', r1, str(uuid.uuid4()),
+                                   str(uuid.uuid4()))
+        self.assertRaises(indexer.NoSuchMetric,
+                          self.index.update_resource,
+                          'generic',
+                          r1, metrics={'bar': e1})
+
+    def test_update_non_existent_resource(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        self.index.create_metric(e1, str(uuid.uuid4()),
+                                 archive_policy_name="low")
+        self.assertRaises(indexer.NoSuchResource,
+                          self.index.update_resource,
+                          'generic',
+                          r1, metrics={'bar': e1})
+
+    def test_create_resource_with_non_existent_metrics(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        self.assertRaises(indexer.NoSuchMetric,
+                          self.index.create_resource,
+                          'generic',
+                          r1, str(uuid.uuid4()), str(uuid.uuid4()),
+                          metrics={'foo': e1})
+
+    def test_delete_metric_on_resource(self):
+        r1 = uuid.uuid4()
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(e2, creator,
+                                 archive_policy_name="low")
+        rc = self.index.create_resource('generic', r1, creator,
+                                        metrics={'foo': e1, 'bar': e2})
+        self.index.delete_metric(e1)
+        self.assertRaises(indexer.NoSuchMetric, self.index.delete_metric, e1)
+        r = self.index.get_resource('generic', r1, with_metrics=True)
+        self.assertIsNotNone(r.started_at)
+        self.assertIsNotNone(r.revision_start)
+        self.assertEqual({"id": r1,
+                          "started_at": r.started_at,
+                          "revision_start": rc.revision_start,
+                          "revision_end": None,
+                          "ended_at": None,
+                          "creator": creator,
+                          "created_by_project_id": "",
+                          "created_by_user_id": creator,
+                          "user_id": None,
+                          "project_id": None,
+                          "original_resource_id": str(r1),
+                          "type": "generic",
+                          "metrics": {'bar': str(e2)}}, r.jsonify())
+
+    def test_delete_resource_custom(self):
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {
+                "flavor_id": {"type": "string",
+                              "min_length": 1,
+                              "max_length": 20,
+                              "required": True}
+            }, 'creating'))
+        r1 = uuid.uuid4()
+        created = self.index.create_resource(resource_type, r1,
+                                             str(uuid.uuid4()),
+                                             str(uuid.uuid4()),
+                                             flavor_id="foo")
+        got = self.index.get_resource(resource_type, r1, with_metrics=True)
+        self.assertEqual(created, got)
+        self.index.delete_resource(r1)
+        got = self.index.get_resource(resource_type, r1)
+        self.assertIsNone(got)
+
+    def test_list_resources_by_unknown_field(self):
+        self.assertRaises(indexer.ResourceAttributeError,
+                          self.index.list_resources,
+                          'generic',
+                          attribute_filter={"=": {"fern": "bar"}})
+
+    def test_list_resources_by_user(self):
+        r1 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        g = self.index.create_resource('generic', r1, user + ":" + project,
+                                       user, project)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"user_id": user}})
+        self.assertEqual(1, len(resources))
+        self.assertEqual(g, resources[0])
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"user_id": 'bad-user'}})
+        self.assertEqual(0, len(resources))
+
+    def test_list_resources_by_created_by_user_id(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        g = self.index.create_resource('generic', r1, creator + ":" + creator)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"created_by_user_id": creator}})
+        self.assertEqual([g], resources)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"created_by_user_id": 'bad-user'}})
+        self.assertEqual([], resources)
+
+    def test_list_resources_by_creator(self):
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        g = self.index.create_resource('generic', r1, creator)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"creator": creator}})
+        self.assertEqual(1, len(resources))
+        self.assertEqual(g, resources[0])
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"creator": 'bad-user'}})
+        self.assertEqual(0, len(resources))
+
+    def test_list_resources_by_user_with_details(self):
+        r1 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        g = self.index.create_resource('generic', r1, creator,
+                                       user, project)
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {}, 'creating'))
+        r2 = uuid.uuid4()
+        i = self.index.create_resource(resource_type, r2, creator,
+                                       user, project)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"user_id": user}},
+            details=True,
+        )
+        self.assertEqual(2, len(resources))
+        self.assertIn(g, resources)
+        self.assertIn(i, resources)
+
+    def test_list_resources_by_project(self):
+        r1 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        g = self.index.create_resource('generic', r1, creator, user, project)
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"project_id": project}})
+        self.assertEqual(1, len(resources))
+        self.assertEqual(g, resources[0])
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"project_id": 'bad-project'}})
+        self.assertEqual(0, len(resources))
+
+    def test_list_resources_with_no_project(self):
+        r1 = uuid.uuid4()
+        r2 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        g1 = self.index.create_resource('generic', r1, creator, user, project)
+        g2 = self.index.create_resource('generic', r2, creator, None, None)
+
+        # Get null value
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"and": [
+                {"=": {"creator": creator}},
+                {"!=": {"project_id": project}}
+            ]})
+        self.assertEqual(1, len(resources))
+        self.assertEqual(g2, resources[0])
+
+        # Get null and filled values
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"and": [
+                {"=": {"creator": creator}},
+                {"!=": {"project_id": "foobar"}}
+            ]},
+            sorts=["project_id:asc-nullsfirst"])
+        self.assertEqual(2, len(resources))
+        self.assertEqual(g2, resources[0])
+        self.assertEqual(g1, resources[1])
+
+    def test_list_resources_by_duration(self):
+        r1 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        g = self.index.create_resource(
+            'generic', r1, user + ":" + project, user, project,
+            started_at=utils.datetime_utc(2010, 1, 1, 12, 0),
+            ended_at=utils.datetime_utc(2010, 1, 1, 13, 0))
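+        # The resource lived exactly one hour, i.e. a lifespan of 3600
+        # seconds: the 1800s filter below matches, the 7200s one does not.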
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"and": [
+                {"=": {"user_id": user}},
+                {">": {"lifespan": 1800}},
+            ]})
+        self.assertEqual(1, len(resources))
+        self.assertEqual(g, resources[0])
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"and": [
+                {"=": {"project_id": project}},
+                {">": {"lifespan": 7200}},
+            ]})
+        self.assertEqual(0, len(resources))
+
+    def test_list_resources(self):
+        # NOTE(jd) This test is a bit fuzzy right now as we use the same
+        # database for all tests and the tests run concurrently, but for
+        # now it's better than nothing.
+        r1 = uuid.uuid4()
+        g = self.index.create_resource('generic', r1,
+                                       str(uuid.uuid4()), str(uuid.uuid4()))
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {}, 'creating'))
+        r2 = uuid.uuid4()
+        i = self.index.create_resource(resource_type, r2,
+                                       str(uuid.uuid4()), str(uuid.uuid4()))
+        resources = self.index.list_resources('generic')
+        self.assertGreaterEqual(len(resources), 2)
+        g_found = False
+        i_found = False
+        for r in resources:
+            if r.id == r1:
+                self.assertEqual(g, r)
+                g_found = True
+            elif r.id == r2:
+                i_found = True
+            if i_found and g_found:
+                break
+        else:
+            self.fail("Some resources were not found")
+
+        resources = self.index.list_resources(resource_type)
+        self.assertGreaterEqual(len(resources), 1)
+        for r in resources:
+            if r.id == r2:
+                self.assertEqual(i, r)
+                break
+        else:
+            self.fail("Some resources were not found")
+
+    def test_list_resource_attribute_type_numeric(self):
+        """Test that we can pass an integer to filter on a string type."""
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {
+                "flavor_id": {"type": "string",
+                              "min_length": 1,
+                              "max_length": 20,
+                              "required": False},
+            }, 'creating'))
+        r = self.index.list_resources(
+            resource_type, attribute_filter={"=": {"flavor_id": 1.0}})
+        self.assertEqual(0, len(r))
+
+    def test_list_resource_empty_in(self):
+        self.index.create_resource('generic', str(uuid.uuid4()),
+                                   str(uuid.uuid4()), str(uuid.uuid4()))
+        self.assertEqual(
+            [],
+            self.index.list_resources(
+                attribute_filter={"in": {"id": []}}))
+
+    def test_list_resource_weird_date(self):
+        self.assertRaises(
+            indexer.QueryValueError,
+            self.index.list_resources,
+            'generic',
+            attribute_filter={"=": {"started_at": "f00bar"}})
+
+    def test_list_resources_without_history(self):
+        e = uuid.uuid4()
+        rid = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        new_user = str(uuid.uuid4())
+        new_project = str(uuid.uuid4())
+
+        self.index.create_metric(e, user + ":" + project,
+                                 archive_policy_name="low")
+
+        self.index.create_resource('generic', rid, user + ":" + project,
+                                   user, project,
+                                   metrics={'foo': e})
+        r2 = self.index.update_resource('generic', rid, user_id=new_user,
+                                        project_id=new_project,
+                                        append_metrics=True).jsonify()
+
+        self.assertEqual({'foo': str(e)}, r2['metrics'])
+        self.assertEqual(new_user, r2['user_id'])
+        self.assertEqual(new_project, r2['project_id'])
+        resources = self.index.list_resources('generic', history=False,
+                                              details=True)
+        self.assertGreaterEqual(len(resources), 1)
+        expected_resources = [r.jsonify() for r in resources
+                              if r.id == rid]
+        self.assertIn(r2, expected_resources)
+
+    def test_list_resources_with_history(self):
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        rid = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        new_user = str(uuid.uuid4())
+        new_project = str(uuid.uuid4())
+
+        self.index.create_metric(e1, creator, archive_policy_name="low")
+        self.index.create_metric(e2, creator, archive_policy_name="low")
+        self.index.create_metric(uuid.uuid4(), creator,
+                                 archive_policy_name="low")
+
+        r1 = self.index.create_resource('generic', rid, creator, user, project,
+                                        metrics={'foo': e1, 'bar': e2}
+                                        ).jsonify()
+        r2 = self.index.update_resource('generic', rid, user_id=new_user,
+                                        project_id=new_project,
+                                        append_metrics=True).jsonify()
+
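+        # The update closes the first revision: its revision_end is set
+        # to the start of the new revision.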
+        r1['revision_end'] = r2['revision_start']
+        r2['revision_end'] = None
+        self.assertEqual({'foo': str(e1),
+                          'bar': str(e2)}, r2['metrics'])
+        self.assertEqual(new_user, r2['user_id'])
+        self.assertEqual(new_project, r2['project_id'])
+        resources = self.index.list_resources('generic', history=True,
+                                              details=False,
+                                              attribute_filter={
+                                                  "=": {"id": rid}})
+        self.assertGreaterEqual(len(resources), 2)
+        resources = sorted(
+            [r.jsonify() for r in resources],
+            key=operator.itemgetter("revision_start"))
+        self.assertEqual([r1, r2], resources)
+
+    def test_list_resources_custom_with_history(self):
+        e1 = uuid.uuid4()
+        e2 = uuid.uuid4()
+        rid = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        new_user = str(uuid.uuid4())
+        new_project = str(uuid.uuid4())
+
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {
+                "col1": {"type": "string", "required": True,
+                         "min_length": 2, "max_length": 15}
+            }, 'creating'))
+
+        self.index.create_metric(e1, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(e2, creator,
+                                 archive_policy_name="low")
+        self.index.create_metric(uuid.uuid4(), creator,
+                                 archive_policy_name="low")
+
+        r1 = self.index.create_resource(resource_type, rid, creator,
+                                        user, project,
+                                        col1="foo",
+                                        metrics={'foo': e1, 'bar': e2}
+                                        ).jsonify()
+        r2 = self.index.update_resource(resource_type, rid, user_id=new_user,
+                                        project_id=new_project,
+                                        col1="bar",
+                                        append_metrics=True).jsonify()
+
+        r1['revision_end'] = r2['revision_start']
+        r2['revision_end'] = None
+        self.assertEqual({'foo': str(e1),
+                          'bar': str(e2)}, r2['metrics'])
+        self.assertEqual(new_user, r2['user_id'])
+        self.assertEqual(new_project, r2['project_id'])
+        self.assertEqual('bar', r2['col1'])
+        resources = self.index.list_resources(resource_type, history=True,
+                                              details=False,
+                                              attribute_filter={
+                                                  "=": {"id": rid}})
+        self.assertGreaterEqual(len(resources), 2)
+        resources = sorted(
+            [r.jsonify() for r in resources],
+            key=operator.itemgetter("revision_start"))
+        self.assertEqual([r1, r2], resources)
+
+    def test_list_resources_started_after_ended_before(self):
+        # NOTE(jd) This test is a bit fuzzy right now as we use the same
+        # database for all tests and the tests run concurrently, but for
+        # now it's better than nothing.
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        g = self.index.create_resource(
+            'generic', r1, creator,
+            started_at=utils.datetime_utc(2000, 1, 1, 23, 23, 23),
+            ended_at=utils.datetime_utc(2000, 1, 3, 23, 23, 23))
+        r2 = uuid.uuid4()
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {}, 'creating'))
+        i = self.index.create_resource(
+            resource_type, r2, creator,
+            started_at=utils.datetime_utc(2000, 1, 1, 23, 23, 23),
+            ended_at=utils.datetime_utc(2000, 1, 4, 23, 23, 23))
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={
+                "and":
+                [{">=": {"started_at":
+                         utils.datetime_utc(2000, 1, 1, 23, 23, 23)}},
+                 {"<": {"ended_at":
+                        utils.datetime_utc(2000, 1, 5, 23, 23, 23)}}]})
+        self.assertGreaterEqual(len(resources), 2)
+        g_found = False
+        i_found = False
+        for r in resources:
+            if r.id == r1:
+                self.assertEqual(g, r)
+                g_found = True
+            elif r.id == r2:
+                i_found = True
+            if i_found and g_found:
+                break
+        else:
+            self.fail("Some resources were not found")
+
+        resources = self.index.list_resources(
+            resource_type,
+            attribute_filter={
+                ">=": {
+                    "started_at": datetime.datetime(2000, 1, 1, 23, 23, 23)
+                },
+            })
+        self.assertGreaterEqual(len(resources), 1)
+        for r in resources:
+            if r.id == r2:
+                self.assertEqual(i, r)
+                break
+        else:
+            self.fail("Some resources were not found")
+
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={
+                "<": {
+                    "ended_at": datetime.datetime(1999, 1, 1, 23, 23, 23)
+                },
+            })
+        self.assertEqual(0, len(resources))
+
+    def test_deletes_resources(self):
+        r1 = uuid.uuid4()
+        r2 = uuid.uuid4()
+        user = str(uuid.uuid4())
+        project = str(uuid.uuid4())
+        creator = user + ":" + project
+        metrics = {'foo': {'archive_policy_name': 'medium'}}
+        g1 = self.index.create_resource('generic', r1, creator,
+                                        user, project, metrics=metrics)
+        g2 = self.index.create_resource('generic', r2, creator,
+                                        user, project, metrics=metrics)
+
+        metrics = self.index.list_metrics(
+            attribute_filter={"in": {"id": [g1['metrics'][0]['id'],
+                                            g2['metrics'][0]['id']]}})
+        self.assertEqual(2, len(metrics))
+        for m in metrics:
+            self.assertEqual('active', m['status'])
+
+        deleted = self.index.delete_resources(
+            'generic',
+            attribute_filter={"=": {"user_id": user}})
+        self.assertEqual(2, deleted)
+
+        resources = self.index.list_resources(
+            'generic',
+            attribute_filter={"=": {"user_id": user}})
+        self.assertEqual(0, len(resources))
+
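+        # Deleting resources does not drop their metrics right away:
+        # they are switched to the 'delete' status for later purging.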
+        metrics = self.index.list_metrics(
+            attribute_filter={"in": {"id": [g1['metrics'][0]['id'],
+                                            g2['metrics'][0]['id']]}},
+            status='delete')
+        self.assertEqual(2, len(metrics))
+        for m in metrics:
+            self.assertEqual('delete', m['status'])
+
+    def test_get_metric(self):
+        e1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator, archive_policy_name="low")
+
+        metric = self.index.list_metrics(attribute_filter={"=": {"id": e1}})
+        self.assertEqual(1, len(metric))
+        metric = metric[0]
+        self.assertEqual(e1, metric.id)
+        self.assertEqual(creator, metric.creator)
+        self.assertIsNone(metric.name)
+        self.assertIsNone(metric.resource_id)
+
+    def test_get_metric_with_details(self):
+        e1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1,
+                                 creator,
+                                 archive_policy_name="low")
+
+        metric = self.index.list_metrics(attribute_filter={"=": {"id": e1}})
+        self.assertEqual(1, len(metric))
+        metric = metric[0]
+        self.assertEqual(e1, metric.id)
+        self.assertEqual(creator, metric.creator)
+        self.assertIsNone(metric.name)
+        self.assertIsNone(metric.resource_id)
+        self.assertEqual(self.archive_policies['low'], metric.archive_policy)
+
+    def test_get_metric_with_bad_uuid(self):
+        e1 = uuid.uuid4()
+        self.assertEqual([], self.index.list_metrics(
+            attribute_filter={"=": {"id": e1}}))
+
+    def test_get_metric_empty_list_uuids(self):
+        self.assertEqual([], self.index.list_metrics(
+            attribute_filter={"in": {"id": []}}))
+
+    def test_list_metrics(self):
+        e1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        self.index.create_metric(e1, creator, archive_policy_name="low")
+        e2 = uuid.uuid4()
+        self.index.create_metric(e2, creator, archive_policy_name="low")
+        metrics = self.index.list_metrics()
+        id_list = [m.id for m in metrics]
+        self.assertIn(e1, id_list)
+        # Test ordering
+        if e1 < e2:
+            self.assertLess(id_list.index(e1), id_list.index(e2))
+        else:
+            self.assertLess(id_list.index(e2), id_list.index(e1))
+
+    def test_list_metrics_resource_filter(self):
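+        # resource_policy_filter matches metrics through the attributes
+        # of the resource they are attached to; m2 has no resource and
+        # must therefore be excluded.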
+        r1 = uuid.uuid4()
+        creator = str(uuid.uuid4())
+        m1 = uuid.uuid4()
+        m2 = uuid.uuid4()
+        project_id = str(uuid.uuid4())
+        self.index.create_resource("generic", r1, creator,
+                                   project_id=project_id)
+        self.index.create_metric(m1, creator, archive_policy_name="low",
+                                 resource_id=r1)
+        self.index.create_metric(m2, creator, archive_policy_name="low")
+        metrics = self.index.list_metrics(
+            resource_policy_filter={"=": {"project_id": project_id}})
+        id_list = [m.id for m in metrics]
+        self.assertIn(m1, id_list)
+        self.assertNotIn(m2, id_list)
+
+    def test_list_metrics_delete_status(self):
+        e1 = uuid.uuid4()
+        self.index.create_metric(e1, str(uuid.uuid4()),
+                                 archive_policy_name="low")
+        self.index.delete_metric(e1)
+        metrics = self.index.list_metrics()
+        self.assertNotIn(e1, [m.id for m in metrics])
+
+    def test_resource_type_crud(self):
+        mgr = self.index.get_resource_type_schema()
+        rtype = mgr.resource_type_from_dict("indexer_test", {
+            "col1": {"type": "string", "required": True,
+                     "min_length": 2, "max_length": 15}
+        }, "creating")
+
+        # Create
+        self.index.create_resource_type(rtype)
+        self.assertRaises(indexer.ResourceTypeAlreadyExists,
+                          self.index.create_resource_type,
+                          rtype)
+
+        # Get
+        rtype = self.index.get_resource_type("indexer_test")
+        self.assertEqual("indexer_test", rtype.name)
+        self.assertEqual(1, len(rtype.attributes))
+        self.assertEqual("col1", rtype.attributes[0].name)
+        self.assertEqual("string", rtype.attributes[0].typename)
+        self.assertEqual(15, rtype.attributes[0].max_length)
+        self.assertEqual(2, rtype.attributes[0].min_length)
+        self.assertEqual("active", rtype.state)
+
+        # List
+        rtypes = self.index.list_resource_types()
+        for rtype in rtypes:
+            if rtype.name == "indexer_test":
+                break
+        else:
+            self.fail("indexer_test not found")
+
+        # Test resource itself
+        rid = uuid.uuid4()
+        self.index.create_resource("indexer_test", rid,
+                                   str(uuid.uuid4()),
+                                   str(uuid.uuid4()),
+                                   col1="col1_value")
+        r = self.index.get_resource("indexer_test", rid)
+        self.assertEqual("indexer_test", r.type)
+        self.assertEqual("col1_value", r.col1)
+
+        # Update the resource type
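+        # col3 is required, so the "fill" option backfills existing
+        # resources with the value 15.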
+        add_attrs = mgr.resource_type_from_dict("indexer_test", {
+            "col2": {"type": "number", "required": False,
+                     "max": 100, "min": 0},
+            "col3": {"type": "number", "required": True,
+                     "max": 100, "min": 0, "options": {'fill': 15}}
+        }, "creating").attributes
+        self.index.update_resource_type("indexer_test",
+                                        add_attributes=add_attrs)
+
+        # Check the new attribute
+        r = self.index.get_resource("indexer_test", rid)
+        self.assertIsNone(r.col2)
+        self.assertEqual(15, r.col3)
+
+        self.index.update_resource("indexer_test", rid, col2=10)
+
+        rl = self.index.list_resources('indexer_test',
+                                       {"=": {"id": rid}},
+                                       history=True,
+                                       sorts=['revision_start:asc',
+                                              'started_at:asc'])
+        self.assertEqual(2, len(rl))
+        self.assertIsNone(rl[0].col2)
+        self.assertEqual(10, rl[1].col2)
+        self.assertEqual(15, rl[0].col3)
+        self.assertEqual(15, rl[1].col3)
+
+        # Deletion
+        self.assertRaises(indexer.ResourceTypeInUse,
+                          self.index.delete_resource_type,
+                          "indexer_test")
+        self.index.delete_resource(rid)
+        self.index.delete_resource_type("indexer_test")
+
+        # Ensure it's deleted
+        self.assertRaises(indexer.NoSuchResourceType,
+                          self.index.get_resource_type,
+                          "indexer_test")
+
+        self.assertRaises(indexer.NoSuchResourceType,
+                          self.index.delete_resource_type,
+                          "indexer_test")
+
+    def _get_rt_state(self, name):
+        return self.index.get_resource_type(name).state
+
+    def test_resource_type_unexpected_creation_error(self):
+        mgr = self.index.get_resource_type_schema()
+        rtype = mgr.resource_type_from_dict("indexer_test_fail", {
+            "col1": {"type": "string", "required": True,
+                     "min_length": 2, "max_length": 15}
+        }, "creating")
+
+        states = {'before': None,
+                  'after': None}
+
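+        # Capture the resource type's state mid-operation, then raise
+        # to simulate an unexpected failure while creating its tables.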
+        def map_and_create_mock(rt, conn):
+            states['before'] = self._get_rt_state("indexer_test_fail")
+            raise MockException("boom!")
+
+        with mock.patch.object(self.index._RESOURCE_TYPE_MANAGER,
+                               "map_and_create_tables",
+                               side_effect=map_and_create_mock):
+            self.assertRaises(MockException,
+                              self.index.create_resource_type,
+                              rtype)
+            states['after'] = self._get_rt_state('indexer_test_fail')
+
+        self.assertEqual([('after', 'creation_error'),
+                          ('before', 'creating')],
+                         sorted(states.items()))
+
+    def test_resource_type_unexpected_deleting_error(self):
+        mgr = self.index.get_resource_type_schema()
+        rtype = mgr.resource_type_from_dict("indexer_test_fail2", {
+            "col1": {"type": "string", "required": True,
+                     "min_length": 2, "max_length": 15}
+        }, "creating")
+        self.index.create_resource_type(rtype)
+
+        states = {'before': None,
+                  'after': None}
+
+        def unmap_and_delete_mock(rt, conn):
+            states['before'] = self._get_rt_state("indexer_test_fail2")
+            raise MockException("boom!")
+
+        with mock.patch.object(self.index._RESOURCE_TYPE_MANAGER,
+                               "unmap_and_delete_tables",
+                               side_effect=unmap_and_delete_mock):
+            self.assertRaises(MockException,
+                              self.index.delete_resource_type,
+                              rtype.name)
+            states['after'] = self._get_rt_state('indexer_test_fail2')
+
+        self.assertEqual([('after', 'deletion_error'),
+                          ('before', 'deleting')],
+                         sorted(states.items()))
+
+        # We can clean up the mess!
+        self.index.delete_resource_type("indexer_test_fail2")
+
+        # Ensure it's deleted
+        self.assertRaises(indexer.NoSuchResourceType,
+                          self.index.get_resource_type,
+                          "indexer_test_fail2")
+
+        self.assertRaises(indexer.NoSuchResourceType,
+                          self.index.delete_resource_type,
+                          "indexer_test_fail2")
diff --git a/gnocchi/tests/test_influxdb.py b/gnocchi/tests/test_influxdb.py
new file mode 100644
index 0000000000000000000000000000000000000000..5bc801bb4c6c8d684fc60e8c41ab5ab4eb61b897
--- /dev/null
+++ b/gnocchi/tests/test_influxdb.py
@@ -0,0 +1,198 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2017 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import numpy
+import pyparsing
+
+from gnocchi.rest import influxdb
+from gnocchi.tests import base
+
+
+class TestInfluxDBLineProtocol(base.BaseTestCase):
+    def test_line_protocol_parser_ok(self):
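+        # Each entry pairs an InfluxDB line protocol sample
+        # ("measurement[,tag=value...] field=value[,...] [timestamp]")
+        # with its expected parse: [measurement, tags, fields,
+        # timestamp]. An "i" suffix marks integer fields; timestamps
+        # are nanoseconds since the epoch, and a missing timestamp
+        # parses to None.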
+        lines = (
+            ('cpu,cpu=cpu2,host=abydos usage_system=11.1,usage_idle=73.2,usage_nice=0,usage_irq=0,usage_user=15.7,usage_softirq=0,usage_steal=0,usage_guest=0,usage_guest_nice=0,usage_iowait=0 1510150170000000000',  # noqa
+             ['cpu',
+              {'host': 'abydos',
+               'cpu': 'cpu2'},
+              {'usage_guest': 0.0,
+               'usage_nice': 0.0,
+               'usage_steal': 0.0,
+               'usage_iowait': 0.0,
+               'usage_user': 15.7,
+               'usage_idle': 73.2,
+               'usage_softirq': 0.0,
+               'usage_guest_nice': 0.0,
+               'usage_irq': 0.0,
+               'usage_system': 11.1},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('cpu,cpu=cpu-total,host=abydos usage_idle=79.2198049512378,usage_nice=0,usage_iowait=0,usage_steal=0,usage_guest=0,usage_guest_nice=0,usage_system=9.202300575143786,usage_irq=0,usage_softirq=0,usage_user=11.577894473618404 1510150170000000000',  # noqa
+             ['cpu',
+              {'cpu': 'cpu-total',
+               'host': 'abydos'},
+              {'usage_guest': 0.0,
+               'usage_guest_nice': 0.0,
+               'usage_idle': 79.2198049512378,
+               'usage_iowait': 0.0,
+               'usage_irq': 0.0,
+               'usage_nice': 0.0,
+               'usage_softirq': 0.0,
+               'usage_steal': 0.0,
+               'usage_system': 9.202300575143786,
+               'usage_user': 11.577894473618404},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('diskio,name=disk0,host=abydos io_time=11020501i,iops_in_progress=0i,read_bytes=413847966208i,read_time=9816308i,write_time=1204193i,weighted_io_time=0i,reads=33523907i,writes=7321123i,write_bytes=141510539264i 1510150170000000000',  # noqa
+             ['diskio',
+              {'host': 'abydos',
+               'name': 'disk0'},
+              {'io_time': 11020501,
+               'iops_in_progress': 0,
+               'read_bytes': 413847966208,
+               'read_time': 9816308,
+               'reads': 33523907,
+               'weighted_io_time': 0,
+               'write_bytes': 141510539264,
+               'write_time': 1204193,
+               'writes': 7321123},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('disk,path=/,device=disk1s1,fstype=apfs,host=abydos total=250140434432i,free=28950695936i,used=216213557248i,used_percent=88.19130621205531,inodes_total=9223372036854775807i,inodes_free=9223372036850748963i,inodes_used=4026844i 1510150170000000000',  # noqa
+             ['disk',
+              {'device': 'disk1s1', 'fstype': 'apfs',
+               'host': 'abydos', 'path': '/'},
+              {'free': 28950695936,
+               'inodes_free': 9223372036850748963,
+               'inodes_total': 9223372036854775807,
+               'inodes_used': 4026844,
+               'total': 250140434432,
+               'used': 216213557248,
+               'used_percent': 88.19130621205531},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('mem,host=abydos free=16195584i,available_percent=24.886322021484375,used=6452215808i,cached=0i,buffered=0i,active=2122153984i,inactive=2121523200i,used_percent=75.11367797851562,total=8589934592i,available=2137718784i 1510150170000000000',  # noqa
+             ['mem',
+              {'host': 'abydos'},
+              {'active': 2122153984,
+               'available': 2137718784,
+               'available_percent': 24.886322021484375,
+               'buffered': 0,
+               'cached': 0,
+               'free': 16195584,
+               'inactive': 2121523200,
+               'total': 8589934592,
+               'used': 6452215808,
+               'used_percent': 75.11367797851562},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('disk,path=/private/var/vm,device=disk1s4,fstype=apfs,host=abydos inodes_total=9223372036854775807i,inodes_free=9223372036854775803i,inodes_used=4i,total=250140434432i,free=28950695936i,used=4296265728i,used_percent=12.922280752806417 1510150170000000000',  # noqa
+             ['disk',
+              {'device': 'disk1s4',
+               'fstype': 'apfs',
+               'host': 'abydos',
+               'path': '/private/var/vm'},
+              {'free': 28950695936,
+               'inodes_free': 9223372036854775803,
+               'inodes_total': 9223372036854775807,
+               'inodes_used': 4,
+               'total': 250140434432,
+               'used': 4296265728,
+               'used_percent': 12.922280752806417},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('swap,host=abydos used=2689073152i,free=532152320i,used_percent=83.47981770833334,total=3221225472i 1510150170000000000',  # noqa
+             ['swap',
+              {'host': 'abydos'},
+              {'free': 532152320,
+               'total': 3221225472,
+               'used': 2689073152,
+               'used_percent': 83.47981770833334},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('swap,host=abydos in=0i,out=0i 1510150170000000000',
+             ['swap',
+              {'host': 'abydos'},
+              {'in': 0, 'out': 0},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('processes,host=abydos stopped=0i,running=2i,sleeping=379i,total=382i,unknown=0i,idle=0i,blocked=1i,zombies=0i 1510150170000000000',  # noqa
+             ['processes',
+              {'host': 'abydos'},
+              {'blocked': 1,
+               'idle': 0,
+               'running': 2,
+               'sleeping': 379,
+               'stopped': 0,
+               'total': 382,
+               'unknown': 0,
+               'zombies': 0},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('system,host=abydos load5=3.02,load15=3.31,n_users=1i,n_cpus=4i,load1=2.18 1510150170000000000',  # noqa
+             ['system',
+              {'host': 'abydos'},
+              {'load1': 2.18,
+               'load15': 3.31,
+               'load5': 3.02,
+               'n_cpus': 4,
+               'n_users': 1},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('system,host=abydos uptime=337369i,uptime_format="3 days, 21:42" 1510150170000000000',  # noqa
+             ['system',
+              {'host': 'abydos'},
+              {'uptime': 337369, 'uptime_format': '3 days, 21:42'},
+              numpy.datetime64('2017-11-08T14:09:30.000000000')]),
+            ('notag up=1 123234',
+             ['notag',
+              {},
+              {'up': 1.0},
+              numpy.datetime64('1970-01-01T00:00:00.000123234')]),
+            ('notag up=3 ', ['notag', {}, {'up': 3.0}, None]),
+        )
+        for line, result in lines:
+            parsed = list(influxdb.line_protocol.parseString(line))
+            self.assertEqual(result, parsed)
+
+    def test_line_protocol_parser_fail(self):
+        lines = (
+            "measurement, field=1",
+            "measurement, field=1 123",
+            "measurement,tag=value 123",
+            "measurement,tag=value , 123",
+            "measurement,tag=value 123",
+            ",tag=value 123",
+            "foobar,tag=value field=string 123",
+        )
+        for line in lines:
+            self.assertRaises(pyparsing.ParseException,
+                              influxdb.line_protocol.parseString,
+                              line)
+
+    def test_query_parser_ok(self):
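+        # The query parser only understands "CREATE DATABASE <name>",
+        # case-insensitively and with optional trailing semicolons, and
+        # yields the database name.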
+        lines = (
+            "CREATE DATABASE foobar;",
+            "CREATE DATABASE foobar  ;",
+            "CREATE DATABASE foobar  ;;;",
+            "CrEaTe   Database foobar",
+            "create Database    foobar",
+        )
+        for line in lines:
+            parsed = list(influxdb.query_parser.parseString(line))[0]
+            self.assertEqual("foobar", parsed)
+
+    def test_query_parser_fail(self):
+        lines = (
+            "SELECT",
+            "hey yo foobar;",
+            "help database foobar;",
+            "something weird",
+            "create stuff foobar",
+        )
+        for line in lines:
+            self.assertRaises(pyparsing.ParseException,
+                              influxdb.query_parser.parseString,
+                              line)
diff --git a/gnocchi/tests/test_injector.py b/gnocchi/tests/test_injector.py
new file mode 100644
index 0000000000000000000000000000000000000000..b01b7fdd0d865549f6b010efe3771f2b465de1fc
--- /dev/null
+++ b/gnocchi/tests/test_injector.py
@@ -0,0 +1,30 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2018 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from gnocchi.cli import injector
+from gnocchi.tests import base
+
+
+class InjectorTestCase(base.TestCase):
+    def test_inject(self):
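+        # Injecting 10 measures into each of 10 metrics should report
+        # 100 measures in total.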
+        self.assertEqual(100, injector._inject(
+            self.incoming, self.coord, self.storage, self.index,
+            measures=10, metrics=10))
+
+    def test_inject_process(self):
+        self.assertEqual(100, injector._inject(
+            self.incoming, self.coord, self.storage, self.index,
+            measures=10, metrics=10, process=True))
diff --git a/gnocchi/tests/test_rest.py b/gnocchi/tests/test_rest.py
new file mode 100644
index 0000000000000000000000000000000000000000..74592e109b525aa7a555006ee9d4cd0e326811a6
--- /dev/null
+++ b/gnocchi/tests/test_rest.py
@@ -0,0 +1,2013 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016 Red Hat, Inc.
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import base64
+import calendar
+import contextlib
+import datetime
+from email import utils as email_utils
+import hashlib
+import json
+import uuid
+
+import fixtures
+import iso8601
+from keystonemiddleware import fixture as ksm_fixture
+import mock
+import six
+import testscenarios
+from testtools import testcase
+import webtest
+
+import gnocchi
+from gnocchi import archive_policy
+from gnocchi.rest import api
+from gnocchi.rest import app
+from gnocchi.tests import base as tests_base
+from gnocchi import utils
+
+
+load_tests = testscenarios.load_tests_apply_scenarios
+
+
+class TestingApp(webtest.TestApp):
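+    # webtest wrapper that injects credentials for the configured auth
+    # mode on every request and processes freshly posted measures once
+    # each request completes.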
+    VALID_TOKEN_ADMIN = str(uuid.uuid4())
+    USER_ID_ADMIN = str(uuid.uuid4())
+    PROJECT_ID_ADMIN = str(uuid.uuid4())
+
+    VALID_TOKEN = str(uuid.uuid4())
+    USER_ID = str(uuid.uuid4())
+    PROJECT_ID = str(uuid.uuid4())
+
+    VALID_TOKEN_2 = str(uuid.uuid4())
+    USER_ID_2 = str(uuid.uuid4())
+    PROJECT_ID_2 = str(uuid.uuid4())
+
+    INVALID_TOKEN = str(uuid.uuid4())
+
+    def __init__(self, *args, **kwargs):
+        self.auth_mode = kwargs.pop('auth_mode')
+        self.chef = kwargs.pop('chef')
+        super(TestingApp, self).__init__(*args, **kwargs)
+        # Setup Keystone auth_token fake cache
+        self.token = self.VALID_TOKEN
+        # Setup default user for basic auth
+        self.user = self.USER_ID.encode('ascii')
+
+    @contextlib.contextmanager
+    def use_admin_user(self):
+        if self.auth_mode == "keystone":
+            old_token = self.token
+            self.token = self.VALID_TOKEN_ADMIN
+            try:
+                yield
+            finally:
+                self.token = old_token
+        elif self.auth_mode == "basic":
+            old_user = self.user
+            self.user = b"admin"
+            try:
+                yield
+            finally:
+                self.user = old_user
+        elif self.auth_mode == "remoteuser":
+            old_user = self.user
+            self.user = b"admin"
+            try:
+                yield
+            finally:
+                self.user = old_user
+        else:
+            raise RuntimeError("Unknown auth_mode")
+
+    @contextlib.contextmanager
+    def use_another_user(self):
+        if self.auth_mode != "keystone":
+            raise testcase.TestSkipped("Auth mode is not Keystone")
+        old_token = self.token
+        self.token = self.VALID_TOKEN_2
+        try:
+            yield
+        finally:
+            self.token = old_token
+
+    @contextlib.contextmanager
+    def use_invalid_token(self):
+        if self.auth_mode != "keystone":
+            raise testcase.TestSkipped("Auth mode is not Keystone")
+        old_token = self.token
+        self.token = self.INVALID_TOKEN
+        try:
+            yield
+        finally:
+            self.token = old_token
+
+    def do_request(self, req, *args, **kwargs):
+        if self.auth_mode in "keystone":
+            if self.token is not None:
+                req.headers['X-Auth-Token'] = self.token
+        elif self.auth_mode == "basic":
+            req.headers['Authorization'] = (
+                b"basic " + base64.b64encode(self.user + b":")
+            )
+        elif self.auth_mode == "remoteuser":
+            req.remote_user = self.user
+        response = super(TestingApp, self).do_request(req, *args, **kwargs)
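+        # Process any measures posted by the request synchronously so that
+        # tests can read back aggregated values right away.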
+        for sack in self.chef.incoming.iter_sacks():
+            self.chef.process_new_measures_for_sack(
+                sack, blocking=True, sync=True)
+        return response
+
+
+class RestTest(tests_base.TestCase, testscenarios.TestWithScenarios):
+
+    scenarios = [
+        ('basic', dict(auth_mode="basic")),
+        ('keystone', dict(auth_mode="keystone")),
+        ('remoteuser', dict(auth_mode="remoteuser")),
+    ]
+
+    def setUp(self):
+        super(RestTest, self).setUp()
+
+        if self.auth_mode == "keystone":
+            self.auth_token_fixture = self.useFixture(
+                ksm_fixture.AuthTokenFixture())
+            self.auth_token_fixture.add_token_data(
+                is_v2=True,
+                token_id=TestingApp.VALID_TOKEN_ADMIN,
+                user_id=TestingApp.USER_ID_ADMIN,
+                user_name='adminusername',
+                project_id=TestingApp.PROJECT_ID_ADMIN,
+                role_list=['admin'])
+            self.auth_token_fixture.add_token_data(
+                is_v2=True,
+                token_id=TestingApp.VALID_TOKEN,
+                user_id=TestingApp.USER_ID,
+                user_name='myusername',
+                project_id=TestingApp.PROJECT_ID,
+                role_list=["member"])
+            self.auth_token_fixture.add_token_data(
+                is_v2=True,
+                token_id=TestingApp.VALID_TOKEN_2,
+                user_id=TestingApp.USER_ID_2,
+                user_name='myusername2',
+                project_id=TestingApp.PROJECT_ID_2,
+                role_list=["member"])
+
+        self.conf.set_override("auth_mode", self.auth_mode, group="api")
+
+        self.useFixture(fixtures.MockPatchObject(
+            app.GnocchiHook, "_lazy_load", self._fake_lazy_load))
+
+        self.app = TestingApp(app.load_app(conf=self.conf,
+                                           not_implemented_middleware=False),
+                              chef=self.chef,
+                              auth_mode=self.auth_mode)
+
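+    # Short-circuit driver loading so the API hooks use the storage,
+    # indexer, incoming and coordinator fixtures from the base test case.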
+    def _fake_lazy_load(self, name):
+        if name == "storage":
+            return self.storage
+        elif name == "indexer":
+            return self.index
+        elif name == "incoming":
+            return self.incoming
+        elif name == "coordinator":
+            return self.coord
+        else:
+            raise RuntimeError("Invalid driver type: %s" % name)
+
+    # NOTE(jd) Used at least by docs
+    @staticmethod
+    def runTest():
+        pass
+
+
+class RootTest(RestTest):
+    def test_deserialize_force_json(self):
+        with self.app.use_admin_user():
+            self.app.post(
+                "/v1/archive_policy",
+                params="foo",
+                status=415)
+
+    def test_capabilities(self):
+        aggregation_methods = set(
+            archive_policy.ArchivePolicy.VALID_AGGREGATION_METHODS)
+        result = self.app.get("/v1/capabilities").json
+        self.assertEqual(
+            sorted(aggregation_methods),
+            sorted(result['aggregation_methods']))
+
+    def test_version(self):
+        with self.app.use_admin_user():
+            r = self.app.get("/")
+        self.assertEqual(
+            json.loads(r.text)['build'],
+            gnocchi.__version__)
+
+    def test_status(self):
+        with self.app.use_admin_user():
+            r = self.app.get("/v1/status")
+        status = json.loads(r.text)
+        self.assertIsInstance(status['storage']['measures_to_process'], dict)
+        self.assertIsInstance(status['storage']['summary']['metrics'], int)
+        self.assertIsInstance(status['storage']['summary']['measures'], int)
+
+
+class ArchivePolicyTest(RestTest):
+    """Test the ArchivePolicies REST API.
+
+    See also gnocchi/tests/gabbi/gabbits/archive.yaml
+    """
+
+    # TODO(chdent): The tests left here involve inspecting the
+    # aggregation methods, which gabbi can't currently handle because
+    # the ordering of the results is not predictable.
+
+    def test_post_archive_policy_with_agg_methods(self):
+        name = str(uuid.uuid4())
+        with self.app.use_admin_user():
+            result = self.app.post_json(
+                "/v1/archive_policy",
+                params={"name": name,
+                        "aggregation_methods": ["mean"],
+                        "definition":
+                        [{
+                            "granularity": "1 minute",
+                            "points": 20,
+                        }]},
+                status=201)
+        self.assertEqual("application/json", result.content_type)
+        ap = json.loads(result.text)
+        self.assertEqual(['mean'], ap['aggregation_methods'])
+
+    def test_post_archive_policy_with_agg_methods_minus(self):
+        name = str(uuid.uuid4())
+        with self.app.use_admin_user():
+            result = self.app.post_json(
+                "/v1/archive_policy",
+                params={"name": name,
+                        "aggregation_methods": ["-mean"],
+                        "definition":
+                        [{
+                            "granularity": "1 minute",
+                            "points": 20,
+                        }]},
+                status=201)
+        self.assertEqual("application/json", result.content_type)
+        ap = json.loads(result.text)
+        self.assertEqual(
+            (set(self.conf.archive_policy.default_aggregation_methods)
+             - set(['mean'])),
+            set(ap['aggregation_methods']))
+
+    def test_get_archive_policy(self):
+        result = self.app.get("/v1/archive_policy/medium")
+        ap = json.loads(result.text)
+        ap_dict = self.archive_policies['medium'].jsonify()
+        ap_dict['definition'] = [
+            archive_policy.ArchivePolicyItem(**d).jsonify()
+            for d in ap_dict['definition']
+        ]
+        self.assertEqual(set(ap['aggregation_methods']),
+                         ap_dict['aggregation_methods'])
+        del ap['aggregation_methods']
+        del ap_dict['aggregation_methods']
+        self.assertEqual(ap_dict, ap)
+
+    def test_list_archive_policy(self):
+        result = self.app.get("/v1/archive_policy")
+        aps = json.loads(result.text)
+        # Convert each aggregation_methods list to a set for unordered
+        # comparison
+        for ap in aps:
+            ap['aggregation_methods'] = set(ap['aggregation_methods'])
+        for name, ap in six.iteritems(self.archive_policies):
+            apj = ap.jsonify()
+            apj['definition'] = [
+                archive_policy.ArchivePolicyItem(**d).jsonify()
+                for d in ap.definition
+            ]
+            self.assertIn(apj, aps)
+
+
+class MetricTest(RestTest):
+
+    def test_get_metric_with_another_user_linked_resource(self):
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": str(uuid.uuid4()),
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": TestingApp.USER_ID_2,
+                "project_id": TestingApp.PROJECT_ID_2,
+                "metrics": {"foobar": {"archive_policy_name": "low"}},
+            })
+        resource = json.loads(result.text)
+        metric_id = resource["metrics"]["foobar"]
+        with self.app.use_another_user():
+            self.app.get("/v1/metric/%s" % metric_id)
+
+    def test_list_metric_with_another_user(self):
+        metric_created = self.app.post_json(
+            "/v1/metric",
+            params={"archive_policy_name": "medium"},
+            status=201)
+
+        metric_id = metric_created.json["id"]
+
+        with self.app.use_another_user():
+            metric_list = self.app.get("/v1/metric")
+            self.assertNotIn(metric_id, [m["id"] for m in metric_list.json])
+
+    def test_list_metric_with_another_user_allowed(self):
+        rid = str(uuid.uuid4())
+        r = self.app.post_json("/v1/resource/generic",
+                               params={
+                                   "id": rid,
+                                   "project_id": TestingApp.PROJECT_ID_2,
+                                   "metrics": {
+                                       "disk": {"archive_policy_name": "low"},
+                                   }
+                               })
+        metric_id = r.json['metrics']['disk']
+
+        with self.app.use_another_user():
+            metric_list = self.app.get("/v1/metric")
+            self.assertIn(metric_id, [m["id"] for m in metric_list.json])
+
+    def test_get_metric_with_another_user(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"},
+                                    status=201)
+        self.assertEqual("application/json", result.content_type)
+
+        with self.app.use_another_user():
+            self.app.get(result.headers['Location'], status=403)
+
+    def test_post_archive_policy_no_mean(self):
+        """Test that we have a 404 if mean is not in AP."""
+        ap = str(uuid.uuid4())
+        with self.app.use_admin_user():
+            self.app.post_json(
+                "/v1/archive_policy",
+                params={"name": ap,
+                        "aggregation_methods": ["max"],
+                        "definition": [{
+                            "granularity": "10s",
+                            "points": 20,
+                        }]},
+                status=201)
+        result = self.app.post_json(
+            "/v1/metric",
+            params={"archive_policy_name": ap},
+            status=201)
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 8},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 16}])
+        self.app.get("/v1/metric/%s/measures" % metric['id'],
+                     status=404)
+
+    def test_delete_metric_another_user(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric = json.loads(result.text)
+        with self.app.use_another_user():
+            self.app.delete("/v1/metric/" + metric['id'], status=403)
+
+    def test_add_measure_with_another_user(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "high"})
+        metric = json.loads(result.text)
+        with self.app.use_another_user():
+            self.app.post_json(
+                "/v1/metric/%s/measures" % metric['id'],
+                params=[{"timestamp": '2013-01-01 23:23:23',
+                         "value": 1234.2}],
+                status=403)
+
+    def test_add_measures_back_window(self):
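+        # A back_window of 2 lets measures arrive up to two granularity
+        # periods (two minutes here) behind the newest processed timestamp;
+        # anything older is dropped, as asserted below.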
+        ap_name = str(uuid.uuid4())
+        with self.app.use_admin_user():
+            self.app.post_json(
+                "/v1/archive_policy",
+                params={"name": ap_name,
+                        "back_window": 2,
+                        "definition":
+                        [{
+                            "granularity": "1 minute",
+                            "points": 20,
+                        }]},
+                status=201)
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": ap_name})
+        metric = json.loads(result.text)
+        self.app.post_json(
+            "/v1/metric/%s/measures" % metric['id'],
+            params=[{"timestamp": '2013-01-01 23:30:23',
+                     "value": 1234.2}],
+            status=202)
+        self.app.post_json(
+            "/v1/metric/%s/measures" % metric['id'],
+            params=[{"timestamp": '2013-01-01 23:29:23',
+                     "value": 1234.2}],
+            status=202)
+        self.app.post_json(
+            "/v1/metric/%s/measures" % metric['id'],
+            params=[{"timestamp": '2013-01-01 23:28:23',
+                     "value": 1234.2}],
+            status=202)
+        # This one is too old and should not be taken into account
+        self.app.post_json(
+            "/v1/metric/%s/measures" % metric['id'],
+            params=[{"timestamp": '2012-01-01 23:27:23',
+                     "value": 1234.2}],
+            status=202)
+
+        ret = self.app.get("/v1/metric/%s/measures" % metric['id'])
+        result = json.loads(ret.text)
+        self.assertEqual(
+            [[u'2013-01-01T23:28:00+00:00', 60.0, 1234.2],
+             [u'2013-01-01T23:29:00+00:00', 60.0, 1234.2],
+             [u'2013-01-01T23:30:00+00:00', 60.0, 1234.2]],
+            result)
+
+    def test_get_measure_with_another_user(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "low"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 23:23:23',
+                                    "value": 1234.2}])
+        with self.app.use_another_user():
+            self.app.get("/v1/metric/%s/measures" % metric['id'],
+                         status=403)
+
+    def test_get_measures_with_another_user_allowed(self):
+        rid = str(uuid.uuid4())
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": rid,
+                "project_id": TestingApp.PROJECT_ID_2,
+                "metrics": {
+                    "disk": {"archive_policy_name": "low"},
+                }
+            })
+        metric_id = result.json['metrics']['disk']
+        measures_url = "/v1/resource/generic/%s/metric/disk/measures" % rid
+        self.app.post_json(measures_url,
+                           params=[{"timestamp": '2013-01-01 23:23:23',
+                                    "value": 1234.2}])
+        with self.app.use_another_user():
+            result = self.app.get(measures_url)
+            self.assertEqual(
+                [['2013-01-01T00:00:00+00:00', 86400.0, 1234.2],
+                 ['2013-01-01T23:00:00+00:00', 3600.0, 1234.2],
+                 ['2013-01-01T23:20:00+00:00', 300.0, 1234.2]],
+                result.json)
+            result = self.app.get("/v1/metric/%s/measures" % metric_id)
+            self.assertEqual(
+                [['2013-01-01T00:00:00+00:00', 86400.0, 1234.2],
+                 ['2013-01-01T23:00:00+00:00', 3600.0, 1234.2],
+                 ['2013-01-01T23:20:00+00:00', 300.0, 1234.2]],
+                result.json)
+
+    def test_get_measures_with_another_user_disallowed(self):
+        rid = str(uuid.uuid4())
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": rid,
+                "metrics": {
+                    "disk": {"archive_policy_name": "low"},
+                }
+            })
+        metric_id = result.json['metrics']['disk']
+        measures_url = "/v1/resource/generic/%s/metric/disk/measures" % rid
+        self.app.post_json(measures_url,
+                           params=[{"timestamp": '2013-01-01 23:23:23',
+                                    "value": 1234.2}])
+        with self.app.use_another_user():
+            self.app.get(measures_url, status=403)
+            self.app.get("/v1/metric/%s/measures" % metric_id, status=403)
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_get_measure_start_relative(self, utcnow):
+        """Make sure the timestamps can be relative to now."""
+        utcnow.return_value = datetime.datetime(2014, 1, 1, 10, 23)
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "high"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": utils.utcnow().isoformat(),
+                                    "value": 1234.2}])
+        ret = self.app.get(
+            "/v1/metric/%s/measures?start=-10 minutes"
+            % metric['id'],
+            status=200)
+        result = json.loads(ret.text)
+        now = utils.datetime_utc(2014, 1, 1, 10, 23)
+        self.assertEqual([
+            ['2014-01-01T10:00:00+00:00', 3600.0, 1234.2],
+            [(now
+              - datetime.timedelta(
+                  seconds=now.second,
+                  microseconds=now.microsecond)).isoformat(),
+             60.0, 1234.2],
+            [(now
+              - datetime.timedelta(
+                  microseconds=now.microsecond)).isoformat(),
+             1.0, 1234.2]], result)
+
+    def test_get_measure_stop(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "high"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:00',
+                                    "value": 1234.2},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 456}])
+        ret = self.app.get("/v1/metric/%s/measures"
+                           "?stop=2013-01-01 12:00:01" % metric['id'],
+                           status=200)
+        result = json.loads(ret.text)
+        self.assertEqual(
+            [[u'2013-01-01T12:00:00+00:00', 3600.0, 845.1],
+             [u'2013-01-01T12:00:00+00:00', 60.0, 845.1],
+             [u'2013-01-01T12:00:00+00:00', 1.0, 1234.2]],
+            result)
+
+    def test_get_measure_aggregation(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 123.2},
+                                   {"timestamp": '2013-01-01 12:00:03',
+                                    "value": 12345.2},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 1234.2}])
+        ret = self.app.get(
+            "/v1/metric/%s/measures?aggregation=max" % metric['id'],
+            status=200)
+        result = json.loads(ret.text)
+        self.assertEqual([[u'2013-01-01T00:00:00+00:00', 86400.0, 12345.2],
+                          [u'2013-01-01T12:00:00+00:00', 3600.0, 12345.2],
+                          [u'2013-01-01T12:00:00+00:00', 60.0, 12345.2]],
+                         result)
+
+    def test_get_resource_missing_named_metric_measure_aggregation(self):
+        mgr = self.index.get_resource_type_schema()
+        resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(resource_type, {
+                "server_group": {"type": "string",
+                                 "min_length": 1,
+                                 "max_length": 40,
+                                 "required": True}
+            }, 'creating'))
+
+        attributes = {
+            "server_group": str(uuid.uuid4()),
+        }
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric1 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric1['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 8},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 16}])
+
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric2 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric2['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 0},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 4}])
+
+        attributes['id'] = str(uuid.uuid4())
+        attributes['metrics'] = {'foo': metric1['id']}
+        self.app.post_json("/v1/resource/" + resource_type,
+                           params=attributes)
+
+        attributes['id'] = str(uuid.uuid4())
+        attributes['metrics'] = {'bar': metric2['id']}
+        self.app.post_json("/v1/resource/" + resource_type,
+                           params=attributes)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/%s/metric/foo?aggregation=max"
+            % resource_type,
+            params={"=": {"server_group": attributes['server_group']}})
+
+        measures = json.loads(result.text)
+        self.assertEqual([[u'2013-01-01T00:00:00+00:00', 86400.0, 16.0],
+                          [u'2013-01-01T12:00:00+00:00', 3600.0, 16.0],
+                          [u'2013-01-01T12:00:00+00:00', 60.0, 16.0]],
+                         measures)
+
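+    # The search DSL accepts both the unicode operator spellings (∧, ≥)
+    # and the ASCII forms ("and", ">=") used by other tests in this file.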
+    def test_search_value(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "high"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:00',
+                                    "value": 1234.2},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 456}])
+        metric1 = metric['id']
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "high"})
+        metric = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric['id'],
+                           params=[{"timestamp": '2013-01-01 12:30:00',
+                                    "value": 1234.2},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 456}])
+        metric2 = metric['id']
+
+        ret = self.app.post_json(
+            "/v1/search/metric?metric_id=%s&metric_id=%s"
+            "&stop=2013-01-01 12:10:00" % (metric1, metric2),
+            params={u"∧": [{u"≥": 1000}]},
+            status=200)
+        result = json.loads(ret.text)
+        self.assertEqual(
+            {metric1: [[u'2013-01-01T12:00:00+00:00', 1.0, 1234.2]],
+             metric2: []},
+            result)
+
+
+class ResourceTest(RestTest):
+    def setUp(self):
+        super(ResourceTest, self).setUp()
+        self.attributes = {
+            "id": str(uuid.uuid4()),
+            "started_at": "2014-01-03T02:02:02+00:00",
+            "user_id": str(uuid.uuid4()),
+            "project_id": str(uuid.uuid4()),
+            "name": "my-name",
+        }
+        self.patchable_attributes = {
+            "ended_at": "2014-01-03T02:02:02+00:00",
+            "name": "new-name",
+        }
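+        # Build the full resource representation the API is expected to
+        # return, deriving the creator fields from the active auth mode.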
+        self.resource = self.attributes.copy()
+        # Set original_resource_id
+        self.resource['original_resource_id'] = self.resource['id']
+        self.resource['created_by_user_id'] = TestingApp.USER_ID
+        if self.auth_mode == "keystone":
+            self.resource['created_by_project_id'] = TestingApp.PROJECT_ID
+            self.resource['creator'] = (
+                TestingApp.USER_ID + ":" + TestingApp.PROJECT_ID
+            )
+        elif self.auth_mode in ["basic", "remoteuser"]:
+            self.resource['created_by_project_id'] = ""
+            self.resource['creator'] = TestingApp.USER_ID
+        self.resource['ended_at'] = None
+        self.resource['metrics'] = {}
+        if 'user_id' not in self.resource:
+            self.resource['user_id'] = None
+        if 'project_id' not in self.resource:
+            self.resource['project_id'] = None
+
+        mgr = self.index.get_resource_type_schema()
+        self.resource_type = str(uuid.uuid4())
+        self.index.create_resource_type(
+            mgr.resource_type_from_dict(self.resource_type, {
+                "name": {"type": "string",
+                         "min_length": 1,
+                         "max_length": 40,
+                         "required": True}
+            }, "creating"))
+        self.resource['type'] = self.resource_type
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_post_resource(self, utcnow):
+        utcnow.return_value = utils.datetime_utc(2014, 1, 1, 10, 23)
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=201)
+        resource = json.loads(result.text)
+        self.assertEqual("http://localhost/v1/resource/"
+                         + self.resource_type + "/" + self.attributes['id'],
+                         result.headers['Location'])
+        self.assertIsNone(resource['revision_end'])
+        self.assertEqual(resource['revision_start'],
+                         "2014-01-01T10:23:00+00:00")
+        self._check_etag(result, resource)
+        del resource['revision_start']
+        del resource['revision_end']
+        self.assertEqual(self.resource, resource)
+
+    def test_post_resource_with_invalid_metric(self):
+        metric_id = str(uuid.uuid4())
+        self.attributes['metrics'] = {"foo": metric_id}
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=400)
+        self.assertIn("Metric %s does not exist" % metric_id,
+                      result.text)
+
+    def test_post_resource_with_metric_from_other_user(self):
+        with self.app.use_another_user():
+            metric = self.app.post_json(
+                "/v1/metric",
+                params={'archive_policy_name': "high"})
+        metric_id = json.loads(metric.text)['id']
+        self.attributes['metrics'] = {"foo": metric_id}
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=400)
+        self.assertIn("Metric %s does not exist" % metric_id,
+                      result.text)
+
+    def test_post_resource_already_exist(self):
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=201)
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=409)
+        self.assertIn("Resource %s already exists" % self.attributes['id'],
+                      result.text)
+
+    def test_post_invalid_timestamp(self):
+        self.attributes['started_at'] = "2014-01-01 02:02:02"
+        self.attributes['ended_at'] = "2013-01-01 02:02:02"
+        self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes,
+            status=400)
+
+    @staticmethod
+    def _strtime_to_httpdate(dt):
+        return email_utils.formatdate(calendar.timegm(
+            iso8601.parse_date(dt).timetuple()), usegmt=True)
+
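+    # The ETag is the SHA-1 of the resource id concatenated with its
+    # revision_start; recompute it here to validate the response headers.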
+    def _check_etag(self, response, resource):
+        lastmodified = self._strtime_to_httpdate(resource['revision_start'])
+        etag = hashlib.sha1()
+        etag.update(resource['id'].encode('utf-8'))
+        etag.update(resource['revision_start'].encode('utf8'))
+        self.assertEqual(response.headers['Last-Modified'], lastmodified)
+        self.assertEqual(response.headers['ETag'], '"%s"' % etag.hexdigest())
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_get_resource(self, utcnow):
+        utcnow.return_value = utils.datetime_utc(2014, 1, 1, 10, 23)
+        result = self.app.post_json("/v1/resource/" + self.resource_type,
+                                    params=self.attributes,
+                                    status=201)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type
+                              + "/"
+                              + self.attributes['id'])
+        resource = json.loads(result.text)
+        self.assertIsNone(resource['revision_end'])
+        self.assertEqual(resource['revision_start'],
+                         "2014-01-01T10:23:00+00:00")
+        self._check_etag(result, resource)
+        del resource['revision_start']
+        del resource['revision_end']
+        self.assertEqual(self.resource, resource)
+
+    def test_get_resource_etag(self):
+        result = self.app.post_json("/v1/resource/" + self.resource_type,
+                                    params=self.attributes,
+                                    status=201)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type
+                              + "/"
+                              + self.attributes['id'])
+        resource = json.loads(result.text)
+        etag = hashlib.sha1()
+        etag.update(resource['id'].encode('utf-8'))
+        etag.update(resource['revision_start'].encode('utf-8'))
+        etag = etag.hexdigest()
+        lastmodified = self._strtime_to_httpdate(resource['revision_start'])
+        oldlastmodified = self._strtime_to_httpdate("2000-01-01 00:00:00")
+
+        # if-match and if-unmodified-since
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': 'fake'},
+                     status=412)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': etag},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-unmodified-since': lastmodified},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-unmodified-since': oldlastmodified},
+                     status=412)
+        # Some cases with '*'
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-none-match': '*'},
+                     status=304)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/wrongid",
+                     headers={'if-none-match': '*'},
+                     status=404)
+        # if-match always takes precedence when both headers are provided
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': etag,
+                              'if-unmodified-since': lastmodified},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': etag,
+                              'if-unmodified-since': oldlastmodified},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': '*',
+                              'if-unmodified-since': oldlastmodified},
+                     status=200)
+
+        # if-none-match and if-modified-since
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-none-match': etag},
+                     status=304)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-none-match': 'fake'},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-modified-since': lastmodified},
+                     status=304)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-modified-since': oldlastmodified},
+                     status=200)
+        # if-none-match always takes precedence when both headers are provided
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-modified-since': oldlastmodified,
+                              'if-none-match': etag},
+                     status=304)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-modified-since': oldlastmodified,
+                              'if-none-match': '*'},
+                     status=304)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-modified-since': lastmodified,
+                              'if-none-match': '*'},
+                     status=304)
+        # Some cases with '*'
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-match': '*'},
+                     status=200)
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/wrongid",
+                     headers={'if-match': '*'},
+                     status=404)
+
+        # if-none-match and if-match
+        self.app.get("/v1/resource/" + self.resource_type
+                     + "/" + self.attributes['id'],
+                     headers={'if-none-match': etag,
+                              'if-match': etag},
+                     status=304)
+
+        # if-none-match returns 412 instead of 304 for PUT/PATCH/DELETE
+        self.app.patch_json("/v1/resource/" + self.resource_type
+                            + "/" + self.attributes['id'],
+                            headers={'if-none-match': '*'},
+                            status=412)
+        self.app.delete("/v1/resource/" + self.resource_type
+                        + "/" + self.attributes['id'],
+                        headers={'if-none-match': '*'},
+                        status=412)
+
+        # if-modified-since is ignored with PATCH/PUT/DELETE
+        self.app.patch_json("/v1/resource/" + self.resource_type
+                            + "/" + self.attributes['id'],
+                            params=self.patchable_attributes,
+                            headers={'if-modified-since': lastmodified},
+                            status=200)
+        self.app.delete("/v1/resource/" + self.resource_type
+                        + "/" + self.attributes['id'],
+                        headers={'if-modified-since': lastmodified},
+                        status=204)
+
+    def test_get_resource_non_admin(self):
+        with self.app.use_another_user():
+            self.app.post_json("/v1/resource/" + self.resource_type,
+                               params=self.attributes,
+                               status=201)
+            self.app.get("/v1/resource/"
+                         + self.resource_type
+                         + "/"
+                         + self.attributes['id'],
+                         status=200)
+
+    def test_get_resource_unauthorized(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        with self.app.use_another_user():
+            self.app.get("/v1/resource/"
+                         + self.resource_type
+                         + "/"
+                         + self.attributes['id'],
+                         status=403)
+
+    def test_get_resource_named_metric(self):
+        self.attributes['metrics'] = {'foo': {'archive_policy_name': "high"}}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        self.app.get("/v1/resource/"
+                     + self.resource_type
+                     + "/"
+                     + self.attributes['id']
+                     + "/metric/foo/measures",
+                     status=200)
+
+    def test_list_resource_metrics_unauthorized(self):
+        self.attributes['metrics'] = {'foo': {'archive_policy_name': "high"}}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        with self.app.use_another_user():
+            self.app.get(
+                "/v1/resource/" + self.resource_type
+                + "/" + self.attributes['id'] + "/metric",
+                status=403)
+
+    def test_delete_resource_named_metric(self):
+        self.attributes['metrics'] = {'foo': {'archive_policy_name': "high"}}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        self.app.delete("/v1/resource/"
+                        + self.resource_type
+                        + "/"
+                        + self.attributes['id']
+                        + "/metric/foo",
+                        status=204)
+        self.app.delete("/v1/resource/"
+                        + self.resource_type
+                        + "/"
+                        + self.attributes['id']
+                        + "/metric/foo/measures",
+                        status=404)
+
+    def test_get_resource_unknown_named_metric(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        self.app.get("/v1/resource/"
+                     + self.resource_type
+                     + "/"
+                     + self.attributes['id']
+                     + "/metric/foo",
+                     status=404)
+
+    def test_post_append_metrics_already_exists(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        metrics = {'foo': {'archive_policy_name': "high"}}
+        self.app.post_json("/v1/resource/" + self.resource_type
+                           + "/" + self.attributes['id'] + "/metric",
+                           params=metrics, status=200)
+        metrics = {'foo': {'archive_policy_name': "low"}}
+        self.app.post_json("/v1/resource/" + self.resource_type
+                           + "/" + self.attributes['id']
+                           + "/metric",
+                           params=metrics,
+                           status=409)
+
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        self.assertTrue(uuid.UUID(result['metrics']['foo']))
+
+    def test_post_append_metrics(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        metrics = {'foo': {'archive_policy_name': "high"}}
+        self.app.post_json("/v1/resource/" + self.resource_type
+                           + "/" + self.attributes['id'] + "/metric",
+                           params=metrics, status=200)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        self.assertTrue(uuid.UUID(result['metrics']['foo']))
+
+    def test_post_append_metrics_created_by_different_user(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        with self.app.use_another_user():
+            metric = self.app.post_json(
+                "/v1/metric",
+                params={'archive_policy_name': "high"})
+        metric_id = json.loads(metric.text)['id']
+        result = self.app.post_json("/v1/resource/" + self.resource_type
+                                    + "/" + self.attributes['id'] + "/metric",
+                                    params={str(uuid.uuid4()): metric_id},
+                                    status=400)
+        self.assertIn("Metric %s does not exist" % metric_id, result.text)
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_patch_resource_metrics(self, utcnow):
+        utcnow.return_value = utils.datetime_utc(2014, 1, 1, 10, 23)
+        result = self.app.post_json("/v1/resource/" + self.resource_type,
+                                    params=self.attributes,
+                                    status=201)
+        r = json.loads(result.text)
+
+        utcnow.return_value = utils.datetime_utc(2014, 1, 2, 6, 49)
+        new_metrics = {'foo': {'archive_policy_name': "medium"}}
+        self.app.patch_json(
+            "/v1/resource/" + self.resource_type + "/"
+            + self.attributes['id'],
+            params={'metrics': new_metrics},
+            status=200)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        self.assertTrue(uuid.UUID(result['metrics']['foo']))
+        self.assertIsNone(result['revision_end'])
+        self.assertIsNone(r['revision_end'])
+        self.assertEqual(result['revision_start'], "2014-01-01T10:23:00+00:00")
+        self.assertEqual(r['revision_start'], "2014-01-01T10:23:00+00:00")
+
+        del result['metrics']
+        del result['revision_start']
+        del result['revision_end']
+        del r['metrics']
+        del r['revision_start']
+        del r['revision_end']
+        self.assertEqual(r, result)
+
+    def test_patch_resource_existent_metrics_from_another_user(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        with self.app.use_another_user():
+            result = self.app.post_json(
+                "/v1/metric",
+                params={'archive_policy_name': "medium"})
+        metric_id = json.loads(result.text)['id']
+        result = self.app.patch_json(
+            "/v1/resource/"
+            + self.resource_type
+            + "/"
+            + self.attributes['id'],
+            params={'metrics': {'foo': metric_id}},
+            status=400)
+        self.assertIn("Metric %s does not exist" % metric_id, result.text)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type
+                              + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        self.assertEqual({}, result['metrics'])
+
+    def test_patch_resource_non_existent_metrics(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        e1 = str(uuid.uuid4())
+        result = self.app.patch_json(
+            "/v1/resource/"
+            + self.resource_type
+            + "/"
+            + self.attributes['id'],
+            params={'metrics': {'foo': e1}},
+            status=400)
+        self.assertIn("Metric %s does not exist" % e1, result.text)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type
+                              + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        self.assertEqual({}, result['metrics'])
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_patch_resource_attributes(self, utcnow):
+        utcnow.return_value = utils.datetime_utc(2014, 1, 1, 10, 23)
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        utcnow.return_value = utils.datetime_utc(2014, 1, 2, 6, 48)
+        presponse = self.app.patch_json(
+            "/v1/resource/" + self.resource_type
+            + "/" + self.attributes['id'],
+            params=self.patchable_attributes,
+            status=200)
+        response = self.app.get("/v1/resource/" + self.resource_type
+                                + "/" + self.attributes['id'])
+        result = json.loads(response.text)
+        presult = json.loads(presponse.text)
+        self.assertEqual(result, presult)
+        for k, v in six.iteritems(self.patchable_attributes):
+            self.assertEqual(v, result[k])
+        self.assertIsNone(result['revision_end'])
+        self.assertEqual(result['revision_start'],
+                         "2014-01-02T06:48:00+00:00")
+        self._check_etag(response, result)
+
+        # Check the history
+        history = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type + "?history=true",
+            headers={"Accept": "application/json"},
+            params={"=": {"id": result['id']}},
+            status=200)
+        history = json.loads(history.text)
+        self.assertGreaterEqual(len(history), 2)
+        self.assertEqual(result, history[1])
+
+        h = history[0]
+        for k, v in six.iteritems(self.attributes):
+            self.assertEqual(v, h[k])
+        self.assertEqual(h['revision_end'],
+                         "2014-01-02T06:48:00+00:00")
+        self.assertEqual(h['revision_start'],
+                         "2014-01-01T10:23:00+00:00")
+
+    def test_patch_resource_attributes_unauthorized(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        with self.app.use_another_user():
+            self.app.patch_json(
+                "/v1/resource/" + self.resource_type
+                + "/" + self.attributes['id'],
+                params=self.patchable_attributes,
+                status=403)
+
+    def test_patch_resource_ended_at_before_started_at(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        self.app.patch_json(
+            "/v1/resource/"
+            + self.resource_type
+            + "/"
+            + self.attributes['id'],
+            params={'ended_at': "2000-05-05 23:23:23"},
+            status=400)
+
+    def test_patch_resource_no_partial_update(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        e1 = str(uuid.uuid4())
+        result = self.app.patch_json(
+            "/v1/resource/" + self.resource_type + "/"
+            + self.attributes['id'],
+            params={'ended_at': "2044-05-05 23:23:23",
+                    'metrics': {"foo": e1}},
+            status=400)
+        self.assertIn("Metric %s does not exist" % e1, result.text)
+        result = self.app.get("/v1/resource/"
+                              + self.resource_type + "/"
+                              + self.attributes['id'])
+        result = json.loads(result.text)
+        del result['revision_start']
+        del result['revision_end']
+        self.assertEqual(self.resource, result)
+
+    def test_patch_resource_non_existent(self):
+        self.app.patch_json(
+            "/v1/resource/" + self.resource_type
+            + "/" + str(uuid.uuid4()),
+            params={},
+            status=404)
+
+    def test_patch_resource_non_existent_with_body(self):
+        self.app.patch_json(
+            "/v1/resource/" + self.resource_type
+            + "/" + str(uuid.uuid4()),
+            params=self.patchable_attributes,
+            status=404)
+
+    def test_patch_resource_unknown_field(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        result = self.app.patch_json(
+            "/v1/resource/" + self.resource_type + "/"
+            + self.attributes['id'],
+            params={'foobar': 123},
+            status=400,
+            headers={"Accept": "application/json"}
+        )
+
+        result_description = result.json['description']
+        self.assertEqual("Invalid input", result_description['cause'])
+        self.assertIn(
+            "extra keys not allowed @ data[", result_description['reason']
+        )
+
+    def test_delete_resource(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        self.app.get("/v1/resource/" + self.resource_type + "/"
+                     + self.attributes['id'],
+                     status=200)
+        self.app.delete("/v1/resource/" + self.resource_type + "/"
+                        + self.attributes['id'],
+                        status=204)
+        self.app.get("/v1/resource/" + self.resource_type + "/"
+                     + self.attributes['id'],
+                     status=404)
+
+    def test_delete_resource_with_metrics(self):
+        metric = self.app.post_json(
+            "/v1/metric",
+            params={'archive_policy_name': "high"})
+        metric_id = json.loads(metric.text)['id']
+        metric_name = six.text_type(uuid.uuid4())
+        self.attributes['metrics'] = {metric_name: metric_id}
+        self.app.get("/v1/metric/" + metric_id,
+                     status=200)
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        self.app.get("/v1/resource/" + self.resource_type + "/"
+                     + self.attributes['id'],
+                     status=200)
+        self.app.delete("/v1/resource/" + self.resource_type + "/"
+                        + self.attributes['id'],
+                        status=204)
+        self.app.get("/v1/resource/" + self.resource_type + "/"
+                     + self.attributes['id'],
+                     status=404)
+        self.app.get("/v1/metric/" + metric_id,
+                     status=404)
+
+    def test_delete_resource_unauthorized(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+        with self.app.use_another_user():
+            self.app.delete("/v1/resource/" + self.resource_type + "/"
+                            + self.attributes['id'],
+                            status=403)
+
+    def test_delete_resource_non_existent(self):
+        result = self.app.delete("/v1/resource/" + self.resource_type + "/"
+                                 + self.attributes['id'],
+                                 status=404)
+        self.assertIn(
+            "Resource %s does not exist" % self.attributes['id'],
+            result.text)
+
+    def test_post_resource_with_metrics(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric = json.loads(result.text)
+        self.attributes['metrics'] = {"foo": metric['id']}
+        result = self.app.post_json("/v1/resource/" + self.resource_type,
+                                    params=self.attributes,
+                                    status=201)
+        resource = json.loads(result.text)
+        self.assertEqual("http://localhost/v1/resource/"
+                         + self.resource_type + "/"
+                         + self.attributes['id'],
+                         result.headers['Location'])
+        self.resource['metrics'] = self.attributes['metrics']
+        del resource['revision_start']
+        del resource['revision_end']
+        self.assertEqual(self.resource, resource)
+
+    def test_post_resource_with_null_metrics(self):
+        self.attributes['metrics'] = {"foo": {"archive_policy_name": "low"}}
+        result = self.app.post_json("/v1/resource/" + self.resource_type,
+                                    params=self.attributes,
+                                    status=201)
+        resource = json.loads(result.text)
+        self.assertEqual("http://localhost/v1/resource/"
+                         + self.resource_type + "/"
+                         + self.attributes['id'],
+                         result.headers['Location'])
+        self.assertEqual(self.attributes['id'], resource["id"])
+        metric_id = uuid.UUID(resource['metrics']['foo'])
+        result = self.app.get("/v1/metric/" + str(metric_id) + "/measures",
+                              status=200)
+
+    def test_search_datetime(self):
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes,
+                           status=201)
+        result = self.app.get("/v1/resource/" + self.resource_type
+                              + "/" + self.attributes['id'])
+        result = json.loads(result.text)
+
+        resources = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"and": [{"=": {"id": result['id']}},
+                            {"=": {"ended_at": None}}]},
+            status=200)
+        resources = json.loads(resources.text)
+        self.assertGreaterEqual(len(resources), 1)
+        self.assertEqual(result, resources[0])
+
+        resources = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type + "?history=true",
+            headers={"Accept": "application/json"},
+            params={"and": [
+                {"=": {"id": result['id']}},
+                {"or": [{">=": {"revision_end": '2014-01-03T02:02:02'}},
+                        {"=": {"revision_end": None}}]}
+            ]},
+            status=200)
+        resources = json.loads(resources.text)
+        self.assertGreaterEqual(len(resources), 1)
+        self.assertEqual(result, resources[0])
+
+    def test_search_resource_by_original_resource_id(self):
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes)
+        created_resource = json.loads(result.text)
+        original_id = created_resource['original_resource_id']
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"eq": {"original_resource_id": original_id}},
+            status=200)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 1)
+        self.assertEqual(created_resource, resources[0])
+
+    def test_search_resources_by_user(self):
+        u1 = str(uuid.uuid4())
+        self.attributes['user_id'] = u1
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes)
+        created_resource = json.loads(result.text)
+        result = self.app.post_json("/v1/search/resource/generic",
+                                    params={"eq": {"user_id": u1}},
+                                    status=200)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 1)
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"=": {"user_id": u1}},
+            status=200)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 1)
+        self.assertEqual(created_resource, resources[0])
+
+    def test_search_resources_with_another_project_id(self):
+        u1 = str(uuid.uuid4())
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": str(uuid.uuid4()),
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": u1,
+                "project_id": TestingApp.PROJECT_ID_2,
+            })
+        g = json.loads(result.text)
+
+        with self.app.use_another_user():
+            result = self.app.post_json(
+                "/v1/resource/generic",
+                params={
+                    "id": str(uuid.uuid4()),
+                    "started_at": "2014-01-01 03:03:03",
+                    "user_id": u1,
+                    "project_id": str(uuid.uuid4()),
+                })
+            j = json.loads(result.text)
+            g_found = False
+            j_found = False
+
+            result = self.app.post_json(
+                "/v1/search/resource/generic",
+                params={"=": {"user_id": u1}},
+                status=200)
+            resources = json.loads(result.text)
+            self.assertGreaterEqual(len(resources), 2)
+            for r in resources:
+                if r['id'] == str(g['id']):
+                    self.assertEqual(g, r)
+                    g_found = True
+                elif r['id'] == str(j['id']):
+                    self.assertEqual(j, r)
+                    j_found = True
+                if g_found and j_found:
+                    break
+            else:
+                self.fail("Some resources were not found")
+
+    def test_search_resources_by_unknown_field(self):
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"=": {"foobar": "baz"}},
+            status=400)
+        self.assertIn("Resource type " + self.resource_type
+                      + " has no foobar attribute",
+                      result.text)
+
+    def test_search_resources_started_after(self):
+        # NOTE(jd) So this test is a bit fuzzy right now, as we use the same
+        # database for all tests and the tests are running concurrently, but
+        # for now it'll be better than nothing.
+        result = self.app.post_json(
+            "/v1/resource/generic/",
+            params={
+                "id": str(uuid.uuid4()),
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": str(uuid.uuid4()),
+                "project_id": str(uuid.uuid4()),
+            })
+        g = json.loads(result.text)
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes)
+        i = json.loads(result.text)
+        result = self.app.post_json(
+            "/v1/search/resource/generic",
+            params={"≥": {"started_at": "2014-01-01"}},
+            status=200)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 2)
+
+        i_found = False
+        g_found = False
+        for r in resources:
+            if r['id'] == str(g['id']):
+                self.assertEqual(g, r)
+                g_found = True
+            elif r['id'] == str(i['id']):
+                i_found = True
+            if i_found and g_found:
+                break
+        else:
+            self.fail("Some resources were not found")
+
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={">=": {"started_at": "2014-01-03"}})
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 1)
+        for r in resources:
+            if r['id'] == str(i['id']):
+                self.assertEqual(i, r)
+                break
+        else:
+            self.fail("Some resources were not found")
+
+    def test_list_resources_with_bad_details(self):
+        result = self.app.get("/v1/resource/generic?details=awesome",
+                              status=400)
+        self.assertIn(
+            b"Unable to parse `details': invalid truth value",
+            result.body)
+
+    def _do_test_list_resources_with_detail(self, request):
+        # NOTE(jd) So this test is a bit fuzzy right now, as we use the same
+        # database for all tests and the tests are running concurrently, but
+        # for now it'll be better than nothing.
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": str(uuid.uuid4()),
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": str(uuid.uuid4()),
+                "project_id": str(uuid.uuid4()),
+            })
+        g = json.loads(result.text)
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes)
+        i = json.loads(result.text)
+        result = request()
+        self.assertEqual(200, result.status_code)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 2)
+
+        i_found = False
+        g_found = False
+        for r in resources:
+            if r['id'] == str(g['id']):
+                self.assertEqual(g, r)
+                g_found = True
+            elif r['id'] == str(i['id']):
+                i_found = True
+                # Check we got all the details
+                self.assertEqual(i, r)
+            if i_found and g_found:
+                break
+        else:
+            self.fail("Some resources were not found")
+
+        result = self.app.get("/v1/resource/" + self.resource_type)
+        resources = json.loads(result.text)
+        self.assertGreaterEqual(len(resources), 1)
+        for r in resources:
+            if r['id'] == str(i['id']):
+                self.assertEqual(i, r)
+                break
+        else:
+            self.fail("Some resources were not found")
+
+    def test_list_resources_with_another_project_id(self):
+        result = self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": str(uuid.uuid4()),
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": TestingApp.USER_ID_2,
+                "project_id": TestingApp.PROJECT_ID_2,
+            })
+        g = json.loads(result.text)
+
+        with self.app.use_another_user():
+            result = self.app.post_json(
+                "/v1/resource/generic",
+                params={
+                    "id": str(uuid.uuid4()),
+                    "started_at": "2014-01-01 03:03:03",
+                    "user_id": str(uuid.uuid4()),
+                    "project_id": str(uuid.uuid4()),
+                })
+            j = json.loads(result.text)
+
+            g_found = False
+            j_found = False
+
+            result = self.app.get("/v1/resource/generic")
+            self.assertEqual(200, result.status_code)
+            resources = json.loads(result.text)
+            self.assertGreaterEqual(len(resources), 2)
+            for r in resources:
+                if r['id'] == str(g['id']):
+                    self.assertEqual(g, r)
+                    g_found = True
+                elif r['id'] == str(j['id']):
+                    self.assertEqual(j, r)
+                    j_found = True
+                if g_found and j_found:
+                    break
+            else:
+                self.fail("Some resources were not found")
+
+    def test_list_resources_with_details(self):
+        self._do_test_list_resources_with_detail(
+            lambda: self.app.get("/v1/resource/generic?details=true"))
+
+    def test_search_resources_with_details(self):
+        self._do_test_list_resources_with_detail(
+            lambda: self.app.post("/v1/search/resource/generic?details=true"))
+
+    def test_get_res_named_metric_measure_aggregated_policies_invalid(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "low"})
+        metric1 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric1['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 16}])
+
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name":
+                                            "no_granularity_match"})
+        metric2 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric2['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 4}])
+
+        # NOTE(sileht): because the database is never cleaned between tests,
+        # we must ensure that the query will not match resources from another
+        # test; to achieve this, we set a different name in each test.
+        name = str(uuid.uuid4())
+        self.attributes['name'] = name
+
+        self.attributes['metrics'] = {'foo': metric1['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        self.attributes['id'] = str(uuid.uuid4())
+        self.attributes['metrics'] = {'foo': metric2['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/"
+            + self.resource_type + "/metric/foo?aggregation=max",
+            params={"=": {"name": name}},
+            status=400,
+            headers={"Accept": "application/json"})
+        self.assertEqual("Metrics can't being aggregated",
+                         result.json['description']['cause'])
+        self.assertEqual("No granularity match",
+                         result.json['description']['reason'])
+        self.assertEqual(
+            sorted([[metric1['id'], 'max'], [metric2['id'], 'max']]),
+            sorted(result.json['description']['detail']))
+
+    def test_get_res_named_metric_measure_aggregation_nooverlap(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric1 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric1['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 8},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 16}])
+
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric2 = json.loads(result.text)
+
+        # NOTE(sileht): because the database is never cleaned between tests,
+        # we must ensure that the query will not match resources from another
+        # test; to achieve this, we set a different name in each test.
+        name = str(uuid.uuid4())
+        self.attributes['name'] = name
+
+        self.attributes['metrics'] = {'foo': metric1['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        self.attributes['id'] = str(uuid.uuid4())
+        self.attributes['metrics'] = {'foo': metric2['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/" + self.resource_type
+            + "/metric/foo?aggregation=max",
+            params={"=": {"name": name}},
+            expect_errors=True)
+        self.assertEqual(400, result.status_code, result.text)
+        self.assertIn("No overlap", result.text)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/" + self.resource_type
+            + "/metric/foo?aggregation=max&needed_overlap=5&start=2013-01-01",
+            params={"=": {"name": name}},
+            expect_errors=True)
+        self.assertEqual(400, result.status_code, result.text)
+        self.assertIn("No overlap", result.text)
+
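+        # metric2 has no measures, so the two series cannot overlap;
+        # needed_overlap=0 disables the overlap check and the aggregation
+        # uses the single populated metric (min of 8 and 16 is 8).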
+        result = self.app.post_json(
+            "/v1/aggregation/resource/"
+            + self.resource_type + "/metric/foo?aggregation=min"
+            + "&needed_overlap=0&start=2013-01-01T00:00:00%2B00:00",
+            params={"=": {"name": name}})
+        self.assertEqual(200, result.status_code, result.text)
+        measures = json.loads(result.text)
+        self.assertEqual([['2013-01-01T00:00:00+00:00', 86400.0, 8.0],
+                          ['2013-01-01T12:00:00+00:00', 3600.0, 8.0],
+                          ['2013-01-01T12:00:00+00:00', 60.0, 8.0]],
+                         measures)
+
+    def test_get_res_named_metric_measure_aggregation_nominal(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric1 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric1['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 8},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 16}])
+
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric2 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric2['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 0},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 4}])
+
+        # NOTE(sileht): because the database is never cleaned between tests,
+        # we must ensure that the query will not match resources from another
+        # test; to achieve this, we set a different name in each test.
+        name = str(uuid.uuid4())
+        self.attributes['name'] = name
+
+        self.attributes['metrics'] = {'foo': metric1['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        self.attributes['id'] = str(uuid.uuid4())
+        self.attributes['metrics'] = {'foo': metric2['id']}
+        self.app.post_json("/v1/resource/" + self.resource_type,
+                           params=self.attributes)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/" + self.resource_type
+            + "/metric/foo?aggregation=max",
+            params={"=": {"name": name}},
+            expect_errors=True)
+
+        self.assertEqual(200, result.status_code, result.text)
+        measures = json.loads(result.text)
+        self.assertEqual([[u'2013-01-01T00:00:00+00:00', 86400.0, 16.0],
+                          [u'2013-01-01T12:00:00+00:00', 3600.0, 16.0],
+                          [u'2013-01-01T12:00:00+00:00', 60.0, 16.0]],
+                         measures)
+
+        result = self.app.post_json(
+            "/v1/aggregation/resource/"
+            + self.resource_type + "/metric/foo?aggregation=min",
+            params={"=": {"name": name}},
+            expect_errors=True)
+
+        self.assertEqual(200, result.status_code)
+        measures = json.loads(result.text)
+        self.assertEqual([['2013-01-01T00:00:00+00:00', 86400.0, 0],
+                          ['2013-01-01T12:00:00+00:00', 3600.0, 0],
+                          ['2013-01-01T12:00:00+00:00', 60.0, 0]],
+                         measures)
+
+    def test_get_aggregated_measures_across_entities_no_match(self):
+        result = self.app.post_json(
+            "/v1/aggregation/resource/"
+            + self.resource_type + "/metric/foo?aggregation=min",
+            params={"=": {"name": "none!"}},
+            expect_errors=True)
+
+        self.assertEqual(200, result.status_code)
+        measures = json.loads(result.text)
+        self.assertEqual([], measures)
+
+    def test_get_aggregated_measures_across_entities(self):
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric1 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric1['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 8},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 16}])
+
+        result = self.app.post_json("/v1/metric",
+                                    params={"archive_policy_name": "medium"})
+        metric2 = json.loads(result.text)
+        self.app.post_json("/v1/metric/%s/measures" % metric2['id'],
+                           params=[{"timestamp": '2013-01-01 12:00:01',
+                                    "value": 0},
+                                   {"timestamp": '2013-01-01 12:00:02',
+                                    "value": 4}])
+        # Check with one metric
+        result = self.app.get("/v1/aggregation/metric"
+                              "?aggregation=mean&metric=%s" % (metric2['id']))
+        measures = json.loads(result.text)
+        self.assertEqual([[u'2013-01-01T00:00:00+00:00', 86400.0, 2.0],
+                          [u'2013-01-01T12:00:00+00:00', 3600.0, 2.0],
+                          [u'2013-01-01T12:00:00+00:00', 60.0, 2.0]],
+                         measures)
+
+        # Check with two metrics
+        result = self.app.get("/v1/aggregation/metric"
+                              "?aggregation=mean&metric=%s&metric=%s" %
+                              (metric1['id'], metric2['id']))
+        measures = json.loads(result.text)
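+        # 7.0 is the mean across the two per-metric minute means:
+        # metric1 averages to 12, metric2 to 2, and (12 + 2) / 2 == 7.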
+        self.assertEqual([[u'2013-01-01T00:00:00+00:00', 86400.0, 7.0],
+                          [u'2013-01-01T12:00:00+00:00', 3600.0, 7.0],
+                          [u'2013-01-01T12:00:00+00:00', 60.0, 7.0]],
+                         measures)
+
+    def test_search_resources_with_like(self):
+        result = self.app.post_json(
+            "/v1/resource/" + self.resource_type,
+            params=self.attributes)
+        created_resource = json.loads(result.text)
+
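+        # "like" takes an SQL-style pattern where "%" matches any sequence,
+        # so "my%" matches the resource name set in self.attributes.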
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"like": {"name": "my%"}},
+            status=200)
+
+        resources = json.loads(result.text)
+        self.assertIn(created_resource, resources)
+
+        result = self.app.post_json(
+            "/v1/search/resource/" + self.resource_type,
+            params={"like": {"name": str(uuid.uuid4())}},
+            status=200)
+        resources = json.loads(result.text)
+        self.assertEqual([], resources)
+
+
+class GenericResourceTest(RestTest):
+    def test_list_resources_tied_to_user(self):
+        resource_id = str(uuid.uuid4())
+        self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": resource_id,
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": str(uuid.uuid4()),
+                "project_id": str(uuid.uuid4()),
+            })
+
+        with self.app.use_another_user():
+            result = self.app.get("/v1/resource/generic")
+            resources = json.loads(result.text)
+            for resource in resources:
+                if resource['id'] == resource_id:
+                    self.fail("Resource found")
+
+    def test_get_resources_metric_tied_to_user(self):
+        resource_id = str(uuid.uuid4())
+        self.app.post_json(
+            "/v1/resource/generic",
+            params={
+                "id": resource_id,
+                "started_at": "2014-01-01 02:02:02",
+                "user_id": TestingApp.USER_ID_2,
+                "project_id": TestingApp.PROJECT_ID_2,
+                "metrics": {"foobar": {"archive_policy_name": "low"}},
+            })
+
+        # This user created it, she can access it
+        self.app.get(
+            "/v1/resource/generic/%s/metric/foobar" % resource_id)
+
+        with self.app.use_another_user():
+            # This user "owns" it, so she should be able to access it
+            self.app.get(
+                "/v1/resource/generic/%s/metric/foobar" % resource_id)
+
+    def test_search_resources_invalid_query(self):
+        result = self.app.post_json(
+            "/v1/search/resource/generic",
+            params={"wrongoperator": {"user_id": "bar"}},
+            status=400,
+            headers={"Accept": "application/json"},
+        )
+
+        result_description = result.json['description']
+        self.assertEqual("Invalid input", result_description['cause'])
+        self.assertIn(
+            "extra keys not allowed @ data[", result_description['reason']
+        )
+
+
+class QueryStringSearchAttrFilterTest(tests_base.TestCase):
+    def _do_test(self, expr, expected):
+        req = api.QueryStringSearchAttrFilter._parse(expr)
+        self.assertEqual(expected, req)
+
+    def test_search_query_builder(self):
+        self._do_test('foo=7EED6CC3-EDC8-48C9-8EF6-8A36B9ACC91C',
+                      {"=": {"foo": "7EED6CC3-EDC8-48C9-8EF6-8A36B9ACC91C"}})
+        self._do_test('foo=7EED6CC3EDC848C98EF68A36B9ACC91C',
+                      {"=": {"foo": "7EED6CC3EDC848C98EF68A36B9ACC91C"}})
+        self._do_test('foo=bar', {"=": {"foo": "bar"}})
+        self._do_test('foo!=1', {"!=": {"foo": 1.0}})
+        self._do_test('foo=True', {"=": {"foo": True}})
+        self._do_test('foo=null', {"=": {"foo": None}})
+        self._do_test('foo="null"', {"=": {"foo": "null"}})
+        self._do_test('foo in ["null", "foo"]',
+                      {"in": {"foo": ["null", "foo"]}})
+        self._do_test(u'foo="quote" and bar≠1',
+                      {"and": [{u"≠": {"bar": 1}},
+                               {"=": {"foo": "quote"}}]})
+        self._do_test('foo="quote" or bar like "%%foo"',
+                      {"or": [{"like": {"bar": "%%foo"}},
+                              {"=": {"foo": "quote"}}]})
+
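+        # "and" binds tighter than "or", and the parser emits operands in
+        # reverse order, as the expected nestings below show.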
+        self._do_test('not (foo="quote" or bar like "%%foo" or foo="what!" '
+                      'or bar="who?")',
+                      {"not": {"or": [
+                          {"=": {"bar": "who?"}},
+                          {"=": {"foo": "what!"}},
+                          {"like": {"bar": "%%foo"}},
+                          {"=": {"foo": "quote"}},
+                      ]}})
+
+        self._do_test('(foo="quote" or bar like "%%foo" or not foo="what!" '
+                      'or bar="who?") and cat="meme"',
+                      {"and": [
+                          {"=": {"cat": "meme"}},
+                          {"or": [
+                              {"=": {"bar": "who?"}},
+                              {"not": {"=": {"foo": "what!"}}},
+                              {"like": {"bar": "%%foo"}},
+                              {"=": {"foo": "quote"}},
+                          ]}
+                      ]})
+
+        self._do_test('foo="quote" or bar like "%%foo" or foo="what!" '
+                      'or bar="who?" and cat="meme"',
+                      {"or": [
+                          {"and": [
+                              {"=": {"cat": "meme"}},
+                              {"=": {"bar": "who?"}},
+                          ]},
+                          {"=": {"foo": "what!"}},
+                          {"like": {"bar": "%%foo"}},
+                          {"=": {"foo": "quote"}},
+                      ]})
+
+        self._do_test('foo="quote" or bar like "%%foo" and foo="what!" '
+                      'or bar="who?" or cat="meme"',
+                      {"or": [
+                          {"=": {"cat": "meme"}},
+                          {"=": {"bar": "who?"}},
+                          {"and": [
+                              {"=": {"foo": "what!"}},
+                              {"like": {"bar": "%%foo"}},
+                          ]},
+                          {"=": {"foo": "quote"}},
+                      ]})
+
+
+class TestMeasureQuery(tests_base.TestCase):
+    def test_equal(self):
+        q = api.SearchMetricController.MeasureQuery({"=": 4})
+        self.assertTrue(q(4))
+        self.assertFalse(q(40))
+
+    def test_gt(self):
+        q = api.SearchMetricController.MeasureQuery({">": 4})
+        self.assertTrue(q(40))
+        self.assertFalse(q(4))
+
+    def test_and(self):
+        q = api.SearchMetricController.MeasureQuery(
+            {"and": [{">": 4}, {"<": 10}]})
+        self.assertTrue(q(5))
+        self.assertFalse(q(40))
+        self.assertFalse(q(1))
+
+    def test_or(self):
+        q = api.SearchMetricController.MeasureQuery(
+            {"or": [{"=": 4}, {"=": 10}]})
+        self.assertTrue(q(4))
+        self.assertTrue(q(10))
+        self.assertFalse(q(-1))
+
+    def test_modulo(self):
+        q = api.SearchMetricController.MeasureQuery(
+            {"=": [{"%": 5}, 0]})
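+        # Matches values for which value % 5 == 0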
+        self.assertTrue(q(5))
+        self.assertTrue(q(10))
+        self.assertFalse(q(-1))
+        self.assertFalse(q(6))
+
+    def test_math(self):
+        q = api.SearchMetricController.MeasureQuery(
+            {
+                u"and": [
+                    # v+5 is bigger than 0
+                    {u"≥": [{u"+": 5}, 0]},
+                    # v-6 is not 5
+                    {u"≠": [5, {u"-": 6}]},
+                ],
+            }
+        )
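+        # q(5): 5+5 >= 0 and 5-6 != 5, so True; q(11): 11-6 == 5, so False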
+        self.assertTrue(q(5))
+        self.assertTrue(q(10))
+        self.assertFalse(q(11))
+
+    def test_empty(self):
+        q = api.SearchMetricController.MeasureQuery({})
+        self.assertFalse(q(5))
+        self.assertFalse(q(10))
+
+    def test_bad_format(self):
+        self.assertRaises(api.SearchMetricController.MeasureQuery.InvalidQuery,
+                          api.SearchMetricController.MeasureQuery,
+                          {"foo": [{"=": 4}, {"=": 10}]})
+
+        self.assertRaises(api.SearchMetricController.MeasureQuery.InvalidQuery,
+                          api.SearchMetricController.MeasureQuery,
+                          {"=": [1, 2, 3]})
diff --git a/gnocchi/tests/test_statsd.py b/gnocchi/tests/test_statsd.py
new file mode 100644
index 0000000000000000000000000000000000000000..ce6c52c4eb8ee6b6237a515cb9b978cd35c09831
--- /dev/null
+++ b/gnocchi/tests/test_statsd.py
@@ -0,0 +1,175 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2016-2017 Red Hat, Inc.
+# Copyright © 2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import uuid
+
+import mock
+import numpy
+
+from gnocchi import indexer
+from gnocchi import statsd
+from gnocchi.tests import base as tests_base
+from gnocchi.tests.test_utils import get_measures_list
+from gnocchi import utils
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestStatsd(tests_base.TestCase):
+
+    STATSD_USER_ID = str(uuid.uuid4())
+    STATSD_PROJECT_ID = str(uuid.uuid4())
+    STATSD_ARCHIVE_POLICY_NAME = "medium"
+
+    def setUp(self):
+        super(TestStatsd, self).setUp()
+
+        self.conf.set_override("resource_id",
+                               str(uuid.uuid4()), "statsd")
+        self.conf.set_override("creator",
+                               self.STATSD_USER_ID, "statsd")
+        self.conf.set_override("archive_policy_name",
+                               self.STATSD_ARCHIVE_POLICY_NAME, "statsd")
+        ap = self.ARCHIVE_POLICIES["medium"]
+        self.aggregations = ap.get_aggregations_for_method("mean")
+
+        self.stats = statsd.Stats(self.conf)
+        # Replace storage/indexer with correct ones that have been upgraded
+        self.stats.incoming = self.incoming
+        self.stats.indexer = self.index
+        self.server = statsd.StatsdServer(self.stats)
+
+    def test_flush_empty(self):
+        self.server.stats.flush()
+
+    @mock.patch.object(utils, 'utcnow')
+    def _test_gauge_or_ms(self, metric_type, utcnow):
+        metric_name = "test_gauge_or_ms"
+        metric_key = metric_name + "|" + metric_type
+        utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 58, 36)
+        self.server.datagram_received(
+            ("%s:1|%s" % (metric_name, metric_type)).encode('ascii'),
+            ("127.0.0.1", 12345))
+        self.stats.flush()
+
+        r = self.stats.indexer.get_resource('generic',
+                                            self.conf.statsd.resource_id,
+                                            with_metrics=True)
+
+        metric = r.get_metric(metric_key)
+
+        self.trigger_processing([metric])
+
+        measures = self.storage.get_aggregated_measures(
+            {metric: self.aggregations})[metric]
+        measures = get_measures_list(measures)
+        self.assertEqual({"mean": [
+            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.0),
+            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.0),
+            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0)
+        ]}, measures)
+
+        utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 59, 37)
+        # This one is going to be ignored
+        self.server.datagram_received(
+            ("%s:45|%s" % (metric_name, metric_type)).encode('ascii'),
+            ("127.0.0.1", 12345))
+        self.server.datagram_received(
+            ("%s:2|%s" % (metric_name, metric_type)).encode('ascii'),
+            ("127.0.0.1", 12345))
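+        # Gauges (and timers, which this server handles the same way) keep
+        # only the latest value per flush period, so the 45 above is
+        # superseded by 2: the 13:59 bucket is 2.0 and the hour and day
+        # means average (1 + 2) / 2 == 1.5.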
+        self.stats.flush()
+
+        self.trigger_processing([metric])
+
+        measures = self.storage.get_aggregated_measures(
+            {metric: self.aggregations})[metric]
+        measures = get_measures_list(measures)
+        self.assertEqual({"mean": [
+            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.5),
+            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.5),
+            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
+            (datetime64(2015, 1, 7, 13, 59), numpy.timedelta64(1, 'm'), 2.0)
+        ]}, measures)
+
+    def test_gauge(self):
+        self._test_gauge_or_ms("g")
+
+    def test_ms(self):
+        self._test_gauge_or_ms("ms")
+
+    @mock.patch.object(utils, 'utcnow')
+    def test_counter(self, utcnow):
+        metric_name = "test_counter"
+        metric_key = metric_name + "|c"
+        utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 58, 36)
+        self.server.datagram_received(
+            ("%s:1|c" % metric_name).encode('ascii'),
+            ("127.0.0.1", 12345))
+        self.stats.flush()
+
+        r = self.stats.indexer.get_resource('generic',
+                                            self.conf.statsd.resource_id,
+                                            with_metrics=True)
+        metric = r.get_metric(metric_key)
+        self.assertIsNotNone(metric)
+
+        self.trigger_processing([metric])
+
+        measures = self.storage.get_aggregated_measures(
+            {metric: self.aggregations})[metric]
+        measures = get_measures_list(measures)
+        self.assertEqual({"mean": [
+            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 1.0),
+            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 1.0),
+            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0)
+        ]}, measures)
+
+        utcnow.return_value = utils.datetime_utc(2015, 1, 7, 13, 59, 37)
+        self.server.datagram_received(
+            ("%s:45|c" % metric_name).encode('ascii'),
+            ("127.0.0.1", 12345))
+        self.server.datagram_received(
+            ("%s:2|c|@0.2" % metric_name).encode('ascii'),
+            ("127.0.0.1", 12345))
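+        # Counters accumulate within a flush period, and the @0.2 sample
+        # rate scales the value: 2 / 0.2 == 10, so the 13:59 bucket totals
+        # 45 + 10 == 55 and the hour/day means average (1 + 55) / 2 == 28.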
+        self.stats.flush()
+
+        self.trigger_processing([metric])
+
+        measures = self.storage.get_aggregated_measures(
+            {metric: self.aggregations})[metric]
+        measures = get_measures_list(measures)
+        self.assertEqual({"mean": [
+            (datetime64(2015, 1, 7), numpy.timedelta64(1, 'D'), 28),
+            (datetime64(2015, 1, 7, 13), numpy.timedelta64(1, 'h'), 28),
+            (datetime64(2015, 1, 7, 13, 58), numpy.timedelta64(1, 'm'), 1.0),
+            (datetime64(2015, 1, 7, 13, 59), numpy.timedelta64(1, 'm'), 55.0)
+        ]}, measures)
+
+
+class TestStatsdArchivePolicyRule(TestStatsd):
+    STATSD_ARCHIVE_POLICY_NAME = ""
+
+    def setUp(self):
+        super(TestStatsdArchivePolicyRule, self).setUp()
+        try:
+            self.stats.indexer.create_archive_policy_rule(
+                "statsd", "*", "medium")
+        except indexer.ArchivePolicyRuleAlreadyExists:
+            # Created by another test run
+            pass
diff --git a/gnocchi/tests/test_storage.py b/gnocchi/tests/test_storage.py
new file mode 100644
index 0000000000000000000000000000000000000000..903f7e6c38baf01b31e067eef6160354cf88ae21
--- /dev/null
+++ b/gnocchi/tests/test_storage.py
@@ -0,0 +1,1241 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2014-2015 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import uuid
+
+import mock
+import numpy
+import six.moves
+
+from gnocchi import archive_policy
+from gnocchi import carbonara
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import storage
+from gnocchi.storage import ceph
+from gnocchi.storage import file
+from gnocchi.storage import redis
+from gnocchi.storage import s3
+from gnocchi.storage import swift
+from gnocchi.tests import base as tests_base
+from gnocchi.tests.test_utils import get_measures_list
+
+
+def datetime64(*args):
+    return numpy.datetime64(datetime.datetime(*args))
+
+
+class TestStorageDriver(tests_base.TestCase):
+    def setUp(self):
+        super(TestStorageDriver, self).setUp()
+        # A lot of tests want a metric, so create one
+        self.metric, __ = self._create_metric()
+
+    def test_driver_str(self):
+        driver = storage.get_driver(self.conf)
+
+        if isinstance(driver, file.FileStorage):
+            s = driver.basepath
+        elif isinstance(driver, ceph.CephStorage):
+            s = driver.rados.get_fsid()
+        elif isinstance(driver, redis.RedisStorage):
+            s = driver._client
+        elif isinstance(driver, s3.S3Storage):
+            s = driver._bucket_name
+        elif isinstance(driver, swift.SwiftStorage):
+            s = driver._container_prefix
+
+        self.assertEqual(str(driver), "%s: %s" % (
+                         driver.__class__.__name__, s))
+
+    def test_get_driver(self):
+        driver = storage.get_driver(self.conf)
+        self.assertIsInstance(driver, storage.StorageDriver)
+
+    def test_file_driver_subdir_len(self):
+        driver = storage.get_driver(self.conf)
+        if not isinstance(driver, file.FileStorage):
+            self.skipTest("not file driver")
+
+        # Check the default
+        self.assertEqual(2, driver.SUBDIR_LEN)
+
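+        # The driver chunks the 32 hex digits of the metric id into
+        # SUBDIR_LEN-sized directory levels; any remainder becomes a final,
+        # shorter level, as the 15-character case below shows.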
+        metric = mock.Mock(id=uuid.UUID("12345678901234567890123456789012"))
+        expected = (driver.basepath + "/12/34/56/78/90/12/34/56/78/90/12/34/56"
+                    "/78/90/12/12345678-9012-3456-7890-123456789012")
+        self.assertEqual(expected, driver._build_metric_dir(metric))
+
+        driver._file_subdir_len = 16
+        expected = (driver.basepath + "/1234567890123456/7890123456"
+                    "789012/12345678-9012-3456-7890-123456789012")
+        self.assertEqual(expected, driver._build_metric_dir(metric))
+
+        driver._file_subdir_len = 15
+        expected = (driver.basepath + "/123456789012345/67890123456"
+                    "7890/12/12345678-9012-3456-7890-123456789012")
+        self.assertEqual(expected, driver._build_metric_dir(metric))
+
+    def test_corrupted_split(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 'm'))
+
+        with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
+                        side_effect=carbonara.InvalidData()):
+            results = self.storage._get_splits_and_unserialize({
+                self.metric: {
+                    aggregation: [
+                        carbonara.SplitKey(
+                            numpy.datetime64(1387800000, 's'),
+                            numpy.timedelta64(5, 'm'))
+                    ],
+                },
+            })[self.metric][aggregation]
+            self.assertEqual(1, len(results))
+            self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
+            # Assert it's an empty one since the data is corrupted
+            self.assertEqual(0, len(results[0]))
+            self.assertEqual(results[0].aggregation, aggregation)
+
+    def test_get_splits_and_unserialize(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 'm'))
+
+        results = self.storage._get_splits_and_unserialize({
+            self.metric: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(1387800000, 's'),
+                        numpy.timedelta64(5, 'm')),
+                ],
+            },
+        })[self.metric][aggregation]
+        self.assertEqual(1, len(results))
+        self.assertIsInstance(results[0], carbonara.AggregatedTimeSerie)
+        # Assert it's not an empty one since the data is valid
+        self.assertGreater(len(results[0]), 0)
+        self.assertEqual(results[0].aggregation, aggregation)
+
+    def test_corrupted_data(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 13, 0, 1), 1),
+        ])
+
+        with mock.patch('gnocchi.carbonara.AggregatedTimeSerie.unserialize',
+                        side_effect=carbonara.InvalidData()):
+            with mock.patch('gnocchi.carbonara.BoundTimeSerie.unserialize',
+                            side_effect=carbonara.InvalidData()):
+                self.trigger_processing()
+
+        m = self.storage.get_aggregated_measures(
+            {self.metric:
+                self.metric.archive_policy.get_aggregations_for_method(
+                    'mean')},)[self.metric]
+        m = get_measures_list(m)['mean']
+        self.assertIn((datetime64(2014, 1, 1),
+                       numpy.timedelta64(1, 'D'), 1), m)
+        self.assertIn((datetime64(2014, 1, 1, 13),
+                       numpy.timedelta64(1, 'h'), 1), m)
+        self.assertIn((datetime64(2014, 1, 1, 13),
+                       numpy.timedelta64(5, 'm'), 1), m)
+
+    def test_aborted_initial_processing(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 5),
+        ])
+        with mock.patch.object(self.storage, '_store_unaggregated_timeseries',
+                               side_effect=Exception):
+            try:
+                self.trigger_processing()
+            except Exception:
+                pass
+
+        with mock.patch('gnocchi.storage.LOG') as LOG:
+            self.trigger_processing()
+            self.assertFalse(LOG.error.called)
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("mean")
+        )
+
+        m = self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]
+        m = get_measures_list(m)['mean']
+        self.assertIn((datetime64(2014, 1, 1),
+                       numpy.timedelta64(1, 'D'), 5.0), m)
+        self.assertIn((datetime64(2014, 1, 1, 12),
+                       numpy.timedelta64(1, 'h'), 5.0), m)
+        self.assertIn((datetime64(2014, 1, 1, 12),
+                       numpy.timedelta64(5, 'm'), 5.0), m)
+
+    def test_delete_nonempty_metric(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+        self.storage._delete_metric(self.metric)
+        self.trigger_processing()
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("mean")
+        )
+
+        self.assertRaises(storage.MetricDoesNotExist,
+                          self.storage.get_aggregated_measures,
+                          {self.metric: aggregations})
+        self.assertEqual(
+            {self.metric: None},
+            self.storage._get_or_create_unaggregated_timeseries(
+                [self.metric]))
+
+    def test_measures_reporting_format(self):
+        report = self.incoming.measures_report(True)
+        self.assertIsInstance(report, dict)
+        self.assertIn('summary', report)
+        self.assertIn('metrics', report['summary'])
+        self.assertIn('measures', report['summary'])
+        self.assertIn('details', report)
+        self.assertIsInstance(report['details'], dict)
+        report = self.incoming.measures_report(False)
+        self.assertIsInstance(report, dict)
+        self.assertIn('summary', report)
+        self.assertIn('metrics', report['summary'])
+        self.assertIn('measures', report['summary'])
+        self.assertNotIn('details', report)
+
+    def test_measures_reporting(self):
+        m2, __ = self._create_metric('medium')
+        for i in six.moves.range(60):
+            self.incoming.add_measures(self.metric.id, [
+                incoming.Measure(datetime64(2014, 1, 1, 12, 0, i), 69),
+            ])
+            self.incoming.add_measures(m2.id, [
+                incoming.Measure(datetime64(2014, 1, 1, 12, 0, i), 69),
+            ])
+        report = self.incoming.measures_report(True)
+        self.assertIsInstance(report, dict)
+        self.assertEqual(2, report['summary']['metrics'])
+        self.assertEqual(120, report['summary']['measures'])
+        self.assertIn('details', report)
+        self.assertIsInstance(report['details'], dict)
+        report = self.incoming.measures_report(False)
+        self.assertIsInstance(report, dict)
+        self.assertEqual(2, report['summary']['metrics'])
+        self.assertEqual(120, report['summary']['measures'])
+
+    def test_get_aggregated_measures(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, i, j), 100)
+            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
+        self.trigger_processing([self.metric])
+
+        aggregations = self.metric.archive_policy.aggregations
+
+        measures = self.storage.get_aggregated_measures(
+            {self.metric: aggregations})
+        self.assertEqual(1, len(measures))
+        self.assertIn(self.metric, measures)
+        measures = measures[self.metric]
+        self.assertEqual(len(aggregations), len(measures))
+        self.assertGreater(len(measures[aggregations[0]]), 0)
+        for agg in aggregations:
+            self.assertEqual(agg, measures[agg].aggregation)
+
+    def test_get_aggregated_measures_multiple(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, i, j), 100)
+            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
+        m2, __ = self._create_metric('medium')
+        self.incoming.add_measures(m2.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, i, j), 100)
+            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
+        self.trigger_processing([self.metric, m2])
+
+        aggregations = self.metric.archive_policy.aggregations
+
+        measures = self.storage.get_aggregated_measures(
+            {self.metric: aggregations,
+             m2: m2.archive_policy.aggregations})
+
+        self.assertEqual({self.metric, m2}, set(measures.keys()))
+        self.assertEqual(len(aggregations), len(measures[self.metric]))
+        self.assertGreater(len(measures[self.metric][aggregations[0]]), 0)
+        for agg in aggregations:
+            self.assertEqual(agg, measures[self.metric][agg].aggregation)
+        self.assertEqual(len(m2.archive_policy.aggregations),
+                         len(measures[m2]))
+        self.assertGreater(
+            len(measures[m2][m2.archive_policy.aggregations[0]]), 0)
+        for agg in m2.archive_policy.aggregations:
+            self.assertEqual(agg, measures[m2][agg].aggregation)
+
+    def test_add_measures_big(self):
+        m, __ = self._create_metric('high')
+        self.incoming.add_measures(m.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, i, j), 100)
+            for i in six.moves.range(0, 60) for j in six.moves.range(0, 60)])
+        self.trigger_processing([m])
+
+        aggregations = (
+            m.archive_policy.get_aggregations_for_method("mean")
+        )
+
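+        # One hour of data: 3600 one-second points, 60 one-minute points and
+        # one one-hour point, assuming the "high" test policy aggregates at
+        # those three granularities.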
+        self.assertEqual(3661, len(
+            get_measures_list(self.storage.get_aggregated_measures(
+                {m: aggregations})[m])['mean']))
+
+    @mock.patch('gnocchi.carbonara.SplitKey.POINTS_PER_SPLIT', 48)
+    def test_add_measures_update_subset_split(self):
+        m, m_sql = self._create_metric('medium')
+        measures = [
+            incoming.Measure(datetime64(2014, 1, 6, i, j, 0), 100)
+            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)]
+        self.incoming.add_measures(m.id, measures)
+        self.trigger_processing([m])
+
+        # Add a measure at the end, in the same aggregate period as the
+        # last point.
+        self.incoming.add_measures(m.id, [
+            incoming.Measure(datetime64(2014, 1, 6, 1, 58, 1), 100)])
+
+        with mock.patch.object(self.storage, '_store_metric_splits') as c:
+            # should only resample last aggregate
+            self.trigger_processing([m])
+        count = 0
+        for call in c.mock_calls:
+            # The policy is 60 points and the split is 48; only the second
+            # half should be updated
+            args = call[1]
+            for metric, key_agg_data_offset in six.iteritems(args[0]):
+                if metric.id == m_sql.id:
+                    for key, aggregation, data, offset in key_agg_data_offset:
+                        if (key.sampling == numpy.timedelta64(1, 'm')
+                           and aggregation.method == "mean"):
+                            count += 1
+        self.assertEqual(1, count)
+
+    def test_add_measures_update_subset(self):
+        m, m_sql = self._create_metric('medium')
+        measures = [
+            incoming.Measure(datetime64(2014, 1, 6, i, j, 0), 100)
+            for i in six.moves.range(2) for j in six.moves.range(0, 60, 2)]
+        self.incoming.add_measures(m.id, measures)
+        self.trigger_processing([m])
+
+        # Add a measure at the end, in the same aggregate period as the
+        # last point.
+        new_point = datetime64(2014, 1, 6, 1, 58, 1)
+        self.incoming.add_measures(m.id, [incoming.Measure(new_point, 100)])
+
+        with mock.patch.object(self.incoming, 'add_measures') as c:
+            self.trigger_processing([m])
+        for __, args, __ in c.mock_calls:
+            self.assertEqual(
+                list(args[3])[0][0], carbonara.round_timestamp(
+                    new_point, args[1].granularity * 10e8))
+
+    def test_delete_old_measures(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("mean")
+        )
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+        # One year later…
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2015, 1, 1, 12, 0, 1), 69),
+        ])
+        self.trigger_processing()
+
+        self.assertEqual({"mean": [
+            (datetime64(2015, 1, 1), numpy.timedelta64(1, 'D'), 69),
+            (datetime64(2015, 1, 1, 12), numpy.timedelta64(1, 'h'), 69),
+            (datetime64(2015, 1, 1, 12), numpy.timedelta64(5, 'm'), 69),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
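+        # The 2014 points have been expired, so only the split covering the
+        # 2015 measure should remain for each granularity.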
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'D'))
+        self.assertEqual({
+            self.metric: {
+                agg: {carbonara.SplitKey(numpy.datetime64(1244160000, 's'),
+                                         numpy.timedelta64(1, 'D'))},
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'h'))
+        self.assertEqual({
+            self.metric: {
+                agg: {carbonara.SplitKey(numpy.datetime64(1412640000, 's'),
+                                         numpy.timedelta64(1, 'h'))},
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {carbonara.SplitKey(numpy.datetime64(1419120000, 's'),
+                                         numpy.timedelta64(5, 'm'))},
+            }
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+
+    def test_get_measures_return(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2016, 1, 2, 13, 7, 31), 42),
+            incoming.Measure(datetime64(2016, 1, 4, 14, 9, 31), 4),
+            incoming.Measure(datetime64(2016, 1, 6, 15, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 'm'))
+
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520000, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    )]}})
+        self.assertEqual(1, len(data))
+        data = data[self.metric]
+        self.assertEqual(1, len(data))
+        data = data[aggregation]
+        self.assertEqual(1, len(data))
+        self.assertIsInstance(data[0], bytes)
+        self.assertGreater(len(data[0]), 0)
+        existing = data[0]
+
+        # Now retrieve an existing and a non-existing key
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520000, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    ),
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520010, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    ),
+                ]}})
+        self.assertEqual(1, len(data))
+        data = data[self.metric]
+        self.assertEqual(1, len(data))
+        data = data[aggregation]
+        self.assertEqual(2, len(data))
+        self.assertIsInstance(data[0], bytes)
+        self.assertGreater(len(data[0]), 0)
+        self.assertEqual(existing, data[0])
+        self.assertIsNone(data[1])
+
+        # Now retrieve a non-existing and an existing key
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(155152000, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    ),
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520000, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    )
+                ]}})
+        self.assertEqual(1, len(data))
+        data = data[self.metric]
+        self.assertEqual(1, len(data))
+        data = data[aggregation]
+        self.assertEqual(2, len(data))
+        self.assertIsInstance(data[1], bytes)
+        self.assertGreater(len(data[1]), 0)
+        self.assertEqual(existing, data[1])
+        self.assertIsNone(data[0])
+
+        m2, _ = self._create_metric()
+        # Now retrieve a non-existing (= no aggregated measures) metric
+        data = self.storage._get_splits({
+            m2: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520010, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    ),
+                    carbonara.SplitKey(
+                        numpy.datetime64(1451520000, 's'),
+                        numpy.timedelta64(5, 'm'),
+                    )
+                ]}})
+        self.assertEqual({m2: {aggregation: [None, None]}}, data)
+
+    def test_rewrite_measures(self):
+        # Create an archive policy that spans on several splits. Each split
+        # being 3600 points, let's go for 36k points so we have 10 splits.
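+        # With 60-second granularity each split spans 3600 * 60 s = 216000 s
+        # (2.5 days), which is why the keys asserted below step by 216000.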
+        apname = str(uuid.uuid4())
+        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
+        self.index.create_archive_policy(ap)
+        self.metric = indexer.Metric(uuid.uuid4(), ap)
+        self.index.create_metric(self.metric.id, str(uuid.uuid4()),
+                                 apname)
+
+        # First store some points scattered across different splits
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2016, 1, 2, 13, 7, 31), 42),
+            incoming.Measure(datetime64(2016, 1, 4, 14, 9, 31), 4),
+            incoming.Measure(datetime64(2016, 1, 6, 15, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64(1451520000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451736000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451952000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            }
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+
+        if self.storage.WRITE_FULL:
+            assertCompressedIfWriteFull = self.assertTrue
+        else:
+            assertCompressedIfWriteFull = self.assertFalse
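+        # WRITE_FULL drivers store every split compressed as soon as it is
+        # written; other drivers keep the most recent, still-mutable split
+        # uncompressed until a later rewrite compresses it.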
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(60, 's'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(60, 's'),
+                )]}})[self.metric][aggregation][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12), numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7), numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9), numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12), numpy.timedelta64(1, 'm'), 44),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation]})[self.metric]))
+
+        # Now store brand new points that should force a rewrite of one of
+        # the splits (keep in mind the back window size is one hour here).
+        # We move the BoundTimeSerie processing timeserie far away from its
+        # current range.
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 10, 16, 18, 45), 45),
+            incoming.Measure(datetime64(2016, 1, 10, 17, 12, 45), 46),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64(1452384000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451736000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451520000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451952000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(60, 's'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(60, 's'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        # Now this one is compressed because it has been rewritten!
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [
+                    carbonara.SplitKey(
+                        numpy.datetime64(1452384000, 's'),
+                        numpy.timedelta64(60, 's'),
+                    )]}})[self.metric][aggregation][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12), numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7), numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9), numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12), numpy.timedelta64(1, 'm'), 44),
+            (datetime64(2016, 1, 10, 16, 18), numpy.timedelta64(1, 'm'), 45),
+            (datetime64(2016, 1, 10, 17, 12), numpy.timedelta64(1, 'm'), 46),
+            ]}, get_measures_list(self.storage.get_aggregated_measures(
+                {self.metric: [aggregation]})[self.metric]))
+
+    def test_rewrite_measures_multiple_granularities(self):
+        apname = str(uuid.uuid4())
+        # Create an archive policy with two different granularities
+        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60), (36000, 1)])
+        self.index.create_archive_policy(ap)
+        self.metric = indexer.Metric(uuid.uuid4(), ap)
+        self.index.create_metric(self.metric.id, str(uuid.uuid4()),
+                                 apname)
+
+        # First store some points
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 6, 18, 15, 46), 43),
+            incoming.Measure(datetime64(2016, 1, 6, 18, 15, 47), 43),
+            incoming.Measure(datetime64(2016, 1, 6, 18, 15, 48), 43),
+        ])
+        self.trigger_processing()
+
+        # Add some more points, mocking out the WRITE_FULL attribute of the
+        # current driver so that a rewrite happens
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 7, 18, 15, 49), 43),
+            incoming.Measure(datetime64(2016, 1, 7, 18, 15, 50), 43),
+            incoming.Measure(datetime64(2016, 1, 7, 18, 18, 46), 43),
+        ])
+        driver = storage.get_driver(self.conf)
+        with mock.patch.object(driver.__class__, 'WRITE_FULL', False):
+            self.trigger_processing()
+
+    def test_rewrite_measures_oldest_mutable_timestamp_eq_next_key(self):
+        """See LP#1655422"""
+        # Create an archive policy that spans several splits. Each split
+        # holds 3600 points, so 36000 points at a 60 s granularity gives
+        # us 10 splits.
+        apname = str(uuid.uuid4())
+        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
+        self.index.create_archive_policy(ap)
+        self.metric = indexer.Metric(uuid.uuid4(), ap)
+        self.index.create_metric(self.metric.id, str(uuid.uuid4()),
+                                 apname)
+
+        # First store some points scattered across different splits
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2016, 1, 2, 13, 7, 31), 42),
+            incoming.Measure(datetime64(2016, 1, 4, 14, 9, 31), 4),
+            incoming.Measure(datetime64(2016, 1, 6, 15, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64(1451520000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451736000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451952000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+
+        if self.storage.WRITE_FULL:
+            assertCompressedIfWriteFull = self.assertTrue
+        else:
+            assertCompressedIfWriteFull = self.assertFalse
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+
+        data = self.storage._get_splits(
+            {self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits(
+            {self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits(
+            {self.metric: {aggregation: [carbonara.SplitKey(
+                numpy.datetime64(1451952000, 's'),
+                numpy.timedelta64(1, 'm')
+            )]}})[self.metric][aggregation][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12), numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7), numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9), numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12), numpy.timedelta64(1, 'm'), 44),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation]})[self.metric]))
+
+        # Now store brand new points that should force a rewrite of one of
+        # the splits (keep in mind the back window size is one hour here).
+        # We move the BoundTimeSerie processing timeserie far away from its
+        # current range.
+
+        # Here we test a special case where the oldest_mutable_timestamp will
+        # be 2016-01-10T00:00:00 = 1452384000.0, our new split key.
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 10, 0, 12), 45),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64('2016-01-10T00:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64('2016-01-02T12:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64('2015-12-31T00:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64('2016-01-05T00:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+        data = self.storage._get_splits({
+            self.metric: {
+                agg: [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][agg][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                agg: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][agg][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                agg: [carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(60, 's')
+                )]}})[self.metric][agg][0]
+        # Now this one is compressed because it has been rewritten!
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                agg: [carbonara.SplitKey(
+                    numpy.datetime64(1452384000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][agg][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12), numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7), numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9), numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12), numpy.timedelta64(1, 'm'), 44),
+            (datetime64(2016, 1, 10, 0, 12), numpy.timedelta64(1, 'm'), 45),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation]})[self.metric]))
+
+    def test_rewrite_measures_corruption_missing_file(self):
+        # Create an archive policy that spans several splits. Each split
+        # holds 3600 points, so 36000 points at a 60 s granularity gives
+        # us 10 splits.
+        apname = str(uuid.uuid4())
+        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
+        self.index.create_archive_policy(ap)
+        self.metric = indexer.Metric(uuid.uuid4(), ap)
+        self.index.create_metric(self.metric.id, str(uuid.uuid4()),
+                                 apname)
+
+        # First store some points scattered across different splits
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2016, 1, 2, 13, 7, 31), 42),
+            incoming.Measure(datetime64(2016, 1, 4, 14, 9, 31), 4),
+            incoming.Measure(datetime64(2016, 1, 6, 15, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64('2015-12-31T00:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64('2016-01-02T12:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64('2016-01-05T00:00:00'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+        if self.storage.WRITE_FULL:
+            assertCompressedIfWriteFull = self.assertTrue
+        else:
+            assertCompressedIfWriteFull = self.assertFalse
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation:
+                [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(1, 'm')
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12),
+             numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7),
+             numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9),
+             numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12),
+             numpy.timedelta64(1, 'm'), 44),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation]})[self.metric]))
+
+        # Test what happens if we delete the latest split and then need to
+        # compress it!
+        self.storage._delete_metric_splits(
+            {self.metric: [(carbonara.SplitKey(
+                numpy.datetime64(1451952000, 's'),
+                numpy.timedelta64(1, 'm'),
+            ), aggregation)]})
+
+        # Now store brand new points that should force a rewrite of one of
+        # the splits (keep in mind the back window size is one hour here).
+        # We move the BoundTimeSerie processing timeserie far away from its
+        # current range.
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 10, 16, 18, 45), 45),
+            incoming.Measure(datetime64(2016, 1, 10, 17, 12, 45), 46),
+        ])
+        self.trigger_processing()
+
+    def test_rewrite_measures_corruption_bad_data(self):
+        # Create an archive policy that spans several splits. Each split
+        # holds 3600 points, so 36000 points at a 60 s granularity gives
+        # us 10 splits.
+        apname = str(uuid.uuid4())
+        ap = archive_policy.ArchivePolicy(apname, 0, [(36000, 60)])
+        self.index.create_archive_policy(ap)
+        self.metric = indexer.Metric(uuid.uuid4(), ap)
+        self.index.create_metric(self.metric.id, str(uuid.uuid4()),
+                                 apname)
+
+        # First store some points scattered across different splits
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2016, 1, 2, 13, 7, 31), 42),
+            incoming.Measure(datetime64(2016, 1, 4, 14, 9, 31), 4),
+            incoming.Measure(datetime64(2016, 1, 6, 15, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        agg = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+        self.assertEqual({
+            self.metric: {
+                agg: {
+                    carbonara.SplitKey(numpy.datetime64(1451520000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451736000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                    carbonara.SplitKey(numpy.datetime64(1451952000, 's'),
+                                       numpy.timedelta64(1, 'm')),
+                },
+            },
+        }, self.storage._list_split_keys({self.metric: [agg]}))
+
+        if self.storage.WRITE_FULL:
+            assertCompressedIfWriteFull = self.assertTrue
+        else:
+            assertCompressedIfWriteFull = self.assertFalse
+
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(1, 'm'))
+
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451520000, 's'),
+                    numpy.timedelta64(60, 's'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451736000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        self.assertTrue(carbonara.AggregatedTimeSerie.is_compressed(data))
+        data = self.storage._get_splits({
+            self.metric: {
+                aggregation: [carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(1, 'm'),
+                )]}})[self.metric][aggregation][0]
+        assertCompressedIfWriteFull(
+            carbonara.AggregatedTimeSerie.is_compressed(data))
+
+        self.assertEqual({"mean": [
+            (datetime64(2016, 1, 1, 12), numpy.timedelta64(1, 'm'), 69),
+            (datetime64(2016, 1, 2, 13, 7), numpy.timedelta64(1, 'm'), 42),
+            (datetime64(2016, 1, 4, 14, 9), numpy.timedelta64(1, 'm'), 4),
+            (datetime64(2016, 1, 6, 15, 12), numpy.timedelta64(1, 'm'), 44),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation]})[self.metric]))
+
+        # Test what happens if we write garbage
+        self.storage._store_metric_splits({
+            self.metric: [
+                (carbonara.SplitKey(
+                    numpy.datetime64(1451952000, 's'),
+                    numpy.timedelta64(1, 'm')),
+                 aggregation, b"oh really?", None),
+            ]})
+
+        # Now store brand new points that should force a rewrite of one of
+        # the splits (keep in mind the back window size is one hour here).
+        # We move the BoundTimeSerie processing timeserie far away from its
+        # current range.
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2016, 1, 10, 16, 18, 45), 45),
+            incoming.Measure(datetime64(2016, 1, 10, 17, 12, 45), 46),
+        ])
+        self.trigger_processing()
+
+    def test_updated_measures(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+        ])
+        self.trigger_processing()
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("mean")
+        )
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 55.5),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 55.5),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 42.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("max")
+        )
+
+        self.assertEqual({"max": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 69),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 69.0),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 42.0),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("min")
+        )
+
+        self.assertEqual({"min": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 4),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 4),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 4.0),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+    def test_add_and_get_splits(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+        self.trigger_processing()
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("mean")
+        )
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations})[self.metric]))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations},
+            from_timestamp=datetime64(2014, 1, 1, 12, 10, 0))[self.metric]))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+            (datetime64(2014, 1, 1, 12, 5), numpy.timedelta64(5, 'm'), 23.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations},
+            to_timestamp=datetime64(2014, 1, 1, 12, 6, 0))[self.metric]))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12, 10), numpy.timedelta64(5, 'm'), 44.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations},
+            to_timestamp=datetime64(2014, 1, 1, 12, 10, 10),
+            from_timestamp=datetime64(2014, 1, 1, 12, 10, 10))[self.metric]))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations},
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 2))[self.metric]))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1), numpy.timedelta64(1, 'D'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: aggregations},
+            from_timestamp=datetime64(2014, 1, 1, 12),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 2))[self.metric]))
+
+        aggregation_1h = (
+            self.metric.archive_policy.get_aggregation(
+                "mean", numpy.timedelta64(1, 'h'))
+        )
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(1, 'h'), 39.75),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation_1h]},
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 2))[self.metric]))
+
+        aggregation_5m = (
+            self.metric.archive_policy.get_aggregation(
+                "mean", numpy.timedelta64(5, 'm'))
+        )
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1, 12), numpy.timedelta64(5, 'm'), 69.0),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {self.metric: [aggregation_5m]},
+            from_timestamp=datetime64(2014, 1, 1, 12, 0, 0),
+            to_timestamp=datetime64(2014, 1, 1, 12, 0, 2))[self.metric]))
+
+        self.assertEqual({"mean": []},
+                         get_measures_list(
+                             self.storage.get_aggregated_measures(
+                                 {self.metric:
+                                     [carbonara.Aggregation(
+                                         "mean", numpy.timedelta64(42, 's'),
+                                      None)]})[self.metric]))
+
+    def test_get_measure_unknown_aggregation(self):
+        self.incoming.add_measures(self.metric.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 1), 69),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 7, 31), 42),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 9, 31), 4),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 12, 45), 44),
+        ])
+
+        aggregations = (
+            self.metric.archive_policy.get_aggregations_for_method("last")
+        )
+
+        self.assertRaises(
+            storage.MetricDoesNotExist,
+            self.storage.get_aggregated_measures,
+            {self.metric: aggregations})
+
+    def test_resize_policy(self):
+        name = str(uuid.uuid4())
+        ap = archive_policy.ArchivePolicy(name, 0, [(3, 5)])
+        self.index.create_archive_policy(ap)
+        m = self.index.create_metric(uuid.uuid4(), str(uuid.uuid4()), name)
+        m = self.index.list_metrics(attribute_filter={"=": {"id": m.id}})[0]
+        self.incoming.add_measures(m.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 0), 1),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 5), 1),
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 10), 1),
+        ])
+        self.trigger_processing([m])
+
+        aggregation = m.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 's'))
+
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1, 12, 0, 0), numpy.timedelta64(5, 's'), 1),
+            (datetime64(2014, 1, 1, 12, 0, 5), numpy.timedelta64(5, 's'), 1),
+            (datetime64(2014, 1, 1, 12, 0, 10), numpy.timedelta64(5, 's'), 1),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {m: [aggregation]})[m]))
+        # expand to more points
+        self.index.update_archive_policy(
+            name, [archive_policy.ArchivePolicyItem(granularity=5, points=6)])
+        m = self.index.list_metrics(attribute_filter={"=": {"id": m.id}})[0]
+        self.incoming.add_measures(m.id, [
+            incoming.Measure(datetime64(2014, 1, 1, 12, 0, 15), 1),
+        ])
+        self.trigger_processing([m])
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1, 12, 0, 5), numpy.timedelta64(5, 's'), 1),
+            (datetime64(2014, 1, 1, 12, 0, 10), numpy.timedelta64(5, 's'), 1),
+            (datetime64(2014, 1, 1, 12, 0, 15), numpy.timedelta64(5, 's'), 1),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {m: [aggregation]})[m]))
+        # shrink timespan
+        self.index.update_archive_policy(
+            name, [archive_policy.ArchivePolicyItem(granularity=5, points=2)])
+        m = self.index.list_metrics(attribute_filter={"=": {"id": m.id}})[0]
+        aggregation = m.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(5, 's'))
+        self.assertEqual({"mean": [
+            (datetime64(2014, 1, 1, 12, 0, 10), numpy.timedelta64(5, 's'), 1),
+            (datetime64(2014, 1, 1, 12, 0, 15), numpy.timedelta64(5, 's'), 1),
+        ]}, get_measures_list(self.storage.get_aggregated_measures(
+            {m: [aggregation]})[m]))
+
+    def test_resample_no_metric(self):
+        """https://github.com/gnocchixyz/gnocchi/issues/69"""
+        aggregation = self.metric.archive_policy.get_aggregation(
+            "mean", numpy.timedelta64(300, 's'))
+        self.assertRaises(storage.MetricDoesNotExist,
+                          self.storage.get_aggregated_measures,
+                          {self.metric:
+                              [aggregation]},
+                          datetime64(2014, 1, 1),
+                          datetime64(2015, 1, 1),
+                          resample=numpy.timedelta64(1, 'h'))
diff --git a/gnocchi/tests/test_utils.py b/gnocchi/tests/test_utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..50856a8b7c6a442d075447e9119df1d86d405e43
--- /dev/null
+++ b/gnocchi/tests/test_utils.py
@@ -0,0 +1,164 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import itertools
+import os
+import uuid
+
+import iso8601
+import mock
+
+from gnocchi import storage
+from gnocchi.tests import base as tests_base
+from gnocchi import utils
+
+
+class TestUtils(tests_base.TestCase):
+    def _do_test_datetime_to_unix_timezone_change(self, expected, dt):
+        self.assertEqual(expected, utils.datetime_to_unix(dt))
+        with mock.patch.dict(os.environ, {'TZ': 'UTC'}):
+            self.assertEqual(expected, utils.datetime_to_unix(dt))
+        with mock.patch.dict(os.environ, {'TZ': 'Europe/Paris'}):
+            self.assertEqual(expected, utils.datetime_to_unix(dt))
+        with mock.patch.dict(os.environ, {'TZ': 'US/Eastern'}):
+            self.assertEqual(expected, utils.datetime_to_unix(dt))
+
+    def test_datetime_to_unix_timezone_change_utc(self):
+        dt = datetime.datetime(2015, 1, 1, 10, 0, tzinfo=iso8601.iso8601.UTC)
+        self._do_test_datetime_to_unix_timezone_change(1420106400.0, dt)
+
+    def test_datetime_to_unix_timezone_change_offset(self):
+        dt = datetime.datetime(2015, 1, 1, 15, 0,
+                               tzinfo=iso8601.iso8601.FixedOffset(5, 0, '+5h'))
+        self._do_test_datetime_to_unix_timezone_change(1420106400.0, dt)
+
+    def test_to_timestamps_empty(self):
+        self.assertEqual([], utils.to_timestamps([]))
+
+    def test_to_timestamps_epoch(self):
+        self.assertEqual(
+            utils.to_datetime("1425652440"),
+            datetime.datetime(2015, 3, 6, 14, 34,
+                              tzinfo=iso8601.iso8601.UTC))
+        self.assertEqual(
+            utils.to_datetime("1425652440.4"),
+            datetime.datetime(2015, 3, 6, 14, 34, 0, 400000,
+                              tzinfo=iso8601.iso8601.UTC))
+        self.assertEqual(
+            utils.to_datetime(1425652440),
+            datetime.datetime(2015, 3, 6, 14, 34,
+                              tzinfo=iso8601.iso8601.UTC))
+        self.assertEqual(
+            utils.to_datetime(utils.to_timestamp(1425652440.4)),
+            datetime.datetime(2015, 3, 6, 14, 34, 0, 400000,
+                              tzinfo=iso8601.iso8601.UTC))
+
+    def test_to_timestamps_relative(self):
+        with mock.patch('gnocchi.utils.utcnow') as utcnow:
+            utcnow.return_value = datetime.datetime(
+                2015, 3, 6, 14, 34, tzinfo=iso8601.iso8601.UTC)
+            self.assertEqual(
+                utils.to_datetime("-10 minutes"),
+                datetime.datetime(2015, 3, 6, 14, 24,
+                                  tzinfo=iso8601.iso8601.UTC))
+
+
+class TestResourceUUID(tests_base.TestCase):
+    def test_conversion(self):
+        self.assertEqual(
+            uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
+            utils.ResourceUUID(
+                uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
+                "bar"))
+        self.assertEqual(
+            uuid.UUID('ba571521-1de6-5aff-b183-1535fd6eb5d0'),
+            utils.ResourceUUID("foo", "bar"))
+        self.assertEqual(
+            uuid.UUID('4efb21f6-3d19-5fe3-910b-be8f0f727846'),
+            utils.ResourceUUID("foo", None))
+        self.assertEqual(
+            uuid.UUID('853e5c64-f45e-58b2-999c-96df856fbe3d'),
+            utils.ResourceUUID("foo", ""))
+
+
+class StopWatchTest(tests_base.TestCase):
+    def test_no_states(self):
+        watch = utils.StopWatch()
+        self.assertRaises(RuntimeError, watch.stop)
+
+    def test_start_stop(self):
+        watch = utils.StopWatch()
+        watch.start()
+        watch.stop()
+
+    def test_no_elapsed(self):
+        watch = utils.StopWatch()
+        self.assertRaises(RuntimeError, watch.elapsed)
+
+    def test_elapsed(self):
+        watch = utils.StopWatch()
+        watch.start()
+        watch.stop()
+        elapsed = watch.elapsed()
+        self.assertAlmostEqual(elapsed, watch.elapsed())
+
+    def test_context_manager(self):
+        with utils.StopWatch() as watch:
+            pass
+        self.assertGreater(watch.elapsed(), 0)
+
+
+class ParallelMap(tests_base.TestCase):
+    def test_parallel_map_one(self):
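+        # With a single worker, parallel_map degrades to a sequential
+        # itertools.starmap, so the patched starmap must have been called.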
+        utils.parallel_map.MAX_WORKERS = 1
+        starmap = itertools.starmap
+        with mock.patch("itertools.starmap") as sm:
+            sm.side_effect = starmap
+            self.assertEqual([1, 2, 3],
+                             utils.parallel_map(lambda x: x,
+                                                [[1], [2], [3]]))
+            sm.assert_called()
+
+    def test_parallel_map_four(self):
+        utils.parallel_map.MAX_WORKERS = 4
+        starmap = itertools.starmap
+        with mock.patch("itertools.starmap") as sm:
+            sm.side_effect = starmap
+            self.assertEqual([1, 2, 3],
+                             utils.parallel_map(lambda x: x,
+                                                [[1], [2], [3]]))
+            sm.assert_not_called()
+
+
+class ReturnNoneOnFailureTest(tests_base.TestCase):
+    def test_works(self):
+
+        @utils.return_none_on_failure
+        def foobar():
+            raise Exception("boom")
+
+        self.assertIsNone(foobar())
+
+
+def get_measures_list(measures_agg):
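+    # Flatten a {aggregation: measures} mapping into
+    # {method: [(timestamp, granularity, value), ...]}, grouping the
+    # aggregations by method and sorting them by granularity, coarsest
+    # first.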
+    return {
+        aggmethod: list(itertools.chain(
+            *[[(timestamp, measures_agg[agg].aggregation.granularity, value)
+               for timestamp, value in measures_agg[agg]]
+              for agg in sorted(aggs,
+                                key=storage.ATTRGETTER_GRANULARITY,
+                                reverse=True)]))
+        for aggmethod, aggs in itertools.groupby(measures_agg.keys(),
+                                                 storage.ATTRGETTER_METHOD)
+    }
diff --git a/gnocchi/tests/utils.py b/gnocchi/tests/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..a9773ce994d176b44920d05447e63d93461b6866
--- /dev/null
+++ b/gnocchi/tests/utils.py
@@ -0,0 +1,25 @@
+# -*- encoding: utf-8 -*-
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+from oslo_config import cfg
+from oslo_policy import opts as policy_opts
+
+from gnocchi import opts
+
+
+def prepare_conf():
+    conf = cfg.ConfigOpts()
+
+    opts.set_defaults()
+    policy_opts.set_defaults(conf)
+    return conf
diff --git a/gnocchi/utils.py b/gnocchi/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..ad2d2b88279348c4eb7052dc11df0bd4e6521818
--- /dev/null
+++ b/gnocchi/utils.py
@@ -0,0 +1,359 @@
+# -*- encoding: utf-8 -*-
+#
+# Copyright © 2015-2017 Red Hat, Inc.
+# Copyright © 2015-2016 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may
+# not use this file except in compliance with the License. You may obtain
+# a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+import datetime
+import distutils.util
+import errno
+import itertools
+import multiprocessing
+import os
+import uuid
+
+from concurrent import futures
+import daiquiri
+import iso8601
+import monotonic
+import numpy
+import pytimeparse
+import six
+from stevedore import driver
+import tenacity
+
+
+LOG = daiquiri.getLogger(__name__)
+
+
+# uuid5 namespace for id transformation.
+# NOTE(chdent): This UUID must stay the same, forever, across all
+# of gnocchi to preserve its value as a URN namespace.
+RESOURCE_ID_NAMESPACE = uuid.UUID('0a7a15ff-aa13-4ac2-897c-9bdf30ce175b')
+
+
+def ResourceUUID(value, creator):
+    if isinstance(value, uuid.UUID):
+        return value
+    if '/' in value:
+        raise ValueError("'/' is not supported in resource id")
+    try:
+        return uuid.UUID(value)
+    except ValueError:
+        if len(value) <= 255:
+            if creator is None:
+                creator = "\x00"
+            # value/creator must be str (unicode) in Python 3 and str (bytes)
+            # in Python 2. It's not logical, I know.
+            if six.PY2:
+                value = value.encode('utf-8')
+                creator = creator.encode('utf-8')
+            return uuid.uuid5(RESOURCE_ID_NAMESPACE,
+                              value + "\x00" + creator)
+        raise ValueError(
+            'transformable resource id >255 max allowed characters')
+
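+# A non-UUID value is mapped deterministically through uuid5 over
+# RESOURCE_ID_NAMESPACE; e.g. ResourceUUID("foo", "bar") always yields
+# ba571521-1de6-5aff-b183-1535fd6eb5d0 (see TestResourceUUID in
+# gnocchi/tests/test_utils.py).
+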
+
+def UUID(value):
+    try:
+        return uuid.UUID(value)
+    except Exception as e:
+        raise ValueError(e)
+
+
+unix_universal_start64 = numpy.datetime64("1970")
+
+
+def to_timestamps(values):
+    try:
+        if len(values) == 0:
+            return []
+        if isinstance(values[0], (numpy.datetime64, datetime.datetime)):
+            times = numpy.array(values)
+        else:
+            try:
+                # Try to convert to float. If it works, then we consider
+                # timestamps to be number of seconds since Epoch
+                # e.g. 123456 or 129491.1293
+                float(values[0])
+            except ValueError:
+                try:
+                    # Try to parse the value as an ISO 8601 timestamp
+                    # string, e.g. 2017-10-09T23:23:12.123
+                    numpy.datetime64(values[0])
+                except ValueError:
+                    # Last chance: it can be a relative timestamp, so
+                    # convert to a timedelta relative to now(),
+                    # e.g. "-10 seconds" or "5 minutes"
+                    times = numpy.fromiter(
+                        numpy.add(numpy.datetime64(utcnow()),
+                                  [to_timespan(v, True) for v in values]),
+                        dtype='datetime64[ns]', count=len(values))
+                else:
+                    times = numpy.array(values, dtype='datetime64[ns]')
+            else:
+                times = numpy.array(values, dtype='float') * 10e8
+    except ValueError:
+        raise ValueError("Unable to convert timestamps")
+
+    times = times.astype('datetime64[ns]')
+
+    if (times < unix_universal_start64).any():
+        raise ValueError('Timestamp must be after Epoch')
+
+    return times
+
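+# Accepted input forms, as exercised by gnocchi/tests/test_utils.py: epoch
+# seconds such as 1425652440 or "1425652440.4", ISO 8601 strings such as
+# "2017-10-09T23:23:12.123", and relative offsets such as "-10 minutes",
+# resolved against utcnow().
+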
+
+def to_timestamp(value):
+    return to_timestamps([value])[0]
+
+
+def to_datetime(value):
+    return timestamp_to_datetime(to_timestamp(value))
+
+
+def timestamp_to_datetime(v):
+    return datetime.datetime.utcfromtimestamp(
+        v.astype(float) / 10e8).replace(tzinfo=iso8601.iso8601.UTC)
+
+
+def to_timespan(value, allow_le_zero=False):
+    if value is None:
+        raise ValueError("Invalid timespan")
+    try:
+        seconds = float(value)
+    except Exception:
+        seconds = pytimeparse.parse(value)
+        if seconds is None:
+            raise ValueError("Unable to parse timespan")
+    seconds = numpy.timedelta64(int(seconds * 10e8), 'ns')
+    if not allow_le_zero and seconds <= numpy.timedelta64(0, 'ns'):
+        raise ValueError("Timespan must be positive")
+    return seconds
+
+
+_ONE_SECOND = numpy.timedelta64(1, 's')
+
+
+def timespan_total_seconds(td):
+    return td / _ONE_SECOND
+
+
+def utcnow():
+    """Version of utcnow() that returns utcnow with a correct TZ."""
+    return datetime.datetime.now(tz=iso8601.iso8601.UTC)
+
+
+def normalize_time(timestamp):
+    """Normalize time in arbitrary timezone to UTC naive object."""
+    offset = timestamp.utcoffset()
+    if offset is None:
+        return timestamp
+    return timestamp.replace(tzinfo=None) - offset
+
+
+def datetime_utc(*args):
+    return datetime.datetime(*args, tzinfo=iso8601.iso8601.UTC)
+
+
+unix_universal_start = datetime_utc(1970, 1, 1)
+
+
+def datetime_to_unix(timestamp):
+    return (timestamp - unix_universal_start).total_seconds()
+
+
+def dt_in_unix_ns(timestamp):
+    return int(datetime_to_unix(timestamp) * int(10e8))
+
+
+def get_default_workers():
+    try:
+        default_workers = multiprocessing.cpu_count() or 1
+    except NotImplementedError:
+        default_workers = 1
+    return default_workers
+
+
+def grouper(iterable, n):
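+    # Yield successive n-sized tuples from `iterable`; the last chunk may
+    # be shorter, e.g. grouper(range(5), 2) -> (0, 1), (2, 3), (4,).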
+    it = iter(iterable)
+    while True:
+        chunk = tuple(itertools.islice(it, n))
+        if not chunk:
+            return
+        yield chunk
+
+
+def ensure_paths(paths):
+    for p in paths:
+        try:
+            os.makedirs(p)
+        except OSError as e:
+            if e.errno != errno.EEXIST:
+                raise
+
+
+def strtobool(v):
+    if isinstance(v, bool):
+        return v
+    return bool(distutils.util.strtobool(v))
+
+
+class StopWatch(object):
+    """A simple timer/stopwatch helper class.
+
+    Inspired by: apache-commons-lang java stopwatch.
+
+    Not thread-safe (when a single watch is mutated by multiple threads at
+    the same time). Thread-safe when used by a single thread (not shared) or
+    when operations are performed in a thread-safe manner on these objects by
+    wrapping those operations with locks.
+
+    It uses the `monotonic`_ PyPI library to obtain a monotonically
+    increasing clock; the exact time source varies with the operating
+    system and Python version.
+
+    .. _monotonic: https://pypi.python.org/pypi/monotonic/
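+
+    It can also be used as a context manager, as the tests do::
+
+        with StopWatch() as watch:
+            do_something()  # hypothetical workload
+        print(watch.elapsed())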
+    """
+    _STARTED = object()
+    _STOPPED = object()
+
+    def __init__(self):
+        self._started_at = None
+        self._stopped_at = None
+        self._state = None
+
+    def start(self):
+        """Starts the watch (if not already started).
+
+        NOTE(harlowja): resets any splits previously captured (if any).
+        """
+        if self._state == self._STARTED:
+            return self
+        self._started_at = monotonic.monotonic()
+        self._state = self._STARTED
+        return self
+
+    @staticmethod
+    def _delta_seconds(earlier, later):
+        # Uses max to avoid the delta/time going backwards (and thus negative).
+        return max(0.0, later - earlier)
+
+    def elapsed(self):
+        """Returns how many seconds have elapsed."""
+        if self._state not in (self._STARTED, self._STOPPED):
+            raise RuntimeError("Can not get the elapsed time of a stopwatch"
+                               " if it has not been started/stopped")
+        if self._state == self._STOPPED:
+            elapsed = self._delta_seconds(self._started_at, self._stopped_at)
+        else:
+            elapsed = self._delta_seconds(
+                self._started_at, monotonic.monotonic())
+        return elapsed
+
+    def __enter__(self):
+        """Starts the watch."""
+        self.start()
+        return self
+
+    def __exit__(self, type, value, traceback):
+        """Stops the watch (ignoring errors if stop fails)."""
+        try:
+            self.stop()
+        except RuntimeError:
+            pass
+
+    def stop(self):
+        """Stops the watch."""
+        if self._state == self._STOPPED:
+            return self
+        if self._state != self._STARTED:
+            raise RuntimeError("Can not stop a stopwatch that has not been"
+                               " started")
+        self._stopped_at = monotonic.monotonic()
+        self._state = self._STOPPED
+        return self
+
+    def reset(self):
+        """Stop and re-start the watch."""
+        self.stop()
+        return self.start()
+
+
+def get_driver_class(namespace, conf):
+    """Return the storage driver class.
+
+    :param namespace: The stevedore entry point namespace to load the
+                      driver from.
+    :param conf: The conf to use to determine the driver.
+    """
+    return driver.DriverManager(namespace,
+                                conf.driver).driver
+
+
+def sequencial_map(fn, list_of_args):
+    return list(itertools.starmap(fn, list_of_args))
+
+
+def parallel_map(fn, list_of_args):
+    """Run a function in parallel."""
+
+    if parallel_map.MAX_WORKERS == 1:
+        return sequencial_map(fn, list_of_args)
+
+    with futures.ThreadPoolExecutor(
+            max_workers=parallel_map.MAX_WORKERS) as executor:
+        # We use list() to consume the whole iterator so that the first
+        # exception, if any, is raised right here; not much choice.
+        return list(executor.map(lambda args: fn(*args), list_of_args))
+
+
+parallel_map.MAX_WORKERS = get_default_workers()
+
+
+def return_none_on_failure(f):
+    try:
+        # Python 3
+        fname = f.__qualname__
+    except AttributeError:
+        fname = f.__name__
+
+    @six.wraps(f)
+    def _return_none_on_failure(*args, **kwargs):
+        try:
+            return f(*args, **kwargs)
+        except Exception as e:
+            LOG.critical("Unexpected error while calling %s: %s",
+                         fname, e, exc_info=True)
+
+    return _return_none_on_failure
+
+
+# Retry with exponential backoff, waiting at most 60 seconds between attempts
+wait_exponential = tenacity.wait_exponential(multiplier=0.5, max=60)
+
+retry_on_exception = tenacity.Retrying(wait=wait_exponential)
+
+
+class _retry_on_exception_and_log(tenacity.retry_if_exception_type):
+    def __init__(self, msg):
+        super(_retry_on_exception_and_log, self).__init__()
+        self.msg = msg
+
+    def __call__(self, attempt):
+        if attempt.failed:
+            LOG.error(self.msg, exc_info=attempt.exception())
+        return super(_retry_on_exception_and_log, self).__call__(attempt)
+
+
+def retry_on_exception_and_log(msg):
+    return tenacity.Retrying(
+        wait=wait_exponential, retry=_retry_on_exception_and_log(msg)).wraps
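+
+
+# Illustrative use (`flush_on_disk` being a hypothetical function):
+#
+#   @retry_on_exception_and_log("Unable to flush data on disk")
+#   def flush_on_disk():
+#       ...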
diff --git a/logo/gnocchi-bw.eps b/logo/gnocchi-bw.eps
new file mode 100644
index 0000000000000000000000000000000000000000..809af940480cb0149e1fd735017ad9286821a535
--- /dev/null
+++ b/logo/gnocchi-bw.eps
@@ -0,0 +1,5771 @@
+%!PS-Adobe-3.1 EPSF-3.0
+%ADO_DSC_Encoding: MacOS Roman
+%%Title: gnocchi-nb.eps
+%%Creator: Adobe Illustrator(R) 13.0
+%%For: Thierry Ung
+%%CreationDate: 4/3/17
+%%BoundingBox: 0 0 1096 840
+%%HiResBoundingBox: 0 0 1096 840
+%%CropBox: 0 0 1096 840
+%%LanguageLevel: 2
+%%DocumentData: Clean7Bit
+%ADOBeginClientInjection: DocumentHeader "AI11EPS"
+%%AI8_CreatorVersion: 13.0.0
+%AI9_PrintingDataBegin
+%AI3_Cropmarks: 36.0000 36.0000 1060.0000 804.0000
+%ADO_BuildNumber: Adobe Illustrator(R) 13.0.0 x409 R agm 4.4378 ct 5.1039
+%ADO_ContainsXMP: MainFirst
+%AI7_Thumbnail: 128 100 8
+%%BeginData: 5212 Hex Bytes
+% [5212 hex bytes of embedded thumbnail preview data omitted]
+%%EndData
+%ADOEndClientInjection: DocumentHeader "AI11EPS"
+%%Pages: 1
+%%DocumentNeededResources: 
+%%DocumentSuppliedResources: procset Adobe_AGM_Image 1.0 0
+%%+ procset Adobe_CoolType_Utility_T42 1.0 0
+%%+ procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
+%%+ procset Adobe_CoolType_Core 2.31 0
+%%+ procset Adobe_AGM_Core 2.0 0
+%%+ procset Adobe_AGM_Utils 1.0 0
+%%DocumentFonts: 
+%%DocumentNeededFonts: 
+%%DocumentNeededFeatures: 
+%%DocumentSuppliedFeatures: 
+%%DocumentProcessColors:  Black
+%%DocumentCustomColors: 
+%%CMYKCustomColor: 
+%%RGBCustomColor: 
+%%EndComments
+                                                                                                                                                                                                                                                  
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+%%BeginDefaults
+%%ViewingOrientation: 1 0 0 1
+%%EndDefaults
+%%BeginProlog
+%%BeginResource: procset Adobe_AGM_Utils 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{currentpacking	true setpacking}if
+userdict/Adobe_AGM_Utils 73 dict dup begin put
+/bdf
+{bind def}bind def
+/nd{null def}bdf
+/xdf
+{exch def}bdf
+/ldf 
+{load def}bdf
+/ddf
+{put}bdf	
+/xddf
+{3 -1 roll put}bdf	
+/xpt
+{exch put}bdf
+/ndf
+{
+	exch dup where{
+		pop pop pop
+	}{
+		xdf
+	}ifelse
+}def
+/cdndf
+{
+	exch dup currentdict exch known{
+		pop pop
+	}{
+		exch def
+	}ifelse
+}def
+/gx
+{get exec}bdf
+/ps_level
+	/languagelevel where{
+		pop systemdict/languagelevel gx
+	}{
+		1
+	}ifelse
+def
+/level2 
+	ps_level 2 ge
+def
+/level3 
+	ps_level 3 ge
+def
+/ps_version
+	{version cvr}stopped{-1}if
+def
+/set_gvm
+{currentglobal exch setglobal}bdf
+/reset_gvm
+{setglobal}bdf
+/makereadonlyarray
+{
+	/packedarray where{pop packedarray
+	}{
+		array astore readonly}ifelse
+}bdf
+/map_reserved_ink_name
+{
+	dup type/stringtype eq{
+		dup/Red eq{
+			pop(_Red_)
+		}{
+			dup/Green eq{
+				pop(_Green_)
+			}{
+				dup/Blue eq{
+					pop(_Blue_)
+				}{
+					dup()cvn eq{
+						pop(Process)
+					}if
+				}ifelse
+			}ifelse
+		}ifelse
+	}if
+}bdf
+/AGMUTIL_GSTATE 22 dict def
+/get_gstate
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_clr_spc currentcolorspace def
+	/AGMUTIL_GSTATE_clr_indx 0 def
+	/AGMUTIL_GSTATE_clr_comps 12 array def
+	mark currentcolor counttomark
+		{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 3 -1 roll put
+		/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 add def}repeat pop
+	/AGMUTIL_GSTATE_fnt rootfont def
+	/AGMUTIL_GSTATE_lw currentlinewidth def
+	/AGMUTIL_GSTATE_lc currentlinecap def
+	/AGMUTIL_GSTATE_lj currentlinejoin def
+	/AGMUTIL_GSTATE_ml currentmiterlimit def
+	currentdash/AGMUTIL_GSTATE_do xdf/AGMUTIL_GSTATE_da xdf
+	/AGMUTIL_GSTATE_sa currentstrokeadjust def
+	/AGMUTIL_GSTATE_clr_rnd currentcolorrendering def
+	/AGMUTIL_GSTATE_op currentoverprint def
+	/AGMUTIL_GSTATE_bg currentblackgeneration cvlit def
+	/AGMUTIL_GSTATE_ucr currentundercolorremoval cvlit def
+	currentcolortransfer cvlit/AGMUTIL_GSTATE_gy_xfer xdf cvlit/AGMUTIL_GSTATE_b_xfer xdf
+		cvlit/AGMUTIL_GSTATE_g_xfer xdf cvlit/AGMUTIL_GSTATE_r_xfer xdf
+	/AGMUTIL_GSTATE_ht currenthalftone def
+	/AGMUTIL_GSTATE_flt currentflat def
+	end
+}def
+/set_gstate
+{
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_clr_spc setcolorspace
+	AGMUTIL_GSTATE_clr_indx{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 1 sub get
+	/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 sub def}repeat setcolor
+	AGMUTIL_GSTATE_fnt setfont
+	AGMUTIL_GSTATE_lw setlinewidth
+	AGMUTIL_GSTATE_lc setlinecap
+	AGMUTIL_GSTATE_lj setlinejoin
+	AGMUTIL_GSTATE_ml setmiterlimit
+	AGMUTIL_GSTATE_da AGMUTIL_GSTATE_do setdash
+	AGMUTIL_GSTATE_sa setstrokeadjust
+	AGMUTIL_GSTATE_clr_rnd setcolorrendering
+	AGMUTIL_GSTATE_op setoverprint
+	AGMUTIL_GSTATE_bg cvx setblackgeneration
+	AGMUTIL_GSTATE_ucr cvx setundercolorremoval
+	AGMUTIL_GSTATE_r_xfer cvx AGMUTIL_GSTATE_g_xfer cvx AGMUTIL_GSTATE_b_xfer cvx
+		AGMUTIL_GSTATE_gy_xfer cvx setcolortransfer
+	AGMUTIL_GSTATE_ht/HalftoneType get dup 9 eq exch 100 eq or
+		{
+		currenthalftone/HalftoneType get AGMUTIL_GSTATE_ht/HalftoneType get ne
+			{
+			 mark AGMUTIL_GSTATE_ht{sethalftone}stopped cleartomark
+			}if
+		}{
+		AGMUTIL_GSTATE_ht sethalftone
+		}ifelse
+	AGMUTIL_GSTATE_flt setflat
+	end
+}def
+/get_gstate_and_matrix
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_ctm matrix currentmatrix def
+	end
+	get_gstate
+}def
+/set_gstate_and_matrix
+{
+	set_gstate
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_ctm setmatrix
+	end
+}def
+/AGMUTIL_str256 256 string def
+/AGMUTIL_src256 256 string def
+/AGMUTIL_dst64 64 string def
+/AGMUTIL_srcLen nd
+/AGMUTIL_ndx nd
+/AGMUTIL_cpd nd
+/capture_cpd{
+	//Adobe_AGM_Utils/AGMUTIL_cpd currentpagedevice ddf
+}def
+/thold_halftone
+{
+	level3
+		{sethalftone currenthalftone}
+		{
+			dup/HalftoneType get 3 eq
+			{
+				sethalftone currenthalftone
+			}{
+				begin
+				Width Height mul{
+					Thresholds read{pop}if
+				}repeat
+				end
+				currenthalftone
+			}ifelse
+		}ifelse
+}def 
+/rdcmntline
+{
+	currentfile AGMUTIL_str256 readline pop
+	(%)anchorsearch{pop}if
+}bdf
+/filter_cmyk
+{	
+	dup type/filetype ne{
+		exch()/SubFileDecode filter
+	}{
+		exch pop
+	}
+	ifelse
+	[
+	exch
+	{
+		AGMUTIL_src256 readstring pop
+		dup length/AGMUTIL_srcLen exch def
+		/AGMUTIL_ndx 0 def
+		AGMCORE_plate_ndx 4 AGMUTIL_srcLen 1 sub{
+			1 index exch get
+			AGMUTIL_dst64 AGMUTIL_ndx 3 -1 roll put
+			/AGMUTIL_ndx AGMUTIL_ndx 1 add def
+		}for
+		pop
+		AGMUTIL_dst64 0 AGMUTIL_ndx getinterval
+	}
+	bind
+	/exec cvx
+	]cvx
+}bdf
+/filter_indexed_devn
+{
+	cvi Names length mul names_index add Lookup exch get
+}bdf
+/filter_devn
+{	
+	4 dict begin
+	/srcStr xdf
+	/dstStr xdf
+	dup type/filetype ne{
+		0()/SubFileDecode filter
+	}if
+	[
+	exch
+		[
+			/devicen_colorspace_dict/AGMCORE_gget cvx/begin cvx
+			currentdict/srcStr get/readstring cvx/pop cvx
+			/dup cvx/length cvx 0/gt cvx[
+				Adobe_AGM_Utils/AGMUTIL_ndx 0/ddf cvx
+				names_index Names length currentdict/srcStr get length 1 sub{
+					1/index cvx/exch cvx/get cvx
+					currentdict/dstStr get/AGMUTIL_ndx/load cvx 3 -1/roll cvx/put cvx
+					Adobe_AGM_Utils/AGMUTIL_ndx/AGMUTIL_ndx/load cvx 1/add cvx/ddf cvx
+				}for
+				currentdict/dstStr get 0/AGMUTIL_ndx/load cvx/getinterval cvx
+			]cvx/if cvx
+			/end cvx
+		]cvx
+		bind
+		/exec cvx
+	]cvx
+	end
+}bdf
+/AGMUTIL_imagefile nd
+/read_image_file
+{
+	AGMUTIL_imagefile 0 setfileposition
+	10 dict begin
+	/imageDict xdf
+	/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+	/imbufIdx 0 def
+	/origDataSource imageDict/DataSource get def
+	/origMultipleDataSources imageDict/MultipleDataSources get def
+	/origDecode imageDict/Decode get def
+	/dstDataStr imageDict/Width get colorSpaceElemCnt mul string def
+	imageDict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+	{
+		/imbufCnt imageDict/DataSource get length def
+		/imbufs imbufCnt array def
+		0 1 imbufCnt 1 sub{
+			/imbufIdx xdf
+			imbufs imbufIdx imbufLen string put
+			imageDict/DataSource get imbufIdx[AGMUTIL_imagefile imbufs imbufIdx get/readstring cvx/pop cvx]cvx put
+		}for
+		DeviceN_PS2{
+			imageDict begin
+		 	/DataSource[DataSource/devn_sep_datasource cvx]cvx def
+			/MultipleDataSources false def
+			/Decode[0 1]def
+			end
+		}if
+	}{
+		/imbuf imbufLen string def
+		Indexed_DeviceN level3 not and DeviceN_NoneName or{
+			/srcDataStrs[imageDict begin
+				currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+				{
+					Width Decode length 2 div mul cvi string
+				}repeat
+				end]def		
+			imageDict begin
+		 	/DataSource[AGMUTIL_imagefile Decode BitsPerComponent false 1/filter_indexed_devn load dstDataStr srcDataStrs devn_alt_datasource/exec cvx]cvx def
+			/Decode[0 1]def
+			end
+		}{
+			imageDict/DataSource[1 string dup 0 AGMUTIL_imagefile Decode length 2 idiv string/readstring cvx/pop cvx names_index/get cvx/put cvx]cvx put
+			imageDict/Decode[0 1]put
+		}ifelse
+	}ifelse
+	imageDict exch
+	load exec
+	imageDict/DataSource origDataSource put
+	imageDict/MultipleDataSources origMultipleDataSources put
+	imageDict/Decode origDecode put	
+	end
+}bdf
+/write_image_file
+{
+	begin
+	{(AGMUTIL_imagefile)(w+)file}stopped{
+		false
+	}{
+		Adobe_AGM_Utils/AGMUTIL_imagefile xddf 
+		2 dict begin
+		/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+		MultipleDataSources{DataSource 0 get}{DataSource}ifelse type/filetype eq{
+			/imbuf imbufLen string def
+		}if
+		1 1 Height MultipleDataSources not{Decode length 2 idiv mul}if{
+			pop
+			MultipleDataSources{
+			 	0 1 DataSource length 1 sub{
+					DataSource type dup
+					/arraytype eq{
+						pop DataSource exch gx
+					}{
+						/filetype eq{
+							DataSource exch get imbuf readstring pop
+						}{
+							DataSource exch get
+						}ifelse
+					}ifelse
+					AGMUTIL_imagefile exch writestring
+				}for
+			}{
+				DataSource type dup
+				/arraytype eq{
+					pop DataSource exec
+				}{
+					/filetype eq{
+						DataSource imbuf readstring pop
+					}{
+						DataSource
+					}ifelse
+				}ifelse
+				AGMUTIL_imagefile exch writestring
+			}ifelse
+		}for
+		end
+		true
+	}ifelse
+	end
+}bdf
+/close_image_file
+{
+	AGMUTIL_imagefile closefile(AGMUTIL_imagefile)deletefile
+}def
+statusdict/product known userdict/AGMP_current_show known not and{
+	/pstr statusdict/product get def
+	pstr(HP LaserJet 2200)eq 	
+	pstr(HP LaserJet 4000 Series)eq or
+	pstr(HP LaserJet 4050 Series )eq or
+	pstr(HP LaserJet 8000 Series)eq or
+	pstr(HP LaserJet 8100 Series)eq or
+	pstr(HP LaserJet 8150 Series)eq or
+	pstr(HP LaserJet 5000 Series)eq or
+	pstr(HP LaserJet 5100 Series)eq or
+	pstr(HP Color LaserJet 4500)eq or
+	pstr(HP Color LaserJet 4600)eq or
+	pstr(HP LaserJet 5Si)eq or
+	pstr(HP LaserJet 1200 Series)eq or
+	pstr(HP LaserJet 1300 Series)eq or
+	pstr(HP LaserJet 4100 Series)eq or 
+	{
+ 		userdict/AGMP_current_show/show load put
+		userdict/show{
+		 currentcolorspace 0 get
+		 /Pattern eq
+		 {false charpath f}
+		 {AGMP_current_show}ifelse
+		}put
+	}if
+	currentdict/pstr undef
+}if
+/consumeimagedata
+{
+	begin
+	AGMIMG_init_common
+	currentdict/MultipleDataSources known not
+		{/MultipleDataSources false def}if
+	MultipleDataSources
+		{
+		DataSource 0 get type
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width cvi string def
+			1 1 Height cvi
+				{
+				pop
+				0 1 DataSource length 1 sub
+					{
+					DataSource exch get
+					flushbuffer readstring pop pop
+					}for
+				}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or DataSource 0 get xcheck and
+			{
+			Width Height mul cvi
+				{
+				0 1 DataSource length 1 sub
+					{dup DataSource exch gx length exch 0 ne{pop}if}for
+				dup 0 eq
+					{pop exit}if
+				sub dup 0 le
+					{exit}if
+				}loop
+			pop
+			}if		
+		}
+		{
+		/DataSource load type 
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width Decode length 2 idiv mul cvi string def
+			1 1 Height{pop DataSource flushbuffer readstring pop pop}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or/DataSource load xcheck and
+			{
+				Height Width BitsPerComponent mul 8 BitsPerComponent sub add 8 idiv Decode length 2 idiv mul mul
+					{
+					DataSource length dup 0 eq
+						{pop exit}if
+					sub dup 0 le
+						{exit}if
+					}loop
+				pop
+			}if
+		}ifelse
+	end
+}bdf
+/addprocs
+{
+	 2{/exec load}repeat
+	 3 1 roll
+	 [5 1 roll]bind cvx
+}def
+/modify_halftone_xfer
+{
+	currenthalftone dup length dict copy begin
+	 currentdict 2 index known{
+	 	1 index load dup length dict copy begin
+		currentdict/TransferFunction known{
+			/TransferFunction load
+		}{
+			currenttransfer
+		}ifelse
+		 addprocs/TransferFunction xdf 
+		 currentdict end def
+		currentdict end sethalftone
+	}{
+		currentdict/TransferFunction known{
+			/TransferFunction load 
+		}{
+			currenttransfer
+		}ifelse
+		addprocs/TransferFunction xdf
+		currentdict end sethalftone		
+		pop
+	}ifelse
+}def
+/clonearray
+{
+	dup xcheck exch
+	dup length array exch
+	Adobe_AGM_Core/AGMCORE_tmp -1 ddf 
+	{
+	Adobe_AGM_Core/AGMCORE_tmp 2 copy get 1 add ddf 
+	dup type/dicttype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get
+			exch
+			clonedict
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	dup type/arraytype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get exch
+			clonearray
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	exch dup
+	Adobe_AGM_Core/AGMCORE_tmp get 4 -1 roll put
+	}forall
+	exch{cvx}if
+}bdf
+/clonedict
+{
+	dup length dict
+	begin
+	{
+		dup type/dicttype eq
+			{clonedict}if
+		dup type/arraytype eq
+			{clonearray}if
+		def
+	}forall
+	currentdict
+	end
+}bdf
+/DeviceN_PS2
+{
+	/currentcolorspace AGMCORE_gget 0 get/DeviceN eq level3 not and
+}bdf
+/Indexed_DeviceN
+{
+	/indexed_colorspace_dict AGMCORE_gget dup null ne{
+		dup/CSDBase known{
+			/CSDBase get/CSD get_res/Names known 
+		}{
+			pop false
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}bdf
+/DeviceN_NoneName
+{	
+	/Names where{
+		pop
+		false Names
+		{
+			(None)eq or
+		}forall
+	}{
+		false
+	}ifelse
+}bdf
+/DeviceN_PS2_inRip_seps
+{
+	/AGMCORE_in_rip_sep where
+	{
+		pop dup type dup/arraytype eq exch/packedarraytype eq or
+		{
+			dup 0 get/DeviceN eq level3 not and AGMCORE_in_rip_sep and
+			{
+				/currentcolorspace exch AGMCORE_gput
+				false
+			}{
+				true
+			}ifelse
+		}{
+			true
+		}ifelse
+	}{
+		true
+	}ifelse
+}bdf
+/base_colorspace_type
+{
+	dup type/arraytype eq{0 get}if
+}bdf
+/currentdistillerparams where{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+{
+	/pdfmark_5{cleartomark}bind def
+}{
+	/pdfmark_5{pdfmark}bind def
+}ifelse
+/ReadBypdfmark_5
+{
+	currentfile exch 0 exch/SubFileDecode filter
+	/currentdistillerparams where 
+	{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+	{flushfile cleartomark}
+	{/PUT pdfmark}ifelse 	
+}bdf
+/xpdfm
+{
+	{
+		dup 0 get/Label eq
+		{
+			aload length[exch 1 add 1 roll/PAGELABEL
+		}{
+			aload pop
+			[{ThisPage}<<5 -2 roll>>/PUT
+		}ifelse
+		pdfmark_5
+	}forall
+}bdf
+/ds{
+	Adobe_AGM_Utils begin
+}bdf
+/dt{
+	currentdict Adobe_AGM_Utils eq{
+		end
+	}if
+}bdf
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_AGM_Core 2.0 0
+%%Version: 2.0 0
+%%Copyright: Copyright(C)1997-2007 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
+userdict/Adobe_AGM_Core 209 dict dup begin put
+/Adobe_AGM_Core_Id/Adobe_AGM_Core_2.0_0 def
+/AGMCORE_str256 256 string def
+/AGMCORE_save nd
+/AGMCORE_graphicsave nd
+/AGMCORE_c 0 def
+/AGMCORE_m 0 def
+/AGMCORE_y 0 def
+/AGMCORE_k 0 def
+/AGMCORE_cmykbuf 4 array def
+/AGMCORE_screen[currentscreen]cvx def
+/AGMCORE_tmp 0 def
+/AGMCORE_&setgray nd
+/AGMCORE_&setcolor nd
+/AGMCORE_&setcolorspace nd
+/AGMCORE_&setcmykcolor nd
+/AGMCORE_cyan_plate nd
+/AGMCORE_magenta_plate nd
+/AGMCORE_yellow_plate nd
+/AGMCORE_black_plate nd
+/AGMCORE_plate_ndx nd
+/AGMCORE_get_ink_data nd
+/AGMCORE_is_cmyk_sep nd
+/AGMCORE_host_sep nd
+/AGMCORE_avoid_L2_sep_space nd
+/AGMCORE_distilling nd
+/AGMCORE_composite_job nd
+/AGMCORE_producing_seps nd
+/AGMCORE_ps_level -1 def
+/AGMCORE_ps_version -1 def
+/AGMCORE_environ_ok nd
+/AGMCORE_CSD_cache 0 dict def
+/AGMCORE_currentoverprint false def
+/AGMCORE_deltaX nd
+/AGMCORE_deltaY nd
+/AGMCORE_name nd
+/AGMCORE_sep_special nd
+/AGMCORE_err_strings 4 dict def
+/AGMCORE_cur_err nd
+/AGMCORE_current_spot_alias false def
+/AGMCORE_inverting false def
+/AGMCORE_feature_dictCount nd
+/AGMCORE_feature_opCount nd
+/AGMCORE_feature_ctm nd
+/AGMCORE_ConvertToProcess false def
+/AGMCORE_Default_CTM matrix def
+/AGMCORE_Default_PageSize nd
+/AGMCORE_Default_flatness nd
+/AGMCORE_currentbg nd
+/AGMCORE_currentucr nd
+/AGMCORE_pattern_paint_type 0 def
+/knockout_unitsq nd
+currentglobal true setglobal
+[/CSA/Gradient/Procedure]
+{
+	/Generic/Category findresource dup length dict copy/Category defineresource pop
+}forall
+setglobal
+/AGMCORE_key_known
+{
+	where{
+		/Adobe_AGM_Core_Id known
+	}{
+		false
+	}ifelse
+}ndf
+/flushinput
+{
+	save
+	2 dict begin
+	/CompareBuffer 3 -1 roll def
+	/readbuffer 256 string def
+	mark
+	{
+	currentfile readbuffer{readline}stopped
+		{cleartomark mark}
+		{
+		not
+			{pop exit}
+		if
+		CompareBuffer eq
+			{exit}
+		if
+		}ifelse
+	}loop
+	cleartomark
+	end
+	restore
+}bdf
+/getspotfunction
+{
+	AGMCORE_screen exch pop exch pop
+	dup type/dicttype eq{
+		dup/HalftoneType get 1 eq{
+			/SpotFunction get
+		}{
+			dup/HalftoneType get 2 eq{
+				/GraySpotFunction get
+			}{
+				pop
+				{
+					abs exch abs 2 copy add 1 gt{
+						1 sub dup mul exch 1 sub dup mul add 1 sub
+					}{
+						dup mul exch dup mul add 1 exch sub
+					}ifelse
+				}bind
+			}ifelse
+		}ifelse
+	}if
+}def
+/np
+{newpath}bdf
+/clp_npth
+{clip np}def
+/eoclp_npth
+{eoclip np}def
+/npth_clp
+{np clip}def
+/graphic_setup
+{
+	/AGMCORE_graphicsave save store
+	concat
+	0 setgray
+	0 setlinecap
+	0 setlinejoin
+	1 setlinewidth
+	[]0 setdash
+	10 setmiterlimit
+	np
+	false setoverprint
+	false setstrokeadjust
+	//Adobe_AGM_Core/spot_alias gx
+	/Adobe_AGM_Image where{
+		pop
+		Adobe_AGM_Image/spot_alias 2 copy known{
+			gx
+		}{
+			pop pop
+		}ifelse
+	}if
+	/sep_colorspace_dict null AGMCORE_gput
+	100 dict begin
+	/dictstackcount countdictstack def
+	/showpage{}def
+	mark
+}def
+/graphic_cleanup
+{
+	cleartomark
+	dictstackcount 1 countdictstack 1 sub{end}for
+	end
+	AGMCORE_graphicsave restore
+}def
+/compose_error_msg
+{
+	grestoreall initgraphics	
+	/Helvetica findfont 10 scalefont setfont
+	/AGMCORE_deltaY 100 def
+	/AGMCORE_deltaX 310 def
+	clippath pathbbox np pop pop 36 add exch 36 add exch moveto
+	0 AGMCORE_deltaY rlineto AGMCORE_deltaX 0 rlineto
+	0 AGMCORE_deltaY neg rlineto AGMCORE_deltaX neg 0 rlineto closepath
+	0 AGMCORE_&setgray
+	gsave 1 AGMCORE_&setgray fill grestore 
+	1 setlinewidth gsave stroke grestore
+	currentpoint AGMCORE_deltaY 15 sub add exch 8 add exch moveto
+	/AGMCORE_deltaY 12 def
+	/AGMCORE_tmp 0 def
+	AGMCORE_err_strings exch get
+		{
+		dup 32 eq
+			{
+			pop
+			AGMCORE_str256 0 AGMCORE_tmp getinterval
+			stringwidth pop currentpoint pop add AGMCORE_deltaX 28 add gt
+				{
+				currentpoint AGMCORE_deltaY sub exch pop
+				clippath pathbbox pop pop pop 44 add exch moveto
+				}if
+			AGMCORE_str256 0 AGMCORE_tmp getinterval show( )show
+			0 1 AGMCORE_str256 length 1 sub
+				{
+				AGMCORE_str256 exch 0 put
+				}for
+			/AGMCORE_tmp 0 def
+			}{
+				AGMCORE_str256 exch AGMCORE_tmp xpt
+				/AGMCORE_tmp AGMCORE_tmp 1 add def
+			}ifelse
+		}forall
+}bdf
+/AGMCORE_CMYKDeviceNColorspaces[
+	[/Separation/None/DeviceCMYK{0 0 0}]
+	[/Separation(Black)/DeviceCMYK{0 0 0 4 -1 roll}bind]
+	[/Separation(Yellow)/DeviceCMYK{0 0 3 -1 roll 0}bind]
+	[/DeviceN[(Yellow)(Black)]/DeviceCMYK{0 0 4 2 roll}bind]
+	[/Separation(Magenta)/DeviceCMYK{0 exch 0 0}bind]
+	[/DeviceN[(Magenta)(Black)]/DeviceCMYK{0 3 1 roll 0 exch}bind]
+	[/DeviceN[(Magenta)(Yellow)]/DeviceCMYK{0 3 1 roll 0}bind]
+	[/DeviceN[(Magenta)(Yellow)(Black)]/DeviceCMYK{0 4 1 roll}bind]
+	[/Separation(Cyan)/DeviceCMYK{0 0 0}]
+	[/DeviceN[(Cyan)(Black)]/DeviceCMYK{0 0 3 -1 roll}bind]
+	[/DeviceN[(Cyan)(Yellow)]/DeviceCMYK{0 exch 0}bind]
+	[/DeviceN[(Cyan)(Yellow)(Black)]/DeviceCMYK{0 3 1 roll}bind]
+	[/DeviceN[(Cyan)(Magenta)]/DeviceCMYK{0 0}]
+	[/DeviceN[(Cyan)(Magenta)(Black)]/DeviceCMYK{0 exch}bind]
+	[/DeviceN[(Cyan)(Magenta)(Yellow)]/DeviceCMYK{0}]
+	[/DeviceCMYK]
+]def
+/ds{
+	Adobe_AGM_Core begin
+	/currentdistillerparams where
+		{
+		pop currentdistillerparams/CoreDistVersion get 5000 lt
+			{<</DetectBlends false>>setdistillerparams}if
+		}if	
+	/AGMCORE_ps_version xdf
+	/AGMCORE_ps_level xdf
+	errordict/AGM_handleerror known not{
+		errordict/AGM_handleerror errordict/handleerror get put
+		errordict/handleerror{
+			Adobe_AGM_Core begin
+			$error/newerror get AGMCORE_cur_err null ne and{
+				$error/newerror false put
+				AGMCORE_cur_err compose_error_msg
+			}if
+			$error/newerror true put
+			end
+			errordict/AGM_handleerror get exec
+			}bind put
+		}if
+	/AGMCORE_environ_ok 
+		ps_level AGMCORE_ps_level ge
+		ps_version AGMCORE_ps_version ge and 
+		AGMCORE_ps_level -1 eq or
+	def
+	AGMCORE_environ_ok not
+		{/AGMCORE_cur_err/AGMCORE_bad_environ def}if
+	/AGMCORE_&setgray systemdict/setgray get def
+	level2{
+		/AGMCORE_&setcolor systemdict/setcolor get def
+		/AGMCORE_&setcolorspace systemdict/setcolorspace get def
+	}if
+	/AGMCORE_currentbg currentblackgeneration def
+	/AGMCORE_currentucr currentundercolorremoval def
+	/AGMCORE_Default_flatness currentflat def
+	/AGMCORE_distilling
+		/product where{
+			pop systemdict/setdistillerparams known product(Adobe PostScript Parser)ne and
+		}{
+			false
+		}ifelse
+	def
+	/AGMCORE_GSTATE AGMCORE_key_known not{
+		/AGMCORE_GSTATE 21 dict def
+		/AGMCORE_tmpmatrix matrix def
+		/AGMCORE_gstack 32 array def
+		/AGMCORE_gstackptr 0 def
+		/AGMCORE_gstacksaveptr 0 def
+		/AGMCORE_gstackframekeys 14 def
+		/AGMCORE_&gsave/gsave ldf
+		/AGMCORE_&grestore/grestore ldf
+		/AGMCORE_&grestoreall/grestoreall ldf
+		/AGMCORE_&save/save ldf
+		/AGMCORE_&setoverprint/setoverprint ldf
+		/AGMCORE_gdictcopy{
+			begin
+			{def}forall
+			end
+		}def
+		/AGMCORE_gput{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			3 1 roll
+			put
+		}def
+		/AGMCORE_gget{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			exch
+			get
+		}def
+		/gsave{
+			AGMCORE_&gsave
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/grestore{
+			AGMCORE_&grestore
+			AGMCORE_gstackptr 1 sub
+			dup AGMCORE_gstacksaveptr lt{1 add}if
+			dup AGMCORE_gstack exch get dup/AGMCORE_currentoverprint known
+				{/AGMCORE_currentoverprint get setoverprint}{pop}ifelse
+			/AGMCORE_gstackptr exch store
+		}def
+		/grestoreall{
+			AGMCORE_&grestoreall
+			/AGMCORE_gstackptr AGMCORE_gstacksaveptr store 
+		}def
+		/save{
+			AGMCORE_&save
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			/AGMCORE_gstacksaveptr AGMCORE_gstackptr store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/setoverprint{
+			dup/AGMCORE_currentoverprint exch AGMCORE_gput AGMCORE_&setoverprint
+		}def	
+		0 1 AGMCORE_gstack length 1 sub{
+				AGMCORE_gstack exch AGMCORE_gstackframekeys dict put
+		}for
+	}if
+	level3/AGMCORE_&sysshfill AGMCORE_key_known not and
+	{
+		/AGMCORE_&sysshfill systemdict/shfill get def
+		/AGMCORE_&sysmakepattern systemdict/makepattern get def
+		/AGMCORE_&usrmakepattern/makepattern load def
+	}if
+	/currentcmykcolor[0 0 0 0]AGMCORE_gput
+	/currentstrokeadjust false AGMCORE_gput
+	/currentcolorspace[/DeviceGray]AGMCORE_gput
+	/sep_tint 0 AGMCORE_gput
+	/devicen_tints[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]AGMCORE_gput
+	/sep_colorspace_dict null AGMCORE_gput
+	/devicen_colorspace_dict null AGMCORE_gput
+	/indexed_colorspace_dict null AGMCORE_gput
+	/currentcolor_intent()AGMCORE_gput
+	/customcolor_tint 1 AGMCORE_gput
+	/absolute_colorimetric_crd null AGMCORE_gput
+	/relative_colorimetric_crd null AGMCORE_gput
+	/saturation_crd null AGMCORE_gput
+	/perceptual_crd null AGMCORE_gput
+	currentcolortransfer cvlit/AGMCore_gray_xfer xdf cvlit/AGMCore_b_xfer xdf
+		 cvlit/AGMCore_g_xfer xdf cvlit/AGMCore_r_xfer xdf
+	<<
+	/MaxPatternItem currentsystemparams/MaxPatternCache get
+	>>
+	setuserparams
+	end
+}def
+/ps
+{
+	/setcmykcolor where{
+		pop
+		Adobe_AGM_Core/AGMCORE_&setcmykcolor/setcmykcolor load put
+	}if
+	Adobe_AGM_Core begin
+	/setcmykcolor
+	{
+		4 copy AGMCORE_cmykbuf astore/currentcmykcolor exch AGMCORE_gput
+		1 sub 4 1 roll
+		3{
+			3 index add neg dup 0 lt{
+				pop 0
+			}if
+			3 1 roll
+		}repeat
+		setrgbcolor pop
+	}ndf
+	/currentcmykcolor
+	{
+		/currentcmykcolor AGMCORE_gget aload pop
+	}ndf
+	/setoverprint
+	{pop}ndf
+	/currentoverprint
+	{false}ndf
+	/AGMCORE_cyan_plate 1 0 0 0 test_cmyk_color_plate def
+	/AGMCORE_magenta_plate 0 1 0 0 test_cmyk_color_plate def
+	/AGMCORE_yellow_plate 0 0 1 0 test_cmyk_color_plate def
+	/AGMCORE_black_plate 0 0 0 1 test_cmyk_color_plate def
+	/AGMCORE_plate_ndx 
+		AGMCORE_cyan_plate{
+			0
+		}{
+			AGMCORE_magenta_plate{
+				1
+			}{
+				AGMCORE_yellow_plate{
+					2
+				}{
+					AGMCORE_black_plate{
+						3
+					}{
+						4
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		def
+	/AGMCORE_have_reported_unsupported_color_space false def
+	/AGMCORE_report_unsupported_color_space
+	{
+		AGMCORE_have_reported_unsupported_color_space false eq
+		{
+			(Warning: Job contains content that cannot be separated with on-host methods. This content appears on the black plate, and knocks out all other plates.)==
+			Adobe_AGM_Core/AGMCORE_have_reported_unsupported_color_space true ddf
+		}if
+	}def
+	/AGMCORE_composite_job
+		AGMCORE_cyan_plate AGMCORE_magenta_plate and AGMCORE_yellow_plate and AGMCORE_black_plate and def
+	/AGMCORE_in_rip_sep
+		/AGMCORE_in_rip_sep where{
+			pop AGMCORE_in_rip_sep
+		}{
+			AGMCORE_distilling 
+			{
+				false
+			}{
+				userdict/Adobe_AGM_OnHost_Seps known{
+					false
+				}{
+					level2{
+						currentpagedevice/Separations 2 copy known{
+							get
+						}{
+							pop pop false
+						}ifelse
+					}{
+						false
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	def
+	/AGMCORE_producing_seps AGMCORE_composite_job not AGMCORE_in_rip_sep or def
+	/AGMCORE_host_sep AGMCORE_producing_seps AGMCORE_in_rip_sep not and def
+	/AGM_preserve_spots 
+		/AGM_preserve_spots where{
+			pop AGM_preserve_spots
+		}{
+			AGMCORE_distilling AGMCORE_producing_seps or
+		}ifelse
+	def
+	/AGM_is_distiller_preserving_spotimages
+	{
+		currentdistillerparams/PreserveOverprintSettings known
+		{
+			currentdistillerparams/PreserveOverprintSettings get
+				{
+					currentdistillerparams/ColorConversionStrategy known
+					{
+						currentdistillerparams/ColorConversionStrategy get
+						/sRGB ne
+					}{
+						true
+					}ifelse
+				}{
+					false
+				}ifelse
+		}{
+			false
+		}ifelse
+	}def
+	/convert_spot_to_process where{pop}{
+		/convert_spot_to_process
+		{
+			//Adobe_AGM_Core begin
+			dup map_alias{
+				/Name get exch pop
+			}if
+			dup dup(None)eq exch(All)eq or
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+					gsave
+					1 0 0 0 setcmykcolor currentgray 1 exch sub
+					0 1 0 0 setcmykcolor currentgray 1 exch sub
+					0 0 1 0 setcmykcolor currentgray 1 exch sub
+					0 0 0 1 setcmykcolor currentgray 1 exch sub
+					add add add 0 eq
+					{
+						pop false
+					}{
+						false setoverprint
+						current_spot_alias false set_spot_alias
+						1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+						set_spot_alias
+						currentgray 1 ne
+					}ifelse
+					grestore
+				}{
+					AGMCORE_distilling
+					{
+						pop AGM_is_distiller_preserving_spotimages not
+					}{
+						//Adobe_AGM_Core/AGMCORE_name xddf
+						false
+						//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 0 eq
+						AGMUTIL_cpd/OverrideSeparations known and
+						{
+							AGMUTIL_cpd/OverrideSeparations get
+							{
+								/HqnSpots/ProcSet resourcestatus
+								{
+									pop pop pop true
+								}if
+							}if
+						}if					
+						{
+							AGMCORE_name/HqnSpots/ProcSet findresource/TestSpot gx not
+						}{
+							gsave
+							[/Separation AGMCORE_name/DeviceGray{}]AGMCORE_&setcolorspace
+							false
+							AGMUTIL_cpd/SeparationColorNames 2 copy known
+							{
+								get
+								{AGMCORE_name eq or}forall
+								not
+							}{
+								pop pop pop true
+							}ifelse
+							grestore
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			end
+		}def
+	}ifelse
+	/convert_to_process where{pop}{
+		/convert_to_process
+		{
+			dup length 0 eq
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+				dup true exch
+					{
+					dup(Cyan)eq exch
+					dup(Magenta)eq 3 -1 roll or exch
+					dup(Yellow)eq 3 -1 roll or exch
+					dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process and}ifelse
+					}
+				forall
+					{
+					true exch
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						(Black)eq or and
+						}forall
+						not
+					}{pop false}ifelse
+				}{
+				false exch
+					{
+					/PhotoshopDuotoneList where{pop false}{true}ifelse
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process or}ifelse
+						}
+						{
+						convert_spot_to_process or
+						}
+					ifelse
+					}
+				forall
+				}ifelse
+			}ifelse
+		}def
+	}ifelse	
+	/AGMCORE_avoid_L2_sep_space 
+		version cvr 2012 lt 
+		level2 and 
+		AGMCORE_producing_seps not and
+	def
+	/AGMCORE_is_cmyk_sep
+		AGMCORE_cyan_plate AGMCORE_magenta_plate or AGMCORE_yellow_plate or AGMCORE_black_plate or
+	def
+	/AGM_avoid_0_cmyk where{
+		pop AGM_avoid_0_cmyk
+	}{
+		AGM_preserve_spots 
+		userdict/Adobe_AGM_OnHost_Seps known 
+		userdict/Adobe_AGM_InRip_Seps known or
+		not and
+	}ifelse
+	{
+		/setcmykcolor[
+			{
+				4 copy add add add 0 eq currentoverprint and{
+					pop 0.0005
+				}if
+			}/exec cvx
+			/AGMCORE_&setcmykcolor load dup type/operatortype ne{
+				/exec cvx
+			}if
+		]cvx def
+	}if
+	/AGMCORE_IsSeparationAProcessColor
+		{
+		dup(Cyan)eq exch dup(Magenta)eq exch dup(Yellow)eq exch(Black)eq or or or
+		}def
+	AGMCORE_host_sep{
+		/setcolortransfer
+		{
+			AGMCORE_cyan_plate{
+				pop pop pop
+			}{
+			 	AGMCORE_magenta_plate{
+			 		4 3 roll pop pop pop
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			4 2 roll pop pop pop
+			 		}{
+			 			4 1 roll pop pop pop
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+			settransfer 
+		}	
+		def
+		/AGMCORE_get_ink_data
+			AGMCORE_cyan_plate{
+				{pop pop pop}
+			}{
+			 	AGMCORE_magenta_plate{
+			 		{4 3 roll pop pop pop}
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			{4 2 roll pop pop pop}
+			 		}{
+			 			{4 1 roll pop pop pop}
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+		def
+		/AGMCORE_RemoveProcessColorNames
+			{
+			1 dict begin
+			/filtername
+				{
+				dup/Cyan eq 1 index(Cyan)eq or
+					{pop(_cyan_)}if
+				dup/Magenta eq 1 index(Magenta)eq or
+					{pop(_magenta_)}if
+				dup/Yellow eq 1 index(Yellow)eq or
+					{pop(_yellow_)}if
+				dup/Black eq 1 index(Black)eq or
+					{pop(_black_)}if
+				}def
+			dup type/arraytype eq
+				{[exch{filtername}forall]}
+				{filtername}ifelse
+			end
+			}def
+		level3{
+			/AGMCORE_IsCurrentColor
+				{
+				dup AGMCORE_IsSeparationAProcessColor
+					{
+					AGMCORE_plate_ndx 0 eq
+						{dup(Cyan)eq exch/Cyan eq or}if
+					AGMCORE_plate_ndx 1 eq
+						{dup(Magenta)eq exch/Magenta eq or}if
+					AGMCORE_plate_ndx 2 eq
+						{dup(Yellow)eq exch/Yellow eq or}if
+					AGMCORE_plate_ndx 3 eq
+						{dup(Black)eq exch/Black eq or}if
+					AGMCORE_plate_ndx 4 eq
+						{pop false}if
+					}{
+					gsave
+					false setoverprint
+					current_spot_alias false set_spot_alias
+					1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+					set_spot_alias
+					currentgray 1 ne
+					grestore
+					}ifelse
+				}def
+			/AGMCORE_filter_functiondatasource
+				{	
+				5 dict begin
+				/data_in xdf
+				data_in type/stringtype eq
+					{
+					/ncomp xdf
+					/comp xdf
+					/string_out data_in length ncomp idiv string def
+					0 ncomp data_in length 1 sub
+						{
+						string_out exch dup ncomp idiv exch data_in exch ncomp getinterval comp get 255 exch sub put
+						}for
+					string_out
+					}{
+					string/string_in xdf
+					/string_out 1 string def
+					/component xdf
+					[
+					data_in string_in/readstring cvx
+						[component/get cvx 255/exch cvx/sub cvx string_out/exch cvx 0/exch cvx/put cvx string_out]cvx
+						[/pop cvx()]cvx/ifelse cvx
+					]cvx/ReusableStreamDecode filter
+				}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingFunction
+				{
+				2 dict begin
+				/paint? xdf
+				/channel xdf
+				dup type/dicttype eq
+					{
+					begin
+					FunctionType 0 eq
+						{
+						/DataSource channel Range length 2 idiv DataSource AGMCORE_filter_functiondatasource def
+						currentdict/Decode known
+							{/Decode Decode channel 2 mul 2 getinterval def}if
+						paint? not
+							{/Decode[1 1]def}if
+						}if
+					FunctionType 2 eq
+						{
+						paint?
+							{
+							/C0[C0 channel get 1 exch sub]def
+							/C1[C1 channel get 1 exch sub]def
+							}{
+							/C0[1]def
+							/C1[1]def
+							}ifelse			
+						}if
+					FunctionType 3 eq
+						{
+						/Functions[Functions{channel paint? AGMCORE_separateShadingFunction}forall]def			
+						}if
+					currentdict/Range known
+						{/Range[0 1]def}if
+					currentdict
+					end}{
+					channel get 0 paint? AGMCORE_separateShadingFunction
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShading
+				{
+				3 -1 roll begin
+				currentdict/Function known
+					{
+					currentdict/Background known
+						{[1 index{Background 3 index get 1 exch sub}{1}ifelse]/Background xdf}if
+					Function 3 1 roll AGMCORE_separateShadingFunction/Function xdf
+					/ColorSpace[/DeviceGray]def
+					}{
+					ColorSpace dup type/arraytype eq{0 get}if/DeviceCMYK eq
+						{
+						/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+						}{
+						ColorSpace dup 1 get AGMCORE_RemoveProcessColorNames 1 exch put
+						}ifelse
+					ColorSpace 0 get/Separation eq
+						{
+							{
+								[1/exch cvx/sub cvx]cvx
+							}{
+								[/pop cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll put
+							pop
+						}{
+							{
+								[exch ColorSpace 1 get length 1 sub exch sub/index cvx 1/exch cvx/sub cvx ColorSpace 1 get length 1 add 1/roll cvx ColorSpace 1 get length{/pop cvx}repeat]cvx
+							}{
+								pop[ColorSpace 1 get length{/pop cvx}repeat cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll bind put
+						}ifelse
+					ColorSpace 2/DeviceGray put																		
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingDict
+				{
+				dup/ColorSpace get
+				dup type/arraytype ne
+					{[exch]}if
+				dup 0 get/DeviceCMYK eq
+					{
+					exch begin 
+					currentdict
+					AGMCORE_cyan_plate
+						{0 true}if
+					AGMCORE_magenta_plate
+						{1 true}if
+					AGMCORE_yellow_plate
+						{2 true}if
+					AGMCORE_black_plate
+						{3 true}if
+					AGMCORE_plate_ndx 4 eq
+						{0 false}if		
+					dup not currentoverprint and
+						{/AGMCORE_ignoreshade true def}if
+					AGMCORE_separateShading
+					currentdict
+					end exch
+					}if
+				dup 0 get/Separation eq
+					{
+					exch begin
+					ColorSpace 1 get dup/None ne exch/All ne and
+						{
+						ColorSpace 1 get AGMCORE_IsCurrentColor AGMCORE_plate_ndx 4 lt and ColorSpace 1 get AGMCORE_IsSeparationAProcessColor not and
+							{
+							ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+								{
+								/ColorSpace
+									[
+									/Separation
+									ColorSpace 1 get
+									/DeviceGray
+										[
+										ColorSpace 3 get/exec cvx
+										4 AGMCORE_plate_ndx sub -1/roll cvx
+										4 1/roll cvx
+										3[/pop cvx]cvx/repeat cvx
+										1/exch cvx/sub cvx
+										]cvx									
+									]def
+								}{
+								AGMCORE_report_unsupported_color_space
+								AGMCORE_black_plate not
+									{
+									currentdict 0 false AGMCORE_separateShading
+									}if
+								}ifelse
+							}{
+							currentdict ColorSpace 1 get AGMCORE_IsCurrentColor
+							0 exch 
+							dup not currentoverprint and
+								{/AGMCORE_ignoreshade true def}if
+							AGMCORE_separateShading
+							}ifelse	
+						}if			
+					currentdict
+					end exch
+					}if
+				dup 0 get/DeviceN eq
+					{
+					exch begin
+					ColorSpace 1 get convert_to_process
+						{
+						ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+							{
+							/ColorSpace
+								[
+								/DeviceN
+								ColorSpace 1 get
+								/DeviceGray
+									[
+									ColorSpace 3 get/exec cvx
+									4 AGMCORE_plate_ndx sub -1/roll cvx
+									4 1/roll cvx
+									3[/pop cvx]cvx/repeat cvx
+									1/exch cvx/sub cvx
+									]cvx									
+								]def
+							}{
+							AGMCORE_report_unsupported_color_space
+							AGMCORE_black_plate not
+								{
+								currentdict 0 false AGMCORE_separateShading
+								/ColorSpace[/DeviceGray]def
+								}if
+							}ifelse
+						}{
+						currentdict
+						false -1 ColorSpace 1 get
+							{
+							AGMCORE_IsCurrentColor
+								{
+								1 add
+								exch pop true exch exit
+								}if
+							1 add
+							}forall
+						exch 
+						dup not currentoverprint and
+							{/AGMCORE_ignoreshade true def}if
+						AGMCORE_separateShading
+						}ifelse
+					currentdict
+					end exch
+					}if
+				dup 0 get dup/DeviceCMYK eq exch dup/Separation eq exch/DeviceN eq or or not
+					{
+					exch begin
+					ColorSpace dup type/arraytype eq
+						{0 get}if
+					/DeviceGray ne
+						{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate not
+							{
+							ColorSpace 0 get/CIEBasedA eq
+								{
+								/ColorSpace[/Separation/_ciebaseda_/DeviceGray{}]def
+								}if
+							ColorSpace 0 get dup/CIEBasedABC eq exch dup/CIEBasedDEF eq exch/DeviceRGB eq or or
+								{
+								/ColorSpace[/DeviceN[/_red_/_green_/_blue_]/DeviceRGB{}]def
+								}if
+							ColorSpace 0 get/CIEBasedDEFG eq
+								{
+								/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+								}if
+							currentdict 0 false AGMCORE_separateShading
+							}if
+						}if
+					currentdict
+					end exch
+					}if
+				pop
+				dup/AGMCORE_ignoreshade known
+					{
+					begin
+					/ColorSpace[/Separation(None)/DeviceGray{}]def
+					currentdict end
+					}if
+				}def
+			/shfill
+				{
+				AGMCORE_separateShadingDict 
+				dup/AGMCORE_ignoreshade known
+					{pop}
+					{AGMCORE_&sysshfill}ifelse
+				}def
+			/makepattern
+				{
+				exch
+				dup/PatternType get 2 eq
+					{
+					clonedict
+					begin
+					/Shading Shading AGMCORE_separateShadingDict def
+					Shading/AGMCORE_ignoreshade known
+					currentdict end exch
+					{pop<</PatternType 1/PaintProc{pop}/BBox[0 0 1 1]/XStep 1/YStep 1/PaintType 1/TilingType 3>>}if
+					exch AGMCORE_&sysmakepattern
+					}{
+					exch AGMCORE_&usrmakepattern
+					}ifelse
+				}def
+		}if
+	}if
+	AGMCORE_in_rip_sep{
+		/setcustomcolor
+		{
+			exch aload pop
+			dup 7 1 roll inRip_spot_has_ink not	{
+				4{4 index mul 4 1 roll}
+				repeat
+				/DeviceCMYK setcolorspace
+				6 -2 roll pop pop
+			}{
+				//Adobe_AGM_Core begin
+					/AGMCORE_k xdf/AGMCORE_y xdf/AGMCORE_m xdf/AGMCORE_c xdf
+				end
+				[/Separation 4 -1 roll/DeviceCMYK
+				{dup AGMCORE_c mul exch dup AGMCORE_m mul exch dup AGMCORE_y mul exch AGMCORE_k mul}
+				]
+				setcolorspace
+			}ifelse
+			setcolor
+		}ndf
+		/setseparationgray
+		{
+			[/Separation(All)/DeviceGray{}]setcolorspace_opt
+			1 exch sub setcolor
+		}ndf
+	}{
+		/setseparationgray
+		{
+			AGMCORE_&setgray
+		}ndf
+	}ifelse
+	/findcmykcustomcolor
+	{
+		5 makereadonlyarray
+	}ndf
+	/setcustomcolor
+	{
+		exch aload pop pop
+		4{4 index mul 4 1 roll}repeat
+		setcmykcolor pop
+	}ndf
+	/has_color
+		/colorimage where{
+			AGMCORE_producing_seps{
+				pop true
+			}{
+				systemdict eq
+			}ifelse
+		}{
+			false
+		}ifelse
+	def
+	/map_index
+	{
+		1 index mul exch getinterval{255 div}forall
+	}bdf
+	/map_indexed_devn
+	{
+		Lookup Names length 3 -1 roll cvi map_index
+	}bdf
+	/n_color_components
+	{
+		base_colorspace_type
+		dup/DeviceGray eq{
+			pop 1
+		}{
+			/DeviceCMYK eq{
+				4
+			}{
+				3
+			}ifelse
+		}ifelse
+	}bdf
+	level2{
+		/mo/moveto ldf
+		/li/lineto ldf
+		/cv/curveto ldf
+		/knockout_unitsq
+		{
+			1 setgray
+			0 0 1 1 rectfill
+		}def
+		level2/setcolorspace AGMCORE_key_known not and{
+			/AGMCORE_&&&setcolorspace/setcolorspace ldf
+			/AGMCORE_ReplaceMappedColor
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					/AGMCORE_SpotAliasAry2 where{
+						begin
+						dup 0 get dup/Separation eq
+						{
+							pop
+							dup length array copy
+							dup dup 1 get
+							current_spot_alias
+							{
+								dup map_alias
+								{
+									false set_spot_alias
+									dup 1 exch setsepcolorspace
+									true set_spot_alias
+									begin
+									/sep_colorspace_dict currentdict AGMCORE_gput
+									pop pop	pop
+									[
+										/Separation Name 
+										CSA map_csa
+										MappedCSA 
+										/sep_colorspace_proc load
+									]
+									dup Name
+									end
+								}if
+							}if
+							map_reserved_ink_name 1 xpt
+						}{
+							/DeviceN eq 
+							{
+								dup length array copy
+								dup dup 1 get[
+									exch{
+										current_spot_alias{
+											dup map_alias{
+												/Name get exch pop
+											}if
+										}if
+										map_reserved_ink_name
+									}forall 
+								]1 xpt
+							}if
+						}ifelse
+						end
+					}if
+				}if
+			}def
+			/setcolorspace
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					dup 0 get/Indexed eq
+					{
+						AGMCORE_distilling
+						{
+							/PhotoshopDuotoneList where
+							{
+								pop false
+							}{
+								true
+							}ifelse
+						}{
+							true
+						}ifelse
+						{
+							aload pop 3 -1 roll
+							AGMCORE_ReplaceMappedColor
+							3 1 roll 4 array astore
+						}if
+					}{
+						AGMCORE_ReplaceMappedColor
+					}ifelse
+				}if
+				DeviceN_PS2_inRip_seps{AGMCORE_&&&setcolorspace}if
+			}def
+		}if	
+	}{
+		/adj
+		{
+			currentstrokeadjust{
+				transform
+				0.25 sub round 0.25 add exch
+				0.25 sub round 0.25 add exch
+				itransform
+			}if
+		}def
+		/mo{
+			adj moveto
+		}def
+		/li{
+			adj lineto
+		}def
+		/cv{
+			6 2 roll adj
+			6 2 roll adj
+			6 2 roll adj curveto
+		}def
+		/knockout_unitsq
+		{
+			1 setgray
+			8 8 1[8 0 0 8 0 0]{<ffffffffffffffff>}image
+		}def
+		/currentstrokeadjust{
+			/currentstrokeadjust AGMCORE_gget
+		}def
+		/setstrokeadjust{
+			/currentstrokeadjust exch AGMCORE_gput
+		}def
+		/setcolorspace
+		{
+			/currentcolorspace exch AGMCORE_gput
+		}def
+		/currentcolorspace
+		{
+			/currentcolorspace AGMCORE_gget
+		}def
+		/setcolor_devicecolor
+		{
+			base_colorspace_type
+			dup/DeviceGray eq{
+				pop setgray
+			}{
+				/DeviceCMYK eq{
+					setcmykcolor
+				}{
+					setrgbcolor
+				}ifelse
+			}ifelse
+		}def
+		/setcolor
+		{
+			currentcolorspace 0 get
+			dup/DeviceGray ne{
+				dup/DeviceCMYK ne{
+					dup/DeviceRGB ne{
+						dup/Separation eq{
+							pop
+							currentcolorspace 3 gx
+							currentcolorspace 2 get
+						}{
+							dup/Indexed eq{
+								pop
+								currentcolorspace 3 get dup type/stringtype eq{
+									currentcolorspace 1 get n_color_components
+									3 -1 roll map_index
+								}{
+									exec
+								}ifelse
+								currentcolorspace 1 get
+							}{
+								/AGMCORE_cur_err/AGMCORE_invalid_color_space def
+								AGMCORE_invalid_color_space
+							}ifelse
+						}ifelse
+					}if
+				}if
+			}if
+			setcolor_devicecolor
+		}def
+	}ifelse
+	/sop/setoverprint ldf
+	/lw/setlinewidth ldf
+	/lc/setlinecap ldf
+	/lj/setlinejoin ldf
+	/ml/setmiterlimit ldf
+	/dsh/setdash ldf
+	/sadj/setstrokeadjust ldf
+	/gry/setgray ldf
+	/rgb/setrgbcolor ldf
+	/cmyk[
+		/currentcolorspace[/DeviceCMYK]/AGMCORE_gput cvx
+		/setcmykcolor load dup type/operatortype ne{/exec cvx}if
+	]cvx bdf
+	level3 AGMCORE_host_sep not and{
+		/nzopmsc{
+			6 dict begin
+			/kk exch def
+			/yy exch def
+			/mm exch def
+			/cc exch def
+			/sum 0 def
+			cc 0 ne{/sum sum 2#1000 or def cc}if
+			mm 0 ne{/sum sum 2#0100 or def mm}if
+			yy 0 ne{/sum sum 2#0010 or def yy}if
+			kk 0 ne{/sum sum 2#0001 or def kk}if
+			AGMCORE_CMYKDeviceNColorspaces sum get setcolorspace
+			sum 0 eq{0}if
+			end
+			setcolor
+		}bdf
+	}{
+		/nzopmsc/cmyk ldf
+	}ifelse
+	/sep/setsepcolor ldf
+	/devn/setdevicencolor ldf
+	/idx/setindexedcolor ldf
+	/colr/setcolor ldf
+	/csacrd/set_csa_crd ldf
+	/sepcs/setsepcolorspace ldf
+	/devncs/setdevicencolorspace ldf
+	/idxcs/setindexedcolorspace ldf
+	/cp/closepath ldf
+	/clp/clp_npth ldf
+	/eclp/eoclp_npth ldf
+	/f/fill ldf
+	/ef/eofill ldf
+	/@/stroke ldf
+	/nclp/npth_clp ldf
+	/gset/graphic_setup ldf
+	/gcln/graphic_cleanup ldf
+	/ct/concat ldf
+	/cf/currentfile ldf
+	/fl/filter ldf
+	/rs/readstring ldf
+	/AGMCORE_def_ht currenthalftone def
+	/clonedict Adobe_AGM_Utils begin/clonedict load end def
+	/clonearray Adobe_AGM_Utils begin/clonearray load end def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
+	/getrampcolor
+	{
+		/indx exch def
+		0 1 NumComp 1 sub
+		{
+			dup
+			Samples exch get
+			dup type/stringtype eq{indx get}if
+			exch
+			Scaling exch get aload pop
+			3 1 roll
+			mul add
+		}for
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse
+	}bdf
+	/sssetbackground{
+		aload pop 
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse	
+	}bdf
+	/RadialShade
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/r2 xdf
+		/c2y xdf
+		/c2x xdf
+		/r1 xdf
+		/c1y xdf
+		/c1x xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		c1x c2x eq
+		{
+			c1y c2y lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope c2y c1y sub c2x c1x sub div def
+			/theta slope 1 atan def
+			c2x c1x lt c2y c1y ge and{/theta theta 180 sub def}if
+			c2x c1x lt c2y c1y lt and{/theta theta 180 add def}if
+		}ifelse
+		gsave
+		clippath
+		c1x c1y translate
+		theta rotate
+		-90 rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax xdf
+		/xMax xdf
+		/yMin xdf
+		/xMin xdf
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			/max{2 copy gt{pop}{exch pop}ifelse}bdf
+			/min{2 copy lt{pop}{exch pop}ifelse}bdf
+			rampdict begin
+			40 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			c1x c1y translate
+			theta rotate
+			-90 rotate
+			/c2y c1x c2x sub dup mul c1y c2y sub dup mul add sqrt def
+			/c1y 0 def
+			/c1x 0 def
+			/c2x 0 def
+			ext0
+			{
+				0 getrampcolor
+				c2y r2 add r1 sub 0.0001 lt
+				{
+					c1x c1y r1 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2y r1 add r2 le
+					{
+						c1x c1y r1 0 360 arc
+						fill
+					}
+					{
+						c2x c2y r2 0 360 arc fill
+						r1 r2 eq
+						{
+							/p1x r1 neg def
+							/p1y c1y def
+							/p2x r1 def
+							/p2y c1y def
+							p1x p1y moveto p2x p2y lineto p2x yMin lineto p1x yMin lineto
+							fill
+						}{
+							/AA r2 r1 sub c2y div def
+							AA -1 eq
+							{/theta 89.99 def}
+							{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+							ifelse
+							/SS1 90 theta add dup sin exch cos div def
+							/p1x r1 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+							/p1y p1x SS1 div neg def
+							/SS2 90 theta sub dup sin exch cos div def
+							/p2x r1 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+							/p2y p2x SS2 div neg def
+							r1 r2 gt
+							{
+								/L1maxX p1x yMin p1y sub SS1 div add def
+								/L2maxX p2x yMin p2y sub SS2 div add def
+							}{
+								/L1maxX 0 def
+								/L2maxX 0 def
+							}ifelse
+							p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+							L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+							fill
+						}ifelse
+					}ifelse
+				}ifelse
+			}if
+		c1x c2x sub dup mul
+		c1y c2y sub dup mul
+		add 0.5 exp
+		0 dtransform
+		dup mul exch dup mul add 0.5 exp 72 div
+		0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		1 index 1 index lt{exch}if pop
+		/hires xdf
+		hires mul
+		/numpix xdf
+		/numsteps NumSamples def
+		/rampIndxInc 1 def
+		/subsampling false def
+		numpix 0 ne
+		{
+			NumSamples numpix div 0.5 gt
+			{
+				/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+				/rampIndxInc NumSamples 1 sub numsteps div def
+				/subsampling true def
+			}if
+		}if
+		/xInc c2x c1x sub numsteps div def
+		/yInc c2y c1y sub numsteps div def
+		/rInc r2 r1 sub numsteps div def
+		/cx c1x def
+		/cy c1y def
+		/radius r1 def
+		np
+		xInc 0 eq yInc 0 eq rInc 0 eq and and
+		{
+			0 getrampcolor
+			cx cy radius 0 360 arc
+			stroke
+			NumSamples 1 sub getrampcolor
+			cx cy radius 72 hires div add 0 360 arc
+			0 setlinewidth
+			stroke
+		}{
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				cx cy radius 0 360 arc
+				/cx cx xInc add def
+				/cy cy yInc add def
+				/radius radius rInc add def
+				cx cy radius 360 0 arcn
+				eofill
+				rampIndxInc add
+			}repeat
+			pop
+		}ifelse
+		ext1
+		{
+			c2y r2 add r1 lt
+			{
+				c2x c2y r2 0 360 arc
+				fill
+			}{
+				c2y r1 add r2 sub 0.0001 le
+				{
+					c2x c2y r2 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2x c2y r2 0 360 arc fill
+					r1 r2 eq
+					{
+						/p1x r2 neg def
+						/p1y c2y def
+						/p2x r2 def
+						/p2y c2y def
+						p1x p1y moveto p2x p2y lineto p2x yMax lineto p1x yMax lineto
+						fill
+					}{
+						/AA r2 r1 sub c2y div def
+						AA -1 eq
+						{/theta 89.99 def}
+						{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+						ifelse
+						/SS1 90 theta add dup sin exch cos div def
+						/p1x r2 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+						/p1y c2y p1x SS1 div sub def
+						/SS2 90 theta sub dup sin exch cos div def
+						/p2x r2 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+						/p2y c2y p2x SS2 div sub def
+						r1 r2 lt
+						{
+							/L1maxX p1x yMax p1y sub SS1 div add def
+							/L2maxX p2x yMax p2y sub SS2 div add def
+						}{
+							/L1maxX 0 def
+							/L2maxX 0 def
+						}ifelse
+						p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+						L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+						fill
+					}ifelse
+				}ifelse
+			}ifelse
+		}if
+		grestore
+		grestore
+		end
+		end
+		end
+		}ifelse
+	}bdf
+	/GenStrips
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/y2 xdf
+		/x2 xdf
+		/y1 xdf
+		/x1 xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		x1 x2 eq
+		{
+			y1 y2 lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope y2 y1 sub x2 x1 sub div def
+			/theta slope 1 atan def
+			x2 x1 lt y2 y1 ge and{/theta theta 180 sub def}if
+			x2 x1 lt y2 y1 lt and{/theta theta 180 add def}if
+		}
+		ifelse
+		gsave
+		clippath
+		x1 y1 translate
+		theta rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax exch def
+		/xMax exch def
+		/yMin exch def
+		/xMin exch def
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			rampdict begin
+			20 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			x1 y1 translate
+			theta rotate
+			/xStart 0 def
+			/xEnd x2 x1 sub dup mul y2 y1 sub dup mul add 0.5 exp def
+			/ySpan yMax yMin sub def
+			/numsteps NumSamples def
+			/rampIndxInc 1 def
+			/subsampling false def
+			xStart 0 transform
+			xEnd 0 transform
+			3 -1 roll
+			sub dup mul
+			3 1 roll
+			sub dup mul
+			add 0.5 exp 72 div
+			0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			1 index 1 index lt{exch}if pop
+			mul
+			/numpix xdf
+			numpix 0 ne
+			{
+				NumSamples numpix div 0.5 gt
+				{
+					/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+					/rampIndxInc NumSamples 1 sub numsteps div def
+					/subsampling true def
+				}if
+			}if
+			ext0
+			{
+				0 getrampcolor
+				xMin xStart lt
+				{
+					xMin yMin xMin neg ySpan rectfill
+				}if
+			}if
+			/xInc xEnd xStart sub numsteps div def
+			/x xStart def
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				x yMin xInc ySpan rectfill
+				/x x xInc add def
+				rampIndxInc add
+			}repeat
+			pop
+			ext1{
+				xMax xEnd gt
+				{
+					xEnd yMin xMax xEnd sub ySpan rectfill
+				}if
+			}if
+			grestore
+			grestore
+			end
+			end
+			end
+		}ifelse
+	}bdf
+}def
+/pt
+{
+	end
+}def
+/dt{
+}def
+/pgsv{
+	//Adobe_AGM_Core/AGMCORE_save save put
+}def
+/pgrs{
+	//Adobe_AGM_Core/AGMCORE_save get restore
+}def
+systemdict/findcolorrendering known{
+	/findcolorrendering systemdict/findcolorrendering get def
+}if
+systemdict/setcolorrendering known{
+	/setcolorrendering systemdict/setcolorrendering get def
+}if
+/test_cmyk_color_plate
+{
+	gsave
+	setcmykcolor currentgray 1 ne
+	grestore
+}def
+/inRip_spot_has_ink
+{
+	dup//Adobe_AGM_Core/AGMCORE_name xddf
+	convert_spot_to_process not
+}def
+/map255_to_range
+{
+	1 index sub
+	3 -1 roll 255 div mul add
+}def
+/set_csa_crd
+{
+	/sep_colorspace_dict null AGMCORE_gput
+	begin
+		CSA get_csa_by_name setcolorspace_opt
+		set_crd
+	end
+}
+def
+/map_csa
+{
+	currentdict/MappedCSA known{MappedCSA null ne}{false}ifelse
+	{pop}{get_csa_by_name/MappedCSA xdf}ifelse
+}def
+/setsepcolor
+{
+	/sep_colorspace_dict AGMCORE_gget begin
+		dup/sep_tint exch AGMCORE_gput
+		TintProc
+	end
+}def
+/setdevicencolor
+{
+	/devicen_colorspace_dict AGMCORE_gget begin
+		Names length copy
+		Names length 1 sub -1 0
+		{
+			/devicen_tints AGMCORE_gget 3 1 roll xpt
+		}for
+		TintProc
+	end
+}def
+/sep_colorspace_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	currentdict/Components known{
+		Components aload pop 
+		TintMethod/Lab eq{
+			2{AGMCORE_tmp mul NComponents 1 roll}repeat
+			LMax sub AGMCORE_tmp mul LMax add NComponents 1 roll
+		}{
+			TintMethod/Subtractive eq{
+				NComponents{
+					AGMCORE_tmp mul NComponents 1 roll
+				}repeat
+			}{
+				NComponents{
+					1 sub AGMCORE_tmp mul 1 add NComponents 1 roll
+				}repeat
+			}ifelse
+		}ifelse
+	}{
+		ColorLookup AGMCORE_tmp ColorLookup length 1 sub mul round cvi get
+		aload pop
+	}ifelse
+	end
+}def
+/sep_colorspace_gray_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	GrayLookup AGMCORE_tmp GrayLookup length 1 sub mul round cvi get
+	end
+}def
+/sep_proc_name
+{
+	dup 0 get 
+	dup/DeviceRGB eq exch/DeviceCMYK eq or level2 not and has_color not and{
+		pop[/DeviceGray]
+		/sep_colorspace_gray_proc
+	}{
+		/sep_colorspace_proc
+	}ifelse
+}def
+/setsepcolorspace
+{
+	current_spot_alias{
+		dup begin
+			Name map_alias{
+				exch pop
+			}if
+		end
+	}if
+	dup/sep_colorspace_dict exch AGMCORE_gput
+	begin
+	CSA map_csa
+	/AGMCORE_sep_special Name dup()eq exch(All)eq or store
+	AGMCORE_avoid_L2_sep_space{
+		[/Indexed MappedCSA sep_proc_name 255 exch 
+			{255 div}/exec cvx 3 -1 roll[4 1 roll load/exec cvx]cvx 
+		]setcolorspace_opt
+		/TintProc{
+			255 mul round cvi setcolor
+		}bdf
+	}{
+		MappedCSA 0 get/DeviceCMYK eq 
+		currentdict/Components known and 
+		AGMCORE_sep_special not and{
+			/TintProc[
+				Components aload pop Name findcmykcustomcolor 
+				/exch cvx/setcustomcolor cvx
+			]cvx bdf
+		}{
+ 			AGMCORE_host_sep Name(All)eq and{
+ 				/TintProc{
+					1 exch sub setseparationgray 
+				}bdf
+ 			}{
+				AGMCORE_in_rip_sep MappedCSA 0 get/DeviceCMYK eq and 
+				AGMCORE_host_sep or
+				Name()eq and{
+					/TintProc[
+						MappedCSA sep_proc_name exch 0 get/DeviceCMYK eq{
+							cvx/setcmykcolor cvx
+						}{
+							cvx/setgray cvx
+						}ifelse
+					]cvx bdf
+				}{
+					AGMCORE_producing_seps MappedCSA 0 get dup/DeviceCMYK eq exch/DeviceGray eq or and AGMCORE_sep_special not and{
+	 					/TintProc[
+							/dup cvx
+							MappedCSA sep_proc_name cvx exch
+							0 get/DeviceGray eq{
+								1/exch cvx/sub cvx 0 0 0 4 -1/roll cvx
+							}if
+							/Name cvx/findcmykcustomcolor cvx/exch cvx
+							AGMCORE_host_sep{
+								AGMCORE_is_cmyk_sep
+								/Name cvx 
+								/AGMCORE_IsSeparationAProcessColor load/exec cvx
+								/not cvx/and cvx 
+							}{
+								Name inRip_spot_has_ink not
+							}ifelse
+							[
+		 						/pop cvx 1
+							]cvx/if cvx
+							/setcustomcolor cvx
+						]cvx bdf
+ 					}{
+						/TintProc{setcolor}bdf
+						[/Separation Name MappedCSA sep_proc_name load]setcolorspace_opt
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	}ifelse
+	set_crd
+	setsepcolor
+	end
+}def
+/additive_blend
+{
+ 	3 dict begin
+ 	/numarrays xdf
+ 	/numcolors xdf
+ 	0 1 numcolors 1 sub
+ 		{
+ 		/c1 xdf
+ 		1
+ 		0 1 numarrays 1 sub
+ 			{
+			1 exch add/index cvx
+ 			c1/get cvx/mul cvx
+ 			}for
+ 		numarrays 1 add 1/roll cvx 
+ 		}for
+ 	numarrays[/pop cvx]cvx/repeat cvx
+ 	end
+}def
+/subtractive_blend
+{
+	3 dict begin
+	/numarrays xdf
+	/numcolors xdf
+	0 1 numcolors 1 sub
+		{
+		/c1 xdf
+		1 1
+		0 1 numarrays 1 sub
+			{
+			1 3 3 -1 roll add/index cvx 
+			c1/get cvx/sub cvx/mul cvx
+			}for
+		/sub cvx
+		numarrays 1 add 1/roll cvx
+		}for
+	numarrays[/pop cvx]cvx/repeat cvx
+	end
+}def
+/exec_tint_transform
+{
+	/TintProc[
+		/TintTransform cvx/setcolor cvx
+	]cvx bdf
+	MappedCSA setcolorspace_opt
+}bdf
+/devn_makecustomcolor
+{
+	2 dict begin
+	/names_index xdf
+	/Names xdf
+	1 1 1 1 Names names_index get findcmykcustomcolor
+	/devicen_tints AGMCORE_gget names_index get setcustomcolor
+	Names length{pop}repeat
+	end
+}bdf
+/setdevicencolorspace
+{
+	dup/AliasedColorants known{false}{true}ifelse 
+	current_spot_alias and{
+		7 dict begin
+		/names_index 0 def
+		dup/names_len exch/Names get length def
+		/new_names names_len array def
+		/new_LookupTables names_len array def
+		/alias_cnt 0 def
+		dup/Names get
+		{
+			dup map_alias{
+				exch pop
+				dup/ColorLookup known{
+					dup begin
+					new_LookupTables names_index ColorLookup put
+					end
+				}{
+					dup/Components known{
+						dup begin
+						new_LookupTables names_index Components put
+						end
+					}{
+						dup begin
+						new_LookupTables names_index[null null null null]put
+						end
+					}ifelse
+				}ifelse
+				new_names names_index 3 -1 roll/Name get put
+				/alias_cnt alias_cnt 1 add def 
+			}{
+				/name xdf				
+				new_names names_index name put
+				dup/LookupTables known{
+					dup begin
+					new_LookupTables names_index LookupTables names_index get put
+					end
+				}{
+					dup begin
+					new_LookupTables names_index[null null null null]put
+					end
+				}ifelse
+			}ifelse
+			/names_index names_index 1 add def 
+		}forall
+		alias_cnt 0 gt{
+			/AliasedColorants true def
+			/lut_entry_len new_LookupTables 0 get dup length 256 ge{0 get length}{length}ifelse def
+			0 1 names_len 1 sub{
+				/names_index xdf
+				new_LookupTables names_index get dup length 256 ge{0 get length}{length}ifelse lut_entry_len ne{
+					/AliasedColorants false def
+					exit
+				}{
+					new_LookupTables names_index get 0 get null eq{
+						dup/Names get names_index get/name xdf
+						name(Cyan)eq name(Magenta)eq name(Yellow)eq name(Black)eq
+						or or or not{
+							/AliasedColorants false def
+							exit
+						}if
+					}if
+				}ifelse
+			}for
+			lut_entry_len 1 eq{
+				/AliasedColorants false def
+			}if
+			AliasedColorants{
+				dup begin
+				/Names new_names def
+				/LookupTables new_LookupTables def
+				/AliasedColorants true def
+				/NComponents lut_entry_len def
+				/TintMethod NComponents 4 eq{/Subtractive}{/Additive}ifelse def
+				/MappedCSA TintMethod/Additive eq{/DeviceRGB}{/DeviceCMYK}ifelse def
+				currentdict/TTTablesIdx known not{
+					/TTTablesIdx -1 def
+				}if
+				end
+			}if
+		}if
+		end
+	}if
+	dup/devicen_colorspace_dict exch AGMCORE_gput
+	begin
+	currentdict/AliasedColorants known{
+		AliasedColorants
+	}{
+		false
+	}ifelse
+	dup not{
+		CSA map_csa
+	}if
+	/TintTransform load type/nulltype eq or{
+		/TintTransform[
+			0 1 Names length 1 sub
+				{
+				/TTTablesIdx TTTablesIdx 1 add def
+				dup LookupTables exch get dup 0 get null eq
+					{
+					1 index
+					Names exch get
+					dup(Cyan)eq
+						{
+						pop exch
+						LookupTables length exch sub
+						/index cvx
+						0 0 0
+						}
+						{
+						dup(Magenta)eq
+							{
+							pop exch
+							LookupTables length exch sub
+							/index cvx
+							0/exch cvx 0 0
+							}{
+							(Yellow)eq
+								{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 3 -1/roll cvx 0
+								}{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 0 4 -1/roll cvx
+								}ifelse
+							}ifelse
+						}ifelse
+					5 -1/roll cvx/astore cvx
+					}{
+					dup length 1 sub
+					LookupTables length 4 -1 roll sub 1 add
+					/index cvx/mul cvx/round cvx/cvi cvx/get cvx
+					}ifelse
+					Names length TTTablesIdx add 1 add 1/roll cvx
+				}for
+			Names length[/pop cvx]cvx/repeat cvx
+			NComponents Names length
+ 			TintMethod/Subtractive eq
+ 				{
+ 				subtractive_blend
+ 				}{
+ 				additive_blend
+ 				}ifelse
+		]cvx bdf
+	}if
+	AGMCORE_host_sep{
+		Names convert_to_process{
+			exec_tint_transform
+		}
+		{	
+			currentdict/AliasedColorants known{
+				AliasedColorants not
+			}{
+				false
+			}ifelse
+			5 dict begin
+			/AvoidAliasedColorants xdf
+			/painted? false def
+			/names_index 0 def
+			/names_len Names length def
+			AvoidAliasedColorants{
+				/currentspotalias current_spot_alias def
+				false set_spot_alias
+			}if
+			Names{
+				AGMCORE_is_cmyk_sep{
+					dup(Cyan)eq AGMCORE_cyan_plate and exch
+					dup(Magenta)eq AGMCORE_magenta_plate and exch
+					dup(Yellow)eq AGMCORE_yellow_plate and exch
+					(Black)eq AGMCORE_black_plate and or or or{
+						/devicen_colorspace_dict AGMCORE_gget/TintProc[
+							Names names_index/devn_makecustomcolor cvx
+						]cvx ddf
+						/painted? true def
+					}if
+					painted?{exit}if
+				}{
+					0 0 0 0 5 -1 roll findcmykcustomcolor 1 setcustomcolor currentgray 0 eq{
+					/devicen_colorspace_dict AGMCORE_gget/TintProc[
+						Names names_index/devn_makecustomcolor cvx
+					]cvx ddf
+					/painted? true def
+					exit
+					}if
+				}ifelse
+				/names_index names_index 1 add def
+			}forall
+			AvoidAliasedColorants{
+				currentspotalias set_spot_alias
+			}if
+			painted?{
+				/devicen_colorspace_dict AGMCORE_gget/names_index names_index put
+			}{
+				/devicen_colorspace_dict AGMCORE_gget/TintProc[
+					names_len[/pop cvx]cvx/repeat cvx 1/setseparationgray cvx
+ 					0 0 0 0/setcmykcolor cvx
+				]cvx ddf
+			}ifelse
+			end
+		}ifelse
+	}
+	{
+		AGMCORE_in_rip_sep{
+			Names convert_to_process not
+		}{
+			level3
+		}ifelse
+		{
+			[/DeviceN Names MappedCSA/TintTransform load]setcolorspace_opt
+			/TintProc level3 not AGMCORE_in_rip_sep and{
+				[
+					Names/length cvx[/pop cvx]cvx/repeat cvx
+				]cvx bdf
+			}{
+				{setcolor}bdf
+			}ifelse
+		}{
+			exec_tint_transform
+		}ifelse
+	}ifelse
+	set_crd
+	/AliasedColorants false def
+	end
+}def
+/setindexedcolorspace
+{
+	dup/indexed_colorspace_dict exch AGMCORE_gput
+	begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				currentdict devncs
+			}{
+				1 currentdict sepcs
+			}ifelse
+			AGMCORE_host_sep{
+				4 dict begin
+				/compCnt/Names where{pop Names length}{1}ifelse def
+				/NewLookup HiVal 1 add string def
+				0 1 HiVal{
+					/tableIndex xdf
+					Lookup dup type/stringtype eq{
+						compCnt tableIndex map_index
+					}{
+						exec
+					}ifelse
+					/Names where{
+						pop setdevicencolor
+					}{
+						setsepcolor
+					}ifelse
+					currentgray
+					tableIndex exch
+					255 mul cvi 
+					NewLookup 3 1 roll put
+				}for
+				[/Indexed currentcolorspace HiVal NewLookup]setcolorspace_opt
+				end
+			}{
+				level3
+				{
+					currentdict/Names known{
+						[/Indexed[/DeviceN Names MappedCSA/TintTransform load]HiVal Lookup]setcolorspace_opt
+					}{
+						[/Indexed[/Separation Name MappedCSA sep_proc_name load]HiVal Lookup]setcolorspace_opt
+					}ifelse
+				}{
+				[/Indexed MappedCSA HiVal
+					[
+					currentdict/Names known{
+						Lookup dup type/stringtype eq
+							{/exch cvx CSDBase/CSD get_res/Names get length dup/mul cvx exch/getinterval cvx{255 div}/forall cvx}
+							{/exec cvx}ifelse
+							/TintTransform load/exec cvx
+					}{
+						Lookup dup type/stringtype eq
+							{/exch cvx/get cvx 255/div cvx}
+							{/exec cvx}ifelse
+							CSDBase/CSD get_res/MappedCSA get sep_proc_name exch pop/load cvx/exec cvx
+					}ifelse
+					]cvx
+				]setcolorspace_opt
+				}ifelse
+			}ifelse
+			end
+			set_crd
+		}
+		{
+			CSA map_csa
+			AGMCORE_host_sep level2 not and{
+				0 0 0 0 setcmykcolor
+			}{
+				[/Indexed MappedCSA 
+				level2 not has_color not and{
+					dup 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or{
+						pop[/DeviceGray]
+					}if
+					HiVal GrayLookup
+				}{
+					HiVal 
+					currentdict/RangeArray known{
+						{
+							/indexed_colorspace_dict AGMCORE_gget begin
+							Lookup exch 
+							dup HiVal gt{
+								pop HiVal
+							}if
+							NComponents mul NComponents getinterval{}forall
+							NComponents 1 sub -1 0{
+								RangeArray exch 2 mul 2 getinterval aload pop map255_to_range
+								NComponents 1 roll
+							}for
+							end
+						}bind
+					}{
+						Lookup
+					}ifelse
+				}ifelse
+				]setcolorspace_opt
+				set_crd
+			}ifelse
+		}ifelse
+	end
+}def
+/setindexedcolor
+{
+	AGMCORE_host_sep{
+		/indexed_colorspace_dict AGMCORE_gget
+		begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				map_indexed_devn
+				devn
+			}
+			{
+				Lookup 1 3 -1 roll map_index
+				sep
+			}ifelse
+			end
+		}{
+			Lookup MappedCSA/DeviceCMYK eq{4}{1}ifelse 3 -1 roll
+			map_index
+			MappedCSA/DeviceCMYK eq{setcmykcolor}{setgray}ifelse
+		}ifelse
+		end
+	}{
+		level3 not AGMCORE_in_rip_sep and/indexed_colorspace_dict AGMCORE_gget/CSDBase known and{
+			/indexed_colorspace_dict AGMCORE_gget/CSDBase get/CSD get_res begin
+			map_indexed_devn
+			devn
+			end
+		}
+		{
+			setcolor
+		}ifelse
+	}ifelse
+}def
+/ignoreimagedata
+{
+	currentoverprint not{
+		gsave
+		dup clonedict begin
+		1 setgray
+		/Decode[0 1]def
+		/DataSource<FF>def
+		/MultipleDataSources false def
+		/BitsPerComponent 8 def
+		currentdict end
+		systemdict/image gx
+		grestore
+		}if
+	consumeimagedata
+}def
+/add_res
+{
+	dup/CSD eq{
+		pop 
+		//Adobe_AGM_Core begin
+		/AGMCORE_CSD_cache load 3 1 roll put
+		end
+	}{
+		defineresource pop
+	}ifelse
+}def
+/del_res
+{
+	{
+		aload pop exch
+		dup/CSD eq{
+			pop 
+			{//Adobe_AGM_Core/AGMCORE_CSD_cache get exch undef}forall
+		}{
+			exch
+			{1 index undefineresource}forall
+			pop
+		}ifelse
+	}forall
+}def
+/get_res
+{
+	dup/CSD eq{
+		pop
+		dup type dup/nametype eq exch/stringtype eq or{
+			AGMCORE_CSD_cache exch get
+		}if
+	}{
+		findresource
+	}ifelse
+}def
+/get_csa_by_name
+{
+	dup type dup/nametype eq exch/stringtype eq or{
+		/CSA get_res
+	}if
+}def
+/paintproc_buf_init
+{
+	/count get 0 0 put
+}def
+/paintproc_buf_next
+{
+	dup/count get dup 0 get
+	dup 3 1 roll
+	1 add 0 xpt
+	get				
+}def
+/cachepaintproc_compress
+{
+	5 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	/string_size 16000 def
+	/readbuffer string_size string def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	/LZWFilter 
+	{
+		exch
+		dup length 0 eq{
+			pop
+		}{
+			ppdict dup length 1 sub 3 -1 roll put
+		}ifelse
+		{string_size}{0}ifelse string
+	}/LZWEncode filter def
+	{		
+		ReadFilter readbuffer readstring
+		exch LZWFilter exch writestring
+		not{exit}if
+	}loop
+	LZWFilter closefile
+	ppdict				
+	end
+}def
+/cachepaintproc
+{
+	2 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	{
+		ReadFilter 16000 string readstring exch
+		ppdict dup length 1 sub 3 -1 roll put
+		not{exit}if
+	}loop
+	ppdict dup dup length 1 sub()put					
+	end	
+}def
+/make_pattern
+{
+	exch clonedict exch
+	dup matrix currentmatrix matrix concatmatrix 0 0 3 2 roll itransform
+	exch 3 index/XStep get 1 index exch 2 copy div cvi mul sub sub
+	exch 3 index/YStep get 1 index exch 2 copy div cvi mul sub sub
+	matrix translate exch matrix concatmatrix
+			 1 index begin
+		BBox 0 get XStep div cvi XStep mul/xshift exch neg def
+		BBox 1 get YStep div cvi YStep mul/yshift exch neg def
+		BBox 0 get xshift add
+		BBox 1 get yshift add
+		BBox 2 get xshift add
+		BBox 3 get yshift add
+		4 array astore
+		/BBox exch def
+		[xshift yshift/translate load null/exec load]dup
+		3/PaintProc load put cvx/PaintProc exch def
+		end
+	gsave 0 setgray
+	makepattern
+	grestore
+}def
+/set_pattern
+{
+	dup/PatternType get 1 eq{
+		dup/PaintType get 1 eq{
+			currentoverprint sop[/DeviceGray]setcolorspace 0 setgray
+		}if
+	}if
+	setpattern
+}def
+/setcolorspace_opt
+{
+	dup currentcolorspace eq{pop}{setcolorspace}ifelse
+}def
+/updatecolorrendering
+{
+	currentcolorrendering/RenderingIntent known{
+		currentcolorrendering/RenderingIntent get
+	}
+	{
+		Intent/AbsoluteColorimetric eq 
+		{
+			/absolute_colorimetric_crd AGMCORE_gget dup null eq
+		}
+		{
+			Intent/RelativeColorimetric eq
+			{
+				/relative_colorimetric_crd AGMCORE_gget dup null eq
+			}
+			{
+				Intent/Saturation eq
+				{
+					/saturation_crd AGMCORE_gget dup null eq
+				}
+				{
+					/perceptual_crd AGMCORE_gget dup null eq
+				}ifelse
+			}ifelse
+		}ifelse
+		{
+			pop null	
+		}
+		{
+			/RenderingIntent known{null}{Intent}ifelse
+		}ifelse
+	}ifelse
+	Intent ne{
+		Intent/ColorRendering{findresource}stopped
+		{
+			pop pop systemdict/findcolorrendering known
+			{
+ 				Intent findcolorrendering
+ 				{
+ 					/ColorRendering findresource true exch
+ 				}
+ 				{
+ 					/ColorRendering findresource
+					product(Xerox Phaser 5400)ne
+					exch
+ 				}ifelse
+				dup Intent/AbsoluteColorimetric eq 
+				{
+					/absolute_colorimetric_crd exch AGMCORE_gput
+				}
+				{
+					Intent/RelativeColorimetric eq
+					{
+						/relative_colorimetric_crd exch AGMCORE_gput
+					}
+					{
+						Intent/Saturation eq
+						{
+							/saturation_crd exch AGMCORE_gput
+						}
+						{
+							Intent/Perceptual eq
+							{
+								/perceptual_crd exch AGMCORE_gput
+							}
+							{
+								pop
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+				1 index{exch}{pop}ifelse
+			}
+			{false}ifelse
+		}
+		{true}ifelse
+		{
+			dup begin
+			currentdict/TransformPQR known{
+				currentdict/TransformPQR get aload pop
+				3{{}eq 3 1 roll}repeat or or
+			}
+			{true}ifelse
+			currentdict/MatrixPQR known{
+				currentdict/MatrixPQR get aload pop
+				1.0 eq 9 1 roll 0.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 1.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 0.0 eq 9 1 roll 1.0 eq
+				and and and and and and and and
+			}
+			{true}ifelse
+			end
+			or
+			{
+				clonedict begin
+				/TransformPQR[
+					{4 -1 roll 3 get dup 3 1 roll sub 5 -1 roll 3 get 3 -1 roll sub div
+					3 -1 roll 3 get 3 -1 roll 3 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 4 get dup 3 1 roll sub 5 -1 roll 4 get 3 -1 roll sub div
+					3 -1 roll 4 get 3 -1 roll 4 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 5 get dup 3 1 roll sub 5 -1 roll 5 get 3 -1 roll sub div
+					3 -1 roll 5 get 3 -1 roll 5 get dup 4 1 roll sub mul add}bind
+				]def
+				/MatrixPQR[0.8951 -0.7502 0.0389 0.2664 1.7135 -0.0685 -0.1614 0.0367 1.0296]def
+				/RangePQR[-0.3227950745 2.3229645538 -1.5003771057 3.5003465881 -0.1369979095 2.136967392]def
+				currentdict end
+			}if
+			setcolorrendering_opt
+		}if		
+	}if
+}def
+/set_crd
+{
+	AGMCORE_host_sep not level2 and{
+		currentdict/ColorRendering known{
+			ColorRendering/ColorRendering{findresource}stopped not{setcolorrendering_opt}if
+		}{
+			currentdict/Intent known{
+				updatecolorrendering
+			}if
+		}ifelse
+		currentcolorspace dup type/arraytype eq
+			{0 get}if
+		/DeviceRGB eq
+			{
+			currentdict/UCR known
+				{/UCR}{/AGMCORE_currentucr}ifelse
+			load setundercolorremoval
+			currentdict/BG known 
+				{/BG}{/AGMCORE_currentbg}ifelse
+			load setblackgeneration
+			}if
+	}if
+}def
+/set_ucrbg
+{
+	dup null eq{pop/AGMCORE_currentbg load}{/Procedure get_res}ifelse setblackgeneration
+	dup null eq{pop/AGMCORE_currentucr load}{/Procedure get_res}ifelse setundercolorremoval
+}def
+/setcolorrendering_opt
+{
+	dup currentcolorrendering eq{
+		pop
+	}{
+		clonedict
+		begin
+			/Intent Intent def
+			currentdict
+		end
+		setcolorrendering
+	}ifelse
+}def
+/cpaint_gcomp
+{
+	convert_to_process//Adobe_AGM_Core/AGMCORE_ConvertToProcess xddf
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get not
+	{
+		(%end_cpaint_gcomp)flushinput
+	}if
+}def
+/cpaint_gsep
+{
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get
+	{	
+		(%end_cpaint_gsep)flushinput
+	}if
+}def
+/cpaint_gend
+{np}def
+/T1_path
+{
+	currentfile token pop currentfile token pop mo
+	{
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 exch rlineto 
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 rlineto
+	}loop
+}def
+/T1_gsave
+	level3
+	{/clipsave}
+	{/gsave}ifelse
+	load def
+/T1_grestore
+	level3
+	{/cliprestore}
+	{/grestore}ifelse 
+	load def
+/set_spot_alias_ary
+{
+	dup inherit_aliases
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry xddf
+}def
+/set_spot_normalization_ary
+{
+	dup inherit_aliases
+	dup length
+	/AGMCORE_SpotAliasAry where{pop AGMCORE_SpotAliasAry length add}if
+	array
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry2 xddf
+	/AGMCORE_SpotAliasAry where{
+		pop
+		AGMCORE_SpotAliasAry2 0 AGMCORE_SpotAliasAry putinterval
+		AGMCORE_SpotAliasAry length
+	}{0}ifelse
+	AGMCORE_SpotAliasAry2 3 1 roll exch putinterval
+	true set_spot_alias
+}def
+/inherit_aliases
+{
+	{dup/Name get map_alias{/CSD put}{pop}ifelse}forall
+}def
+/set_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias 3 -1 roll put
+	}{
+		pop
+	}ifelse
+}def
+/current_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias get
+	}{
+		false
+	}ifelse
+}def
+/map_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		begin
+			/AGMCORE_name xdf
+			false	
+			AGMCORE_SpotAliasAry2{
+				dup/Name get AGMCORE_name eq{
+					/CSD get/CSD get_res
+					exch pop true
+					exit
+				}{
+					pop
+				}ifelse
+			}forall
+		end
+	}{
+		pop false
+	}ifelse
+}bdf
+/spot_alias
+{
+	true set_spot_alias
+	/AGMCORE_&setcustomcolor AGMCORE_key_known not{
+		//Adobe_AGM_Core/AGMCORE_&setcustomcolor/setcustomcolor load put
+	}if
+	/customcolor_tint 1 AGMCORE_gput
+	//Adobe_AGM_Core begin
+	/setcustomcolor
+	{
+		//Adobe_AGM_Core begin
+		dup/customcolor_tint exch AGMCORE_gput
+		1 index aload pop pop 1 eq exch 1 eq and exch 1 eq and exch 1 eq and not
+		current_spot_alias and{1 index 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/sep_colorspace_dict AGMCORE_gget null ne
+			3 1 roll 2 index{
+				exch pop/sep_tint AGMCORE_gget exch
+			}if
+			mark 3 1 roll
+			setsepcolorspace
+			counttomark 0 ne{
+				setsepcolor
+			}if
+			pop
+			not{/sep_tint 1.0 AGMCORE_gput}if
+			pop
+			true set_spot_alias
+		}{
+			AGMCORE_&setcustomcolor
+		}ifelse
+		end
+	}bdf
+	end
+}def
+/begin_feature
+{
+	Adobe_AGM_Core/AGMCORE_feature_dictCount countdictstack put
+	count Adobe_AGM_Core/AGMCORE_feature_opCount 3 -1 roll put
+	{Adobe_AGM_Core/AGMCORE_feature_ctm matrix currentmatrix put}if
+}def
+/end_feature
+{
+	2 dict begin
+	/spd/setpagedevice load def
+	/setpagedevice{get_gstate spd set_gstate}def
+	stopped{$error/newerror false put}if
+	end
+	count Adobe_AGM_Core/AGMCORE_feature_opCount get sub dup 0 gt{{pop}repeat}{pop}ifelse
+	countdictstack Adobe_AGM_Core/AGMCORE_feature_dictCount get sub dup 0 gt{{end}repeat}{pop}ifelse
+	{Adobe_AGM_Core/AGMCORE_feature_ctm get setmatrix}if
+}def
+/set_negative
+{
+	//Adobe_AGM_Core begin
+	/AGMCORE_inverting exch def
+	level2{
+		currentpagedevice/NegativePrint known AGMCORE_distilling not and{
+			currentpagedevice/NegativePrint get//Adobe_AGM_Core/AGMCORE_inverting get ne{
+				true begin_feature true{
+						<</NegativePrint//Adobe_AGM_Core/AGMCORE_inverting get>>setpagedevice
+				}end_feature
+			}if
+			/AGMCORE_inverting false def
+		}if
+	}if
+	AGMCORE_inverting{
+		[{1 exch sub}/exec load dup currenttransfer exch]cvx bind settransfer
+ 		AGMCORE_distilling{
+ 			erasepage
+ 		}{
+ 			gsave np clippath 1/setseparationgray where{pop setseparationgray}{setgray}ifelse
+ 			/AGMIRS_&fill where{pop AGMIRS_&fill}{fill}ifelse grestore
+ 		}ifelse
+	}if
+	end
+}def
+/lw_save_restore_override{
+	/md where{
+		pop
+		md begin
+		initializepage
+		/initializepage{}def
+		/pmSVsetup{}def
+		/endp{}def
+		/pse{}def
+		/psb{}def
+		/orig_showpage where
+			{pop}
+			{/orig_showpage/showpage load def}
+		ifelse
+		/showpage{orig_showpage gR}def
+		end
+	}if
+}def
+/pscript_showpage_override{
+	/NTPSOct95 where
+	{
+		begin
+		showpage
+		save
+		/showpage/restore load def
+		/restore{exch pop}def
+		end
+	}if
+}def
+/driver_media_override
+{
+	/md where{
+		pop
+		md/initializepage known{
+			md/initializepage{}put
+		}if
+		md/rC known{
+			md/rC{4{pop}repeat}put
+		}if
+	}if
+	/mysetup where{
+		/mysetup[1 0 0 1 0 0]put
+	}if
+	Adobe_AGM_Core/AGMCORE_Default_CTM matrix currentmatrix put
+	level2
+		{Adobe_AGM_Core/AGMCORE_Default_PageSize currentpagedevice/PageSize get put}if
+}def
+/driver_check_media_override
+{
+ 	/PrepsDict where
+ 		{pop}
+		{
+		Adobe_AGM_Core/AGMCORE_Default_CTM get matrix currentmatrix ne
+		Adobe_AGM_Core/AGMCORE_Default_PageSize get type/arraytype eq
+			{
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 0 get currentpagedevice/PageSize get 0 get eq and
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 1 get currentpagedevice/PageSize get 1 get eq and
+			}if
+			{
+			Adobe_AGM_Core/AGMCORE_Default_CTM get setmatrix
+			}if
+		}ifelse
+}def
+AGMCORE_err_strings begin
+	/AGMCORE_bad_environ(Environment not satisfactory for this job. Ensure that the PPD is correct or that the PostScript level requested is supported by this printer. )def
+	/AGMCORE_color_space_onhost_seps(This job contains colors that will not separate with on-host methods. )def
+	/AGMCORE_invalid_color_space(This job contains an invalid color space. )def
+end
+/set_def_ht
+{AGMCORE_def_ht sethalftone}def
+/set_def_flat
+{AGMCORE_Default_flatness setflat}def
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_CoolType_Core 2.31 0
%%Copyright: Copyright 1997-2006 Adobe Systems Incorporated. All Rights Reserved.
%%Version: 2.31 0
10 dict begin
/Adobe_CoolType_Passthru currentdict def
/Adobe_CoolType_Core_Defined userdict/Adobe_CoolType_Core known def
Adobe_CoolType_Core_Defined
	{/Adobe_CoolType_Core userdict/Adobe_CoolType_Core get def}
if
userdict/Adobe_CoolType_Core 70 dict dup begin put
/Adobe_CoolType_Version 2.31 def
/Level2?
	systemdict/languagelevel known dup
		{pop systemdict/languagelevel get 2 ge}
	if def
Level2? not
	{
	/currentglobal false def
	/setglobal/pop load def
	/gcheck{pop false}bind def
	/currentpacking false def
	/setpacking/pop load def
	/SharedFontDirectory 0 dict def
	}
if
currentpacking
true setpacking
currentglobal false setglobal
userdict/Adobe_CoolType_Data 2 copy known not
	{2 copy 10 dict put}
if
get
	 begin
	/@opStackCountByLevel 32 dict def
	/@opStackLevel 0 def
	/@dictStackCountByLevel 32 dict def
	/@dictStackLevel 0 def
	 end
setglobal
currentglobal true setglobal
userdict/Adobe_CoolType_GVMFonts known not
	{userdict/Adobe_CoolType_GVMFonts 10 dict put}
if
setglobal
currentglobal false setglobal
userdict/Adobe_CoolType_LVMFonts known not
	{userdict/Adobe_CoolType_LVMFonts 10 dict put}
if
setglobal
/ct_VMDictPut
	{
	dup gcheck{Adobe_CoolType_GVMFonts}{Adobe_CoolType_LVMFonts}ifelse
	3 1 roll put
	}bind def
/ct_VMDictUndef
	{
	dup Adobe_CoolType_GVMFonts exch known
		{Adobe_CoolType_GVMFonts exch undef}
		{
			dup Adobe_CoolType_LVMFonts exch known
			{Adobe_CoolType_LVMFonts exch undef}
			{pop}
			ifelse
		}ifelse
	}bind def
/ct_str1 1 string def
/ct_xshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_yshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0 exch
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_xyshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			{_ct_na _ct_i 1 add get}stopped 
			{pop pop pop}
			{
				_ct_x _ct_y moveto
				rmoveto
			}
			ifelse
		}
		ifelse
		/_ct_i _ct_i 2 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/xsh{{@xshow}stopped{Adobe_CoolType_Data begin ct_xshow end}if}bind def
/ysh{{@yshow}stopped{Adobe_CoolType_Data begin ct_yshow end}if}bind def
/xysh{{@xyshow}stopped{Adobe_CoolType_Data begin ct_xyshow end}if}bind def
currentglobal true setglobal
/ct_T3Defs
{
/BuildChar
{
	1 index/Encoding get exch get
	1 index/BuildGlyph get exec
}bind def
/BuildGlyph
{
	exch begin
	GlyphProcs exch get exec
	end
}bind def
}bind def
setglobal
/@_SaveStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@vmState currentglobal def false setglobal
		@opStackCountByLevel
		@opStackLevel
		2 copy known not
			{
			2 copy
			3 dict dup/args
			7 index
			5 add array put
			put get
			}
			{
			get dup/args get dup length 3 index lt
				{
				dup length 5 add array exch
				1 index exch 0 exch putinterval
				1 index exch/args exch put
				}
				{pop}
			ifelse
			}
		ifelse
			begin
			count 1 sub
			1 index lt
				{pop count}
			if
			dup/argCount exch def
			dup 0 gt
				{
				args exch 0 exch getinterval 
			astore pop
				}
				{pop}
			ifelse
			count
			/restCount exch def
			end
		/@opStackLevel @opStackLevel 1 add def
		countdictstack 1 sub
		@dictStackCountByLevel exch @dictStackLevel exch put
		/@dictStackLevel @dictStackLevel 1 add def
		@vmState setglobal
		end
	}bind def
/@_RestoreStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		@opStackCountByLevel @opStackLevel get
			begin
			count restCount sub dup 0 gt
				{{pop}repeat}
				{pop}
			ifelse
			args 0 argCount getinterval{}forall
			end
		/@dictStackLevel @dictStackLevel 1 sub def
		@dictStackCountByLevel @dictStackLevel get
		end
	countdictstack exch sub dup 0 gt
		{{end}repeat}
		{pop}
	ifelse
	}bind def
/@_PopStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		/@dictStackLevel @dictStackLevel 1 sub def
		end
	}bind def
/@Raise
	{
	exch cvx exch errordict exch get exec
	stop
	}bind def
/@ReRaise
	{
	cvx $error/errorname get errordict exch get exec
	stop
	}bind def
/@Stopped
	{
	0 @#Stopped
	}bind def
/@#Stopped
	{
	@_SaveStackLevels
	stopped
		{@_RestoreStackLevels true}
		{@_PopStackLevels false}
	ifelse
	}bind def
/@Arg
	{
	Adobe_CoolType_Data
		begin
		@opStackCountByLevel @opStackLevel 1 sub get
		begin
		args exch
		argCount 1 sub exch sub get
		end
		end
	}bind def
currentglobal true setglobal
/CTHasResourceForAllBug
	Level2?
		{
		1 dict dup
				/@shouldNotDisappearDictValue true def
				Adobe_CoolType_Data exch/@shouldNotDisappearDict exch put
				begin
				count @_SaveStackLevels
					{(*){pop stop}128 string/Category resourceforall}
				stopped pop
				@_RestoreStackLevels
				currentdict Adobe_CoolType_Data/@shouldNotDisappearDict get dup 3 1 roll ne dup 3 1 roll
					{
						 /@shouldNotDisappearDictValue known
								{
										 {
												end
												currentdict 1 index eq
													{pop exit}
												if
										 }
									 loop
								}
						 if
					}
					{
						 pop
						 end
					}
				ifelse
		}
		{false}
	ifelse
	def
true setglobal
/CTHasResourceStatusBug
	Level2?
		{
		mark
			{/steveamerige/Category resourcestatus}
		stopped
			{cleartomark true}
			{cleartomark currentglobal not}
		ifelse
		}
		{false}
	ifelse
	def
setglobal
/CTResourceStatus
		{
		mark 3 1 roll
		/Category findresource
			begin
			({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
				{cleartomark false}
				{{3 2 roll pop true}{cleartomark false}ifelse}
			ifelse
			end
		}bind def
/CTWorkAroundBugs
	{
	Level2?
		{
		/cid_PreLoad/ProcSet resourcestatus
			{
			pop pop
			currentglobal
			mark
				{
				(*)
					{
					dup/CMap CTHasResourceStatusBug
						{CTResourceStatus}
						{resourcestatus}
					ifelse
						{
						pop dup 0 eq exch 1 eq or
							{
							dup/CMap findresource gcheck setglobal
							/CMap undefineresource
							}
							{
							pop CTHasResourceForAllBug
								{exit}
								{stop}
							ifelse
							}
						ifelse
						}
						{pop}
					ifelse
					}
				128 string/CMap resourceforall
				}
			stopped
				{cleartomark}
			stopped pop
			setglobal
			}
		if
		}
	if
	}bind def
/ds
	{
	Adobe_CoolType_Core
		begin
		CTWorkAroundBugs
		/mo/moveto load def
		/nf/newencodedfont load def
		/msf{makefont setfont}bind def
		/uf{dup undefinefont ct_VMDictUndef}bind def
		/ur/undefineresource load def
		/chp/charpath load def
		/awsh/awidthshow load def
		/wsh/widthshow load def
		/ash/ashow load def
		/@xshow/xshow load def
		/@yshow/yshow load def
		/@xyshow/xyshow load def
		/@cshow/cshow load def
		/sh/show load def
		/rp/repeat load def
		/.n/.notdef def
		end
		currentglobal false setglobal
	 userdict/Adobe_CoolType_Data 2 copy known not
		 {2 copy 10 dict put}
		if
		get
		begin
		/AddWidths? false def
		/CC 0 def
		/charcode 2 string def
		/@opStackCountByLevel 32 dict def
		/@opStackLevel 0 def
		/@dictStackCountByLevel 32 dict def
		/@dictStackLevel 0 def
		/InVMFontsByCMap 10 dict def
		/InVMDeepCopiedFonts 10 dict def
		end
		setglobal
	}bind def
/dt
	{
	currentdict Adobe_CoolType_Core eq
		{end}
	if
	}bind def
/ps
	{
	Adobe_CoolType_Core begin
	Adobe_CoolType_GVMFonts begin
	Adobe_CoolType_LVMFonts begin
	SharedFontDirectory begin
	}bind def
/pt
	{
	end
	end
	end
	end
	}bind def
/unload
	{
	systemdict/languagelevel known
		{
		systemdict/languagelevel get 2 ge
			{
			userdict/Adobe_CoolType_Core 2 copy known
				{undef}
				{pop pop}
			ifelse
			}
		if
		}
	if
	}bind def
/ndf
	{
	1 index where
		{pop pop pop}
		{dup xcheck{bind}if def}
	ifelse
	}def
/findfont systemdict
	begin
	userdict
		begin
		/globaldict where{/globaldict get begin}if
			dup where pop exch get
		/globaldict where{pop end}if
		end
	end
Adobe_CoolType_Core_Defined
	{/systemfindfont exch def}
	{
	/findfont 1 index def
	/systemfindfont exch def
	}
ifelse
/undefinefont
	{pop}ndf
/copyfont
	{
	currentglobal 3 1 roll
	1 index gcheck setglobal
	dup null eq{0}{dup length}ifelse
	2 index length add 1 add dict
		begin
		exch
			{
			1 index/FID eq
				{pop pop}
				{def}
			ifelse
			}
		forall
		dup null eq
			{pop}
			{{def}forall}
		ifelse
		currentdict
		end
	exch setglobal
	}bind def
/copyarray
	{
	currentglobal exch
	dup gcheck setglobal
	dup length array copy
	exch setglobal
	}bind def
/newencodedfont
	{
	currentglobal
		{
		SharedFontDirectory 3 index known
			{SharedFontDirectory 3 index get/FontReferenced known}
			{false}
		ifelse
		}
		{
		FontDirectory 3 index known
			{FontDirectory 3 index get/FontReferenced known}
			{
			SharedFontDirectory 3 index known
				{SharedFontDirectory 3 index get/FontReferenced known}
				{false}
			ifelse
			}
		ifelse
		}
	ifelse
	dup
		{
		3 index findfont/FontReferenced get
		2 index dup type/nametype eq
			{findfont}
		if ne
			{pop false}
		if
		}
	if
	dup
		{
		1 index dup type/nametype eq
			{findfont}
		 if
		dup/CharStrings known
			{
			/CharStrings get length
			4 index findfont/CharStrings get length
			ne
				{
				pop false
				}
			if 
			}
			{pop}
			ifelse
		}
	if
		{
		pop
		1 index findfont
		/Encoding get exch
		0 1 255
			{2 copy get 3 index 3 1 roll put}
		for
		pop pop pop
		}
		{
		currentglobal
	 4 1 roll
		dup type/nametype eq
		 {findfont}
	 if
	 dup gcheck setglobal
		dup dup maxlength 2 add dict
			begin
			exch
				{
				1 index/FID ne
				2 index/Encoding ne and
					{def}
					{pop pop}
				ifelse
				}
			forall
			/FontReferenced exch def
			/Encoding exch dup length array copy def
			/FontName 1 index dup type/stringtype eq{cvn}if def dup
			currentdict
			end
		definefont ct_VMDictPut
		setglobal
		}
	ifelse
	}bind def
/SetSubstituteStrategy
	{
	$SubstituteFont
		begin
		dup type/dicttype ne
			{0 dict}
		if
		currentdict/$Strategies known
			{
			exch $Strategies exch 
			2 copy known
				{
				get
				2 copy maxlength exch maxlength add dict
					begin
					{def}forall
					{def}forall
					currentdict
					dup/$Init known
						{dup/$Init get exec}
					if
					end
				/$Strategy exch def
				}
				{pop pop pop}
			ifelse
			}
			{pop pop}
		ifelse
		end
	}bind def
/scff
	{
	$SubstituteFont
		begin
		dup type/stringtype eq
			{dup length exch}
			{null}
		ifelse
		/$sname exch def
		/$slen exch def
		/$inVMIndex
			$sname null eq
				{
				1 index $str cvs
				dup length $slen sub $slen getinterval cvn
				}
				{$sname}
			ifelse def
		end
		{findfont}
	@Stopped
		{
		dup length 8 add string exch
		1 index 0(BadFont:)putinterval
		1 index exch 8 exch dup length string cvs putinterval cvn
			{findfont}
		@Stopped
			{pop/Courier findfont}
		if
		}
	if
	$SubstituteFont
		begin
		/$sname null def
		/$slen 0 def
		/$inVMIndex null def
		end
	}bind def
/isWidthsOnlyFont
	{
	dup/WidthsOnly known
		{pop pop true}
		{
		dup/FDepVector known
			{/FDepVector get{isWidthsOnlyFont dup{exit}if}forall}
			{
			dup/FDArray known
				{/FDArray get{isWidthsOnlyFont dup{exit}if}forall}
				{pop}
			ifelse
			}
		ifelse
		}
	ifelse
	}bind def
/ct_StyleDicts 4 dict dup begin
		 /Adobe-Japan1 4 dict dup begin
					 Level2?
								{
								/Serif
								/HeiseiMin-W3-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMin-W3}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMin-W3/CIDFont resourcestatus
								{pop pop/HeiseiMin-W3}
								{/Ryumin-Light}
								ifelse
							}
							{/Ryumin-Light}
							ifelse
								}
								ifelse
								def
								/SansSerif
								/HeiseiKakuGo-W5-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiKakuGo-W5/CIDFont resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{/GothicBBB-Medium}
								ifelse
							}
							{/GothicBBB-Medium}
							ifelse
								}
								ifelse
								def
								/HeiseiMaruGo-W4-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMaruGo-W4/CIDFont resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
									/Jun101-Light-RKSJ-H/Font resourcestatus
									{pop pop/Jun101-Light}
									{SansSerif}
									ifelse
								}
								ifelse
							}
							{
								/Jun101-Light-RKSJ-H/Font resourcestatus
								{pop pop/Jun101-Light}
								{SansSerif}
								ifelse
							}
							ifelse
								}
								ifelse
								/RoundSansSerif exch def
								/Default Serif def
								}
								{
								/Serif/Ryumin-Light def
								/SansSerif/GothicBBB-Medium def
								{
								(fonts/Jun101-Light-83pv-RKSJ-H)status
								}stopped
								{pop}{
										 {pop pop pop pop/Jun101-Light}
										 {SansSerif}
										 ifelse
										 /RoundSansSerif exch def
								}ifelse
								/Default Serif def
								}
					 ifelse
		 end
		 def
		 /Adobe-Korea1 4 dict dup begin
					/Serif/HYSMyeongJo-Medium def
					/SansSerif/HYGoThic-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-GB1 4 dict dup begin
					/Serif/STSong-Light def
					/SansSerif/STHeiti-Regular def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-CNS1 4 dict dup begin
					/Serif/MKai-Medium def
					/SansSerif/MHei-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
end
def
Level2?{currentglobal true setglobal}if
/ct_BoldRomanWidthProc 
	{
	stringwidth 1 index 0 ne{exch .03 add exch}if setcharwidth
	0 0
	}bind def
/ct_Type0WidthProc 
	{
	 dup stringwidth 0 0 moveto 
	 2 index true charpath pathbbox
	 0 -1 
	 7 index 2 div .88 
	 setcachedevice2
	 pop
	0 0
	}bind def
/ct_Type0WMode1WidthProc 
	{
	 dup stringwidth 
	 pop 2 div neg -0.88
	2 copy
	moveto 
	0 -1
	 5 -1 roll true charpath pathbbox
	 setcachedevice
	}bind def
/cHexEncoding
[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
/ct_BoldBaseFont 
	 11 dict begin
		/FontType 3 def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/Encoding cHexEncoding def 
		/_setwidthProc/ct_BoldRomanWidthProc load def
		/_bcstr1 1 string def
		/BuildChar
		{
			exch begin
				_basefont setfont
				_bcstr1 dup 0 4 -1 roll put
				dup 
				_setwidthProc
				3 copy 
				moveto				
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
systemdict/composefont known
{
/ct_DefineIdentity-H
{
	/Identity-H/CMap resourcestatus
	{
		pop pop
	}
	{
		/CIDInit/ProcSet findresource begin
		 12 dict begin
		 begincmap
		 /CIDSystemInfo 3 dict dup begin
			 /Registry(Adobe)def
			 /Ordering(Identity)def
			 /Supplement 0 def
		 end def
		 /CMapName/Identity-H def
		 /CMapVersion 1.000 def
		 /CMapType 1 def
		 1 begincodespacerange
		 <0000><FFFF>
		 endcodespacerange
		 1 begincidrange
		 <0000><FFFF>0
		 endcidrange
		 endcmap
		 CMapName currentdict/CMap defineresource pop
		 end
		 end
	 }
	 ifelse
}
def
/ct_BoldBaseCIDFont 
	 11 dict begin
		/CIDFontType 1 def
		/CIDFontName/ct_BoldBaseCIDFont def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/_setwidthProc/ct_Type0WidthProc load def
		/_bcstr2 2 string def
		/BuildGlyph
		{
			exch begin		 
				_basefont setfont
				_bcstr2 1 2 index 256 mod put
				_bcstr2 0 3 -1 roll 256 idiv put
				_bcstr2 dup _setwidthProc		 
				3 copy 
				moveto
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
}if
Level2?{setglobal}if
/ct_CopyFont{
	{
		1 index/FID ne 2 index/UniqueID ne and
		{def}{pop pop}ifelse
	}forall
}bind def
/ct_Type0CopyFont 
{
	exch
	dup length dict
	begin
	ct_CopyFont
	[
	exch
	FDepVector 
	{
		 dup/FontType get 0 eq
		{	
		1 index ct_Type0CopyFont 
		/_ctType0 exch definefont
		}
		{
		/_ctBaseFont exch
		2 index exec
		}
		 ifelse 
		 exch
	}
	forall 
	pop
	]				
	/FDepVector exch def
	currentdict
	end
}bind def
/ct_MakeBoldFont
{
	 dup/ct_SyntheticBold known
	{
		dup length 3 add dict begin 
		ct_CopyFont 
		/ct_StrokeWidth .03 0 FontMatrix idtransform pop def 
		/ct_SyntheticBold true def
		currentdict 
		end 
		definefont
	}
	{
		dup dup length 3 add dict
		begin
			ct_CopyFont
			/PaintType 2 def
			/StrokeWidth .03 0 FontMatrix idtransform pop def
			/dummybold currentdict
		end
		definefont
		dup/FontType get dup 9 ge exch 11 le and 
		{
			ct_BoldBaseCIDFont
			dup length 3 add dict copy begin
			dup/CIDSystemInfo get/CIDSystemInfo exch def
			ct_DefineIdentity-H
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefont exch def
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefonto exch def
			currentdict
			end
			/CIDFont defineresource
		}
		{
			ct_BoldBaseFont
			dup length 3 add dict copy begin
			/_basefont exch def
			/_basefonto exch def
			currentdict
			end
			definefont
		}
		ifelse
	}
	ifelse
}bind def
/ct_MakeBold{
	1 index 
	1 index
	findfont
	currentglobal 5 1 roll
	dup gcheck setglobal
		dup
		 /FontType get 0 eq
			{
				dup/WMode known{dup/WMode get 1 eq}{false}ifelse
				version length 4 ge
				and
					{version 0 4 getinterval cvi 2015 ge}
					{true}
				ifelse 
					{/ct_Type0WidthProc}
					{/ct_Type0WMode1WidthProc}
				ifelse
				ct_BoldBaseFont/_setwidthProc 3 -1 roll load put
						{ct_MakeBoldFont}ct_Type0CopyFont definefont
			}
			{
				dup/_fauxfont known not 1 index/SubstMaster known not and
				{
					 ct_BoldBaseFont/_setwidthProc /ct_BoldRomanWidthProc load put
					 ct_MakeBoldFont 
				}
				{
				2 index 2 index eq
					{exch pop	}
					{
						dup length dict begin
						ct_CopyFont
						currentdict
						end
						definefont 
					}
				ifelse
				}
			ifelse
			}
		 ifelse
		 pop pop pop
		 setglobal
}bind def
/?str1 256 string def
/?set
	{
	$SubstituteFont
		begin
		/$substituteFound false def
		/$fontname 1 index def
		/$doSmartSub false def
		end
	dup
	 findfont
	$SubstituteFont
		begin
		$substituteFound
			{false}
			{
			dup/FontName known
				{
				dup/FontName get $fontname eq
				1 index/DistillerFauxFont known not and
				/currentdistillerparams where
					{pop false 2 index isWidthsOnlyFont not and}
				if
				}
				{false}
			ifelse
			}
		ifelse
		exch pop
		/$doSmartSub true def
		end
		{
		5 1 roll pop pop pop pop
		findfont
		}
		{
		1 index
		findfont
		dup/FontType get 3 eq
		{
			6 1 roll pop pop pop pop pop false
		}
		{pop true}
		ifelse
		{
		$SubstituteFont
		begin
		pop pop
		/$styleArray 1 index def
		/$regOrdering 2 index def
		pop pop
		0 1 $styleArray length 1 sub
		{
			$styleArray exch get
			ct_StyleDicts $regOrdering
			2 copy known
			{
				get
				exch 2 copy known not
				{pop/Default}
				if
				get
				dup type/nametype eq
				{
				?str1 cvs length dup 1 add exch
				?str1 exch(-)putinterval
				exch dup length exch ?str1 exch 3 index exch putinterval
				add ?str1 exch 0 exch getinterval cvn
				}
				{
				pop pop/Unknown
				}
				ifelse
			}
			{
				pop pop pop pop/Unknown
			}
			ifelse
		}
		for
		end
		findfont 
		}if
		}
	ifelse
	currentglobal false setglobal 3 1 roll
	null copyfont definefont pop
	setglobal
	}bind def
setpacking
userdict/$SubstituteFont 25 dict put
1 dict
	begin
	/SubstituteFont
		dup $error exch 2 copy known
			{get}
			{pop pop{pop/Courier}bind}
		ifelse def
	/currentdistillerparams where dup
		{
		pop pop
		currentdistillerparams/CannotEmbedFontPolicy 2 copy known
			{get/Error eq}
			{pop pop false}
		ifelse
		}
	if not
		{
		countdictstack array dictstack 0 get
			begin
			userdict
				begin
				$SubstituteFont
					begin
					/$str 128 string def
					/$fontpat 128 string def
					/$slen 0 def
					/$sname null def
					/$match false def
					/$fontname null def
					/$substituteFound false def
					/$inVMIndex null def
					/$doSmartSub true def
					/$depth 0 def
					/$fontname null def
					/$italicangle 26.5 def
					/$dstack null def
					/$Strategies 10 dict dup
						begin
						/$Type3Underprint
							{
							currentglobal exch false setglobal
							11 dict
								begin
								/UseFont exch
									$WMode 0 ne
										{
										dup length dict copy
										dup/WMode $WMode put
										/UseFont exch definefont
										}
									if def
								/FontName $fontname dup type/stringtype eq{cvn}if def
								/FontType 3 def
								/FontMatrix[.001 0 0 .001 0 0]def
								/Encoding 256 array dup 0 1 255{/.notdef put dup}for pop def
								/FontBBox[0 0 0 0]def
								/CCInfo 7 dict dup
									begin
									/cc null def
									/x 0 def
									/y 0 def
									end def
								/BuildChar
									{
									exch
										begin
										CCInfo
											begin
											1 string dup 0 3 index put exch pop
											/cc exch def
											UseFont 1000 scalefont setfont
											cc stringwidth/y exch def/x exch def
											x y setcharwidth
											$SubstituteFont/$Strategy get/$Underprint get exec
											0 0 moveto cc show
											x y moveto
											end
										end
									}bind def
								currentdict
								end
							exch setglobal
							}bind def
						/$GetaTint
							2 dict dup
								begin
								/$BuildFont
									{
									dup/WMode known
										{dup/WMode get}
										{0}
									ifelse
									/$WMode exch def
									$fontname exch
									dup/FontName known
										{
										dup/FontName get
										dup type/stringtype eq{cvn}if
										}
										{/unnamedfont}
									ifelse
									exch
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
									1 index/FontName get known
										{
										pop
										Adobe_CoolType_Data/InVMDeepCopiedFonts get
										1 index get
										null copyfont
										}
										{$deepcopyfont}
									ifelse
									exch 1 index exch/FontBasedOn exch put
									dup/FontName $fontname dup type/stringtype eq{cvn}if put
									definefont
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
										begin
										dup/FontBasedOn get 1 index def
										end
									}bind def
								/$Underprint
									{
									gsave
									x abs y abs gt
										{/y 1000 def}
										{/x -1000 def 500 120 translate}
									ifelse
									Level2?
										{
										[/Separation(All)/DeviceCMYK{0 0 0 1 pop}]
										setcolorspace
										}
										{0 setgray}
									ifelse
									10 setlinewidth
									x .8 mul
									[7 3]
										{
										y mul 8 div 120 sub x 10 div exch moveto
										0 y 4 div neg rlineto
										dup 0 rlineto
										0 y 4 div rlineto
										closepath
										gsave
										Level2?
											{.2 setcolor}
											{.8 setgray}
										ifelse
										fill grestore
										stroke
										}
									forall
									pop
									grestore
									}bind def
								end def
						/$Oblique
							1 dict dup
								begin
								/$BuildFont
									{
									currentglobal exch dup gcheck setglobal
									null copyfont
										begin
										/FontBasedOn
										currentdict/FontName known
											{
											FontName
											dup type/stringtype eq{cvn}if
											}
											{/unnamedfont}
										ifelse
										def
										/FontName $fontname dup type/stringtype eq{cvn}if def
										/currentdistillerparams where
											{pop}
											{
											/FontInfo currentdict/FontInfo known
												{FontInfo null copyfont}
												{2 dict}
											ifelse
											dup
												begin
												/ItalicAngle $italicangle def
												/FontMatrix FontMatrix
												[1 0 ItalicAngle dup sin exch cos div 1 0 0]
												matrix concatmatrix readonly
												end
											4 2 roll def
											def
											}
										ifelse
										FontName currentdict
										end
									definefont
									exch setglobal
									}bind def
								end def
						/$None
							1 dict dup
								begin
								/$BuildFont{}bind def
								end def
						end def
					/$Oblique SetSubstituteStrategy
					/$findfontByEnum
						{
						dup type/stringtype eq{cvn}if
						dup/$fontname exch def
						$sname null eq
							{$str cvs dup length $slen sub $slen getinterval}
							{pop $sname}
						ifelse
						$fontpat dup 0(fonts/*)putinterval exch 7 exch putinterval
						/$match false def
						$SubstituteFont/$dstack countdictstack array dictstack put
						mark
							{
							$fontpat 0 $slen 7 add getinterval
								{/$match exch def exit}
							$str filenameforall
							}
						stopped
							{
							cleardictstack
							currentdict
							true
							$SubstituteFont/$dstack get
								{
								exch
									{
									1 index eq
										{pop false}
										{true}
									ifelse
									}
									{begin false}
								ifelse
								}
							forall
							pop
							}
						if
						cleartomark
						/$slen 0 def
						$match false ne
							{$match(fonts/)anchorsearch pop pop cvn}
							{/Courier}
						ifelse
						}bind def
					/$ROS 1 dict dup
						begin
						/Adobe 4 dict dup
							begin
							/Japan1 [/Ryumin-Light/HeiseiMin-W3
										 /GothicBBB-Medium/HeiseiKakuGo-W5
										 /HeiseiMaruGo-W4/Jun101-Light]def
							/Korea1 [/HYSMyeongJo-Medium/HYGoThic-Medium]def
							/GB1	 [/STSong-Light/STHeiti-Regular]def
							/CNS1	[/MKai-Medium/MHei-Medium]def
							end def
						end def
					/$cmapname null def
					/$deepcopyfont
						{
						dup/FontType get 0 eq
							{
							1 dict dup/FontName/copied put copyfont
								begin
								/FDepVector FDepVector copyarray
								0 1 2 index length 1 sub
									{
									2 copy get $deepcopyfont
									dup/FontName/copied put
									/copied exch definefont
									3 copy put pop pop
									}
								for
								def
								currentdict
								end
							}
							{$Strategies/$Type3Underprint get exec}
						ifelse
						}bind def
					/$buildfontname
						{
						dup/CIDFont findresource/CIDSystemInfo get
							begin
							Registry length Ordering length Supplement 8 string cvs
							3 copy length 2 add add add string
							dup 5 1 roll dup 0 Registry putinterval
							dup 4 index(-)putinterval
							dup 4 index 1 add Ordering putinterval
							4 2 roll add 1 add 2 copy(-)putinterval
							end
						1 add 2 copy 0 exch getinterval $cmapname $fontpat cvs exch
						anchorsearch
							{pop pop 3 2 roll putinterval cvn/$cmapname exch def}
							{pop pop pop pop pop}
						ifelse
						length
						$str 1 index(-)putinterval 1 add
						$str 1 index $cmapname $fontpat cvs putinterval
						$cmapname length add
						$str exch 0 exch getinterval cvn
						}bind def
					/$findfontByROS
						{
						/$fontname exch def
						$ROS Registry 2 copy known
							{
							get Ordering 2 copy known
								{get}
								{pop pop[]}
							ifelse
							}
							{pop pop[]}
						ifelse
						false exch
							{
							dup/CIDFont resourcestatus
								{
								pop pop
								save
								1 index/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get}
									{false}
								ifelse
								exch pop
								exch restore
									{pop}
									{exch pop true exit}
								ifelse
								}
								{pop}
							ifelse
							}
						forall
							{$str cvs $buildfontname}
							{
							false(*)
								{
								save exch
								dup/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get not}
									{true}
								ifelse
								exch/CIDSystemInfo get
								dup/Registry get Registry eq
								exch/Ordering get Ordering eq and and
									{exch restore exch pop true exit}
									{pop restore}
								ifelse
								}
							$str/CIDFont resourceforall
								{$buildfontname}
								{$fontname $findfontByEnum}
							ifelse
							}
						ifelse
						}bind def
					end
				end
				currentdict/$error known currentdict/languagelevel known and dup
					{pop $error/SubstituteFont known}
				if
				dup
					{$error}
					{Adobe_CoolType_Core}
				ifelse
				begin
					{
					/SubstituteFont
					/CMap/Category resourcestatus
						{
						pop pop
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{
								$sname null eq
									{dup $str cvs dup length $slen sub $slen getinterval cvn}
									{$sname}
								ifelse
								Adobe_CoolType_Data/InVMFontsByCMap get
								1 index 2 copy known
									{
									get
									false exch
										{
										pop
										currentglobal
											{
											GlobalFontDirectory 1 index known
												{exch pop true exit}
												{pop}
											ifelse
											}
											{
											FontDirectory 1 index known
												{exch pop true exit}
												{
												GlobalFontDirectory 1 index known
													{exch pop true exit}
													{pop}
												ifelse
												}
											ifelse
											}
										ifelse
										}
									forall
									}
									{pop pop false}
								ifelse
									{
									exch pop exch pop
									}
									{
									dup/CMap resourcestatus
										{
										pop pop
										dup/$cmapname exch def
										/CMap findresource/CIDSystemInfo get{def}forall
										$findfontByROS
										}
										{
										128 string cvs
										dup(-)search
											{
											3 1 roll search
												{
												3 1 roll pop
													{dup cvi}
												stopped
													{pop pop pop pop pop $findfontByEnum}
													{
													4 2 roll pop pop
													exch length
													exch
													2 index length
													2 index
													sub
													exch 1 sub -1 0
														{
														$str cvs dup length
														4 index
														0
														4 index
														4 3 roll add
														getinterval
														exch 1 index exch 3 index exch
														putinterval
														dup/CMap resourcestatus
															{
															pop pop
															4 1 roll pop pop pop
															dup/$cmapname exch def
															/CMap findresource/CIDSystemInfo get{def}forall
															$findfontByROS
															true exit
															}
															{pop}
														ifelse
														}
													for
													dup type/booleantype eq
														{pop}
														{pop pop pop $findfontByEnum}
													ifelse
													}
												ifelse
												}
												{pop pop pop $findfontByEnum}
											ifelse
											}
											{pop pop $findfontByEnum}
										ifelse
										}
									ifelse
									}
								ifelse
								}
								{//SubstituteFont exec}
							ifelse
							/$slen 0 def
							end
						}
						}
						{
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{$findfontByEnum}
								{//SubstituteFont exec}
							ifelse
							end
						}
						}
					ifelse
					bind readonly def
					Adobe_CoolType_Core/scfindfont/systemfindfont load put
					}
					{
					/scfindfont
						{
						$SubstituteFont
							begin
							dup systemfindfont
							dup/FontName known
								{dup/FontName get dup 3 index ne}
								{/noname true}
							ifelse
							dup
								{
								/$origfontnamefound 2 index def
								/$origfontname 4 index def/$substituteFound true def
								}
							if
							exch pop
								{
								$slen 0 gt
								$sname null ne
								3 index length $slen gt or and
									{
									pop dup $findfontByEnum findfont
									dup maxlength 1 add dict
										begin
											{1 index/FID eq{pop pop}{def}ifelse}
										forall
										currentdict
										end
									definefont
									dup/FontName known{dup/FontName get}{null}ifelse
									$origfontnamefound ne
										{
										$origfontname $str cvs print
										( substitution revised, using )print
										dup/FontName known
											{dup/FontName get}{(unspecified font)}
										ifelse
										$str cvs print(.\n)print
										}
									if
									}
									{exch pop}
								ifelse
								}
								{exch pop}
							ifelse
							end
						}bind def
					}
				ifelse
				end
			end
		Adobe_CoolType_Core_Defined not
			{
			Adobe_CoolType_Core/findfont
				{
				$SubstituteFont
					begin
					$depth 0 eq
						{
						/$fontname 1 index dup type/stringtype ne{$str cvs}if def
						/$substituteFound false def
						}
					if
					/$depth $depth 1 add def
					end
				scfindfont
				$SubstituteFont
					begin
					/$depth $depth 1 sub def
					$substituteFound $depth 0 eq and
						{
						$inVMIndex null ne
							{dup $inVMIndex $AddInVMFont}
						if
						$doSmartSub
							{
							currentdict/$Strategy known
								{$Strategy/$BuildFont get exec}
							if
							}
						if
						}
					if
					end
				}bind put
			}
		if
		}
	if
	end
/$AddInVMFont
	{
	exch/FontName 2 copy known
		{
		get
		1 dict dup begin exch 1 index gcheck def end exch
		Adobe_CoolType_Data/InVMFontsByCMap get exch
		$DictAdd
		}
		{pop pop pop}
	ifelse
	}bind def
/$DictAdd
	{
	2 copy known not
		{2 copy 4 index length dict put}
	if
	Level2? not
		{
		2 copy get dup maxlength exch length 4 index length add lt
		2 copy get dup length 4 index length add exch maxlength 1 index lt
			{
			2 mul dict
				begin
				2 copy get{forall}def
				2 copy currentdict put
				end
			}
			{pop}
		ifelse
		}
	if
	get
		begin
			{def}
		forall
		end
	}bind def
end
end
%%EndResource
currentglobal true setglobal
%%BeginResource: procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
%%Copyright: Copyright 1987-2006 Adobe Systems Incorporated.
%%Version: 1.23 0
systemdict/languagelevel known dup
	{currentglobal false setglobal}
	{false}
ifelse
exch
userdict/Adobe_CoolType_Utility 2 copy known
	{2 copy get dup maxlength 27 add dict copy}
	{27 dict}
ifelse put
Adobe_CoolType_Utility
	begin
	/@eexecStartData
		 <BAB431EA07F209EB8C4348311481D9D3F76E3D15246555577D87BC510ED54E
		 118C39697FA9F6DB58128E60EB8A12FA24D7CDD2FA94D221FA9EC8DA3E5E6A1C
		 4ACECC8C2D39C54E7C946031DD156C3A6B4A09AD29E1867A>def
	/@recognizeCIDFont null def
	/ct_Level2? exch def
	/ct_Clone? 1183615869 internaldict dup
			/CCRun known not
			exch/eCCRun known not
			ct_Level2? and or def
ct_Level2?
	{globaldict begin currentglobal true setglobal}
if
	/ct_AddStdCIDMap
		ct_Level2?
			{{
				mark
				Adobe_CoolType_Utility/@recognizeCIDFont currentdict put
					{
					((Hex)57 StartData
					 0615 1e27 2c39 1c60 d8a8 cc31 fe2b f6e0
					 7aa3 e541 e21c 60d8 a8c9 c3d0 6d9e 1c60
					 d8a8 c9c2 02d7 9a1c 60d8 a849 1c60 d8a8
					 cc36 74f4 1144 b13b 77)0()/SubFileDecode filter cvx exec
					}
				stopped
					{
					 cleartomark
					 Adobe_CoolType_Utility/@recognizeCIDFont get
					 countdictstack dup array dictstack
					 exch 1 sub -1 0
						 {
						 2 copy get 3 index eq
								{1 index length exch sub 1 sub{end}repeat exit}
								{pop}
						 ifelse
						 }
					 for
					 pop pop
					 Adobe_CoolType_Utility/@eexecStartData get eexec
					}
					{cleartomark}
				ifelse
			}}
			{{
				Adobe_CoolType_Utility/@eexecStartData get eexec
			}}
		ifelse bind def
userdict/cid_extensions known
dup{cid_extensions/cid_UpdateDB known and}if
	{
	 cid_extensions
	 begin
	/cid_GetCIDSystemInfo
		{
		 1 index type/stringtype eq
			{exch cvn exch}
		 if
		 cid_extensions
			 begin
			 dup load 2 index known
				{
				 2 copy
				 cid_GetStatusInfo
				 dup null ne
					{
					 1 index load
					 3 index get
					 dup null eq
						 {pop pop cid_UpdateDB}
						 {
						 exch
						 1 index/Created get eq
							 {exch pop exch pop}
							 {pop cid_UpdateDB}
						 ifelse
						 }
					 ifelse
					}
					{pop cid_UpdateDB}
				 ifelse
				}
				{cid_UpdateDB}
			 ifelse
			 end
		}bind def
	 end
	}
if
ct_Level2?
	{end setglobal}
if
	/ct_UseNativeCapability? systemdict/composefont known def
	/ct_MakeOCF 35 dict def
	/ct_Vars 25 dict def
	/ct_GlyphDirProcs 6 dict def
	/ct_BuildCharDict 15 dict dup
		begin
		/charcode 2 string def
		/dst_string 1500 string def
		/nullstring()def
		/usewidths? true def
		end def
	ct_Level2?{setglobal}{pop}ifelse
	ct_GlyphDirProcs
		begin
		/GetGlyphDirectory
			{
			systemdict/languagelevel known
				{pop/CIDFont findresource/GlyphDirectory get}
				{
				1 index/CIDFont findresource/GlyphDirectory
				get dup type/dicttype eq
					{
					dup dup maxlength exch length sub 2 index lt
						{
						dup length 2 index add dict copy 2 index
						/CIDFont findresource/GlyphDirectory 2 index put
						}
					if
					}
				if
				exch pop exch pop
				}
			ifelse
			+
			}def
		/+
			{
			systemdict/languagelevel known
				{
				currentglobal false setglobal
				3 dict begin
					/vm exch def
				}
				{1 dict begin}
			ifelse
			/$ exch def
			systemdict/languagelevel known
				{
				vm setglobal
				/gvm currentglobal def
				$ gcheck setglobal
				}
			if
			?{$ begin}if
			}def
		/?{$ type/dicttype eq}def
		/|{
			userdict/Adobe_CoolType_Data known
				{
			Adobe_CoolType_Data/AddWidths? known
				{
				 currentdict Adobe_CoolType_Data
					begin
					 begin
						AddWidths?
								{
								Adobe_CoolType_Data/CC 3 index put
								?{def}{$ 3 1 roll put}ifelse
								CC charcode exch 1 index 0 2 index 256 idiv put
								1 index exch 1 exch 256 mod put
								stringwidth 2 array astore
								currentfont/Widths get exch CC exch put
								}
								{?{def}{$ 3 1 roll put}ifelse}
							ifelse
					end
				end
				}
				{?{def}{$ 3 1 roll put}ifelse}	ifelse
				}
				{?{def}{$ 3 1 roll put}ifelse}
			ifelse
			}def
		/!
			{
			?{end}if
			systemdict/languagelevel known
				{gvm setglobal}
			if
			end
			}def
		/:{string currentfile exch readstring pop}executeonly def
		end
	ct_MakeOCF
		begin
		/ct_cHexEncoding
		[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
		/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
		/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
		/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
		/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
		/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
		/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
		/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
		/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
		/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
		/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
		/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
		/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
		/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
		/ct_CID_STR_SIZE 8000 def
		/ct_mkocfStr100 100 string def
		/ct_defaultFontMtx[.001 0 0 .001 0 0]def
		/ct_1000Mtx[1000 0 0 1000 0 0]def
		/ct_raise{exch cvx exch errordict exch get exec stop}bind def
		/ct_reraise
			{cvx $error/errorname get(Error: )print dup(						 )cvs print
					errordict exch get exec stop
			}bind def
		/ct_cvnsi
			{
			1 index add 1 sub 1 exch 0 4 1 roll
				{
				2 index exch get
				exch 8 bitshift
				add
				}
			for
			exch pop
			}bind def
		/ct_GetInterval
			{
			Adobe_CoolType_Utility/ct_BuildCharDict get
				begin
				/dst_index 0 def
				dup dst_string length gt
					{dup string/dst_string exch def}
				if
				1 index ct_CID_STR_SIZE idiv
				/arrayIndex exch def
				2 index arrayIndex get
				2 index
				arrayIndex ct_CID_STR_SIZE mul
				sub
					{
					dup 3 index add 2 index length le
						{
						2 index getinterval
						dst_string dst_index 2 index putinterval
						length dst_index add/dst_index exch def
						exit
						}
						{
						1 index length 1 index sub
						dup 4 1 roll
						getinterval
						dst_string dst_index 2 index putinterval
						pop dup dst_index add/dst_index exch def
						sub
						/arrayIndex arrayIndex 1 add def
						2 index dup length arrayIndex gt
							 {arrayIndex get}
							 {
							 pop
							 exit
							 }
						ifelse
						0
						}
					ifelse
					}
				loop
				pop pop pop
				dst_string 0 dst_index getinterval
				end
			}bind def
		ct_Level2?
			{
			/ct_resourcestatus
			currentglobal mark true setglobal
				{/unknowninstancename/Category resourcestatus}
			stopped
				{cleartomark setglobal true}
				{cleartomark currentglobal not exch setglobal}
			ifelse
				{
					{
					mark 3 1 roll/Category findresource
						begin
						ct_Vars/vm currentglobal put
						({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
							{cleartomark false}
							{{3 2 roll pop true}{cleartomark false}ifelse}
						ifelse
						ct_Vars/vm get setglobal
						end
					}
				}
				{{resourcestatus}}
			ifelse bind def
			/CIDFont/Category ct_resourcestatus
				{pop pop}
				{
				currentglobal true setglobal
				/Generic/Category findresource
				dup length dict copy
				dup/InstanceType/dicttype put
				/CIDFont exch/Category defineresource pop
				setglobal
				}
			ifelse
			ct_UseNativeCapability?
				{
				/CIDInit/ProcSet findresource begin
				12 dict begin
				begincmap
				/CIDSystemInfo 3 dict dup begin
				 /Registry(Adobe)def
				 /Ordering(Identity)def
				 /Supplement 0 def
				end def
				/CMapName/Identity-H def
				/CMapVersion 1.000 def
				/CMapType 1 def
				1 begincodespacerange
				<0000><FFFF>
				endcodespacerange
				1 begincidrange
				<0000><FFFF>0
				endcidrange
				endcmap
				CMapName currentdict/CMap defineresource pop
				end
				end
				}
			if
			}
			{
			/ct_Category 2 dict begin
			/CIDFont 10 dict def
			/ProcSet	2 dict def
			currentdict
			end
			def
			/defineresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					dup dup maxlength exch length eq
						{
						dup length 10 add dict copy
						ct_Category 2 index 2 index put
						}
					if
					3 index 3 index put
					pop exch pop
					}
					{pop pop/defineresource/undefined ct_raise}
				ifelse
				}bind def
			/findresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index 2 copy known
						{get 3 1 roll pop pop}
						{pop pop/findresource/undefinedresource ct_raise}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/resourcestatus
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index known
					exch pop exch pop
						{
						0 -1 true
						}
						{
						false
						}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/ct_resourcestatus/resourcestatus load def
			}
		ifelse
		/ct_CIDInit 2 dict
			begin
			/ct_cidfont_stream_init
				{
					{
					dup(Binary)eq
						{
						pop
						null
						currentfile
						ct_Level2?
							{
								{cid_BYTE_COUNT()/SubFileDecode filter}
							stopped
								{pop pop pop}
							if
							}
						if
						/readstring load
						exit
						}
					if
					dup(Hex)eq
						{
						pop
						currentfile
						ct_Level2?
							{
								{null exch/ASCIIHexDecode filter/readstring}
							stopped
								{pop exch pop(>)exch/readhexstring}
							if
							}
							{(>)exch/readhexstring}
						ifelse
						load
						exit
						}
					if
					/StartData/typecheck ct_raise
					}
				loop
				cid_BYTE_COUNT ct_CID_STR_SIZE le
					{
					2 copy cid_BYTE_COUNT string exch exec
					pop
					1 array dup
					3 -1 roll
					0 exch put
					}
					{
					cid_BYTE_COUNT ct_CID_STR_SIZE div ceiling cvi
					dup array exch 2 sub 0 exch 1 exch
						{
						2 copy
						5 index
						ct_CID_STR_SIZE
						string
						6 index exec
						pop
						put
						pop
						}
					for
					2 index
					cid_BYTE_COUNT ct_CID_STR_SIZE mod string
					3 index exec
					pop
					1 index exch
					1 index length 1 sub
					exch put
					}
				ifelse
				cid_CIDFONT exch/GlyphData exch put
				2 index null eq
					{
					pop pop pop
					}
					{
					pop/readstring load
					1 string exch
						{
						3 copy exec
						pop
						dup length 0 eq
							{
							pop pop pop pop pop
							true exit
							}
						if
						4 index
						eq
							{
							pop pop pop pop
							false exit
							}
						if
						}
					loop
					pop
					}
				ifelse
				}bind def
			/StartData
				{
				mark
					{
					currentdict
					dup/FDArray get 0 get/FontMatrix get
					0 get 0.001 eq
						{
						dup/CDevProc known not
							{
							/CDevProc 1183615869 internaldict/stdCDevProc 2 copy known
								{get}
								{
								pop pop
								{pop pop pop pop pop 0 -1000 7 index 2 div 880}
								}
							ifelse
							def
							}
						if
						}
						{
						/CDevProc
							{
							 pop pop pop pop pop
							 0
							 1 cid_temp/cid_CIDFONT get
							/FDArray get 0 get
							/FontMatrix get 0 get div
							 7 index 2 div
							 1 index 0.88 mul
							}def
						}
					ifelse
					/cid_temp 15 dict def
					cid_temp
						begin
						/cid_CIDFONT exch def
						3 copy pop
						dup/cid_BYTE_COUNT exch def 0 gt
							{
							ct_cidfont_stream_init
							FDArray
								{
								/Private get
								dup/SubrMapOffset known
									{
									begin
									/Subrs SubrCount array def
									Subrs
									SubrMapOffset
									SubrCount
									SDBytes
									ct_Level2?
										{
										currentdict dup/SubrMapOffset undef
										dup/SubrCount undef
										/SDBytes undef
										}
									if
									end
									/cid_SD_BYTES exch def
									/cid_SUBR_COUNT exch def
									/cid_SUBR_MAP_OFFSET exch def
									/cid_SUBRS exch def
									cid_SUBR_COUNT 0 gt
										{
										GlyphData cid_SUBR_MAP_OFFSET cid_SD_BYTES ct_GetInterval
										0 cid_SD_BYTES ct_cvnsi
										0 1 cid_SUBR_COUNT 1 sub
											{
											exch 1 index
											1 add
											cid_SD_BYTES mul cid_SUBR_MAP_OFFSET add
											GlyphData exch cid_SD_BYTES ct_GetInterval
											0 cid_SD_BYTES ct_cvnsi
											cid_SUBRS 4 2 roll
											GlyphData exch
											4 index
											1 index
											sub
											ct_GetInterval
											dup length string copy put
											}
										for
										pop
										}
									if
									}
									{pop}
								ifelse
								}
							forall
							}
						if
						cleartomark pop pop
						end
					CIDFontName currentdict/CIDFont defineresource pop
					end end
					}
				stopped
					{cleartomark/StartData ct_reraise}
				if
				}bind def
			currentdict
			end def
		/ct_saveCIDInit
			{
			/CIDInit/ProcSet ct_resourcestatus
				{true}
				{/CIDInitC/ProcSet ct_resourcestatus}
			ifelse
				{
				pop pop
				/CIDInit/ProcSet findresource
				ct_UseNativeCapability?
					{pop null}
					{/CIDInit ct_CIDInit/ProcSet defineresource pop}
				ifelse
				}
				{/CIDInit ct_CIDInit/ProcSet defineresource pop null}
			ifelse
			ct_Vars exch/ct_oldCIDInit exch put
			}bind def
		/ct_restoreCIDInit
			{
			ct_Vars/ct_oldCIDInit get dup null ne
				{/CIDInit exch/ProcSet defineresource pop}
				{pop}
			ifelse
			}bind def
		/ct_BuildCharSetUp
			{
			1 index
				begin
				CIDFont
					begin
					Adobe_CoolType_Utility/ct_BuildCharDict get
						begin
						/ct_dfCharCode exch def
						/ct_dfDict exch def
						CIDFirstByte ct_dfCharCode add
						dup CIDCount ge
							{pop 0}
						if
						/cid exch def
							{
							GlyphDirectory cid 2 copy known
								{get}
								{pop pop nullstring}
							ifelse
							dup length FDBytes sub 0 gt
								{
								dup
								FDBytes 0 ne
									{0 FDBytes ct_cvnsi}
									{pop 0}
								ifelse
								/fdIndex exch def
								dup length FDBytes sub FDBytes exch getinterval
								/charstring exch def
								exit
								}
								{
								pop
								cid 0 eq
									{/charstring nullstring def exit}
								if
								/cid 0 def
								}
							ifelse
							}
						loop
			}def
		/ct_SetCacheDevice
			{
			0 0 moveto
			dup stringwidth
			3 -1 roll
			true charpath
			pathbbox
			0 -1000
			7 index 2 div 880
			setcachedevice2
			0 0 moveto
			}def
		/ct_CloneSetCacheProc
			{
			1 eq
				{
				stringwidth
				pop -2 div -880
				0 -1000 setcharwidth
				moveto
				}
				{
				usewidths?
					{
					currentfont/Widths get cid
					2 copy known
						{get exch pop aload pop}
						{pop pop stringwidth}
					ifelse
					}
					{stringwidth}
				ifelse
				setcharwidth
				0 0 moveto
				}
			ifelse
			}def
		/ct_Type3ShowCharString
			{
			ct_FDDict fdIndex 2 copy known
				{get}
				{
				currentglobal 3 1 roll
				1 index gcheck setglobal
				ct_Type1FontTemplate dup maxlength dict copy
					begin
					FDArray fdIndex get
					dup/FontMatrix 2 copy known
						{get}
						{pop pop ct_defaultFontMtx}
					ifelse
					/FontMatrix exch dup length array copy def
					/Private get
					/Private exch def
					/Widths rootfont/Widths get def
					/CharStrings 1 dict dup/.notdef
						<d841272cf18f54fc13>dup length string copy put def
					currentdict
					end
				/ct_Type1Font exch definefont
				dup 5 1 roll put
				setglobal
				}
			ifelse
			dup/CharStrings get 1 index/Encoding get
			ct_dfCharCode get charstring put
			rootfont/WMode 2 copy known
				{get}
				{pop pop 0}
			ifelse
			exch
			1000 scalefont setfont
			ct_str1 0 ct_dfCharCode put
			ct_str1 exch ct_dfSetCacheProc
			ct_SyntheticBold
				{
				currentpoint
				ct_str1 show
				newpath
				moveto
				ct_str1 true charpath
				ct_StrokeWidth setlinewidth
				stroke
				}
				{ct_str1 show}
			ifelse
			}def
		/ct_Type4ShowCharString
			{
			ct_dfDict ct_dfCharCode charstring
			FDArray fdIndex get
			dup/FontMatrix get dup ct_defaultFontMtx ct_matrixeq not
				{ct_1000Mtx matrix concatmatrix concat}
				{pop}
			ifelse
			/Private get
			Adobe_CoolType_Utility/ct_Level2? get not
				{
				ct_dfDict/Private
				3 -1 roll
					{put}
				1183615869 internaldict/superexec get exec
				}
			if
			1183615869 internaldict
			Adobe_CoolType_Utility/ct_Level2? get
				{1 index}
				{3 index/Private get mark 6 1 roll}
			ifelse
			dup/RunInt known
				{/RunInt get}
				{pop/CCRun}
			ifelse
			get exec
			Adobe_CoolType_Utility/ct_Level2? get not
				{cleartomark}
			if
			}bind def
		/ct_BuildCharIncremental
			{
				{
				Adobe_CoolType_Utility/ct_MakeOCF get begin
				ct_BuildCharSetUp
				ct_ShowCharString
				}
			stopped
				{stop}
			if
			end
			end
			end
			end
			}bind def
		/BaseFontNameStr(BF00)def
		/ct_Type1FontTemplate 14 dict
			begin
			/FontType 1 def
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/PaintType 0 def
			currentdict
			end def
		/BaseFontTemplate 11 dict
			begin
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/BuildChar/ct_BuildCharIncremental load def
			ct_Clone?
				{
				/FontType 3 def
				/ct_ShowCharString/ct_Type3ShowCharString load def
				/ct_dfSetCacheProc/ct_CloneSetCacheProc load def
				/ct_SyntheticBold false def
				/ct_StrokeWidth 1 def
				}
				{
				/FontType 4 def
				/Private 1 dict dup/lenIV 4 put def
				/CharStrings 1 dict dup/.notdef<d841272cf18f54fc13>put def
				/PaintType 0 def
				/ct_ShowCharString/ct_Type4ShowCharString load def
				}
			ifelse
			/ct_str1 1 string def
			currentdict
			end def
		/BaseFontDictSize BaseFontTemplate length 5 add def
		/ct_matrixeq
			{
			true 0 1 5
				{
				dup 4 index exch get exch 3 index exch get eq and
				dup not
					{exit}
				if
				}
			for
			exch pop exch pop
			}bind def
		/ct_makeocf
			{
			15 dict
				begin
				exch/WMode exch def
				exch/FontName exch def
				/FontType 0 def
				/FMapType 2 def
			dup/FontMatrix known
				{dup/FontMatrix get/FontMatrix exch def}
				{/FontMatrix matrix def}
			ifelse
				/bfCount 1 index/CIDCount get 256 idiv 1 add
					dup 256 gt{pop 256}if def
				/Encoding
					256 array 0 1 bfCount 1 sub{2 copy dup put pop}for
					bfCount 1 255{2 copy bfCount put pop}for
					def
				/FDepVector bfCount dup 256 lt{1 add}if array def
				BaseFontTemplate BaseFontDictSize dict copy
					begin
					/CIDFont exch def
					CIDFont/FontBBox known
						{CIDFont/FontBBox get/FontBBox exch def}
					if
					CIDFont/CDevProc known
						{CIDFont/CDevProc get/CDevProc exch def}
					if
					currentdict
					end
				BaseFontNameStr 3(0)putinterval
				0 1 bfCount dup 256 eq{1 sub}if
					{
					FDepVector exch
					2 index BaseFontDictSize dict copy
						begin
						dup/CIDFirstByte exch 256 mul def
						FontType 3 eq
							{/ct_FDDict 2 dict def}
						if
						currentdict
						end
					1 index 16
					BaseFontNameStr 2 2 getinterval cvrs pop
					BaseFontNameStr exch definefont
					put
					}
				for
				ct_Clone?
					{/Widths 1 index/CIDFont get/GlyphDirectory get length dict def}
				if
				FontName
				currentdict
				end
			definefont
			ct_Clone?
				{
				gsave
				dup 1000 scalefont setfont
				ct_BuildCharDict
					begin
					/usewidths? false def
					currentfont/Widths get
						begin
						exch/CIDFont get/GlyphDirectory get
							{
							pop
							dup charcode exch 1 index 0 2 index 256 idiv put
							1 index exch 1 exch 256 mod put
							stringwidth 2 array astore def
							}
						forall
						end
					/usewidths? true def
					end
				grestore
				}
				{exch pop}
			ifelse
			}bind def
		currentglobal true setglobal
		/ct_ComposeFont
			{
			ct_UseNativeCapability?
				{				
				2 index/CMap ct_resourcestatus
					{pop pop exch pop}
					{
					/CIDInit/ProcSet findresource
						begin
						12 dict
							begin
							begincmap
							/CMapName 3 index def
							/CMapVersion 1.000 def
							/CMapType 1 def
							exch/WMode exch def
							/CIDSystemInfo 3 dict dup
								begin
								/Registry(Adobe)def
								/Ordering
								CMapName ct_mkocfStr100 cvs
								(Adobe-)search
									{
									pop pop
									(-)search
										{
										dup length string copy
										exch pop exch pop
										}
										{pop(Identity)}
									ifelse
									}
									{pop (Identity)}
								ifelse
								def
								/Supplement 0 def
								end def
							1 begincodespacerange
							<0000><FFFF>
							endcodespacerange
							1 begincidrange
							<0000><FFFF>0
							endcidrange
							endcmap
							CMapName currentdict/CMap defineresource pop
							end
						end
					}
				ifelse
				composefont
				}
				{
				3 2 roll pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
			setglobal
		/ct_MakeIdentity
			{
			ct_UseNativeCapability?
				{
				1 index/CMap ct_resourcestatus
					{pop pop}
					{
					/CIDInit/ProcSet findresource begin
					12 dict begin
					begincmap
					/CMapName 2 index def
					/CMapVersion 1.000 def
					/CMapType 1 def
					/CIDSystemInfo 3 dict dup
						begin
						/Registry(Adobe)def
						/Ordering
						CMapName ct_mkocfStr100 cvs
						(Adobe-)search
							{
							pop pop
							(-)search
								{dup length string copy exch pop exch pop}
								{pop(Identity)}
							ifelse
							}
							{pop(Identity)}
						ifelse
						def
						/Supplement 0 def
						end def
					1 begincodespacerange
					<0000><FFFF>
					endcodespacerange
					1 begincidrange
					<0000><FFFF>0
					endcidrange
					endcmap
					CMapName currentdict/CMap defineresource pop
					end
					end
					}
				ifelse
				composefont
				}
				{
				exch pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
		currentdict readonly pop
		end
	end
%%EndResource
setglobal
%%BeginResource: procset Adobe_CoolType_Utility_T42 1.0 0
%%Copyright: Copyright 1987-2004 Adobe Systems Incorporated.
%%Version: 1.0 0
userdict/ct_T42Dict 15 dict put
ct_T42Dict begin
/Is2015?
{
 version
 cvi
 2015
 ge
}bind def
/AllocGlyphStorage
{
 Is2015?
 {	
	pop
 }
 {
	{string}forall
 }ifelse
}bind def
/Type42DictBegin
{
25 dict begin
 /FontName exch def
 /CharStrings 256 dict 
begin
	 /.notdef 0 def
	 currentdict 
end def
 /Encoding exch def
 /PaintType 0 def
 /FontType 42 def
 /FontMatrix[1 0 0 1 0 0]def
 4 array astore cvx/FontBBox exch def
 /sfnts
}bind def
/Type42DictEnd 
{
 currentdict dup/FontName get exch definefont end
ct_T42Dict exch
dup/FontName get exch put
}bind def
/RD{string currentfile exch readstring pop}executeonly def
/PrepFor2015
{
Is2015?
{		 
	/GlyphDirectory 
	 16
	 dict def
	 sfnts 0 get
	 dup
	 2 index
	(glyx)
	 putinterval
	 2 index 
	(locx)
	 putinterval
	 pop
	 pop
}
{
	 pop
	 pop
}ifelse			
}bind def
/AddT42Char
{
Is2015?
{
	/GlyphDirectory get 
	begin
	def
	end
	pop
	pop
}
{
	/sfnts get
	4 index
	get
	3 index
 2 index
	putinterval
	pop
	pop
	pop
	pop
}ifelse
}bind def
/T0AddT42Mtx2
{
/CIDFont findresource/Metrics2 get begin def end
}bind def
end
%%EndResource
currentglobal true setglobal
%%BeginFile: MMFauxFont.prc
%%Copyright: Copyright 1987-2001 Adobe Systems Incorporated. 
%%All Rights Reserved.
userdict /ct_EuroDict 10 dict put
ct_EuroDict begin
/ct_CopyFont 
{
    { 1 index /FID ne {def} {pop pop} ifelse} forall
} def
/ct_GetGlyphOutline
{
   gsave
   initmatrix newpath
   exch findfont dup 
   length 1 add dict 
   begin 
		ct_CopyFont 
		/Encoding Encoding dup length array copy 
		dup
		4 -1 roll
		0 exch put   
		def
		currentdict
   end
   /ct_EuroFont exch definefont
   1000 scalefont setfont
   0 0 moveto
   [
       <00> stringwidth 
       <00> false charpath
       pathbbox
       [
       {/m cvx} {/l cvx} {/c cvx} {/cp cvx} pathforall
   grestore
   counttomark 8 add
}
def
/ct_MakeGlyphProc
{
   ] cvx
   /ct_PSBuildGlyph cvx
   ] cvx
} def
/ct_PSBuildGlyph 
{ 
 	gsave 
	8 -1 roll pop 
	7 1 roll 
        6 -2 roll ct_FontMatrix transform 6 2 roll
        4 -2 roll ct_FontMatrix transform 4 2 roll
        ct_FontMatrix transform 
	currentdict /PaintType 2 copy known {get 2 eq}{pop pop false} ifelse  
	dup  9 1 roll 
	{  
		currentdict /StrokeWidth 2 copy known  
		{   
			get 2 div   
			0 ct_FontMatrix dtransform pop
			5 1 roll  
			4 -1 roll 4 index sub   
			4 1 roll   
			3 -1 roll 4 index sub  
			3 1 roll   
			exch 4 index add exch  
			4 index add  
			5 -1 roll pop  
		}  
		{	 
			pop pop 
		}  
		ifelse  
	}       
    if  
	setcachedevice  
        ct_FontMatrix concat
        ct_PSPathOps begin 
		exec 
	end 
	{  
		currentdict /StrokeWidth 2 copy known  
			{ get }  
			{ pop pop 0 }  
  	    ifelse  
		setlinewidth stroke  
	}  
	{   
	    fill  
	}  
	ifelse  
    grestore
} def 
/ct_PSPathOps 4 dict dup begin 
	/m {moveto} def 
	/l {lineto} def 
	/c {curveto} def 
	/cp {closepath} def 
end 
def 
/ct_matrix1000 [1000 0 0 1000 0 0] def
/ct_AddGlyphProc  
{
   2 index findfont dup length 4 add dict 
   begin 
	ct_CopyFont 
	/CharStrings CharStrings dup length 1 add dict copy
      begin
         3 1 roll def  
         currentdict 
      end 
      def
      /ct_FontMatrix ct_matrix1000 FontMatrix matrix concatmatrix def
      /ct_PSBuildGlyph /ct_PSBuildGlyph load def
      /ct_PSPathOps /ct_PSPathOps load def
      currentdict
   end
   definefont pop
}
def
systemdict /languagelevel known
{
	/ct_AddGlyphToPrinterFont {
		2 copy
		ct_GetGlyphOutline 3 add -1 roll restore 
		ct_MakeGlyphProc 
		ct_AddGlyphProc
	} def
}
{
	/ct_AddGlyphToPrinterFont {
	    pop pop restore
		Adobe_CTFauxDict /$$$FONTNAME get
		/Euro
		Adobe_CTFauxDict /$$$SUBSTITUTEBASE get
		ct_EuroDict exch get
		ct_AddGlyphProc
	} def
} ifelse
/AdobeSansMM 
{ 
556 0 24 -19 541 703 
	{ 
	541 628 m 
	510 669 442 703 354 703 c 
	201 703 117 607 101 444 c 
	50 444 l 
	25 372 l 
	97 372 l 
	97 301 l 
	49 301 l 
	24 229 l 
	103 229 l 
	124 67 209 -19 350 -19 c 
	435 -19 501 25 509 32 c 
	509 131 l 
	492 105 417 60 343 60 c 
	267 60 204 127 197 229 c 
	406 229 l 
	430 301 l 
	191 301 l 
	191 372 l 
	455 372 l 
	479 444 l 
	194 444 l 
	201 531 245 624 348 624 c 
	433 624 484 583 509 534 c 
	cp 
	556 0 m 
	}
ct_PSBuildGlyph
} def
/AdobeSerifMM 
{ 
500 0 10 -12 484 692 
	{ 
	347 298 m 
	171 298 l 
	170 310 170 322 170 335 c 
	170 362 l 
	362 362 l 
	374 403 l 
	172 403 l 
	184 580 244 642 308 642 c 
	380 642 434 574 457 457 c 
	481 462 l 
	474 691 l 
	449 691 l 
	433 670 429 657 410 657 c 
	394 657 360 692 299 692 c 
	204 692 94 604 73 403 c 
	22 403 l 
	10 362 l 
	70 362 l 
	69 352 69 341 69 330 c 
	69 319 69 308 70 298 c 
	22 298 l 
	10 257 l 
	73 257 l 
	97 57 216 -12 295 -12 c 
	364 -12 427 25 484 123 c 
	458 142 l 
	425 101 384 37 316 37 c 
	256 37 189 84 173 257 c 
	335 257 l 
	cp 
	500 0 m 
	} 
ct_PSBuildGlyph 
} def 
end		
%%EndFile
setglobal
Adobe_CoolType_Core begin /$Oblique SetSubstituteStrategy end
%%BeginResource: procset Adobe_AGM_Image 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
+userdict/Adobe_AGM_Image 71 dict dup begin put
+/Adobe_AGM_Image_Id/Adobe_AGM_Image_1.0_0 def
+/nd{
+	null def
+}bind def
+/AGMIMG_&image nd
+/AGMIMG_&colorimage nd
+/AGMIMG_&imagemask nd
+/AGMIMG_mbuf()def
+/AGMIMG_ybuf()def
+/AGMIMG_kbuf()def
+/AGMIMG_c 0 def
+/AGMIMG_m 0 def
+/AGMIMG_y 0 def
+/AGMIMG_k 0 def
+/AGMIMG_tmp nd
+/AGMIMG_imagestring0 nd
+/AGMIMG_imagestring1 nd
+/AGMIMG_imagestring2 nd
+/AGMIMG_imagestring3 nd
+/AGMIMG_imagestring4 nd
+/AGMIMG_imagestring5 nd
+/AGMIMG_cnt nd
+/AGMIMG_fsave nd
+/AGMIMG_colorAry nd
+/AGMIMG_override nd
+/AGMIMG_name nd
+/AGMIMG_maskSource nd
+/AGMIMG_flushfilters nd
+/invert_image_samples nd
+/knockout_image_samples	nd
+/img nd
+/sepimg nd
+/devnimg nd
+/idximg nd
+/ds
+{
+	Adobe_AGM_Core begin
+	Adobe_AGM_Image begin
+	/AGMIMG_&image systemdict/image get def
+	/AGMIMG_&imagemask systemdict/imagemask get def
+	/colorimage where{
+		pop
+		/AGMIMG_&colorimage/colorimage ldf
+	}if
+	end
+	end
+}def
+/ps
+{
+	Adobe_AGM_Image begin
+	/AGMIMG_ccimage_exists{/customcolorimage where 
+		{
+			pop
+			/Adobe_AGM_OnHost_Seps where
+			{
+			pop false
+			}{
+			/Adobe_AGM_InRip_Seps where
+				{
+				pop false
+				}{
+					true
+				}ifelse
+			}ifelse
+			}{
+			false
+		}ifelse 
+	}bdf
+	level2{
+		/invert_image_samples
+		{
+			Adobe_AGM_Image/AGMIMG_tmp Decode length ddf
+			/Decode[Decode 1 get Decode 0 get]def
+		}def
+		/knockout_image_samples
+		{
+			Operator/imagemask ne{
+				/Decode[1 1]def
+			}if
+		}def
+	}{	
+		/invert_image_samples
+		{
+			{1 exch sub}currenttransfer addprocs settransfer
+		}def
+		/knockout_image_samples
+		{
+			{pop 1}currenttransfer addprocs settransfer
+		}def
+	}ifelse
+	/img/imageormask ldf
+	/sepimg/sep_imageormask ldf
+	/devnimg/devn_imageormask ldf
+	/idximg/indexed_imageormask ldf
+	/_ctype 7 def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
+}def
+/pt
+{
+	end
+}def
+/dt
+{
+}def
+/AGMIMG_flushfilters
+{
+	dup type/arraytype ne
+		{1 array astore}if
+	dup 0 get currentfile ne
+		{dup 0 get flushfile}if
+		{
+		dup type/filetype eq
+			{
+			dup status 1 index currentfile ne and
+				{closefile}
+				{pop}
+			ifelse
+			}{pop}ifelse
+		}forall
+}def
+/AGMIMG_init_common
+{
+	currentdict/T known{/ImageType/T ldf currentdict/T undef}if
+	currentdict/W known{/Width/W ldf currentdict/W undef}if
+	currentdict/H known{/Height/H ldf currentdict/H undef}if
+	currentdict/M known{/ImageMatrix/M ldf currentdict/M undef}if
+	currentdict/BC known{/BitsPerComponent/BC ldf currentdict/BC undef}if
+	currentdict/D known{/Decode/D ldf currentdict/D undef}if
+	currentdict/DS known{/DataSource/DS ldf currentdict/DS undef}if
+	currentdict/O known{
+		/Operator/O load 1 eq{
+			/imagemask
+		}{
+			/O load 2 eq{
+				/image 
+			}{
+				/colorimage
+			}ifelse
+		}ifelse
+		def
+		currentdict/O undef
+	}if
+	currentdict/HSCI known{/HostSepColorImage/HSCI ldf currentdict/HSCI undef}if
+	currentdict/MD known{/MultipleDataSources/MD ldf currentdict/MD undef}if
+	currentdict/I known{/Interpolate/I ldf currentdict/I undef}if
+	currentdict/SI known{/SkipImageProc/SI ldf currentdict/SI undef}if
+	/DataSource load xcheck not{
+		DataSource type/arraytype eq{
+			DataSource 0 get type/filetype eq{
+				/_Filters DataSource def
+				currentdict/MultipleDataSources known not{
+					/DataSource DataSource dup length 1 sub get def 
+				}if
+			}if
+		}if
+		currentdict/MultipleDataSources known not{
+			/MultipleDataSources DataSource type/arraytype eq{
+				DataSource length 1 gt
+			}
+			{false}ifelse def
+		}if
+	}if
+	/NComponents Decode length 2 div def
+	currentdict/SkipImageProc known not{/SkipImageProc{false}def}if
+}bdf
+/imageormask_sys
+{
+	begin
+		AGMIMG_init_common
+		save mark
+		level2{
+			currentdict
+			Operator/imagemask eq{
+				AGMIMG_&imagemask
+			}{
+				use_mask{
+					process_mask AGMIMG_&image
+				}{
+					AGMIMG_&image
+				}ifelse
+			}ifelse
+		}{
+			Width Height
+			Operator/imagemask eq{
+				Decode 0 get 1 eq Decode 1 get 0 eq	and
+				ImageMatrix/DataSource load
+				AGMIMG_&imagemask
+			}{
+				BitsPerComponent ImageMatrix/DataSource load
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+		cleartomark restore
+	end
+}def
+/overprint_plate
+{
+	currentoverprint{
+		0 get dup type/nametype eq{
+			dup/DeviceGray eq{
+				pop AGMCORE_black_plate not
+			}{
+				/DeviceCMYK eq{
+					AGMCORE_is_cmyk_sep not
+				}if
+			}ifelse
+		}{
+			false exch
+			{
+				 AGMOHS_sepink eq or
+			}forall
+			not
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}def
+/process_mask
+{
+	level3{
+		dup begin
+		/ImageType 1 def
+		end
+		4 dict begin
+			/DataDict exch def
+			/ImageType 3 def
+			/InterleaveType 3 def
+			/MaskDict 9 dict begin
+				/ImageType 1 def
+				/Width DataDict dup/MaskWidth known{/MaskWidth}{/Width}ifelse get def
+				/Height DataDict dup/MaskHeight known{/MaskHeight}{/Height}ifelse get def
+				/ImageMatrix[Width 0 0 Height neg 0 Height]def
+				/NComponents 1 def
+				/BitsPerComponent 1 def
+				/Decode DataDict dup/MaskD known{/MaskD}{[1 0]}ifelse get def
+				/DataSource Adobe_AGM_Core/AGMIMG_maskSource get def
+			currentdict end def
+		currentdict end
+	}if
+}def
+/use_mask
+{
+	dup/Mask known	{dup/Mask get}{false}ifelse
+}def
+/imageormask
+{
+	begin
+		AGMIMG_init_common
+		SkipImageProc{
+			currentdict consumeimagedata
+		}
+		{
+			save mark
+			level2 AGMCORE_host_sep not and{
+				currentdict
+				Operator/imagemask eq DeviceN_PS2 not and{
+					imagemask
+				}{
+					AGMCORE_in_rip_sep currentoverprint and currentcolorspace 0 get/DeviceGray eq and{
+						[/Separation/Black/DeviceGray{}]setcolorspace
+						/Decode[Decode 1 get Decode 0 get]def
+					}if
+					use_mask{
+						process_mask image
+					}{
+						DeviceN_NoneName DeviceN_PS2 Indexed_DeviceN level3 not and or or AGMCORE_in_rip_sep and 
+						{
+							Names convert_to_process not{
+								2 dict begin
+								/imageDict xdf
+								/names_index 0 def
+								gsave
+								imageDict write_image_file{
+									Names{
+										dup(None)ne{
+											[/Separation 3 -1 roll/DeviceGray{1 exch sub}]setcolorspace
+											Operator imageDict read_image_file
+											names_index 0 eq{true setoverprint}if
+											/names_index names_index 1 add def
+										}{
+											pop
+										}ifelse
+									}forall
+									close_image_file
+								}if
+								grestore
+								end
+							}{
+								Operator/imagemask eq{
+									imagemask
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/imagemask eq{
+								imagemask
+							}{
+								image
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+			}{
+				Width Height
+				Operator/imagemask eq{
+					Decode 0 get 1 eq Decode 1 get 0 eq	and
+					ImageMatrix/DataSource load
+					/Adobe_AGM_OnHost_Seps where{
+						pop imagemask
+					}{
+						currentgray 1 ne{
+							currentdict imageormask_sys
+						}{
+							currentoverprint not{
+								1 AGMCORE_&setgray
+								currentdict imageormask_sys
+							}{
+								currentdict ignoreimagedata
+							}ifelse				 		
+						}ifelse
+					}ifelse
+				}{
+					BitsPerComponent ImageMatrix 
+					MultipleDataSources{
+						0 1 NComponents 1 sub{
+							DataSource exch get
+						}for
+					}{
+						/DataSource load
+					}ifelse
+					Operator/colorimage eq{
+						AGMCORE_host_sep{
+							MultipleDataSources level2 or NComponents 4 eq and{
+								AGMCORE_is_cmyk_sep{
+									MultipleDataSources{
+										/DataSource DataSource 0 get xcheck
+											{
+											[
+											DataSource 0 get/exec cvx
+											DataSource 1 get/exec cvx
+											DataSource 2 get/exec cvx
+											DataSource 3 get/exec cvx
+											/AGMCORE_get_ink_data cvx
+											]cvx
+											}{
+											DataSource aload pop AGMCORE_get_ink_data
+											}ifelse def
+									}{
+										/DataSource 
+										Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+										/DataSource load
+										filter_cmyk 0()/SubFileDecode filter def
+									}ifelse
+									/Decode[Decode 0 get Decode 1 get]def
+									/MultipleDataSources false def
+									/NComponents 1 def
+									/Operator/image def
+									invert_image_samples
+						 			1 AGMCORE_&setgray
+									currentdict imageormask_sys
+								}{
+									currentoverprint not Operator/imagemask eq and{
+ 			 							1 AGMCORE_&setgray
+ 			 							currentdict imageormask_sys
+ 			 						}{
+ 			 							currentdict ignoreimagedata
+ 			 						}ifelse
+								}ifelse
+							}{	
+								MultipleDataSources NComponents AGMIMG_&colorimage						
+							}ifelse
+						}{
+							true NComponents colorimage
+						}ifelse
+					}{
+						Operator/image eq{
+							AGMCORE_host_sep{
+								/DoImage true def
+								currentdict/HostSepColorImage known{HostSepColorImage not}{false}ifelse
+								{
+									AGMCORE_black_plate not Operator/imagemask ne and{
+										/DoImage false def
+										currentdict ignoreimagedata
+					 				}if
+								}if
+						 		1 AGMCORE_&setgray
+								DoImage
+									{currentdict imageormask_sys}if
+							}{
+								use_mask{
+									process_mask image
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/knockout eq{
+								pop pop pop pop pop
+								currentcolorspace overprint_plate not{
+									knockout_unitsq
+								}if
+							}if
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			cleartomark restore
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
+/sep_imageormask
+{
+ 	/sep_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_avoid_L2_sep_space{
+			/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+		}if
+ 		AGMIMG_ccimage_exists 
+		MappedCSA 0 get/DeviceCMYK eq and
+		currentdict/Components known and 
+		Name()ne and 
+		Name(All)ne and 
+		Operator/image eq and
+		AGMCORE_producing_seps not and
+		level2 not and
+		{
+			Width Height BitsPerComponent ImageMatrix 
+			[
+			/DataSource load/exec cvx
+			{
+				0 1 2 index length 1 sub{
+					1 index exch
+					2 copy get 255 xor put
+				}for
+			}/exec cvx
+			]cvx bind
+			MappedCSA 0 get/DeviceCMYK eq{
+				Components aload pop
+			}{
+				0 0 0 Components aload pop 1 exch sub
+			}ifelse
+			Name findcmykcustomcolor
+			customcolorimage
+		}{
+			AGMCORE_producing_seps not{
+				level2{
+ 					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne AGMCORE_avoid_L2_sep_space not and currentcolorspace 0 get/Separation ne and{
+						[/Separation Name MappedCSA sep_proc_name exch dup 0 get 15 string cvs(/Device)anchorsearch{pop pop 0 get}{pop}ifelse exch load]setcolorspace_opt
+						/sep_tint AGMCORE_gget setcolor
+					}if
+					currentdict imageormask
+				}{
+					currentdict
+					Operator/imagemask eq{
+						imageormask
+					}{
+						sep_imageormask_lev1
+					}ifelse
+				}ifelse
+ 			}{
+				AGMCORE_host_sep{
+					Operator/knockout eq{
+						currentdict/ImageMatrix get concat
+						knockout_unitsq
+					}{
+						currentgray 1 ne{
+ 							AGMCORE_is_cmyk_sep Name(All)ne and{
+ 								level2{
+ 									Name AGMCORE_IsSeparationAProcessColor 
+ 									{
+ 										Operator/imagemask eq{
+ 											//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ 												/sep_tint AGMCORE_gget 1 exch sub AGMCORE_&setcolor
+ 											}if
+ 										}{
+											invert_image_samples
+ 										}ifelse
+	 								}{
+	 									//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+	 										[/Separation Name[/DeviceGray]
+	 										{
+	 											sep_colorspace_proc AGMCORE_get_ink_data
+												1 exch sub
+	 										}bind
+											]AGMCORE_&setcolorspace
+											/sep_tint AGMCORE_gget AGMCORE_&setcolor
+										}if
+ 									}ifelse
+ 									currentdict imageormask_sys
+	 							}{
+	 								currentdict
+									Operator/imagemask eq{
+										imageormask_sys
+									}{
+										sep_image_lev1_sep
+									}ifelse
+	 							}ifelse
+ 							}{
+ 								Operator/imagemask ne{
+									invert_image_samples
+ 								}if
+		 						currentdict imageormask_sys
+ 							}ifelse
+ 						}{
+ 							currentoverprint not Name(All)eq or Operator/imagemask eq and{
+								currentdict imageormask_sys 
+								}{
+								currentoverprint not
+									{
+ 									gsave 
+ 									knockout_unitsq
+ 									grestore
+									}if
+								currentdict consumeimagedata 
+		 					}ifelse
+ 						}ifelse
+		 			}ifelse
+ 				}{
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+						currentcolorspace 0 get/Separation ne{
+							[/Separation Name MappedCSA sep_proc_name exch 0 get exch load]setcolorspace_opt
+							/sep_tint AGMCORE_gget setcolor
+						}if
+					}if
+					currentoverprint 
+					MappedCSA 0 get/DeviceCMYK eq and 
+					Name AGMCORE_IsSeparationAProcessColor not and
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{Name inRip_spot_has_ink not and}{false}ifelse 
+					Name(All)ne and{
+						imageormask_l2_overprint
+					}{
+						currentdict imageormask
+ 					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+}def
+/colorSpaceElemCnt
+{
+	mark currentcolor counttomark dup 2 add 1 roll cleartomark
+}bdf
+/devn_sep_datasource
+{
+	1 dict begin
+	/dataSource xdf
+	[
+		0 1 dataSource length 1 sub{
+			dup currentdict/dataSource get/exch cvx/get cvx/exec cvx
+			/exch cvx names_index/ne cvx[/pop cvx]cvx/if cvx
+		}for
+	]cvx bind
+	end
+}bdf		
+/devn_alt_datasource
+{
+	11 dict begin
+	/convProc xdf
+	/origcolorSpaceElemCnt xdf
+	/origMultipleDataSources xdf
+	/origBitsPerComponent xdf
+	/origDecode xdf
+	/origDataSource xdf
+	/dsCnt origMultipleDataSources{origDataSource length}{1}ifelse def
+	/DataSource origMultipleDataSources
+		{
+			[
+			BitsPerComponent 8 idiv origDecode length 2 idiv mul string
+			0 1 origDecode length 2 idiv 1 sub
+				{
+				dup 7 mul 1 add index exch dup BitsPerComponent 8 idiv mul exch
+				origDataSource exch get 0()/SubFileDecode filter
+				BitsPerComponent 8 idiv string/readstring cvx/pop cvx/putinterval cvx
+				}for 
+			]bind cvx
+		}{origDataSource}ifelse 0()/SubFileDecode filter def		
+	[
+		origcolorSpaceElemCnt string
+		0 2 origDecode length 2 sub
+			{
+			dup origDecode exch get dup 3 -1 roll 1 add origDecode exch get exch sub 2 BitsPerComponent exp 1 sub div
+			1 BitsPerComponent 8 idiv{DataSource/read cvx/not cvx{0}/if cvx/mul cvx}repeat/mul cvx/add cvx
+			}for
+		/convProc load/exec cvx
+		origcolorSpaceElemCnt 1 sub -1 0
+			{
+			/dup cvx 2/add cvx/index cvx
+			3 1/roll cvx/exch cvx 255/mul cvx/cvi cvx/put cvx
+			}for
+	]bind cvx 0()/SubFileDecode filter
+	end
+}bdf
+/devn_imageormask
+{
+ 	/devicen_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	2 dict begin
+	dup
+	/srcDataStrs[3 -1 roll begin
+		AGMIMG_init_common
+		currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+		{
+			Width Decode length 2 div mul cvi
+			{
+				dup 65535 gt{1 add 2 div cvi}{exit}ifelse
+			}loop
+			string
+		}repeat
+		end]def
+	/dstDataStr srcDataStrs 0 get length string def
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_producing_seps not{
+			level3 not{
+				Operator/imagemask ne{
+					/DataSource[[
+						DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+						colorSpaceElemCnt/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+						devn_alt_datasource 1/string cvx/readstring cvx/pop cvx]cvx colorSpaceElemCnt 1 sub{dup}repeat]def				
+					/MultipleDataSources true def
+					/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				}if
+			}if
+			currentdict imageormask
+ 		}{
+			AGMCORE_host_sep{
+				Names convert_to_process{
+					CSA get_csa_by_name 0 get/DeviceCMYK eq{
+						/DataSource
+							Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+							DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+							4/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+							devn_alt_datasource
+						filter_cmyk 0()/SubFileDecode filter def
+						/MultipleDataSources false def
+						/Decode[1 0]def
+						/DeviceGray setcolorspace
+			 			currentdict imageormask_sys
+ 					}{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate{
+							/DataSource
+								DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+								CSA get_csa_by_name 0 get/DeviceRGB eq{3}{1}ifelse/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+								devn_alt_datasource
+							/MultipleDataSources false def
+							/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				 			currentdict imageormask_sys
+				 		}{
+	 						gsave 
+	 						knockout_unitsq
+	 						grestore
+							currentdict consumeimagedata 
+						}ifelse
+ 					}ifelse
+				}
+				{	
+					/devicen_colorspace_dict AGMCORE_gget/names_index known{
+	 					Operator/imagemask ne{
+	 						MultipleDataSources{
+		 						/DataSource[DataSource devn_sep_datasource/exec cvx]cvx def
+								/MultipleDataSources false def
+	 						}{
+								/DataSource/DataSource load dstDataStr srcDataStrs 0 get filter_devn def
+	 						}ifelse
+							invert_image_samples
+	 					}if
+			 			currentdict imageormask_sys
+	 				}{
+	 					currentoverprint not Operator/imagemask eq and{
+							currentdict imageormask_sys 
+							}{
+							currentoverprint not
+								{
+	 							gsave 
+	 							knockout_unitsq
+	 							grestore
+								}if
+							currentdict consumeimagedata 
+			 			}ifelse
+	 				}ifelse
+	 			}ifelse
+ 			}{
+				currentdict imageormask
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+	end
+}def
+/imageormask_l2_overprint
+{
+	currentdict
+	currentcmykcolor add add add 0 eq{
+		currentdict consumeimagedata
+	}{
+		level3{			
+			currentcmykcolor 
+			/AGMIMG_k xdf 
+			/AGMIMG_y xdf 
+			/AGMIMG_m xdf 
+			/AGMIMG_c xdf
+			Operator/imagemask eq{
+				[/DeviceN[
+				AGMIMG_c 0 ne{/Cyan}if
+				AGMIMG_m 0 ne{/Magenta}if
+				AGMIMG_y 0 ne{/Yellow}if
+				AGMIMG_k 0 ne{/Black}if
+				]/DeviceCMYK{}]setcolorspace
+				AGMIMG_c 0 ne{AGMIMG_c}if
+				AGMIMG_m 0 ne{AGMIMG_m}if
+				AGMIMG_y 0 ne{AGMIMG_y}if
+				AGMIMG_k 0 ne{AGMIMG_k}if
+				setcolor			
+			}{	
+				/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+				[/Indexed 				
+					[
+						/DeviceN[
+							AGMIMG_c 0 ne{/Cyan}if
+							AGMIMG_m 0 ne{/Magenta}if
+							AGMIMG_y 0 ne{/Yellow}if
+							AGMIMG_k 0 ne{/Black}if
+						]
+						/DeviceCMYK{
+							AGMIMG_k 0 eq{0}if
+							AGMIMG_y 0 eq{0 exch}if
+							AGMIMG_m 0 eq{0 3 1 roll}if
+							AGMIMG_c 0 eq{0 4 1 roll}if						
+						}
+					]
+					255
+					{
+						255 div 
+						mark exch
+						dup	dup dup
+						AGMIMG_k 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 1 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_y 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 2 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_m 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 3 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_c 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						counttomark 1 add -1 roll pop
+					}
+				]setcolorspace
+			}ifelse
+			imageormask_sys
+		}{
+	write_image_file{
+		currentcmykcolor
+		0 ne{
+			[/Separation/Black/DeviceGray{}]setcolorspace
+			gsave
+			/Black
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 1 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Yellow/DeviceGray{}]setcolorspace
+			gsave
+			/Yellow
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 2 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Magenta/DeviceGray{}]setcolorspace
+			gsave
+			/Magenta
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 3 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Cyan/DeviceGray{}]setcolorspace
+			gsave
+			/Cyan 
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+				close_image_file
+			}{
+				imageormask
+			}ifelse
+		}ifelse
+	}ifelse
+}def
+/indexed_imageormask
+{
+	begin
+		AGMIMG_init_common
+		save mark 
+ 		currentdict
+ 		AGMCORE_host_sep{
+			Operator/knockout eq{
+				/indexed_colorspace_dict AGMCORE_gget dup/CSA known{
+					/CSA get get_csa_by_name
+				}{
+					/Names get
+				}ifelse
+				overprint_plate not{
+					knockout_unitsq
+				}if
+			}{
+				Indexed_DeviceN{
+					/devicen_colorspace_dict AGMCORE_gget dup/names_index known exch/Names get convert_to_process or{
+			 			indexed_image_lev2_sep
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}{
+		 			AGMCORE_is_cmyk_sep{
+						Operator/imagemask eq{
+							imageormask_sys
+						}{
+							level2{
+								indexed_image_lev2_sep
+							}{
+								indexed_image_lev1_sep
+							}ifelse
+						}ifelse
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}ifelse
+			}ifelse
+ 		}{
+			level2{
+				Indexed_DeviceN{
+					/indexed_colorspace_dict AGMCORE_gget begin
+				}{
+					/indexed_colorspace_dict AGMCORE_gget dup null ne
+					{
+						begin
+						currentdict/CSDBase known{CSDBase/CSD get_res/MappedCSA get}{CSA}ifelse
+						get_csa_by_name 0 get/DeviceCMYK eq ps_level 3 ge and ps_version 3015.007 lt and
+						AGMCORE_in_rip_sep and{
+							[/Indexed[/DeviceN[/Cyan/Magenta/Yellow/Black]/DeviceCMYK{}]HiVal Lookup]
+							setcolorspace
+						}if
+						end
+					}
+					{pop}ifelse
+				}ifelse
+				imageormask
+				Indexed_DeviceN{
+					end
+				}if
+			}{
+				Operator/imagemask eq{
+					imageormask
+				}{
+					indexed_imageormask_lev1
+				}ifelse
+			}ifelse
+ 		}ifelse
+		cleartomark restore
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
+/indexed_image_lev2_sep
+{
+	/indexed_colorspace_dict AGMCORE_gget begin
+	begin
+		Indexed_DeviceN not{
+			currentcolorspace 
+			dup 1/DeviceGray put
+			dup 3
+			currentcolorspace 2 get 1 add string
+			0 1 2 3 AGMCORE_get_ink_data 4 currentcolorspace 3 get length 1 sub
+			{
+			dup 4 idiv exch currentcolorspace 3 get exch get 255 exch sub 2 index 3 1 roll put
+			}for 
+			put	setcolorspace
+		}if
+		currentdict 
+		Operator/imagemask eq{
+			AGMIMG_&imagemask
+		}{
+			use_mask{
+				process_mask AGMIMG_&image
+			}{
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+	end end
+}def
+ /OPIimage
+ {
+ 	dup type/dicttype ne{
+ 		10 dict begin
+ 			/DataSource xdf
+ 			/ImageMatrix xdf
+ 			/BitsPerComponent xdf
+ 			/Height xdf
+ 			/Width xdf
+ 			/ImageType 1 def
+ 			/Decode[0 1 def]
+ 			currentdict
+ 		end
+ 	}if
+ 	dup begin
+ 		/NComponents 1 cdndf
+ 		/MultipleDataSources false cdndf
+ 		/SkipImageProc{false}cdndf
+ 		/Decode[
+ 				0 
+ 				currentcolorspace 0 get/Indexed eq{
+ 					2 BitsPerComponent exp 1 sub
+ 				}{
+ 					1
+ 				}ifelse
+ 		]cdndf
+ 		/Operator/image cdndf
+ 	end
+ 	/sep_colorspace_dict AGMCORE_gget null eq{
+ 		imageormask
+ 	}{
+ 		gsave
+ 		dup begin invert_image_samples end
+ 		sep_imageormask
+ 		grestore
+ 	}ifelse
+ }def
+/cachemask_level2
+{
+	3 dict begin
+	/LZWEncode filter/WriteFilter xdf
+	/readBuffer 256 string def
+	/ReadFilter
+		currentfile
+		0(%EndMask)/SubFileDecode filter
+		/ASCII85Decode filter
+		/RunLengthDecode filter
+	def
+	{
+		ReadFilter readBuffer readstring exch
+		WriteFilter exch writestring
+		not{exit}if
+	}loop
+	WriteFilter closefile
+	end
+}def
+/spot_alias
+{
+	/mapto_sep_imageormask 
+	{
+		dup type/dicttype ne{
+			12 dict begin
+				/ImageType 1 def
+				/DataSource xdf
+				/ImageMatrix xdf
+				/BitsPerComponent xdf
+				/Height xdf
+				/Width xdf
+				/MultipleDataSources false def
+		}{
+			begin
+		}ifelse
+				/Decode[/customcolor_tint AGMCORE_gget 0]def
+				/Operator/image def
+				/SkipImageProc{false}def
+				currentdict 
+			end
+		sep_imageormask
+	}bdf
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_colorAry xddf
+		/customcolor_tint AGMCORE_gget
+		<<
+			/Name AGMIMG_colorAry 4 get
+			/CSA[/DeviceCMYK]
+			/TintMethod/Subtractive
+			/TintProc null
+			/MappedCSA null
+			/NComponents 4 
+			/Components[AGMIMG_colorAry aload pop pop]
+		>>
+		setsepcolorspace
+		mapto_sep_imageormask
+	}ndf
+	Adobe_AGM_Image/AGMIMG_&customcolorimage/customcolorimage load put
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_override false put
+		current_spot_alias{dup 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/customcolor_tint AGMCORE_gget exch setsepcolorspace
+			pop
+			mapto_sep_imageormask
+			true set_spot_alias
+		}{
+			//Adobe_AGM_Image/AGMIMG_&customcolorimage get exec
+		}ifelse			
+	}bdf
+}def
+/snap_to_device
+{
+	6 dict begin
+	matrix currentmatrix
+	dup 0 get 0 eq 1 index 3 get 0 eq and
+	1 index 1 get 0 eq 2 index 2 get 0 eq and or exch pop
+	{
+		1 1 dtransform 0 gt exch 0 gt/AGMIMG_xSign? exch def/AGMIMG_ySign? exch def
+		0 0 transform
+		AGMIMG_ySign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		AGMIMG_xSign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		itransform/AGMIMG_llY exch def/AGMIMG_llX exch def
+		1 1 transform
+		AGMIMG_ySign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		AGMIMG_xSign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		itransform/AGMIMG_urY exch def/AGMIMG_urX exch def			
+		[AGMIMG_urX AGMIMG_llX sub 0 0 AGMIMG_urY AGMIMG_llY sub AGMIMG_llX AGMIMG_llY]concat
+	}{
+	}ifelse
+	end
+}def
+level2 not{
+	/colorbuf
+	{
+		0 1 2 index length 1 sub{
+			dup 2 index exch get 
+			255 exch sub 
+			2 index 
+			3 1 roll 
+			put
+		}for
+	}def
+	/tint_image_to_color
+	{
+		begin
+			Width Height BitsPerComponent ImageMatrix 
+			/DataSource load
+		end
+		Adobe_AGM_Image begin
+			/AGMIMG_mbuf 0 string def
+			/AGMIMG_ybuf 0 string def
+			/AGMIMG_kbuf 0 string def
+			{
+				colorbuf dup length AGMIMG_mbuf length ne
+					{
+					dup length dup dup
+					/AGMIMG_mbuf exch string def
+					/AGMIMG_ybuf exch string def
+					/AGMIMG_kbuf exch string def
+					}if
+				dup AGMIMG_mbuf copy AGMIMG_ybuf copy AGMIMG_kbuf copy pop
+			}
+			addprocs
+			{AGMIMG_mbuf}{AGMIMG_ybuf}{AGMIMG_kbuf}true 4 colorimage	
+		end
+	}def			
+	/sep_imageormask_lev1
+	{
+		begin
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{
+					255 mul round cvi GrayLookup exch get
+				}currenttransfer addprocs settransfer
+				currentdict imageormask
+			}{
+				/sep_colorspace_dict AGMCORE_gget/Components known{
+					MappedCSA 0 get/DeviceCMYK eq{
+						Components aload pop
+					}{
+						0 0 0 Components aload pop 1 exch sub
+					}ifelse
+					Adobe_AGM_Image/AGMIMG_k xddf 
+					Adobe_AGM_Image/AGMIMG_y xddf 
+					Adobe_AGM_Image/AGMIMG_m xddf 
+					Adobe_AGM_Image/AGMIMG_c xddf 
+					AGMIMG_y 0.0 eq AGMIMG_m 0.0 eq and AGMIMG_c 0.0 eq and{
+						{AGMIMG_k mul 1 exch sub}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						currentcolortransfer
+						{AGMIMG_k mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_y mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_m mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_c mul 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer
+						currentdict tint_image_to_color
+					}ifelse
+				}{
+					MappedCSA 0 get/DeviceGray eq{
+						{255 mul round cvi ColorLookup exch get 0 get}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						MappedCSA 0 get/DeviceCMYK eq{
+							currentcolortransfer
+							{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}{
+							currentcolortransfer
+							{pop 1}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+		end
+	}def
+	/sep_image_lev1_sep
+	{
+		begin
+			/sep_colorspace_dict AGMCORE_gget/Components known{
+				Components aload pop
+				Adobe_AGM_Image/AGMIMG_k xddf 
+				Adobe_AGM_Image/AGMIMG_y xddf 
+				Adobe_AGM_Image/AGMIMG_m xddf 
+				Adobe_AGM_Image/AGMIMG_c xddf 
+				{AGMIMG_c mul 1 exch sub}
+				{AGMIMG_m mul 1 exch sub}
+				{AGMIMG_y mul 1 exch sub}
+				{AGMIMG_k mul 1 exch sub}
+			}{
+				{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}
+			}ifelse
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end
+	}def
+	/indexed_imageormask_lev1
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			currentdict
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{HiVal mul round cvi GrayLookup exch get HiVal div}currenttransfer addprocs settransfer
+				imageormask
+			}{
+				MappedCSA 0 get/DeviceGray eq{
+					{HiVal mul round cvi Lookup exch get HiVal div}currenttransfer addprocs settransfer
+					imageormask
+				}{
+					MappedCSA 0 get/DeviceCMYK eq{
+						currentcolortransfer
+						{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}{
+						currentcolortransfer
+						{pop 1}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 2 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 1 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 		Lookup exch get HiVal div}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}ifelse
+				}ifelse
+			}ifelse
+		end end
+	}def
+	/indexed_image_lev1_sep
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end end
+	}def
+}if
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndProlog
+%%BeginSetup
+Adobe_AGM_Utils begin
+2 2010 Adobe_AGM_Core/ds gx
+Adobe_CoolType_Core/ds get exec
+Adobe_AGM_Image/ds gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndSetup
+%%Page: (Page 1) 1
+%%EndPageComments
+%%BeginPageSetup
+%ADOBeginClientInjection: PageSetup Start "AI11EPS"
+%AI12_RMC_Transparency: Balance=75 RasterRes=300 GradRes=150 Text=0 Stroke=1 Clip=1 OP=0
+%ADOEndClientInjection: PageSetup Start "AI11EPS"
+Adobe_AGM_Utils begin
+Adobe_AGM_Core/ps gx
+Adobe_AGM_Utils/capture_cpd gx
+Adobe_CoolType_Core/ps get exec
+Adobe_AGM_Image/ps gx
+%ADOBeginClientInjection: PageSetup End "AI11EPS"
+/currentdistillerparams where
+{pop currentdistillerparams /CoreDistVersion get 5000 lt} {true} ifelse
+{ userdict /AI11_PDFMark5 /cleartomark load put
+userdict /AI11_ReadMetadata_PDFMark5 {flushfile cleartomark } bind put}
+{ userdict /AI11_PDFMark5 /pdfmark load put
+userdict /AI11_ReadMetadata_PDFMark5 {/PUT pdfmark} bind put } ifelse
+[/NamespacePush AI11_PDFMark5
+[/_objdef {ai_metadata_stream_123} /type /stream /OBJ AI11_PDFMark5
+[{ai_metadata_stream_123}
+currentfile 0 (%  &&end XMP packet marker&&)
+/SubFileDecode filter AI11_ReadMetadata_PDFMark5
+<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 4.1-c036 46.277092, Fri Feb 23 2007 14:16:18        ">
+   <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+      <rdf:Description rdf:about=""
+            xmlns:dc="http://purl.org/dc/elements/1.1/">
+         <dc:format>application/postscript</dc:format>
+         <dc:title>
+            <rdf:Alt>
+               <rdf:li xml:lang="x-default">Web</rdf:li>
+            </rdf:Alt>
+         </dc:title>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xap="http://ns.adobe.com/xap/1.0/"
+            xmlns:xapGImg="http://ns.adobe.com/xap/1.0/g/img/">
+         <xap:CreatorTool>Adobe Illustrator CS3</xap:CreatorTool>
+         <xap:CreateDate>2017-04-03T09:56:19+02:00</xap:CreateDate>
+         <xap:ModifyDate>2017-04-03T10:02:52+02:00</xap:ModifyDate>
+         <xap:MetadataDate>2017-04-03T10:02:52+02:00</xap:MetadataDate>
+         <xap:Thumbnails>
+            <rdf:Alt>
+               <rdf:li rdf:parseType="Resource">
+                  <xapGImg:width>256</xapGImg:width>
+                  <xapGImg:height>76</xapGImg:height>
+                  <xapGImg:format>JPEG</xapGImg:format>
+                  <xapGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA&#xA;AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK&#xA;DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f&#xA;Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgATAEAAwER&#xA;AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA&#xA;AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB&#xA;UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE&#xA;1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ&#xA;qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy&#xA;obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp&#xA;0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo&#xA;+DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FWGebvzi/LjynK9&#xA;vrOtQreps1jAGuJwf5XSIPwP+vTFXnl1/wA5gfl9HKUt9L1SdB/uwpAgPyBmJp88Vdbf85gfl88g&#xA;W40rVYUP7apbvT5j1lNPlirNfLn5+flTr8iQ22uxWty5AEF8rWpqegDyhYyT4BsVegKyuodCGVgC&#xA;rA1BB6EHFW8VdirsVdirsVSDzh588peT7EXnmHUYrKN6+jEavNKR2jiQM7e9BQd8VeRX/wDzmF5K&#xA;imKWOjajcxAkepJ6MNfcAPIfvpiqceX/APnKv8r9TmWG+N5oztt6l3CHir2+OBpSPmVAxV61pmqa&#xA;ZqllHfaZdw3tlMKxXNvIssbfJkJBxVFYq7FXYq7FXYq7FXYq7FXzR+fv/OQfmXSPM1x5V8pzrYiw&#xA;CrqGohEkleZ1DGOPmGVFQNQmleXhTdV4ddeefzL8xSmCbW9V1Fnr/oyTzupr1pEh4/hiqBeLzl5S&#xA;1S2vZYr/AEPU2Hr2s0qy20zLy+2vIKzKWHyOKvuX8oPOV15x/LzSNdvQBfTo8V5xFFMsEjRM4Hbn&#xA;w5U7VxVlGp6nY6Xp1zqV/MtvZWcTz3MzdEjjBZjt7DFXxx+Zf/OSnnXzJfzW/l+6l0LQlJWBLZuF&#xA;1Io/blmX41J/lQgDpv1xVgkXm78yNKkj1FNY1e0aYhkuWnuFWQj3ZqP+OKvof8hv+cib3zDqMPlX&#xA;ze6Nqk/w6bqiqsYnYCvpTKoChyPssoAPTr1VfQFzc29rby3NzKkFvAjSTTSMFREUVZmY7AAdTir5&#xA;G/OP/nJPWdfuZ9G8ozyaboKExyX0ZKXN1Q0JDDeOI9gPiI+1144qw7yH+RP5iedYlvbKzWz0yTdd&#xA;Sv2MUbg94wA0knzVae+KvWdP/wCcNIfTRtR80MZDQyR29oAo8QHeU1+fH6MVbvf+cM7coxsvNTq9&#xA;TxWazDAjsCyzLT50xV575u/5xj/M3QIpLm0gh1yzjBZnsGJmCjxgcI5PsnLFVT/nHz81/MegectJ&#xA;8vTXUt15f1W4jsTYyMXWGSdgkUkNa8KSMOQGxFe9CFX2lirsVdirsVYb+Z/5peXvy/0M32pP6t/O&#xA;GXTdOQ/vJ5FH/CopI5v29zQFV8XgeefzV88kktqGt6gxJJPCGCFPvEcUYP8Atsd1Xt+j/wDOG9mb&#xA;NG1fzJIbpgC6WtuojU0FVDSMxbfvRflirH/N3/OInmjT7Z7ny3qkOtcAW+pyp9VnI8EJaSNj82XF&#xA;Xm3kjz/52/LLzJJ9W9W3MUvDVdFuQyRycTRkkjYfA47OBUfLYqvt/wAk+cdI84+WrPX9Kcm2ul+O&#xA;JvtxSrtJE4H7SNt79RscVT3FXYq7FXYq7FXYq7FXwP8Anj/5NrzR/wAxrf8AEVxV9w+ULa3t/K+k&#xA;pBEkKG0gYrGoUVMS1NB3xV8w/wDOYn/KcaL/ANswf9REuKvYP+cYf/JO6V/xmu/+oh8VQ/8AzlNq&#xA;VzZ/lLcxQEqt/d21tOQafu+RlI+kxAYq8e/5xO8o6LrXm7UtT1OBLp9GgjksoZQGUTTOQJeJ2JQJ&#xA;tXoTXrTFX1pqmlabq1hNp+p2sV5Y3C8JreZQ6MPcHFX59eZbU+U/zB1O20uU10LVJksZSeTD6rcH&#xA;0iT/ADDgK++KvoD/AJyy/MmW2t7byNps3F7pBda0yHf0q/uYCR/MQXYeAXscVYz/AM45/kxY67H/&#xA;AIv8x263GmRyMml2Eg5RzvGeLyyKRRo0b4Qvdga7DdV9WQ3IVQhUBVFBxFAAPbFWFeaPz1/K7y1d&#xA;y2Oo60j38JKy2trHJcOrDqrGNWRW7UZgcVSrTf8AnJn8oL2YRNqstmzGitc20yr9LIrqPpxVHfmr&#xA;+ZOkaZ+VOr6/ouowXhuIvqWn3NrKsg+sXH7sFWQkco1YyU/ycVfEGi6veaNq9nq1kVF7YTJcWrOo&#xA;dVliYMjcTseLAHfFWWan+eP5takSbjzPex8u1qy2o+j6usXjiqWHzj+Zd1+/Oua1Pz/3b9aunrTb&#xA;7XI16YqmGkfnP+a2jTA2/mW/cxmhiu5DdKKbceFx6gHTFXu/5Xf85VWGqTJpfneOLTLgj91q8NRb&#xA;OQOkqHkYyf5gSpP8uKvBPN3mLX/zN/MR7lFaS61S5S00u0J2ihLcIY/BQAasfGrYq+tPyo/J3QPI&#xA;liy2rNca1dRKmoao32jTfhEhqqIG3p1O3KtBRVmd3oWqzBvR8w39qT9n047BgP8AkZaufxxVBwWn&#xA;nywkJ+vWet2xNfTuIjZXAHtLD6sTfL0l+eKvKP8AnJr8t/8AEHlVPONjp0kOu6UAL6EBXkksqnkW&#xA;ETOG9EnkCDsvKvsq82/5xf8AzNby75p/wvfP/uJ8wSqsLE7RXtOMZHtNtGffj4HFXrv5s/8AOSmg&#xA;+ULibR9CiTWdei+GZixFpbv/ACyMu8jDuiHbuw
O2KvnLzB+ef5r69Mxn8wXVtHIaLbWDfVEAP7I9&#xA;Hi7D/WY4qlTeZ/zOijE7atraR7lZTcXYX4dzRuXbFU+8uf8AOQf5saFIpTXJNRhX7VvqQ+tK3zd/&#xA;333OMVei6v8A85h6zLoMEWlaJDa644Iu7qZzLbodxWGMcWJI3+Nvh6fFiryfVvzc/NPXbkm58x6g&#xA;zyHaC1la3jPfaK39NdvliqEk82/mdZcZ5NZ1u2oA6ytc3cezbAhiw2OKpBqWp6hqd9Nf6hcPdXtw&#xA;3Oe4lYs7tSlWY9Tir9FfK/8AyjWk/wDMFb/8mlxV8vf85gqz+e9ERAWZtNAVQKkk3ElABiryLS/O&#xA;Hn3y3HHBpusalpUAJZLeKeaGImpJPp1CHdvDFWSax+eXnTzD5Pu/K/mV49VtZ+DwXjIsVzDJE4ZD&#xA;yjAVxsQ3JakH7WKpN+W35ja55B8xLrOlBJQ6GG8s5a+nNCSCVNNwQRVW7H2qMVey63/zmNez6VJD&#xA;o/l5bLU5E4rdT3HrxxMR9pYxHHzIPSpp4jtirxXyJYW/mT8xNFs9YuCItU1GJbyZ6s0jSygsu37U&#xA;rHjXxNcVRfnrUL7zp+amqSwMJZ9V1M2tjyJpw9QW9utd9ggUYq+4dD0ey0XRrHSLFeFpYQR28APX&#xA;jGoUE+JNKk+OKvJ/+cmPzHv/ACz5ctdE0mVoNR1z1BLcoaPFaxcQ/Eg1VpC4UHwDd6Yq+RcVdiq4&#xA;SSCMxhiI2IZkrsStQCR4jkcVeh/kB5e0XzB+aGm6ZrNol7YSR3DyW0leLNHCzrWhHRhir7X0ryl5&#xA;V0hVGlaPZWAX7P1a3iiOxr1RR33xVNcVSjzD5Q8r+Y7ZrbXNLttQiYUrNGrOvuj/AG0PupBxV8c/&#xA;n1+Tg/L7WYLnTWeby7qfL6o0nxPDKm7wO3fY8kY9RUfsklVlf/OK35f2Go3915xvHZpNJm+rafAp&#xA;IAleKskj060SQBR7mvbFX04+k/WxKxvLmDmOI9CT0+PuKDFUBceSpJVonmLWYDv8UdyhO/8ArxON&#xA;sVWw+X/Odm1bXzQbuMVpFqdlDNt4c7Y2bfTviqdWX6WdGi1OG248SC8Duweu28ToOII/y2xV8Efm&#xA;b5buvKH5i6zpkSG0W1vHn00xtQrbyN6tsyMtNxGy9OhxVOvyZ/JvU/zF1aVpJGs9AsmX9IX4ALlm&#xA;3EMNdjIw6k7KNz2BVfY3lD8uPJXlG1SDQdKgtpEFGuyoe5fxLzNVzXwrTwAxVkmKvnz/AJy3tvLN&#xA;l5TsJF021XXtRvQqXyxIs4hiRml/eABj8TIKHxxV57+RP/OP7edI18w+YjJb+W1crbQRnjLdsho1&#xA;G/YiBFCw3J2FOuKvrDy95V8t+XLNbPQtNt9Ot1FCsCBS1O7v9pz7sScVTRlVlKsAVIoQdwQcVfJP&#xA;/OW1v5asfMuj2Ol6dbWeoNbSXeozW8SxtIJX4RepwAqR6TnfffFX1J5X/wCUa0n/AJgrf/k0uKo/&#xA;6tb+v9Y9JPrHHh63Ec+NSePLrTfpirri2trmFoLmJJ4XFHikUOrD3Vqg4q+fvz4/5x60GbQ7zzP5&#xA;Rs1sNRsUa4vdOgFIZ4VHKRo4xskiLvRNm8K4q81/5xWsbK8/M2WG8t47mL9G3DenMiyLUSRUNGBF&#xA;cVfXn+F/LX/Vpsv+keL/AJpxV8D+VQF/MrRwuwGs21KbUpdLiqp+VkSXH5meWBNMIwdVtJDI+/Jk&#xA;mVwu5G7sOP04q+9sVfJP/OWTu35kWKkEKmkwhfA1uLgkjFXiuKsy/Kn8u/8AH3md9D/SH6N4W0lz&#xA;9Y9H16+myLx4c4uvPrXFXr3/AEJ3/wB/d/3Lv+zrFWMfkLo/6F/Pq50j1frDaWdRtFuOPDmYC0XM&#xA;LVuPIDpU4q+vFuZR1ofnir4b82/md+ZVn5v1aGHzVqqpZ31xDCi3cyxhY5mVR6YbgdvEYq+pP+cf&#xA;/wAwtS86eQEvdZkEmqWNzJY3NxQJ6vBEkWQhaLUpKAadxiqF/wCcm9Kh1D8oNUmI5yadNbXcNKGj&#xA;essLH/kXM2KpB/ziz5etbD8um1dGL3Os3MjzbmipbM0KJxrQbhmrSpr7DFXrJ8s6NemRrqKSUtQM&#xA;DPPQgdKgPTFUj1Ty7+VVjIx1K5t7J+jLNqUsFKUH2TOoGKoe3l/KOB/9E80RQM1AqxeYJwtaUFI/&#xA;rRT/AIXFWT6PqGhu4jstcS/BHFYvrMM5LePIVkJ/2WKvmH/nMKGzXz3o8sZH1uTTFE6jrwWeX02P&#xA;zqw+jFX0V+VHlC28p+QNH0eJAk6wJPfMOr3MwDzEnvRjxHsAMVYd/wA5DfnDe+Q9ItNP0Xj+n9VD&#xA;tFO4Di3hjIDS8GqGZiaJUU2JPShVfLEn5s/mfJctct5r1YSMeRVbydY6+0asEA9gMVRF15o87fmZ&#xA;rnl3Qtb1Fr6b6wtlYyuiKyfW5ERmYoq8vsgktvQYq+89J0ux0nS7TS7CMQ2VlClvbxD9mONQqj7h&#xA;irwD/nI7889d8vasPKPle4+p3kcSy6pfqFMq+qvKOGItXgeBDM1K7ihG+KvBLb82/wA0Le4FxH5r&#xA;1VpAa8ZLuaVPH+7kZk/DFUq81+bvMHmzV21jXrn63qDRpE03BI6rGvFfhjCqPoGKv0G8r/8AKNaT&#xA;/wAwVv8A8mlxV89/85UefPOWg+Z9K07RNZutMs57D1pY7SQwlpDLInIulH+yo74qgv8AnGn84PN2&#xA;oecR5V1/UptUtL+GV7OS7cyzxzwJ6lBK5LsrRo1VJO9CKb1VfUbojoyOoZGBVlYVBB2IIOKvkr/n&#xA;HDTo9M/PbWtNj/u7KDULdK9aRXKIP+I4q+tsVfnp5X/8mXpH/bZt/wDqKXFXab6flT8y7X62Cseg&#xA;61H9YB6hbO6HP6f3eKvvoEEVG4PQ4q+af+cufK84vNF80RIWhaNtNunrsrIzTQbf5XOT7sVfOuKv&#xA;ZP8AnFT/AMmbN/2zbj/k5Fir68xV8s/lT/601r3/ADGax/yefFX1Nir8+/PX/KbeYf8AtpXn/J98&#xA;VfSP/OI7N/gfV1r8I1NiB7m3ir+rFWd/nd/5KjzL/wAwn/G64qwb/nElbg+RNUke5d4P0m0cNqac&#xA;IysETM67cvj9QVFafDt3qq9jbyxol9IwvoXu0ap9GeaaWLv/ALrdynfwxVRubf8ALfy0oa4h0jRw&#xA;BVeSW1saewopPXtiq218/wDkYqRYXqTqT0s4JpgTTr+5jeu3fFU4s9bs7sj0o7pa9DLaXUI8Ossa&#xA;Yq+cv+cxrSxj1TyjfPHWSVLyG5I2ZoYXgZV+j1np88VfTPrxfzDFXyP/AM5gRzf8rB0meh+rPpMa&#xA;Rv8AsmRLmcuB7gOtfoxV4TirN/ySM
Y/NjyuZKcfrydRXehp+OKvvnmn8w+/FXwd+fiTp+b/mYT7u&#xA;bhGX/UaGNo/+EIxVgGKuxV+jvlt1j8u6XG54ulpArKeoIiUEYq+Xv+cwvi86aJIN0Om8QfdZ5Cf+&#xA;JDFWHf8AON7hPzo8useg+udP+YGfFX2+bteynFXzJ+TJr/zkr53l6FJNXcD56ii/8bYq+lzdSnwH&#xA;yGKvgTQkWP8ANXTkQUVNdhVR7C8AGKs4/wCco/JUuh/mJJrMUZGneYEFzG4Hwi4jASdPmTxk/wBl&#xA;ir27/nH/APMaDzb5Kgs7mWuuaKiWt6jH4njUUhnFSSeSCjH+YHxGKs/8waBpPmDR7rR9Xt1utPvE&#xA;4TwtUVoQQQRuGVgCpHQ4q+bvMv8AziRr8V4zeWtXtrmyYsVjv+cMyD9leUSSJIfFqJ8sVZR+R/5G&#xA;+cfJPm+TW9ZmsmtWtJbYR28sjyc3eNgaNGi8fgP7WKvecVfLP5U/+tNa9/zGax/yefFX1Nir8+/P&#xA;X/KbeYf+2lef8n3xV9If84j/APKE6x/20j/yYixVnn53f+So8y/8wn/G64q+fv8AnFrzLqln5+/Q&#xA;Ud0F0zVIZXntHoQ8sEZdGjqQQ4ANadV6jYEKvqy60mG8lrcz3DQ/74jmeBKdDUwmNmB8GYjFUVp/&#xA;ljyzp372x0y0tn+000cMauf8pnpyJ9ycVQd75+8qW1ybOO+F9fj/AI8dPR72cf60duJCv+ypiqLs&#xA;NR1q9kVzph0+0J3a8kT6wR7QwmVRX/KkBH8uKvlX/nLrVzdfmJY6ckvOHTtOj5RbUSaeSR36d2j9&#xA;PFX0J+WHm6DzZ5G0nWElWS5eBIr8L+xdRKFmUrUlfiHIV/ZIPfFUo/OT8qLf8wtCggjnW01jT2eT&#xA;T7pwSnxgB4pAN+D8V+IAkEd9wVXz5N/zi7+aiTNGsVlKgNBKlyAp9xyVW+8Yq7Wfye83flfbaX53&#xA;1K4tbptO1S0f6naNI1FVjLyeR0jp8UYSgB+1ir660zUrLVNOtdSsZRNZXkST28o6NHIoZTv7HFXl&#xA;P53/AJFSeermDWtGuYrTXIIxBLHcclhniViVqyKzK68jQ8TXYbUxV41H/wA4vfmo8oRobKNT/uxr&#xA;kcR/wKs34Yqx78zvyk1n8vYtJ/Sl5b3U2qLOeNtzKRmApUcnCFqiUfsjFX29p3/HPtf+MMf/ABEY&#xA;q+Y/+cvP+Un0H/mCk/5OnFWF/wDOOv8A5OTy/wD9Hn/UDPir7YxV82/kv/60f55+erf91KPFX0li&#xA;r4M0X/ybFh/23ov+owYq+2fzN/L3S/PnlW40S9Iimr6theU5NBcKCFcDuN+LDuDir4nmg89/lX50&#xA;HMPpmtWRPBx8UU8RNKivwyxPT/aI2VfQvkj/AJyl8n6pBHB5ojfRNRAAedVea0dthVSgaRKnejLQ&#xA;D9o4q9KtPzH/AC+vGRLbzNpcskn2IheQcztX7HPl+GKty/mR+XkTvHL5o0lJIyVdGvrYMGBoQRzr&#xA;UYqxvXP+cg/yo0j1FOsi/nReQhsY3n5eyygCGvzfFXgP5WeePLtv+d+oeaNRuRp2lX8uozxyXAoV&#xA;Fy7PGr8OfxfFTFX0Z/yu78qP+pltP+H/AOacVfFvm27tr3zXrV5auJba5v7maCQVoySTMysK77g4&#xA;q90/5xq/MLyX5a8qapZ67q0On3M1+Zoo5eVWQwovIUBHVTirMfzZ/Nn8udW/LnXtO07Xre5vrm34&#xA;QQJz5O3NTQVXFXyr5d1/UvL2uWWtaZJ6V9YSiWFjXiSNirAEVV1JVh3BxV956JqkPmny5putafeS&#xA;21vqFuk4EBiYguAWjYuklGRqqadCMVS/VNN8mQyGHU7e61+9AquntJcX7b7gtbu5giU/zOFX3xVH&#xA;aZb+bWjEGlaXpnlfTAfhRgLmenj6FsYYEP8Az1fFUv8AzB/MGw/LjyxPf6vqralq06sNMsJBCjSz&#xA;duCRIjCJSauzE0HepFVXxhYaZ5z/ADD803P1ZJdY168WW7uGJHJhGvJiSaKooAqjYdFHbFU8/K/8&#xA;1vMX5b6xcRrAbjTpn4anpM1YzzjPHkhIrHKvQ7b9CNgQq+ofLP57/lfr1uJE1qHTZgAZLbUmW0dC&#xA;e3KQ+k3+wc4qyaTzt5MjtRdya9pyWpCsJ2u4BHxanE8y9KGu2KvKfz7/ADN/LnU/y+1XQLTXbe91&#xA;S6EL2kdnW5UtFPHIayx1iX4VI3evsemKvKPyb/PnUPI0f6I1SGTUfLbMXjhjI9e1ZjyYwciFZWJq&#xA;0ZIFdwRvyVfSmi/nJ+WGr2yz23mOyh5bGK8lW0kB8OE/pk/RtiqcXfnfyXZxiS71/TrdGNFaW7gQ&#xA;E9aAs43xV4B/zkz5+8g+ZNE02w0XU49R1exvObGAO0awSRMHpLT02q4TZWOKvW7D87PyqSxt1bzJ&#xA;aqyxIGU8wQQo2+zirwL/AJyX84eWfM3mDR59B1CPUIbe0dJni5UVjISAagdsVYr+R+t6Tof5oaLq&#xA;mrXKWen2/wBa9a5krxXnaTRrWgPVmAxV9Xf8ru/Kj/qZbT/h/wDmnFXhP5Wed/Kmlfnn5u13UdSi&#xA;ttIvzqX1O8flwk9a/SWOlAT8SKSMVe7f8ru/Kj/qZbT/AIf/AJpxV8heW5Eu/wA1NKktj6qXGuwN&#xA;CVB+IPeKVoPeuKv0GxVIPOXkTyr5y0z9HeYLFLuJamGX7M0TH9qKRaMp237HvXFXz15r/wCcPdUj&#xA;leXyprUVxASStpqIMUijw9aJXVz/ALBcVYHcf84z/nNFKUTQ0nUdJY7yzCn5epKjfhiq2L/nGn86&#xA;HcK2gLGD1dryyIH/AAMzH8MVZHov/OIv5hXbK2p32n6bEftDm9xKPkqKEP8AyMxVF69/zh/5yt7h&#xA;Romr2WoWxAq9yJLWUNTf4FE60r/l4qlf/QpX5p/790z/AKSJP+qWKsm8r/8AOHupSW90/mfWYraZ&#xA;omWyi08NMFlP2XmaVY6qvdFG/wDMMVY9df8AOI35lxzukF3pc8IPwS+tKhI7VUxbHFVNf+cSfzSL&#xA;AGfS1BNCxuJaD32hOKpp5p/5xN1/SvJ0d/pV5+l/MUDs99YRLwjaHj0tuVGd0I705A7AEUZV5p5R&#xA;/Mz8wfIF09np11JbQxS8rnSLxC0XMbMrxPR4yf2uBU++KvVrL/nLx4LdUk8oxGU1aVob30kZ23Zg&#xA;ht3IqfFjiqWeYP8AnLbzbdxNFoek2uk81KmaV2u5VJ6MlRDGCP8AKRsVeb6bo35ifmf5lkeFLnWt&#xA;UmI+s3cp/dQoTtzc0jiQV2UU8FHbFX2D+Tv5P6T+XejOgdbzXb0KdS1ClAeO4iiB3WNa/NjuewCq&#xA;D/M78gPJvnqR7+jaTrrD
fUbZQRIe3rxGgkp4gq3virwLXv8AnFH80LCVv0YLTWYK/u2hmWCQj/KS&#xA;49NQfk5xVj6/847/AJytN6Q8tycqkVNxaBdv8szcfxxVkug/84m/mXfOp1OSy0iE/b9SX15R8lhD&#xA;oT/sxirP9T/5w70FtDhi0zXLiPWogTLdXEatbzE9B6SkNEPfm304q8x1f/nFv83LGUra2VrqiDpJ&#xA;a3USAj5XJgb8MVSu3/5x1/OWd+K+XHTpVpLi0QCv+tKK/RirJdP/AOcSPzLuLN5rm602ynABjtpZ&#xA;pHYnuGaKN0XbwJ/jiqn/ANClfmn/AL90z/pIk/6pYqjtG/5xC8+T30a6tqOn2VjUetLC8k8vHvwQ&#xA;pGpPzYYqjvNf/OIPmOHUHfytqdtdaax+CO/ZorhPZmjjZH+fw/LFUj/6FK/NP/fumf8ASRJ/1SxV&#xA;Vk/5xF/M1YUdbzSXkb7UQnnBX6TAF/HFVL/oUr80/wDfumf9JEn/AFSxV6V+Tf8AzjPc+Vtfg8x+&#xA;aLyC6vrI87CxtObxJIRQSSPIqFmSuyhaV3rir37FXYq7FXYq7FXYq7FXYq7FXYq7FXYq87/Nz/lT&#xA;H1Bf+VhfU+VP9H5cvrtP+KvQ/wBI4/L4fHFXzRqP/QsX1pvQ/wAW+n2+rfUfT+j1/wB59+Ksr8k/&#xA;9ClfWI/rf6Q9eq+n+m/V4cv8r6p+5+fP4cVfTflj/C36Hh/wx9S/Q/8Aun9Hel9XrQVp6Pw18cVT&#xA;XFXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq/wD/2Q==</xapGImg:image>
+               </rdf:li>
+            </rdf:Alt>
+         </xap:Thumbnails>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapMM="http://ns.adobe.com/xap/1.0/mm/"
+            xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#">
+         <xapMM:DocumentID>uuid:88B45AE7E519E7119A76BA5BC76AA065</xapMM:DocumentID>
+         <xapMM:InstanceID>uuid:24AD93F6E619E7119A76BA5BC76AA065</xapMM:InstanceID>
+         <xapMM:DerivedFrom rdf:parseType="Resource">
+            <stRef:instanceID>uuid:87B45AE7E519E7119A76BA5BC76AA065</stRef:instanceID>
+            <stRef:documentID>uuid:86B45AE7E519E7119A76BA5BC76AA065</stRef:documentID>
+         </xapMM:DerivedFrom>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:illustrator="http://ns.adobe.com/illustrator/1.0/">
+         <illustrator:StartupProfile>Web</illustrator:StartupProfile>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapTPg="http://ns.adobe.com/xap/1.0/t/pg/"
+            xmlns:stDim="http://ns.adobe.com/xap/1.0/sType/Dimensions#"
+            xmlns:xapG="http://ns.adobe.com/xap/1.0/g/">
+         <xapTPg:MaxPageSize rdf:parseType="Resource">
+            <stDim:w>14400.000000</stDim:w>
+            <stDim:h>14400.000000</stDim:h>
+            <stDim:unit>Pixels</stDim:unit>
+         </xapTPg:MaxPageSize>
+         <xapTPg:NPages>1</xapTPg:NPages>
+         <xapTPg:HasVisibleTransparency>False</xapTPg:HasVisibleTransparency>
+         <xapTPg:HasVisibleOverprint>False</xapTPg:HasVisibleOverprint>
+         <xapTPg:PlateNames>
+            <rdf:Seq>
+               <rdf:li>Black</rdf:li>
+            </rdf:Seq>
+         </xapTPg:PlateNames>
+         <xapTPg:SwatchGroups>
+            <rdf:Seq>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de nuances par défaut</xapG:groupName>
+                  <xapG:groupType>0</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Blanc</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Noir</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Rouge RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Jaune RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Vert RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Cyan RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Bleu RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Magenta RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=193 V=39 B=45</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>193</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>45</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=241 V=90 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>241</xapG:red>
+                           <xapG:green>90</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=247 V=147 B=30</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>247</xapG:red>
+                           <xapG:green>147</xapG:green>
+                           <xapG:blue>30</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=251 V=176 B=59</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>251</xapG:red>
+                           <xapG:green>176</xapG:green>
+                           <xapG:blue>59</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=252 V=238 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>252</xapG:red>
+                           <xapG:green>238</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=217 V=224 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>217</xapG:red>
+                           <xapG:green>224</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=198 B=63</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>198</xapG:green>
+                           <xapG:blue>63</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=57 V=181 B=74</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>57</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>74</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=146 B=69</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>69</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=104 B=55</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>104</xapG:green>
+                           <xapG:blue>55</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=34 V=181 B=115</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>34</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>115</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=41 V=171 B=226</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>41</xapG:red>
+                           <xapG:green>171</xapG:green>
+                           <xapG:blue>226</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=113 B=188</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>113</xapG:green>
+                           <xapG:blue>188</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=46 V=49 B=146</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>46</xapG:red>
+                           <xapG:green>49</xapG:green>
+                           <xapG:blue>146</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=27 V=20 B=100</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>27</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>100</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=147 V=39 B=143</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>147</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>143</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=158 V=0 B=93</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>158</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>93</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=212 V=20 B=90</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>212</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>90</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=30 B=121</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>30</xapG:green>
+                           <xapG:blue>121</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=199 V=178 B=153</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>199</xapG:red>
+                           <xapG:green>178</xapG:green>
+                           <xapG:blue>153</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=153 V=134 B=117</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>153</xapG:red>
+                           <xapG:green>134</xapG:green>
+                           <xapG:blue>117</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=115 V=99 B=87</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>115</xapG:red>
+                           <xapG:green>99</xapG:green>
+                           <xapG:blue>87</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=83 V=71 B=65</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>83</xapG:red>
+                           <xapG:green>71</xapG:green>
+                           <xapG:blue>65</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=198 V=156 B=109</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>198</xapG:red>
+                           <xapG:green>156</xapG:green>
+                           <xapG:blue>109</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=166 V=124 B=82</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>166</xapG:red>
+                           <xapG:green>124</xapG:green>
+                           <xapG:blue>82</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=98 B=57</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>98</xapG:green>
+                           <xapG:blue>57</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=117 V=76 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>117</xapG:red>
+                           <xapG:green>76</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=96 V=56 B=19</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>96</xapG:red>
+                           <xapG:green>56</xapG:green>
+                           <xapG:blue>19</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=66 V=33 B=11</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>66</xapG:red>
+                           <xapG:green>33</xapG:green>
+                           <xapG:blue>11</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de couleurs Web</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=236 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>236</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=139 V=146 B=152 1</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>139</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>152</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Niveaux de gris</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=100</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>255</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=90</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>229</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=80</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>204</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=70</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>178</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=60</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>153</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=50</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>127</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=40</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>101</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=30</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>76</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=20</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>50</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=10</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>25</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=5</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>12</xapG:gray>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+            </rdf:Seq>
+         </xapTPg:SwatchGroups>
+      </rdf:Description>
+   </rdf:RDF>
+</x:xmpmeta>
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                           
+<?xpacket end="w"?>
+%  &&end XMP packet marker&&
+[{ai_metadata_stream_123}
+<</Type /Metadata /Subtype /XML>>
+/PUT AI11_PDFMark5
+[/Document
+1 dict begin /Metadata {ai_metadata_stream_123} def
+currentdict end /BDC AI11_PDFMark5
+%ADOEndClientInjection: PageSetup End "AI11EPS"
+%%EndPageSetup
+1 -1 scale 0 -840 translate
+pgsv
+[1 0 0 1 0 0 ]ct
+gsave
+np
+gsave
+0 0 mo
+0 840 li
+1096 840 li
+1096 0 li
+cp
+clp
+[1 0 0 1 0 0 ]ct
+904.575 334.257 mo
+904.575 320.361 898.182 311.771 892.811 307.004 cv
+885.783 300.775 877.066 297.482 867.603 297.482 cv
+858.135 297.482 849.418 300.775 842.393 307.006 cv
+837.025 311.763 830.63 320.354 830.63 334.257 cv
+830.63 342.282 832.76 348.533 835.592 353.299 cv
+832.309 351 828.712 349.018 824.782 347.397 cv
+817.438 344.379 809.047 342.776 799.205 342.523 cv
+799.205 314.912 li
+799.205 309.227 796.713 303.826 792.384 300.14 cv
+788.849 297.129 784.381 295.508 779.801 295.508 cv
+778.775 295.508 777.742 295.588 776.715 295.754 cv
+747.302 300.492 li
+737.896 302.006 730.983 310.123 730.983 319.648 cv
+730.983 344.484 li
+730.277 344.316 729.57 344.156 728.861 344.004 cv
+722.946 342.736 716.463 342.093 709.595 342.093 cv
+698.512 342.093 688.313 344.105 679.273 348.078 cv
+675.158 349.891 671.305 352.037 667.71 354.475 cv
+665.828 352.313 663.445 350.541 660.656 349.357 cv
+655.197 347.042 649.498 345.239 643.722 344.003 cv
+637.803 342.734 631.32 342.092 624.455 342.092 cv
+613.37 342.092 603.172 344.105 594.137 348.078 cv
+585.168 352.024 577.379 357.498 570.993 364.338 cv
+570.505 364.861 570.043 365.404 569.571 365.941 cv
+567.759 359.079 564.273 352.445 559.656 347.231 cv
+554.654 334.644 545.437 324.396 532.739 317.38 cv
+522.827 311.904 510.623 308.563 500.098 308.44 cv
+499.868 308.439 499.64 308.438 499.413 308.438 cv
+477.133 308.438 463.671 320.701 457.115 331.523 cv
+448.029 337.533 440.207 347.509 437.402 358.122 cv
+436.752 358.81 436.147 359.513 435.567 360.225 cv
+430.352 354.689 423.794 350.346 416.015 347.283 cv
+407.797 344.057 398.276 342.486 386.911 342.486 cv
+377.26 342.486 367.872 343.197 359 344.601 cv
+353.201 345.52 348.15 346.486 343.704 347.52 cv
+344.549 345.173 li
+347.646 336.569 344.319 326.972 336.56 322.135 cv
+334.878 321.084 332.65 319.895 329.551 318.387 cv
+326.144 316.729 322.15 315.226 317.317 313.781 cv
+312.746 312.423 307.58 311.275 301.503 310.271 cv
+295.283 309.251 288.347 308.732 280.892 308.732 cv
+269.104 308.732 257.795 310.749 247.286 314.723 cv
+236.409 318.832 226.785 324.981 218.687 332.993 cv
+210.643 340.949 204.265 350.688 199.728 361.941 cv
+195.257 373.034 192.99 385.702 192.99 399.594 cv
+192.99 413.145 195.016 425.551 199.007 436.455 cv
+203.167 447.837 209.248 457.667 217.078 465.667 cv
+224.963 473.729 234.529 479.92 245.506 484.068 cv
+256.024 488.043 267.729 490.059 280.299 490.059 cv
+293.684 490.059 304.816 489.127 314.333 487.21 cv
+321.051 485.856 326.354 484.569 330.603 483.257 cv
+333.919 485.905 338.121 487.492 342.695 487.492 cv
+372.106 487.492 li
+378.378 487.492 383.956 484.516 387.503 479.898 cv
+391.05 484.516 396.628 487.492 402.899 487.492 cv
+432.311 487.492 li
+438.632 487.492 444.246 484.467 447.789 479.788 cv
+453.011 488.951 460.835 493.996 468.687 496.009 cv
+475.153 500.054 484.721 503.735 498.12 503.737 cv
+505.128 503.737 512.654 502.694 520.491 500.64 cv
+540.638 495.358 557.324 483.058 564.939 468.345 cv
+565.798 467.53 566.584 466.669 567.328 465.78 cv
+567.969 466.562 568.627 467.331 569.308 468.082 cv
+575.643 475.081 583.667 480.618 593.157 484.54 cv
+602.332 488.334 613.062 490.256 625.048 490.256 cv
+632.506 490.256 639.527 489.592 645.93 488.281 cv
+652.391 486.952 657.784 485.354 662.418 483.393 cv
+664.66 482.444 666.634 481.103 668.309 479.489 cv
+671.426 481.392 674.758 483.078 678.293 484.538 cv
+687.468 488.332 698.197 490.256 710.188 490.256 cv
+717.642 490.256 724.662 489.592 731.066 488.281 cv
+735.035 487.465 738.597 486.545 741.831 485.505 cv
+744.413 486.776 747.316 487.492 750.389 487.492 cv
+779.801 487.492 li
+786.072 487.492 791.649 484.518 795.196 479.901 cv
+798.745 484.518 804.321 487.492 810.593 487.492 cv
+840.004 487.492 li
+842.283 487.492 844.469 487.096 846.5 486.374 cv
+848.531 487.096 850.717 487.492 852.995 487.492 cv
+882.407 487.492 li
+893.124 487.492 901.811 478.805 901.811 468.089 cv
+901.811 364.26 li
+901.811 360.595 900.797 357.169 899.031 354.245 cv
+902.149 349.367 904.575 342.82 904.575 334.257 cv
+cp
+false sop
+/0 
+[/DeviceCMYK] /CSA add_res
+0 0 0 0.9 cmyk
+f
+852.995 468.089 mo
+882.407 468.089 li
+882.407 364.26 li
+852.995 364.26 li
+852.995 468.089 li
+cp
+855.265 346.988 mo
+858.75 350.082 862.865 351.627 867.602 351.627 cv
+872.34 351.627 876.451 350.082 879.939 346.988 cv
+883.425 343.897 885.17 339.654 885.17 334.256 cv
+885.17 328.861 883.425 324.618 879.939 321.524 cv
+876.451 318.434 872.34 316.886 867.602 316.886 cv
+862.865 316.886 858.75 318.434 855.265 321.524 cv
+851.776 324.618 850.034 328.861 850.034 334.256 cv
+850.034 339.654 851.776 343.897 855.265 346.988 cv
+cp
+830.826 375.116 mo
+827.471 370.906 822.995 367.65 817.403 365.346 cv
+811.809 363.045 804.801 361.891 796.381 361.891 cv
+793.485 361.891 790.49 362.188 787.4 362.779 cv
+784.306 363.371 781.773 363.998 779.8 364.654 cv
+779.8 314.912 li
+750.389 319.648 li
+750.389 468.089 li
+779.8 468.089 li
+779.8 389.131 li
+781.642 388.607 783.714 388.113 786.019 387.65 cv
+788.319 387.191 790.786 386.96 793.42 386.96 cv
+799.999 386.96 804.505 388.934 806.941 392.882 cv
+809.375 396.83 810.594 403.54 810.594 413.016 cv
+810.594 468.089 li
+840.005 468.089 li
+840.005 409.463 li
+840.005 402.356 839.314 395.91 837.932 390.118 cv
+836.55 384.329 834.182 379.33 830.826 375.116 cv
+cp
+724.597 444.6 mo
+720.516 445.259 716.7 445.586 713.147 445.586 cv
+703.146 445.586 696.138 443.02 692.125 437.887 cv
+688.11 432.755 686.104 425.52 686.104 416.174 cv
+686.104 407.359 688.242 400.253 692.521 394.855 cv
+696.795 389.461 703.278 386.763 711.963 386.763 cv
+716.041 386.763 719.727 387.157 723.017 387.947 cv
+726.305 388.736 729.334 389.658 732.097 390.711 cv
+738.216 367.221 li
+733.873 365.379 729.398 363.964 724.794 362.977 cv
+720.186 361.99 715.121 361.496 709.595 361.496 cv
+701.171 361.496 693.671 362.945 687.091 365.839 cv
+680.51 368.735 674.918 372.648 670.313 377.584 cv
+665.705 382.52 662.187 388.311 659.753 394.955 cv
+657.316 401.601 656.101 408.673 656.101 416.174 cv
+656.101 423.94 657.119 431.146 659.161 437.788 cv
+661.199 444.435 664.423 450.193 668.833 455.061 cv
+673.24 459.931 678.865 463.779 685.71 466.607 cv
+692.551 469.436 700.711 470.852 710.187 470.852 cv
+716.37 470.852 722.03 470.324 727.162 469.272 cv
+732.294 468.218 736.569 466.972 739.992 465.522 cv
+735.848 441.44 li
+732.424 442.89 728.673 443.942 724.597 444.6 cv
+cp
+639.458 444.6 mo
+635.378 445.259 631.562 445.586 628.01 445.586 cv
+618.008 445.586 610.999 443.02 606.987 437.887 cv
+602.972 432.755 600.967 425.52 600.967 416.174 cv
+600.967 407.359 603.104 400.253 607.382 394.855 cv
+611.656 389.461 618.141 386.763 626.824 386.763 cv
+630.902 386.763 634.588 387.157 637.879 387.947 cv
+641.166 388.736 644.195 389.658 646.959 390.711 cv
+653.078 367.221 li
+648.735 365.379 644.26 363.964 639.655 362.977 cv
+635.048 361.99 629.983 361.496 624.456 361.496 cv
+616.033 361.496 608.532 362.945 601.953 365.839 cv
+595.372 368.735 589.78 372.648 585.176 377.584 cv
+580.566 382.52 577.048 388.311 574.614 394.955 cv
+572.178 401.601 570.963 408.673 570.963 416.174 cv
+570.963 423.94 571.98 431.146 574.022 437.788 cv
+576.061 444.435 579.284 450.193 583.694 455.061 cv
+588.103 459.931 593.728 463.779 600.572 466.607 cv
+607.413 469.436 615.573 470.852 625.048 470.852 cv
+631.232 470.852 636.892 470.324 642.023 469.272 cv
+647.156 468.218 651.431 466.972 654.854 465.522 cv
+650.709 441.44 li
+647.286 442.89 643.535 443.942 639.458 444.6 cv
+cp
+422.836 375.116 mo
+419.413 370.906 414.773 367.65 408.92 365.346 cv
+403.063 363.045 395.725 361.891 386.911 361.891 cv
+378.226 361.891 369.935 362.518 362.039 363.766 cv
+354.143 365.019 347.695 366.366 342.695 367.813 cv
+342.695 468.089 li
+372.106 468.089 li
+372.106 387.947 li
+373.947 387.685 376.054 387.453 378.422 387.256 cv
+380.791 387.059 383.027 386.96 385.134 386.96 cv
+391.975 386.96 396.647 388.934 399.149 392.882 cv
+401.647 396.83 402.899 403.54 402.899 413.016 cv
+402.899 468.089 li
+432.311 468.089 li
+432.311 409.463 li
+432.311 402.356 431.586 395.91 430.14 390.118 cv
+428.689 384.329 426.256 379.33 422.836 375.116 cv
+cp
+297.472 443.414 mo
+295.628 443.809 293.49 444.073 291.057 444.203 cv
+288.62 444.336 285.693 444.4 282.273 444.4 cv
+275.957 444.4 270.429 443.316 265.691 441.145 cv
+260.954 438.973 257.007 435.913 253.849 431.966 cv
+250.69 428.018 248.322 423.314 246.743 417.852 cv
+245.163 412.393 244.374 406.305 244.374 399.594 cv
+244.374 385.775 247.563 374.889 253.947 366.924 cv
+260.328 358.964 270.692 354.982 285.036 354.982 cv
+291.483 354.982 297.438 355.806 302.9 357.449 cv
+308.359 359.097 313.196 361.037 317.408 363.272 cv
+326.291 338.6 li
+325.237 337.941 323.494 337.02 321.06 335.836 cv
+318.624 334.65 315.534 333.5 311.783 332.381 cv
+308.032 331.265 303.558 330.277 298.36 329.42 cv
+293.16 328.566 287.337 328.137 280.891 328.137 cv
+271.416 328.137 262.5 329.717 254.145 332.875 cv
+245.787 336.033 238.517 340.672 232.333 346.791 cv
+226.146 352.91 221.279 360.38 217.726 369.195 cv
+214.172 378.012 212.396 388.145 212.396 399.594 cv
+212.396 410.912 214.006 420.979 217.232 429.794 cv
+220.455 438.612 225.029 446.048 230.951 452.099 cv
+236.873 458.153 244.009 462.759 252.368 465.917 cv
+260.723 469.075 270.035 470.654 280.299 470.654 cv
+292.272 470.654 302.339 469.83 310.5 468.187 cv
+318.658 466.543 324.58 464.997 328.265 463.548 cv
+328.265 395.843 li
+297.472 395.843 li
+297.472 443.414 li
+cp
+0 0 0 0 cmyk
+f
+499.871 327.844 mo
+479.593 327.609 472.617 343.076 471.746 345.664 cv
+462.806 348.957 454.521 360.719 455.829 367.776 cv
+449.177 372.482 444.763 378.48 449.724 388.479 cv
+444.926 393.477 441.001 405.299 449.506 412.943 cv
+441.818 426.47 450.486 434.057 454.739 437.174 cv
+450.813 449.406 459.539 459.127 461.498 460.463 cv
+463.078 470.579 467.977 477.244 476.324 477.636 cv
+482.209 482.576 494.498 487.394 515.571 481.87 cv
+533.066 477.282 545.821 466.344 549.147 455.993 cv
+557.104 451.877 556.777 439.526 556.342 436.938 cv
+562.828 423.118 558.739 411.298 556.342 405.886 cv
+560.702 397.006 555.143 380.422 551.546 376.951 cv
+551.872 369.836 547.456 361.543 542.825 357.896 cv
+536.173 335.078 511.879 327.983 499.871 327.844 cv
+cp
+f
+502.25 467.75 mo
+495.838 466.606 492.5 462.25 489 455.25 cv
+486.897 453.815 478.75 444.25 477.25 432 cv
+474.695 430.128 471.25 418.5 471.5 409.75 cv
+469.75 403.75 468.349 397.448 470 388.75 cv
+467.75 379.25 467.599 372.865 472.75 367.5 cv
+472.75 358 475.359 351.052 482.5 346 cv
+481.349 339.791 484.277 333.904 491.679 328.695 cv
+477.657 331.937 472.487 343.462 471.746 345.664 cv
+462.806 348.957 454.521 360.719 455.829 367.776 cv
+449.177 372.482 444.763 378.48 449.724 388.479 cv
+444.926 393.477 441.001 405.299 449.506 412.943 cv
+441.818 426.47 450.486 434.057 454.739 437.174 cv
+450.813 449.406 459.539 459.127 461.498 460.463 cv
+463.078 470.579 467.977 477.244 476.324 477.636 cv
+482.209 482.576 494.498 487.394 515.571 481.87 cv
+522.207 480.13 528.155 477.474 533.171 474.285 cv
+516.934 476.368 507.505 472.161 502.25 467.75 cv
+cp
+0 0 0 0.05 cmyk
+f
+479.905 346.547 mo
+479.905 346.547 498.071 344.899 507.586 346.71 cv
+517.031 348.507 533.404 356.603 533.404 356.603 cv
+533.404 356.603 508.984 349.163 501.732 348.135 cv
+493.03 346.898 479.905 346.547 479.905 346.547 cv
+cp
+0 0 0 0.75 cmyk
+f
+464.782 368.029 mo
+464.782 368.029 488.936 365.72 503.083 367.014 cv
+517.229 368.308 540.275 375.997 540.275 375.997 cv
+540.275 375.997 514.27 371.326 499.886 369.709 cv
+489.149 368.502 464.782 368.029 464.782 368.029 cv
+cp
+f
+460.468 387.674 mo
+460.468 387.674 484.75 385.621 499.593 386.067 cv
+514.435 386.512 540.681 391.008 540.681 391.008 cv
+540.681 391.008 506.098 388.892 494.801 388.754 cv
+483.505 388.617 460.468 387.674 460.468 387.674 cv
+cp
+f
+461.11 412.032 mo
+461.11 412.032 487.129 405.443 501.163 404.417 cv
+517.788 403.2 544.817 406.357 544.817 406.357 cv
+544.817 406.357 509.509 406.268 498.869 407.439 cv
+487.606 408.681 461.11 412.032 461.11 412.032 cv
+cp
+f
+464.962 436.38 mo
+464.962 436.38 490.357 427.354 504.871 425.765 cv
+519.387 424.175 546.102 424.177 546.102 424.177 cv
+546.102 424.177 511.032 427.614 500.03 429.181 cv
+489.032 430.748 464.962 436.38 464.962 436.38 cv
+cp
+f
+545.674 439.174 mo
+545.674 439.174 524.613 448.131 510.928 451.999 cv
+497.242 455.868 469.725 459.093 469.725 459.093 cv
+469.725 459.093 501.297 452.146 511.654 448.944 cv
+522.01 445.742 545.674 439.174 545.674 439.174 cv
+cp
+f
+484.328 475.342 mo
+484.328 475.342 498.696 467.484 507.908 464.136 cv
+525.13 457.875 538.541 456.817 538.541 456.817 cv
+538.541 456.817 514.27 464.576 505.585 467.402 cv
+498.535 469.697 484.328 475.342 484.328 475.342 cv
+cp
+f
+750.389 468.089 mo
+779.8 468.089 li
+779.8 423.76 li
+770.099 424.447 760.291 425.042 750.389 425.543 cv
+750.389 468.089 li
+cp
+724.597 444.6 mo
+720.516 445.259 716.7 445.586 713.147 445.586 cv
+703.146 445.586 696.138 443.02 692.125 437.887 cv
+689.906 435.051 688.324 431.549 687.332 427.428 cv
+682.405 427.474 677.462 427.5 672.5 427.5 cv
+667.27 427.5 662.06 427.471 656.868 427.419 cv
+657.378 431.016 658.142 434.472 659.161 437.788 cv
+661.199 444.435 664.423 450.193 668.833 455.061 cv
+673.24 459.931 678.865 463.779 685.71 466.607 cv
+692.551 469.436 700.711 470.852 710.187 470.852 cv
+716.37 470.852 722.03 470.324 727.162 469.272 cv
+732.294 468.218 736.569 466.972 739.992 465.522 cv
+735.848 441.44 li
+732.424 442.89 728.673 443.942 724.597 444.6 cv
+cp
+852.995 416.62 mo
+852.995 468.089 li
+882.407 468.089 li
+882.407 412.573 li
+872.766 414.02 862.957 415.37 852.995 416.62 cv
+cp
+810.594 468.089 mo
+840.005 468.089 li
+840.005 418.184 li
+830.335 419.297 820.527 420.317 810.594 421.24 cv
+810.594 468.089 li
+cp
+639.458 444.6 mo
+635.378 445.259 631.562 445.586 628.01 445.586 cv
+618.008 445.586 610.999 443.02 606.987 437.887 cv
+604.494 434.701 602.779 430.701 601.835 425.894 cv
+591.57 425.423 581.405 424.852 571.351 424.183 cv
+571.815 428.952 572.701 433.489 574.022 437.788 cv
+576.061 444.435 579.284 450.193 583.694 455.061 cv
+588.103 459.931 593.728 463.779 600.572 466.607 cv
+607.413 469.436 615.573 470.852 625.048 470.852 cv
+631.232 470.852 636.892 470.324 642.023 469.272 cv
+647.156 468.218 651.431 466.972 654.854 465.522 cv
+650.709 441.44 li
+647.286 442.89 643.535 443.942 639.458 444.6 cv
+cp
+402.117 401.792 mo
+402.637 404.961 402.899 408.698 402.899 413.016 cv
+402.899 468.089 li
+432.311 468.089 li
+432.311 409.463 li
+432.311 408.838 432.298 408.226 432.287 407.611 cv
+422.005 405.783 411.942 403.842 402.117 401.792 cv
+cp
+297.472 443.414 mo
+295.628 443.809 293.49 444.073 291.057 444.203 cv
+288.62 444.336 285.693 444.4 282.273 444.4 cv
+275.957 444.4 270.429 443.316 265.691 441.145 cv
+260.954 438.973 257.007 435.913 253.849 431.966 cv
+250.69 428.018 248.322 423.314 246.743 417.852 cv
+245.163 412.393 244.374 406.305 244.374 399.594 cv
+244.374 385.775 247.563 374.889 253.947 366.924 cv
+256.633 363.573 260.034 360.937 264.132 358.996 cv
+253.701 354.222 244.047 349.257 235.23 344.12 cv
+234.243 344.98 233.271 345.863 232.333 346.791 cv
+226.146 352.91 221.279 360.38 217.726 369.195 cv
+214.172 378.012 212.396 388.145 212.396 399.594 cv
+212.396 410.912 214.006 420.979 217.232 429.794 cv
+220.455 438.612 225.029 446.048 230.951 452.099 cv
+236.873 458.153 244.009 462.759 252.368 465.917 cv
+260.723 469.075 270.035 470.654 280.299 470.654 cv
+292.272 470.654 302.339 469.83 310.5 468.187 cv
+318.658 466.543 324.58 464.997 328.265 463.548 cv
+328.265 395.843 li
+297.472 395.843 li
+297.472 443.414 li
+cp
+342.695 468.089 mo
+372.106 468.089 li
+372.106 395.013 li
+361.997 392.548 352.188 389.961 342.695 387.26 cv
+342.695 468.089 li
+cp
+0 0 0 0.05 cmyk
+f
+0.5 lw
+0 lc
+0 lj
+4 ml
+[] 0 dsh
+true sadj
+27 804 mo
+0 804 li
+/0 
+<<
+/Name (All)
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4 
+/Components [ 0.858823 0.85098 0.788235 1 ] 
+>>
+/CSD add_res
+1 /0 /CSD get_res sepcs
+1 sep
+@
+36 813 mo
+36 840 li
+@
+27 36 mo
+0 36 li
+@
+36 27 mo
+36 0 li
+@
+1069 36 mo
+1096 36 li
+@
+1060 27 mo
+1060 0 li
+@
+1069 804 mo
+1096 804 li
+@
+1060 813 mo
+1060 840 li
+@
+%ADOBeginClientInjection: EndPageContent "AI11EPS"
+userdict /annotatepage 2 copy known {get exec}{pop pop} ifelse
+%ADOEndClientInjection: EndPageContent "AI11EPS"
+grestore
+grestore
+pgrs
+%%PageTrailer
+%ADOBeginClientInjection: PageTrailer Start "AI11EPS"
+[/EMC AI11_PDFMark5
+[/NamespacePop AI11_PDFMark5
+%ADOEndClientInjection: PageTrailer Start "AI11EPS"
+[
+[/CSA [/0 ]]
+[/CSD [/0 ]]
+] del_res
+Adobe_AGM_Image/pt gx
+Adobe_CoolType_Core/pt get exec
+Adobe_AGM_Core/pt gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%Trailer
+Adobe_AGM_Image/dt get exec
+Adobe_CoolType_Core/dt get exec
+Adobe_AGM_Core/dt get exec
+%%EOF
+%AI9_PrintingDataEnd

+userdict /AI9_read_buffer 256 string put
+userdict begin
+/ai9_skip_data
+{
+	mark
+	{
+		currentfile AI9_read_buffer { readline } stopped
+		{
+		}
+		{
+			not
+			{
+				exit
+			} if
+			(%AI9_PrivateDataEnd) eq
+			{
+				exit
+			} if
+		} ifelse
+	} loop
+	cleartomark
+} def
+end
+userdict /ai9_skip_data get exec
+%AI9_PrivateDataBegin
+%!PS-Adobe-3.0 EPSF-3.0
+%%Creator: Adobe Illustrator(R) 13.0
+%%AI8_CreatorVersion: 13.0.0
+%%For: (Thierry Ung) ()
+%%Title: (gnocchi-nb.eps)
+%%CreationDate: 4/3/17 10:02 AM
+%AI9_DataStream
%Gb"-6fodW&E?P#[p(-uI1;lPX\@EXJ%N1s2S0W:t"G?bf0u=r^'YCCb2,d"@FZ#6Y,CTlICeRW'F#VF(XA"]U[_jCI,;hU-m8-a1
%>JKsmpu09fhn477h=8Ctr;?!+OR^@%H+9?!ke?iOYM]96lQ71[l29N++5ab\s63T,5CEG+LEB85hL>7p_p@e3DdRqk^YBW\^Ic2P
%^3TVPqVgJH=.eoHqZ1)Rr8$n`2fHJ,gE5>Oj6Maj^Pfu2i:&i+p>>o>lFYY4^VfdKrgWXh^]!\lf=reP^Ad85hNa.OJ.JO7iVQus
%rq*13YWUF%rg\H-hu@WLQ_3.EpN)ed1#SO0REOIor_Ib;mP"Rump:MDn,N*d2"Ns"r6Mkh+$Y/Qma]AX<E^BbjbGSI9mNTHJ+NNm
%pMXDeHB%NYr-0%?."D&Q11'n.qg:?IGKV\tj^k(qlZBi>V`1gZq<)t:GkLa!pRaYIn9Z)OYMOY$ZZc-=l.9kQ^G6<``aPOtoZ?"h
%q"FC$^OQ:G;s@WncpD+<`J8A<^Vre&mpFtdqMrTm)Q:F8L>TZ?ptb73AVE@]rUJ@+&+>!S(6h$5lgiT,Isq:c#B"DSh(QNmrVe_t
%rYtS+bNQil\snnuLFMle\D/Z//8t"iBdf1a&+fQ]mLcf,/P+>h=fl%rM>u\X2rD;os77mbT'+P]Q[/;h51'H3e]0W+eFfBP]:"-,
%3::GNZcEZ"\;^C8Vq:B_!!V=V^SpN._u26;9b7$DmgBW>J+X!mDCk1@W@eSW`US*8deVFk=(RB3ZFGD_qHrZNs8!&kDCD\"i-3K9
%HWf;$kp4Uei,8njBaF3B#k[BS$K%_!jU?@p%0,DtGNB6Wja=a6S'(m-/.:7>PS\^o=l?GoJ\0h\r._gY-b/JaJo@5UnB'ntpKXok
%l1LK$I/A5Dc`^FH%eZ4c9DtP3d\lS/q)efS"oO+e?qg&/mXd188EhoI&c@B1?psK'`[ru4hh<f$Dtji4"M^WVRO;hf7KgJ7h!c,Q
%0ID#kbpGRqmXfGpDn+tV"dlA%oUP'2As$[3Ia^5ijOAPA48"T)%R$.9Ouqk2GJX(42mbt'8>E'.BZ8m_g]c`%'rG--n&"kX2uEoU
%U\0E"e8p/UDn$1p#JXsf#tnr<l1sg:_#`c1i/arWooE]*m\4ujH(g@!n"Jpo07#7Y/F,`Ka:%Eed0XF%kht2PCAJXa#J]4M_J^85
%bmAe7I`"<&3qn>9[d5$'3I1a8\)=LKMI7CqS1oY962$#6.EssINb4iR9Ci?NQPGXf;;^=bGri?.hl"fb76RQ>8_?^P:3/cS>k(8X
%>B`/Wa*f,C[I,T6Vi(A)h1GA9s5;WHLLd=7<L-djlMs)2R=oM1foWp?J*"5j/q)4G!FP>_W1rJ,3792I0>69LK3/"E0D@n-"K?n8
%2,3^mrCa>>VZ5g^Siu]2E1g"l?[<`RoRHg40DEH]9T]+;?C)&u?iSPickk)N=?H:PlUq3#J+U4/DiCT]Y3.=Zk7`?3Q$MrpQLao,
%:Zq!/^6m3to.\YnL\Z7N73&4F]$BqHI\2C;]h[_tKDUK$*Tf2UZ/,k*?I.K_qpUA*l$?(!g^!<'(N;U`JSe\b0L2gVlg55Wr6k;)
%Ic/sE_=QULoE9Cl:P3;t-fmO@1:/p-IULLI6\JU)*^HdD$!MqP?YB2)Sr_3V1rfB^qB"@`pTU"BY0u``][UTR',UM2oGX>^aVYQM
%K(Xlk+:*BFhh9>1Og_$>=bAfs'BoP";JG/>mop2cbd[__n44FDBEm42ar9#Q&raQ,A(;H[45kDZ:M1&^GK/%TY[[60_&"#jI+nIk
%H,]YQcO#BcHh7dCqtL\\]?]++;i;[C>8RIF>/]:CKPPa3Bg3#_E(=+^qf9b5^4X8n()ra6L+<`f`A5)!ec7B=m\@<]*'XbDd9P\t
%oL=fsJbD%ka/eh4et&C=pG7ghe*->V'7?;H4j/(N=NS6bgl&Q)nftK\=M@<Fjo(V8P[<'fZ4gX$/PaI[Gj"V\qeIhMI@(1jV%oSW
%>j$W\9O0TEF.e@D/diD]o]\X5LW<\I$KJR96S!*<VRCefYQ[Kb4m-H6[DLURQ;7/9)gXd,<dMbIF@FGtB9?5RY`c67@MiKt$`?"\
%Gr[!Y;HhJA,[8oIegf4W.t>`Yd^[p#.F08!d?3BQ<5d2e_:07SVf_SQGmE!-[l5gkg7))epA,G;QhLB/khd+N0tjdMGs*2&H)4PT
%,k>ce=*aqm.\&mOmmW0!%qDT_.aiBZQ7tbJZ]-DEZVE)?c4[`JlSI9YMfX62'CW_YNTLeVn$&3GTnsN0fS_Qr^.l:XkpYp*S:R_X
%+(3GN_2rU"cDogcGa>nW-Q_Xp_R%k(.JoU[!l7Y.7-H;oA`XVrf<b),XBjm.g7^:)qX`((dFg$#9AcW'#pn;Dc,/hV+.9ZTdl*da
%7$65Q<=38N$W/2..TX.an^@%00gX)m4%o4a$C=[&\Cl%lh=;e8/nb-V)$aU$j57TL#CG`MUr7Q2c\ZSDpY&cEduASQfjfe?31`_C
%Cl:^W\,<kb<8u>Lqj@TMM)-UYpHV],4)S1U\t<W`ll'UbO2U,m%HA-!5<p9:m+WZ+^h7"ZhL>W27s]CUn.&F:5DVMo=!'Z>rU:hU
%VXO&=?bcU4j7f?\`W(G^[i`cKDr-,\e,:kI1B6cDDgs;[rRJq!5CWVkn%\hZro3FlLOX]"pR@5:Jc>**QiHhF?[r%=B6Sjqho_Q%
%+5d"c$FI\9s6j=.q>P";[eK*k61asCmeH=m++A_6Rh$UtQ8X3^iL:qH^:nsm0BJDlYOC[M0FGJ,qrZ/4hu<DglZ,qqru]X@IeE!T
%n*c9T?bcX'L[>c#s6o1CG9:1?"+TdRH%#E=2#[S?\_Q%hrkhG@K8+mOSt:8ofO!IKqs`jp[lV$#+$[I7iR=:rrSqbTqi?3eiT$gT
%*k-#g-hU9nJYuRfhnCQ*rr)<CI!rmg^TP%aTA+hipp7/B?[XKKMB>/0n8J."f<=%[\7&V?laqZEnFk*;n&dT]II0l84l4]a2/J$$
%OY/OthZ)'j[i`?a*ZEZ)GHX6eiK!^L%>TuU*a%$4hn8ud&;+?ipu[d.\.A&Bs6p,t;[77j:OiC3C%8?Tm=mdpp=9%A+7BcT*QbKk
%(L$]GRIb@rp&1q8\:WW9(O=q?DuL(:=Lgd@PC?S(\0]R1Iea!:*f%AfHh7&?'l*<:*"5%beE,Tf%;<r"s4cAa2t-eFhd3uFIIKIB
%47Kl@DS/&[j8Qmn=%BjV+Ui?]6QW+5)%OQ(3";55$9DE6II`SorSe@ekYM3EYN%>5E?)k5bK60mH!JA,GT9Lh@;4PB)6[HS@gF64
%*2YEF6TBg!3/:/:7N]Yf@Z2bu35m0bi\@%a3F:]&%EXsC32Dp^RaR^lF@[dGcKULIm.6T3:-%Jdc[!WfI*AL9cBipLL<15Q,#2jp
%TG=&aEZM%;F=OB8V/QUUm-u!,Yg1O6:33#MIIfQSKe59MlSP6sbC!U<p_U]0&M7E?h-8<*SlOL6dE$<0No`;RkAjkNFZkuBEecD-
%LGB$gE%'a,P[5I&Tl^:fO7F=>4I>A5Y4LUMY3Y%m*:PGAH7Y8XJ'2*-A!rQk3p'oi"*EseO$-qkW3/=LlWgU9nTqj>b%dKib+-IM
%RH@G$bPc!XgQ>R(ep#[[53!Ol\B:LMYgQ\+>#tNPlO>?cS1aWe/39oCk;?&Jl>p(Ob/;pDB'jU2h9PR=K)ir6!XqWOJcgT.Omuh=
%#oCY'!9at$6<KI3-qYU[-s@`k0MZ/s$,R%bMj#Z)JV_]BNJ+5raDjK+3$E<I.WR4\"N6:O(Cj;P7\h/k3$V$,63mqNeR*:?:Na&e
%>MF=DAUS!0Bmk'/T6ZD:0Cmau@p*AGkD*+7bU`'d=mW`L1]FJbHb89[c"5FFh/VQ"\iX5E>5\'a1F^-4-ZT2(!'_2/#SVkR*#t]*
%+9^'u5m]f6J4Lhs+c%;?!@3;RL15ogkQ(rULa&cqO[Wpi+Nt<YWGDB_YQ(C&]j?M!"]GG'UZ*e4/E#mS(hqSi.B\d*U5)N4/-R">
%.Km6fGR7J4KuFd["E];!#=qTBfE\p1/pQOe`he9&9>RoT_(Lf3b(Q\rBQPii'+2V#"\DJ<V0"H9aV=q\E5A_hAOBV@\ZFSsD"=iI
%ls08UnSg*BGUtq#.Z8U6?HR6(.2=BoS5-!*M:T,VG8CtfNb-6a"`^DkZfrC3iAt<DFgt[0Qco2F\fAmJYnZ8f$<^#g==EBXnX#rj
%WL*5PHn*YG!*J7gI@AHlm$1[2hg"jf$nb=;QY.A_PF(k<WGIVp:5)M>#_*a"'tG#nE[k&M:jP0hroVYp=QiL?L>6bs`9:*Td]!Z>
%:^/hE]opR9N.m"GKpiIo[@lfqT^B!,n5:TB.asLeP)"YYIMp:W\00TiQ!JELA_uQ)Hb!sh=Jg*i)f-qppJSHUWGKC<far5XRCQ":
%`'*Fa,/T]&HnNT^/;pfu*B9<)T??"$5R;`Y$=10+eN9$R4Hu,$bBi<VWj/Ym=B;q&r.W.\$`uFTF^L70j?ei8_1o5Q6p\\//YqZ:
%YiH)2ZBY;O'mc/Q#).Y!4q-JZ@+DgGf!4/c14l)A(S$D;6B23S#Ce?#GV3*DoS5ZLg<T+t=8#`UQ'YWV>ZY$.pQFhLbNb\2Y'!`Y
%;;H:o[EtJt:X<>$mcsefp[,<=pN-&qK.H;%M-k.SA`#[]kE`N8G@1:9pot)lm'R,5Ig,CYpVlS0jS4B!oUgT4s-M&lB9hXJ/A#1c
%qsNL6+*N;pr2;_e&p^T!NGpSK*N?+qE*F*kZ>C8\C`mJ_>ijV&@1u]/+h?s<9KB?<YmP*':XPG47Z7eImq6^Ydj\(ea1PhSJMk-B
%"A[oNbM\nN:5"_:(fK&9PDZ"i<"VN0\B(!t)XgiHU!C-[Fnsu^M#7BC=A)I76X<fWP$sRsB=R<IMU4C()+qj+#e.M*E3)_ri/j&\
%Xs9)uKVN5]![^=8:Sr:GTe;T[FNT%21"8FA>b=\%oqfr<)Z+LJHNPrhE(XhgL@Du$]#/JLNYT7(cGKZ[O,,joHNRBTGg..k+ZOOF
%XH/;43iiRk?4.a-CR1_JLsj<Cl5IfjP*</DHS!66_aNhr5eoNK4(B,1\5uFI9>\@`oHX@>CoXf)N2U;+6CEj5-DiKSAYKMZ>perZ
%\X*+U[UVL-n:o4?m0tFi_f`=R_\6a2*q`:'H5;JLVa8NO+$ct;DgVKAl%;.J1T^-ra%%#oj8NKLE(u/(1jk3/]#Du-NX1*2cBIJW
%+.ES/`<bB?%`piX-XK):BWsOm.Pk,*$rgDiW3AktDs)_iLP!^IK%!.j3Y)D;a+EaLS6Wo0WU,55m"])eq/b42f1VgPQOua?H,e]>
%-M*bQPNY;-T@>t2d#suEla*JE;44<_05/@:T6=rBi$2]S=:*V*@Y91?8mOKj%]FG2TmHIm?j#D4jno1O@R='DSR8*33i`a0jRZ#(
%N(-7s=@WMW<b4-84b1j/*H:*uN*=;m]ub-`J;)s,84CrV*,&):S<6Y16Y9q>jTE`MN'FYt.0<BM"R/T@E0qfuKF^c*U,"9o:S[N1
%k0:]CMo.]'0bY@"CQWR/Oj^ltBLegd*?%CAcMW;>ILWK=.bS^,Mnu?!>5S@/8N76RLrk5tN_tLl7:lYT2W/k*2FaH#Ni8nU/6pBu
%S/cTjGRWSJ%#d`.Gp"gc<%dCaR?<.jX3?Au%edh^;@lLr*;d>2Flr"C(S6T;"'u@mF#/\q+Ojs:lp.o[i@[)8N8ALrEemp`":fH5
%b,rl8r.l?U.f%2C?MQ'$s#kb*dt)E3'%c0H6th!r0"/*kk\Nr;Cd\LA;WkseA2579c%tq(:*uen-c0_,]=ci-\)qp=)Qr[t\_+fY
%?/0,V6>#9KK\8V6Y@<=*o>^?KCW=nDhi&eGGMUJu1Rt)_LHSIq3WKP2&f(nt5XKLJ@3$Q"ai=eG1?hN.]hL[sR(t$o2!o-2M%h^A
%-4bqrW9cg"%L/"Ms5j$f?A7RWl*dj<e,$L6jK!;oB-bts,hg&O/BG^7'0B7/Q>V>IKp`riI=j5[F@oLrpW.a06!MV3KqM-0A^0rr
%jB49Jb)DkVK(pSWm2</:3*9a44`UnJ.n'@4XC")X0@<jc_,(nW_^Tg*jF;-dI4V0TVRteNKYl*QRrK>DJl/-6GmMSQ_Ag#,_j`Up
%Bt52Ok36dRpLsu7=*]@oN^&aL%FWHc2j?7TDdO\HZU>;p2/edh/?L1I+-I.YEOh-=*c&5kW'0L:Q]V_uBpIP6jntj7<IrCfoH&Wd
%QamMD)B8*2\G%`Tg92<+$i:X816XDcA&>C82mM<%7+4?EjW=bdL6\DjV4gHIq!#LH4]Cas$a^(cd^Wht`k*5ZJWG<n%a""p\i2b_
%$hD!'&5lW+oR%pGk'6\lT/(fIe%joO`1PMAd/q/<d5'S)M!;@5](jTgl+-Z4NuriKme)TlP!1B,aeGJ.Pq4Vm)&63b--^sLJ]P6!
%dNhuEb-b6s!2^L+CCob;bkZf#)Ek0&2G-(OcJ@g9\W/aF+^P#\COe"'AG@G0*%Q*7AO,rq(NXG?-[im&3drPAMk=T`&7@g7Wb7cS
%ot\;?N,NhF*Rfcb>E(7*H%d,)LN:g)QEC9^>kR[4eQX8AKfFLHepo%47_X&J0]1=t1^YQnp?:FpHDT/W=4agjk06n.$AX"_>_gjP
%>[8_fW%Yj=:<dp!Y)Binp7dgk]Hk5%g/ft0UoVlE[b`/QZQ0ta&<ZpDd97JX4RE9`WsGV7pMKY9G6_&&]eA%Jo]V5.f!WOlop67r
%^@d/b;o%BB2KOL#r"2@WXa7sY2284fE/gQM\^ZQh^.!np>q@1&'P"Lc1f$Wl)j-0c>poST@;"Y;0;LX;5$-XHcM-U+mkh9BM(Xj2
%L9@1NA@>Y5%/$1EViFagF]tZf7a5OSB4g5`^I@&+Z<,jo_sZp/Ft.][Ut_t@qnLskRggoQfNrG'>&EZ`"t)!?pCkSs:e/3T6EW%r
%OY3e_jAg-2#n=T/N-.T3RR*M6nh`B#^]To]EQK3R_2<X.i9+MG0Ko8/"noa5rg2tZkJ0WAhK:(=eTK[4kU_9,3_c??-ZZ)SWtV>i
%K;BV@Ek%24=,n+pidr(-ngQpRNLVl-3HfZ.SFIl=if7\@2*-1Sg9\)V==_Zsjo]nQOl2!]Z!;i5P()L:?6nho24*kd9S\HJ&<\C6
%8>>BY>X_rR`qrPR];"7AhOY5LP:N!G`[[tQhWbCacj(\&`WsrhZ3r?oRj7#JUYNpC>;LRB/F3@oAnONEJR,e,mr([&K`esaZ(mpI
%Nh>Ce-XM3;n:O!e.ni/gH;+S9Nei)DfBF1A!;""gLE=14+<@QAJCH_+\r#l.U"U60_I-Q`7lR3jj+,up-X:X6T:kEL*E^-*LWBTj
%+ctB..KJU]LlY;dLbFRHLa#57#TX0[AAK(rNdP=83VaLUWBl$oePar%\tWH4^,7D5A<cDh>Z8>+rJ.fO]kc<7%dh6(I,^Fr<S30I
%qdO(>;-KPjFXcV!K1Z`6aqEfVXOE\AZRJu(/*[lA)YLCk,B=*NQ(dlN?*g_u?:T&WbdGgZ=HQdja^!M(WOZJZKmGc/9&$C0mt^Vj
%n?SD2F`s\Sk3BX?]@Wai*</<ZoQ.Z#*q0Y]lBed=5dkeI4D]DdN#.mLEblc<p0>_>1KeiJ;jTfC-p'LKi`@qM30o[]?d/=TcmK&S
%E\SfV^:9T9Ml9Q94^%'lK"a(qNN1b&BW<!#NdetMoVag2AXjd4YSi/_KS"D9=\IC&&t(7W=II?e2ckiVApJqCr#tmr0k[$%Z9)=c
%Xl&&H$a8E\\F@LdcnuktG3Q%D@J(;i?[VA[<DR#m-u>6Br&DX$&A:(B"<a<HnhDE9NpI;Z.LqQ@7Wm9qO^M3[MRA=7&](`LX+Le]
%o-/:tM@Pcfn!7#kD9A.*L`?6nSoIU+nMb^IU"qr&f-`5@s8+0^mpnSiQ[5TY[Y8s9,8T2*^AY5rIB@_+h=E-pmr[%1D1CP^^S1>1
%hts"^[,W=`V/1BLkCq&'KfAn!p,MF0QFMj>0^N]4fa]=M9%@WOKBEL+B1pI*OMagW0d.q+8XWP=PBe]X30h#^Km[4MI*l1nAY*:q
%#+V;1c:l5LJI)?,85h70WITi@,7eAek$V59kL%LFK,6Qr1i<<P!XDK=%1&F3HCP1P]VEG)HE)Tu6<;pi`&S`bWf-L<gsV^=8M<KJ
%p]r)W7Xes7Hl^RH6Caq9jpWJAYgJ_#W1lchG#<`gEC!frJ5%!R'i7tfQHTf9o>#Ktr,T:%[(.6&\VEA]l#ghr[g"thEbsq;X@pdg
%U9a,Up$G7e/h?C"o[e+QMVgno4VkTm&PL!p;1'D$9@8&DB%$tS%N;Gu=sK$U"Kdg(-p4\!G?&W^_sB"U2&[bb;r_s12B3D&7]qTf
%XMtN#7u0Gt9G4-]@bjn^<p<?T:",-/Y"nfi!OGRhGb+SM%@A0\`8<KG&itFrnZcZnSPHkhr)5EI^7"o,g?uhOd;0#-SR>R)#45."
%G[<!3Rj@0GjTOT.^Xt8Mq2GH+iGN5S&mNsD\=+4_rRBaI\3\j/=i4(jYRR*"TXqf`N('9o^dbc;d*BUEFNlk\7eSJ$?;!(iD"E6%
%X#6Cb2JmO_[/FClQEO"FHU84G[p]NtYb^`X*@E_C#n>VPoruSG:hce`@cr[R\8g>JkH`OLqIu/QSM#uGQbWt'`*+-X_(XsRGuPm9
%LRV6<MIq^qn_X%:d3Yf<EQ&Q7XN@GCQ?[!n7QDeVqpR/"db5uPdH93,R!\#s\,Nd,XcYM4lDR!H@Nl^nEa5)J;>23a>)hWVSc6E*
%gW7E4%@rs_:_H:Q1ET:s,5_b>jb,Z3^>L9^g_b#oWck"p.JPnGM[P<jKoRZr;EVaOARkWKXPpp-KRZhR:BgJ/`;Uu&>/5TnNo)d8
%r"B9S_f:/mL#<\frMW;m)''t^?Sqap1fJa_N#Z_ebidQ/B@`LCrjW1q)OgeDs/l9mYb0FFJH(/uN;;!@0V^p!;Mg<'^pF"pM.*&^
%XQ!b9(R-GKm23aAEj[B(K/d76SL`T:V>fRWJ9^Z#qNore`[>#,gF6s;*`0$f1YA>Cn3:,&H%Ps8MpV)13UQI3==<,&m3.-G(Zq":
%dV[g,DbsN=_juc*_kA.3(<r[U>1_-a)Z7gZ94r@5CW3^3DeuhJXn#WsU5oe<riQ1J.^2M:mqFJV&mJA'DPb7PTjVXfk(A!(WlB-"
%:$/1MJ@0.(Vop'=P5@kC&SphLj'EnfIQQ494pSPlKBnmk<nOed;RJ![%=U=fP>t=[NN1lH`5!oZJmiH4aMp'B2NIF69Q]?k;qg6!
%G=GBRW4'RMCfF([XpMNW/[dZ/[q[cT3qFuDD/l_p3_'?c'_*[<<TGAV3aj,hT4">BV_]ATUKFuR\ajLQ\Gtj=4'+B5"WX!N\(g-F
%PjcW(EP)P=A!JP%)3(<0E8fO!Lp`/,&\OmT$+_hco_kjdal6Ijag2m+Wg@Zh(.>/d__9@P40c[2g6"$An<Jf)d0lb]*:>2#7"L0o
%3I<;eF*YD/"jO,%qb&e<cJRu1IQrE']U<23&UJdgJX?"297DkE@CZVR]F47Y%c4dH^JKRDh#9^;M"s5#]eBsLoPseVlrl>9F!XrI
%fjc,i)3Ms'OH+<9btC#^);0+=#'`APaY.7t?D+RLo1TDAX1*:_j-$Q1N"Gb5]AZQ=%=cN<peu@RT<]F8Phkg>e)4$T%=cN<peu@R
%T<]F8Phki,m"TusfaT%G?MIXb_p=[sroq[_\$&$h"?$1S88Fo]ect:587J&gAij`k*]Z?:UL2cn1hE=93Hp/:46h)PW#XFLp"J2I
%,@rBG/Ck-*G<nub\3/aAAitEsa#e"^$6!n7q_P6Cg'TFK*E7#IKU(,TB@P-cCt;gK.U0*>D3@C>TM!q71('o]0`oKe38#$DdDe!K
%9bFBLF@@U9d$@:0KYCCmZV$5?1g.u3WrcVjNToUt&XlXsNn`X9l`#N;\Y7HD)QpUpS=b!V)UdjkeS.71FLqF(Mmj8C3lm4Kc:]Fo
%%(AqDF@1L"f/U_"7;e=KLf8,4Met91e2?F)$uqAoV6<ICB\$ef/eA_),_-bKZ2mE1_@d5c,)X3Z:fF_<UTI?>@Mt#5/<Dl&Zk$o>
%(>2rG0Iq[)i7,>NZjn!&?%EHc9&nHoNF_O[rbU.Kj.aE"\?^roq?0hWEhaPp,)oNbcj^)@f-(@fBVnhKj?]c_#nP7\I$l^tDIUqU
%Kb[JMW[ACM6!a.q1qQiVdtejp]'14ST+fX#AB2gq:a_nk6+t.RYCo#1-qaR%i1mMNoG9e@&P<NE0ZJ!Nkhom5i<5+EV#3PR$si2k
%[I4+X2t).kTBgr"J)s^V5:W1V:QPKf!"F=ePi%KcJtn&dqI/A.aSo^u2t=-u56q%)S.=u*dGL:Z$5ZI?5:?8Uqq;Jr[,D;a7^dG3
%QA^Hfb:-6RKR5S]_)gTTV.^eY8hLP*]P+h*#urF0C1D?VUT:(cNS2D8<Y<m%@F]lJCY37*UI3VAZ#2LT7q.))i#H0Y%irRJ=;g*V
%4GSoV3%N^&[ZbIhX+]<pI^D"3A_Tl8*HqCIW1me>_)G8\A`=_OnNB)8#r6`?_;3#!49]bfm@1F6E0^lP*BJ7La,0-5+4r_CEr/U&
%h9imYWJZfCi)IJ6`^n%70J`r&cAJ!;Uh"h]i+0gJGm>L@Z_eK]8H?XcFkoKgbIYjEDKA)iYSL^!WL[Qso`e$>-)O+&5t:[M`$Ccs
%X&W<k3=#?Y<5.D1>tF]_13UOt2MAb@6YEfXWc8fVZfOmmS"k`i<7#0ON*hrs%.]lGP1rX<q%VNNn)(m0p$VK3GMb.=Y)"f$nEY\.
%NNQI^RlnXT\2ghgf5+<K<]P5!paBR8:>AN.?=G:GP@j]U>T]1@6kYAR_J14N(X=XU6A^YB_bMPR@:UJLSWdBC:Ht(>]k-YZ%/u-t
%G<>_N0:Vfi>BoH<FZ)E5jn,c0:`IATWthV-<>X_[Nb&d4F<?f]_!n?KE`[;kYX5>n4XTGNEGl4$b.Tr!j&M;-d^22B#=DT=oL]fX
%B4N<(T]tO+:0Vk$<TpYZLE(+XP:n8'nel@$P@$/Y%4/kSE.Z9%paY%F!S9nnojIWs(M-Po1\KeJ;b:i#%),;O_LC&n`JN?Z=\fl*
%^Mm5Ul<eroIb!FTD0&TqOqY<sd?504e79nr^AO8sRG9[8FUbKk'c[lfcLpLRV#>6X[CFt)UuTV.IMIZ,&HfJk_c>NrgStu/UEb(;
%"f&ik+Zg[M*OQos2)XjC"Z=k#f?D';C_4o\Rj6G+iDUC'M@7KJD^OuS)C<e/LBO6;]R+JW_\[!Fh/JP<To>'oO`NP2N@*Ut1l4"i
%c<['o^bZMJfKVX]f#`VP'!P>0P%,.,4dj+K)W+f[S2k@Uc22p"!_B+`+r)AA(M,uV\WG"ejQR;_=tn3@@em,$[Ur4"?;kZ3@P+'-
%bN?L4'3u8Q3KpP)*;dh)DrOaU'?<7Lm":U+3O$X*aI\(G<HrNSL4<S*pP#"IU.drQh*b&[jm-hts(O]Lhq#mmnB-h&poLVi5rHI/
%0ra;/?]8h=)@at;A_K0k=dLBW'r-aMC:mE*1:d]^UQ>U&#:lOI1pNa1cB,uoKEn)/Y>Q=)#qp<Bl0!b*>kK/SUTR<"\LtP!.eB=2
%R)ofJY!-<9,+"M9Y+)r!`0In5crg1k;FH&E8i9jE37$TC5>kMQYFS[0Z:2UcXj[WK+O1rMgH%X61K-V`eeK$trm>UsBg8iod[ZnK
%Fah2q+gGa?-OY^j>YWeY#o0"]*#DRoP'W<+RqH+R;>V0O,Jg)1n%pIQl:Z4l^t>&sT)ZG<;_QYY_`9T9FLb.UNSHZq(<r(PF%2VB
%OAo&oXn"X/ZQiQ3Zm<Z)V/F+--Z=u9$FQVGBS1e8nMrKg`[8R.$*[+c/#R*bCq;-dk)'E#jL);HXP+Xp:$*ppXife(P,qV@&X?'P
%F\sY[&M.4O8F!u;ObM%(A/HK=#ZVL.Ca(#I8RREce5EHs8m/hAC8QH#)#;X&(jYthh6:pli=,:4Z"`X/T'IXO]7'?5lfU/D?(ScZ
%bmM$_MnEB4LD?TqksdO94bYU)R79dXbaSKZ,ANKp9f%MC:_X$j2#7$F&9T52]i^KEPbM&_MFt#BaeH)f6$5<nZ'lc6)HF11;q266
%p+`O`X("=GT?-"k*k3Q=3$GR_+&#/UYi;q<]3ChNC\+[b6+%3WN',/2F]_qmlC'Th,8tCLM4f12D4AI^D=Agqd\H^*1"r#K,\Xu:
%<#6J=?9:+&rg'bE])!pIm?""WV2WZ.^@4R2n[0dsR3M_d)?>ul%T8].'K8cscN=*m[=0F8Ep&ndROJ;Vk%iXVA96O%[F4-Hfd(EJ
%o;nH.lb6<^W\(&[j=F6hY3&4iBkukta<GV0>Z6Q8/FNo,/uniZT^5[Z2dRbVK'E]D[Lb1;4d9!=&GQ;#oM%^Ujh&TY\4DXQ"V7+:
%#5o>m`+bSHAc3'_80seI)1A>o4MZt*TH*Jm]3m8L.irdWruU^oK@gs0h?e>h1\r]IHqGI-If\]7S=e$u&LU,>iG:7OS8)@1=/MF7
%C2f>m,@t2(?SLb<!p_%NNM!i$AiSrFqr'K7i/B!<,Hc]*hNUcHgt3mD??l'I=O08O;VFRF:C\r6P(N<mE>]64%$GNR7V$'?d?rQA
%r;]d&)qWs*CQu/F&*E'0<gVWSjufSr(>T1b0q*@[(Qd^Z2,@jrI"Z0g^:i>?Nr^uIMkQI#acj6e's0B1O>9/*:!T`NOQD4Cr/gET
%_@d/RUrIQ%`nKK_4gLKh<h$'k2k(M_2$'anl@c;1YMu'cq#3=S\gF)P&c=KkBq'fUii8>^>d]QmIR[T8\-_A+7oaJ;Wbk/3$)Mel
%P.F[%OWjjd0gP"N=lhZ`YaWqh_PS$m&+@?/7V`sY1YSLh=,>uB%Pu:t`)[%T5iQCN<)3i8P`4S6Kcsa;>$n/Xed-Vr"tA\=@Z2+3
%QF2O8RcoqNrt6gfLJUpZp:?"3b;K)X[QF,@(SP?OW"HUV=(pRQp*4[Wf)ODdOB-:(NW@7-Zk8(b9dU1m[Aq_>WoKQgCfR0u[+'p*
%dkq0'Ib!PGnRO5#HPT7]Jr)&er'U"9fpHjC<H%!^kd9A?f"t]r]eg6h##p%d+k>bUD]6(ipN5LkX,^k?de)WeV8VX%G>I_Ji4elt
%T_Ei,Ri89,N52;;W4U.RqQ)4QG_\LfbTCuBX)H>f1_Y_t,T-e5q/B),goe&?5.@*G;3o1F.8']r3co`fgFO^hL6Fo+Nh-<,<G47:
%Y/?Hh:0:N22!dY0lWn"?[#gj0YleM#!*,,f"K3cK=pq@uT)On5rko-ejaYEQfq3T7T<W<)B0+ZJ$o-(,"2gpVl-)n#iZ_AQ^gRs;
%a(=0Up4HLLKO$;_U5$$8oI'&`V4JX*fX#b5C*lM6W>UI)GF_Y>Q-Q#4o6$q,k,S+AW<5+N@.+!oJ-alI"Y/+u`n>a1N3lqZ86p&e
%;iHjY(:deGQ2T/*b2XD^^.l/b:WZ^#?U]?H_>`HfMV)_+,MW0FA#bFVNt.h+^-D4uG98[VXu#t@14`5gInEbN5CbXbs'@1<JaNE:
%BQ02r/`0s#<R)<6PB'?VMj165XN-F48?GV"@g\7f#YX,&1A;hom\lanURuJ"njAn*)fEV,A`ej'qX<a-;d"B,.K?P';N?uZ3bHNc
%r^`ap;Hg.`5B9n>@I#K1M'dPhbMnWed1C,kc%?M!:bYJ;%LsC-FeT9H_tgDUOYqs@iPQ]ka7Kcq%ojCD4P@<;aE!fAHC[P>1$<J[
%,sB7;DS#;*OM>qRhjKIo_GG+SYYi+SSC,*=+["QULZZ=S28\cCUGgh-*8TKMUGL"(-hdEj"0c=68W%2Oq+Z6np#NT2r)Hs3Pl.]3
%o!^(7Hq1/CeO;RB,0,"Xd1f1&#NFG19kB<Xe!`9;DCjbn[P8Q`=R#F74,[(bh:-*+EbDF4)dD^N>!LSH.^o%FUJ,`g5&FU'Ao%G!
%)AfAFL<c0%MVUC,*Ibf6-jh/r'n_>DZf1%)WBaZJGcu:0)5c_?ND92gHFdj!cT3PT(RKeh2]YuQk[[,`V5'"?Y,:(2$2/?XeYGlM
%)su*Lk1JlB:JK23K9N)$ZneUt3G)3OGr;^b9Pis80&<Ak6KLICVhcTcS[("Em>('252W?7r)m=dX-kdp:Yg?95MuqL-g4Aq3oDC]
%`]``omQ,f]2u&\W8mJ=RcS]m\c`OKib^</sJ1&t%DOG.rS6_KUs(PsB:_W_XP6uBr9M2Q+QrY[kn:K,8*aCM'J"VWuY)B(3gFKK,
%YHNY`6ZYg7[ZfA+/o<q`<>IK]h3ueik#p.,8]h%5T1g#=;[m@rH2IrW!E!Zu9YE'ZCQg-;Ml:nqMBGu>R]nDYdQ5K!E%kDuC4$>c
%1Gi*F7Ws4]ACK`]_E!NuRb3]+13Tk*F1%#E2B)e^KQ*@o@`AjEM'7m_=m'VaAi1C@9S-#jHR,crlnU.os4C>Pe"%QL^W"A6>K1>k
%9k5i2^@G#hK>tDUfLmp8`AP3kW.K[JWuEKj*V"Eg4Yk9m\WU:Ym1PY_*_#-4IaK%!;4'hr2V*]R)AE*X34jMc0Uu66)G'>)5HK#n
%FuY`+F`<*4>n:Os."4[,R&c%^<ASuNM):;]k?:p<G)1<Dc(J=R5gAR64"Peu3#<ek*aBopFhJ0#<k1@Vq_)FPd&>^mErYPt,1]!d
%/8BW^@clPpnkQ_54a<Tg(jGN)`42RXr8lY\nE\T6CQkfY?.;JYmRcO#H7V[Z.\*U'k)>l0:<qL/(sZmkblkC3k=r%XCJ<%eHF'/u
%Idj5V)(W7Ls.#fep<ooe>tQtls!J$fgfR8)]khAur9Qh2J#Y4<cQ;e07XiKK;lP.^I`cNm`g*IJ$B!toW!\X74ZNGrc/HBQ)O%D?
%pL3k5ZkLpE'^g_f-JQ+_P4JZ:ELdB3"ll-b*^'u,1Un.pE*kR+',T$mOSc;LT^q]F"id.75[,LaMrDD&os,%$I;&GXl_rZk#<n4D
%EXFuf=#E=8_Kg9(do:[f]8Mk6>nS!QQT(6_^_/[\nAb:[WUR<=E^0&9j@^sGEU4\qC9O(Uh+Jlkj/*LA9$Na^P3e^'Par<;1R=fJ
%BPR6,ds$tZ."WQKJ.i<?#?t)KQ)MBRHBLFBMDT?-#>;I9TcGK(CNK,`$?h^,7Cli.HZIZh=ON]j*&N()>T=NT'A!E8RN.CLn:kPr
%(VH?gi3^&AR<M^nBCfj;RTSj>X4`hpc?1tqH;CnpTTAd,RtJG/C[>A;G"(\Q'?MlnU5fNHfJZOi4<Q0?Ko=Ce-BC9#/LjO5^HX4]
%;Zu9;":W-)J9=7i`F)DS.2<&%NIc)T'dcW-&_BPq<rRX#Uk6Cn.WDH`A$mCmSfGdn:R\SP'Rk_Faf,%m$jFg#P)<&bH_f=^Xe'$f
%hA+RdTZilSNeFq(h'@HAG<Mk?#J&r9kIS&mrJ&3p<@s+\[p#)ET-7Q-h$1N5j%Xq$n0_rDq9s&>SOf>Ol1'MoAK*aaA)-mdY2,Gn
%GK8P*d(PaiKh?U1Z18MZ?a":KA?Mk44]+%_a2puGa*"9"F]A<B4'Zhj9%HLQ?6T?0ae96':&cX03)Y@-blLtR5jm]aFIJs][a@R6
%r1pC[UoT:r9hjoaM'/iH8_eQRoR0<<g8<V__PT_iqb-rlf5hksigSUf_0o;<TOiJ\%XP9TF<=4=W9Ti,%uRA_6roMIe-WQ%A?DdK
%3iKW^YS9sbR8gbeS>@/`VdgN7e]eDRE2P,<6Vq!fds@oS&/XO'6^S7Ff&d184t#?.,(;W7[=aJdOpuq(9YeCGHqQ*P=].,kkk,0r
%E.$fE26NK!GJ1_FrCb;LY:bd%?.V\T^X`Y;Z+1A,>,[P1@WK'4bUtcb9?lrM3F9.>?6n!XGNYZ`mcDleMCmed1h3$B,uT8.S*Hip
%4l#+Bd4N&W3c'frf?$&.DW'1ra_G,;Z->.2h%O(X%b+HsDM&Eh^Z8hCKRfTZG2V_7&;VO78O<VAUYpAUqth)iFQ_-O'S^#f>;+JK
%-2a%L4-!Ub7PO:bj9n^^4**'cn)o-!Gri=+X$dnqr#)Ns8XdAM+u%`pIl?J^AH"Zqd7#\3roZ-:*uSK4h]J+Q9:YH(F!fgJoD9P&
%IQpEgLS"'3*O0Ckc"QZ/J;2uN%4WL%7EClsND\\T:2(HZj^dkF826e;J)n.DbflSs,!6e"+o`Vgi(dMs:(#][_:'BZKk;_?f5;&=
%A92#p,;Qf,p(G#B,;Pcp^f$H8$]Zj#)7A%%\g,6oSb^@$(b.ML/f^gSdEuo@8&GH)4bb_j.sa2WIZX$c)''$W(kEJFpo-jMeP5A;
%PL\u.U@5GnM:5EO[LPm[**tK8Y?-]o?&o,@)f800lBgq(6ut'BdUXinjXR;uWWR!7PaQPArII'nN-5an">YeCm/St$JDKP5rH[?A
%)@%$G:,\:?X!QN3Z*cL3VEF'^E5okt3dFjeYI1.WNiSMb-b79GkcCu'+1Dj?72fF!#<pb>csB2(lFP^"L\7eHf*NWR:YcCB(6c\f
%:snGd>;Lu%.suD@-*G%`TjlfWFPS9@cXa2h\X&3]_jW=$$/k0;mCO3^#%+;4((YsnFhn[UI?Gp:-$H@W_ZlPgSD#kr)]++"EJ"o`
%[u%;Bou#e@7U"dqb`GKCKfrDRcJ_Jhg;#M+pi+?&e;1<cN!Bn.WVp>57!(!oY%+,Lr46;ihmBN0[GD+FoWEuH.na+hWU%ae0\bJ7
%nugC5['I6!`N@iDWgs2C's=gQc,T$lik4>b@s9/U)d+c6),gl^WMcS&KO!-eWm6lH[AkGu.6_U+@quB'f@IsmI'hnFh"+j\rl`lF
%)%Sa:lbRCs)NkGb1A&%G>"r92LN.F,E7Gl.9)81!n6@<\jZbeI8Cs0t0K8tK7ZsnH@bo6qYDuJuF`2NElVH0cn5FtK2>3qUPBi8p
%=%]dE2=g+&I,*tpf?GfALD9;T;T+K^brr(aDjBisKm[]4I<Vtd,cdAPeFSZW^+/BLU7O5gY0kj*Sc.9`UkU3rhH+Jk;FWQ(UWIG]
%q*q,t%8^].ihIt.NIIKVr4S8=Onk_Kp&3DtFliOt;EdlN<.B"ETrt*Q0uMA%bsh%d<:*gG+#+bG8$#3hKr5q2D=Q)WOL)>>4^du"
%9M*6kjE=4h%i[abAakXUaZWh!ES3poBPTCBqkrc22?]VYX__Zf9\.@na[?$2V<O:.e8:`NkY.AR`&11]ZWCG(":aN@BUIeq9i1e\
%Q?B3WI>4<84_2-Mo]k)2g^Q`S>%cc%3u0:Jh1N_Kess\`/\S_2/_:iIW`eX2_j6b"`3Q5JX_VK:=q745Xgb3Jf>pr")RsoYK2n&Y
%f$nhEd@;/6Cp'nkqp]2daQ@q$=0Yp*h+SNGclU%LgUs&DR#Od0S/W*A=eXfQ]Fe'T(=ASeXa=k^EdlTkabBY\[^7%CLDZAE1X8RE
%jL3>9]3crm4dYf*ia)^:+i)$N`jS#Dh6U:1QD(HcD_jXZ]Lg@iP%q!=q6lK'Wgd]0[Bsfbi`5:OD<&6K)uldt>pf4!@=@fCr#;Uc
%Uo%%L?aCK4[UEEW95K4V,aPJ>cKXO<Q>Ka#&"QfuG>S<`)6/AeR7*dB^;]WFA(mmQD'U#8c'FT`D#VU)'X&EFc^\@]B!3jPcGZM1
%AuY'7C0[2QSW^A5]t.%s2'\8Q4GX;=_JK("2@n>"h8\,Q5Sj:a(o)P9Om<8+OP-aJik-^1BQjGg#k#(EeoY[.PO:,05@TF0he($2
%ZbJGLFp6SW#[eONiD/]hVtE*P#I.Tn:8hB&&7]$^Op.CWfp6<=X5l1Qbo9'[,d,cM+#QWDUt.u(`*oJJ,p%0!j.UP;3XJ3DFjUU,
%RHlG?h9/u5)!`OE&g^lD9L2F[MJdHl;Au0E[\:a)2]Xp'YVaq,^iGmZl*AHc`<BAuAF&_@Fe3\4\W,098t'+8+NKN=UaEP_YMa'T
%r7GQXW!P]D^(FIfAG.N\4IfQMo-=W.O=+mcMpV'5I9%_V8MBb6Zf>dVR;Bb![kCT]Y=grRE<f+uUpWKGVaPfddkh\XIZ^t_XQPqg
%;P._Y\`SlBVU35#S'nUJ)Nu5X&l(&LDrilN&@FNFPM:bS(P^rn-Qa9=1%LinSAHc?bga#S$g3YBSLJhSPuGf@7oF0T"PZck(5Y5Q
%o7#iH^oWHX.]B4.`lPfN,UT`5`+q(+cnK2QO7#\`haR=->NS9=,?t(:R,Uags(0"\f;'RPfg)tTGTNH9d3KijQn>\NE%V*2E]*NG
%AlBM$0Zb>-F4'+\?-c')%&-KWfFuS&6*-MmZbA&<M,<$34BR#WW[A=(n[RC[i0FnQR3Rplm<I__j>c&Xn*K3hJ+uQQo68Io2R)/C
%C?<FTk$kM(j0VF:?7sjl7Q3Gt3],YL,KccAF^j(MaYp?em]=o`EAak4j`GC.M`o;:`Jo$nC(C-qB;6f2^JJ86du*<m1TlK0AfS(g
%lXi>NiV[d]m"'c<JJ)PF]T1`?RHEPKUt]4R-e+1nboR_63Ou\"]Tdm!9,rBGD.Db+h8EW')TL?^Obth+Hb9tmX*j;+'9c'iF^]]N
%0XGjn<'FB(4s%)]RSHeKildeZ9q&&-6B#'/GHL3`=4!].:W2=?PGNVskdi3jXC"g/UF&W\+C/0OG)B+`"FQPg3&-J!#LMa+n0V*V
%;7440DP$83J"J(JPPtEVW&T_m1MC>d/iNmt)_#9)hJa23IijgaGB$o8jb;E`MMt`6"nZR%L2+k]*_TmEgLW:.k<lcR1##TbcYj3g
%'L]>f;F-*/-:/FeRWd)*9i@s'U1uYep0kOn:aJX/oE&kqT2qd@EkDW@!qh(XY9&GToK4\GpO5G"@?K"0O[,AqjWAon=CFQh$@++=
%1t2[I0UW03)4D/dD(Bc!TA.f#FHYH05d;oS8M=:CIF`4?8*-LoU[4J/ZEJ#bO?I]?<u)7u;6&t*)nXDI<)Zi"CSdejiD@a#1i4!(
%]X9'6;=55\K-^tM@l%9O7nlJ(k2_\a>W!Ie0Ylj@IlT"!41gc$G0m%3D%<_NEcs5&[mXINUMr7*PC751H##?))4j0`Y?nO-8A6.e
%fHWD'`f3$!RmETF>#(KUF'#EM9`s;5DbML#\']%m'j]@-<Go=Hkto4H91\]#R@%$#m9_[rdHB(Ok(H/$MF[P$O5_a=^"\2hlr?3K
%CMi)_VC'BYCfIkM;Q)Y$])8m<!g?df.0/+;n[=iL-Va,V@&HpqPHjd@[7NMA?%sR/B27Pkds-nTdjaH3?.@p,@XLpf`nUkmn(&be
%2JFcid#cleni8#"dgPKg7b2Kh/tRY5-c[\a5c(+c'ba?pE[&3-H>Qk^g4"M!Z,m041ro""<^1ag3m(PTrj7n*Um>F0&OSDp0SYOb
%.kRb*8MQU'.0tQ01tA9k#oMu"9=cq-Ro-j>aqQ&fZ\=H_aMqC1QMb>d/i[$gaZ9aJhTKioq=[&)rL!T86lcBq@K]Fe)(?4XE1scI
%^E%e,7h(+C_o,',rQ4nR18RiA,rs"\S.c]g\l9=YS6UP=M$USoS%P<ZNKSPjCG6\l[]U9JfNA0#N^Xknm^,)Ho`5o!;Qf1WG(o3d
%ADN0R.^rWYA8O[9f3s6d7(H$+L!cm?On]pL!Yps>:cmO56d&45^$\B0J$7^+V=1RdVpTRIW`;Q4Mpto`.EAleStf7hLLa=_UTs0a
%?,l7KAu#P4H6r9jJQSPhl42FA<-X9n8hZ/jl]T[d9&l(TC+s\b5tKn7WC9?V,fOmB[4^ctoa^Gg"f92oU5*akM6:*Z<E08LB8WdV
%6lMU=IMrTf:?=Q+-srl6Hu;:c"tkld>!Gl\7WDVZC98PQ8$%)2P58Kuprg)@4Gb_67D@8NB/J_-9sJ00<@jW8DNf^PgJ$0kDuC*8
%YpbNj9^;OMGef$i&P[Xjhgb(D\W/bOs&f`UQbT[(p7crp.`rRXMH_t0hWnXRD==9=pdiG$[6`6t.%5]Q[ir\k@)fo/J[-'O[SgX?
%l_e+SB1B%u:fl6t#La(5l)i>@e:VG9W:'87ClitOL5`"[PulSE]&q6@Ku>u)K$B<PYN+nG5AKIVn$XPg^a)qlLFB^9p7dgk]Hk5%
%g/fsE8'<g)*c8p`=VET],Sbr8G_>N\1@A2M'C;kGS<i!?h3Zq<9[34DA2hl1Y?H*XRm,XT:Uc20m1cdrL5u&L"o1[DSS6P(ri@hj
%NIbjRA^$u_AFV6L[-1s2aeBWI?/o;=X#P+MG8>@&pLVY"@,J5%cdgPi'kf6A\VP%JpHc"(`Y#I&f6:i!3]8hHBph"$24KGnfj#c4
%9?a98)g?5,Y4pi`447usU]'(ffZaVd/Sm)L"g,4VDhF+J\F/YDp^_Zp/`Ls$.</2M$WRAiKPWTH&4)2X[>"0.3A#5ie.eZpYpN?s
%e@Iu\P?nr4F/@"gYcb&TYo+s+rWtVYi.k'kROQk.k]2eg[p.hCeG)Hi<%[Y[q35T'GM,4%iK7&Sc42@W*$%7Qe$BfQnOYfDb8r&_
%*7ra6=7s\8fa^iB+W'YmVJ&bH.IY,qQl#lJBp0n!WO3;_9go3"n_i*O7CY>(9r!0k<iIsjVs;cQd>ZA8nK;kl""@8t+Dc,!GTTe_
%$]K08rZ#'hG:rInLc8_OGiC9`1D_/XeJ9OBFc:h_2-Mf2+!BJFj;UM$_FPL(RlMqh!O[!A`q#(4J9d?SAp09()&72neT)7eEh\kp
%E:=6Mj[Rb@ATVbkOUYX$i]o[n5n$pn%(/'f*p\KF=eH*9#+gsJRh9!UGQL]pkQIR]Tn+c),c3TmM>?>:ds7V&>N=1:WZ"p)VHn.8
%(5B3,nkI_805(=4\es1;ftCG:U=`h2\>@%&)Ns/Q<E?&4PXMW,:+?L6+r6Fib2la]5FEciP+a:<St`0<J9?t(-5+I<@\3G$:&SIs
%:)K2LO(-fP@VQ"5CbV%k-,r\bdjhqC(#u*p:?rb^?I\VrJt2^@7u8Df*8gVuN(QjI8pc7E[T,L:`F_'FVV>Y6ZoL%RaiZ#Kc)*s0
%/bSC2+'G/B7DVJD52Xl74gLO&Q@'EY+05:/H=P]-\kAgSW4EG!%Z.S+XOMh:*QJ5Yh+>fV-5=*;QeUWUYUD8.^9BAZ%<&B^jim8s
%/bh^(a($8/SRm"&Bk(XJHu.bY)<kt'hRhV)WXn?:[u7H_c<OEkh3RmP%57!k)[%9H055`(f:*d.[gSX'B>>2R@U/"F'$TVn^K,>'
%+un.qF`;nS?5$sd&p/p\I^cu$BN)&b8)3&$rJZC"O2&Nc%dj8K]KUVoah*&np.j1KHYFJJBDV^J4Fs>V;r>d)3Y&68]\W00;X\_V
%f56#sD82e;-:R6hd*n$<N8KrH$^G'(\6$RH9AHZ"L(\j.,[%+Xe;HP3rUWY*X0q*.KRAmfFH;W2/5nNn\I4+=?H[`D>YSe$@$V"e
%T-MWT`^t^f!@i>)Rn:VV^`sA%!TmcjH7)`F(qe3UTlC/?(H?@XDEuibd,QrBA$$A:ePldUKWG@uM308r\uhDeRS?dWD,gs@0L!?I
%8ngZ,%d4c`29b7G%-(OR:;[$nJmC4u:]cUl-=G;WHj/"7U;,tl1<FWJg"X^+Ems/si8l$Vn>=gF^"aOqgrlr]mq5qcSD8FXD(?Y#
%aum+$*ik3%E1I>k\!;4QZ$fW/g3$%c5?*TQ?fG:t7a@6nhPPC!Q_\MV;E)S/eILQ"=9`oQ&S&a?DlK*L89;_\UK4MFS+N2I^"1bf
%=a.L,re--;-*:FM$M\I:5BlDoND?DN[X<DM7VtYZY@\oTQCr)"h;7csI,35T8TO6#:*?_=M]F%.h^AIAmT()TZ#r3^X*N[U:C[;S
%)l,Wg3FfOOlH-ccVN>a+DG4:\9[Y?12AY#'H!QK3qREnZD.QsF1MnGA6Zq8G:!87?ZVdu/G=:bU32*Kr]3ts#Yb^`Xan<ce,@8o>
%.n/^RS!IaM8_!.;Bgpo#gihkdd_/-1P2`#bb*j2M!M<JP,:`HlS8Jk=(HSf>r[]e2*9K#TnqiV%S62%Vo+H!e5rY"VfGf[^=c@Ap
%FS;V<#,JY"%\h4WQSJ6]!H+Y\CHZ0s$=dARI:-.IXdO>E[rOc54G6h75HrZrno7VBA5%A!8N>h"%jm>\VM`]?66-JAE0?;gor!it
%;94DJm>cQr?0Je=;Z<&)_qKF`#BeB.kM2`?%gcY\`.%6u4c>?XZeoNWF)*Pm(XJbZ^car;VMLg=R@RBZ`rD!0C/9,lU:TD\kdc"-
%&aI5[^n\.`YkHu>rsAQoX\*;9]jQ:H#0^`l=BOl:-3;Hha!?B?p2PeEOBbVbTXJ>=YKBk7!,Ij@hd6,5/+kR)!lUS3Fk1HEg]fT!
%iEFu2@70^<H9I\4r`k3XfcnNOjI]Y0VC,sQHr-mXnb`#J0tfj&Q.jZ'K2cbc=0JAAN#KU%6sC1;0!3P_W7"jK)X:,aN:(_g\6aNY
%[bAo*GqBp3fK45V^U+>a')d.qbc8T^'rG4"0PeOd[2/@"4_@](\rA%X;,tD8[GB;C%S"Qg--qCTGD^CDOJFLWbk`mG3Xr6%$<^fh
%-Gmd&a>WhT<)9r=WYg#6@C>n+@T0coL67I0A8N=N@@??#[nb\2BAOLh3MY:8e$;h*[C@R9G-ooa(u;4B`9>FW(q7Jp#4#M"%H:C;
%X2oMIPbUaf5J]Uc:M2e37^6i.Eoki+rl6m.F0<@!>_gE(=-AL/GW1ZW^jIt6]$G,E1?a]/9FBZ#G@-E=TD8<KKe+\L8rFNNMd`qb
%\/u)&RH/Y>Di6uJiJ"92prh1k);M([SafGr,g)M7;R*bphM6\p2IcKYR\UL%%P>C:_;3eg0WbUB78KGXYIoF"f[<MsmK"3o6G/qC
%[m@_]e0'H!57o>R^/)I:JVQMT3.=&7XW+"F'!1RBQRs'_mnAm7BjQD2bK-RX1;NcA:>Y&EI#5k7jc,jZcPsP_9,C(f7VB;ObI-\C
%LO-M.'Nm"Y-<G="W4ic$X<g&iiaRg!f-KpNMWqR,aO0)CWMMk_*6Lt7fI-dF[;sNfW`g"W:0`81W\.@1coH-/)0*6NEoTlnnXtb-
%;FZHlnRMucqaQEXFc!+mFJtO>'oEVUfi9MbJthD_\Eu?G?B&;=nK*\iV/]kOSsO5j8rm!,LqOC4#A^h<4a;c-U2u1:-p"WsoK"Gt
%<0X*JXpX=JnY+V0WLiTA'uS<':YQ7%a]521;=eNk132O^23&D/(bs$i@_&M)O'9tcePhJknu@_\eMC-#D3`mZft@$gJ*^A94*Q,E
%?.V5hjd*ZmWfsX]=e$4\:4R?$S0G^?]IS'/Jcp`.PdW'1[@^S,6`Z8JrL>K'AbS5/F+\''O@DJFK0e*N+Es%Z=dY/OjbZS4ToDj7
%3Zpd:jf9`Q@O`6*E2?:RWMfrM2)C8X1FK:UFYu`$!a??OLJlI(f)NMX/,UW+YicOJ7)Wg9%<6lZ?soJ6FrgQI3?k)gES,aYpGTMN
%h!=h)23*$!QbCC9c`)tsFiiARd&l/I8!?;Jg#Na\ncqlE!E1\]Ae;/>RVeF(1engVAAT2hX.q,)S(6OC1o52oKAG,M_RHr(O,]3k
%\lp!O$h^gR/5J322m'IQ<_`"cmS.K_)B[1-=17rser9.3Y0Ni#hd>"#0s`K`>RMZql4?P&d?Zu!8qL(Q-&R<>6$G.Whf33RBY()i
%W/@Oub!*5s_@>V8CVfE:EnWZ5=Kf>>EG(Tl0"(A[?*A9Sj*$,rIGK4#G?21YgYSO`*U?C"Qr^J:1'OeXXnns]&;u\K&,F-r)kU5=
%dl+%+&tq9O7ONR.$1Tn'=-SC0;'@,q5qX`.6MrU`O\UGDT8h?7ap-''A>%@];^O0t'T.:qKQTMoV<>s]h#pa^q?6Wa;0d.-/@,,Z
%PnbC)']mj['3uh1hCKu@$].X%PZfM5jM[!n@Oigt85@rrM.EraSJ66p5Z/<fogVt=oc0_4MbY9hXL*4,->bL#@=B6i6`Gh-F"LsN
%McBMN'Y+i"0CD+4E_uR/eDi#A)W>\A_P8p8itGtDrGZ#>$AL#l*ZO1N)RT$/H!DgA.eR[^Zf&FcUNeXiVD.4**L&sD*7RR%LlbAC
%80^WfB=O'JXhRtt0bifIZQq9:7Flic[Tj3+2H[Hbj<+'`Y"Yf=-2[Lr8Ej*NI$-mS(ss'HSYEuOCA!$T[bu^.RCl`'aQ4X+?QS=Q
%PM-jD,St)FO?lL8O]T;`#$FV84^EQ8P%A$J7H@Xk1L&*>/K;PF"QKLoZkP6kk5%XNr=<[_=5*!IRr0Njg2QpIi4c>a@_Gjnc0?(T
%oMIVj/Yon:SFW$MqOir0)Cac[l#U3&EApK=`l[*`3OIcnZa7ru.=3&:q#52Bd"\N2;8oX>ITAr0)X(c"jb*)[2\kauO]HC)qaUJP
%Y[i('H8jC!>[?isaJ?F7Mg$20FM6O07k1oIlXmh9="\@AHVZEag9BCn*7INX]bOn@2*A.hn?_3,#HI%!#GorUA6$U!G5[Cqi^o<u
%^7acB>J.<%^%Lf,QY<e_Ap`^+[(:F93H"Q!<C0sHK+_s,_dli[=m&hkc!Xl^W@pgIkU<0s(V=+;Z>M.g7PZo?-/4c#mTG])f\>GE
%8@I7#o-E(e/(lO>_t7Y$2mYH"Mecqe'.5F+S;X5gbC&BPpLdr?q2!%jAa<`ZRPhs.Q3tBP1mMDP'i.ciik5!0VGsumibVW&R\e&K
%R"e@SZMM&%N,PaV[8-ZSQSjig#FiZ<VMcGia1`M:J+YTN!KFOEZ+2a]O8[E!ri;kj,eNM+:U=U(X`Z)dZ8>45iW*;](ZTa?SbfGR
%<Nl%^Fh^6JXBpeGcUk?G4#96uN#,*hgs:sro>*S[)-L3$D<0m>R_6^YOUqJV]Z:WH9RJ6oU=SNB_@n27]VH&@7i!505cHRps%JZ"
%96-ZXcEh1g*dY2q5g*9iMRE\S6aED%(Str6WX8>XR?4sO6*b:L=XbX?eOoM$-d/TVTLF=4.G@X?i\:5(B6CC9pIY$(.P2Sqm&2Bl
%FH^p9bZ^`.34d;3g:!u^o5=,3e=8K,GSlAD:(gf%f#/kp/"tj'o0UdCQ/fX\H:KB?9m-uneQp86K=sCG6B'uek1$XT;gkEPFo:DU
%qhNk1ah_>bMY<&>>KVmtE43e)Q_*2U$%%J%HB>qgfsk@Eirsekf\jlRdm[9(M9?'V85ME&/H5n/3-"c-;ghe()9itM"@icc0_rop
%-M=:d8/+%G"U1FU_27c*E?k]2_K;Wu_MSP-K*ZR]r's=d(WVK0_#]Q[3cPRk,W)Y0EF.bY]\)R/NrPIMYDZ`]HK>rrB/h^SXtNBL
%.T25N'VJ!ukf`<E6KQrsb4Zs?O=Y6R[B-6(ditr_EJUuE@P=+ZR$lLV<?m%(JZP,[KugbnHWP?7>>LoEdHO]6hMHZ#_)pl'3P%c'
%LV=u87];0dO"bccQeoNZfuWq.<#lt.hn8l1Cf[Ls"nE1qpr_rH2"UaG:XDl@1=s_>T:aVZcZuXuRW%d%G-bE?VRA8E1Yba"WbkA.
%DL/<X.LhkE_fCTL5&ngrT"=jcB_]Q)H[pTWc>gb<$NJc0;/=#S/h;Ld0eIUgokI];<K=6i\_TC(X46Yt<OY]R9o?O?1h1aS`*RMi
%T:$022(/9Pcg@,F?oXL^VO1.S[q<u[Cn]Ardbqthdd1Era+'[+(@2A4X_8iK#Le^#d[!h56Gg0#15cK7V1hnuR8l)lShio>YGZ0%
%Z^5gJC1V_%oPHL7>W&EPQIm-)RPiO$1$`dj?/:?5IEJuoE@2,/4./)te&6'47["3cQ$*CRNN:5>boHcZbFXr%(VMHu?8\Feaegk7
%4X"#rdd2+ggJ(D9VQ/"bW9"m?SC,^W<-=8$AiIk8\'!8$:q$?\,uEAOV'HV)#_r\D/6O*3Rb7h8'rr5c@Ho]mQ9"'?/&;dmX%Z^1
%:7LS%?B5&Q9FU]tBMA')1!h#I>R[%a076ICHl[(,f7cP1*Cli>nd^\uV-)Ng`=iNj(QLt?U,(dqV28e?kP75[NmEjJh5*Ar'W4N-
%D<GT4dSP30X@M;3"_eI-k9&k(8m=lWX%h)I'3JpW<FQ#G_WB@S;9iF'EgB]#TTjD!Q]oR/hTgQIAPN)Me2(@/\m@:=`]?W03rQ\j
%CmKfj(:=:G6`uYS=ZU-Y;l)s[l]1S\>rgF7bcu7([7Oq+.&p&%2^u1q4Ce+[=#DpF/N_B[OLL(:[,-UEH7h2*.lXc92M/VV2ViOT
%<dAJ/?2Bml(HQ4Up!mVaFM#pp=VDsB;VX[Cg1uP]lkaBRZdioE0hGNl4Mb`Q30:;&=?nhX:gIc*JfQ^!(utf]Q/56sq8$K#YZ#hu
%<+s&Q=hl)E5pC3('+0s9V8dS=#U'?RLjP5!@75AMUb^:)NFeA0S^48@s55I0`VGOJ>!,u-WLDRq?a&as'@gr/jVHAb"M^/,6_/J[
%g?e$sL<n!=YOoa@Kfc9AAhH]<@e3*nP90+h4keN(Df\Vb.Gn0!;WDksfT9/b<h#4L,Ape&[Y4VYO:jI)HO=WFe&9fV:`Z#N!&?hT
%gPd;2>cL_T&>K@n7Y)hB"%7!@b4eh;C#e@O@SK`T)lYI5JVL,]`Ag5aKp0TrL.,^?6Bpp#"C[ChEpba;^cl;.[@5'XVtFW7$rSVM
%"L5R-.a2)NCi+n[fB0E(flQR1ZO=-[6']AsOufZk&$**59oTUB%]\B52B$D)PQ7GuM*(^0e\+aXPpb-aT@H*s,0EPk/7VE6H(08s
%F-^XF[MB/28TERL+PS@fgM@gU2*Xe!&%9X)R\$kJ\X!ZV27JI?pQUU[jUjB+#c]-i^Em8^Dc62)<2$`m3+@lJ-%dJ&8oT#2fql9F
%8Ij8`]<Z]7N&6g`VD]Y6VWcWb<!RI(;a#6;fRoE7TJ@#oc*IqhKZ-gZ*QgfuHckTBWgGi3Bl)plX]p5*WIDl"ARtLoN[L<Yio.0l
%Ntb<^jNP$sO,Z6V/*52.iF7\9i8r$uQo`tK3fs9/2Uat:F&8q5Cs.<Q;b*W>mbX>?j'B)\FmL+&pjB<>&.%WsouZhGiZd+NK@gnD
%EJWi?2L'e]aP0'1r?<$#Kr\k<nnMp`oFblA[GT-35*f]oS(dNgf&B&P8cekXDa1o7e]0W;5J;@Bh3VK;Zc7IPrq:^6,g[pRHM-OO
%l]:MX+a!PVpM^'*O)J?S/4WKa%g`>g:u%m&Ki>SS^@3U>3L(s_;_^sWHp8fFj-ch@mD%Yqs&XLC57?tQn)E.Vmf)0p%!USOr3q/W
%riFh$B0b8=h,acgIT?j;07UBH%m]URBto<SgjN7QLV&$g^W/`$H=*9ar8u1(F)0Ld.GpFqbMihT22K^Yj>UqT^l41tpY/d%]>&\2
%+8":iNX!KX/Z$^'J^+9f^tp*T:S.!Pfq[#[#<!Yg&>1[\,VL4N4iMajq0.9;SEG^$3Sp[1C^:,HI!PFIn=*uC('IE(KsVtAo2l"l
%@t12GS'g-j+^cP%q!c%r7i)Ml?VdnaQPMF6(Q,LT!s.'<QhJh8IH_*t9ptqPS/(m>5C+R]4=']to]JF7\SYDu[KCuGbN)??g9AP:
%W,?"f;sYJi^6c'S0:9541b>J3R#%8FK]AjR:U&97c:Yg/&Y1L'BSMHj^)Z*S3X[T(h>:>u%6OV=i#62K.\4fDr>Z2m*"/HFB'nZ*
%8`&j9h"L!'4ouWs0G=X;L^IKpI,%*G*fg!A"idk/q`$s,ALrQ,dBTdWrEXnK+D@44%`()&cIhYD#(nFA(uYgN2h`j_`!r0;[[dG!
%UkU65j=07`K@>@%d`Zk#VgJ<>Z5`/#.hH]8R8#mJ@`G4N(`j?##FO6lds6_<;`Xisd*+Sq*^$Rk[6!hlC)TQDR-ueV%f&0WKf6gE
%04Wc6#"G0(YK#sS@bqS77,Y70h.sG%F>;Utb7`P];>9,=[KTcq'qn:\A2'<r54d@A9i:bG0MB+&9]C!q"3^*.aG."dfHD&'Ki5ja
%f&tQf03t$DHOpEq31Xs0&FFFiQlmr@'il(^g)EsHC3bJS#nK%HoBG/j$ZH:2rks'2_#ltHM_6bVG2df]/26G=&fXf6a0p?AE%10$
%P\'FBCr3Uc%E?\O"cg=]K:X9q':\/A'=a_FCPjSm_OC&hOJdkE]rg#C1Hcr8,GmV]O/'7HGlT]0*$PQ=U#4qdS[d\^m:&'J,I[hb
%c)'m=nHM1q$_d67Cg^bc+/ah5)Cc/U'PofWrGE4EW=L/q1R`baJ"OU2^$9%B3t'i]`&2@h67]_FeieRYguGmEeo4)a%e*\DQ8O`g
%nPM8r3Hnn;->`:PeR<=@n&Oq`2Ki3"cL)CYHT#DXmXZG>GeS=C0*r$ZcL!?*a>Wd#*U_+&^9n3RRBZ,<h.I^F@XBU4(`oG<Ccb"M
%Coh&N#%]r96;N7M]ecnL@JSOV\o+^f!cqtdF8[ni3SIN<hKcHaW,'Dh1)%8Im0^:13`_2170UTM]lWL\A[(1>qY[pe2>gKGkYQ.<
%M>Y(D0OUkFc!e_/%jp34WmdP%kO75\e/XhLhlr:(Zh,M!I<,8\it&nkG#U?fkF]<DmSJ@]qp4]NE]&0)^(pio+CBkKa9CH.q23W$
%S7I0#rN_O!\K$Zg"b3o]KD*-B#e#`ZJ@1O6fY(XDo`gXjR%mQ2OKrQ?WYS4P/haC*UmEQ$eYY9YR04RZBt<U?J.!\X(ei_u+V?ba
%M]WXf"],Fg.0!#sH3"Q</d+tc"Yjt\7i!"6m4M$tEWVN?TD3,?0!#=AD&Oeh`2$ZK6_(Z?+s7'kc=PpXh-PN3h=YiW/,B7q1=BIt
%M.c<SQkKP@4E8R&RENAKdKTa8Z@>[*5`CiGOb'XrLW*-%#)M1(_$R_S3;Kl412)'j3rojXAdAl1!p42l!\>R-TWV/VpPHXk:2$'e
%0kJ)*:?3ZPY+Eqp)ZBWEA4Fms0f3A?/ruP/%*E9hU;Bjd7t06<:Q&)1rVH_1);glQ"O8,gn;SYUO27J\,quQK?h#NGY`@MW86lFs
%DbU)1O=(0kE#^TO8EC8XG0<*)ngK54PMDX<BZ!^e=G!me'^TUO^u$edOF&?N4XeLBBn8eR+/b-:)K\dZ4A5ed;=C'm,X(_HSe%SY
%A?=ZYP;>p*8djK[ApP$$?P59t_2<7\Sg`7,YeUhp?r1)_E1!I(K22R0@?ssS\8VP(=:DK"jopmGd&d<*+rFEi!>s#*>m)lJ32Q'[
%n/sf$MW;ePL,mGjMnl&/GiP*,P5+p.11lW$OXd2a3`%LdbX(r-QL0r9aF`k8li'T$-lY9\Tb=\-UB#_n"r8A4H19SkN'nDDPXjMK
%"^#Z-@3l,DZTK](!<ePi.]#dg"Gk_ra[#/amES:maC-oX+<\goe1P8h5&g[Yf#uPsAj`VQ=d=aS[R('n[Q$Z(l.hS',KSYPH<<2$
%#FTP\^]?.E-d[kEKR6"F:mQ3bG!T!AMlX3<F/-"a7uhVbCl\Xh(7Bb276uT<b4BAe;q]-RYDq!)V_S@\bj7aMBm,(ORq)7p'&].&
%D1*hb'kMa15f=i%C[[Hb80u49F!\%h(kq8;6)eQp!ZU;c$4Ge6S-S0&,>p]CHnS$QQt/[YoV@C:8`i-*+Tucs8ds31l\RGBd25fu
%S3cjgJKbZNl6mkW/4PL5b+;6nBH%5fZmr\_8D6&W!9?QY^I#VZA,t2O1l"qG<Vh5U+Q0>jEuk[2_!aeO7V+*rREgct`,8mu\MsYr
%8-_Cg5Rd]qQY&]n)o/GC;+_9Bng7q-m`/02(Uok<dV;e!NU<SCqbPLhJ`q*rj^H>GmOc5kH$c1F3!<E;%01X?V#`QOY[`B8c:)F,
%]4]jk3U+WhW8ItAR8%%e$IeeW3BrpO0VACW\4YItjm;%aGtQ5_^cCe--PY73FJi]IVQ"Mc5k+:"=B5EP1ZTf'V1B>+c0(at)NtF,
%(<kro)GX.ugNcZ1%8M6&:<Qc5Mk76_D<<,F;oT>@e,`rVVh'3VQ\!a.aS-\Leqf+8>b5+o<Q;3GBIEjNcqmB5-71&q4[>)R`JL#T
%:ag@Q%#,@u*14SFc+"VuO'YjoR0pbJ\9DAI[Xe>C\_RNL)O^*!bhPT-1UA2m+@lnlo,nI*cn5=E;5P[be9q?WJ`99=VV:H#/]u4)
%)FV+l#i9mCi"J%'#,8jp]t:B^6<RLrM-(Yr#?[STELuSheen]qSNufPJYO2M$Xh\Fje<nJf$K6]^uXTr/^KMZ]cL4Qes-FhX^[kA
%V(K[*EnDm=-*ooc]J_IgJdHuL29G3!IgT?b$5@bW^R8]ei$4Z-#ooA-n_JT]+>!*i</U./q[/ms1ZCoR)#4(!oRdAPmgW\##[$j,
%T''?LJ?B4R*X`fV3WWmM819Q*:q5GRK3;/,-Y3AH.q;"nXa3ghD_ND7"/@g=RY>iqZE#^#B49obLq=^Y+UL.[-n<$RCfG*kc#nAa
%B=@G@%'mjIYg]QF-=W(VadYAk!O!-o80SHBMi\s5T?i/9$sY,^YJU$mG[07A?8bJ0qr!L5&\*rWO=<5P.,?Su[jpKCKV7F\`3KFq
%s"Y-_Rt4GFiT/$OqSn(WN-s4fK+'3k^go5QK7(.u=T$tVp-TH"]?n5)/Np5Y7'&r\QIA`.?kt@-^g?5:N^MrU7t"Jd,!pLrWhA]N
%%=j6^mBX).KKn[o3feY,6s9sfQ@WV+,lks,_am0HVb,S$4Om(:+W2+eq0kVX5!0Y>Sn@4'9I'Z;/FsAnBa?8+s6,[%.Gl3U$C2R(
%4cecr&np>d!B;%OC[s;:"W.T]_D=9rM-.QB'#^4DRY,l0[ZDD8G,51;&WUF3!`)JjM_`45$uC`X2LqGenc`"oJ*G-fF!i,6Ku_&5
%REGq*3P3a.Et,o.b5CA>h"_@@6Z.*&YS=EY&(8OH`^F\65i\Y&8KQsN_kZrV"p[BF9`RFd_?>kY1a`K>jV.b9maQ+Y4C*UWcdG(=
%Jq\b%h'j<=*J)YS\B&bJ2$_.$i46-C$5BIa,RK;G!(KpZGJ)Z6Tbtq94Tt_hT7lL(XWE"HNi&o!7eEPXWs0qG_Lqtmknrt11p=ab
%Yt5cT"KPuYQT]?/^qr.S)JC3B`oJF6j\63C@3nCg`/i:g7M:q@3;A>B"LW`t1l!j\Ece*")CJp]=V\qN-s/EG7+)UMaij$Y_[McJ
%c_<;?hXJa:EWF]t2_D;#!#nnH"i9lSi"*C4n]5@G5:=8K:p';Z;=:NdS"L=Ti[gd3Wk)(Nh)R5EBC/YhbfBlg&+u<'gB,_0Z[t!S
%^f_6b%Uj)p6].pX"pVjG>$W.#WGIGKjCd.9'tKN04UMHNFE5>hV)KY-R7/c+<hR[Z%01X=ko/`If/mU&4@LSA[VUg5ZnoQd_5To`
%T4eTtD'j$#<ju=6N2.>I&9]5;f/S!e<i\"(02n-OX&Ola@)i>qBpHRlM3UM9/gOSsS<YT*F;oh;L!K:?6^_4C5QWrSd+ssfqSXR:
%(t3#9O@(;OW'&\9W#T,eW?1)9pAP]dqdl-_8$PIFmpXLP*Wdkb!jFsWs1E*R!go7+?Md;.%q-T7&V5eG#KT^o8)#_Ug;/.W<-$V@
%J;mr.>RLOq.V7Qq,iGEP/:&(."*;"s;Y!,02hDQplj$>00P5BbA#&[E@-Mh"%C-CP2V\uR]O.BsARNadi#NE;0cJ/5,rG[(hK`(;
%6ij%mU10^G<L%CmlN)ZJ3DF2mO"AN*kC)#^gP_J.ULSbNi7-.mO)jkL5]%X%ZGiS^6?UM1.^XgJ&>FF;Bt:uikoqPaN?$S4!UBcB
%1'7Upl#3/koSD-U$G40F*B'Rg%("]Z*e<)1n(i$Y(I,P/mJkYJ6[RkTrAT_DnM)766LP9VDJZD:2qoOS,!(qt?Ub-KW.hT+Y\8Mh
%a>g,O$K8&DY?'RJpWELA[Z0X([?:2:A;S7UIE'VfE5.i>`1@&j&SqYY&V/)u/;X?8jHSHnXkYoMgJ6Zl",',Oo1-U$oRq`487)&\
%Lj*G"E/%,8G8XrP)-r')`a2?Se.j)5"Jn8m2o*OAJ0XE*:No9h2E_+O9HJc6kiLY4'9<Uh7L2A"3e[<[cJSa6Kaf*uqh4a+:[B.>
%GR,."_MZ]g&&)%!2f[sa$@<L3G8A&a#q%th"5L>k"2Hb=Jbt-`4T]9bV6a!`nDj]lAAV0E66HW6"<=^(:dbb\Eu$^6JMcTm-^ob"
%OK:H5"1&!N(o7I*C_3&P+L_CccG2IIBHBVc*nJgAQu?[=h&NDFZMf&\!,qGF1CbcQ!LIe6$sU`)-jliDOEnsq-AU4Z?l1r,,VZt9
%6rc5J_*<p19D%Ve10Rlbo8,mf5kafF-!qF&1]VB)+R<AH3p?WR`&U4nOA:dcq##P//nV[#$<B)T=\mp.bf1ja-YX$@&LO*"5h`1<
%cs96GUs26aZ7HDP_%_Dg[Z>^n\Lsr!%2?%u-1D;i0`W!i5Ah@H$)H?=>+TSm^gslN#u9l"3io9po-!F4@SWoYAY9(e_FdIN8PP&W
%S7H>3OW1[`*+_@tZ<#]I%"8mY4MVDuJ>cqM-BNETdL##7AXAI`O9O?%m4/C9cD=8S!G<a1JZ(I/6-ht?+DOF)Z4"4Taj8C#<3/Nb
%)?Fo\.iDp2&GdLWOZdU<lnSKa&S.]bP0$s`O>8JBB5WcV4&.aA*]WdXAfQUg"(rLgQq[Z;aH@\WKJ:p$&.Esq&.&(Mpdkk^I0:?1
%bM\em`WresZji7j0aa<UTHBp5\;R_:AgDj+KQo_t"!mR?7%.%e>8i%-9^Vp%+pa-Zp]:<#oM4hj=Enb$!'Rol!H=k:+:-iQ,SR5o
%(hJUNSH?7?2e8W_bA>=Cd?t<F(h?g_?r6qLW`,tA/GBLBA<I56<+q&[Hjika!#XlYah<A_"dl$U2C$VK!\?HNk'9BK%g820']K@D
%BPB?GUX1nj+Rle8!DEj$Zs+"d!7-QGAe%4<4K.Uh^G,%*^cBVskn>q9aahQ=K@gq>Nj<"G0''e-]b<*eZKha5(EDh;+tRkE/r1sO
%6:sVkRYe*.B)2<[\O8BSO1-M#R/NfiT`5:Y@AJYn(24bROboYKQmX%1EtB$67qIblTL0t*M]B6q_@^o9_2put?j>fQ9'GNrER;B.
%d5#h,1,S/12D>2V.B>/@Km?dF*,'/E%9R6,(o/Ml%a/Y"PB&3\2@$VTBAj<p]H7C?UQmtD@,WR_@?4r67h-?/rh:mS!'Rj=.".bf
%*SD<aVFJ&hkfN23MjksK46;i@C5>fC#0%dD$Z6I(!#0(s;[rHp<t#SF:l'_geAS;u1(aI*(`8P+9PIb=?-$s%W/ubXpSKKT:ZbX[
%(1)?RF*/J1!>O+"`rK5Q*bf@5Z9e[%K+R[s:Q.+TDIT$%.EhJOd@MR'MU6%`"153?;F/tp50CPM\Le7!!hAoTafM*WYbF*\]=qbD
%1ARoT@fVDuk8_\k'$u]FjQPMpbsq9?C6tgQ3sf\!L+P1`EUpt?q,nY%#6UD_jt/j]!.H=%/-JPUKcYpV'LnsM1DgA,!M\fY*.=,j
%%5NSn$^lV$[=KXPDMZHtNeO#3FK0n8MMI;]0l&rS"+,;*37/_1-]G]V&p\@l[fVbM6u"T*<7$&WJQD=cbc&3FTUZ`o=mBu.h&ppb
%O+Z$gckV'cOXMp=b@4^UD@h=^nt!T)s'EL^W6fKL!Q.0aJuMahjre&746]%u->ak0iOX[a*@GEu,C'Y/E*?_8R2+k[JEPTYDhI``
%P!3phbhtdfM\fb8#(2(Ph96>d?iW/:Z&U*=!,tf$LLYA'JOPubLiEY&S)VAqjpFLV*s'DR^Ffb'q>*GO*X&6(RChRcKi!ef3:aba
%49/KLmuO5#)[7#p$ZbdtdHd@#H=k%'OM7@$4m8:6Wo/T'Nn?TkU,%atb_5Hc*!M0hV4>m_VQrqj+Un]9^s"cO>>o"L#]']4E1SjE
%&F*@*D]hs*9^E3HT]i8s;=J/'bH_BhJe#9^]Hn,.B?Z6N9@="Go@VuH)u*51E00IL#%IH2*,Lh0"Tg%,1WkU[%NMK1$%O7cLEtAW
%.AP\//:p9"b[)5jPf6oro*%kH5u6Eqn2QB6W9l@!8uJH<pBVa]-Y8kg3'n@j8]cLu.I(U.YQ?LLlNftC9>DuWoGjZX]&sWgW.CU>
%&c0Jp/:b@/TVBM2T]=We2bq5c@Jl^5nOP,E!.qUZ*]?FkU<lf;UbknM<p;p.hF:K[eQiCc/3=G,[.X<S*%Dee7)15Wjha!,!mZ^3
%9ULO0"#pgs#bLcm!Xk]uh?64e)]r(knIiD.&DEm.O+<m]B-PGW$ef+$1X9L[#0OZD#3]S.=d>#0Cu#4^8!eNsd.]=6qfoJNm`!h+
%-HJ;&i:N5oW/Fus?RW,CUkmJQ5YS#icf/8rF(Q2.i`.??+;@M3$`M9N$n_XKo4*muV@Wl?""@.NS.f$P.kOVf7;G"-$&kC\E]9-U
%;$%66:c&RgVZpaRM.o"IaDVbJ3O,RkS4:2.g3Q^"GgYJ&7/Z$E@^7*rMi//uY=^mi*)mM@[@g@um3]<2-Q]C^kbQ':#`":$PdLa&
%Oe,MaOXb)7]W`4_5c+)boSq9$8GXT&5>,o^<.`25#DOpl6I`@2>[Y%27;79T*sd\jP!hF[$*sWFl7sQoM,4;EIY2u4gB2s\8.Sf;
%&O"7YN!:G>")-;aLB4OPMSY3kSuC6`i&!rtIB9K1?5`$!8#N`+9S+8e+0LanYfrjT0,HoK,X&(]:^$0tEn$kO39"V3/(o2f<8Ckp
%Qc4gfIRpb_]Lr"SH_k#ak!0hr!g_kF`^Y:BZ<9K^0+EGR-gask;\"Q':,hjij[Va"DX^2_"sADcd-=27CtA2-Z>bO]cj^lYpQlkR
%nRLE5Ekm&KC'56I'([<s!6F[r(1LV2=bV[tdDfrCa@hkM8PU3-b8Cjonr@[fbMGm.%#U_VNme1MbLr7q]2,uf--6iT,61!k%[_c]
%"+F?Q(^^_rH3,YOX\jmse]^<=m#n2T6)L]gjI+OR&_Kb<rPIReqd'dFnmEZ7T#,_1nrnZET;0iAWj%Cpkl+2Ib2sk<;/e[>\[.E-
%]%\ZmW]4BCP_WZ`)Zj2nEuM0"7mPH27T/qV"0(W0+UB:&(LU5J5[leZL1SGXgqo6V3*5`Y(*6T.,T9?-i["jZe1of$WUrho<f)ZA
%YT0j&1=U,@1Ksknf+>:Md9&.*-^@9%C6mm(!eVV;3ARQcB7:J!,SETK:8:E#apo2k&2)?q`CUef<OaWr.Y;2k3^@YSKU1i7:9(+N
%m,1urTGs-GWgjWNPs7jgK:0BTGc:5N^rk-$Kom9U^a!&qHQi&)q?2X1?t$H[)Uo64eYrhjEX%DE'(3uMlpI@0!)BO9?nYpkk:l*3
%7[u+9H&/nH@?XaoDdM\-DD$GLE?tcI\#+l3i<7T+atJr4UC^fr$73M6$iopSSMn850NXhP]W-R"7$IrPhGHa(dCLY7!24O8.i@'A
%B4YV)0sPdih-1"%(KJI'M,#%?WR(EuTa!=2Zkm[RPrBi',m&e90L)-ub,mq2B%bu?T&iurS-b.d8khI>eVTh%l7#(0J7_Wdr5<5_
%$AOeLnJHe!@aR"`%Eqh^N@\815`"-1KERNDpA>UB3J,X^QtE,P]ec\s;`*,U)Yk(Ilc]2D23>mNS]2f<R+2P180&/Tac7-[$8kYq
%.b2`fk'/%'qSY1=&b.Kha>>*G(CpR%A<PZ;`&Iu2A<@42<^3F;@_M&Ei!)L`%T0S6ODuH0(1i4YbnQ%&;p1K'!ZXMsW["/@An=mS
%WT^hme`ou\#6L$gA&9HG1OtnB2R>BD`=CmH!lJOKWESf?9^6AammXsNX1TK=<OcWUCC7T>gkeCk$[>MNV:hY#);9nl0rLYjQ_1[Y
%<Het7>JqM>Xr(Nfj0BTeKVj<CM*YrR-:^uh:P.m3[/Y7S^DH#1iaiFr+HPfc'+YI&3]$ot2K5TecWGuS^qtB=p@=o+^RhF1OWa'u
%:fGcD4D7^N&o=`ArMU[b$YCsTO&Hd?$2bO7VtEm)%SSA-1ANTt%\i$PB-qY"K=NB<K5SMGW6BlP+Dm"<#\+/E!RS9`s7HFEKKpTt
%2YK83"Y,oTa9*aM5d2ru)9smDUtNWjJ<Q=j0ll=ZK#R'oeT:hoWn\a)4D;N(Ts"o5GS[JAmB-kHBnI$2q*%9G;[4!Ai)XHA&uCn'
%@,9Y_mUaHH.STT4)X@fojMD%2egG%47NkSe$"t)bRoIh[W9b4U+W_aD!oWnE?FhsFEmTa;$s_&:P#-liWg?-gh_P8\8WZJZA5EuW
%P/W?W5Z;N]!*-Rq=UbV-n&f;']#g9`CKV<rVV"C?F]Y`95VcL\oC[@Sc5#E85OkkW;itTF1r"/b7WN^MHc7ibO34@J[=De,cQKJ@
%W@/9BI9&5L!8*3Bj2dZ@!4%nO$1WT1"#e@*<>XfuLcuY"'DnHcAIaqULc1=\P<^KQY<?U>$4t+iq7N5][MuP$f-!#\[YrE84H";h
%A9KkE31nF5S58LJ[TghTCs%<n8/W-#3o8($Mr\?h,81h=ZP%jZ#*K]%]rsq4@[Lg7P-J)!2Bjoad=F_2Rtt)dT+tCS?B,^Z<6oAp
%[k_^XGE=%pKQ=h<>abPj&lEiJ557#j,-Xs/",WD/E&be_6)1I@>ssFh8WbX*JIWT((3rq+[N$O$Ke+&DLH>WM6pk+-Q%<PV;?3FS
%rr94s-=Cj7eY02n09U13]ZA*YD;!O,7N*l-F8&+m22aqi4H]o./BNf4mchGAdoJ,s?F2[F]E%t)8-f@)AKbH++V2egjcP3\OCuLi
%$%+"+6@O)KplZJ12WNFtbOD-&gV()JgTu'aDp+GY:=jNdlI=MPe1!FXeatQb=m[P#gc$r]>*ue_dGSCRAu?$u%9pj10e2iYHqBQg
%9mqjb^d)$qkGQtV/He=C7-WAU,S>Qe#gDk%\:Hh9cLjm]N_L2)M)a1/So)pA&7QMkI8bBEHn/e9-&6'P7%iZ5XSGE,mjts$)1m\G
%G+9`61Hqm?IN/OtW8)@4Z9%0r`%/qj@u&<FnL.5#("Rou-m9$*:;t(G!(_!W#5>VrcNn>q>4;_QNOsn#I`Yddr4FQt&CJp[lTq'+
%@TfBOE(9qp<Of'N4m469\`a'>"5KHIMkCj0JS+BX=*9OgV/T>`4XpH9/G(l\0l&o=2MFMPg>:JG_/3h.]okW$7FZO'5$L.<d5rI,
%;6Le)K/g&bZ9/7\V^K[sW,j-%6;Im2j;^LVk$bU_bfHGpq&os.9pI@`]?s<f4?Sr*\6'F2'\T-lciE%fCFnD8pVhc+Okk8!=u$m;
%gpY'XR<MT..;0Uh=g:BU-@iYp-"JuI@<-aa$gZR0%NXQ<'I\ge?QQ/,(^]Albmk)b;+DHb[c<@h&e5)]\?Rh_3,7Xp]]mcf9Y(IR
%e2`+DV[ebc"e_`5.oP^7a1#:B0@>:r/4LeL*T<`*nfE=K8AjGNWX'+RJe_)Td*jF:VRP^dZH&-mjXE[c>Z8(ARLqI!jW34ZMSm)1
%3**:=86StV!lNuOYeo0[!0eKk3)Mi3@"mI2#gSAPMaI^'&C\UNWjo(rd$&F<#$S!4IGG[4i%3"VZ*JX3W\@Ko+fF#\$/PY`"hami
%CqpEg?nO\Il9($Aa+kWqEtk^tI(ad3Ql!)\2bo&:704.t,*6oZ<)2/3Bd$:i=58J5d#&IETa6ka0M/:e(4kau(<^_9+Cn-=f@Hm^
%ipt;kI"Ieh5mUHgIX?uF'2/#?*R>+..DF8%RW*fX/O]q*RpDgeKNek%#4<8E#$;/!+c`D'XGGBc2CehLiQdQsNe&ZTF:B]d(q-kO
%`lrF-_@o\s[_T]:r[Y]hIZG]a$UCB%6X%TO01WhI,StNL=K4$!P]IZm.>qCm+u4aT&MXD/;UK-&[Y"iR,[l;]TfTYcZ'NB38o8ao
%_=q\0XWkb"\,&3o1ss+l%[ZDJjN=\Y_@7qjeThRt_*of`,VEPhREL.6YR#_43)F7W72&bf$(,Li)J)]3%.$!7Ck],Q'.I#$,h(!I
%5fF>5dG/r$74jFQC5],3=BOb"))cE`$X`$de-Io,\;7&62sUXG)Os0ao[o5!`DeTDWUm?9P&#S,TNceK$9Out9Dp?$a!H<$'^1AJ
%<Y@aE)rUI](q+NoQA@`R=On)d,b^.'//?5+F*+*B$XSWsNCPB/NtQ<\(0&@m=9u-?Mnr8UgTkN37U$">;]s:C+.=fQ(4uV_XY!`[
%erfUcr4\-6i\7#304nECq8g>Y&pH0SgM9hX\akheb7\\M0?b212HW)MO[Lnp"ur6pn8>ZLrR8jVA!#3QS=I]\(#FHW+X;&WBSgdR
%NF#e#(`8L.WRNE[%Ig#`Priam!@E7pl<\'f;\90Ra/dMf<)e0k_5P8ENaQt9QjNrdPBYR?5m":-9ECEYXGdu2IUA4^5<+AfM%qu?
%#h\':(e&)7\2U:B.k.tGf_)0C#'^Z9T7ZUS$=F/;NgWMC9JE'TXqZcJ8sVb!qq:t9HN74Zh_b`Kn@)L0,J)MOC)+qGr"t[NZj^X=
%I7kbE)3\MWCKOL)0[Nf@.P9^7g:lPrJF5-aKAR]V`6C6XIs';NZugNUo\q-5K(DAp:f'(Y=eJ$R1%?B'3anQ-Bd,au#?7[QUd1XC
%ONX[%=sPi1LU[I$=!5'L8]7XOD`]^!%=k=ccTi!'\-VfQK<>R#Ppu_qV[3BeG9CQj=n>TQfhjS5]A;!1:dbDGB5:_A&9VssKpJUP
%n'YZ1))H=bRO9?4Y!o>/p>*?W[P#>lCQ#u&e%bMLko,r_hX!!HKUr>oK2m4:LAM@gUKeN=ETdU8V_o:mWc0*43)loc#oAZI2[Q>]
%'#$#>6.s7Nf+&676._IB)$12^0\J<$D[s<NcOuf=ck,gUb*L?I\BQ%J\tSXg.]tB8f-jur*bfD)oLU87jE;R+GVT$(7-G;qI$rSV
%oitRU\;Q*Q)KSWW%&9n(IVak=!_i,\W0Q%1:M+^djHCuebd[a[TqSUJG`ZXB.Xsm]S"+lsQkP/pU3>!A;M-Wtbb>OT<4Ol0+("_T
%%.pr0$jGYG(h-ljLoMZA&s%]_?/q-aNbMAR;L#ObG@lpU2&7,7aU(:aS7iV1l/>4A9XfgE0>I)lE#uVZOqlH@1f[cc:rHcf,&":X
%o%:Oidd9rF*,6O)^k'?7i;+f-9qZg<6DG*:"8m%r6;K4h(mk`&NL(Z)+jah#_lWuVQT7B3s1'5"LlIC\"Vq3egG]I(c'-+828qZ:
%clHkOe,%:Ke8=0a<J3:Po>n.oY[2?H$FXcZ@f3$kA-&M4..i6="Y/lEUEWV.ON8;<jpgfI65"V)BUl_'L_)KQ]SsW?/Fs`<RE=V"
%r<YA)l[l]-T-I3&SEe3>5b1q5NNXO_H_V#eiaTJC\@o$Q_;\;sM2S]WO5su&<?0jF69ih.WWKFcj?^1!8I^B1i,J.ZLr(5Clm?_g
%NlE$r+TMN'raM.i8E_BDaGt/D#"0FU"su,n<Y:Z02(uXo67aPm,HF--pO_NZ+e0:9^nI5eEEa2k-?t,.J[2Eb)WH,[_%,?B-4_7X
%7:;b)nr6J=AQaE4np0msU+8&[M?:<nf:u&7(Y;["I+^@ChV`a8SGZ6WAah/,LVfK4p5qu9h!tXXK545^_X;@+iZ=:M,<[JPdC?t.
%;%T0koD^TpMpQ)As,;m?!rVO'Zi"jXUT*8$WTr;?;<W]=3aF;T#&i(;=:&-,3kfJ"M!Hho1,@u!F8.=4IKIQ1eF<>4:-<+sU4O,2
%WldiIoN^92.)kAD;"60?_ojgoU"\Lp+Ns:FJ_h=?m^Lp00LG<t-eT^;=7pJ'feea5A#>P8)W5U@.R2e.gKhZmi:5[(,ncKilq"Lp
%72+=!lG=)7o0WV6T"9'.MfYHn`-$LY&>Aa1?*BFgZSlG7<:@bP^CI/9U:bW4ld5P4;"<A\;KQml$:E5jBdejp,#lJeT'Z5cMd5ir
%%ELFk29"Z)]9A%9REm$NSO[`DYRA1&8<-0TQ7Q..b0r=P*Z=5e1P7"F&U9a#2J[?]@%dmonA4(Z`D$C:ZC!DdGP)#Y=]eq]o<VSg
%L6XQ9k)1V?Lc%'j02rq?%a1B/n1+\u$'jVhO"!rYJ1[.fOBn5$`8W8&-B&Q?lS;[KErNnkQH[rHjQSo71<3I-@?7]@c41P\6B+!,
%D+[AT=(<q>$U0j#\\CKfi>=E2LN^Wi=s03)MCI%S7A)7Q;[R`e@aZ9;7`jeY#o1\]_d*BPO57S[6*Nb98Z=7.:a"d2i=Vjdbh+@d
%B4di=aZOZuUa/;,Mrf"$W%(H*A5cLIp>spMmN-Q?<i,_LPn`Y102r.lVS^T*(6+BGfj$u+41Z&WIYJc0E$@2DV(]PZYT`FnYS_Ll
%reU&SNZ))e*,(-q<mHjWQoh--n#[DKKg:iW',i&__t*5sl^f>YKVS%HEJM0Vb=9:&-Mp-]%@'%,b.ncWN"JT/,!LULE`!tR6D1r\
%F4/3SLM2J(3(?:gTk?-u^oGOE.TD%mOf1s")tXV=2`i?a9b'"mK.a=Y)<)DI3RP*V#Ru]6X$iUf@)e0](72lmo<'HC!XW!4o\,VU
%r;_i`fCNn?d:4VSaM<ij*KfO(LrnZOW2tK/R"6mrn?LA"*#VjUGdNd\K+okB&Q'\e#P'iDEaHltG(,J9Mod[;AIk9Mp?4UcUTAg&
%QPZf60U.[//:I^Q$[\N,N2Ak;11LTLiC,#5%0Cq[,#h!C^1<F5Wh?;2KAX<(O0/_-kjQQKOW:nlaNliuj2t?cPssUa3TB(c"D&1j
%9=t?oeJhr!X1CjgV,R=A_nb,k0YbT+lG!$L04b3/=o'eg@^@DbD_Q<3oj:k`#C2R?Yk:k0EN;j08pQ/D*K`F8--)fhS#<G457LV2
%A;K:OS7+$8D$INN\W<;&/)Bi1[pdK-!*3)<9JkgGTc2.U:7iameC!%RkR*uC:Ut[R1a7tL7Dki]0bX[7V4nJVc0SV_)d8q$1hC'Y
%AU@$nN'K&0;`$ib@R]^YU5E@=]lsN%3=-9bT!f3O(&d:8i]+Km'?Y3M@i#tkO9pLJ7AM[`%Q1Qj$shZ?6)$2lg;AIJk#5o:Hi.IB
%Z3G/Q^U+o1W$90rCIWA5e=P8<#:;J5kkWDJj7BZ^1$D$G(g1LA:Hh@;l^k79YWX#%Ftd:.L+f4A3$\\/eV1[,ajNAie1,S!0k,`(
%;WI-pe9Wmo.F,.fYW1Ssq+X),RYp3Xl!5FIPUEXWJdd&J>_fR6VGTVRnJ2)[%b;2bgchfqK$V#pH.kf0eVSqKJPE/^j7N]O7\(V0
%@kd.OZrQ8i)94R)QF:@$YCj"*aL1S!eU3LTG]lZ>kB)$p69oC.blVhGab%kZHBelAeM.H+E`(!Bb.iO.L08k$4Kuh#Vh_:5O3A?9
%o>0ap`1O!d)lYeOGe"Qeas71mN!;BU-(-gu"f[lpZ8F2*1B2t'#t%N3NG6CllgV5,SJn$tc)juY"IS85\CCFf<I4%r!;Y_J9BFbq
%%@.g?5<)RY_h8)s*Xu/F=A!BFT.YaBF2chn/6sJBr4sLiYM%F)r9b$NG3?tEWFBa"rkjr%OX!HB.cW]rF\X6cIeq6VeBJJj.o\-&
%UYp.o%$5fNii,Xe*Z/7Gp&ndBR"k`WJ>pb\Bh$n?,q>+UPups29B607i,7OA;<[Zec&7\UJ,t\pJIP+O?s]n(oW$>LJiW4#kbkU8
%\l7jYC1""_0S"]Zp0m9pl5Ngd*$dTA4Rlu<2Ls<$,aj=76HUQ)`cE_:*Rb['3f/!V?!6gj(<H]<H)akR*hXGiG;9.iN!uro0\)Q4
%EZiO"14VZAjKOr]XRO$N@S=VP;[U]@7Y!T[$K`\2gB+]t!=S$?_]o+X^QiN/M'AGpR!Hm6C$O@@JVsA^XUAlf1rTO-jCjV\i^^1j
%?tOJL,sjD4m&,^a0/-p?hrJK$0L6Iq4/aL@.ZYW:Se9;CAgdZm=([&0+q<fI`hZP!k+tit4'8o(9K6]ORgeM%WG==n4fj/`dYM=D
%(2=1j[^c1p!V%]Fh2@,M!8'!<>W`qN+,`'iFF$%S(pM@'[.VAF5a$VS*"<JfP%DG7l"`ZiaRaV#F+EGhkXXZs81rp70K05VAnfCD
%]PZTJAZK4E>=sa4^l%).%)_"[qUj6UN=YI''i]]2_($f7O'qH<%1='C8.4kg0pCIh6?;V:i=&%LXJlPumEmgu>11sZ2SddMPJt31
%[LB]d797gF\QD\8D\nQU"mej>DX#sBBe%n\X]m\JJg0qT$#P<K`Z)]<8W0.ZlBhQV-@R22OTC"_oi;)*R68#I'gBu/=*`_`RG**L
%BuKL._A5(;WJ]ei@6,q\8C"59!O8*-pSUB:Vg((p8;8mT?NW':;h<%.&l6C5OB^M,7ICFa=udEu'o5hT4tq&5q1fQ8R5EEoX9H6F
%V3?E>]G65?Fa.UAUu=Zn3'F4"RbS7UM9<qQL?(51>]3:F7G4[2"S`+6@WSfRc]g^)l3A47K(1TCSq*)YIFG!%'HTF;Y6FVWiWK(i
%oRhRf;1UTqJEA7I-O1KsJ%@)VUAr1haqi#f\,N5r+]o-=DktCCi*5?0>Xj/H5!d4E_c>AQ5+!o3P.fRL7`X>%62JpWU@\Osa4XFI
%,ZKGc$W.tU7B#4))FBA#cj2sdFH13j?`rDuB7`MBc\$_c.ZKam3>",\ZEc\QlH0^iAd0=#LP66Yj9E-%aI-]N-K.Ij3IYffE:cCJ
%4.K)XMlH&2G;f[=2X^U>j1l@XUWE).OX#2W;e]pBUIY51D=-OI[X[L+0'VAV"0"[CQ-72=BBsuN:$Od'8S@"_DEo_E)ClQY_FA64
%7o+%e+"o.,QqX??GrLZ=X&@:#Csi+is!jV2Eq-ao\T=OqF&t8b!8n-iR[j^"4;CYYBZ[UR$_l_!Ee))2JlSk3j%SZVS6:A^.!#*F
%S5(AgPG(La9"4GK-Jd1iA23_<Gk$&$;t&SjhXo<.HrLD6Z;_H/2AcpiRJ3E9`ZB48`P,S+RfqOCEXfI42kk</b+NFQ&:2.oFgcr?
%;6-qC@OXNB9"Wc0)g6O@eWBC>qcSGHKcNtG/X4]B[jEalmpf85%ObQ(*)%(,Vqge&l`W)D&hYLM@E18A0=ZX[pLFRY2S@ftX#9>j
%biVBPWFDTQ8Fo;R+JfUHdS29$TM?=6<+@%CSUjaQoKLRb*qlq(qA@geKm@!rO^'?-E;/GR;j41KBXsH=]el^)MmL2u[K1FU$q=NQ
%d57;/D_/FJ2]j^8nt?<L<EQJ2e"=fWjU.trQq]@Q#S?*o@Ga3Mm^TVUZM%I3*0IY\\g(rOCJU"m]pFG\2kH(_R\)d*9,/XtYJonB
%d!N(sm(LckV9%GMbe;*QMp/lUgl'B>5CIY>MoFW?2AV4iV^sleY6cCg,E&`Y&'Fq!\OS$m]g!Um#`.i][^Q(m9s&J-c!V]\>D\%t
%#0Y&NWD`N!ji-D28>4Y4>msf7?PCK.@aV?BWH>ciRT[(o::`ZVT:4!#Et`3@aH?pLnO.2qA_3ebfNQ5;,%`a2)lC0-],'O@h4\-+
%amd;qEZqOUn8$H)>c\5Z@?Xr4JbJrVnaV38CnlEm=O5pF6E;fBcG.5WUht:'GhLO)b"LqhdIoX:%8CumWG&Cg7]3!0B+`j,@Gda`
%;FnaTiPd8s4o`/VoQVQoe@M)8l^t^0$@NE;58hdZh=QMmFq`,F"tD.Ks7oa[;rd*PZEiJAae=8b@4E9n\/@50g`N/',@Qgn6W9\>
%o%LrAC+dH5KuL)DnNTpjO%S?NYTkJZKidhinJ5HK[ZSMeQ+f.r#PI9.\bpp%52(_JNJ09+>Fdj/+%b5ROmnmQ`O47CC3QA=*,n]M
%ieUDE[^X"L4%72Kk?[hUd(\0&<'&i/`)(a\<n_409mnBCV=:K:.GMF54.ac0_Cl<Hgq)ps@P5;_Rp-.M0JnKC@BuQq?:nCIr_N:;
%VrMDWNsUJ:0qN;UGgZ'ii-!;!k%sQUU6I)a%M%t;b[4r<a-Lq+GoM'Sa^hE=3n;1,7"on.>'LA?)^"PJ!#\,U5mD,Lqaf28X;c(#
%K=iocip"C1J:nu8=(-YRP'AmMM(dL]%sLg^<%+R%ftMR-nWf0d0o-I#-Q'j0/AX%!f;*7W]>`q2Lk9&=`8-nb1r1m)8('$%^'WK@
%l40B30)#!sL5Brf"\0Ga_osct=>\tjU%Qb*#_r`$0TeJ4?!ST\LQS<MR!Fub;)h77oiW^?B^php-ngp+R.+nD2Ng$,LM7'DQWQ*'
%0]>OM2L?\VLRFoVR$Kr9BqK(1-C@;m0_,*ZI_s&ACa0_P6ptOl$?NW_F-j5-%2NC?&6PUNgHTUdn(uFmbW\@hib3BcekkX;qhV6@
%DTk"1pp2Btl:/=),-KKB=#44/>V5U+,H_f%[>:lk3*4k*cP7Wm/RiF2^s_"cY#//C`6fch`]74uUdZ7Tib/ooCO_@&R*Lu\+ci+`
%Mg,*QLTQ7Or6+:d0A8<m[Z0s#hDV&4+)RL6q6oO*ieaQKIs%fKOgOni>K3CaG,pH3[F)<bHS[*s5gYijL!DId4hHF#[I(;)A*HJ9
%FkS4+Z'hl(9.]+toG^21:o-A-.L[K@n)l>2QEY%oH(iE5[$5cu9g)b5/tB'n8[MS:>V`tWSij?0bcL3;FsQQXa^X4<j69f$fhI4.
%FY"P/SCf/c:i7bS8/9HgQ"*OEe?dD)Tr+[LrsD7Pb;X8J[m6Es&D+(Ep/5<ST$3bn?VEOrnVblY(gr9Fe*/Meleg]?2#$BcK%cj?
%9]ZV\:tZ"VL#41Tj5[!*ahCrLf/5RYG%#?Ja^-qiR0k&&RRRfBI,>rr`VTj??>\O()>^pMTp-X[-esjHP5PM5[I-ap[N#*bfg7Eb
%%DU4=lF$$k.8+>lq#Y]maam3qeY5X3W+_lKC&FZ(VUWOmSL?_kK/,gW20%+792j,J?]Bc`)MBYML`J<)7-"@+5hONs'/FF#m1lMR
%6n'/&+"W->H:KG_U`r5W;U_ot1`PR]Eqc!$>h3aY8ZJlE\lo`e[:r@>Z=;SQ99He"WCnm[F_QNriCI<j-%0JG6YghPM8g&p0([.q
%5,TnD>%F.:E*(`%'NfJ68Ls^(_JbSN$aE2gl_,[EPk5JSC$?@H&-Ll5LqWik8Z.H`nPg`%^\stjU'sn9p'e1eAS+&s.SaU2@e_7,
%A7qr_l5#_Wi<RfjZ89/7M.C6E46LZiXY)N0AO/h\F4#fhp@1Mr,c%Zd4h*`bg5ajZkF<X&_-5&'!c:=2MtPWN^e9<<8sPg1Vn'4C
%RDo&JC%QR_fdNW)9BUGC15kS23(5s8<p_78#s2\g;g;4$`TM[%H_>,0='=9RS2DsugI2oqcNi&;o3K$uW&d?r5$E.\FbeQH>!4CU
%_*rj5G?u7go1a^3HAmPi!U/l=dR#NuZ#,o+<ROq*0@T*)XadblL%;W,CC#?lZ!DUlFSK1CZNhclrn:2ECbGq1*?04oK;ndCo9%81
%UhuaqjC3Kfr&h?=6W97:T72m,+)0#1)5sl\WOPRR`1]*K-B!o,@[8940+U+u'@GJ7d[BZH@:sur'k0Lg*P?NC3$6H^`^@Bad5po[
%\5P'Y@arl(UP$"deMGHS-]pV=nqT'2+s_332k?O]VU.N8cLeVX]:=qk9s!J^FPd!m5kXOc*DW/Fe"`UHD5&Co&A,4qD!!hNOXqBD
%'m,>uTdJ91?]B5<[!=To3u+4#_B9dgWSi`2(LWdK8_Ku`P%)5aZ3:2GK:h,[D=Bu-5<ug`a!l$2,^h&rNA@p;O1IslOddp"=N9-Q
%-=f7i4o`"&d\l,<6]VQ'Zsl/TPZgT#00nT>2DiI1-PE7L.33>k=jN_,b#`02a-LqIddcf=RIOfC<D58ZT`2ca$M-HgO>>'>38XB1
%Jq)Db#&>g+"jqEA4FJR9l,<R+UGp2@_t,?/JE<AEa&CY<Z;^l_]CFlG':dr]@tDk!GW(hD\!?n5]l;WXcH/Klr#ECOh7Dqq_-N.\
%0Rr%jb4cnOpET]eMe+JtdP&"3etOSkdejKt-D,j'IAo4INM&G1k9@Mg&ER=-/OS:ULgsVi,s"Y9#(btdH)=*Rc+WSrMOpU,.(S4(
%U8-elBkdC<A?sL*R&nHbRTf=HpimruU$2823^/RaO/(aq)%@5iO5lp^gsPY33EM&tO.0FAkkMZq]m%loEHHRjO)*52].bh^&Xj\7
%].erXF:`oP9rE'1_i?'iq7%p#>Mtj/6_!Uf4#[$_E(.Ik*?sSObq3:q:?6M$8>dEg7b]9\Y++5OCt@DHL.c8*\*[-EQZe;AnnrJP
%/g&D7KH>9jGiOb()]*e[,&^`IN=4dD$Eo.P0"K%L(V/UqP/MP-UV?LjWD';Di"Zh'B&_q8S_205@rg&f92dGb?]>6\pC*jRBek@3
%EM9+8#,mqH`40k-Oo.&m<K);_$_c!*nFs?*UIJDASso/D@ln`*)4L1Kr!4f&/jNf9Z_M?j,?7l$o%P^khhHDjW8E=iEJXK=o%QQ]
%S&fM(gZ+1h2NclDo%QQ]S&fMH^:P.Rj@&'^MO[Ml_I8+#7ZeLe]#M04?DqQHXjrt>Mt.?sPWtqnQpUQn#;)N3,Q'f14F]iq6m,A+
%N;:nLMo5'a,PPpL8^1rrBkhLqfkcIgbKJ*XDapeMc7&EdX3&X8pJe39c:+(DW`c0ke`Gr#9^##j/fNEr50*OpZ]llq<&4e2;!3`\
%hpb1O?;-n@Kss7?KN$VT&l,hF-Q:=O'?,4m5V<f.i1uW*c4'/U^9dYnQSGUrchYG=Ug<W<`8$3_QAoCuQ8/\RURc-D@*N%iriNY3
%kWmY&GekEE]-&o8M6l.C^4Na.+@'\;QmsF$*/aVl?!up7WH8e((oYnY;IT*LT.[,\bj]t#4J0fM?Q)ge!JV7Khb5a@?LPreKWuRe
%P=Erqjb$dIB[S4QEth:d"d?Ab0^5+;N=M<GA#2"uPBK""CVn).EB>Y-'O0'.qoIGoNBXpR\MGdjKX[uu_No(O,RrP5^6+pg#`"E]
%IO(giaX1K^=m;4ndA5OlJqS&5]_K.#WH[gpiEUO[QVEusFJ.-M._,"H0g=QVL3G2'5.,&GVbNC\[k2]m#15&#EnpoS6D<!X)."F5
%(IB>/`lAI)Z#:4pKZ01X,s8_lf:%k8(3&"M,\5Lr)a8N(HL'h;-JU/+in7r7b0\QE&eqe\YKJLE2R'\))E")NV_&Q#2*8mG=[&Tu
%7>B,4nTGaD%&LAf*SkHdbOSKnYroCD3ll7m'j\&)["XnVX47r>kdY/#+.'>A1PdXgef./%3AA#a(gX4]/%O??)URb)1V+PC>'@?8
%Xk^SXIPK[hCmEP<o1RMKYSM;85SQ=;$R$(&&bZcAendXo&4LiNKpil?/2rIULa"rnXsN5dA5h6\>r]`23oZ,C4Vi`;f'HFAN5e,`
%XNLHcW7kWF,3u>lXKuf`kZfCL%S_nX[::3miRJETEF8)MD(^T>jO]4c)QdM_/4.++)>Q-n<ZAIl^IGhSL;i6*#Z#W+?_c`$DWKD_
%78=p"2'Te)X84[h9>ZQTPHI5YF?Y[27at0s%?Y"4ZpCd8jOD7&%];eDT2I5ioX$tlD&h)81WVm%1$t=G=cTdIkCT;)H-"u--]2\D
%AghJM>I:uB,J&!+W%SWK#%(p170[+7O/P2N^<e?3AFR"7_7*O=;Hf#+?g)aip+uM2%kY*^,)\+[p`bVLZM53Vip5seX2q'JZUnU$
%+ilpik^cu^(n)SX.D%%=c^dU<rEjXdQssooY=?fL`cC=(Y.f-eg7Kl1>\P=)3kMXaQ?qNiA#DC\nB\o\3G,4I@60Pkdqi7=%/`D6
%F@Cm*A4<asKG_9D4#[(GWJ6Fe%EF\b><3OT<YS+PIu^(R?hV;H%g(t%UeCT]H]Q?J')Oc?d;>;/*.okCaYlgEVJ(i=A]he\bd:>>
%6D+s4A5bi<823V8#uHPZLmeS8LB]<cLfoILqnPQ%CrKpT[fT9+\fHDhfn%HH5_^+m=+G-<At(\sf)n)g=dC683ce\PE4,h(EF=8$
%*JUOrrj"loXN!)"Yld_q-HEQ)N[@tA5V&^9MT+;OR,=TtERqFsCCYfe_`E^4pV9s+[M4_GYp,Yu!o[qJVHE%:?*PClOA:lY1Mi^%
%%m7e]_fdjmd"J6b0upMX5\GNg6]AH(^pY+:'pDG+#I\*%C0OT/1RoRXV5l-Yj$5Xk@Mg(Ie.EP'f7\'_][$F?L0/ruq&Sba)[#6M
%F]T>;H;^3Qa-.qg3/P'U2jOZ4/PFn0Cmu8<NM1*'(X^"p-9A`3'RkSB<$Q"BQ7d;1)PF"lVSp2))(fflCtQ"m,ot/3C-1p[Tercd
%.R:d7el#kL"&T?Y=^S'%in^gIL>'S%GV*&u@/1bmdUEFM-W*Mk%G7&-C0=<UJ>Hb`!,br>h7/a_djtqY2*^1n/;E2H.3F=?qtbg`
%/#I4+=Fi/.*Ij/Z*XFhc@EsrT>Pp"*EF[qb2Gb*,A"\7pMCFR>SC9fr`&>6(,Gc9,n5'[4"8C/&fP@8efP>#fI%eN$h*KV%Be`?6
%=Jrn($s^-,?m@PEU64tR1F=ZEUK*8;K?H_f$^^-dYEA@8]/huC6"_VN<3skIXsJVneV6B-EV%jN+G:-A4:,CZbS5fW;73\Pk,HTA
%]csc3e!J7SK4V@YZ*_-@PY+i]WH/dGL!^B#lImX,46f`bW6h`N#3=T4ko\%8<gMQ!7#5\tP`XWJ\co]tM@g#:UC'\k[M7-MR-?$?
%#B_>i)+;sFR$):WmK%gXd$6@8JpYiI.GLN(WPWBfYfac0r!ZG`RD>O8p'j@=Z0cRT)R=9\2De-*I-btK<$/l8j/G4g'<7*n:%bck
%X$0R,e]/$!qWKe9+$Y#,<?L$>FK=Z+V(%"62;8.+TEa3R3l$Poa)SUCeS5Ch`s2ME'mt+][@J-GLYA[bOB9Y*TbjI%FMjoATQi<]
%du%iLX:!Jq5jAGQYo;6B!@Om(:ea:Zorh4=gAo6)>@h"@3%_4od]g0Va)&ZOLoOkTflH+%"_q%'Z1.K*IS,`B>IYg4Fp+KP"2UuU
%\:Yg%]AT:ud:FlUHk\ChThEmE,@tK4Po6_i['*Ys.i!+kY=8hC:[]!=DLbD;<7I"\[4-ia$jHusC!A!VVbrr7aB,\,QqEQK%A7c@
%-+9[^_EpUCR3:-=ONEX@!Rn6';uiS"`ea1og:2K'&!XlP+G;=8Fo:>@W9PdCYWWN<qkhmI(jNHJ*<t5>$rB(5ORi.kJYW;W:L@7C
%7(u\P7o-V+bSWD?+^<V&S^#LjVdc5#fL0`Ak\Qi,7E/@;$hQ$q/t49ojE%0?Y^D#-+:WFVWg-nk*\]QX!Y"*e*CU;l$JZO8_(;',
%#3:ueOS+n3L?19r3<M6BMP&CmH)`SpTMog(lC4lT?760E(EV65oEb[u=*0;1.bHBo;p$ar:E\scBdFAG"*aln7@@RZ4>tD(C(eFR
%a=QnYO)ZRa2(b1<'PBh=p/[B-'oT(ocoKLc5Y<ie!i.t<+tNsm#HIU9<jZGH6fAh]Vb49]<Ghq#0J_4V_UcStKD#)=eIa>GAtX`)
%`<aMW.MQ#.9;fq*f09<OW'_IG\/H+dS$kXEo*na>a\en71XHY2>ZK_Sm#MrXOIq39fUG%ck)4,_9T&J?;c?UDHqbaa*_7#BUp/:f
%G/.H2ni]MHr5Z:q3_,#KI5$q0\+3m^CcN1_&Eip=80M*S4bsp274-Wjc:Z+e!8q1gb.%\tN*4SK8HhNuM<rl*m4nsAjlkU1O&gC[
%O=6jP8T5Q'(ZS^G9li@UmChk<BmnRdI<XI,1j%n$IJ-hT.W)pRs6AKepoSI_oBTa^\9H]0%"-g#l.8)hZ+iHrpuA"BjMr>;XV6%-
%!aa]TqORZZaaa0L^OQ:\s8L8mqW+9_cL"nsnF`tIr<IiJq/8FpljGI/qt.?]pZHHJ%X0+Eh=.E$s7:a8jq"'^`@mg2l5Pe6oW<fL
%lO\MnhrJ^hkANQY[I69SjS-C^\W=Ms[:?\LVZ#q&J,O4+pm]>'rpLs+X%2&fG&2[RFL/^%rO(7jkq-h!j):N8?r4Jg2mBH%KDUCX
%p(#a@V`-=Hi%0M(JTHdgEX/KDJ-d,R:fB10g5I6cQ6(A9XG!D8;dqdX;5k2.bil=Vn/%>8(MP&?_GD_/JeRK/dbneA;q+`%6WR/V
%LERt,*MuX;Z!lDZlT_\i5`)a8gM>W[Q-)#uhaVDq*SSttHcWr`//bmI.EeqW5VY'*\q=a^5S,M9s0a6GSL[`["8TEeV&:3>T3k=e
%;kK#Gh<_4Q1=,/YCX^NC@P^DaFqL34A7c"M3PoF_=Nr9Ye%f]>n7`61H,D!T<qD8G6`_%+T/p(++?m3;>%il;;-IXTfVb!3>%g;*
%T*V8IT2rkmJsJeRqF$t>'7HTNoU2gWX,4C<<)+X%)LbjarGH0SV+%V'I;=&CZ)W]-!b,.7T*<'2+[5LV<ChN"%BHAqR0TgjJ^9iq
%2/Ra'!jS.<1Q.(6/i#4ND!mC!$GBS[G)?!IR=ceYQ<ngp7HAOKKhE@cK$kHBZM_#O=2="ib:[(DnO6+:<Pd\<='Tn8l5OYFZf\,(
%=23b[Q'X)7)#C4;r6gk^gsk#ZgKV.(gM6&Z%FXOB4-nKsZ=N4=b<)l?H':+1ke8cq@Ua`Opu-W_eI?;sJLXZ7/@k85b?(h!jP5#C
%pT!+rphar!qqL!*qU_7+3-p@nC!E[$0=e="`f^^rXh9+r[;WGIgOK.;^\ZGiTDnImTDIl7rlJAOHh-qqC]3oVQ^-*'EoHR>DsHc&
%-i_q^r;!lVIdPnIrO7:Rf5H0;bFe[]8)O-RPBZbDhfRuN5Rf9Z7bR+AqAk(ZR=G5(P<]!01I(ci1I(j.;ucj!-dM\D4'q=B<Rq:f
%!'XJe;27oZrdFh0M=:H%VuQG](LDd@$bprcfcP_D4&>e1qd+b]86p;5LXhQaQ`Rhk)9Vm67iq]MP<X?\([#0%-2G2qrl&5,pQHu>
%nb>V'0/KEZ[.4hn=%b3\r;,ip-[tMoTE!3ccXLlhB3==5cT_;uc[Y<U:@.A^rpku5n,]Uohrh`Q,G5#(O.L^G.o,ECo\spVqX3N2
%np7c`r@0^aV&+?WZ0lo1q]>d_O.J!X:MN#lU[L>E^>Cn^gpH)#-+&;?2XNU%NkbJ2e'G^0?CXu?n[YR4ri&-+%W8e&)3/1J`@Gi3
%NrR\N^@\&D^A(UV/b@DKci3)lIJ>Zie^^#F.$cKtWcKlgN_b/f-blUqLL^#K++NmGDk5N0XTY-oUMIIP^$:5WI.G%P,".Q>p>8+/
%-`?Z\!DZJMZISf"^[q!!rp]+$&]+n<TDu`o5Pgh\H<tUUKhI_:FSUo>io*[^qkl*sl2X<DM][HIY?uca?[Wem=E2g@n[C[Th.LJ3
%"C8\0dr0a$bmF8:=?EIcB)BO$5Q,LbX&n9!BE%S/[uG=<Ba(Okq>9=n<;>.]IJW=EgXe8\iS<ltcCdMa0a(=aBcG:E^XesP^n>Zf
%hYa@alO]Fa>j::Ps6HlgkSH*X)A_a2hSNQCkpp4u?i?*?s5:HZ2/2=QF$KHj7p^U'L84k"HM*#@V>#B2HU"Qfdk>)'g%R]-T$fgX
%&@`f_hQk%Ip_.$3iU\q<!8@>/f08`/]AjYPD^<s#jcNo"hrh`JbR7E0`%PJDiL0._mX;i6,%#g;DX&OPK@]]j4T&8fq>PkDh/Y0^
%pg;qYb$R:odC\frZ:-EsM-V\DT75k+Da%IC1aq+'qO<O/EdChoc1h_:Yq)ZQa'\`Cc[P&4<S>S,^[p.Q^\7G'kt97CQ<\Or2-g=E
%YQ)],^.fI_YJV41[m0VLf8gGa?#qH)A11&NXH)[0n\u)<<r!s"Q%E"bpl-Zp`E'23`!TqU1,@n)Hf@-Es7*GAs5J`QAh<g].qRU>
%XH<h$5PN+*rr&f<"2>ANH[3jNbK:",'d-?@55Dd6Ie6R2qsr@RrNH5>'I[$6Y7O!!A0!o1g:.('8,jK<<dg)VKVEn8d>:f*H2X5i
%ejq2EZ$X`a%I25eLQ@M,j)+t(bOg=FX&hq5@OTnK$?fH]fju\;IKepF2`4VV8<kJo0=lD]V*mBOTq'IZ6N7@ereO1S'eD"Q&7Yos
%&VC=9[h#4CmTGG5f%GIA]Wrg'1]oD,!IZu.N#.7Ca/B:<Ng[1=DK"8afl.B"&,[q5**m_=Fr+,!CgZ[UN>jTn*2#De&]kmiBq)CX
%*L@1/19*ehqoU%;QD8qbLe(Q`e!PSTnq)<;htW4%h?0B?hXQ=*6e6MR#JTd$F"b1Nrqq[-4SSJ4J*9T?%BDVda'X;l]O->_J*O'V
%H;!)n-Jn:=5HU0OkC<-#5H`gJrp_(1f+<E(UN<7MFUDK#TgkS[rT_?T<j,Nu@VoreV#TW(<3,VOp$.'pr_d<7p%tG2SSk_T;X96p
%i4T"ej#Sk5J+'$*M8+Np7%I]`lKNNf._Vn6qNQL/,q%tOIOSfu?Tn9];<c=i$sVst?ZLFg&"[]ERXKYS7ZQ;+6J)%`o,hS0hY;3f
%9#(6&fW;jJ\L&dho`eUah6i*Oo(1WYd7Wj6\$GYt_BZG9rsV,;5YCL57]#_*Wq8O=XjVQn0me9l*bf5Wplm^+&U\TUcj];)c,fh!
%Ir;"/':_dE0`4WBr)(07ht2QJQ(EAj=%\4fn"3L08NcQ;a?!,sTuY#*@5n*#1-W_PLJNW(o;&!]?BG)$(q0A6F")U8$Vo:cB`)rN
%F$ThWU8CeYQbIG(K(@7@J#qPZ94+VE5`9bFqFb0D(MTtScTbdCcAS<u-/ME]rf\7CIe\DN2<nFc_23A`km*Ke4obJ1HtQ_ja*lE,
%^A9'=cZnsb?[VM)<4sdBPBuO"^\m.Kci*e)5C`+!:O_P\D[`V$?/Y_M+2.!rmsb#CroePFpX+Sbp:5-sWkTJ3ig:^u32T8u>OuF8
%`[WNL-*iJBBGo98EbH9l<fUc_,c-BulZFr`.99,"0Gafff?5L-0)M(mb?:KIp*7m-?9B$I/Qq&!EHN#rX(i\+rX6?F*;qODH*"eT
%cC9)!ngnb%&oe92i'?b*M_A5II/0V*/ml?Z&p1A.0":c,b<&a2EQ&YC=0B(2dcE@/0"LK2jd3*Zii9%\?2KLpY?c;DPq,p@]'SE+
%:<01I?#o1e/$?V[]%&apE8up:XM4\FCTk<Y`MJo+['ZRcZHG;3](Yt>DH_,)/Wt5!c"h4.!QoRj[]HRoP)G-GnV3NNQ>Tp=%R+ZH
%PA\*k3Z9XYH=8IXK%Jh'Za=o3q@-a"fs#q]D:4jlY(NGL#O0Zp0C?Q,\#rrm[<YDhGP@CTS%@1S]Ck;*Y<usc;IA>)='YEU)fEBl
%F2R,lYo_\04g?T.2F,Y/??+L&?iNbP^?g_m=0K0BZYOW`7o;/=W(Kk[Agk6&osIk+.N68QYJ0EOC#+Ng0j3%gTbtA<atY:oh/jYo
%at[1BnYK;l$[o59e1I`4Bj%l]=ba*%h4*!QG4Y!!.tq&Pkp;['KhZ`-l+qte&08g"9O6o9!fc(3MCU?lA1="BALjptILS+O;`)C4
%[c6iE<lZsPBXIB^S$!R`".C`AXhK)JL<\?r*qO:,^29?L6_gp2V#-26(Q"=,hJgZl%F@*n#mI"N1odT>Rl@E;6q'ZeOMBl%SYW'T
%!hsIK^;3l+I%,ZL%3b9.\9`;BZ*1_;C,'n!V5j+nBQfO6]UUW&0)I.!d2HeBmE/'Tk,SgEYIZp-mY84==0G^Yc8[F]G57DVk^\t0
%5teK-?(HY7I'ehRIN[]UY\5tn:E[6ogOPNlJgLXMj6!iQqS2.#FW34NRd]+=\SK*ogMR%\#:t;XA*HG*08H:+4k=dokh603Xppq;
%@<5\glAEae>7OFG<a5r"fA(E4g3&tU=h"+U>@>?7_<&LM9T!/]l4p@nXE5t?Ug)F.B`TYbIPu6:o#k*@Ke*AQ'Q1MPL?l$IFVX?<
%$J#?^/d$9<'l^#8Y.stIl`Pq^^mM.l0Z**->I!aV/$HP\lDq)#U16VdAd9/XFR_*u5-]Lg`>;P5FKF\hV,6rMAX(NlkK5oEj2)#>
%Gi>mC+Z'/*O&C>/fo-2&%8<=d#"Gtu@$1[d&#V&WniYPU$&"X2/I%d#j<R.@0k#:6/2S^=^U?`L4.h8pJJFXP(NhtpVH=%$VFeXk
%5b]5.UhOA`R'#%fYcOV>1s?m[B[m>Er#3!KG+`':cAP^TQ);2#VWq$,H#Pl^I:[ienli/cK>\tW6US.u)5+9M@(j\4o?`:K#bpGE
%h]^KQ;q>PVX1e@q:(RtBS^Q%'?X5+'n(]jBo^9>P^2i,V/[o'Js)+ZDmQ$H#3qKB/pl/@GR7\ORA),Mk<SZ#LAFfBmGk_g+p$R%=
%o+0^Ms.#[nCY*0)I776;4h4WYr2pS.fjB0e'-I7\8)O.3De/>)moi#l]lEA#b\Q)3hEUnuBE$it?hfE&r9_P]qj(<]mrE:k?XC0T
%:]/EU*r4Hd55V1+q<D%ro#8X6q9_]]pIot\-8HB=.mjuRHHZ?bhrk"/?bZ'tDggXr?hj8#romWn2)1ngO^0OL\\J.+5Q/c$J)Mnb
%s40-Drm.3dln@5mk$N>c\!M7)^;';%J,H!9I-(f4I,ukSI,Ql2o>,YKG3T"^?$)BNY_FP)gDf&C_2nWPB-)R#1Oo/65Oc_.qeHk_
%jpc,plaFNJilp`FrgjXJlnA@t@iosigOAHfEd6p('n!g+)#K>+IdK)Oq"?rM_mdDF2p\)*pC@.6DmD"*C7V^"BHgM=F1.<+*4`sj
%(RSmOr73G]pC/3.4FG2X/[nc`dKLGg\n0^op$6L%eR55kIbIGXb`!o.PE*Zia:+uTMZlU:(ebC29%b$0nZ%ZtB'*0g:!CP^6H:eo
%hb*'F[KbpX&!?qImG0;cQAR\Z&msoIh_QIH30Lg,G8*X0i)mh?T5p-jGIhEWBd&ejZDY?jo&%&"6NaVkA=I/X4IhIF$fedFAh3e@
%b[mT%lQ\r#XjM$Mm?k6/N&"W)aKs?UVGqJhY*C7^qjqBX1Kp]4fs$,BU57U/#fg8uf%t5:(rph&OXlI[*<9D&[BYnmG+Waf,ogg,
%JraUp:ct5j8rDTW9DLW^"KYLpPWtF+T6mpr_n8cr77X,n[Mn4*PmrmKh6LLi$6)&?"Ij!sa,GZWZt#D33K+V31oV_6jBrq<&'2H#
%R\^n<Z4?)RMPC;Yome"aSlM&,ah&7U,m@Ipba;stS;dpZ%]&N#JZZ,W2_JI<Q#dMV+0<UA:^Y``nujM,(@XeY]I.-%Ajr5SL4q;j
%+`np/innYQ"W`LqJqXaNh=PuGUU?Q4=qSAD<\a!GN.Gdlaf0&b-@^CiTkpPVY__dG!6'ttJ@#$[Rt&7D,&94SFLl3g-8V43N.#H"
%bbH1G>6;kI+$$=Eg(u/+*9j?p?fT@YVIh7*akR\=kA7Tg=[ulf:mq\#Z\Zi,TJ:M8Af$G@,;U-L9EU]n6_c)@jr05@aNP/5%?[&S
%2!Q2>s(GA#AtdRb.82@-V>M%0K?4;!PY%Z>d(#a60Sr!@-i-6'kjrii[$3,%0Icn+Lt^F3)Ke^+3t$a6<?#mCOK!rU4<mc0NVPu>
%+WBs)9Lq#&)O[JLQoeiUJ]=eO1D:a?&Xccf@j^up/csM;C3J<9Z=Ks01l0giZLl#q.p$#o5-%^:YELI=1QbqO@E4oDLWS`;SNA`7
%GK$lY8:'u4\nkC0cB6>gn,Fg5]`6s7U$]=lW#E6niJQCd;\+BQZ*4/I;sa)t?[_9.Dn*:RROZQh\>&;\LHk=Z-\(^bq3Tasr:</K
%cS)sjgOIt8$S)R&]`4*ASpLCK5QAn[?[D6Thtl)_n*);PNeQB^0@*Up]Qrc):He6hr>PjXC#P[a]3b9Uo?[?XHNM1P'ZT9b#0"2T
%n=_I?d8d(u.K7CO[k2)MCR3e"cQdQ>6h+f\Gdr5<`T=s:r=;-_9\h4]kF_T=CAi19EG&Eel4ZtD^VBZhkJ-`ah5'S;=6t>0X3]DC
%GsiU$q@fi'`;IL<7R$HTUT;5GI/^/Wn%<,o"\Y/"a?90[L)2kr5P_"8lRohXaiYW`&V4Zla^^_JbEjbcFm&Z%U[]@96tOE],tkEZ
%rlA"]DNpamO8%PBC+5DHBUI4$4nXrrCRQ[Md?Z6+n8;i5R[W],C\c/GIQ@jP)3Ra$HMT:TY.cS8%3jLnX-SL9Dh^1)4&7eK*4Hj>
%%BqL0^\qs@)h7*b@N-t'^:qG'r8l8>p"k$UMeKWbcTh+&B"L)#BuEi0SIAaJ=;tgP<HoiV!uR`PWdO`S>#IH)A3Dh!Gl?NKlY#B2
%Z?Oi,i4&Alhb2Z$06tt!_I;]7CV&s<;=^>Ts-1Y=./IIk_6\fPWV?G3Vl?0f)>2rIf+r*h"R[*fR\_bNjPS&(3WbKjiEC@eQ*4/q
%ee?'qq?E/>p#WWs7STGHIKlf%2f3gMWlf"C9FF83/:>B4j*2T1K1^:U_/L.#[pkB#84q*L$r-#adgZ7)-?`e\V?9&_)K]b.$Ft\f
%?r/Ps<,)-=VJ^`FLi6[C2W1R>Qb,sp0`sG8)SF\YeZE8mZ/KB1OM\`GhF+*&3Ku%W8)",cb]6C\I052CkuJspZ7i(B1,OT,7RTV5
%QkA$@@-/=<:<c&`n7rVl<0_b>jGJ`J6nY)7lGKeEO]KdO2kJkJLE=a&M"^i)]mCUD*ldDAqGifJ;8@IQGW+rLHMoXQAj$>Z(kHGE
%iYgD^Nt7KWYJlj7/^&ZQWs6huAm61#6ed7!?%YBmVCB)B+2N[h1.>`DiWmod91mF8c*FX``no)9g(Y[`3N^J4NWT0a)bQhb7ucc!
%D$7K55^I9=T]n*[#39R-@dJ>TWNFXuiB-'^DI5FW@Ba87b=]Tk"AHtd^VejE<uYCHqD]fpq;kC:N^*Q2l6&=2T:*96*e.di[@ST(
%8W'qurXH".M5PjqM0^AYI4hm#1Z-58'B,J:RVNpQ>bh&]\YIJQK0QD\:]9!s4*e#a5B!*?mDElA*tN8,T=+Q'onGF'pBPg<q3G:s
%S3/fX]0pFQg:i!abF07MoMOd`<#&h>%\JJ8+\bX7T[;CK&1M](1D),.GU9@`*DgA>6HU=ZN#T9\ikL6]ZRg8a:Y%e=e5-0X5EOWH
%VQ!`[.eolM^q<aI+q60-=>=$h/?0dA\83%+6eSn!e3%R_6i@I.W!mL^A/gao$3mn5;3X+L4XS@\c+ljW.VhFN23ic%V%lAJI$bI!
%VRb6f)I3egl7jD*Mh..\UT%jBYU.=(1k(1H6=X2V;]?]i`,oISD(;s0!<ao#FpN14B&o8WNejAIZZM[AFB2k^WnDJ7Cc!nh`s3Y8
%!uisp#+KM/XH=E/dE":S+AFT642jkWpe+bf5-VeE?R%GD^.oW9K?\;)]ih]YLh0I4gHqClD\p4uX#?hX1+XFs-q@pN#@Ko/bCk.=
%(afH=9qagVB&)(")4I\SCG=+k.Pu$1-20ijLgVl.FCoAaAO(^$'S*tf#nWR-M_Doj%=jUb*SM22WX;<^@)I3u;_%r"0j>A0G[Lts
%HduMpO[NCR1gjg/%[/r^ANLU)is9?$J<!Au<E)HO#bVE&Fr3ns*%j*Jb$'akU=q_4K,RZFh53:;XCPL8'b1IZ8<*\mL.6`oB'>ri
%%T^7N\ghY06H[qEAsB/XgE72g\7h0SN$_8"fYf1GUbcHC33l/ScuBJoD#oBKm0D&D$r$DtQ'F3m88A.LJj@+?1VAAU*"8mLd]@(@
%0&UGCChl(KJC'VBpjd`$'LU?lb/,@F(6eX$U/7KmX?oeK.QEm]93B#P"=W1^*skW86<?jU1*R!'5SW_a!X]cZ%WtK2UU"Ab%V[AL
%_[mq7TmS(-mDs+mkV=AcfE_u'$>Nun,4:Cf.8P`$BSe0WJ0n:EJ/5K$8ttXW:-jbOQ,.u)<5Yc\TKiS*4YOs\M9E+n(gR-:;(8PW
%5XC(#?=)2m)PH..L/A-XY)W@Y^H.!=;\fjADrB?3)siRc].R"d"s*e\a0q@FlSo=%L?:%@@8+bg"7dUNF>N^7iYGT)Q+!]nCr/=X
%@%d'QL-@:B.Gu5:1D@9T62ahDB>Cp&5X&hd:1Tm0X>_c9Bb]*\Wm1XV)`HM@P7N>NrY0NK)oRXm*%uo0QotR=VGPJM_/f>;PY79,
%[Be`Y1K1i3d46DfG)'-GMmhT9;C^r-[E0c6gqi>Fg+ATs\3R>?4f#0b'\FoOdE5.:ErSfEN!rL%W%R-9_q`raY(ELK1iEl/%hr;.
%#S9BS](nH2Tj6E.-.*DKR&C6NE$*h3!_o&n:-$)(1I$sE7b\Yc5)c3#UDlNiH.>TQ%Ogj,AI__ZC8@G#6'8Eh9d6Z9<I*k$NJuOA
%n*O5s?e#H]<C7@/977+K,+rH`Jf_5(^'mt,P"0NJTF/b[(&/iL!p1.%FeqA=MhsMn"lp7dk=/I5kCZ_Z2.1jJQuj7s!!/*?0D_-0
%2a#?(5k488G_Zb$mHGP=?hT1Ds5dT]@g<Pa\(Vmr?d*1"cmGdZK,PsT,5*du@@93H.`VaTK8IaUB%rt>i=Dr@H@S[G!tug^cQ$G6
%:"jP[)>H*E?uTCNAfW>i%C7@.@*flM?+/EEot[6Q;Y:8LQ8L]B%N"[iJs_E&\Heqb+&iG=d='pr`YAJ&-d[OGj'l^tTJJfo+\(3$
%?p9OClD(+$<5_Kk2l/Or0S9o`c00=Ag)lt2c*#;p]7IG+)Qls!7`UePR"?erKD$#;Xkgc<<0atOl;]_W=E4TA+RE)<gD/[Tc'@]$
%Ku>l\]@fd`'Qk<p!gCX/]h5:lCmMYT5hT%gPj!jM"=BJlYS+#sfYTq'*-/2Z;6qgX"+hLQ@_t!jYQUhGHRktKgr-"XL]>P>e+r]G
%b!\Xb:/$BhQl?ElA^.Y#;59#e]mHO`2'og-78jq\JlGJ';%8Oo1[C5!TXeKA?p62P21i@!G)^,!)E/Uh+f]>%B[e'T1ka>PVbo1:
%\R],^#59;kNi>T$dA+Rb5B:\>!>qgT"0)hSE"`Kn2^8c$0nI%YAt)'k1lW>#94/rB.ulUJW_QWFXdkQ[7olp[aCV5hV/0$Z9ZEu8
%.?dE95+63Lfi,bs<P^I`hI-]<'E2;"6D]eZ<="/);0<q[c</'Pp5?B&^).Ji<*?#3b?;CJXR/VX@;T)BX/Z;)6*+DAAR>k&0<3BH
%Q+^&"Mu%3`2RZkCOLd[p0L:>u)?lE`5pbXUG9Tmu0DQP(;:RJTlF_f"37NJ;Wc8`%Lc'%@_A@Op>_TU_Ks1!9K/;F:I/D-<\(!O9
%CP'0_YB:WeYfZE72X0A#O]&4CDIeZQCj6`*ceM,DQ7GlG8X%7nKd$tJE+bQ`<"-e]hE9O9c@'d7ZclRFXJcJM_NV=1DAjASaNT:<
%_?,OP`J-qo=Y3kolG[r](o492@^L5OAZ_C>,t&Jr&Xr2tlOH)=_B)C"cqEpD;ceOP1:J[,DXRi3#Xu/S!?#m0dN&EGVc'.<Ks&ZM
%K;WH0T,.Zp]92JKNYjS=?k3]+L+`5g)q:N*GW@6H#*$Y:j;\5ER#0L_GmL9"CM]L*.RahZ7b0,B0l0s[*EkGP%n\P,FlB4)@KpWk
%L9nkEb&n:-W5hStPRa]*:Fu2,)k%q?o8ZQ=-dF)/:GV?HB^C\u<#k>+/Xlk)kF=]g/P&a)%S^FHJ-(OeG[OOL,u`M>&Te1B(F"I6
%:XFgK-0Vs%(^I4H<,SG!jaa72`m=a`&t0&P:t6kTlql80e.S4jAsg;88R"kcQo9),2.PA4`Ii2CYe=_'nVf@CeH9A<Y>n/Z#@u/:
%=F&XW+BP6]:S2$?&Ch2t/Y0%JFMEpKF'sJSWCBs0r`12f"l>s+^tu"6#A/[c(<uQqf%D!`A3ruI"IQ(;<6]LjW)A'H64"PAc$3Pu
%e;H:ARA3'uj\A3T^&kfPIqs_4TFm.R59mss8hg[)5:^4DQ.ddJ_SJLeDA&]hRjJA$gae/R26,U?Zi)AS4QHR=X#Nqb3!P[#)B^bD
%`$X)HR?s0rR@3\lG/l>AFQNQG4-;&)MP=lM0HZXcoGubMWm3oT[q<4mfmmP2:8gfih'<%meRdl0YUiKbTnXNqS=F!m>YNZg(.:Ne
%YF>Y!Z$7W+jFKU_k%J=;9b7;XS,$:]J.)O_b]BupXA7ooE3*'sUj3d[-J65e?e-0_YLSLj^^1d*o'*ChM?/QB,Q(n=Q9gAZNa_.d
%%m7c`*GY3X6c#gN4QmMK/<+>\_[M3Y_"QLf)s0H;RUbm/-JK<@U<pA=K=Da8lW*mM/3#`#eL;+%ng]BPYqHgI@SV='+g;d8q6>SK
%30n3HR0CVk$4UKDWJotjZ`9`\J\dE::b9RtjW.((4M-M?W!1A.0Jo2O'q.*+I^l&un0TPZcA-ZA:<_41-J0;3B[4XmK$c\7Q7mS4
%V>P3rR+S6I=m_>VGp!]q4PPN"8S\h$&?ljrA(;\K&3^@Ea:cQi+dYc_/8L+q't#g/HMA)rO/]?NIM37:16l82L)iMeYc6Ql3E<j5
%*j?4pehBj[o"$S1U\Ga8Nlb+?"G_f`N\UlQ6,j5YkeCY,=r<6LP@CgFF^d8S+;$m^D-0bgb"aE@Al3/XO<uZ[1I]*p/AeH4gI'N'
%`/"S!;g_m`)%16mQ+uDr'B?B@gn!r:Kg0jf*O\;R+RDhh3>2PmH`horZRZHD.$j8dEi;#GFAs#'=bdMQk,_IHE@>ABaK%cc?8KB4
%d(ik*$4I]$?V)Pdeu7m+1,+W&XptZCAOSOn'./aZ7LoXp4K(C?7Y1N8P'bsuBc4feae%#H!9kp$0V1c!Q7l7gl"kNi<SL$/?$\FM
%J/8hf!FoT^5fc[_CkP\('VeEi7UrB!qjLLJCSV0eO)3Q!cK+9Fk^cu^(j)o)W*S68^8E[_?Z[9T_Ja8ec#C5sIifYWcqP39fNUOf
%q,1X!g)nY\-bojGQj5ulA,YjAqIP.=SPDo?j&;/@"=na7L=RRi4;CZ(Ot'0@H`@e9.!7ljQ0!uDjT@r,dr?"1KM@+O?F=4i7!\;i
%3NsF00F_M`MM:.B%]"/"MtU\SCgT;o:#-%8c@)]9k?UY.02?Md>=YgQ7=RLM2OB\"6hb<+H6;?ipYg,K?n,Bqk$Y/"LGBgF(aTig
%WrS:+19oAs:`q6KB]'[1IXjY["</#u)YfBX(\R9f6+7'eeposl#mK`$m[tFK[UAh+V1d()e_lJjJ,!.-3>nmM%:Z7em+INaXA?*t
%`&JK7-NEuBbY>8Vn(!"S:%gW;1gb+[!FB;pT6idX"S`EoLW"foBsD)OX<*5u1$Zj'*m0A](?,9hIp*fNX4G3j$f\4m:go9]Pnki&
%-@ADTSPsVJ9Dl9VGRH:C15lt&6PTFdO]Vn:*$I\[Cs?ZmX()fA_r9'$Yr_LrSjfL&d;rqYG</BG!KDj<LA)Yf1dN/HklkqfPfpi/
%l"C"G_,M>[(G+H.$)<\t!=GY^.XG3Z/_)>1']Y#`odf^S$`Sfo7Qu%5X]n4?5Go%S=]oF6?Ab0m>)Wnb$6QiF9_+bUG?iiKF\#fP
%Al+'5-BVW2D&lO#E'.L/17!CI:o.-r`\@<*V`!:A^#>L2Y>B/'K[+mF_;0O^boTQA/-od`]r=(1g<o1K;-rp$np:%M82RS1mfJ-'
%Zeh$l5qkuZ,'le.Doc@j>BpL$%Fi2'qN#\[Nc<1me0mA(`4MW%$W=GPT90gnBa%g!)r#0`0rR)T0m/L7b0,FHQ:3hna_Y19B?heH
%[.Pi9i0\anADoQ2,%c:^PCq)6Fr((+?YcWUF.hO",n\ArNpjX&SVicrQ,%K`M/S1`?HV0'.cqF%Q#^C)e.aT-C*mGZ4iStpW^)]a
%WFEF^)\R3rjOK[USiH%Yk4>3;AVY>7AT+A%0XIb.Q,b!QGOuG_SiA;S)M>J]\I;%s[\X)0J?UMW6p0iX)'*nl$+I"D9^\-la67Is
%fFY#9PfhF\"!q!qe6kC9biOJJ]AmTa;7rA!aT/WC9Rgq0jVFk]4@XiUduVW`NX@uI*U*))?_9s0GL_Gf->L>eaL`.=<g*(_B,_^`
%,j?OFhn6'&F#XQ:=Yp2s(c,5iI,j1q6BAu.Y!+dL?'f2\WM?)"Nd,lZU/^7=)#@*nVcS7'D3eM!4T'J.bNNI1ppaOmP--X5cBW.-
%"RPD_Y*>9Mni>pI:e6S$,mtmTE-4es,Z.e9Kk;n:mWI`_XR<)gW&]A?0QKj05b1<@D+qp%?*e;V]U\a$Sl<pt2E5?L@!+oBNYX#r
%>*bl)Tm4SIT1)k'Go6)BAom;B'b^<Q#.&jso.^eZ-#/\/(j!R]#4f;RBk/ujbakmZl@YM0]IRE,)03=B9WkI%=IL!sc_od%]Uq'?
%V6e:L%@rI@8J6-1"3/b8=smp':i7bSTHI[6:?M*X"dfMVg%T]U.-)%!S/@Wg8&\k2Rn";lB80O`)/-C26olCrV+<#B\PQISRA<8H
%=3hr>q5")9e1I*Aj4m+:f6Ci=cAGuS^Hi@[X"gj7fi)cL2`9\h8?BnH"Db#/ZNGbf9a_q]Di>f7,VCHSMVoMcI#82hY+tObe5iP-
%T:#7V4W?p,VUG"@o[b!U*/URdY+I4BYa@2VX>gkcOYd>7"*M'MeJKPk=7SW(WsStl"0!^_._](K,!fR\Oap]_2C#1T7qRX`;=^GL
%>4sXNR9R@4Z%qML&V\8)Sj^Vm(\8@@,uS`@=m"]0$L11R)r:.,'&%2Cb$=7kfr/E%Q%.ClB?[SE(A=+5T/DUgR;4UHGs#\F*4e]>
%-`:USS1HS5WA%07q+WBPVOf$?rVWP!IgoltN]rn?T.P&8+_%(%?i+bQrrQeTZ_lo0??"\"Hp,Sb"UE.9.=uuIJf?@7a^"((`69!Y
%RFOa;c2@[LEWa-S8Bd3rU*!7[nE!MM_k8643Z)J%=gf;S;Sn-RbVhK.E??/B0>Qf@2LEEYaWV1^g@:$R.^ZBQf^Xj/_VBC0aS[+'
%)3@U)=HEP\EZ`L@Va@Oq>7n>>OmCR)?)odTjM%^)k5.bbCa,$K/YS%TB#3!4rINQYjteWuoqNTR_OtO`g;8LU^0D.-U)::M!tlg#
%LaXKWc@e4<.9Kfm:WP9H#W;7n+%.@5.7K!`^ahQm\j<VA1DnH>^&Sb2/T`D;9^>MI00b]dXqJu4\+@/[b=a$oZPO$JhUL<cK4oDg
%@ek68'b%BEcBEg#O.mm^pS]p*;:]+d_E:QR;[&d\AL*P/8dgDH1IATH3Z3e:Of9sSq0!e`q,;)Bq0]>%iQ`bC\hQkA'?2(RXs1nd
%b8rdTT2K/ukVQmF[k#uIbG`d'e<j^2.lMhnf5-\kRLP0t<Uo0,:iC$;X,OfaQkPbIAL4YML#N_!rUG-3Ge='PYLk8aSL*k'$0n#X
%/J++9/DK1'%2tH>X2_>^.J,cn>-Hd^)7i^5%$4`<]<b'^'6D'5cGQ='J1=)Z1cuO?GOE':O?7UQd<p>Y4YDlH\q%sk*G+/hiC)cQ
%_*Z,I$2RpE*PLc(%e_)^b.HNmi0,H2Kg.#2#Xu(AkXZe2>_^p5Cgs_2lt%(^;6fc-GU_r3D),(<+r?0r>qKD4T"GJ5N416oU%P%;
%B75JNDCKUh&9C/#_(GHiC]*]Bb=9*A+@-D(LFEE*_cL/t]K?&Y)4SCJ,LcCQO9//!,K-oF$@Pa,9\0cndA/!K1e6ic!Et7;`OnA(
%MGMITl.i;7q149"c%:[L)]I-ud?'(Hh=S"-q"e\+UHh0H3jJiET0PaJJ"O_h^#P0$QPo^X,__1O1kZ\pn2I^"Mj?2+fS3ZZ1YNE/
%'(NZ6;QHFtA0=L#@c%[5iM:&DFsY[!$'UZWp,c#e0nmmlDb7oNd`=7ZX<glZ_<.@:^)_$0M]pH/6Ddu;r='Wr0Pq=\:(cJ+.@:q)
%E;9:q1(MPpKM:6_8)V+DM)uLh?[n>9(2_,8PAMhsNhpgVV[j4Ce@hq\in5T,1-h*K:"pn+kr@]MT5g>`;kUCD+*0CLp!'/Thd]R'
%6]J`9)(3T]p`LFlii:I^I2glZ,V;Ghd<%*4]G/?=C&6dZNfT%/:,+<V:t`RWobG2n9X^a]7L[k0:+6iR;H6Z"<0h-YD&Uuic6#]b
%kQL`Y;)C\[[TN\Bh+.J#L8Zf/'N;pXDSk^nW0kqp:<')N9H8pQ=Z+#[G\r/Ls!BA`Yc$FsM%^4d5+otPT-LW5S4"Zd/=eRS]]r1X
%!i'WN[i8"*ia`198Y?BNK5dDm:sA_fYrJ(LFM:C)+fAe<OQF>6kV3lJ>e\4D$l`Be5rlRu0b&`,l)_103PnPN:.R*>'N*-b<'FBD
%a6CMlH=G%;`VGkmnIIhtf>sJ_$>3/)K#K,eKAo3)QmVfc57!tB&?<)DIIhIDo[u:$6c+BCniEg"U_UGAc6"+&&#AfB6TLuV3n\k-
%b8?`h]H784&\W0u,P%#oV%)d$PW$%W2K..,/>VhAi=+9?cXQ4VhX7/@g5%<5PU<:hQ&`l)TElLtg7\2_+,B'[Dde_n_se,!lit`H
%K`F"*b4&L,4RLR-_q4^rDAl7!b1\^mdjdbEfICg*-=mp=$/`L-&='S`MAQ4jn\glX.R^3@p82p,@IcG>bf+?ckED]^b#!\JL#bhD
%0NS]GW\UQR8#Cpf[ZL0^fMT@SF`?\h9)4G2I/Y\brce,W>N1?!NhhbO14-0eZq=q0,Q*eo+R8%X4A"d:p=uEN>e6(rs'2Kl*S+il
%d)deq:*_M?e^//k$MBNm?E^4<VY05`:#1@p2bg<+RuUDrnh=dAF2D[#(L=WdPGsbB'K9a>gW'$O?=/'"V($n.XK4ruHXInl>RZuk
%j@Z*=k6o^8G7;/(_\5>B-<t.W_QD('Q$X:SO]dgR1Wp)am:qYX"@4q%nq[$'O?2LC94r31.J9p%S,d?PV_BM5^nkL6V8(d_GE)T>
%n_\Kc]?,O]kjJ`Uh5%gO66W&;F8I.ne)+1)]>G6?^[hZOn&'Rj0^&.R1Cl#GV$b1^DZu2&XR!L$kEY4:j'_)"KhI!h4e5*;JhI=<
%1D`;X\Pb/;%p@:%Ud?M[.[N?n-t8C2+FC>n0:o(T_/T/38d/97aIU=3m*4Ll4)u5:IW&8WMrA?^a5=K=\VWm:&-$Q!J,GJ(;0t9=
%iWjL\;?i*LJTro0DgM!3V6/f;T5+ga5IQ(*5VD;%rGnCWFXE<t_UmW6@POJn.i%k)SNX[^I!gC3R"b6^V1@[(PBiKXc-@Q6q7$-#
%;7oua[6@e34Vc9Z[e]L\pkta'q/i8\_@&6DiR%r``\8hEIV[Ecal]mHLl"^d&guIFR$lnZ[k-mU`_tJ8+KC)3)\C&G><G.Td<L'<
%d^-=Y1RncJmS&9PIgY*%%7Bn"=*9'QHD4@ejt_"ce_>q=D*8V[E*+qZ.F0"TDCrfsfE5\+CpC5*39U0J"P*=66"Me;d_beD(7)rJ
%]F>54<2/2oYpNCf*^=SG2:f*B<qkrI1LI@<@h*gY)N<fP-sBp?d9)8=1Mii6?r!V_`Q9e9@KdQk:o\?P<(tlk?rW>-C18q,3u91Z
%R.PWb8?DoPjbf`#%(YE;KD?2uNa[ZC=f2G!5@Jm`\PbWMp0m=9Jr>*i;@AIN`\#2oTFK-[%qgWAXA(JE6Q#+Z'h4"8"RFmE-kcPT
%8elC2GgVer0Tq/:]1#IF@\.o?3mrJd0I\$"(Q*)ooahR853,&L&e.:4e,5%RR%_&g;49agW";T#`XeAm;c'-9o;5r0#+IcW'[IT9
%Eq+NT9'"F2]VGQ,I$R`=0bX.A'o)r`S4.'l;C\oLjUb-q5]><CoA%hR<*cnIQW/Bh2D",\TOCuSJ.bt?iOiB6o;>jXY^<5qm7sO9
%nZc%S<BOJL=&;rm[QqGV9h@_BlpN9diC_"u.DBo8%u%hV7ZqDH#[^_lA"1o.Hcf&=>.s[W\OZH68t"I;2k+qE[-Ff1.KQ?fagYI"
%XCX=dd4\+`Q!_i/2%`G>'i-1"823K7Ym*IIWEf7J!7Vm5[J4hhVc'eXJ74o<@V>B%Q$Q*BS01tD@LG0D<.#&B0;LA"FPJ*NV</Ck
%mNPdP(\7Vg.%SMl;@us7Eg`tn/<I):4tr%@",6>W!8XPX%!e2ZiY\Ik0(@#`,grY@^_lIV1I-$q`91=5`Y9f:gaP0HKhg2cosG3#
%[h1QI8%]+l0pG>LdmsKCnpNAe.*BCLh>H+1InB[lLXC:_?7QrYo$2Cmba/;OaYGFqB!@nEg#>6NQE8'pHiU0DJ70>^991g6^AIN4
%WSY9,pL1un/VpT$N&J_b`toe"2FllVPCY`<Uhn>G@O[QgX->-1MQYTo(`-Cn:\luE.KO\Dfms'%->!(:+SlsA*:abrTB\sE\mKcD
%:ibk0OIKV8d*d8cE-sd;A.kh7."iP%,4-EmWAgW'4JI8aPn^2V:\uK?Zb5`LS&HF`:IJdIK5?m7eCpb+=Qg'd`>d-*E2^-Ej0:Kb
%6;GiiT"<W323j5"+(13#Q'Y(Rj7-$9m71(eVHnI-T5e@$2,!a^"[)8m:M!J?M1Go.dQ-(fjjL'>!De90X7_T'6`"rL5:4qCLD1rd
%$,a28D@bmLK"+f=R*N`-:qIcXq.)%CT8;#dSs<`U8Dej0nl"O@'Q:$\\PmW)-\\GGM6jT]KpG;@$h^A2aqR.NiAtdTZCK"+L_Y0I
%Gb$T4-er6b1'umr[8A!k(1YI.?qIAa.?qjIW3g;&'k1lQX/_%M+Z>Y']ihg8et%VD@\uVg"=ETYeh<4f:p.`p'b$q?>[]CZFRuZU
%!iqVR2'M9$auQ-0e=!:<\RnaA$n<U3&VgFuWC3mTJO5p$P6/j(F4tJ6"80-B5+.04.:j0d=99olcXs^OlcHes;N2N5,3*/R?2Zdf
%#F!To_**R"<D0p,C[]>18Pr4<l;kBoZE2rmYs)dHgf-HrSk>=eA7]=;96!nc3pjNQW&rRO,aCLdBWbut%VuiG0Q,d=N-[YLdWj.Q
%efpcc<:j?BVRaVg>*>?c2:YfgQ1n<59V&..c>,H[)QB]Hkf2k]MAk$S+UFK8##hS!h=4sQJjmu6l3Lm'1@IfE3[0uXV4OiEOu5`B
%`ANm^A]6-2'W#Bh/Yj`!51?PN'k0J7Y[bo+<GRR^5p'7]lC!lp(WK@l9Y:j*_P4D2^=&9E<P/EX`qs4Y_A;F''P>-_3hB_+Y\M]$
%R^kO1NU[2Y5f%GJZ2>AAk@c_ZNMVHUiGYcOQ2rtBj&:rF&QZ&p#!o&iRml?ih#=#,Rqs3c4i>d#74[Ag6ACEeae=TdYbb49%r.$Q
%;4iFlP0/g:O\V\h>2*H92cqIh2KUC,`Uee0HX>nrYb)ghLd5Cb,XRa?#8H,dV>Z@iXB@Y!c^uTgYmsX`@r74KqQLH,\r=>Qcip?<
%j10!>X\1LTbS3X<<d0Bd]5ifqeVP\`eWcAI3I;p8A$qI5@2KSk>*KO$Hr5%/;,:-n[3r2*X"'$,4(;6<Tan7Ql3DGl,r0+/e7!&t
%_-D!.p&"K>k7l`#]tCl9\W*&#R`[M4s2,RMkS5OfQ<!]cWk&`[=R+FZ6mAu<B<XiQlKB[!XDf@@"YqB.B3)9>*\<V)_8VH+oh2%A
%(,*8LVLDli$j/S>'/.t+K<"N69@WPn<qW@jTSY'+%h+J==8Lcq\jY1i/s$!RR*P^+'_SK(KXk$(O7AP4OZR&$.ugkaGIOMc2:5U&
%nm9hLHH1']Z83G"mIrTaSZ-G2p3P>j`LQm8#.c0(5<I\@#4%QcZQ?tcZR3D/lKuo/F,A"d0&q9VH;de6_:GYk5$kqj>[\#Ja/b!\
%]B\*NQ!_0V:!Gd+Q$`,Ae%,C5U&#D0B"^P@"D5o9L[.RoRaq7^,'Q*!3,dG2`M.h?Wc$d'`K3TU4qDLUr=J.k<F$bB*T&]R?2#VB
%;<=gu9OYGDD^(emM\h0.BAIt6C+<A11=LKLP:[8CVZO$JE%[]]k:boH=d?\R"Rp4E@+]dakFqlQ?EJdH&4@L:`=QNmD\<89)Gc(Z
%3&DI9-[VI6R\@3d#%o,Ngf8<M6pBsYY#]UDH3+3IK.6g\F'WsK"9gj%iQ4u0//qB]lY+@Kf)k%UFiTN<gs[DM=>cDI)@`MhN7f3`
%-bPG7::3j^F\sKb_IA8M_B[&^kfq_!RIJ(Ko;Tr_1i"!G4lf@k/4jHZcYnfoAeQ<\p=;*T4bc9CEeqS@&OKF>+tk%p;7D*Vat*+X
%@G9>SHPIrW\?9fk.!hg0/;F'<H<C15)/46&'<QK2(G4+'Y(UQ7MUp==7S7/=^VTn4K^u9clZC2sDJ=2%a7JiF\b?nkP5VIW8oAUS
%bVA)i4"Rq9Pa06eM+QX5P+P%O=YD=_-Coa;@a\P,AlN^kOqP-5d0n<;iFL&q:?>+;7K2);Va.M3#d5=^E_4-jn`O_kK\$j5Zn"13
%_CPn;cBX!@-,D]i84mGI<>C`OYDOcDi_rbuEi<`'4T^?t!&W05^nLl4QVNi)m*-"O2s1tC@OqZd!rZ["l=h#9C&u@6'o+N_TsXsS
%/@=MYp]4PjQ8erNE+0-":#o5O^6A?=om#Be*VED^KGE;f`HX9Yc\HQEer\IBAKkrUWMu!97UG6h-.B2O4%o&H<BU/4<;'8gIB$`k
%0rl0`&XpYQaJ$%4^gKUoVWcRNZ9SmRTV[;5.t7\%?]&(&]d*K&pY*W;MjWOkqX"O>hn+12d2a17`K+lhVl`29h7"oL-Sr;U,(&Xi
%_-=BjD<Od7@tf'f?$GWnf5BDef:JQ1p"Qqch7$hmY'_[,%IR9kgRX5)YEe4d^k2_&>NBq=[bW%[ZtoQ)WPYgTSSCqjkH8Yh\)qt0
%IDrpW4'jqSCV0"poVZm!9FSjK)^[&G)G8iqrE05TSQi4Cp(R`Q4Pm!ne%MS:P8c"DmN!`ODVf*1/I`3Rl]1[%R,2ehUN9gJhD\KC
%Z#u?gDB4#a^%_1@mZ/Bfh5S0Ef&OBNT,9])CMP)ZffZL8ZS^=B-TU&28LA6B!?e%dn`*-A^@\T#Z*!&7PK!MmlQ.?+*=o4n816hQ
%GAY*C_bo+tTNp]qJ)^GIr3ZIa^u"//>Pl#!#M8X/FK`qe*0/"%V2e=6_62%>h$N,,?2]u`kdKllS_EqbI0[2CY^'=ML:[Zeh9r0k
%8+s'Ugm1LL:Z,$Jc+D#O->Ce`g_70fWHA_f:#3Q;fpD->I7&4-o6mIlToo!q&c[j&[dBR63]bN&7um''bpG>Z2Xf;5XiJT>[j,He
%4b[mX2kSa'Cd,+(&M5=6p(`V'Zrn>Ql_l-X\#0"Kiip/>rZ,;#>MQs"24U?b\9k/q=9%.ECQIo?iYVi$QbE-^CNit\\be=(5E`['
%X#d]_3,*PK[M&kPe(h;GQfMKEW;4RecI:(Q'-#267[ci16aC`[mJ_[.q"FB5[r/o-O1gqU`VlSI;P^'d*kQ9m:4D'4F)X5<C&?<6
%GLGLL>BW4r)cQ3+pB'S#P;hsc\U05qE;RQ`f$;Y`gUAIN4E*Mf8V&S&GHfD$]g/:*I0+u[*WescN^HpieHsI0[;TH;9JnN2gNE*/
%o>*i`iIM4m%ug8<H#e*k^0t<>3Hj")S`.k(GP*$Dmi*;b+%`$YFn5=YM2okFrU.RuF>E;>PP=Db<Akr`duX/C*#s4mk\,J26*k4Y
%Gc#OkY^_%a!Y`*8!R!Ngb^'0?,:>9kp)Y4-FQ_St`oN\4DYQUug5"33:;pPMq=Z''PB9&O9fp()gYq./]nc=s99hjpRp"W2n:`if
%^Qg`8!^#RQJ$GbipH?XD$2&l:[h$Q4p[-B7$#G*!qqqHNa/1GXqf:!\j_GU$=oZA0KqOZOqdcBgAdQK:T;R*)H[83V.>20=Dk>\V
%^gPdhSZuLHqr)nONU-#i:([e-LKQ\!\:\)QV`+6ERYOZ)hE2JkoI(G4^th$30I+2n8>3Y';e=Otd+3+$T:"D>YdPkG3n.s$UE2!o
%KAeo?7fmPb[V'gYQ8,;kOuOiE48V*#HOoR@D?^TWdi%?g])+lQCe73*\NP.@j036@X0T<,hd8r8`SnXm_s)#j"3*Lhk+-i?kjI[,
%`W`,0N">=t0W[IEqqr('E+6s18%+LS#C7GYg=5k)0q%9!B92,"%o46N#>O!a=Yqh?HV<dW3k2oY]!qF\puDXXG'n_1\TqkOEm'FE
%iu@D?!ScBTjat+**4pj0E9Q6*j+k%Y>n];[+bP8P?qd1Tg3Zn6U(Alb^A$"5Hdh\@l0dqlUNqFkHL\_qm?>$RS`lM7%;t'uFSU1Y
%4bkr7rGr%G`5eqoH*"BbCWFPlStmh4(+?_oBMAJQmRY=kp&genm4F8!a%i]nqNOnShVOA"p@Yb1S_aTpD82i1#"9Rm!aK,HI@kOU
%Bs`)[FSPlSmlck=B9ks%'lX\X,YI]^2mW(O,g'?643AOgRdTF-R(&\ja)#QIZeu.?TR$)(8&pU95,nUIa\$KO,l9%(%"&I%^5CaE
%PpNkC:APZBjkkZ=WN&#j$f#oTIuW2S?>ORBr'Sm,Q!-91>$'MnBR;>#&"`=CF,AXhYNt]DC'-r15@gn<LD"to3/(_C(%^\)$L>`]
%A\3A$p6aPrEXn3/=+u-QlYJ!I$qIRpj34#8qK,BjiD\b;9a#f:hI8-F2=o/WZ9<=IAc(L*`_Pb'`]n:r`[mY,Tg)K;A(STB.G;%9
%YL&qY8o'1bZ/NfCJ$dd3hct$(pXiF"U$[oi:&*lY(4!I'B0Ng_-V"Q==i2nE(s\V*'(gTn;RaBp36CU+:cO=9"9VE.S&SX6am\_+
%<V&?eSrSs'>D=g)HU>AR:EB2KL8`o[i;j#r4eN7lGM9U8_qu[-*E2g7>5MbPXL1abLXAr?*03?VqeW?Z=ralGm$QSt%aaW6g]Ip<
%lkM1f&5j[EBs/LD0rjlIHFCV`\Isp2X.rmNcB\3dM=]jX(@9]7EQT?&o\bgt?@9lJV@pn@n@d"ibBuqBd21BR3e2,["^s1U-NaOV
%Dkq*:ZA`Am1S<A'GW).u#7Hdi1hT(=?>(Q[/LVN50,Wl]RC!L.EUX\jdpR#[aS5&`@uaE<pX2A]oCZN#_uUico1)>1-=8B4]/B&!
%l/Z[D]3\b1S:I=++un\l?MXk+jgA+'hefRHj6?I\f6LWK9N<UP#\mlBUNjLl$@6aSrV!BrHi!MA@[_s;S!3^S_:GD\lcYgq=d(\e
%l>&#[M)I,(h?R8aCKnbs)8"ZjnSC4b=5F-OQdqXt^TqoHjh":Pke"E54ML2HJkc*\]19u)i>,(fluAYACUs?=O8!p.5'=:0E,kSU
%Hchoob">^U3QuZ\r:jI_Uh`\[C4!6Lcu$!(.e'hMSDUSPV[ga!P&!A`XT,p9FE&J?B=XYOma6.`kEQ-J8\Xi'$rOKmAfFKT)&dgA
%"P[m3bE/%`B\Hojf>I!2D'3Scq/4[6K@@boTq5S,QcY,(_hiLOB\&kg0f-^"d&[=4H$4o_Gqi!&[Wu07.4`TgIHhD\jMTtW\pTmT
%l'"!AD)Z^i#>S#mOO/GDfu[=)ri$DLYSk50I5f]_cdk[%s)b.F(1.=1dlWg;RuY=+6W)8I!Aqrkb3P>5q!q)lmJVsgI^S^.kQ#lK
%Y+\YU$%?I_NSjP%?-fTEHgYcU!B/rk1(@DJr[_[>gXe[hl"G-F.%\G$ZBER3E0BVhYWeip4r<2KA"iI:*FF@Hhqa^4J!5ZLk'+ri
%],@gS%pgN>4BBfY]7mY?#jZL-ObN5Yb4F44LYK3'E`"7HqHE8IDtTmVDf=&3p"K/u?:a9e)T/ZC@c9pXT3C"2_h0/S8g2aag+t':
%BI?E-HcKd_l8UI=SGs63./ti<lGqiQJ8>,G59%<7:,hIZN5Kn\SoeKV+m[*bp[(p`ffe5gmUM)RDKGIlNU$rGU0"cCIII3%Rq6;1
%k4n:W0CIPHX4rfO\s'HAS4J/*pC]TjAW,C?m:JMPb"BINcG@$75(CTc+J"PXot+3$%G*'Ff,ekFMs#k`V3SO6nN2Okr?("/K0'V>
%O.*WEFuOj<O"^%WL?XDukh"@QPCA.`=&;L^L'.pX?M#WaoIJWZ?U+CpWRij:UgC:DIpOVn]m$=LP.Le?V"Rm[oG:.gE3_I74cG$S
%2Q9d=)jQsiH=_f^id:>tqk.<MC%KYe'A%I%Su^1EHIeB8anb5Fng7G?1`)NibPf@<o#qW^7+D%D][F>tl54JT*Ycbhq'-rl4oVn?
%Ng7:VmA>&NS%+'L^W)1]jkF;s@7dOZ$hJo7.Z4U`KYE-qkI("c\9rMd0Z:6"aE:t5lddSZeF)e1>B24u-ECokX])jfJ%Pm>A6"Bc
%?;e2/%!Q!?P56=U^iPM"!lj`h6?-l6FK)kEomTo,cS"db_t:c\l0'BkD^65[CAEL3mQ)'tdg*KkW,>nKd\q%'d7_9Gf8feVYFR3r
%*nTirQJ0k*J62PF>Kg-WPGt@[g_b@UCZ5FM6D%V5=Zf0GP'^:N<!"e#e>m0/b)932iElK>Q#DRnMuC@IEXOf$26QRUoqSg?P0^Z[
%/B><'X2^U%_`6^-3QQFE]tCGZ0-BV68s_-[X6jNQDeZBKjNQo6cdfG\/"lPQ0:eri]r0D/cV(=0gXHJ=0j"'T=5RfHT1^4bUWf'b
%jn_^3X.N_<p\llW1PA+'4cgr$!&:@!Cap6\b$O1Gdt8Z1n>I;@!k%jm#WB6gkKj]ucF>uep\^AP:Hgk%V^TlOU9)uHRVUI"Q>A(f
%`\u[+`2bQ3pHOr;q*c/4>M:`YDf#CIFB2ct\,Z'lX7*b:psT.6D5XmIrWT<DB\pbQern!i:*1TS/sb"VkIp/=IUB$ZG5=iQGj>=?
%b9nG,Dqh+\\,EZ=<@i9l`HM:8Sif\]F3B+^)s`CFLEQF`WHM0;oqPSf)t0d[LV4_J.#b\p([@:6c"k!jHp*?5RZ$M0<_4rlPl+l<
%V>h:V::;5/CXp./2Sb.ck)1bQS_'V0gV%[<L$]@b67P]V"]DFCHghf\<mOC+Rcj*OXmrt!Q.\tQc/RXBC>7Gh&K#)]LG=rZcn.'<
%InRbeQ]luqmgWiT:sC(FkDCd:s#qFF3W*rmbh'T[7c_`:f>!Kq.Y!$9ErgddZgcPV]oOI5:j40431B_sC@emcX(EI>[C+LniY6@:
%rE'!PhiNW`j_u7DfZ.S!A!KgkQ]i&TW1=X&0.5T#i[X]nAWHm,\CJ4%Dh*Z#.<ZMFqph%*DVbV,Fc`&q.1E=ae132Ai\:2`W9=Ct
%$<DTFinlggA3p3>X#;jK*DeJ4Kt5!kc7XHE\#]2_f(1m?Qe0;'/ukJM>DrRl10u"NVNWH'm]N.6:=T&?g?6X'(tkB#B^os;/i[Rm
%<.j,\F2?6-f?7JM)lHT9,j0=\O\tW);<Sa`k&"B`YrSEi@^)o8n(O!'rD?hCj\n@Y=tZep]62bYp-Ir@oJ4ARE>3s#NooTAilUBL
%8^O*>f9P0N#dX)5.3==DUS0i/=dZH)Xq*pJnm17.c2&S+eNHu\C3ScSZ2+90DJf(P<B>i-;PROqUpr!j(Ras`bA@K#5j$jd5Lna;
%B]Ebu(ia'i@lnFOJGeD?-24^r[8tG?&Bk<_gIuVKcd1KqY5pj=Zla(EgtFOIQ4?(3kY\$Dp!W"j#D5>N(b=W=7mF,)27rn_ChcQa
%")Hc%I%aE'C/=HD%9PCUDS;nfJM$!r.-b12W'uI3lH@f-&fC':'Xo^Ud/\?@A(U9;pqeipi;Dm5l?Otf^RP<9hW=msK_IBJJIH`<
%@H;_uFQE&Qg1NQ[CZ#[BZT:/XTfXCU^WKkW^LmUZUN`]lm#`'jSif]Sd6?56]J$@fO[A@N7BGBp()5\hb%FSbGcd7<7^%l@#J#/$
%4*O`a:VV4VSX$$)=S#D43V(2IQ_?oe5'>Z9E_t1*ImHkR#R+\IQ\Q)Gd.MjteX&S"lXdaol"\')L#m_+Uf%,9r>;p/j*JE6$7L?c
%3-ck;N?pp]=!=o:+G"^O/g/NO!;adjDWs,bKRJ,?IsY"up,&/O.quQl4K=#@+d\D1J2mDnPbjH(BBALPk"J]4/'Z5keakoW=S'M>
%6h.UrZa2Q.`Tk./Xk"S[C;n_m;c#O*_[Io's.4b]?NR\>RJZPZ3r5)e>tk%PU=`+EbZB"s76pd)nul8t(^+:d!sG\dYpa2D0>1Y>
%W]YZ.e*hu<i`PF3Z$DYSXt3=Z0r\qi0#k`(H?-dI7?]6->eT7ePs;GG?MR[HCCYQ]E3PR00@cX/TZY<=dWtLc](pl9l5(I98bUU3
%nkC^6W:.OQddtZf@^<&gb4G5Rai`K-j.LjN1O-fbQt>>\f:ZNZ(WmiMjaX\]VoMFd>4(VF#Jf6@eOrF&Y*$T\l5"[B$*_Xn[[Tc*
%:BB-M*O'n'k^]O0rqC?+ff6MEZe%$u$-F'lL\os;CtR'kH?u&@2BA4hXE2,RRH%']QeKP11:V\tYm,AbqQIV6J*M=;%MIG(H#%JI
%`ZbqWI7/fIjn.KA!&T%kFkc=,"*4&<V0gkM7$DTsS\]6;o-t9$l%cn$Rt(5hi0aCaD'&nh+UHT@bA(>;\Ff5ofm,(iQ55akeGE9G
%p#Pl#a(qdoS0+`oVYX3Y&'/\Th#5.VKmId;XHiL'dc["DoF;/6h"Qb?e)JMC\\%2?e^Ou.L%TN!;l`tA086&O*fI'\qk-)lfo,-=
%r.T4&cOW<AfrZn0"Z@=Rr`2`O=7=QiC+Jb^g"4YD[?l%R6Qj@^d!AEu7gA6/M^#K-Qie)XDI7/%ojWi/-F%-/\(@p<l6EIn,b@u]
%hbVro$m6d8D<LPAJr(QtkP7id,^,g5=ubK?32jPD>VED.Rao#efZkkDX;%;tcBYCMTM"nCG4CIsPb%D<)0f#QX6j96;r#Q%DRZis
%r?ig'hNiK9%k*W#kq('@0F&!#*rrU]AeMa-?GSl&?)G'"d6[4Rrr];!*Io8'Sc,`$X*Ds]F]bIr'/AfLj&@h0IH[fl1j5s_nJ#%'
%5(&iTTWAXTmF`:`\h@m8CgqRM"-s4]cXN,Mltl4:L<OJTpM+=3m'4>@1:uepk$)32?\5Y.\0>)e%]s+1?rbuEl'F]HBRDWQNn,<!
%.bLts!kUNlhVouh\K9'3Q@[5t1E.EOn^QEl*m_R6_k[*qHos2e3K@`r0<4O^[juEOolcLQQfF@\U!sa%+OfE:LU=UA^EJn#;$.:'
%l]RcIP`BB@jdLB6OT"#'`qj0oqn'=CgsLHf>'''n?iR[$c>)2$9<NSDDkGP29C#A9-m.?d.,s5pVNORo_0@OXBXqqFH*O2uIQ@=(
%X4QaAp.f"WlkWZaNnH2SRJ!q5s%Ks$q,lXJQZsEohsRq"_tDTRDf(ThBZ-6NiNt8gO*mY!2'hR-ocO+bpk5j*<VN0<h4*6d6iTFk
%lE._)Fdr+GoQG3bpp+O7HVigPVigHjF=j*Np@q&-J'\;s>+-7.de9?=KKX-i3P1ag(fYrW4tO0Yre!i1$a.dOGC9^eYG5''">t)C
%Dlm:CjLcBbW3VLE3%gJJ#/01<$^[dbTThTU%GSr=Qa\%-(VTm`rqE'&K,uQsHb_/7p-50`:r(&jRX7ibJB9aep)X^5)W'$BQ*e80
%ahXRtcO_S^Y13UZPs>$+qt?[@-F7,&]!k=hZ[b@-:WiF;,5Mf'/)?"IK7XOWrEQa[r7AM<TBn/193o:u]7YGoO\0Lth=qH%eb.XN
%4Xc9KhUTU*ZgOjnTMkZAX5>apHri@9n#[p#$>7D.K7Y/na:@nFeuC8PG@$4)!@s-G]"R!R73"-I"Q%%BH;cE8G@=>@2jWs9[AB2D
%`h;^(dPIhBh"j!?K@[1ilE6CY@_u#;p)>iSkfl'dHKrPfTgHYC#Mi*uhZF*8Ib[pq!J=^T\DriLNs8#mo)O"_GJ3V'anC,f4hfZU
%%30KY]"),?)\b<IFB.;FQ7k(%!7/+i#03p.c^O<qB.a*1E7!''U%(^,`Y(INh;QY.`F?WUTJbMrhj8]q4l:W\q[Elp)2Go;/+q3j
%!.6A<2u=sOs$?DQ?iOD2f@%BchXpDp1apM5iO++TA<nj"#-DrjLOX+=KC0OSh5aGWiY`G"Tj_a:E_6=^[:]>aO))!d+6NG02plGb
%#No/$%[n7ZRf.rjeEu2[%]JrDBt`h<l[AVCd2;LC:O=A3'">J#e"FSK7^D8"1uI]6DD0q@3I=R>`F'7ZI/J:VWPbCJh@M3MrmBDl
%S&njb"5\:%^;%kr^$2_:]>0msc)`hT[p]sf[P5o4:\gO7>5AJVed(%t]C24<A'gLY]R-L"drJo3N<U+V:KoCh'LuSI?+B1AO&lQG
%-oV:e4ah&DU49ZVK*k!$\QaCX50Y@k2?0Rc%<upjZus(Lqg@lE?fSSt1];j%b8^@HmpO!#.I9;_)c)\7X8(2*__`IOLoW3"pj<?a
%k*p;(VC;%_qV>0GDjmupWO;WJIUD+o/lhqT\!H$mrU\`O^si;JcIV\Uci<5>G8!WV1)Y?Kn6ENY.rGc]".nqn*3?)O3T$aghBd)4
%;Bb?.Ynjf^m:C21WNpXBE)kWuGe02'X/qj.4\mYc[E@b*`U8(5i9P"KY+hFkq`imgec2bL`0C;N]d&W[s*Ams9Yg7XIpZZg+9Xh*
%qYSB_F>X#qm@+4>kTS!\omtaiqT?\CiNM_Lc:i]NoCV!.=J*p>ocT'50)"T/q>H?Rf.Yq(g[`+]H/GNVo>BatA:/=EmqK8CIWlW%
%LrI8B&te?BT*sb_rPS1B4rn*!h9t+\07L64omJm2?V.+.C[lat3Wq0^^7Iq5cdNEuf,MK([9q,:q^!SsTf3ef&bk:RpBMGV1fO?H
%1T+dr:[bCUnSIm^4J"^J"hX?nq';QE8e(:A!SX02G])[SFJ4U'Q$p*Grpp/s3A/a)p;hqnm<]_pd>J#C4(KYNXBEFn'].F`;kW*o
%f>-/j\G9`\?<CMua;N"Cif@"@D_-^crjTY0Z1GJMo\l&3c=*R!Fmu'qp"Na0TNo13lj!s!/:a#X/[!s`_n$%hhhDQnLs*Ve2B(D[
%25l>bKk"5e\aZL*Y0B6OT5FYd3!Y_/(S0JI/06eUI9p)\YQokZ<^0CSlsW$W7Vqtp+&m!aK*RsN^N(s,F:V$Q6bo@&9nMd%(c:GD
%i1R$.2n5-g[?<ul3%4&:Va97cg4M0J_S+O&--rab=jT$>Gktuf`89E4dX1j!*=qNB]K*aA*foesGB!;=E,Z1iZc+Nb[l6IG$X[P1
%RIX-lO't9NWhB@#7=[*9CJfIhrs3#FRl12uj;dS,ndhF".:nR1oQ7;?luDj$@a+`.W!j>Fq:YO;5+d)57PK5#nCFQcn=Kl"J@*AA
%AU&Q05$(`J?2dpGQUKDYc1X%RC'hpF^_q!;gMg[`?9VKf1LDl&C\]Sjb1sFq8C0SW$naobR+]3]JneoX;/A8nk^\$?oDnkq6HM)U
%ds+86_3=/0-KUX.7e3kW%tc8$f2++YWkcecMks(K_.]Lj5=\J<gh2&)r(bFRIBeHHL%N^/7Fc;]mJR\WcVAXRm9b%g^_aA5&BBN,
%#d[f2LBB('r\]_N@4bsi/#9MX?t><E,+uOW/FI*Fmij5nfm%'ob_]Ier57N7F"GB;_7a5do$ibNm"N>?('Z<#lY1"H]r\,T?28UQ
%@'/;1H=D_n]5_'8CH&q]eV32t^rFZ_Tp5@NAaOfY+$(M'm90AMb3=!gJmAg;kT)mr*DD`t^nlmGX#mGgQKFKB%+Nl:UHuT3BIIO<
%>j()6e6\CRa'>Z;/^^Trfk<JElP:-Y()4PmC=h"KMo/Y-ke_DDF-'aLlfuc4g1Xak&QgT6nmm8r=\2p(8a/SMBsJ4Bc@g:?a^7L#
%beG[$3gUA#j&,UAdbVfglRr=qKi9"#njD5Z.:=g8[4M3qfIQsU+"\PQH,&`7:!()BplE5pabfD;)\;PPHm!;@hL76GSp:npYP\RJ
%9_]:DQhn/j,.$C,Nu$m6&m$Vuj$W_:C^GK;g(!/O;-?Zsos'/2H@TBAV),]%n[[W@WXc9J98pEr+51+MmE)!o@IfKLE+%Z$$6uIE
%c\KS6HTi$@3Z.=&+_XJ:KD)).@%^,)0\O(SEi@:Pg;-W9?1e*]>,"VG0mT55SsLO6iU_pQU38H;mKf;K.je#^"l]CSMmTn')*lEj
%Br?WOS"krP0uL>/+IZ-@B]n0*CD0:92SO?Nc^"G+31p2i#gVC*B`7bZCGSf#N[9DoCk#`al*\I-TZYl`E&/p2N+"-@B^Y@bp,<XC
%C^`saI-<sWqZ&O/DE.ij`kb_U2aY;`2L78[#bmXkgD"8ZGp^Bb-/Y\\iW;t(imfV.K#oFH2^`(gaIl,$Hpq$1i_CE<->+JrbJikk
%+h:sR+Sh/K[@ODm`u?/:WNjDd]T"Yu7f\*j))`QrV-%3.8PR8+/0&_mgqTs/0`Y>5d^]""i;,?5%X5fV*fC<rC>[SW,5F$"R_7,$
%-f"@Npt6c)&7jPSDIE#]gt*ESPQ0Cs?MO>>"T44<hSEqk.Z"D!4bf5T^lkSqeut9KgopH8"HIkgh]>1i\jq+6[_2ebj<4j$-%MT"
%(d=ESrI)aS+tg$kUL9<`*5Nr5D<pX;-BIUBJM**_h&FH2rIrVlOBA*i^%&MTFa>.mYsa+<,3jcg#8)?*Yn:;'EDcb"U&lYQ/2YLT
%G8q\qOM;!m'BH?:A$U`*\i^F><>nf"3O^KOj&Mr3^lKcc.+?0WX;iid#Wks,A+L"e8WD/O?*O4^EJm-1D2XB"#ZhLK5VG3*))qYg
%Y8kRH.>q8&lAZQ<:D72j"i\>5IE*T)-=Qti4"uO^hG"3AE+`Tq=O[N<Z3VIn4?6s:Z:l?]Mn2kCj3HP4<F$T"dp4r`\M4thnC6t7
%FM?p#CEA@j7eKhkB8%c+_Jh']hhmtEe+2Q,nZh(]8mJ2u$P@ZE(uOP+gkOl,Za"$"eBcVY8A/8TIJ+/B4AJ+NautNnn'd[d69(8B
%Fi=SW+s2;]9hK5k<J5q_a<#mgZ/Z#N)AMfMNq!9%&Jlf,60V31@gPBsgu<!]+-(M*5"?"OO8s.AJ>;d$][?$.)SAjY_*K/A<^r#j
%@;!ILRSj;_K]DlBC&c\DUJ4#NVrMhT5MlY;1He5iSPAI9qRHtBSJA\1s0P&T'+=GKmCP8mPmasl?(jW8*Q?GYNN"I]!PFQg,]uOM
%I#*l*F`A1184O34_fkl2=j$n3*h.l/`aQ/@qgiRlrB#K%K9qs4QN*a_@VQ&M;[tS136]Hm]SbKHZ[oNOrCia`6,!&.K#<`FOo5D]
%:7t5I)bm!N`q9+]ahk.<]5UE\o4diD(Qr,)OY!$Ji^4)hKE/@RfP"t)SOG7T"(e1LZ@L5tp:4.1LH:@B9<n*Y?!=s?m!K`MhURN-
%$`B.skC39@?SA\&aF5=)_f/alEgVXX?""Xck`LRd&tZLBTeX\dm`S#_8@\L(5?V[C8n[^ZSV@`T$c'Dm"3j[Z.#BFk4Vh]^N6ist
%BY#NdL<^tTA->UC\roR/'gd2J)e.]]W&r,2AY;J6&WtAVU8L6`kTB,=S-O$/dHPc0.JXW+_rM*ajUk13+0j<*'MF#Vj?UQRfF'F^
%!]&AZmJXIPQTb\>&VpfGnaaXh\8;p;jfJSirHUhi@pd65hI(!pa@JE+@T!'F6'23B:*M$sdVhK7$W&Q-q*IXC/RS]nW#@WmLbPe^
%[r;Um=VBEW!SIe^9+(D2cZqEAnn:m[BA`\/@qAmqT$q_(IMHt0`,'2P_COi).Y]:OTaS&sA(Ku<20@dlFCE^Oj4)G0=4"LY"XgLI
%$a64]-"^(C(LBDjW>;#6L+^,j/s_l!%:+2^*G'H=TS>)s4fWt"5n.+E"eF*M2ba`eGQjY3O,^<9F!i4<=.&s_C"o=AnA1AT`P8[T
%']eLA=R(o=gg5J(7];YoBRNfkefHB%a9[dnklj$eN0?mU*90+_%aHmM$VnT>2T(qpl%m;dR0ZP%GDn2NZg?3-,7Sg<j]qHNj[[]k
%A>":$3MWJd>kp@r%2L^qiK?4nNfI7!(QI%RZOX9ajn=X75W)"\#c%?kA<>R"CgqH:GLX*hBY:0qWpWp=c,a?QG:)BqN^6UX9^=Dk
%Mr"<K2I7nJA?jlWW??(r$%N>=lT.bhO_+lBAQ:N?\Na\=mM%4V=W86&`2;t-aaGW\,P%pkq2=_V#D^\GF.H`![%[0.,1Eeh9T;*I
%W$RCtD>UnO/DHkZH!P'X*J^%k2p$-YYCL;lLt.7r>hJLH"\-$0JTY@%rGZmgS3"CRUC/:/ff_7q1Q7kb=16R-\28&t^UY@=V%?Y\
%cN=]A$I443[P$#-l&2<Wi5_eVq[Pi+?t#!T^6(9-_8tQ7iAEYXrgjR2fo_?Rb*6m,F%_cI+a-#cC)ICg_4csq(MRiq8Km5>r2SSH
%7&U#Rc_HOqBL'3g7i!csU`Sa/%&oTbEf6S#Ck`U*q09c_In!7YIrd>r&JL(3lj^LK6]A^/1EqKmH[][t0!Gc-f4o;/YP(]r^r0A*
%2rP5U7n/)IT&:6jdBl+V*5Ndf&)%6i_f;ir-uaiR0$Wl1:TW#?U?RfW:%'h*eg%Xn<Q28$U!QsbZdS#`*4NnKWFTa9T`oI#>UR$*
%b&7M3d`5`fM5Y;"9-4Op;a2.b>f4bOQ=D-d'bsL#.#o5MbR1sQ-nqYV/oXFSH=SR>)p9H>1*pp&.2Grm(X40\.Q]d[;-_2,]OuG)
%o%Q<g7n-b5In/.5;*6V'r6G64=_c\'4>KrrDhQg5[69L>`dQG=^U$'$dX4/4G'6CJ1:6R71-X6?qVS(T_Toh5W^r\^5&VF%9Rr`W
%!Ac4S5%8LA^da06ajIC>MM%Tho9I5gke-F6b4>#H/8>eV:\jL=JLH6Wo>9n=`i2<uo?3YH9B7L4B^;YfPN4jsj``(MlGCX50i.($
%:po5[]"7EP)1=;?GG=6cL"Fl.1+oq3G]\NC("WU2;WYjXBW6;3o"q[[D&q>ni>Gcld)AJGml?;P30k(2_C[ro4kq=GO,[KWkFVo@
%l[$(?1/oLM_i;`6SP_Nin`6YuE)\ujmtm]gq14a\_g/S^bjaM+lf!6:*Q?<9>RqY0P^jY$_`@G0RF4%SH8m!N/0M$HSs2eKFImf`
%H8kspcAD050c[9!7`gK,b*&-Np.qS>Y=BoR0b\S<5@KtrOX%NkO2lJdB/Z&>7f?"bA']H=[[[5:i(%jqdnkge2rE;]rD59A$0>S%
%>Bh<OM[6H>J,Ch1Al95KhiCL8d$pSs"]J.r*t'@3g3%aoE`.rV%e-%j)7G$-EZ-Y)'1R`(XB4\"$8Q_s!^o$0=Z<FSi-;j5Bj-(%
%f%?DfLm_,<TfK_'3iruqR.9sUY\g0c*G/,0.T4T*;3?2'2oCMPk\PDWWRlVUW(?%5j]PQY50WJ-_g@5@OUD%\`A\h1@ZaD#)*&XC
%o4d!;eEFq<4&'2a]KRO$O@)se9esR!F]O[Y+Ym:oNnB/$IP%<//s).=ZkO#CZ&onemR9lI$5^'&ZRpm#b&80.'Q-%FEmdiFcP],p
%h>]('E5#Y4R?tboq8X]5B-9F6YQE/HaWZ@6mKAC::5&B,A*o[)W.kIgVY`@Nm(ir;AF.Ng&G[#$b[\tIaRFGj"a;hM-!uHB_?ie>
%^ad)B+.[J;#fY&[.7M/\LNh6<4;]-46[!78005T&+'ju-^0J@Me][gBEO!kD[qaaRot0m\l3AD(<6Bp^oBJYTqma5*3BgUG\^)\n
%X)S,X/m%<SfC`cbH"4S2F@($B3rI*45/hDdS'G1g/D-XeU06ikE7R3l]!#?b?lLV]X*.=EgI5N#);`TXLK)pR[FtY6.LUL#p.\Ag
%gP?3K?SQe%]N.+5:qTd)25h9@jfYiZ'Y>C,Jn%W$#oHr*D%5b^G/F\C5D(HPUf9m=h0f/E+#kZMhcW#H(_lH!91XE[qk.E-]<GbR
%74!3*`o4e24m:7@I#7grh1qXhO$^Xo[rW<E..FKFmBl<<eO=XhJbdL!c#/VL01:+^XDjd*(qk1OjR1JIV$K4r<dsm&>BI`NgQGVM
%[B669++[T%bIJ^_EpM7`!N^=a*@%RR88sGeg9^3-<F_Ij^VZ$E+3M,$SlB8h5&#85LLn0Elm=a-HfTS#<13"u\M:d:6\h7->RgKB
%95l-$&U3F2KjGr^,-PoSQcK?50At]rLq1)AV_mtIJ]L*;O@nau95X7sj.b#Gc`$I+rR+uH>sL?kdlW"9h5#LenZ(jOli>cJ2lg[S
%^KN\uo3nMqVXsBcqU!EMeT?8JFAl.&NY08@bd="PRa)!NDf,0+Wp0Gm)L4Zjf[Y(m7X[+M4AIc)?_#+Z.q_gUk>t=>M2]ZORa07*
%i]\/r(=dMU^387WSQ.OQa>m<XXJYTiV/tg[cD$cQ:MA5Fca9.dDbAlMNMitWo@q#^@P%1/68b=krqU.n9iXnHgssJ-KdWgr$e?nB
%=\Ugj`$VXa)t"'Z;O_edh4tT$YF<b__MZUNRa/)=Wi_k,24\(ZZ'e\Bq*dr2Eo<juVXpjbo3/?H=4?u1O1?J[Z`'bmX/5F1Qg(R8
%r.*b;/Sc]`F:g"5Al)4b=%a?M+5c\pi+B:K,m2M8^Y%iX'dI1==PO"&T=[+X'g>(K(/2?-omLt3;[V&--G$B5<4dqjVi`pm\rbEn
%P.+26.tV4B%0`b[]0IODlIgYb_c)H2$6n:Y2\nUQ9o3G*Cdk;#Ns[8L7#EZ.'*>(,<9eU+Ek*bGT@ibe[Tb$PUfiItjr@8MpnGAT
%C=`<d\T=Ba'hotrs-gQ-%^R0!4rj6a]9#XfBSCPjc3+[(:Gsi:RrUgL"+]%2`@V\)\VoHEg`HL,T0SHHa@1;MC6^8WfA)e%Dr]mZ
%#C;*Bc-smghjVSHkN[8(Ap"lY.u07E?Uq"B_9uAI;qLU<mR54:!;s3E8,Wh[7U0"K)F&G)I/XutdZ)KLNCBj$$r5f<>*Xhm;.btZ
%[M.m$o3ua))n(Pm%bD@7Hj'j<s0JDNi]UDHQCm)%"`H<Kna*QTNb=ahFW<+TC.)P&_,jUq)c?EuRQ;-L]`]A]*_VIDcX\6GAL12<
%do%=]io/]ZQ,9:P];W'`Ks`'_Y^6anXWr]DPk2Pp*\O:aT*gCF<Vg3>GelF*Fs]W"%!Tdq5Z%IGUckB0E8PGe&qSb?b*t[+D;IpS
%7TeZ$3Rjs:WU:ogjj!,r6^3ilX4TQ3#qTU(L\?':OY.Oj?2;"!SlVD\%%gE[;7)eg)6]Vfi#\qm,DmjSQ1plg0e_PV&4h$h_jD$B
%e@j9tKac.S,:c\:P2TA!gtD@crcd-l7#"&fgeE69of+ncKkTLga5&9r+01eIh39jMHm7I42j:hKTKDS]?Bt!,hEuEW;(e:WqhD]p
%pUt0oI9<7_\;P[53@:`LUp/$5f*JfkUDMKFbhnDYKoJMSXF6IcoQ;$>0LB'1O#&2c^&IW(DBJVC/(W%VhCQDt,+tPG'\6_HAti56
%/N124i,dkIWAT\l+TooTcGur'pS$eXG?"2uQ1H0:MlHYYNdKZ8&!h47!>13Vm^aHDb:U\&Q8&hKKLI]O*W$I_\6hq#N9-_TMb%aN
%-q-gR?'"A:\Qt&GSl(E5X?2,k)&GR1/=8/QV&6Lqe9ob$VFZSQ-(jcWL,_dO5=1Z(<".N4MBV(eN*G=G^3C#JWPS>QH?MW%?q&]R
%ZRBS,L-a%18)o74AF.Ij3B;p+1R,X/1:RmA\)?o14[/R1j@-(p*mN*dTdNNZWh.+C>]&cf$MD$$hfrb$=f9^8DXn*7U3EoB[UY0Z
%A\ELi]rZ@7V5A:=>q+s/.[>$_#LbYnIndbCpiK<Xn1K"S@6=Gd]^i+YM^>Sem4.1k]>Km9-Ru[liA.H0qa!Yqnln/[(B8CFA9Yjj
%E:%"N^A_@C*0dLrPRm]sN@!4?CsFLbn"BD`fA,J0a.ViMZ#4J+drY::]GKBq(N6G+JD.d6A70+TGAtn*(?#;4#f(5V?]I524Crc/
%g\'>P]aCh<q\!-t=E!TaccJc:TM)E4gN1`7*PXnbHf)3D_i$b[F1&]^7_k[#drfr0GL/\qe00qjLbt(tcnD."F(K%ZrSLfgI*]mA
%2TNOL7I!`B*O,8H1"fHPPYRE]l\!NW]:VFg`'ln)(R?tI@Ah:oDD>AlM4-XE-C_3=m^Ri[_*\i?P4TQ58ur(<A5Zp6_?:T3+Wm1N
%C=Q<A0X8dfK8ne\3`u*QTLIKo^RJB/WoKpVKbT&_:S+)h.W&.>VND*&$(lZ/a>msP:N`1lbu%D[US\R86j@u0/1pE?LQqoFqh`@h
%Q]O>3,+HntDQK=5jaW#:fBo["^.eQ!Y%[1e[MC`J)<-:FF5pkQC0V3lIn%sZ\$[)AH!k@<e,.1'CGRrZ&bb!5A3K(cee<@q]t'oG
%=WBbd21SQi`C=#_fNE0Z<YYS$-Y_X=q!hD050ULQf,U'n4:6^DfuOc\_%E6Sot[(X(/9U*I@UaJ]>98.q75ht@U[m2d)cj%"G4YW
%;]!+C8>%:.g^"LPit[_krR#hd6nTasoq$54n*\gc<hkm<QT*KcdW#0.np2\QS:_+$\8t80GtM#"!kS3@D^hUIS?(RG^o@%R<G/SL
%I[6H@17[GhEdt+7$5)F^Y-&5uCuj]mj$7GArBAUF.EiE6i[c6""a2e'/j-3Ug@R-l'k#N05]9-d"9i6H9Mb7ZNM%mnQ5rJq!Q<fd
%]F*#tImGrBNXXGEWW9qP3/6F%'35-[:aOK**6s0Gg7`4ue70&ZZjP0f,p>-r6T%nqi0`Xr4bo"Z5o!esPh`tc:V;k8B4t9dm:ko[
%q8U".D;n4agp^knn%+b0c`8bs9DM'7a>SrLq3[/S4<$6`?HPd#`:7`\S)@1.5;1gY0BEST+*hIbnjU7,Ef*Nc>[@T(<(je4R+2+L
%Use1O>Q&/R2AG$62j6k:R@OdEh7\:*V0NH<-HNXg^"4pm/*'W=2:9*d*j#"hl8Rt\:u6Qt_S&9eQM?jAq??5IEN<Hnfhg_clPBkk
%VnUb94>Xq7\XMei^Bg*01Uu%t<H88RF"R2Cjh6-%9,kU[EjC&j_Jf+:kQVTjHi:etjI`,,rk3cpgD#+:q#pHgKkHnZS-A;7:=Sj0
%^u?Soei2-mIc5m=U-(6Af.[&&^G/NW$G=tDgsceSJfaeq5JO9Kqc:J2n)Km'c$T)0rPEoTh4MNsIc1;B7n35Pp\KR[cMCM9:3<\3
%ImCuce^&:u_o2<G%B%$&DtPpXPU8"Y1%+]BB4`lZm$dQQ5MnG[V*ZjdC&32T_@s*.'rdDSlfY;*)S4I;ka\[[n+@%nTQqd:[.jEN
%db6-Zngs5%$]bdYE?F'PVg,`?j.!dN_>dQNnZ+\Jn+@&!&I2t'Nd%[6\0fO:hP\-?0Dnkcfi4f-b0PkiAok._GeD_!753u>Ic7d4
%9lV94(eGWSp[Lt+R#Z<ihb<Iu)ehr835&bq?GCK_iq%,Vl0e3`.^j<Pl=X**9mNq7KjML&qFskQ2SPUnLgsrOlPHG\>lTAg5EgHm
%Al8:`qHgpMFg'9NlY#l'nS\!@me%MPkK;so^WDDmX]ta/pEMbp8n6I8L#)#1Em!';p\G'_0gcBeP%#*]lPH/V1uqT(n6Wl&ni1=t
%`1kjGXX'Z=jja&$NFu<fHEWs!E=Lah2bj"=o3T1NN/0oEnrC<503g6=,)K7TlPEk=CJ3aQme$t&e^$`-S*41n0I;P>0XJ:@0<,L<
%h7\:*Sb1e((Kt*+`k?i[_I5hIBB4F<_M*W9.:K/159q:R;TP)j>Stc]n=2!T0'XM^-MJO`?&E(qP&km-]t8+._oJS>OZRlqj<FK^
%Qdb%bN7kcRKZXGA&h)#hm.P$&@7=9\H'K;dF+r`3]\(XKc'MVIDHH%eip#(_@3Lt>=VB&)jcZ]UH>d#AT>AEcZ_QdLc)\h,q=muT
%'ee*u$M3XhAS(E,I%d#(pc`RIGM[diZ%R/QqR]ZjSm/D;O8HW"S^+LCmp.mPcAq_XSX8BG_tVL+7U^L[G1T82n*VZneA&HiDu],G
%Zhj6^\LdHdM<!Z;[js)NkC5NWK`nm[[[OdD4e60Vb\hI]Dt0l^[%I,rG.X4o#P%F\@lH$)pHLI7:K!=UF.4+RrT(W:2&:Z7\AnO)
%>IXLDRHk@>2O8^5ftBm\Ghf7)h"[p]f!W1AglaX^]lCG9ord%rh-N@Hh!b"n,\(2[^3$.%Y?odQXnm?XrQ2bTP2.@m>lHhU#M(7U
%*P\-tJtCaLIOfd8o=^]Rhce]*#P\&5oMX].?r:/,/11pf)r&R1a'7km^9jqOXQ!(/VX%D\D(Y\g[)gaDoC]4j]_d-[R#D3=7j)"/
%]5Dqp36C>In`fVHU,l`X>Kc`gD!Bu5_kt!b9<;I.^,Ht&B,B0rVYG=:b5QG#N%TVI(CU2Lm:)d`]NWpN];aM0\\2)1?EN&2D49tW
%h\j(iY0rbmE][30"A\KPNQ-5VIjHo$CgmrKe<A.Z/\ZD?Z;u7#..F`@hmKAC5Fj<0I,osmk&Zh=4T+;;g+QoQ6k6s9dCK8J+`<ms
%kG_[Lb:bE1=i;%c.7J^fm#$$h>!ZK::]h:&d3&9hbcsBNHCZVc&f90Nmd#(rWu^Wi[eO$2D8j+@mi=;`2U*fII;_8>4!.E7O0OB2
%.-l8&b!0)@o$.KDlS$fkcLE$d\FjGtfLsUaq*o0!J_GoamU[BA2fA4_f'>?ZKARCRgBr[SOS:^c?2EQM8k[#c"kNG.D;rCui%RO%
%J=c]6bN"F"LH1:g/-=NNnYV\k4R;0%W<i8H7gI-,:X+=ae*-MinqnfIKe'OrRDSC^LBp[OZLh*e.4`_&`(ol>'J?elEPgmYU6a>.
%\&DA8Cse*0^cWKM-!PHCN\t>IoN/9oKAE.gO.9^7Hsr_XQ8r7<.N>;6?:hhU:,6%T/>OnZis,LkYCt?U:TIRN3Bt9fd7[">Sj1GQ
%T0#k<W&E7)-Tl%o/Q=C4G_YqBVgXt^`=bN1o2IDI#?SB&p-LA(E&T$E;6i3^B%MMG%L-<<V&ofTRWi-?rX;'F]J^YCmj8VCmWNaB
%E3N-ApbDiq9tCN]@Pp2&;h//OBL0G][Y\`Tk"B?:5gJ%FDGCiD]^Fb-6DepLd6sX6)nF3KT;2iRl#'2i;b](%J!`Hd=BoYS/Y,+9
%?\%jLll*9i99A[V?&#5I"*KU.d?i=I.pk4AE/&-1<NL\;/]MFih[2or.49u<IJ!O,A?76;^neDHNZ/_m5=r)`:H&a.\$8rkjN!fc
%`XauWZY&=uU\"sujDUGm5sEk%8`</+dP])G*K?`uh`+>FYg]fQT(nbH.cOE7L+I];Se8'1Fp\qH%-rUsUkR22b?S[d0%c>Og@3tR
%6URZQG;E"ELH8Pl4O10I.+ZAgFZ/^Vibd`J/reQ(7t"?I:0e%a9Y";V2Jp6oTVY:igEGc_WVkI6lMrYBkT(.W0lV9_>\`!8*Q!J0
%f&I5@s39s<+@\8>)ePQWr0=bF$IjejnCJV`A_CQ)/cB4H+W?]k?TK<JJrPe=\2R2B=@\[fqb7TaO,HN[J07b-HgCe(.ItA!9Vg1?
%6gn0Yp5Vk4-e63A$\h"?H+N0:24)$j?eF<Lp.`n(?c&r2mTN^cg,S8gXR,hVQ]Tlp3u2qg'd)g9WEo+]rqp@;;J]'PFWFB-htT##
%US=.G3?obY-RlBU6O0c0SFq1)bRN&Hk&L,59H<ZL#L&YA2)YO)N4oVN#g1D40$$)1,3hX*`6.H+g:<EJ?Un-%(S2+A^-6S!:PB4%
%>+!VUE*pIoiTfF*48q!?3Uh,Ac#h-RoBq>r#<0R2Xofg/6,S=:?\EQo1P5(<H5;9ug6_N&mYepOc(B5gO3n4LDj*,lBR`$sC@8O&
%S8Yj>/^.s:4/[2LoH&5Eah.'/9;h6)rY]q)kNpsrBD]aBk@<-:kF8NQK9(S1?eT_=Y!4$.>^pp%S[I@$Y!5-UICHrr/jKA*]Bbo0
%HSFpdo5\9Uq<-e5CEef[f0g\;>SDD\kd_l1q!FNmj55@nPRY0k`1iubqbRqQ%Cf0JW"H):oYashp@,<]F\Nc!SN>M;)Ba?GaC*+-
%.Yso9)*1^F;R(1OPi81i#;YUWRaarOm,Nqic)gGJ-''U^RC?pVjJG;`ZF(3>:o'mEd2VL6R9VHgXJG4g:1J1:R'e"AbF>Vn=MfGb
%cT<3($uV!cK,/)aE"3DY%nU<paGjZKL7LI_BhJ&gr<NCJ$LFe?.MtcjA"Q,Z0dA=9IA"p+WITDLiqUmPNP5oQ)13_<s/9='*c0q3
%7:or!&-Au94M:k">Q@8R39!b&naOj\rcu>k'fSoRZI&TI(@s!(/D?hhKMa5sW\J165>S&1APLZT41D1*JCp>hh_0Io$^r:Z_,AT]
%B3r'^Xl6[K$D&=;qSV-MguBhrpSaUbY>e,l9<7fP2g+;mFAhs!7qC5*nl>P;Y]u`lhJMTO7't>,hRUC^/K7h65dejhC%cd>#(@=`
%,tP:J&+aG04!Vh-e*L03;J`)Hdfr3DgG2P.7!WZs6V+Cjb&Y(h/K;hQ_t\j+""c,h1+&FF_Z"u-k6pbM#>9Rn?[Yo;HDV.6?O!M]
%?aOkg3[KO`ZU)ss5jegt%''h,BGNJA(M=^b)gN;UD`ggH@C5#I)<n]<,[sfG7$DMBWEABp9BH>`NBhj]H7-c>cW1[n`fu#iRE<9-
%+>3N.ZdX8^+6CiEmlI*>X`2hX[3#lbg?3;k@PN+^!fcl[\\>`cYr18NotNCoWhSd(GtsQ')?*Po)LQ_>qq_"up[>Y/Cc,6AD=RPg
%p[_1ogZb5Mri^eDk@sG9q<lS2bS<ajQ)uW4Y'-CcSi_!mO=f@j%_\(N^d)-Za@j+%([70b07)ZR\T"qT:jq<V)\2]fbs*L4f?o3X
%b!J4snZ`j*m9T#Im+XTa\pqiB1P:tQG]rhU)UEE8!be'?h`-:A)D7;YbI-,RrC9!e\S!#%H=Qmc/Esk8if;mMd$eb5q>/F6FB7^6
%?ZL7]*B>3uk6u^\>C)r@Qn9_7df>Vhk=X_icFXs$XD/"1bZ!p`SiN<J`O0UDYJ%IF:g8YZq0dYP'%[!L,+^s0Xl3n]e/]1\dOLOg
%PZKZ*<d;PmJ@q>E['GD6OM%$UZm)4)0<`BW<ksJWV%k!tk:a#N.'5%V%Ekdi8ul''(-R\@<YTrTQWGu-<i!\0b:@LeaW2.O048]m
%#-\`c!.ad*"o'j'm,)a1?iYhE"6=[NGW&R'"u1Ws0*>Ul#X$IQWkgG`jNLDh[!;La]_(]KcJpg/etaSf7$)$LqXp;]Wu5H/#(PfC
%#9@@"'Q['_/H=+in5&p/+%mFlqdTKCE6MDS=ZYVt^fG]pki\?ZI*\s+aQ"^%Y1C&[EZecUp3'+oTCEFU32KL!Mn#IX*,L2Rd)LCO
%SFTG+OJo()hT(mg0I:=hZ^c9Qr<cA4U\q#D%ira81p)iV":TEYE_;UYBTA&uje3))qc[:NF--r(L;hlKCcU$n4CZ!odj+R^m<G.9
%nT[:OF#_\[L:WJpNP*alI@8s,TCfA^0=P`/%u$h9(1"X=AKdWn035%S*+f(_BrHr1_9iEn^M*,F>MC8fPgZN-(#ClECS]c0Jnu!I
%RVc\ee4(!bG73m:h\k0;:WVPD*%,`IfR1gMEdCtpjF4u(*qM/G6nS/^V^!aOHKAgq>foS@AR7Om.(*\/B)9Z\4BQtf:WS=blSn-3
%_X!@P/_K<s0jJbNmciiQEe)$p%sd$_qboM52HLCbScphrFQAE8DH1.OXRM7@/=ra]bRX2(:C#2+-K@R_acXD1Af[HFlq?OnL(a$r
%?%tFDK?c-BAf7Ks"lBet%_kbp@1^C"@W""j;^W72U[u/t5%ei%E*#":6:!au6m:@@iYHIZIW)sJ/DEN8SGjLHJoF8KhYDZ6r>p\l
%mYsJlbU8*bS)9s<DS$Z018"A=n]9V]Xf8ktITuC0cIQDhdXEr'`Djt67f:GmdY8/8)qY#,-V:1;\tpXiL=%PtS]/s,'jP00Y*(_.
%X;<FMmgO2KQf%3^;q2O9HZ0,9ST4(6.q?Q/GFW[@m&f!H*Yb+TIOfR]T6EcEbC=4A89Y:.)lLeJ/!00-iAY'h.tk[(-]S\VDinO%
%:bu0Cgt:YTIaHk4_ESR[XosO#f*oOicqkpX(=a.Pc";1AI9Pd*EpL7*LBSp4^:Ar*4#FNs3]s6Vn_iT<"]E;/f^FJ2(/Es:,hNb8
%@"d$AERcguO34*jf6$98l(qD9fnpm+)>0<,:iR:aGcE"33(gNk2T$Sie7!.K2V.!h_."dXB\Zj-YGL9qid:(`q#K4s-bPGR:?/C'
%/"5[E+uobiS#V/lk;acUITSm65;/CZQ,7J'qsc"e^N`\04fA4]+8dFj?Y5anC^/=;E.*EdQGm<\[ejoLr-g$</14EjR0K-,N1o#A
%']kjI]5h5)?s7F1i@]$c^o.BgONG)i[0<7m]?1\Q=k&Z\bTU/"=#sSg+_gPb8K^4s$@6uU_dpeTUEm[W9-BE9V0Up-/^m<H0R3JB
%78lZZbV:k`3jG.UmEkO#rpe+#8tMgodpl@sba,oH4tr""M;bV*a3a]j)_nQSd9UiqqgN_"MIMDFZlF;dK3s!l^+4D6hnNA+s4-AR
%pc8%4%fV7`Wqm\gBQJbV_]LYB[N8r#b/<E"TJOWdk$$ZI`*K(dm8r2;qX6Z+&)I8S>J[1ph:CgkF`(d9GkhhU?Caa+I(27_*M0+S
%6ZiWT]SgcN-2@MZJ)ZMrQpVU;m)QUu5PH)VN7l#<s+3JDiQ"/rqTh09DKkO6L7<P0:QJtRU?,rd(?Ql2c`<3Wb^DM*1bl1iRQ@>q
%6CueRAPU[-]HCnOBiFR@ccOP.jE4bekFQ.O"C5]fL!5DCQDD`=&te>$7=l:$0Cp[#LjWOf"PI[Dmf_i[PVgEb]U5%1c-[+c&TpAo
%L6H:1%QYD:M+pTA70'ra<s2fA':0Edd:+`s.HY=(o.$qYn<T5=AR1dhA2u:l$F]/D=m*&DbGuU9bZ/-IQH3bo#8r:0i*.6`Oeo^n
%$.>?DBs;maTE_'_-,NIsT9hq\THrhW-I:#;WgMsW1,QK`Lru6f^`#5$V\Vj('HG81e+>c#!BRV.D(pJ'#"DR"DS2VEQseBC&Z-")
%Mjh]eV^\-hF-)*g-Gd&$\seLtO>M3?_sarp!)J+\Gb*kO]*5p4@"kU33Ssm_8V4`]GQE0.)+a*UfI%a'cLV#_A+O4@^^G4=XjV:p
%GF`C$Vs"CE&N:5T<`MYB-KR[a7kiS4!'Zg;($H"Z"B[hiW<N?%TEeW]=GnNBl4qqk:#T65JXhT%La+>m"2#hpL`-Mf/[2A(3E;GF
%?uoja;XG7p%T$NpGt4>*!#9d1D5sI+nsfd>'3(EuSC[q[$M[BGo=V.2KHNCT965t="(8?W)1'/N'bM$s:'/"h&WpM@!d#l)4M@M'
%J2rL[js@r*pjX>SKEa\K)&E>p/!>DsV!p(*:-aA1aoDGf+@9a,8qho:Eb\nqE#AckPsBr5_?m4C&cf,:_\<8L1_pJ*QN=-q<P7/Y
%'U)NS0>a#9hGR<<5b\Gb,VfePMi-d&M%&_2'of>&`Sgfj6relc,TqIEq@R/RFeDV=TEgS!=^mlE8NO9LaGmEXP@0Otn0qY_#/gM5
%[;`F+\0E57@J8&,oI1ok$_"V!HsW+I,@j(s5m[rb@GgSnZsiF=0gqFSQ]=Hr-`JdI:1PmdW"$<BS5KJ"B8Ji1!S7^ha5Ng>("j##
%/79qO+MX_f]QI5N(0T4.k5Z_-<=qs09gON6+OejUUIAQB5]2Stp^TR0M]cNr[fB8>Q"/$6cuF@5]Ybu2WIi5pTH@R13\F3:XA7c*
%TK&G_!(#%aeOb9-n:04/)/3$imsr@W-]t?ef7C89$ZusKPr-9>&UC7*X:<7%3(G8Al38sfNf,;=?j$#9JL/0G!]7-U4NA\(i@Z7)
%4N0]'G#)0Wjm*OqRL"!`W3@:uF>=C(+TcH1GttVKi&_6"fg&s=(>o444a`/tfIk%DEu`6a(FG,8;WLgX!j3R5<mkrY!%;0nFip`3
%'9WnS.L]/K%]g39Sh+T7cnkdEWWdh@."#DfTNq8*%UTioHjL(6'/WfH^^`7Aq)ce1(^`q;[L=L_+TiEd?-8Z;BVn<Gc&dR./g2@8
%P7c+u3=dAfdV$'!p_BkqKh2T$M/;0"OApH-quk#0@)BMW:'<aZ!S5>r@E3qB@,`kcpn9J]YG0AIQ@Mr6'9?0klGIfG1BD&L8Kja!
%!P'?r(a1(7!.4tVU$94!\%lWATff(jo.9ue=CqV"JDCIp#X'\3jq3B(cN$]Qa?Ni^@Kp3]#[(,$J/CSn.!%L9$Rcg;LA>m1@1TO[
%S.R3*"5,L9h]dj9$1!Cmo*VW,75N"FiYh5+/l"m)&-90Qi1iF*;]Rh2BHocAW7P0(5W9=RM"I7c-GoC3)0Q@6i_X"`HNWuD$T&pF
%+di=6aO#Ut+kos-P@;X"(sIOB#;N7'M80WjfNuq8nfU5D^g`9Ai#C<K&fr7@OZ&^7_u\^!?\)bG5V&9*D?h*c;\MGLkTWea+97\C
%ej(0.ADg5b1.X]$9G`8uG_n>=(l-XnqlMV&CVL%Ge6m^O!rEB6&lb;=Fp'dYlN1$o?jEb<j:i2m3T'uP9o*IL5U?i(,S2n.0rk:(
%X+9uV7%\HILID]/&==,R9tGu`8dqWuRb\$5-j'0'7C(Zu#[pe'A6nnq'(ddYIaK,lKETEu`,>buc]Q?nau"7gnJ:U'&Al_aJ9>V<
%0"L+pas$:6HO^\4?3+qR[n3HcKY<#f6G.nU!,A2f#cLQDNs7MY,NF<VieY4^6]JnmG**G8(a@ZiVPNORT9=ST:8(B=<#ng-<.R"?
%i!SL9!)O_mS@&VPM'2SR(21.-X>!a4LUnH41LNMa4]L]NA;V]&(pjbUibs\Wi0/_h@3#DdWXb4>RB)7%A>&iK18loM@6qq2Od'hB
%?uV@f^^bqeM'=>`^cppXG`.0)pRjXIL-X?*`(gp?.W?UM@=Sb,q>j0bP9'](;.qf9hGJXH)(Q8<-9T!FGb?HY%#>heWIX!@$lk57
%*,,SP=<9C+iNIda7"pCpGXa0u;.5'TQV2uG7LOU?j"AN82?bsG!GS,q?3-]sJ228_F6Zq?XB=H/[NQnt=f2Vt7Le=4ap?]UMP-%s
%KRa5+%`,?aJeSBVNe$C'Cm"j-;ugCS!rB*\as00-9GYQ!),U`?9Rr*JaQtL4dnhb&1m(eh46^J[YrjKB$mJB;"_%LhVAVW"'baG9
%Vo>(I"h)ULcKYCeU@AXNm'eg2WHL0r5UMSXkQD5@'-Jq27Q^B4"_n)MWrsbI8V-[<<U`,%#m:W=+(Ugr4MA`#/$qfk-[,QK"a1rD
%a/34e!clYk$46tcr!rR'JNJBQ2Y\+lI55QoaD-k5l5I"c5_+HLl$*S\->\mP-\.J`$SFW?!C7<;46D'J0?IG!9u@\Y*C,OCH8+"1
%H'oEVBS-25l2oHZ3o?$dAnC^gO*$hc4[)CWM,OT0pdcBV"*br'RGfe+W!EGngFYg94Dof%L.hoT6ijp*g&T\:7pj-?FG8uZ[$)D#
%8Xqj,MkEE%e/,?5"?ID_W:N'?(80Uf0:<c&@m%_:@d-/eI3AY0emPs2/!i,dcnEgW(fuJN?:VO1/!WY]UWNm@%c'&d$\85]L$"6_
%:gbb;[K4[*Y_AKs_5U@k%+Zc4^n.ON(0#g+PWlD5/[/\06"dMJ&M".>'@1c+;Bo"SKCo:h'"#JcOI)X"7!ak!CBiJs'b#8W/:8'%
%Hk%DDIKqs;TUIL51@n9IJ/*Te)X6"6L?^%FU)+[qWf4aQ8`S6e.O+.?j"pgPY:>ER&C1]$\CKS8Ja>9oPXp-(\g,r:<Wu#;dPO6@
%TlpTe%Z&&**\hst.mUT+#qQ&ZSM5TS8Y=Mc,HD!W5t9";^4i%-C*`C9LOe=t7"3X<dC;S]"LVQdJW-O\7Nj.RntI<DCSV/t?A7)g
%YpB^I)lOo\8BDWh1jEG2m'?jV%d8coXp2lhrDCoQJL$gjFH'7>A8Gd)_k[uUL]ZkBBLebA.3Tb2U^EdE75BSuR1g&&=^\qU0l9ZR
%f+S!X;%F47M3MH%P)=!*llcXP-CE3KPVk3g&Z'ae&X&"u.&%PtE>V?C(*GN_eCJoW"d'O+7g7aq$,mFZ$gO87O[,?Ue%0h18.foB
%TN/:EFHd=<Ebo&jC,kT9Y;<j-D(7Ba6(K=n7@)%Dk;42YD?(TRPT$e5+uRl%<RAniEF>s55sZ5-``Eb4!oc/k<.G*s17LRc0HJqb
%E%(!@P6a&O\^do'Nat$bc('=<1^jSg0smli(\=GD92+=f&W5eSi_f]=KTo?7'Q@bWW2Ll5GCsDJ1FQ4kMTQ/BdJt(-*KJ&+cqjmU
%)up:D;H/o\*DR7WU&_HNg(1bI5hST#@+#TD*!s#s934&VE4^tq3*mdl7an=]*^hJcQ]u-A>2r')6q<Z"8#ZjQJ9WF5&V4G3W#Z90
%?:EU?"G2_SX[j!m)euaU4LJu`ldoaRhA5Tlj>WaW!qh-TUS5Hn.u#"YS/a50J<36t'DXZ)J7,1)r!cb\:4=6^:r.0R:cB&E>'8OD
%ACbEj,"7^Uc#seg/Pfn!#_Z.->;334dW[g;g*0ofLh!%QL+Qen!jZKaRY%[cZUT!X/J;T09`6_`'V%-37-H$Qm1rVk[N'IK:E.Y&
%YSA9sCdZ4!hEAdl*mF\P&-u*t>%D_&j*Dt:@,Dj5(X*`7(/IY8\tKEu15%1er#7e6P4eP_R6R?(o%"a.Rj>A(GuI38=pA-'PNB[Q
%j:F_b?uL$7__]J(68&oV"5@>fHIpQG!g$N$NoaJWP[>Ak(bFCkL;#,@/PHcT?lB(V8IQIC^<SNp!4n^jk('D*fn>:m<&=*2iIh=\
%Va0augkW*IFU?RZ-.J@a]gr9RJ\nT02C<`QOdK^#`hB)VDBp84"`sui3#f1-$j@*p<(EFZU>M.`,:#9['n;=S!:YPbHI+.h<?=\2
%7G\'7,_Q(aS<geroUXM6_FP>2`nG:j:]`[#G:eU*RmX#W`<l%X(m7_pK.nW2YAAGRH%`ei<Y*OaG(A42&Jl<**bb(H/SGB_C_]A$
%>KB`BSM1ZB)DE5Zoh:ra'@"oFNBSE'^mTfrq$8Gi2usW9+,*3/f4d_Y[$nh$5t"-X#ne1nL57Pch@Pmh-SXn=>mV5(6-;K)!r%le
%;C$IR/QoHl@F^K9#^8uI6,=`Vcm#6&c5bgNDn"YZ@](W\Xg_8/_3#OCn)S7bd-Y9r%US*#&^UQX$J6[]q]$pc659BbBE=mgPapkc
%.O\g/P'ICV/L)jAK9pF:N^=CLisbH^`g--dg)R8PgeBQp()?eo+IL:@8cEmR-B[=1&]&4%+HYE6C^2@\7d;6PWJ[.E`5^g?o]H4?
%OrjehaeAQ@ke%7WOt]^!L96Es(7NffNMF'38qs5D_1F3UqgsNB$>_/[kbO(ZitKe7JOhAVD2(4VSY@6_`6@qmOF`"i6JY6KR7JS4
%f/DF/(>O(`^*lNqRt?52?'s9=+S6E,hh3<.2baGbGMn!B+NUW4oEV11\9p`_e#S#3+98*[QW0+O$<gr\H8)^Zi+u7ljST_>U&iuD
%J.:2Fa:gO4onGF;YpC+9A/3jH8^Se!/[P3bL:U?EC`A#e.&ckabBEA$"k`tB@eO#>*/UU7R#FU&=/E1H#%,5l(er]C9*tr0i\)'S
%Z<M.PZJ/'eKKTVSBb1JU_Vo$Vpq4S4E)N]r!?mX[iP\#nFDr3'XWEAefmF*W^e)3!,n5!0i]&8hda7St[$YY9?'c86ZhP.e"VH$3
%`Kjm4*,^@\0;6!Ei+:0PQb\2>VJ`^dlq'^>^VuMcBs4:acXoh[T0+$.(0eXj(EPZB(lfWJ[+UTD/;L-ZOS+oU`e[<sKPhf*k;4Pj
%;NSZO0G[kZ%hV7J9s18l'o8eSg0=qOQR]tP9s0.E/"U#;-<F(J&Yhn.P:N?5Mu5@H<s#3?&;*ntY%5"$d)U#[\<92K0(c!/>RAHG
%76.l@6@K;JYdkBmdK&TTRk(2i9G:]*#rWiD'1p8U9'@OqfT!k7'QdOJ:6n<C9:g*d,q8'Li&M#E@W!uD%G33<EKrklp0/FbUZe)R
%=;"a"4?:h0/.""-5kM<`P"MDp?*matMXFo3k;uh0NhLM<FIOXD?]os9XMpB"HQpLb`KQ&/%l,HTYb59BkkCa+_I*?h9>=Oc_ZaW9
%pQR:]<k1T4=4%5KX:%tC7qWaNl7CI+'-Sct_4fefkg%RdamkZJ0aA45^P5EU7@toJ&b'TA1$q7_>i$+irBWkL_0BI5#SJT,Z6`J@
%kX0!;(@rWR2FkcAl"@A$)B<T("kWV%Y76nY#$Xf=1@q=d4V<8HNs&u+RG?CNfE^tM@U+k."c7V&.O?Y0b^L[:+t-EJ[4sP2J453X
%BaA+TM/G6pbZPM6Zia:W:sl4\fE%Ap,!*sKk2f"7$o8!e^E$p:h.Ej^GsWh5^WeY=X>\61cnIXLNX=/C^bd3lJN:pS%>f]-,>9mY
%"Vm;6314Bo0G5UFC[_@_UrXr65TB`tr$BMU&W')r7uT)l#^9\N/E[?:2B#s)M"2`9BGg3"D+t%EUDHL4XN)4b'<GT2cl[eCCsnD%
%8u\Ig-AeFjUI@*N6+[S^M,GbgSj8I#ibsmU@BtDWM-8E*1mr'Jp:Pqgo;H$MM_eQkU!&+>op,V&Wuh_.NmZ5N:D&%k:;"OCRJ+:q
%(<>_,2A69q:DTIT[Z,.$kl`rmm*">jGG(P\k4E9W(AA_bF+!it9:u&.qf-\2Xm\GHm[JS"q>1%-Hp>sMhB%,-F'h/X*Cm!a+N@9%
%+(o+$ir:VPN$LW=-2CCbM+WdjKrLnU0m')iD2Hb30dbth?nr#k)BH@M$@1P\$DfG4B_PP:.+LTp$C&4kWA_D$k[C:'UP#/Y9I66L
%D[NjZi)2rF6QeOC')@UrJX$N%VNZ9\]M[o;2%3YR6_`WS&@34:Ot`&M8rj+`+G\qeaHG!?n1thbds:)?a3W+lKlKHpF_-RNML]1b
%ZAc.&S@C'6b`F/l`bA&hr<4OB_4Enf+AEK&4)&@^>tN(-,!lj/%0?\b9pHS"'"T`9R6'K\TVk>Gd4b`]0d<:SerqfEM>h.4>GaY+
%#ujMPU#(&g'f8`.Cjm.aj"WRa.0a\jX2QApn!L#/R7Nd^Yn/=Ojt]8ne9FPp;#uWZngrBZ7)^->,T/u9.QJ.B4K>>V$YccfHUaJI
%j.92n'm'uGP;3b&_%&7f37'MKmuUiG&e!kpllg'3/@SEpCc"c!5d6u"0&p82jAJiX6lQfA0Y*!u?\pe`AWS1'BoR4.)2ePF[0g+N
%Hq(Z,PdREo?mU)t<V/-LP&mM8fhDfEmAD[e/t&0?3!Y?3,8Os`OBg_eaTr)7S<.:>bK5PE5Fd[KE3^,h%YA=Kd_RN'N-O:($-=Q7
%2^&4\LQdi3IQ88X_3^@J&UZP]+XQQj@m!;an"9&/?K-g7:c%hmSRW?B*<dYc#@nFDL5P&;61`1=UQ[,>_R(]E"YrS%l^AG'cufkt
%b1uV=o-@2Y(/K@Z/FQ#;ECnNP9Mih&(pshLKDCN2l<4n$#L%=Q.?-g8cmcgPN!lG"M-9]AAMuL&*LW$<LA:9D-*3;I0@PF'lg.E;
%0:E)#3]I.qFG[kkp_#W,:aF-%Y_ki"0T92@XCb;0=o'PD)Q7=3W(,K[Kp"V7%'3hVOAeP*1*p9OJ!*6[ruZ7[hc?7_$Hocp`$:8i
%VfbZf7DoqfbD"j<PXg_,-_2R7_g&siFH#%5R,BJ=MING0Z];h_`V?$oeg_g/AtX-Q=0JDdl5\eEg5+t.##\J%rDY#VSpW4K,n06>
%A65E#Nu2K;E[#/G<)?=K/5@DUPE5eFT`C:lLn5s!1(#]fF:,:Rk`!LLaG>A\:h_;1^jn<\)agt.03C&7C;;8u+p&dU%EN^#"CsEX
%8Of:ojOfp:BEtJ=1a\+j+T,!^0lIMbU+q?>];-i'c4tCkFiH6P/@QbqW$/F'''0AV<(/$7"_6b0-k4FXbuA0*<+MjN*i$$$hHHJH
%:p@<O7:\tVBS?No'c[3ZWhL*!P;)K#H$C6R<d90UG&XUSO_PA:a^^JM,q+u/"IU4k+@9G>&XHEMC'%$bFO09fJrhF<NWrlf+9;hi
%rU0RWlWg`#/j2?,^5d[=&,<.,T9',":1j^5is<.PQofiWBoESE,]>@>KE6/<-.gAC;)Mm#R":8=&Zdk;0V+UE;G]d@HBFh-3#pWh
%qHsdBAOp96*_%=PK*EUt,f_*S7#%QR[U*8@(99o00;[o*U``JL"0NHr5WEc-YEYt0!->k7Q6n.j`!R<>H&63bWlU?S!+_\&=Pd^'
%f`oVN,m=)X#m[`HJ/j4lnj?\>Y`TSJA;Eh.ba7:[er81<\eWTqW^rS0d$X'QF`8Oe1("hVEsh,CPQ_[*0L^[.+]K*R4L$i0d"9KO
%2c\Y+1*L0(29XZ!f<.Nh8IQS9Q50JY5D[:!4G#i<E1`ip\Yg1ThAeS9<K.ZdD(tem"dLC".TkoPpnC*sJ>a#e":"ZG/4$+K$4R.E
%c;J:1!OKFa.YSO$Z,8O=cm,_3#]277r/aJf"\F;#Go93N5A3/^`&qaUa&=;1d&b+X(Dm@<?DQ\SP5!qp3MmWO']mo6'o/U`%Au15
%NC\%S4TT41gG"NjX-+`+?3QX]8@]BH\T^l;>A4h:ikPj$Bb`<$POS>(3T6$/3Lm$O8Ha.OFPhXC-`'iT9MU+0JK/rlYqMA0+bqcf
%N:g$tQ)(^m:eLQEF\Kn5$@BRM6.Hd0,6V8W6kpS#Q:kViO=,V#UBU0P8SS3gP_MbD'[]Bc#:$^JL,LQj.9OP;;EDD73tn%//>Gu>
%2l/Fk#eYa\#Sbce=dB7lFAWG.>11S2:9%,g*FOjpg6=kEhT-#-$A1tD:`'VEZ1?ct[7Ol-ZY8P7#%=eN7EnDF@4dl6cZ0@`HrOQ?
%:os4(gSfo;1]s%INiE^bpqWn6R8b`.WJt"PJuC"P?:ifmQl8>HWJF-U==E](#qc0i&mC%-JR9Y=JJrF'JiVL$ZD0`C%Hdh4;_\r?
%$?5K"!7\+:mPTRGr;/u,S\ZID`TT(?KjT?X1gku'Q)%ZC6@LC"=Te-9lRY_cNZJe?A<E)<L'OgXG=h9i@iNes4b["^81!cd(W"7'
%N"@J'=O,,&6=[e<!WpcDiSIBbZW85(L;#?qgh,n3-<Tbl`lfE4nuYSW^\B\K>Cc!(V:*kf;4S">Nt@p-7tQE9*13LWR#_"u@I4">
%Jc$T]!ZB.&p^5>h5W)PLJQ,W+MQ)\ObF"']$I$D>S0#j2m,$,3_GW(8;Cc>lI;P8&JM4G"B0,H,Qc$R,FoY44qt&R#%=4T51CPs=
%^^&a]fXgt,rTMLSI/j--pY>L=J+!?F%r<+c&`'3!<X`bOe^*1b'W5'l%"HHd1Hg#+eG9(Crnm.]CZbmtO$P`%`MNSH03COrFN@*M
%LM'u!*1.]c!9G&$F(c6'Imeu/m+Bqjhc:K:G[DcRKjS6I."7mfq2>+`*g:d"7j+6\lRNNmP49^o%/cDI^Z=bMBSu0.cn+Qks3KKt
%$el0D*jurC_uYDRV#0Z\g$-L_HOp/(R="`.7\dE`$W#nk]:E:.VqZdirqT?+JPXMA'8bFl4nemXPQ5@\~>
%AI9_PrivateDataEnd
\ No newline at end of file
diff --git a/logo/gnocchi-icon.eps b/logo/gnocchi-icon.eps
new file mode 100644
index 0000000000000000000000000000000000000000..2c722cd9f9b07841188001c395589d2d087e96fc
--- /dev/null
+++ b/logo/gnocchi-icon.eps
@@ -0,0 +1,5578 @@
+%!PS-Adobe-3.1 EPSF-3.0
+%ADO_DSC_Encoding: MacOS Roman
+%%Title: gnocchi-icon.eps
+%%Creator: Adobe Illustrator(R) 13.0
+%%For: Thierry Ung
+%%CreationDate: 4/3/17
+%%BoundingBox: 0 0 1096 840
+%%HiResBoundingBox: 0 0 1096 840
+%%CropBox: 0 0 1096 840
+%%LanguageLevel: 2
+%%DocumentData: Clean7Bit
+%ADOBeginClientInjection: DocumentHeader "AI11EPS"
+%%AI8_CreatorVersion: 13.0.0
+%AI9_PrintingDataBegin
+%AI3_Cropmarks: 36.0000 36.0000 1060.0000 804.0000
+%ADO_BuildNumber: Adobe Illustrator(R) 13.0.0 x409 R agm 4.4378 ct 5.1039
+%ADO_ContainsXMP: MainFirst
+%AI7_Thumbnail: 128 100 8
+%%BeginData: 3232 Hex Bytes
[... 3232 hex bytes of AI7 thumbnail image data omitted ...]
%%EndData
+%ADOEndClientInjection: DocumentHeader "AI11EPS"
+%%Pages: 1
+%%DocumentNeededResources: 
+%%DocumentSuppliedResources: procset Adobe_AGM_Image 1.0 0
+%%+ procset Adobe_CoolType_Utility_T42 1.0 0
+%%+ procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
+%%+ procset Adobe_CoolType_Core 2.31 0
+%%+ procset Adobe_AGM_Core 2.0 0
+%%+ procset Adobe_AGM_Utils 1.0 0
+%%DocumentFonts: 
+%%DocumentNeededFonts: 
+%%DocumentNeededFeatures: 
+%%DocumentSuppliedFeatures: 
+%%DocumentProcessColors:  Cyan Magenta Yellow Black
+%%DocumentCustomColors: 
+%%CMYKCustomColor: 
+%%RGBCustomColor: 
+%%EndComments
+                                                                                                                                                                                                                              
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+%%BeginDefaults
+%%ViewingOrientation: 1 0 0 1
+%%EndDefaults
+%%BeginProlog
+%%BeginResource: procset Adobe_AGM_Utils 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{currentpacking	true setpacking}if
+userdict/Adobe_AGM_Utils 73 dict dup begin put
+/bdf
+{bind def}bind def
+/nd{null def}bdf
+/xdf
+{exch def}bdf
+/ldf 
+{load def}bdf
+/ddf
+{put}bdf	
+/xddf
+{3 -1 roll put}bdf	
+/xpt
+{exch put}bdf
+/ndf
+{
+	exch dup where{
+		pop pop pop
+	}{
+		xdf
+	}ifelse
+}def
+/cdndf
+{
+	exch dup currentdict exch known{
+		pop pop
+	}{
+		exch def
+	}ifelse
+}def
+/gx
+{get exec}bdf
+/ps_level
+	/languagelevel where{
+		pop systemdict/languagelevel gx
+	}{
+		1
+	}ifelse
+def
+/level2 
+	ps_level 2 ge
+def
+/level3 
+	ps_level 3 ge
+def
+/ps_version
+	{version cvr}stopped{-1}if
+def
+/set_gvm
+{currentglobal exch setglobal}bdf
+/reset_gvm
+{setglobal}bdf
+/makereadonlyarray
+{
+	/packedarray where{pop packedarray
+	}{
+		array astore readonly}ifelse
+}bdf
+/map_reserved_ink_name
+{
+	dup type/stringtype eq{
+		dup/Red eq{
+			pop(_Red_)
+		}{
+			dup/Green eq{
+				pop(_Green_)
+			}{
+				dup/Blue eq{
+					pop(_Blue_)
+				}{
+					dup()cvn eq{
+						pop(Process)
+					}if
+				}ifelse
+			}ifelse
+		}ifelse
+	}if
+}bdf
+/AGMUTIL_GSTATE 22 dict def
+/get_gstate
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_clr_spc currentcolorspace def
+	/AGMUTIL_GSTATE_clr_indx 0 def
+	/AGMUTIL_GSTATE_clr_comps 12 array def
+	mark currentcolor counttomark
+		{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 3 -1 roll put
+		/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 add def}repeat pop
+	/AGMUTIL_GSTATE_fnt rootfont def
+	/AGMUTIL_GSTATE_lw currentlinewidth def
+	/AGMUTIL_GSTATE_lc currentlinecap def
+	/AGMUTIL_GSTATE_lj currentlinejoin def
+	/AGMUTIL_GSTATE_ml currentmiterlimit def
+	currentdash/AGMUTIL_GSTATE_do xdf/AGMUTIL_GSTATE_da xdf
+	/AGMUTIL_GSTATE_sa currentstrokeadjust def
+	/AGMUTIL_GSTATE_clr_rnd currentcolorrendering def
+	/AGMUTIL_GSTATE_op currentoverprint def
+	/AGMUTIL_GSTATE_bg currentblackgeneration cvlit def
+	/AGMUTIL_GSTATE_ucr currentundercolorremoval cvlit def
+	currentcolortransfer cvlit/AGMUTIL_GSTATE_gy_xfer xdf cvlit/AGMUTIL_GSTATE_b_xfer xdf
+		cvlit/AGMUTIL_GSTATE_g_xfer xdf cvlit/AGMUTIL_GSTATE_r_xfer xdf
+	/AGMUTIL_GSTATE_ht currenthalftone def
+	/AGMUTIL_GSTATE_flt currentflat def
+	end
+}def
+/set_gstate
+{
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_clr_spc setcolorspace
+	AGMUTIL_GSTATE_clr_indx{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 1 sub get
+	/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 sub def}repeat setcolor
+	AGMUTIL_GSTATE_fnt setfont
+	AGMUTIL_GSTATE_lw setlinewidth
+	AGMUTIL_GSTATE_lc setlinecap
+	AGMUTIL_GSTATE_lj setlinejoin
+	AGMUTIL_GSTATE_ml setmiterlimit
+	AGMUTIL_GSTATE_da AGMUTIL_GSTATE_do setdash
+	AGMUTIL_GSTATE_sa setstrokeadjust
+	AGMUTIL_GSTATE_clr_rnd setcolorrendering
+	AGMUTIL_GSTATE_op setoverprint
+	AGMUTIL_GSTATE_bg cvx setblackgeneration
+	AGMUTIL_GSTATE_ucr cvx setundercolorremoval
+	AGMUTIL_GSTATE_r_xfer cvx AGMUTIL_GSTATE_g_xfer cvx AGMUTIL_GSTATE_b_xfer cvx
+		AGMUTIL_GSTATE_gy_xfer cvx setcolortransfer
+	AGMUTIL_GSTATE_ht/HalftoneType get dup 9 eq exch 100 eq or
+		{
+		currenthalftone/HalftoneType get AGMUTIL_GSTATE_ht/HalftoneType get ne
+			{
+			 mark AGMUTIL_GSTATE_ht{sethalftone}stopped cleartomark
+			}if
+		}{
+		AGMUTIL_GSTATE_ht sethalftone
+		}ifelse
+	AGMUTIL_GSTATE_flt setflat
+	end
+}def
+/get_gstate_and_matrix
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_ctm matrix currentmatrix def
+	end
+	get_gstate
+}def
+/set_gstate_and_matrix
+{
+	set_gstate
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_ctm setmatrix
+	end
+}def
+/AGMUTIL_str256 256 string def
+/AGMUTIL_src256 256 string def
+/AGMUTIL_dst64 64 string def
+/AGMUTIL_srcLen nd
+/AGMUTIL_ndx nd
+/AGMUTIL_cpd nd
+/capture_cpd{
+	//Adobe_AGM_Utils/AGMUTIL_cpd currentpagedevice ddf
+}def
+/thold_halftone
+{
+	level3
+		{sethalftone currenthalftone}
+		{
+			dup/HalftoneType get 3 eq
+			{
+				sethalftone currenthalftone
+			}{
+				begin
+				Width Height mul{
+					Thresholds read{pop}if
+				}repeat
+				end
+				currenthalftone
+			}ifelse
+		}ifelse
+}def 
+/rdcmntline
+{
+	currentfile AGMUTIL_str256 readline pop
+	(%)anchorsearch{pop}if
+}bdf
+/filter_cmyk
+{	
+	dup type/filetype ne{
+		exch()/SubFileDecode filter
+	}{
+		exch pop
+	}
+	ifelse
+	[
+	exch
+	{
+		AGMUTIL_src256 readstring pop
+		dup length/AGMUTIL_srcLen exch def
+		/AGMUTIL_ndx 0 def
+		AGMCORE_plate_ndx 4 AGMUTIL_srcLen 1 sub{
+			1 index exch get
+			AGMUTIL_dst64 AGMUTIL_ndx 3 -1 roll put
+			/AGMUTIL_ndx AGMUTIL_ndx 1 add def
+		}for
+		pop
+		AGMUTIL_dst64 0 AGMUTIL_ndx getinterval
+	}
+	bind
+	/exec cvx
+	]cvx
+}bdf
+/filter_indexed_devn
+{
+	cvi Names length mul names_index add Lookup exch get
+}bdf
+/filter_devn
+{	
+	4 dict begin
+	/srcStr xdf
+	/dstStr xdf
+	dup type/filetype ne{
+		0()/SubFileDecode filter
+	}if
+	[
+	exch
+		[
+			/devicen_colorspace_dict/AGMCORE_gget cvx/begin cvx
+			currentdict/srcStr get/readstring cvx/pop cvx
+			/dup cvx/length cvx 0/gt cvx[
+				Adobe_AGM_Utils/AGMUTIL_ndx 0/ddf cvx
+				names_index Names length currentdict/srcStr get length 1 sub{
+					1/index cvx/exch cvx/get cvx
+					currentdict/dstStr get/AGMUTIL_ndx/load cvx 3 -1/roll cvx/put cvx
+					Adobe_AGM_Utils/AGMUTIL_ndx/AGMUTIL_ndx/load cvx 1/add cvx/ddf cvx
+				}for
+				currentdict/dstStr get 0/AGMUTIL_ndx/load cvx/getinterval cvx
+			]cvx/if cvx
+			/end cvx
+		]cvx
+		bind
+		/exec cvx
+	]cvx
+	end
+}bdf
+/AGMUTIL_imagefile nd
+/read_image_file
+{
+	AGMUTIL_imagefile 0 setfileposition
+	10 dict begin
+	/imageDict xdf
+	/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+	/imbufIdx 0 def
+	/origDataSource imageDict/DataSource get def
+	/origMultipleDataSources imageDict/MultipleDataSources get def
+	/origDecode imageDict/Decode get def
+	/dstDataStr imageDict/Width get colorSpaceElemCnt mul string def
+	imageDict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+	{
+		/imbufCnt imageDict/DataSource get length def
+		/imbufs imbufCnt array def
+		0 1 imbufCnt 1 sub{
+			/imbufIdx xdf
+			imbufs imbufIdx imbufLen string put
+			imageDict/DataSource get imbufIdx[AGMUTIL_imagefile imbufs imbufIdx get/readstring cvx/pop cvx]cvx put
+		}for
+		DeviceN_PS2{
+			imageDict begin
+		 	/DataSource[DataSource/devn_sep_datasource cvx]cvx def
+			/MultipleDataSources false def
+			/Decode[0 1]def
+			end
+		}if
+	}{
+		/imbuf imbufLen string def
+		Indexed_DeviceN level3 not and DeviceN_NoneName or{
+			/srcDataStrs[imageDict begin
+				currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+				{
+					Width Decode length 2 div mul cvi string
+				}repeat
+				end]def		
+			imageDict begin
+		 	/DataSource[AGMUTIL_imagefile Decode BitsPerComponent false 1/filter_indexed_devn load dstDataStr srcDataStrs devn_alt_datasource/exec cvx]cvx def
+			/Decode[0 1]def
+			end
+		}{
+			imageDict/DataSource[1 string dup 0 AGMUTIL_imagefile Decode length 2 idiv string/readstring cvx/pop cvx names_index/get cvx/put cvx]cvx put
+			imageDict/Decode[0 1]put
+		}ifelse
+	}ifelse
+	imageDict exch
+	load exec
+	imageDict/DataSource origDataSource put
+	imageDict/MultipleDataSources origMultipleDataSources put
+	imageDict/Decode origDecode put	
+	end
+}bdf
+/write_image_file
+{
+	begin
+	{(AGMUTIL_imagefile)(w+)file}stopped{
+		false
+	}{
+		Adobe_AGM_Utils/AGMUTIL_imagefile xddf 
+		2 dict begin
+		/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+		MultipleDataSources{DataSource 0 get}{DataSource}ifelse type/filetype eq{
+			/imbuf imbufLen string def
+		}if
+		1 1 Height MultipleDataSources not{Decode length 2 idiv mul}if{
+			pop
+			MultipleDataSources{
+			 	0 1 DataSource length 1 sub{
+					DataSource type dup
+					/arraytype eq{
+						pop DataSource exch gx
+					}{
+						/filetype eq{
+							DataSource exch get imbuf readstring pop
+						}{
+							DataSource exch get
+						}ifelse
+					}ifelse
+					AGMUTIL_imagefile exch writestring
+				}for
+			}{
+				DataSource type dup
+				/arraytype eq{
+					pop DataSource exec
+				}{
+					/filetype eq{
+						DataSource imbuf readstring pop
+					}{
+						DataSource
+					}ifelse
+				}ifelse
+				AGMUTIL_imagefile exch writestring
+			}ifelse
+		}for
+		end
+		true
+	}ifelse
+	end
+}bdf
+/close_image_file
+{
+	AGMUTIL_imagefile closefile(AGMUTIL_imagefile)deletefile
+}def
+statusdict/product known userdict/AGMP_current_show known not and{
+	/pstr statusdict/product get def
+	pstr(HP LaserJet 2200)eq 	
+	pstr(HP LaserJet 4000 Series)eq or
+	pstr(HP LaserJet 4050 Series )eq or
+	pstr(HP LaserJet 8000 Series)eq or
+	pstr(HP LaserJet 8100 Series)eq or
+	pstr(HP LaserJet 8150 Series)eq or
+	pstr(HP LaserJet 5000 Series)eq or
+	pstr(HP LaserJet 5100 Series)eq or
+	pstr(HP Color LaserJet 4500)eq or
+	pstr(HP Color LaserJet 4600)eq or
+	pstr(HP LaserJet 5Si)eq or
+	pstr(HP LaserJet 1200 Series)eq or
+	pstr(HP LaserJet 1300 Series)eq or
+	pstr(HP LaserJet 4100 Series)eq or 
+	{
+ 		userdict/AGMP_current_show/show load put
+		userdict/show{
+		 currentcolorspace 0 get
+		 /Pattern eq
+		 {false charpath f}
+		 {AGMP_current_show}ifelse
+		}put
+	}if
+	currentdict/pstr undef
+}if
+/consumeimagedata
+{
+	begin
+	AGMIMG_init_common
+	currentdict/MultipleDataSources known not
+		{/MultipleDataSources false def}if
+	MultipleDataSources
+		{
+		DataSource 0 get type
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width cvi string def
+			1 1 Height cvi
+				{
+				pop
+				0 1 DataSource length 1 sub
+					{
+					DataSource exch get
+					flushbuffer readstring pop pop
+					}for
+				}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or DataSource 0 get xcheck and
+			{
+			Width Height mul cvi
+				{
+				0 1 DataSource length 1 sub
+					{dup DataSource exch gx length exch 0 ne{pop}if}for
+				dup 0 eq
+					{pop exit}if
+				sub dup 0 le
+					{exit}if
+				}loop
+			pop
+			}if		
+		}
+		{
+		/DataSource load type 
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width Decode length 2 idiv mul cvi string def
+			1 1 Height{pop DataSource flushbuffer readstring pop pop}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or/DataSource load xcheck and
+			{
+				Height Width BitsPerComponent mul 8 BitsPerComponent sub add 8 idiv Decode length 2 idiv mul mul
+					{
+					DataSource length dup 0 eq
+						{pop exit}if
+					sub dup 0 le
+						{exit}if
+					}loop
+				pop
+			}if
+		}ifelse
+	end
+}bdf
+/addprocs
+{
+	 2{/exec load}repeat
+	 3 1 roll
+	 [5 1 roll]bind cvx
+}def
+/modify_halftone_xfer
+{
+	currenthalftone dup length dict copy begin
+	 currentdict 2 index known{
+	 	1 index load dup length dict copy begin
+		currentdict/TransferFunction known{
+			/TransferFunction load
+		}{
+			currenttransfer
+		}ifelse
+		 addprocs/TransferFunction xdf 
+		 currentdict end def
+		currentdict end sethalftone
+	}{
+		currentdict/TransferFunction known{
+			/TransferFunction load 
+		}{
+			currenttransfer
+		}ifelse
+		addprocs/TransferFunction xdf
+		currentdict end sethalftone		
+		pop
+	}ifelse
+}def
+/clonearray
+{
+	dup xcheck exch
+	dup length array exch
+	Adobe_AGM_Core/AGMCORE_tmp -1 ddf 
+	{
+	Adobe_AGM_Core/AGMCORE_tmp 2 copy get 1 add ddf 
+	dup type/dicttype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get
+			exch
+			clonedict
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	dup type/arraytype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get exch
+			clonearray
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	exch dup
+	Adobe_AGM_Core/AGMCORE_tmp get 4 -1 roll put
+	}forall
+	exch{cvx}if
+}bdf
+/clonedict
+{
+	dup length dict
+	begin
+	{
+		dup type/dicttype eq
+			{clonedict}if
+		dup type/arraytype eq
+			{clonearray}if
+		def
+	}forall
+	currentdict
+	end
+}bdf
+/DeviceN_PS2
+{
+	/currentcolorspace AGMCORE_gget 0 get/DeviceN eq level3 not and
+}bdf
+/Indexed_DeviceN
+{
+	/indexed_colorspace_dict AGMCORE_gget dup null ne{
+		dup/CSDBase known{
+			/CSDBase get/CSD get_res/Names known 
+		}{
+			pop false
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}bdf
+/DeviceN_NoneName
+{	
+	/Names where{
+		pop
+		false Names
+		{
+			(None)eq or
+		}forall
+	}{
+		false
+	}ifelse
+}bdf
+/DeviceN_PS2_inRip_seps
+{
+	/AGMCORE_in_rip_sep where
+	{
+		pop dup type dup/arraytype eq exch/packedarraytype eq or
+		{
+			dup 0 get/DeviceN eq level3 not and AGMCORE_in_rip_sep and
+			{
+				/currentcolorspace exch AGMCORE_gput
+				false
+			}{
+				true
+			}ifelse
+		}{
+			true
+		}ifelse
+	}{
+		true
+	}ifelse
+}bdf
+/base_colorspace_type
+{
+	dup type/arraytype eq{0 get}if
+}bdf
+/currentdistillerparams where{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+{
+	/pdfmark_5{cleartomark}bind def
+}{
+	/pdfmark_5{pdfmark}bind def
+}ifelse
+/ReadBypdfmark_5
+{
+	currentfile exch 0 exch/SubFileDecode filter
+	/currentdistillerparams where 
+	{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+	{flushfile cleartomark}
+	{/PUT pdfmark}ifelse 	
+}bdf
+/xpdfm
+{
+	{
+		dup 0 get/Label eq
+		{
+			aload length[exch 1 add 1 roll/PAGELABEL
+		}{
+			aload pop
+			[{ThisPage}<<5 -2 roll>>/PUT
+		}ifelse
+		pdfmark_5
+	}forall
+}bdf
+/ds{
+	Adobe_AGM_Utils begin
+}bdf
+/dt{
+	currentdict Adobe_AGM_Utils eq{
+		end
+	}if
+}bdf
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_AGM_Core 2.0 0
+%%Version: 2.0 0
+%%Copyright: Copyright(C)1997-2007 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
+userdict/Adobe_AGM_Core 209 dict dup begin put
+/Adobe_AGM_Core_Id/Adobe_AGM_Core_2.0_0 def
+/AGMCORE_str256 256 string def
+/AGMCORE_save nd
+/AGMCORE_graphicsave nd
+/AGMCORE_c 0 def
+/AGMCORE_m 0 def
+/AGMCORE_y 0 def
+/AGMCORE_k 0 def
+/AGMCORE_cmykbuf 4 array def
+/AGMCORE_screen[currentscreen]cvx def
+/AGMCORE_tmp 0 def
+/AGMCORE_&setgray nd
+/AGMCORE_&setcolor nd
+/AGMCORE_&setcolorspace nd
+/AGMCORE_&setcmykcolor nd
+/AGMCORE_cyan_plate nd
+/AGMCORE_magenta_plate nd
+/AGMCORE_yellow_plate nd
+/AGMCORE_black_plate nd
+/AGMCORE_plate_ndx nd
+/AGMCORE_get_ink_data nd
+/AGMCORE_is_cmyk_sep nd
+/AGMCORE_host_sep nd
+/AGMCORE_avoid_L2_sep_space nd
+/AGMCORE_distilling nd
+/AGMCORE_composite_job nd
+/AGMCORE_producing_seps nd
+/AGMCORE_ps_level -1 def
+/AGMCORE_ps_version -1 def
+/AGMCORE_environ_ok nd
+/AGMCORE_CSD_cache 0 dict def
+/AGMCORE_currentoverprint false def
+/AGMCORE_deltaX nd
+/AGMCORE_deltaY nd
+/AGMCORE_name nd
+/AGMCORE_sep_special nd
+/AGMCORE_err_strings 4 dict def
+/AGMCORE_cur_err nd
+/AGMCORE_current_spot_alias false def
+/AGMCORE_inverting false def
+/AGMCORE_feature_dictCount nd
+/AGMCORE_feature_opCount nd
+/AGMCORE_feature_ctm nd
+/AGMCORE_ConvertToProcess false def
+/AGMCORE_Default_CTM matrix def
+/AGMCORE_Default_PageSize nd
+/AGMCORE_Default_flatness nd
+/AGMCORE_currentbg nd
+/AGMCORE_currentucr nd
+/AGMCORE_pattern_paint_type 0 def
+/knockout_unitsq nd
+currentglobal true setglobal
+[/CSA/Gradient/Procedure]
+{
+	/Generic/Category findresource dup length dict copy/Category defineresource pop
+}forall
+setglobal
+/AGMCORE_key_known
+{
+	where{
+		/Adobe_AGM_Core_Id known
+	}{
+		false
+	}ifelse
+}ndf
+/flushinput
+{
+	save
+	2 dict begin
+	/CompareBuffer 3 -1 roll def
+	/readbuffer 256 string def
+	mark
+	{
+	currentfile readbuffer{readline}stopped
+		{cleartomark mark}
+		{
+		not
+			{pop exit}
+		if
+		CompareBuffer eq
+			{exit}
+		if
+		}ifelse
+	}loop
+	cleartomark
+	end
+	restore
+}bdf
+/getspotfunction
+{
+	AGMCORE_screen exch pop exch pop
+	dup type/dicttype eq{
+		dup/HalftoneType get 1 eq{
+			/SpotFunction get
+		}{
+			dup/HalftoneType get 2 eq{
+				/GraySpotFunction get
+			}{
+				pop
+				{
+					abs exch abs 2 copy add 1 gt{
+						1 sub dup mul exch 1 sub dup mul add 1 sub
+					}{
+						dup mul exch dup mul add 1 exch sub
+					}ifelse
+				}bind
+			}ifelse
+		}ifelse
+	}if
+}def
+/np
+{newpath}bdf
+/clp_npth
+{clip np}def
+/eoclp_npth
+{eoclip np}def
+/npth_clp
+{np clip}def
+/graphic_setup
+{
+	/AGMCORE_graphicsave save store
+	concat
+	0 setgray
+	0 setlinecap
+	0 setlinejoin
+	1 setlinewidth
+	[]0 setdash
+	10 setmiterlimit
+	np
+	false setoverprint
+	false setstrokeadjust
+	//Adobe_AGM_Core/spot_alias gx
+	/Adobe_AGM_Image where{
+		pop
+		Adobe_AGM_Image/spot_alias 2 copy known{
+			gx
+		}{
+			pop pop
+		}ifelse
+	}if
+	/sep_colorspace_dict null AGMCORE_gput
+	100 dict begin
+	/dictstackcount countdictstack def
+	/showpage{}def
+	mark
+}def
+/graphic_cleanup
+{
+	cleartomark
+	dictstackcount 1 countdictstack 1 sub{end}for
+	end
+	AGMCORE_graphicsave restore
+}def
+/compose_error_msg
+{
+	grestoreall initgraphics	
+	/Helvetica findfont 10 scalefont setfont
+	/AGMCORE_deltaY 100 def
+	/AGMCORE_deltaX 310 def
+	clippath pathbbox np pop pop 36 add exch 36 add exch moveto
+	0 AGMCORE_deltaY rlineto AGMCORE_deltaX 0 rlineto
+	0 AGMCORE_deltaY neg rlineto AGMCORE_deltaX neg 0 rlineto closepath
+	0 AGMCORE_&setgray
+	gsave 1 AGMCORE_&setgray fill grestore 
+	1 setlinewidth gsave stroke grestore
+	currentpoint AGMCORE_deltaY 15 sub add exch 8 add exch moveto
+	/AGMCORE_deltaY 12 def
+	/AGMCORE_tmp 0 def
+	AGMCORE_err_strings exch get
+		{
+		dup 32 eq
+			{
+			pop
+			AGMCORE_str256 0 AGMCORE_tmp getinterval
+			stringwidth pop currentpoint pop add AGMCORE_deltaX 28 add gt
+				{
+				currentpoint AGMCORE_deltaY sub exch pop
+				clippath pathbbox pop pop pop 44 add exch moveto
+				}if
+			AGMCORE_str256 0 AGMCORE_tmp getinterval show( )show
+			0 1 AGMCORE_str256 length 1 sub
+				{
+				AGMCORE_str256 exch 0 put
+				}for
+			/AGMCORE_tmp 0 def
+			}{
+				AGMCORE_str256 exch AGMCORE_tmp xpt
+				/AGMCORE_tmp AGMCORE_tmp 1 add def
+			}ifelse
+		}forall
+}bdf
+/AGMCORE_CMYKDeviceNColorspaces[
+	[/Separation/None/DeviceCMYK{0 0 0}]
+	[/Separation(Black)/DeviceCMYK{0 0 0 4 -1 roll}bind]
+	[/Separation(Yellow)/DeviceCMYK{0 0 3 -1 roll 0}bind]
+	[/DeviceN[(Yellow)(Black)]/DeviceCMYK{0 0 4 2 roll}bind]
+	[/Separation(Magenta)/DeviceCMYK{0 exch 0 0}bind]
+	[/DeviceN[(Magenta)(Black)]/DeviceCMYK{0 3 1 roll 0 exch}bind]
+	[/DeviceN[(Magenta)(Yellow)]/DeviceCMYK{0 3 1 roll 0}bind]
+	[/DeviceN[(Magenta)(Yellow)(Black)]/DeviceCMYK{0 4 1 roll}bind]
+	[/Separation(Cyan)/DeviceCMYK{0 0 0}]
+	[/DeviceN[(Cyan)(Black)]/DeviceCMYK{0 0 3 -1 roll}bind]
+	[/DeviceN[(Cyan)(Yellow)]/DeviceCMYK{0 exch 0}bind]
+	[/DeviceN[(Cyan)(Yellow)(Black)]/DeviceCMYK{0 3 1 roll}bind]
+	[/DeviceN[(Cyan)(Magenta)]/DeviceCMYK{0 0}]
+	[/DeviceN[(Cyan)(Magenta)(Black)]/DeviceCMYK{0 exch}bind]
+	[/DeviceN[(Cyan)(Magenta)(Yellow)]/DeviceCMYK{0}]
+	[/DeviceCMYK]
+]def
+/ds{
+	Adobe_AGM_Core begin
+	/currentdistillerparams where
+		{
+		pop currentdistillerparams/CoreDistVersion get 5000 lt
+			{<</DetectBlends false>>setdistillerparams}if
+		}if	
+	/AGMCORE_ps_version xdf
+	/AGMCORE_ps_level xdf
+	errordict/AGM_handleerror known not{
+		errordict/AGM_handleerror errordict/handleerror get put
+		errordict/handleerror{
+			Adobe_AGM_Core begin
+			$error/newerror get AGMCORE_cur_err null ne and{
+				$error/newerror false put
+				AGMCORE_cur_err compose_error_msg
+			}if
+			$error/newerror true put
+			end
+			errordict/AGM_handleerror get exec
+			}bind put
+		}if
+	/AGMCORE_environ_ok 
+		ps_level AGMCORE_ps_level ge
+		ps_version AGMCORE_ps_version ge and 
+		AGMCORE_ps_level -1 eq or
+	def
+	AGMCORE_environ_ok not
+		{/AGMCORE_cur_err/AGMCORE_bad_environ def}if
+	/AGMCORE_&setgray systemdict/setgray get def
+	level2{
+		/AGMCORE_&setcolor systemdict/setcolor get def
+		/AGMCORE_&setcolorspace systemdict/setcolorspace get def
+	}if
+	/AGMCORE_currentbg currentblackgeneration def
+	/AGMCORE_currentucr currentundercolorremoval def
+	/AGMCORE_Default_flatness currentflat def
+	/AGMCORE_distilling
+		/product where{
+			pop systemdict/setdistillerparams known product(Adobe PostScript Parser)ne and
+		}{
+			false
+		}ifelse
+	def
+	/AGMCORE_GSTATE AGMCORE_key_known not{
+		/AGMCORE_GSTATE 21 dict def
+		/AGMCORE_tmpmatrix matrix def
+		/AGMCORE_gstack 32 array def
+		/AGMCORE_gstackptr 0 def
+		/AGMCORE_gstacksaveptr 0 def
+		/AGMCORE_gstackframekeys 14 def
+		/AGMCORE_&gsave/gsave ldf
+		/AGMCORE_&grestore/grestore ldf
+		/AGMCORE_&grestoreall/grestoreall ldf
+		/AGMCORE_&save/save ldf
+		/AGMCORE_&setoverprint/setoverprint ldf
+		/AGMCORE_gdictcopy{
+			begin
+			{def}forall
+			end
+		}def
+		/AGMCORE_gput{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			3 1 roll
+			put
+		}def
+		/AGMCORE_gget{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			exch
+			get
+		}def
+		/gsave{
+			AGMCORE_&gsave
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/grestore{
+			AGMCORE_&grestore
+			AGMCORE_gstackptr 1 sub
+			dup AGMCORE_gstacksaveptr lt{1 add}if
+			dup AGMCORE_gstack exch get dup/AGMCORE_currentoverprint known
+				{/AGMCORE_currentoverprint get setoverprint}{pop}ifelse
+			/AGMCORE_gstackptr exch store
+		}def
+		/grestoreall{
+			AGMCORE_&grestoreall
+			/AGMCORE_gstackptr AGMCORE_gstacksaveptr store 
+		}def
+		/save{
+			AGMCORE_&save
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			/AGMCORE_gstacksaveptr AGMCORE_gstackptr store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/setoverprint{
+			dup/AGMCORE_currentoverprint exch AGMCORE_gput AGMCORE_&setoverprint
+		}def	
+		0 1 AGMCORE_gstack length 1 sub{
+				AGMCORE_gstack exch AGMCORE_gstackframekeys dict put
+		}for
+	}if
+	level3/AGMCORE_&sysshfill AGMCORE_key_known not and
+	{
+		/AGMCORE_&sysshfill systemdict/shfill get def
+		/AGMCORE_&sysmakepattern systemdict/makepattern get def
+		/AGMCORE_&usrmakepattern/makepattern load def
+	}if
+	/currentcmykcolor[0 0 0 0]AGMCORE_gput
+	/currentstrokeadjust false AGMCORE_gput
+	/currentcolorspace[/DeviceGray]AGMCORE_gput
+	/sep_tint 0 AGMCORE_gput
+	/devicen_tints[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]AGMCORE_gput
+	/sep_colorspace_dict null AGMCORE_gput
+	/devicen_colorspace_dict null AGMCORE_gput
+	/indexed_colorspace_dict null AGMCORE_gput
+	/currentcolor_intent()AGMCORE_gput
+	/customcolor_tint 1 AGMCORE_gput
+	/absolute_colorimetric_crd null AGMCORE_gput
+	/relative_colorimetric_crd null AGMCORE_gput
+	/saturation_crd null AGMCORE_gput
+	/perceptual_crd null AGMCORE_gput
+	currentcolortransfer cvlit/AGMCore_gray_xfer xdf cvlit/AGMCore_b_xfer xdf
+		 cvlit/AGMCore_g_xfer xdf cvlit/AGMCore_r_xfer xdf
+	<<
+	/MaxPatternItem currentsystemparams/MaxPatternCache get
+	>>
+	setuserparams
+	end
+}def
+/ps
+{
+	/setcmykcolor where{
+		pop
+		Adobe_AGM_Core/AGMCORE_&setcmykcolor/setcmykcolor load put
+	}if
+	Adobe_AGM_Core begin
+	/setcmykcolor
+	{
+		4 copy AGMCORE_cmykbuf astore/currentcmykcolor exch AGMCORE_gput
+		1 sub 4 1 roll
+		3{
+			3 index add neg dup 0 lt{
+				pop 0
+			}if
+			3 1 roll
+		}repeat
+		setrgbcolor pop
+	}ndf
+	/currentcmykcolor
+	{
+		/currentcmykcolor AGMCORE_gget aload pop
+	}ndf
+	/setoverprint
+	{pop}ndf
+	/currentoverprint
+	{false}ndf
+	/AGMCORE_cyan_plate 1 0 0 0 test_cmyk_color_plate def
+	/AGMCORE_magenta_plate 0 1 0 0 test_cmyk_color_plate def
+	/AGMCORE_yellow_plate 0 0 1 0 test_cmyk_color_plate def
+	/AGMCORE_black_plate 0 0 0 1 test_cmyk_color_plate def
+	/AGMCORE_plate_ndx 
+		AGMCORE_cyan_plate{
+			0
+		}{
+			AGMCORE_magenta_plate{
+				1
+			}{
+				AGMCORE_yellow_plate{
+					2
+				}{
+					AGMCORE_black_plate{
+						3
+					}{
+						4
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		def
+	/AGMCORE_have_reported_unsupported_color_space false def
+	/AGMCORE_report_unsupported_color_space
+	{
+		AGMCORE_have_reported_unsupported_color_space false eq
+		{
+			(Warning: Job contains content that cannot be separated with on-host methods. This content appears on the black plate, and knocks out all other plates.)==
+			Adobe_AGM_Core/AGMCORE_have_reported_unsupported_color_space true ddf
+		}if
+	}def
+	/AGMCORE_composite_job
+		AGMCORE_cyan_plate AGMCORE_magenta_plate and AGMCORE_yellow_plate and AGMCORE_black_plate and def
+	/AGMCORE_in_rip_sep
+		/AGMCORE_in_rip_sep where{
+			pop AGMCORE_in_rip_sep
+		}{
+			AGMCORE_distilling 
+			{
+				false
+			}{
+				userdict/Adobe_AGM_OnHost_Seps known{
+					false
+				}{
+					level2{
+						currentpagedevice/Separations 2 copy known{
+							get
+						}{
+							pop pop false
+						}ifelse
+					}{
+						false
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	def
+	/AGMCORE_producing_seps AGMCORE_composite_job not AGMCORE_in_rip_sep or def
+	/AGMCORE_host_sep AGMCORE_producing_seps AGMCORE_in_rip_sep not and def
+	/AGM_preserve_spots 
+		/AGM_preserve_spots where{
+			pop AGM_preserve_spots
+		}{
+			AGMCORE_distilling AGMCORE_producing_seps or
+		}ifelse
+	def
+	/AGM_is_distiller_preserving_spotimages
+	{
+		currentdistillerparams/PreserveOverprintSettings known
+		{
+			currentdistillerparams/PreserveOverprintSettings get
+				{
+					currentdistillerparams/ColorConversionStrategy known
+					{
+						currentdistillerparams/ColorConversionStrategy get
+						/sRGB ne
+					}{
+						true
+					}ifelse
+				}{
+					false
+				}ifelse
+		}{
+			false
+		}ifelse
+	}def
+	/convert_spot_to_process where{pop}{
+		/convert_spot_to_process
+		{
+			//Adobe_AGM_Core begin
+			dup map_alias{
+				/Name get exch pop
+			}if
+			dup dup(None)eq exch(All)eq or
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+					gsave
+					1 0 0 0 setcmykcolor currentgray 1 exch sub
+					0 1 0 0 setcmykcolor currentgray 1 exch sub
+					0 0 1 0 setcmykcolor currentgray 1 exch sub
+					0 0 0 1 setcmykcolor currentgray 1 exch sub
+					add add add 0 eq
+					{
+						pop false
+					}{
+						false setoverprint
+						current_spot_alias false set_spot_alias
+						1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+						set_spot_alias
+						currentgray 1 ne
+					}ifelse
+					grestore
+				}{
+					AGMCORE_distilling
+					{
+						pop AGM_is_distiller_preserving_spotimages not
+					}{
+						//Adobe_AGM_Core/AGMCORE_name xddf
+						false
+						//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 0 eq
+						AGMUTIL_cpd/OverrideSeparations known and
+						{
+							AGMUTIL_cpd/OverrideSeparations get
+							{
+								/HqnSpots/ProcSet resourcestatus
+								{
+									pop pop pop true
+								}if
+							}if
+						}if					
+						{
+							AGMCORE_name/HqnSpots/ProcSet findresource/TestSpot gx not
+						}{
+							gsave
+							[/Separation AGMCORE_name/DeviceGray{}]AGMCORE_&setcolorspace
+							false
+							AGMUTIL_cpd/SeparationColorNames 2 copy known
+							{
+								get
+								{AGMCORE_name eq or}forall
+								not
+							}{
+								pop pop pop true
+							}ifelse
+							grestore
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			end
+		}def
+	}ifelse
+	/convert_to_process where{pop}{
+		/convert_to_process
+		{
+			dup length 0 eq
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+				dup true exch
+					{
+					dup(Cyan)eq exch
+					dup(Magenta)eq 3 -1 roll or exch
+					dup(Yellow)eq 3 -1 roll or exch
+					dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process and}ifelse
+					}
+				forall
+					{
+					true exch
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						(Black)eq or and
+						}forall
+						not
+					}{pop false}ifelse
+				}{
+				false exch
+					{
+					/PhotoshopDuotoneList where{pop false}{true}ifelse
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process or}ifelse
+						}
+						{
+						convert_spot_to_process or
+						}
+					ifelse
+					}
+				forall
+				}ifelse
+			}ifelse
+		}def
+	}ifelse	
+	/AGMCORE_avoid_L2_sep_space 
+		version cvr 2012 lt 
+		level2 and 
+		AGMCORE_producing_seps not and
+	def
+	/AGMCORE_is_cmyk_sep
+		AGMCORE_cyan_plate AGMCORE_magenta_plate or AGMCORE_yellow_plate or AGMCORE_black_plate or
+	def
+	/AGM_avoid_0_cmyk where{
+		pop AGM_avoid_0_cmyk
+	}{
+		AGM_preserve_spots 
+		userdict/Adobe_AGM_OnHost_Seps known 
+		userdict/Adobe_AGM_InRip_Seps known or
+		not and
+	}ifelse
+	{
+		/setcmykcolor[
+			{
+				4 copy add add add 0 eq currentoverprint and{
+					pop 0.0005
+				}if
+			}/exec cvx
+			/AGMCORE_&setcmykcolor load dup type/operatortype ne{
+				/exec cvx
+			}if
+		]cvx def
+	}if
+	/AGMCORE_IsSeparationAProcessColor
+		{
+		dup(Cyan)eq exch dup(Magenta)eq exch dup(Yellow)eq exch(Black)eq or or or
+		}def
+	AGMCORE_host_sep{
+		/setcolortransfer
+		{
+			AGMCORE_cyan_plate{
+				pop pop pop
+			}{
+			 	AGMCORE_magenta_plate{
+			 		4 3 roll pop pop pop
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			4 2 roll pop pop pop
+			 		}{
+			 			4 1 roll pop pop pop
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+			settransfer 
+		}	
+		def
+		/AGMCORE_get_ink_data
+			AGMCORE_cyan_plate{
+				{pop pop pop}
+			}{
+			 	AGMCORE_magenta_plate{
+			 		{4 3 roll pop pop pop}
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			{4 2 roll pop pop pop}
+			 		}{
+			 			{4 1 roll pop pop pop}
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+		def
+		/AGMCORE_RemoveProcessColorNames
+			{
+			1 dict begin
+			/filtername
+				{
+				dup/Cyan eq 1 index(Cyan)eq or
+					{pop(_cyan_)}if
+				dup/Magenta eq 1 index(Magenta)eq or
+					{pop(_magenta_)}if
+				dup/Yellow eq 1 index(Yellow)eq or
+					{pop(_yellow_)}if
+				dup/Black eq 1 index(Black)eq or
+					{pop(_black_)}if
+				}def
+			dup type/arraytype eq
+				{[exch{filtername}forall]}
+				{filtername}ifelse
+			end
+			}def
+		level3{
+			/AGMCORE_IsCurrentColor
+				{
+				dup AGMCORE_IsSeparationAProcessColor
+					{
+					AGMCORE_plate_ndx 0 eq
+						{dup(Cyan)eq exch/Cyan eq or}if
+					AGMCORE_plate_ndx 1 eq
+						{dup(Magenta)eq exch/Magenta eq or}if
+					AGMCORE_plate_ndx 2 eq
+						{dup(Yellow)eq exch/Yellow eq or}if
+					AGMCORE_plate_ndx 3 eq
+						{dup(Black)eq exch/Black eq or}if
+					AGMCORE_plate_ndx 4 eq
+						{pop false}if
+					}{
+					gsave
+					false setoverprint
+					current_spot_alias false set_spot_alias
+					1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+					set_spot_alias
+					currentgray 1 ne
+					grestore
+					}ifelse
+				}def
+			/AGMCORE_filter_functiondatasource
+				{	
+				5 dict begin
+				/data_in xdf
+				data_in type/stringtype eq
+					{
+					/ncomp xdf
+					/comp xdf
+					/string_out data_in length ncomp idiv string def
+					0 ncomp data_in length 1 sub
+						{
+						string_out exch dup ncomp idiv exch data_in exch ncomp getinterval comp get 255 exch sub put
+						}for
+					string_out
+					}{
+					string/string_in xdf
+					/string_out 1 string def
+					/component xdf
+					[
+					data_in string_in/readstring cvx
+						[component/get cvx 255/exch cvx/sub cvx string_out/exch cvx 0/exch cvx/put cvx string_out]cvx
+						[/pop cvx()]cvx/ifelse cvx
+					]cvx/ReusableStreamDecode filter
+				}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingFunction
+				{
+				2 dict begin
+				/paint? xdf
+				/channel xdf
+				dup type/dicttype eq
+					{
+					begin
+					FunctionType 0 eq
+						{
+						/DataSource channel Range length 2 idiv DataSource AGMCORE_filter_functiondatasource def
+						currentdict/Decode known
+							{/Decode Decode channel 2 mul 2 getinterval def}if
+						paint? not
+							{/Decode[1 1]def}if
+						}if
+					FunctionType 2 eq
+						{
+						paint?
+							{
+							/C0[C0 channel get 1 exch sub]def
+							/C1[C1 channel get 1 exch sub]def
+							}{
+							/C0[1]def
+							/C1[1]def
+							}ifelse			
+						}if
+					FunctionType 3 eq
+						{
+						/Functions[Functions{channel paint? AGMCORE_separateShadingFunction}forall]def			
+						}if
+					currentdict/Range known
+						{/Range[0 1]def}if
+					currentdict
+					end}{
+					channel get 0 paint? AGMCORE_separateShadingFunction
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShading
+				{
+				3 -1 roll begin
+				currentdict/Function known
+					{
+					currentdict/Background known
+						{[1 index{Background 3 index get 1 exch sub}{1}ifelse]/Background xdf}if
+					Function 3 1 roll AGMCORE_separateShadingFunction/Function xdf
+					/ColorSpace[/DeviceGray]def
+					}{
+					ColorSpace dup type/arraytype eq{0 get}if/DeviceCMYK eq
+						{
+						/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+						}{
+						ColorSpace dup 1 get AGMCORE_RemoveProcessColorNames 1 exch put
+						}ifelse
+					ColorSpace 0 get/Separation eq
+						{
+							{
+								[1/exch cvx/sub cvx]cvx
+							}{
+								[/pop cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll put
+							pop
+						}{
+							{
+								[exch ColorSpace 1 get length 1 sub exch sub/index cvx 1/exch cvx/sub cvx ColorSpace 1 get length 1 add 1/roll cvx ColorSpace 1 get length{/pop cvx}repeat]cvx
+							}{
+								pop[ColorSpace 1 get length{/pop cvx}repeat cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll bind put
+						}ifelse
+					ColorSpace 2/DeviceGray put																		
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingDict
+				{
+				dup/ColorSpace get
+				dup type/arraytype ne
+					{[exch]}if
+				dup 0 get/DeviceCMYK eq
+					{
+					exch begin 
+					currentdict
+					AGMCORE_cyan_plate
+						{0 true}if
+					AGMCORE_magenta_plate
+						{1 true}if
+					AGMCORE_yellow_plate
+						{2 true}if
+					AGMCORE_black_plate
+						{3 true}if
+					AGMCORE_plate_ndx 4 eq
+						{0 false}if		
+					dup not currentoverprint and
+						{/AGMCORE_ignoreshade true def}if
+					AGMCORE_separateShading
+					currentdict
+					end exch
+					}if
+				dup 0 get/Separation eq
+					{
+					exch begin
+					ColorSpace 1 get dup/None ne exch/All ne and
+						{
+						ColorSpace 1 get AGMCORE_IsCurrentColor AGMCORE_plate_ndx 4 lt and ColorSpace 1 get AGMCORE_IsSeparationAProcessColor not and
+							{
+							ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+								{
+								/ColorSpace
+									[
+									/Separation
+									ColorSpace 1 get
+									/DeviceGray
+										[
+										ColorSpace 3 get/exec cvx
+										4 AGMCORE_plate_ndx sub -1/roll cvx
+										4 1/roll cvx
+										3[/pop cvx]cvx/repeat cvx
+										1/exch cvx/sub cvx
+										]cvx									
+									]def
+								}{
+								AGMCORE_report_unsupported_color_space
+								AGMCORE_black_plate not
+									{
+									currentdict 0 false AGMCORE_separateShading
+									}if
+								}ifelse
+							}{
+							currentdict ColorSpace 1 get AGMCORE_IsCurrentColor
+							0 exch 
+							dup not currentoverprint and
+								{/AGMCORE_ignoreshade true def}if
+							AGMCORE_separateShading
+							}ifelse	
+						}if			
+					currentdict
+					end exch
+					}if
+				dup 0 get/DeviceN eq
+					{
+					exch begin
+					ColorSpace 1 get convert_to_process
+						{
+						ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+							{
+							/ColorSpace
+								[
+								/DeviceN
+								ColorSpace 1 get
+								/DeviceGray
+									[
+									ColorSpace 3 get/exec cvx
+									4 AGMCORE_plate_ndx sub -1/roll cvx
+									4 1/roll cvx
+									3[/pop cvx]cvx/repeat cvx
+									1/exch cvx/sub cvx
+									]cvx									
+								]def
+							}{
+							AGMCORE_report_unsupported_color_space
+							AGMCORE_black_plate not
+								{
+								currentdict 0 false AGMCORE_separateShading
+								/ColorSpace[/DeviceGray]def
+								}if
+							}ifelse
+						}{
+						currentdict
+						false -1 ColorSpace 1 get
+							{
+							AGMCORE_IsCurrentColor
+								{
+								1 add
+								exch pop true exch exit
+								}if
+							1 add
+							}forall
+						exch 
+						dup not currentoverprint and
+							{/AGMCORE_ignoreshade true def}if
+						AGMCORE_separateShading
+						}ifelse
+					currentdict
+					end exch
+					}if
+				dup 0 get dup/DeviceCMYK eq exch dup/Separation eq exch/DeviceN eq or or not
+					{
+					exch begin
+					ColorSpace dup type/arraytype eq
+						{0 get}if
+					/DeviceGray ne
+						{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate not
+							{
+							ColorSpace 0 get/CIEBasedA eq
+								{
+								/ColorSpace[/Separation/_ciebaseda_/DeviceGray{}]def
+								}if
+							ColorSpace 0 get dup/CIEBasedABC eq exch dup/CIEBasedDEF eq exch/DeviceRGB eq or or
+								{
+								/ColorSpace[/DeviceN[/_red_/_green_/_blue_]/DeviceRGB{}]def
+								}if
+							ColorSpace 0 get/CIEBasedDEFG eq
+								{
+								/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+								}if
+							currentdict 0 false AGMCORE_separateShading
+							}if
+						}if
+					currentdict
+					end exch
+					}if
+				pop
+				dup/AGMCORE_ignoreshade known
+					{
+					begin
+					/ColorSpace[/Separation(None)/DeviceGray{}]def
+					currentdict end
+					}if
+				}def
+			/shfill
+				{
+				AGMCORE_separateShadingDict 
+				dup/AGMCORE_ignoreshade known
+					{pop}
+					{AGMCORE_&sysshfill}ifelse
+				}def
+			/makepattern
+				{
+				exch
+				dup/PatternType get 2 eq
+					{
+					clonedict
+					begin
+					/Shading Shading AGMCORE_separateShadingDict def
+					Shading/AGMCORE_ignoreshade known
+					currentdict end exch
+					{pop<</PatternType 1/PaintProc{pop}/BBox[0 0 1 1]/XStep 1/YStep 1/PaintType 1/TilingType 3>>}if
+					exch AGMCORE_&sysmakepattern
+					}{
+					exch AGMCORE_&usrmakepattern
+					}ifelse
+				}def
+		}if
+	}if
+	AGMCORE_in_rip_sep{
+		/setcustomcolor
+		{
+			exch aload pop
+			dup 7 1 roll inRip_spot_has_ink not	{
+				4{4 index mul 4 1 roll}
+				repeat
+				/DeviceCMYK setcolorspace
+				6 -2 roll pop pop
+			}{
+				//Adobe_AGM_Core begin
+					/AGMCORE_k xdf/AGMCORE_y xdf/AGMCORE_m xdf/AGMCORE_c xdf
+				end
+				[/Separation 4 -1 roll/DeviceCMYK
+				{dup AGMCORE_c mul exch dup AGMCORE_m mul exch dup AGMCORE_y mul exch AGMCORE_k mul}
+				]
+				setcolorspace
+			}ifelse
+			setcolor
+		}ndf
+		/setseparationgray
+		{
+			[/Separation(All)/DeviceGray{}]setcolorspace_opt
+			1 exch sub setcolor
+		}ndf
+	}{
+		/setseparationgray
+		{
+			AGMCORE_&setgray
+		}ndf
+	}ifelse
+	/findcmykcustomcolor
+	{
+		5 makereadonlyarray
+	}ndf
+	/setcustomcolor
+	{
+		exch aload pop pop
+		4{4 index mul 4 1 roll}repeat
+		setcmykcolor pop
+	}ndf
+	/has_color
+		/colorimage where{
+			AGMCORE_producing_seps{
+				pop true
+			}{
+				systemdict eq
+			}ifelse
+		}{
+			false
+		}ifelse
+	def
+	/map_index
+	{
+		1 index mul exch getinterval{255 div}forall
+	}bdf
+	/map_indexed_devn
+	{
+		Lookup Names length 3 -1 roll cvi map_index
+	}bdf
+	/n_color_components
+	{
+		base_colorspace_type
+		dup/DeviceGray eq{
+			pop 1
+		}{
+			/DeviceCMYK eq{
+				4
+			}{
+				3
+			}ifelse
+		}ifelse
+	}bdf
+	level2{
+		/mo/moveto ldf
+		/li/lineto ldf
+		/cv/curveto ldf
+		/knockout_unitsq
+		{
+			1 setgray
+			0 0 1 1 rectfill
+		}def
+		level2/setcolorspace AGMCORE_key_known not and{
+			/AGMCORE_&&&setcolorspace/setcolorspace ldf
+			/AGMCORE_ReplaceMappedColor
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					/AGMCORE_SpotAliasAry2 where{
+						begin
+						dup 0 get dup/Separation eq
+						{
+							pop
+							dup length array copy
+							dup dup 1 get
+							current_spot_alias
+							{
+								dup map_alias
+								{
+									false set_spot_alias
+									dup 1 exch setsepcolorspace
+									true set_spot_alias
+									begin
+									/sep_colorspace_dict currentdict AGMCORE_gput
+									pop pop	pop
+									[
+										/Separation Name 
+										CSA map_csa
+										MappedCSA 
+										/sep_colorspace_proc load
+									]
+									dup Name
+									end
+								}if
+							}if
+							map_reserved_ink_name 1 xpt
+						}{
+							/DeviceN eq 
+							{
+								dup length array copy
+								dup dup 1 get[
+									exch{
+										current_spot_alias{
+											dup map_alias{
+												/Name get exch pop
+											}if
+										}if
+										map_reserved_ink_name
+									}forall 
+								]1 xpt
+							}if
+						}ifelse
+						end
+					}if
+				}if
+			}def
+			/setcolorspace
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					dup 0 get/Indexed eq
+					{
+						AGMCORE_distilling
+						{
+							/PhotoshopDuotoneList where
+							{
+								pop false
+							}{
+								true
+							}ifelse
+						}{
+							true
+						}ifelse
+						{
+							aload pop 3 -1 roll
+							AGMCORE_ReplaceMappedColor
+							3 1 roll 4 array astore
+						}if
+					}{
+						AGMCORE_ReplaceMappedColor
+					}ifelse
+				}if
+				DeviceN_PS2_inRip_seps{AGMCORE_&&&setcolorspace}if
+			}def
+		}if	
+	}{
+		/adj
+		{
+			currentstrokeadjust{
+				transform
+				0.25 sub round 0.25 add exch
+				0.25 sub round 0.25 add exch
+				itransform
+			}if
+		}def
+		/mo{
+			adj moveto
+		}def
+		/li{
+			adj lineto
+		}def
+		/cv{
+			6 2 roll adj
+			6 2 roll adj
+			6 2 roll adj curveto
+		}def
+		/knockout_unitsq
+		{
+			1 setgray
+			8 8 1[8 0 0 8 0 0]{<ffffffffffffffff>}image
+		}def
+		/currentstrokeadjust{
+			/currentstrokeadjust AGMCORE_gget
+		}def
+		/setstrokeadjust{
+			/currentstrokeadjust exch AGMCORE_gput
+		}def
+		/setcolorspace
+		{
+			/currentcolorspace exch AGMCORE_gput
+		}def
+		/currentcolorspace
+		{
+			/currentcolorspace AGMCORE_gget
+		}def
+		/setcolor_devicecolor
+		{
+			base_colorspace_type
+			dup/DeviceGray eq{
+				pop setgray
+			}{
+				/DeviceCMYK eq{
+					setcmykcolor
+				}{
+					setrgbcolor
+				}ifelse
+			}ifelse
+		}def
+		/setcolor
+		{
+			currentcolorspace 0 get
+			dup/DeviceGray ne{
+				dup/DeviceCMYK ne{
+					dup/DeviceRGB ne{
+						dup/Separation eq{
+							pop
+							currentcolorspace 3 gx
+							currentcolorspace 2 get
+						}{
+							dup/Indexed eq{
+								pop
+								currentcolorspace 3 get dup type/stringtype eq{
+									currentcolorspace 1 get n_color_components
+									3 -1 roll map_index
+								}{
+									exec
+								}ifelse
+								currentcolorspace 1 get
+							}{
+								/AGMCORE_cur_err/AGMCORE_invalid_color_space def
+								AGMCORE_invalid_color_space
+							}ifelse
+						}ifelse
+					}if
+				}if
+			}if
+			setcolor_devicecolor
+		}def
+	}ifelse
+	/sop/setoverprint ldf
+	/lw/setlinewidth ldf
+	/lc/setlinecap ldf
+	/lj/setlinejoin ldf
+	/ml/setmiterlimit ldf
+	/dsh/setdash ldf
+	/sadj/setstrokeadjust ldf
+	/gry/setgray ldf
+	/rgb/setrgbcolor ldf
+	/cmyk[
+		/currentcolorspace[/DeviceCMYK]/AGMCORE_gput cvx
+		/setcmykcolor load dup type/operatortype ne{/exec cvx}if
+	]cvx bdf
+	level3 AGMCORE_host_sep not and{
+		/nzopmsc{
+			6 dict begin
+			/kk exch def
+			/yy exch def
+			/mm exch def
+			/cc exch def
+			/sum 0 def
+			cc 0 ne{/sum sum 2#1000 or def cc}if
+			mm 0 ne{/sum sum 2#0100 or def mm}if
+			yy 0 ne{/sum sum 2#0010 or def yy}if
+			kk 0 ne{/sum sum 2#0001 or def kk}if
+			AGMCORE_CMYKDeviceNColorspaces sum get setcolorspace
+			sum 0 eq{0}if
+			end
+			setcolor
+		}bdf
+	}{
+		/nzopmsc/cmyk ldf
+	}ifelse
+	/sep/setsepcolor ldf
+	/devn/setdevicencolor ldf
+	/idx/setindexedcolor ldf
+	/colr/setcolor ldf
+	/csacrd/set_csa_crd ldf
+	/sepcs/setsepcolorspace ldf
+	/devncs/setdevicencolorspace ldf
+	/idxcs/setindexedcolorspace ldf
+	/cp/closepath ldf
+	/clp/clp_npth ldf
+	/eclp/eoclp_npth ldf
+	/f/fill ldf
+	/ef/eofill ldf
+	/@/stroke ldf
+	/nclp/npth_clp ldf
+	/gset/graphic_setup ldf
+	/gcln/graphic_cleanup ldf
+	/ct/concat ldf
+	/cf/currentfile ldf
+	/fl/filter ldf
+	/rs/readstring ldf
+	/AGMCORE_def_ht currenthalftone def
+	/clonedict Adobe_AGM_Utils begin/clonedict load end def
+	/clonearray Adobe_AGM_Utils begin/clonearray load end def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
+	/getrampcolor
+	{
+		/indx exch def
+		0 1 NumComp 1 sub
+		{
+			dup
+			Samples exch get
+			dup type/stringtype eq{indx get}if
+			exch
+			Scaling exch get aload pop
+			3 1 roll
+			mul add
+		}for
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse
+	}bdf
+	/sssetbackground{
+		aload pop 
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse	
+	}bdf
+	/RadialShade
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/r2 xdf
+		/c2y xdf
+		/c2x xdf
+		/r1 xdf
+		/c1y xdf
+		/c1x xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		c1x c2x eq
+		{
+			c1y c2y lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope c2y c1y sub c2x c1x sub div def
+			/theta slope 1 atan def
+			c2x c1x lt c2y c1y ge and{/theta theta 180 sub def}if
+			c2x c1x lt c2y c1y lt and{/theta theta 180 add def}if
+		}ifelse
+		gsave
+		clippath
+		c1x c1y translate
+		theta rotate
+		-90 rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax xdf
+		/xMax xdf
+		/yMin xdf
+		/xMin xdf
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			/max{2 copy gt{pop}{exch pop}ifelse}bdf
+			/min{2 copy lt{pop}{exch pop}ifelse}bdf
+			rampdict begin
+			40 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			c1x c1y translate
+			theta rotate
+			-90 rotate
+			/c2y c1x c2x sub dup mul c1y c2y sub dup mul add sqrt def
+			/c1y 0 def
+			/c1x 0 def
+			/c2x 0 def
+			ext0
+			{
+				0 getrampcolor
+				c2y r2 add r1 sub 0.0001 lt
+				{
+					c1x c1y r1 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2y r1 add r2 le
+					{
+						c1x c1y r1 0 360 arc
+						fill
+					}
+					{
+						c2x c2y r2 0 360 arc fill
+						r1 r2 eq
+						{
+							/p1x r1 neg def
+							/p1y c1y def
+							/p2x r1 def
+							/p2y c1y def
+							p1x p1y moveto p2x p2y lineto p2x yMin lineto p1x yMin lineto
+							fill
+						}{
+							/AA r2 r1 sub c2y div def
+							AA -1 eq
+							{/theta 89.99 def}
+							{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+							ifelse
+							/SS1 90 theta add dup sin exch cos div def
+							/p1x r1 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+							/p1y p1x SS1 div neg def
+							/SS2 90 theta sub dup sin exch cos div def
+							/p2x r1 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+							/p2y p2x SS2 div neg def
+							r1 r2 gt
+							{
+								/L1maxX p1x yMin p1y sub SS1 div add def
+								/L2maxX p2x yMin p2y sub SS2 div add def
+							}{
+								/L1maxX 0 def
+								/L2maxX 0 def
+							}ifelse
+							p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+							L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+							fill
+						}ifelse
+					}ifelse
+				}ifelse
+			}if
+		c1x c2x sub dup mul
+		c1y c2y sub dup mul
+		add 0.5 exp
+		0 dtransform
+		dup mul exch dup mul add 0.5 exp 72 div
+		0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		1 index 1 index lt{exch}if pop
+		/hires xdf
+		hires mul
+		/numpix xdf
+		/numsteps NumSamples def
+		/rampIndxInc 1 def
+		/subsampling false def
+		numpix 0 ne
+		{
+			NumSamples numpix div 0.5 gt
+			{
+				/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+				/rampIndxInc NumSamples 1 sub numsteps div def
+				/subsampling true def
+			}if
+		}if
+		/xInc c2x c1x sub numsteps div def
+		/yInc c2y c1y sub numsteps div def
+		/rInc r2 r1 sub numsteps div def
+		/cx c1x def
+		/cy c1y def
+		/radius r1 def
+		np
+		xInc 0 eq yInc 0 eq rInc 0 eq and and
+		{
+			0 getrampcolor
+			cx cy radius 0 360 arc
+			stroke
+			NumSamples 1 sub getrampcolor
+			cx cy radius 72 hires div add 0 360 arc
+			0 setlinewidth
+			stroke
+		}{
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				cx cy radius 0 360 arc
+				/cx cx xInc add def
+				/cy cy yInc add def
+				/radius radius rInc add def
+				cx cy radius 360 0 arcn
+				eofill
+				rampIndxInc add
+			}repeat
+			pop
+		}ifelse
+		ext1
+		{
+			c2y r2 add r1 lt
+			{
+				c2x c2y r2 0 360 arc
+				fill
+			}{
+				c2y r1 add r2 sub 0.0001 le
+				{
+					c2x c2y r2 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2x c2y r2 0 360 arc fill
+					r1 r2 eq
+					{
+						/p1x r2 neg def
+						/p1y c2y def
+						/p2x r2 def
+						/p2y c2y def
+						p1x p1y moveto p2x p2y lineto p2x yMax lineto p1x yMax lineto
+						fill
+					}{
+						/AA r2 r1 sub c2y div def
+						AA -1 eq
+						{/theta 89.99 def}
+						{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+						ifelse
+						/SS1 90 theta add dup sin exch cos div def
+						/p1x r2 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+						/p1y c2y p1x SS1 div sub def
+						/SS2 90 theta sub dup sin exch cos div def
+						/p2x r2 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+						/p2y c2y p2x SS2 div sub def
+						r1 r2 lt
+						{
+							/L1maxX p1x yMax p1y sub SS1 div add def
+							/L2maxX p2x yMax p2y sub SS2 div add def
+						}{
+							/L1maxX 0 def
+							/L2maxX 0 def
+						}ifelse
+						p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+						L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+						fill
+					}ifelse
+				}ifelse
+			}ifelse
+		}if
+		grestore
+		grestore
+		end
+		end
+		end
+		}ifelse
+	}bdf
+	/GenStrips
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/y2 xdf
+		/x2 xdf
+		/y1 xdf
+		/x1 xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		x1 x2 eq
+		{
+			y1 y2 lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope y2 y1 sub x2 x1 sub div def
+			/theta slope 1 atan def
+			x2 x1 lt y2 y1 ge and{/theta theta 180 sub def}if
+			x2 x1 lt y2 y1 lt and{/theta theta 180 add def}if
+		}
+		ifelse
+		gsave
+		clippath
+		x1 y1 translate
+		theta rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax exch def
+		/xMax exch def
+		/yMin exch def
+		/xMin exch def
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			rampdict begin
+			20 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			x1 y1 translate
+			theta rotate
+			/xStart 0 def
+			/xEnd x2 x1 sub dup mul y2 y1 sub dup mul add 0.5 exp def
+			/ySpan yMax yMin sub def
+			/numsteps NumSamples def
+			/rampIndxInc 1 def
+			/subsampling false def
+			xStart 0 transform
+			xEnd 0 transform
+			3 -1 roll
+			sub dup mul
+			3 1 roll
+			sub dup mul
+			add 0.5 exp 72 div
+			0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			1 index 1 index lt{exch}if pop
+			mul
+			/numpix xdf
+			numpix 0 ne
+			{
+				NumSamples numpix div 0.5 gt
+				{
+					/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+					/rampIndxInc NumSamples 1 sub numsteps div def
+					/subsampling true def
+				}if
+			}if
+			ext0
+			{
+				0 getrampcolor
+				xMin xStart lt
+				{
+					xMin yMin xMin neg ySpan rectfill
+				}if
+			}if
+			/xInc xEnd xStart sub numsteps div def
+			/x xStart def
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				x yMin xInc ySpan rectfill
+				/x x xInc add def
+				rampIndxInc add
+			}repeat
+			pop
+			ext1{
+				xMax xEnd gt
+				{
+					xEnd yMin xMax xEnd sub ySpan rectfill
+				}if
+			}if
+			grestore
+			grestore
+			end
+			end
+			end
+		}ifelse
+	}bdf
+}def
+/pt
+{
+	end
+}def
+/dt{
+}def
+/pgsv{
+	//Adobe_AGM_Core/AGMCORE_save save put
+}def
+/pgrs{
+	//Adobe_AGM_Core/AGMCORE_save get restore
+}def
+systemdict/findcolorrendering known{
+	/findcolorrendering systemdict/findcolorrendering get def
+}if
+systemdict/setcolorrendering known{
+	/setcolorrendering systemdict/setcolorrendering get def
+}if
+/test_cmyk_color_plate
+{
+	gsave
+	setcmykcolor currentgray 1 ne
+	grestore
+}def
+/inRip_spot_has_ink
+{
+	dup//Adobe_AGM_Core/AGMCORE_name xddf
+	convert_spot_to_process not
+}def
+/map255_to_range
+{
+	1 index sub
+	3 -1 roll 255 div mul add
+}def
+/set_csa_crd
+{
+	/sep_colorspace_dict null AGMCORE_gput
+	begin
+		CSA get_csa_by_name setcolorspace_opt
+		set_crd
+	end
+}
+def
+/map_csa
+{
+	currentdict/MappedCSA known{MappedCSA null ne}{false}ifelse
+	{pop}{get_csa_by_name/MappedCSA xdf}ifelse
+}def
+/setsepcolor
+{
+	/sep_colorspace_dict AGMCORE_gget begin
+		dup/sep_tint exch AGMCORE_gput
+		TintProc
+	end
+}def
+/setdevicencolor
+{
+	/devicen_colorspace_dict AGMCORE_gget begin
+		Names length copy
+		Names length 1 sub -1 0
+		{
+			/devicen_tints AGMCORE_gget 3 1 roll xpt
+		}for
+		TintProc
+	end
+}def
+/sep_colorspace_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	currentdict/Components known{
+		Components aload pop 
+		TintMethod/Lab eq{
+			2{AGMCORE_tmp mul NComponents 1 roll}repeat
+			LMax sub AGMCORE_tmp mul LMax add NComponents 1 roll
+		}{
+			TintMethod/Subtractive eq{
+				NComponents{
+					AGMCORE_tmp mul NComponents 1 roll
+				}repeat
+			}{
+				NComponents{
+					1 sub AGMCORE_tmp mul 1 add NComponents 1 roll
+				}repeat
+			}ifelse
+		}ifelse
+	}{
+		ColorLookup AGMCORE_tmp ColorLookup length 1 sub mul round cvi get
+		aload pop
+	}ifelse
+	end
+}def
+/sep_colorspace_gray_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	GrayLookup AGMCORE_tmp GrayLookup length 1 sub mul round cvi get
+	end
+}def
+/sep_proc_name
+{
+	dup 0 get 
+	dup/DeviceRGB eq exch/DeviceCMYK eq or level2 not and has_color not and{
+		pop[/DeviceGray]
+		/sep_colorspace_gray_proc
+	}{
+		/sep_colorspace_proc
+	}ifelse
+}def
+/setsepcolorspace
+{
+	current_spot_alias{
+		dup begin
+			Name map_alias{
+				exch pop
+			}if
+		end
+	}if
+	dup/sep_colorspace_dict exch AGMCORE_gput
+	begin
+	CSA map_csa
+	/AGMCORE_sep_special Name dup()eq exch(All)eq or store
+	AGMCORE_avoid_L2_sep_space{
+		[/Indexed MappedCSA sep_proc_name 255 exch 
+			{255 div}/exec cvx 3 -1 roll[4 1 roll load/exec cvx]cvx 
+		]setcolorspace_opt
+		/TintProc{
+			255 mul round cvi setcolor
+		}bdf
+	}{
+		MappedCSA 0 get/DeviceCMYK eq 
+		currentdict/Components known and 
+		AGMCORE_sep_special not and{
+			/TintProc[
+				Components aload pop Name findcmykcustomcolor 
+				/exch cvx/setcustomcolor cvx
+			]cvx bdf
+		}{
+ 			AGMCORE_host_sep Name(All)eq and{
+ 				/TintProc{
+					1 exch sub setseparationgray 
+				}bdf
+ 			}{
+				AGMCORE_in_rip_sep MappedCSA 0 get/DeviceCMYK eq and 
+				AGMCORE_host_sep or
+				Name()eq and{
+					/TintProc[
+						MappedCSA sep_proc_name exch 0 get/DeviceCMYK eq{
+							cvx/setcmykcolor cvx
+						}{
+							cvx/setgray cvx
+						}ifelse
+					]cvx bdf
+				}{
+					AGMCORE_producing_seps MappedCSA 0 get dup/DeviceCMYK eq exch/DeviceGray eq or and AGMCORE_sep_special not and{
+	 					/TintProc[
+							/dup cvx
+							MappedCSA sep_proc_name cvx exch
+							0 get/DeviceGray eq{
+								1/exch cvx/sub cvx 0 0 0 4 -1/roll cvx
+							}if
+							/Name cvx/findcmykcustomcolor cvx/exch cvx
+							AGMCORE_host_sep{
+								AGMCORE_is_cmyk_sep
+								/Name cvx 
+								/AGMCORE_IsSeparationAProcessColor load/exec cvx
+								/not cvx/and cvx 
+							}{
+								Name inRip_spot_has_ink not
+							}ifelse
+							[
+		 						/pop cvx 1
+							]cvx/if cvx
+							/setcustomcolor cvx
+						]cvx bdf
+ 					}{
+						/TintProc{setcolor}bdf
+						[/Separation Name MappedCSA sep_proc_name load]setcolorspace_opt
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	}ifelse
+	set_crd
+	setsepcolor
+	end
+}def
+/additive_blend
+{
+ 	3 dict begin
+ 	/numarrays xdf
+ 	/numcolors xdf
+ 	0 1 numcolors 1 sub
+ 		{
+ 		/c1 xdf
+ 		1
+ 		0 1 numarrays 1 sub
+ 			{
+			1 exch add/index cvx
+ 			c1/get cvx/mul cvx
+ 			}for
+ 		numarrays 1 add 1/roll cvx 
+ 		}for
+ 	numarrays[/pop cvx]cvx/repeat cvx
+ 	end
+}def
+/subtractive_blend
+{
+	3 dict begin
+	/numarrays xdf
+	/numcolors xdf
+	0 1 numcolors 1 sub
+		{
+		/c1 xdf
+		1 1
+		0 1 numarrays 1 sub
+			{
+			1 3 3 -1 roll add/index cvx 
+			c1/get cvx/sub cvx/mul cvx
+			}for
+		/sub cvx
+		numarrays 1 add 1/roll cvx
+		}for
+	numarrays[/pop cvx]cvx/repeat cvx
+	end
+}def
+/exec_tint_transform
+{
+	/TintProc[
+		/TintTransform cvx/setcolor cvx
+	]cvx bdf
+	MappedCSA setcolorspace_opt
+}bdf
+/devn_makecustomcolor
+{
+	2 dict begin
+	/names_index xdf
+	/Names xdf
+	1 1 1 1 Names names_index get findcmykcustomcolor
+	/devicen_tints AGMCORE_gget names_index get setcustomcolor
+	Names length{pop}repeat
+	end
+}bdf
+/setdevicencolorspace
+{
+	dup/AliasedColorants known{false}{true}ifelse 
+	current_spot_alias and{
+		7 dict begin
+		/names_index 0 def
+		dup/names_len exch/Names get length def
+		/new_names names_len array def
+		/new_LookupTables names_len array def
+		/alias_cnt 0 def
+		dup/Names get
+		{
+			dup map_alias{
+				exch pop
+				dup/ColorLookup known{
+					dup begin
+					new_LookupTables names_index ColorLookup put
+					end
+				}{
+					dup/Components known{
+						dup begin
+						new_LookupTables names_index Components put
+						end
+					}{
+						dup begin
+						new_LookupTables names_index[null null null null]put
+						end
+					}ifelse
+				}ifelse
+				new_names names_index 3 -1 roll/Name get put
+				/alias_cnt alias_cnt 1 add def 
+			}{
+				/name xdf				
+				new_names names_index name put
+				dup/LookupTables known{
+					dup begin
+					new_LookupTables names_index LookupTables names_index get put
+					end
+				}{
+					dup begin
+					new_LookupTables names_index[null null null null]put
+					end
+				}ifelse
+			}ifelse
+			/names_index names_index 1 add def 
+		}forall
+		alias_cnt 0 gt{
+			/AliasedColorants true def
+			/lut_entry_len new_LookupTables 0 get dup length 256 ge{0 get length}{length}ifelse def
+			0 1 names_len 1 sub{
+				/names_index xdf
+				new_LookupTables names_index get dup length 256 ge{0 get length}{length}ifelse lut_entry_len ne{
+					/AliasedColorants false def
+					exit
+				}{
+					new_LookupTables names_index get 0 get null eq{
+						dup/Names get names_index get/name xdf
+						name(Cyan)eq name(Magenta)eq name(Yellow)eq name(Black)eq
+						or or or not{
+							/AliasedColorants false def
+							exit
+						}if
+					}if
+				}ifelse
+			}for
+			lut_entry_len 1 eq{
+				/AliasedColorants false def
+			}if
+			AliasedColorants{
+				dup begin
+				/Names new_names def
+				/LookupTables new_LookupTables def
+				/AliasedColorants true def
+				/NComponents lut_entry_len def
+				/TintMethod NComponents 4 eq{/Subtractive}{/Additive}ifelse def
+				/MappedCSA TintMethod/Additive eq{/DeviceRGB}{/DeviceCMYK}ifelse def
+				currentdict/TTTablesIdx known not{
+					/TTTablesIdx -1 def
+				}if
+				end
+			}if
+		}if
+		end
+	}if
+	dup/devicen_colorspace_dict exch AGMCORE_gput
+	begin
+	currentdict/AliasedColorants known{
+		AliasedColorants
+	}{
+		false
+	}ifelse
+	dup not{
+		CSA map_csa
+	}if
+	/TintTransform load type/nulltype eq or{
+		/TintTransform[
+			0 1 Names length 1 sub
+				{
+				/TTTablesIdx TTTablesIdx 1 add def
+				dup LookupTables exch get dup 0 get null eq
+					{
+					1 index
+					Names exch get
+					dup(Cyan)eq
+						{
+						pop exch
+						LookupTables length exch sub
+						/index cvx
+						0 0 0
+						}
+						{
+						dup(Magenta)eq
+							{
+							pop exch
+							LookupTables length exch sub
+							/index cvx
+							0/exch cvx 0 0
+							}{
+							(Yellow)eq
+								{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 3 -1/roll cvx 0
+								}{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 0 4 -1/roll cvx
+								}ifelse
+							}ifelse
+						}ifelse
+					5 -1/roll cvx/astore cvx
+					}{
+					dup length 1 sub
+					LookupTables length 4 -1 roll sub 1 add
+					/index cvx/mul cvx/round cvx/cvi cvx/get cvx
+					}ifelse
+					Names length TTTablesIdx add 1 add 1/roll cvx
+				}for
+			Names length[/pop cvx]cvx/repeat cvx
+			NComponents Names length
+ 			TintMethod/Subtractive eq
+ 				{
+ 				subtractive_blend
+ 				}{
+ 				additive_blend
+ 				}ifelse
+		]cvx bdf
+	}if
+	AGMCORE_host_sep{
+		Names convert_to_process{
+			exec_tint_transform
+		}
+		{	
+			currentdict/AliasedColorants known{
+				AliasedColorants not
+			}{
+				false
+			}ifelse
+			5 dict begin
+			/AvoidAliasedColorants xdf
+			/painted? false def
+			/names_index 0 def
+			/names_len Names length def
+			AvoidAliasedColorants{
+				/currentspotalias current_spot_alias def
+				false set_spot_alias
+			}if
+			Names{
+				AGMCORE_is_cmyk_sep{
+					dup(Cyan)eq AGMCORE_cyan_plate and exch
+					dup(Magenta)eq AGMCORE_magenta_plate and exch
+					dup(Yellow)eq AGMCORE_yellow_plate and exch
+					(Black)eq AGMCORE_black_plate and or or or{
+						/devicen_colorspace_dict AGMCORE_gget/TintProc[
+							Names names_index/devn_makecustomcolor cvx
+						]cvx ddf
+						/painted? true def
+					}if
+					painted?{exit}if
+				}{
+					0 0 0 0 5 -1 roll findcmykcustomcolor 1 setcustomcolor currentgray 0 eq{
+					/devicen_colorspace_dict AGMCORE_gget/TintProc[
+						Names names_index/devn_makecustomcolor cvx
+					]cvx ddf
+					/painted? true def
+					exit
+					}if
+				}ifelse
+				/names_index names_index 1 add def
+			}forall
+			AvoidAliasedColorants{
+				currentspotalias set_spot_alias
+			}if
+			painted?{
+				/devicen_colorspace_dict AGMCORE_gget/names_index names_index put
+			}{
+				/devicen_colorspace_dict AGMCORE_gget/TintProc[
+					names_len[/pop cvx]cvx/repeat cvx 1/setseparationgray cvx
+ 					0 0 0 0/setcmykcolor cvx
+				]cvx ddf
+			}ifelse
+			end
+		}ifelse
+	}
+	{
+		AGMCORE_in_rip_sep{
+			Names convert_to_process not
+		}{
+			level3
+		}ifelse
+		{
+			[/DeviceN Names MappedCSA/TintTransform load]setcolorspace_opt
+			/TintProc level3 not AGMCORE_in_rip_sep and{
+				[
+					Names/length cvx[/pop cvx]cvx/repeat cvx
+				]cvx bdf
+			}{
+				{setcolor}bdf
+			}ifelse
+		}{
+			exec_tint_transform
+		}ifelse
+	}ifelse
+	set_crd
+	/AliasedColorants false def
+	end
+}def
+/setindexedcolorspace
+{
+	dup/indexed_colorspace_dict exch AGMCORE_gput
+	begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				currentdict devncs
+			}{
+				1 currentdict sepcs
+			}ifelse
+			AGMCORE_host_sep{
+				4 dict begin
+				/compCnt/Names where{pop Names length}{1}ifelse def
+				/NewLookup HiVal 1 add string def
+				0 1 HiVal{
+					/tableIndex xdf
+					Lookup dup type/stringtype eq{
+						compCnt tableIndex map_index
+					}{
+						exec
+					}ifelse
+					/Names where{
+						pop setdevicencolor
+					}{
+						setsepcolor
+					}ifelse
+					currentgray
+					tableIndex exch
+					255 mul cvi 
+					NewLookup 3 1 roll put
+				}for
+				[/Indexed currentcolorspace HiVal NewLookup]setcolorspace_opt
+				end
+			}{
+				level3
+				{
+					currentdict/Names known{
+						[/Indexed[/DeviceN Names MappedCSA/TintTransform load]HiVal Lookup]setcolorspace_opt
+					}{
+						[/Indexed[/Separation Name MappedCSA sep_proc_name load]HiVal Lookup]setcolorspace_opt
+					}ifelse
+				}{
+				[/Indexed MappedCSA HiVal
+					[
+					currentdict/Names known{
+						Lookup dup type/stringtype eq
+							{/exch cvx CSDBase/CSD get_res/Names get length dup/mul cvx exch/getinterval cvx{255 div}/forall cvx}
+							{/exec cvx}ifelse
+							/TintTransform load/exec cvx
+					}{
+						Lookup dup type/stringtype eq
+							{/exch cvx/get cvx 255/div cvx}
+							{/exec cvx}ifelse
+							CSDBase/CSD get_res/MappedCSA get sep_proc_name exch pop/load cvx/exec cvx
+					}ifelse
+					]cvx
+				]setcolorspace_opt
+				}ifelse
+			}ifelse
+			end
+			set_crd
+		}
+		{
+			CSA map_csa
+			AGMCORE_host_sep level2 not and{
+				0 0 0 0 setcmykcolor
+			}{
+				[/Indexed MappedCSA 
+				level2 not has_color not and{
+					dup 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or{
+						pop[/DeviceGray]
+					}if
+					HiVal GrayLookup
+				}{
+					HiVal 
+					currentdict/RangeArray known{
+						{
+							/indexed_colorspace_dict AGMCORE_gget begin
+							Lookup exch 
+							dup HiVal gt{
+								pop HiVal
+							}if
+							NComponents mul NComponents getinterval{}forall
+							NComponents 1 sub -1 0{
+								RangeArray exch 2 mul 2 getinterval aload pop map255_to_range
+								NComponents 1 roll
+							}for
+							end
+						}bind
+					}{
+						Lookup
+					}ifelse
+				}ifelse
+				]setcolorspace_opt
+				set_crd
+			}ifelse
+		}ifelse
+	end
+}def
+/setindexedcolor
+{
+	AGMCORE_host_sep{
+		/indexed_colorspace_dict AGMCORE_gget
+		begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				map_indexed_devn
+				devn
+			}
+			{
+				Lookup 1 3 -1 roll map_index
+				sep
+			}ifelse
+			end
+		}{
+			Lookup MappedCSA/DeviceCMYK eq{4}{1}ifelse 3 -1 roll
+			map_index
+			MappedCSA/DeviceCMYK eq{setcmykcolor}{setgray}ifelse
+		}ifelse
+		end
+	}{
+		level3 not AGMCORE_in_rip_sep and/indexed_colorspace_dict AGMCORE_gget/CSDBase known and{
+			/indexed_colorspace_dict AGMCORE_gget/CSDBase get/CSD get_res begin
+			map_indexed_devn
+			devn
+			end
+		}
+		{
+			setcolor
+		}ifelse
+	}ifelse
+}def
+/ignoreimagedata
+{
+	currentoverprint not{
+		gsave
+		dup clonedict begin
+		1 setgray
+		/Decode[0 1]def
+		/DataSource<FF>def
+		/MultipleDataSources false def
+		/BitsPerComponent 8 def
+		currentdict end
+		systemdict/image gx
+		grestore
+		}if
+	consumeimagedata
+}def
+/add_res
+{
+	dup/CSD eq{
+		pop 
+		//Adobe_AGM_Core begin
+		/AGMCORE_CSD_cache load 3 1 roll put
+		end
+	}{
+		defineresource pop
+	}ifelse
+}def
+/del_res
+{
+	{
+		aload pop exch
+		dup/CSD eq{
+			pop 
+			{//Adobe_AGM_Core/AGMCORE_CSD_cache get exch undef}forall
+		}{
+			exch
+			{1 index undefineresource}forall
+			pop
+		}ifelse
+	}forall
+}def
+/get_res
+{
+	dup/CSD eq{
+		pop
+		dup type dup/nametype eq exch/stringtype eq or{
+			AGMCORE_CSD_cache exch get
+		}if
+	}{
+		findresource
+	}ifelse
+}def
+/get_csa_by_name
+{
+	dup type dup/nametype eq exch/stringtype eq or{
+		/CSA get_res
+	}if
+}def
+/paintproc_buf_init
+{
+	/count get 0 0 put
+}def
+/paintproc_buf_next
+{
+	dup/count get dup 0 get
+	dup 3 1 roll
+	1 add 0 xpt
+	get				
+}def
+/cachepaintproc_compress
+{
+	5 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	/string_size 16000 def
+	/readbuffer string_size string def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	/LZWFilter 
+	{
+		exch
+		dup length 0 eq{
+			pop
+		}{
+			ppdict dup length 1 sub 3 -1 roll put
+		}ifelse
+		{string_size}{0}ifelse string
+	}/LZWEncode filter def
+	{		
+		ReadFilter readbuffer readstring
+		exch LZWFilter exch writestring
+		not{exit}if
+	}loop
+	LZWFilter closefile
+	ppdict				
+	end
+}def
+/cachepaintproc
+{
+	2 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	{
+		ReadFilter 16000 string readstring exch
+		ppdict dup length 1 sub 3 -1 roll put
+		not{exit}if
+	}loop
+	ppdict dup dup length 1 sub()put					
+	end	
+}def
+/make_pattern
+{
+	exch clonedict exch
+	dup matrix currentmatrix matrix concatmatrix 0 0 3 2 roll itransform
+	exch 3 index/XStep get 1 index exch 2 copy div cvi mul sub sub
+	exch 3 index/YStep get 1 index exch 2 copy div cvi mul sub sub
+	matrix translate exch matrix concatmatrix
+			 1 index begin
+		BBox 0 get XStep div cvi XStep mul/xshift exch neg def
+		BBox 1 get YStep div cvi YStep mul/yshift exch neg def
+		BBox 0 get xshift add
+		BBox 1 get yshift add
+		BBox 2 get xshift add
+		BBox 3 get yshift add
+		4 array astore
+		/BBox exch def
+		[xshift yshift/translate load null/exec load]dup
+		3/PaintProc load put cvx/PaintProc exch def
+		end
+	gsave 0 setgray
+	makepattern
+	grestore
+}def
+/set_pattern
+{
+	dup/PatternType get 1 eq{
+		dup/PaintType get 1 eq{
+			currentoverprint sop[/DeviceGray]setcolorspace 0 setgray
+		}if
+	}if
+	setpattern
+}def
+/setcolorspace_opt
+{
+	dup currentcolorspace eq{pop}{setcolorspace}ifelse
+}def
+/updatecolorrendering
+{
+	currentcolorrendering/RenderingIntent known{
+		currentcolorrendering/RenderingIntent get
+	}
+	{
+		Intent/AbsoluteColorimetric eq 
+		{
+			/absolute_colorimetric_crd AGMCORE_gget dup null eq
+		}
+		{
+			Intent/RelativeColorimetric eq
+			{
+				/relative_colorimetric_crd AGMCORE_gget dup null eq
+			}
+			{
+				Intent/Saturation eq
+				{
+					/saturation_crd AGMCORE_gget dup null eq
+				}
+				{
+					/perceptual_crd AGMCORE_gget dup null eq
+				}ifelse
+			}ifelse
+		}ifelse
+		{
+			pop null	
+		}
+		{
+			/RenderingIntent known{null}{Intent}ifelse
+		}ifelse
+	}ifelse
+	Intent ne{
+		Intent/ColorRendering{findresource}stopped
+		{
+			pop pop systemdict/findcolorrendering known
+			{
+ 				Intent findcolorrendering
+ 				{
+ 					/ColorRendering findresource true exch
+ 				}
+ 				{
+ 					/ColorRendering findresource
+					product(Xerox Phaser 5400)ne
+					exch
+ 				}ifelse
+				dup Intent/AbsoluteColorimetric eq 
+				{
+					/absolute_colorimetric_crd exch AGMCORE_gput
+				}
+				{
+					Intent/RelativeColorimetric eq
+					{
+						/relative_colorimetric_crd exch AGMCORE_gput
+					}
+					{
+						Intent/Saturation eq
+						{
+							/saturation_crd exch AGMCORE_gput
+						}
+						{
+							Intent/Perceptual eq
+							{
+								/perceptual_crd exch AGMCORE_gput
+							}
+							{
+								pop
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+				1 index{exch}{pop}ifelse
+			}
+			{false}ifelse
+		}
+		{true}ifelse
+		{
+			dup begin
+			currentdict/TransformPQR known{
+				currentdict/TransformPQR get aload pop
+				3{{}eq 3 1 roll}repeat or or
+			}
+			{true}ifelse
+			currentdict/MatrixPQR known{
+				currentdict/MatrixPQR get aload pop
+				1.0 eq 9 1 roll 0.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 1.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 0.0 eq 9 1 roll 1.0 eq
+				and and and and and and and and
+			}
+			{true}ifelse
+			end
+			or
+			{
+				clonedict begin
+				/TransformPQR[
+					{4 -1 roll 3 get dup 3 1 roll sub 5 -1 roll 3 get 3 -1 roll sub div
+					3 -1 roll 3 get 3 -1 roll 3 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 4 get dup 3 1 roll sub 5 -1 roll 4 get 3 -1 roll sub div
+					3 -1 roll 4 get 3 -1 roll 4 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 5 get dup 3 1 roll sub 5 -1 roll 5 get 3 -1 roll sub div
+					3 -1 roll 5 get 3 -1 roll 5 get dup 4 1 roll sub mul add}bind
+				]def
+				/MatrixPQR[0.8951 -0.7502 0.0389 0.2664 1.7135 -0.0685 -0.1614 0.0367 1.0296]def
+				/RangePQR[-0.3227950745 2.3229645538 -1.5003771057 3.5003465881 -0.1369979095 2.136967392]def
+				currentdict end
+			}if
+			setcolorrendering_opt
+		}if		
+	}if
+}def
+/set_crd
+{
+	AGMCORE_host_sep not level2 and{
+		currentdict/ColorRendering known{
+			ColorRendering/ColorRendering{findresource}stopped not{setcolorrendering_opt}if
+		}{
+			currentdict/Intent known{
+				updatecolorrendering
+			}if
+		}ifelse
+		currentcolorspace dup type/arraytype eq
+			{0 get}if
+		/DeviceRGB eq
+			{
+			currentdict/UCR known
+				{/UCR}{/AGMCORE_currentucr}ifelse
+			load setundercolorremoval
+			currentdict/BG known 
+				{/BG}{/AGMCORE_currentbg}ifelse
+			load setblackgeneration
+			}if
+	}if
+}def
+/set_ucrbg
+{
+	dup null eq{pop/AGMCORE_currentbg load}{/Procedure get_res}ifelse setblackgeneration
+	dup null eq{pop/AGMCORE_currentucr load}{/Procedure get_res}ifelse setundercolorremoval
+}def
+/setcolorrendering_opt
+{
+	dup currentcolorrendering eq{
+		pop
+	}{
+		clonedict
+		begin
+			/Intent Intent def
+			currentdict
+		end
+		setcolorrendering
+	}ifelse
+}def
+/cpaint_gcomp
+{
+	convert_to_process//Adobe_AGM_Core/AGMCORE_ConvertToProcess xddf
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get not
+	{
+		(%end_cpaint_gcomp)flushinput
+	}if
+}def
+/cpaint_gsep
+{
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get
+	{	
+		(%end_cpaint_gsep)flushinput
+	}if
+}def
+/cpaint_gend
+{np}def
+/T1_path
+{
+	currentfile token pop currentfile token pop mo
+	{
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 exch rlineto 
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 rlineto
+	}loop
+}def
+/T1_gsave
+	level3
+	{/clipsave}
+	{/gsave}ifelse
+	load def
+/T1_grestore
+	level3
+	{/cliprestore}
+	{/grestore}ifelse 
+	load def
+/set_spot_alias_ary
+{
+	dup inherit_aliases
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry xddf
+}def
+/set_spot_normalization_ary
+{
+	dup inherit_aliases
+	dup length
+	/AGMCORE_SpotAliasAry where{pop AGMCORE_SpotAliasAry length add}if
+	array
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry2 xddf
+	/AGMCORE_SpotAliasAry where{
+		pop
+		AGMCORE_SpotAliasAry2 0 AGMCORE_SpotAliasAry putinterval
+		AGMCORE_SpotAliasAry length
+	}{0}ifelse
+	AGMCORE_SpotAliasAry2 3 1 roll exch putinterval
+	true set_spot_alias
+}def
+/inherit_aliases
+{
+	{dup/Name get map_alias{/CSD put}{pop}ifelse}forall
+}def
+/set_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias 3 -1 roll put
+	}{
+		pop
+	}ifelse
+}def
+/current_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias get
+	}{
+		false
+	}ifelse
+}def
+/map_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		begin
+			/AGMCORE_name xdf
+			false	
+			AGMCORE_SpotAliasAry2{
+				dup/Name get AGMCORE_name eq{
+					/CSD get/CSD get_res
+					exch pop true
+					exit
+				}{
+					pop
+				}ifelse
+			}forall
+		end
+	}{
+		pop false
+	}ifelse
+}bdf
+/spot_alias
+{
+	true set_spot_alias
+	/AGMCORE_&setcustomcolor AGMCORE_key_known not{
+		//Adobe_AGM_Core/AGMCORE_&setcustomcolor/setcustomcolor load put
+	}if
+	/customcolor_tint 1 AGMCORE_gput
+	//Adobe_AGM_Core begin
+	/setcustomcolor
+	{
+		//Adobe_AGM_Core begin
+		dup/customcolor_tint exch AGMCORE_gput
+		1 index aload pop pop 1 eq exch 1 eq and exch 1 eq and exch 1 eq and not
+		current_spot_alias and{1 index 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/sep_colorspace_dict AGMCORE_gget null ne
+			3 1 roll 2 index{
+				exch pop/sep_tint AGMCORE_gget exch
+			}if
+			mark 3 1 roll
+			setsepcolorspace
+			counttomark 0 ne{
+				setsepcolor
+			}if
+			pop
+			not{/sep_tint 1.0 AGMCORE_gput}if
+			pop
+			true set_spot_alias
+		}{
+			AGMCORE_&setcustomcolor
+		}ifelse
+		end
+	}bdf
+	end
+}def
+/begin_feature
+{
+	Adobe_AGM_Core/AGMCORE_feature_dictCount countdictstack put
+	count Adobe_AGM_Core/AGMCORE_feature_opCount 3 -1 roll put
+	{Adobe_AGM_Core/AGMCORE_feature_ctm matrix currentmatrix put}if
+}def
+/end_feature
+{
+	2 dict begin
+	/spd/setpagedevice load def
+	/setpagedevice{get_gstate spd set_gstate}def
+	stopped{$error/newerror false put}if
+	end
+	count Adobe_AGM_Core/AGMCORE_feature_opCount get sub dup 0 gt{{pop}repeat}{pop}ifelse
+	countdictstack Adobe_AGM_Core/AGMCORE_feature_dictCount get sub dup 0 gt{{end}repeat}{pop}ifelse
+	{Adobe_AGM_Core/AGMCORE_feature_ctm get setmatrix}if
+}def
+/set_negative
+{
+	//Adobe_AGM_Core begin
+	/AGMCORE_inverting exch def
+	level2{
+		currentpagedevice/NegativePrint known AGMCORE_distilling not and{
+			currentpagedevice/NegativePrint get//Adobe_AGM_Core/AGMCORE_inverting get ne{
+				true begin_feature true{
+						<</NegativePrint//Adobe_AGM_Core/AGMCORE_inverting get>>setpagedevice
+				}end_feature
+			}if
+			/AGMCORE_inverting false def
+		}if
+	}if
+	AGMCORE_inverting{
+		[{1 exch sub}/exec load dup currenttransfer exch]cvx bind settransfer
+ 		AGMCORE_distilling{
+ 			erasepage
+ 		}{
+ 			gsave np clippath 1/setseparationgray where{pop setseparationgray}{setgray}ifelse
+ 			/AGMIRS_&fill where{pop AGMIRS_&fill}{fill}ifelse grestore
+ 		}ifelse
+	}if
+	end
+}def
+/lw_save_restore_override{
+	/md where{
+		pop
+		md begin
+		initializepage
+		/initializepage{}def
+		/pmSVsetup{}def
+		/endp{}def
+		/pse{}def
+		/psb{}def
+		/orig_showpage where
+			{pop}
+			{/orig_showpage/showpage load def}
+		ifelse
+		/showpage{orig_showpage gR}def
+		end
+	}if
+}def
+/pscript_showpage_override{
+	/NTPSOct95 where
+	{
+		begin
+		showpage
+		save
+		/showpage/restore load def
+		/restore{exch pop}def
+		end
+	}if
+}def
+/driver_media_override
+{
+	/md where{
+		pop
+		md/initializepage known{
+			md/initializepage{}put
+		}if
+		md/rC known{
+			md/rC{4{pop}repeat}put
+		}if
+	}if
+	/mysetup where{
+		/mysetup[1 0 0 1 0 0]put
+	}if
+	Adobe_AGM_Core/AGMCORE_Default_CTM matrix currentmatrix put
+	level2
+		{Adobe_AGM_Core/AGMCORE_Default_PageSize currentpagedevice/PageSize get put}if
+}def
+/driver_check_media_override
+{
+ 	/PrepsDict where
+ 		{pop}
+		{
+		Adobe_AGM_Core/AGMCORE_Default_CTM get matrix currentmatrix ne
+		Adobe_AGM_Core/AGMCORE_Default_PageSize get type/arraytype eq
+			{
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 0 get currentpagedevice/PageSize get 0 get eq and
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 1 get currentpagedevice/PageSize get 1 get eq and
+			}if
+			{
+			Adobe_AGM_Core/AGMCORE_Default_CTM get setmatrix
+			}if
+		}ifelse
+}def
+AGMCORE_err_strings begin
+	/AGMCORE_bad_environ(Environment not satisfactory for this job. Ensure that the PPD is correct or that the PostScript level requested is supported by this printer. )def
+	/AGMCORE_color_space_onhost_seps(This job contains colors that will not separate with on-host methods. )def
+	/AGMCORE_invalid_color_space(This job contains an invalid color space. )def
+end
+/set_def_ht
+{AGMCORE_def_ht sethalftone}def
+/set_def_flat
+{AGMCORE_Default_flatness setflat}def
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_CoolType_Core 2.31 0
%%Copyright: Copyright 1997-2006 Adobe Systems Incorporated. All Rights Reserved.
%%Version: 2.31 0
10 dict begin
/Adobe_CoolType_Passthru currentdict def
/Adobe_CoolType_Core_Defined userdict/Adobe_CoolType_Core known def
Adobe_CoolType_Core_Defined
	{/Adobe_CoolType_Core userdict/Adobe_CoolType_Core get def}
if
userdict/Adobe_CoolType_Core 70 dict dup begin put
/Adobe_CoolType_Version 2.31 def
/Level2?
	systemdict/languagelevel known dup
		{pop systemdict/languagelevel get 2 ge}
	if def
Level2? not
	{
	/currentglobal false def
	/setglobal/pop load def
	/gcheck{pop false}bind def
	/currentpacking false def
	/setpacking/pop load def
	/SharedFontDirectory 0 dict def
	}
if
currentpacking
true setpacking
currentglobal false setglobal
userdict/Adobe_CoolType_Data 2 copy known not
	{2 copy 10 dict put}
if
get
	 begin
	/@opStackCountByLevel 32 dict def
	/@opStackLevel 0 def
	/@dictStackCountByLevel 32 dict def
	/@dictStackLevel 0 def
	 end
setglobal
currentglobal true setglobal
userdict/Adobe_CoolType_GVMFonts known not
	{userdict/Adobe_CoolType_GVMFonts 10 dict put}
if
setglobal
currentglobal false setglobal
userdict/Adobe_CoolType_LVMFonts known not
	{userdict/Adobe_CoolType_LVMFonts 10 dict put}
if
setglobal
/ct_VMDictPut
	{
	dup gcheck{Adobe_CoolType_GVMFonts}{Adobe_CoolType_LVMFonts}ifelse
	3 1 roll put
	}bind def
/ct_VMDictUndef
	{
	dup Adobe_CoolType_GVMFonts exch known
		{Adobe_CoolType_GVMFonts exch undef}
		{
			dup Adobe_CoolType_LVMFonts exch known
			{Adobe_CoolType_LVMFonts exch undef}
			{pop}
			ifelse
		}ifelse
	}bind def
/ct_str1 1 string def
/ct_xshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_yshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0 exch
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_xyshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			{_ct_na _ct_i 1 add get}stopped 
			{pop pop pop}
			{
				_ct_x _ct_y moveto
				rmoveto
			}
			ifelse
		}
		ifelse
		/_ct_i _ct_i 2 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/xsh{{@xshow}stopped{Adobe_CoolType_Data begin ct_xshow end}if}bind def
/ysh{{@yshow}stopped{Adobe_CoolType_Data begin ct_yshow end}if}bind def
/xysh{{@xyshow}stopped{Adobe_CoolType_Data begin ct_xyshow end}if}bind def
currentglobal true setglobal
/ct_T3Defs
{
/BuildChar
{
	1 index/Encoding get exch get
	1 index/BuildGlyph get exec
}bind def
/BuildGlyph
{
	exch begin
	GlyphProcs exch get exec
	end
}bind def
}bind def
setglobal
/@_SaveStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@vmState currentglobal def false setglobal
		@opStackCountByLevel
		@opStackLevel
		2 copy known not
			{
			2 copy
			3 dict dup/args
			7 index
			5 add array put
			put get
			}
			{
			get dup/args get dup length 3 index lt
				{
				dup length 5 add array exch
				1 index exch 0 exch putinterval
				1 index exch/args exch put
				}
				{pop}
			ifelse
			}
		ifelse
			begin
			count 1 sub
			1 index lt
				{pop count}
			if
			dup/argCount exch def
			dup 0 gt
				{
				args exch 0 exch getinterval 
			astore pop
				}
				{pop}
			ifelse
			count
			/restCount exch def
			end
		/@opStackLevel @opStackLevel 1 add def
		countdictstack 1 sub
		@dictStackCountByLevel exch @dictStackLevel exch put
		/@dictStackLevel @dictStackLevel 1 add def
		@vmState setglobal
		end
	}bind def
/@_RestoreStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		@opStackCountByLevel @opStackLevel get
			begin
			count restCount sub dup 0 gt
				{{pop}repeat}
				{pop}
			ifelse
			args 0 argCount getinterval{}forall
			end
		/@dictStackLevel @dictStackLevel 1 sub def
		@dictStackCountByLevel @dictStackLevel get
		end
	countdictstack exch sub dup 0 gt
		{{end}repeat}
		{pop}
	ifelse
	}bind def
/@_PopStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		/@dictStackLevel @dictStackLevel 1 sub def
		end
	}bind def
/@Raise
	{
	exch cvx exch errordict exch get exec
	stop
	}bind def
/@ReRaise
	{
	cvx $error/errorname get errordict exch get exec
	stop
	}bind def
/@Stopped
	{
	0 @#Stopped
	}bind def
/@#Stopped
	{
	@_SaveStackLevels
	stopped
		{@_RestoreStackLevels true}
		{@_PopStackLevels false}
	ifelse
	}bind def
/@Arg
	{
	Adobe_CoolType_Data
		begin
		@opStackCountByLevel @opStackLevel 1 sub get
		begin
		args exch
		argCount 1 sub exch sub get
		end
		end
	}bind def
currentglobal true setglobal
/CTHasResourceForAllBug
	Level2?
		{
		1 dict dup
				/@shouldNotDisappearDictValue true def
				Adobe_CoolType_Data exch/@shouldNotDisappearDict exch put
				begin
				count @_SaveStackLevels
					{(*){pop stop}128 string/Category resourceforall}
				stopped pop
				@_RestoreStackLevels
				currentdict Adobe_CoolType_Data/@shouldNotDisappearDict get dup 3 1 roll ne dup 3 1 roll
					{
						 /@shouldNotDisappearDictValue known
								{
										 {
												end
												currentdict 1 index eq
													{pop exit}
												if
										 }
									 loop
								}
						 if
					}
					{
						 pop
						 end
					}
				ifelse
		}
		{false}
	ifelse
	def
true setglobal
/CTHasResourceStatusBug
	Level2?
		{
		mark
			{/steveamerige/Category resourcestatus}
		stopped
			{cleartomark true}
			{cleartomark currentglobal not}
		ifelse
		}
		{false}
	ifelse
	def
setglobal
/CTResourceStatus
		{
		mark 3 1 roll
		/Category findresource
			begin
			({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
				{cleartomark false}
				{{3 2 roll pop true}{cleartomark false}ifelse}
			ifelse
			end
		}bind def
/CTWorkAroundBugs
	{
	Level2?
		{
		/cid_PreLoad/ProcSet resourcestatus
			{
			pop pop
			currentglobal
			mark
				{
				(*)
					{
					dup/CMap CTHasResourceStatusBug
						{CTResourceStatus}
						{resourcestatus}
					ifelse
						{
						pop dup 0 eq exch 1 eq or
							{
							dup/CMap findresource gcheck setglobal
							/CMap undefineresource
							}
							{
							pop CTHasResourceForAllBug
								{exit}
								{stop}
							ifelse
							}
						ifelse
						}
						{pop}
					ifelse
					}
				128 string/CMap resourceforall
				}
			stopped
				{cleartomark}
			stopped pop
			setglobal
			}
		if
		}
	if
	}bind def
/ds
	{
	Adobe_CoolType_Core
		begin
		CTWorkAroundBugs
		/mo/moveto load def
		/nf/newencodedfont load def
		/msf{makefont setfont}bind def
		/uf{dup undefinefont ct_VMDictUndef}bind def
		/ur/undefineresource load def
		/chp/charpath load def
		/awsh/awidthshow load def
		/wsh/widthshow load def
		/ash/ashow load def
		/@xshow/xshow load def
		/@yshow/yshow load def
		/@xyshow/xyshow load def
		/@cshow/cshow load def
		/sh/show load def
		/rp/repeat load def
		/.n/.notdef def
		end
		currentglobal false setglobal
	 userdict/Adobe_CoolType_Data 2 copy known not
		 {2 copy 10 dict put}
		if
		get
		begin
		/AddWidths? false def
		/CC 0 def
		/charcode 2 string def
		/@opStackCountByLevel 32 dict def
		/@opStackLevel 0 def
		/@dictStackCountByLevel 32 dict def
		/@dictStackLevel 0 def
		/InVMFontsByCMap 10 dict def
		/InVMDeepCopiedFonts 10 dict def
		end
		setglobal
	}bind def
/dt
	{
	currentdict Adobe_CoolType_Core eq
		{end}
	if
	}bind def
/ps
	{
	Adobe_CoolType_Core begin
	Adobe_CoolType_GVMFonts begin
	Adobe_CoolType_LVMFonts begin
	SharedFontDirectory begin
	}bind def
/pt
	{
	end
	end
	end
	end
	}bind def
/unload
	{
	systemdict/languagelevel known
		{
		systemdict/languagelevel get 2 ge
			{
			userdict/Adobe_CoolType_Core 2 copy known
				{undef}
				{pop pop}
			ifelse
			}
		if
		}
	if
	}bind def
/ndf
	{
	1 index where
		{pop pop pop}
		{dup xcheck{bind}if def}
	ifelse
	}def
/findfont systemdict
	begin
	userdict
		begin
		/globaldict where{/globaldict get begin}if
			dup where pop exch get
		/globaldict where{pop end}if
		end
	end
Adobe_CoolType_Core_Defined
	{/systemfindfont exch def}
	{
	/findfont 1 index def
	/systemfindfont exch def
	}
ifelse
/undefinefont
	{pop}ndf
/copyfont
	{
	currentglobal 3 1 roll
	1 index gcheck setglobal
	dup null eq{0}{dup length}ifelse
	2 index length add 1 add dict
		begin
		exch
			{
			1 index/FID eq
				{pop pop}
				{def}
			ifelse
			}
		forall
		dup null eq
			{pop}
			{{def}forall}
		ifelse
		currentdict
		end
	exch setglobal
	}bind def
/copyarray
	{
	currentglobal exch
	dup gcheck setglobal
	dup length array copy
	exch setglobal
	}bind def
/newencodedfont
	{
	currentglobal
		{
		SharedFontDirectory 3 index known
			{SharedFontDirectory 3 index get/FontReferenced known}
			{false}
		ifelse
		}
		{
		FontDirectory 3 index known
			{FontDirectory 3 index get/FontReferenced known}
			{
			SharedFontDirectory 3 index known
				{SharedFontDirectory 3 index get/FontReferenced known}
				{false}
			ifelse
			}
		ifelse
		}
	ifelse
	dup
		{
		3 index findfont/FontReferenced get
		2 index dup type/nametype eq
			{findfont}
		if ne
			{pop false}
		if
		}
	if
	dup
		{
		1 index dup type/nametype eq
			{findfont}
		 if
		dup/CharStrings known
			{
			/CharStrings get length
			4 index findfont/CharStrings get length
			ne
				{
				pop false
				}
			if 
			}
			{pop}
			ifelse
		}
	if
		{
		pop
		1 index findfont
		/Encoding get exch
		0 1 255
			{2 copy get 3 index 3 1 roll put}
		for
		pop pop pop
		}
		{
		currentglobal
	 4 1 roll
		dup type/nametype eq
		 {findfont}
	 if
	 dup gcheck setglobal
		dup dup maxlength 2 add dict
			begin
			exch
				{
				1 index/FID ne
				2 index/Encoding ne and
					{def}
					{pop pop}
				ifelse
				}
			forall
			/FontReferenced exch def
			/Encoding exch dup length array copy def
			/FontName 1 index dup type/stringtype eq{cvn}if def dup
			currentdict
			end
		definefont ct_VMDictPut
		setglobal
		}
	ifelse
	}bind def
/SetSubstituteStrategy
	{
	$SubstituteFont
		begin
		dup type/dicttype ne
			{0 dict}
		if
		currentdict/$Strategies known
			{
			exch $Strategies exch 
			2 copy known
				{
				get
				2 copy maxlength exch maxlength add dict
					begin
					{def}forall
					{def}forall
					currentdict
					dup/$Init known
						{dup/$Init get exec}
					if
					end
				/$Strategy exch def
				}
				{pop pop pop}
			ifelse
			}
			{pop pop}
		ifelse
		end
	}bind def
/scff
	{
	$SubstituteFont
		begin
		dup type/stringtype eq
			{dup length exch}
			{null}
		ifelse
		/$sname exch def
		/$slen exch def
		/$inVMIndex
			$sname null eq
				{
				1 index $str cvs
				dup length $slen sub $slen getinterval cvn
				}
				{$sname}
			ifelse def
		end
		{findfont}
	@Stopped
		{
		dup length 8 add string exch
		1 index 0(BadFont:)putinterval
		1 index exch 8 exch dup length string cvs putinterval cvn
			{findfont}
		@Stopped
			{pop/Courier findfont}
		if
		}
	if
	$SubstituteFont
		begin
		/$sname null def
		/$slen 0 def
		/$inVMIndex null def
		end
	}bind def
/isWidthsOnlyFont
	{
	dup/WidthsOnly known
		{pop pop true}
		{
		dup/FDepVector known
			{/FDepVector get{isWidthsOnlyFont dup{exit}if}forall}
			{
			dup/FDArray known
				{/FDArray get{isWidthsOnlyFont dup{exit}if}forall}
				{pop}
			ifelse
			}
		ifelse
		}
	ifelse
	}bind def
/ct_StyleDicts 4 dict dup begin
		 /Adobe-Japan1 4 dict dup begin
					 Level2?
								{
								/Serif
								/HeiseiMin-W3-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMin-W3}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMin-W3/CIDFont resourcestatus
								{pop pop/HeiseiMin-W3}
								{/Ryumin-Light}
								ifelse
							}
							{/Ryumin-Light}
							ifelse
								}
								ifelse
								def
								/SansSerif
								/HeiseiKakuGo-W5-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiKakuGo-W5/CIDFont resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{/GothicBBB-Medium}
								ifelse
							}
							{/GothicBBB-Medium}
							ifelse
								}
								ifelse
								def
								/HeiseiMaruGo-W4-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMaruGo-W4/CIDFont resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
									/Jun101-Light-RKSJ-H/Font resourcestatus
									{pop pop/Jun101-Light}
									{SansSerif}
									ifelse
								}
								ifelse
							}
							{
								/Jun101-Light-RKSJ-H/Font resourcestatus
								{pop pop/Jun101-Light}
								{SansSerif}
								ifelse
							}
							ifelse
								}
								ifelse
								/RoundSansSerif exch def
								/Default Serif def
								}
								{
								/Serif/Ryumin-Light def
								/SansSerif/GothicBBB-Medium def
								{
								(fonts/Jun101-Light-83pv-RKSJ-H)status
								}stopped
								{pop}{
										 {pop pop pop pop/Jun101-Light}
										 {SansSerif}
										 ifelse
										 /RoundSansSerif exch def
								}ifelse
								/Default Serif def
								}
					 ifelse
		 end
		 def
		 /Adobe-Korea1 4 dict dup begin
					/Serif/HYSMyeongJo-Medium def
					/SansSerif/HYGoThic-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-GB1 4 dict dup begin
					/Serif/STSong-Light def
					/SansSerif/STHeiti-Regular def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-CNS1 4 dict dup begin
					/Serif/MKai-Medium def
					/SansSerif/MHei-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
end
def
Level2?{currentglobal true setglobal}if
/ct_BoldRomanWidthProc 
	{
	stringwidth 1 index 0 ne{exch .03 add exch}if setcharwidth
	0 0
	}bind def
/ct_Type0WidthProc 
	{
	 dup stringwidth 0 0 moveto 
	 2 index true charpath pathbbox
	 0 -1 
	 7 index 2 div .88 
	 setcachedevice2
	 pop
	0 0
	}bind def
/ct_Type0WMode1WidthProc 
	{
	 dup stringwidth 
	 pop 2 div neg -0.88
	2 copy
	moveto 
	0 -1
	 5 -1 roll true charpath pathbbox
	 setcachedevice
	}bind def
/cHexEncoding
[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
/ct_BoldBaseFont 
	 11 dict begin
		/FontType 3 def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/Encoding cHexEncoding def 
		/_setwidthProc/ct_BoldRomanWidthProc load def
		/_bcstr1 1 string def
		/BuildChar
		{
			exch begin
				_basefont setfont
				_bcstr1 dup 0 4 -1 roll put
				dup 
				_setwidthProc
				3 copy 
				moveto				
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
systemdict/composefont known
{
/ct_DefineIdentity-H
{
	/Identity-H/CMap resourcestatus
	{
		pop pop
	}
	{
		/CIDInit/ProcSet findresource begin
		 12 dict begin
		 begincmap
		 /CIDSystemInfo 3 dict dup begin
			 /Registry(Adobe)def
			 /Ordering(Identity)def
			 /Supplement 0 def
		 end def
		 /CMapName/Identity-H def
		 /CMapVersion 1.000 def
		 /CMapType 1 def
		 1 begincodespacerange
		 <0000><FFFF>
		 endcodespacerange
		 1 begincidrange
		 <0000><FFFF>0
		 endcidrange
		 endcmap
		 CMapName currentdict/CMap defineresource pop
		 end
		 end
	 }
	 ifelse
}
def
/ct_BoldBaseCIDFont 
	 11 dict begin
		/CIDFontType 1 def
		/CIDFontName/ct_BoldBaseCIDFont def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/_setwidthProc/ct_Type0WidthProc load def
		/_bcstr2 2 string def
		/BuildGlyph
		{
			exch begin		 
				_basefont setfont
				_bcstr2 1 2 index 256 mod put
				_bcstr2 0 3 -1 roll 256 idiv put
				_bcstr2 dup _setwidthProc		 
				3 copy 
				moveto
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
}if
Level2?{setglobal}if
/ct_CopyFont{
	{
		1 index/FID ne 2 index/UniqueID ne and
		{def}{pop pop}ifelse
	}forall
}bind def
/ct_Type0CopyFont 
{
	exch
	dup length dict
	begin
	ct_CopyFont
	[
	exch
	FDepVector 
	{
		 dup/FontType get 0 eq
		{	
		1 index ct_Type0CopyFont 
		/_ctType0 exch definefont
		}
		{
		/_ctBaseFont exch
		2 index exec
		}
		 ifelse 
		 exch
	}
	forall 
	pop
	]				
	/FDepVector exch def
	currentdict
	end
}bind def
/ct_MakeBoldFont
{
	 dup/ct_SyntheticBold known
	{
		dup length 3 add dict begin 
		ct_CopyFont 
		/ct_StrokeWidth .03 0 FontMatrix idtransform pop def 
		/ct_SyntheticBold true def
		currentdict 
		end 
		definefont
	}
	{
		dup dup length 3 add dict
		begin
			ct_CopyFont
			/PaintType 2 def
			/StrokeWidth .03 0 FontMatrix idtransform pop def
			/dummybold currentdict
		end
		definefont
		dup/FontType get dup 9 ge exch 11 le and 
		{
			ct_BoldBaseCIDFont
			dup length 3 add dict copy begin
			dup/CIDSystemInfo get/CIDSystemInfo exch def
			ct_DefineIdentity-H
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefont exch def
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefonto exch def
			currentdict
			end
			/CIDFont defineresource
		}
		{
			ct_BoldBaseFont
			dup length 3 add dict copy begin
			/_basefont exch def
			/_basefonto exch def
			currentdict
			end
			definefont
		}
		ifelse
	}
	ifelse
}bind def
/ct_MakeBold{
	1 index 
	1 index
	findfont
	currentglobal 5 1 roll
	dup gcheck setglobal
		dup
		 /FontType get 0 eq
			{
				dup/WMode known{dup/WMode get 1 eq}{false}ifelse
				version length 4 ge
				and
					{version 0 4 getinterval cvi 2015 ge}
					{true}
				ifelse 
					{/ct_Type0WidthProc}
					{/ct_Type0WMode1WidthProc}
				ifelse
				ct_BoldBaseFont/_setwidthProc 3 -1 roll load put
						{ct_MakeBoldFont}ct_Type0CopyFont definefont
			}
			{
				dup/_fauxfont known not 1 index/SubstMaster known not and
				{
					 ct_BoldBaseFont/_setwidthProc /ct_BoldRomanWidthProc load put
					 ct_MakeBoldFont 
				}
				{
				2 index 2 index eq
					{exch pop	}
					{
						dup length dict begin
						ct_CopyFont
						currentdict
						end
						definefont 
					}
				ifelse
				}
			ifelse
			}
		 ifelse
		 pop pop pop
		 setglobal
}bind def
/?str1 256 string def
/?set
	{
	$SubstituteFont
		begin
		/$substituteFound false def
		/$fontname 1 index def
		/$doSmartSub false def
		end
	dup
	 findfont
	$SubstituteFont
		begin
		$substituteFound
			{false}
			{
			dup/FontName known
				{
				dup/FontName get $fontname eq
				1 index/DistillerFauxFont known not and
				/currentdistillerparams where
					{pop false 2 index isWidthsOnlyFont not and}
				if
				}
				{false}
			ifelse
			}
		ifelse
		exch pop
		/$doSmartSub true def
		end
		{
		5 1 roll pop pop pop pop
		findfont
		}
		{
		1 index
		findfont
		dup/FontType get 3 eq
		{
			6 1 roll pop pop pop pop pop false
		}
		{pop true}
		ifelse
		{
		$SubstituteFont
		begin
		pop pop
		/$styleArray 1 index def
		/$regOrdering 2 index def
		pop pop
		0 1 $styleArray length 1 sub
		{
			$styleArray exch get
			ct_StyleDicts $regOrdering
			2 copy known
			{
				get
				exch 2 copy known not
				{pop/Default}
				if
				get
				dup type/nametype eq
				{
				?str1 cvs length dup 1 add exch
				?str1 exch(-)putinterval
				exch dup length exch ?str1 exch 3 index exch putinterval
				add ?str1 exch 0 exch getinterval cvn
				}
				{
				pop pop/Unknown
				}
				ifelse
			}
			{
				pop pop pop pop/Unknown
			}
			ifelse
		}
		for
		end
		findfont 
		}if
		}
	ifelse
	currentglobal false setglobal 3 1 roll
	null copyfont definefont pop
	setglobal
	}bind def
setpacking
userdict/$SubstituteFont 25 dict put
1 dict
	begin
	/SubstituteFont
		dup $error exch 2 copy known
			{get}
			{pop pop{pop/Courier}bind}
		ifelse def
	/currentdistillerparams where dup
		{
		pop pop
		currentdistillerparams/CannotEmbedFontPolicy 2 copy known
			{get/Error eq}
			{pop pop false}
		ifelse
		}
	if not
		{
		countdictstack array dictstack 0 get
			begin
			userdict
				begin
				$SubstituteFont
					begin
					/$str 128 string def
					/$fontpat 128 string def
					/$slen 0 def
					/$sname null def
					/$match false def
					/$fontname null def
					/$substituteFound false def
					/$inVMIndex null def
					/$doSmartSub true def
					/$depth 0 def
					/$fontname null def
					/$italicangle 26.5 def
					/$dstack null def
					/$Strategies 10 dict dup
						begin
						/$Type3Underprint
							{
							currentglobal exch false setglobal
							11 dict
								begin
								/UseFont exch
									$WMode 0 ne
										{
										dup length dict copy
										dup/WMode $WMode put
										/UseFont exch definefont
										}
									if def
								/FontName $fontname dup type/stringtype eq{cvn}if def
								/FontType 3 def
								/FontMatrix[.001 0 0 .001 0 0]def
								/Encoding 256 array dup 0 1 255{/.notdef put dup}for pop def
								/FontBBox[0 0 0 0]def
								/CCInfo 7 dict dup
									begin
									/cc null def
									/x 0 def
									/y 0 def
									end def
								/BuildChar
									{
									exch
										begin
										CCInfo
											begin
											1 string dup 0 3 index put exch pop
											/cc exch def
											UseFont 1000 scalefont setfont
											cc stringwidth/y exch def/x exch def
											x y setcharwidth
											$SubstituteFont/$Strategy get/$Underprint get exec
											0 0 moveto cc show
											x y moveto
											end
										end
									}bind def
								currentdict
								end
							exch setglobal
							}bind def
						/$GetaTint
							2 dict dup
								begin
								/$BuildFont
									{
									dup/WMode known
										{dup/WMode get}
										{0}
									ifelse
									/$WMode exch def
									$fontname exch
									dup/FontName known
										{
										dup/FontName get
										dup type/stringtype eq{cvn}if
										}
										{/unnamedfont}
									ifelse
									exch
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
									1 index/FontName get known
										{
										pop
										Adobe_CoolType_Data/InVMDeepCopiedFonts get
										1 index get
										null copyfont
										}
										{$deepcopyfont}
									ifelse
									exch 1 index exch/FontBasedOn exch put
									dup/FontName $fontname dup type/stringtype eq{cvn}if put
									definefont
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
										begin
										dup/FontBasedOn get 1 index def
										end
									}bind def
								/$Underprint
									{
									gsave
									x abs y abs gt
										{/y 1000 def}
										{/x -1000 def 500 120 translate}
									ifelse
									Level2?
										{
										[/Separation(All)/DeviceCMYK{0 0 0 1 pop}]
										setcolorspace
										}
										{0 setgray}
									ifelse
									10 setlinewidth
									x .8 mul
									[7 3]
										{
										y mul 8 div 120 sub x 10 div exch moveto
										0 y 4 div neg rlineto
										dup 0 rlineto
										0 y 4 div rlineto
										closepath
										gsave
										Level2?
											{.2 setcolor}
											{.8 setgray}
										ifelse
										fill grestore
										stroke
										}
									forall
									pop
									grestore
									}bind def
								end def
						/$Oblique
							1 dict dup
								begin
								/$BuildFont
									{
									currentglobal exch dup gcheck setglobal
									null copyfont
										begin
										/FontBasedOn
										currentdict/FontName known
											{
											FontName
											dup type/stringtype eq{cvn}if
											}
											{/unnamedfont}
										ifelse
										def
										/FontName $fontname dup type/stringtype eq{cvn}if def
										/currentdistillerparams where
											{pop}
											{
											/FontInfo currentdict/FontInfo known
												{FontInfo null copyfont}
												{2 dict}
											ifelse
											dup
												begin
												/ItalicAngle $italicangle def
												/FontMatrix FontMatrix
												[1 0 ItalicAngle dup sin exch cos div 1 0 0]
												matrix concatmatrix readonly
												end
											4 2 roll def
											def
											}
										ifelse
										FontName currentdict
										end
									definefont
									exch setglobal
									}bind def
								end def
						/$None
							1 dict dup
								begin
								/$BuildFont{}bind def
								end def
						end def
					/$Oblique SetSubstituteStrategy
					/$findfontByEnum
						{
						dup type/stringtype eq{cvn}if
						dup/$fontname exch def
						$sname null eq
							{$str cvs dup length $slen sub $slen getinterval}
							{pop $sname}
						ifelse
						$fontpat dup 0(fonts/*)putinterval exch 7 exch putinterval
						/$match false def
						$SubstituteFont/$dstack countdictstack array dictstack put
						mark
							{
							$fontpat 0 $slen 7 add getinterval
								{/$match exch def exit}
							$str filenameforall
							}
						stopped
							{
							cleardictstack
							currentdict
							true
							$SubstituteFont/$dstack get
								{
								exch
									{
									1 index eq
										{pop false}
										{true}
									ifelse
									}
									{begin false}
								ifelse
								}
							forall
							pop
							}
						if
						cleartomark
						/$slen 0 def
						$match false ne
							{$match(fonts/)anchorsearch pop pop cvn}
							{/Courier}
						ifelse
						}bind def
					/$ROS 1 dict dup
						begin
						/Adobe 4 dict dup
							begin
							/Japan1 [/Ryumin-Light/HeiseiMin-W3
										 /GothicBBB-Medium/HeiseiKakuGo-W5
										 /HeiseiMaruGo-W4/Jun101-Light]def
							/Korea1 [/HYSMyeongJo-Medium/HYGoThic-Medium]def
							/GB1	 [/STSong-Light/STHeiti-Regular]def
							/CNS1	[/MKai-Medium/MHei-Medium]def
							end def
						end def
					/$cmapname null def
					/$deepcopyfont
						{
						dup/FontType get 0 eq
							{
							1 dict dup/FontName/copied put copyfont
								begin
								/FDepVector FDepVector copyarray
								0 1 2 index length 1 sub
									{
									2 copy get $deepcopyfont
									dup/FontName/copied put
									/copied exch definefont
									3 copy put pop pop
									}
								for
								def
								currentdict
								end
							}
							{$Strategies/$Type3Underprint get exec}
						ifelse
						}bind def
					/$buildfontname
						{
						dup/CIDFont findresource/CIDSystemInfo get
							begin
							Registry length Ordering length Supplement 8 string cvs
							3 copy length 2 add add add string
							dup 5 1 roll dup 0 Registry putinterval
							dup 4 index(-)putinterval
							dup 4 index 1 add Ordering putinterval
							4 2 roll add 1 add 2 copy(-)putinterval
							end
						1 add 2 copy 0 exch getinterval $cmapname $fontpat cvs exch
						anchorsearch
							{pop pop 3 2 roll putinterval cvn/$cmapname exch def}
							{pop pop pop pop pop}
						ifelse
						length
						$str 1 index(-)putinterval 1 add
						$str 1 index $cmapname $fontpat cvs putinterval
						$cmapname length add
						$str exch 0 exch getinterval cvn
						}bind def
					/$findfontByROS
						{
						/$fontname exch def
						$ROS Registry 2 copy known
							{
							get Ordering 2 copy known
								{get}
								{pop pop[]}
							ifelse
							}
							{pop pop[]}
						ifelse
						false exch
							{
							dup/CIDFont resourcestatus
								{
								pop pop
								save
								1 index/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get}
									{false}
								ifelse
								exch pop
								exch restore
									{pop}
									{exch pop true exit}
								ifelse
								}
								{pop}
							ifelse
							}
						forall
							{$str cvs $buildfontname}
							{
							false(*)
								{
								save exch
								dup/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get not}
									{true}
								ifelse
								exch/CIDSystemInfo get
								dup/Registry get Registry eq
								exch/Ordering get Ordering eq and and
									{exch restore exch pop true exit}
									{pop restore}
								ifelse
								}
							$str/CIDFont resourceforall
								{$buildfontname}
								{$fontname $findfontByEnum}
							ifelse
							}
						ifelse
						}bind def
					end
				end
				currentdict/$error known currentdict/languagelevel known and dup
					{pop $error/SubstituteFont known}
				if
				dup
					{$error}
					{Adobe_CoolType_Core}
				ifelse
				begin
					{
					/SubstituteFont
					/CMap/Category resourcestatus
						{
						pop pop
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{
								$sname null eq
									{dup $str cvs dup length $slen sub $slen getinterval cvn}
									{$sname}
								ifelse
								Adobe_CoolType_Data/InVMFontsByCMap get
								1 index 2 copy known
									{
									get
									false exch
										{
										pop
										currentglobal
											{
											GlobalFontDirectory 1 index known
												{exch pop true exit}
												{pop}
											ifelse
											}
											{
											FontDirectory 1 index known
												{exch pop true exit}
												{
												GlobalFontDirectory 1 index known
													{exch pop true exit}
													{pop}
												ifelse
												}
											ifelse
											}
										ifelse
										}
									forall
									}
									{pop pop false}
								ifelse
									{
									exch pop exch pop
									}
									{
									dup/CMap resourcestatus
										{
										pop pop
										dup/$cmapname exch def
										/CMap findresource/CIDSystemInfo get{def}forall
										$findfontByROS
										}
										{
										128 string cvs
										dup(-)search
											{
											3 1 roll search
												{
												3 1 roll pop
													{dup cvi}
												stopped
													{pop pop pop pop pop $findfontByEnum}
													{
													4 2 roll pop pop
													exch length
													exch
													2 index length
													2 index
													sub
													exch 1 sub -1 0
														{
														$str cvs dup length
														4 index
														0
														4 index
														4 3 roll add
														getinterval
														exch 1 index exch 3 index exch
														putinterval
														dup/CMap resourcestatus
															{
															pop pop
															4 1 roll pop pop pop
															dup/$cmapname exch def
															/CMap findresource/CIDSystemInfo get{def}forall
															$findfontByROS
															true exit
															}
															{pop}
														ifelse
														}
													for
													dup type/booleantype eq
														{pop}
														{pop pop pop $findfontByEnum}
													ifelse
													}
												ifelse
												}
												{pop pop pop $findfontByEnum}
											ifelse
											}
											{pop pop $findfontByEnum}
										ifelse
										}
									ifelse
									}
								ifelse
								}
								{//SubstituteFont exec}
							ifelse
							/$slen 0 def
							end
						}
						}
						{
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{$findfontByEnum}
								{//SubstituteFont exec}
							ifelse
							end
						}
						}
					ifelse
					bind readonly def
					Adobe_CoolType_Core/scfindfont/systemfindfont load put
					}
					{
					/scfindfont
						{
						$SubstituteFont
							begin
							dup systemfindfont
							dup/FontName known
								{dup/FontName get dup 3 index ne}
								{/noname true}
							ifelse
							dup
								{
								/$origfontnamefound 2 index def
								/$origfontname 4 index def/$substituteFound true def
								}
							if
							exch pop
								{
								$slen 0 gt
								$sname null ne
								3 index length $slen gt or and
									{
									pop dup $findfontByEnum findfont
									dup maxlength 1 add dict
										begin
											{1 index/FID eq{pop pop}{def}ifelse}
										forall
										currentdict
										end
									definefont
									dup/FontName known{dup/FontName get}{null}ifelse
									$origfontnamefound ne
										{
										$origfontname $str cvs print
										( substitution revised, using )print
										dup/FontName known
											{dup/FontName get}{(unspecified font)}
										ifelse
										$str cvs print(.\n)print
										}
									if
									}
									{exch pop}
								ifelse
								}
								{exch pop}
							ifelse
							end
						}bind def
					}
				ifelse
				end
			end
		Adobe_CoolType_Core_Defined not
			{
			Adobe_CoolType_Core/findfont
				{
				$SubstituteFont
					begin
					$depth 0 eq
						{
						/$fontname 1 index dup type/stringtype ne{$str cvs}if def
						/$substituteFound false def
						}
					if
					/$depth $depth 1 add def
					end
				scfindfont
				$SubstituteFont
					begin
					/$depth $depth 1 sub def
					$substituteFound $depth 0 eq and
						{
						$inVMIndex null ne
							{dup $inVMIndex $AddInVMFont}
						if
						$doSmartSub
							{
							currentdict/$Strategy known
								{$Strategy/$BuildFont get exec}
							if
							}
						if
						}
					if
					end
				}bind put
			}
		if
		}
	if
	end
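%	$AddInVMFont: record a font under its FontName (noting via gcheck
%	whether it lives in global VM) in Adobe_CoolType_Data/InVMFontsByCMap.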
/$AddInVMFont
	{
	exch/FontName 2 copy known
		{
		get
		1 dict dup begin exch 1 index gcheck def end exch
		Adobe_CoolType_Data/InVMFontsByCMap get exch
		$DictAdd
		}
		{pop pop pop}
	ifelse
	}bind def
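%	$DictAdd: merge key/value pairs into a sub-dictionary of the target,
%	growing it first on Level 1 interpreters where dictionaries have a
%	fixed size.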
/$DictAdd
	{
	2 copy known not
		{2 copy 4 index length dict put}
	if
	Level2? not
		{
		2 copy get dup maxlength exch length 4 index length add lt
		2 copy get dup length 4 index length add exch maxlength 1 index lt
			{
			2 mul dict
				begin
				2 copy get{forall}def
				2 copy currentdict put
				end
			}
			{pop}
		ifelse
		}
	if
	get
		begin
			{def}
		forall
		end
	}bind def
end
end
%%EndResource
currentglobal true setglobal
%%BeginResource: procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
%%Copyright: Copyright 1987-2006 Adobe Systems Incorporated.
%%Version: 1.23 0
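%	MAKEOCF wraps CID-keyed fonts in FMapType 2/FontType 0 composite
%	fonts so they can be shown on interpreters without native CID
%	support; ct_GlyphDirProcs below defines the glyph-download
%	procedures (+ | !) for GlyphDirectory-based fonts.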
systemdict/languagelevel known dup
	{currentglobal false setglobal}
	{false}
ifelse
exch
userdict/Adobe_CoolType_Utility 2 copy known
	{2 copy get dup maxlength 27 add dict copy}
	{27 dict}
ifelse put
Adobe_CoolType_Utility
	begin
	/@eexecStartData
		 <BAB431EA07F209EB8C4348311481D9D3F76E3D15246555577D87BC510ED54E
		 118C39697FA9F6DB58128E60EB8A12FA24D7CDD2FA94D221FA9EC8DA3E5E6A1C
		 4ACECC8C2D39C54E7C946031DD156C3A6B4A09AD29E1867A>def
	/@recognizeCIDFont null def
	/ct_Level2? exch def
	/ct_Clone? 1183615869 internaldict dup
			/CCRun known not
			exch/eCCRun known not
			ct_Level2? and or def
ct_Level2?
	{globaldict begin currentglobal true setglobal}
if
	/ct_AddStdCIDMap
		ct_Level2?
			{{
				mark
				Adobe_CoolType_Utility/@recognizeCIDFont currentdict put
					{
					((Hex)57 StartData
					 0615 1e27 2c39 1c60 d8a8 cc31 fe2b f6e0
					 7aa3 e541 e21c 60d8 a8c9 c3d0 6d9e 1c60
					 d8a8 c9c2 02d7 9a1c 60d8 a849 1c60 d8a8
					 cc36 74f4 1144 b13b 77)0()/SubFileDecode filter cvx exec
					}
				stopped
					{
					 cleartomark
					 Adobe_CoolType_Utility/@recognizeCIDFont get
					 countdictstack dup array dictstack
					 exch 1 sub -1 0
						 {
						 2 copy get 3 index eq
								{1 index length exch sub 1 sub{end}repeat exit}
								{pop}
						 ifelse
						 }
					 for
					 pop pop
					 Adobe_CoolType_Utility/@eexecStartData get eexec
					}
					{cleartomark}
				ifelse
			}}
			{{
				Adobe_CoolType_Utility/@eexecStartData get eexec
			}}
		ifelse bind def
userdict/cid_extensions known
dup{cid_extensions/cid_UpdateDB known and}if
	{
	 cid_extensions
	 begin
	/cid_GetCIDSystemInfo
		{
		 1 index type/stringtype eq
			{exch cvn exch}
		 if
		 cid_extensions
			 begin
			 dup load 2 index known
				{
				 2 copy
				 cid_GetStatusInfo
				 dup null ne
					{
					 1 index load
					 3 index get
					 dup null eq
						 {pop pop cid_UpdateDB}
						 {
						 exch
						 1 index/Created get eq
							 {exch pop exch pop}
							 {pop cid_UpdateDB}
						 ifelse
						 }
					 ifelse
					}
					{pop cid_UpdateDB}
				 ifelse
				}
				{cid_UpdateDB}
			 ifelse
			 end
		}bind def
	 end
	}
if
ct_Level2?
	{end setglobal}
if
	/ct_UseNativeCapability? systemdict/composefont known def
	/ct_MakeOCF 35 dict def
	/ct_Vars 25 dict def
	/ct_GlyphDirProcs 6 dict def
	/ct_BuildCharDict 15 dict dup
		begin
		/charcode 2 string def
		/dst_string 1500 string def
		/nullstring()def
		/usewidths? true def
		end def
	ct_Level2?{setglobal}{pop}ifelse
	ct_GlyphDirProcs
		begin
		/GetGlyphDirectory
			{
			systemdict/languagelevel known
				{pop/CIDFont findresource/GlyphDirectory get}
				{
				1 index/CIDFont findresource/GlyphDirectory
				get dup type/dicttype eq
					{
					dup dup maxlength exch length sub 2 index lt
						{
						dup length 2 index add dict copy 2 index
						/CIDFont findresource/GlyphDirectory 2 index put
						}
					if
					}
				if
				exch pop exch pop
				}
			ifelse
			+
			}def
		/+
			{
			systemdict/languagelevel known
				{
				currentglobal false setglobal
				3 dict begin
					/vm exch def
				}
				{1 dict begin}
			ifelse
			/$ exch def
			systemdict/languagelevel known
				{
				vm setglobal
				/gvm currentglobal def
				$ gcheck setglobal
				}
			if
			?{$ begin}if
			}def
		/?{$ type/dicttype eq}def
		/|{
			userdict/Adobe_CoolType_Data known
				{
			Adobe_CoolType_Data/AddWidths? known
				{
				 currentdict Adobe_CoolType_Data
					begin
					 begin
						AddWidths?
								{
								Adobe_CoolType_Data/CC 3 index put
								?{def}{$ 3 1 roll put}ifelse
								CC charcode exch 1 index 0 2 index 256 idiv put
								1 index exch 1 exch 256 mod put
								stringwidth 2 array astore
								currentfont/Widths get exch CC exch put
								}
								{?{def}{$ 3 1 roll put}ifelse}
							ifelse
					end
				end
				}
				{?{def}{$ 3 1 roll put}ifelse}	ifelse
				}
				{?{def}{$ 3 1 roll put}ifelse}
			ifelse
			}def
		/!
			{
			?{end}if
			systemdict/languagelevel known
				{gvm setglobal}
			if
			end
			}def
		/:{string currentfile exch readstring pop}executeonly def
		end
	ct_MakeOCF
		begin
		/ct_cHexEncoding
		[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
		/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
		/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
		/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
		/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
		/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
		/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
		/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
		/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
		/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
		/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
		/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
		/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
		/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
		/ct_CID_STR_SIZE 8000 def
		/ct_mkocfStr100 100 string def
		/ct_defaultFontMtx[.001 0 0 .001 0 0]def
		/ct_1000Mtx[1000 0 0 1000 0 0]def
		/ct_raise{exch cvx exch errordict exch get exec stop}bind def
		/ct_reraise
			{cvx $error/errorname get(Error: )print dup(						 )cvs print
					errordict exch get exec stop
			}bind def
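		%	ct_cvnsi: convert (string, offset, length) on the stack into the
		%	integer encoded by those bytes, big-endian.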
		/ct_cvnsi
			{
			1 index add 1 sub 1 exch 0 4 1 roll
				{
				2 index exch get
				exch 8 bitshift
				add
				}
			for
			exch pop
			}bind def
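		%	ct_GetInterval: copy (offset, length) bytes out of glyph data held
		%	as an array of ct_CID_STR_SIZE-byte strings, reassembling runs that
		%	cross string boundaries into dst_string.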
		/ct_GetInterval
			{
			Adobe_CoolType_Utility/ct_BuildCharDict get
				begin
				/dst_index 0 def
				dup dst_string length gt
					{dup string/dst_string exch def}
				if
				1 index ct_CID_STR_SIZE idiv
				/arrayIndex exch def
				2 index arrayIndex get
				2 index
				arrayIndex ct_CID_STR_SIZE mul
				sub
					{
					dup 3 index add 2 index length le
						{
						2 index getinterval
						dst_string dst_index 2 index putinterval
						length dst_index add/dst_index exch def
						exit
						}
						{
						1 index length 1 index sub
						dup 4 1 roll
						getinterval
						dst_string dst_index 2 index putinterval
						pop dup dst_index add/dst_index exch def
						sub
						/arrayIndex arrayIndex 1 add def
						2 index dup length arrayIndex gt
							 {arrayIndex get}
							 {
							 pop
							 exit
							 }
						ifelse
						0
						}
					ifelse
					}
				loop
				pop pop pop
				dst_string 0 dst_index getinterval
				end
			}bind def
		ct_Level2?
			{
			/ct_resourcestatus
			currentglobal mark true setglobal
				{/unknowninstancename/Category resourcestatus}
			stopped
				{cleartomark setglobal true}
				{cleartomark currentglobal not exch setglobal}
			ifelse
				{
					{
					mark 3 1 roll/Category findresource
						begin
						ct_Vars/vm currentglobal put
						({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
							{cleartomark false}
							{{3 2 roll pop true}{cleartomark false}ifelse}
						ifelse
						ct_Vars/vm get setglobal
						end
					}
				}
				{{resourcestatus}}
			ifelse bind def
			/CIDFont/Category ct_resourcestatus
				{pop pop}
				{
				currentglobal true setglobal
				/Generic/Category findresource
				dup length dict copy
				dup/InstanceType/dicttype put
				/CIDFont exch/Category defineresource pop
				setglobal
				}
			ifelse
			ct_UseNativeCapability?
				{
				/CIDInit/ProcSet findresource begin
				12 dict begin
				begincmap
				/CIDSystemInfo 3 dict dup begin
				 /Registry(Adobe)def
				 /Ordering(Identity)def
				 /Supplement 0 def
				end def
				/CMapName/Identity-H def
				/CMapVersion 1.000 def
				/CMapType 1 def
				1 begincodespacerange
				<0000><FFFF>
				endcodespacerange
				1 begincidrange
				<0000><FFFF>0
				endcidrange
				endcmap
				CMapName currentdict/CMap defineresource pop
				end
				end
				}
			if
			}
			{
			/ct_Category 2 dict begin
			/CIDFont 10 dict def
			/ProcSet	2 dict def
			currentdict
			end
			def
			/defineresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					dup dup maxlength exch length eq
						{
						dup length 10 add dict copy
						ct_Category 2 index 2 index put
						}
					if
					3 index 3 index put
					pop exch pop
					}
					{pop pop/defineresource/undefined ct_raise}
				ifelse
				}bind def
			/findresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index 2 copy known
						{get 3 1 roll pop pop}
						{pop pop/findresource/undefinedresource ct_raise}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/resourcestatus
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index known
					exch pop exch pop
						{
						0 -1 true
						}
						{
						false
						}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/ct_resourcestatus/resourcestatus load def
			}
		ifelse
		/ct_CIDInit 2 dict
			begin
			/ct_cidfont_stream_init
				{
					{
					dup(Binary)eq
						{
						pop
						null
						currentfile
						ct_Level2?
							{
								{cid_BYTE_COUNT()/SubFileDecode filter}
							stopped
								{pop pop pop}
							if
							}
						if
						/readstring load
						exit
						}
					if
					dup(Hex)eq
						{
						pop
						currentfile
						ct_Level2?
							{
								{null exch/ASCIIHexDecode filter/readstring}
							stopped
								{pop exch pop(>)exch/readhexstring}
							if
							}
							{(>)exch/readhexstring}
						ifelse
						load
						exit
						}
					if
					/StartData/typecheck ct_raise
					}
				loop
				cid_BYTE_COUNT ct_CID_STR_SIZE le
					{
					2 copy cid_BYTE_COUNT string exch exec
					pop
					1 array dup
					3 -1 roll
					0 exch put
					}
					{
					cid_BYTE_COUNT ct_CID_STR_SIZE div ceiling cvi
					dup array exch 2 sub 0 exch 1 exch
						{
						2 copy
						5 index
						ct_CID_STR_SIZE
						string
						6 index exec
						pop
						put
						pop
						}
					for
					2 index
					cid_BYTE_COUNT ct_CID_STR_SIZE mod string
					3 index exec
					pop
					1 index exch
					1 index length 1 sub
					exch put
					}
				ifelse
				cid_CIDFONT exch/GlyphData exch put
				2 index null eq
					{
					pop pop pop
					}
					{
					pop/readstring load
					1 string exch
						{
						3 copy exec
						pop
						dup length 0 eq
							{
							pop pop pop pop pop
							true exit
							}
						if
						4 index
						eq
							{
							pop pop pop pop
							false exit
							}
						if
						}
					loop
					pop
					}
				ifelse
				}bind def
			/StartData
				{
				mark
					{
					currentdict
					dup/FDArray get 0 get/FontMatrix get
					0 get 0.001 eq
						{
						dup/CDevProc known not
							{
							/CDevProc 1183615869 internaldict/stdCDevProc 2 copy known
								{get}
								{
								pop pop
								{pop pop pop pop pop 0 -1000 7 index 2 div 880}
								}
							ifelse
							def
							}
						if
						}
						{
						/CDevProc
							{
							 pop pop pop pop pop
							 0
							 1 cid_temp/cid_CIDFONT get
							/FDArray get 0 get
							/FontMatrix get 0 get div
							 7 index 2 div
							 1 index 0.88 mul
							}def
						}
					ifelse
					/cid_temp 15 dict def
					cid_temp
						begin
						/cid_CIDFONT exch def
						3 copy pop
						dup/cid_BYTE_COUNT exch def 0 gt
							{
							ct_cidfont_stream_init
							FDArray
								{
								/Private get
								dup/SubrMapOffset known
									{
									begin
									/Subrs SubrCount array def
									Subrs
									SubrMapOffset
									SubrCount
									SDBytes
									ct_Level2?
										{
										currentdict dup/SubrMapOffset undef
										dup/SubrCount undef
										/SDBytes undef
										}
									if
									end
									/cid_SD_BYTES exch def
									/cid_SUBR_COUNT exch def
									/cid_SUBR_MAP_OFFSET exch def
									/cid_SUBRS exch def
									cid_SUBR_COUNT 0 gt
										{
										GlyphData cid_SUBR_MAP_OFFSET cid_SD_BYTES ct_GetInterval
										0 cid_SD_BYTES ct_cvnsi
										0 1 cid_SUBR_COUNT 1 sub
											{
											exch 1 index
											1 add
											cid_SD_BYTES mul cid_SUBR_MAP_OFFSET add
											GlyphData exch cid_SD_BYTES ct_GetInterval
											0 cid_SD_BYTES ct_cvnsi
											cid_SUBRS 4 2 roll
											GlyphData exch
											4 index
											1 index
											sub
											ct_GetInterval
											dup length string copy put
											}
										for
										pop
										}
									if
									}
									{pop}
								ifelse
								}
							forall
							}
						if
						cleartomark pop pop
						end
					CIDFontName currentdict/CIDFont defineresource pop
					end end
					}
				stopped
					{cleartomark/StartData ct_reraise}
				if
				}bind def
			currentdict
			end def
		/ct_saveCIDInit
			{
			/CIDInit/ProcSet ct_resourcestatus
				{true}
				{/CIDInitC/ProcSet ct_resourcestatus}
			ifelse
				{
				pop pop
				/CIDInit/ProcSet findresource
				ct_UseNativeCapability?
					{pop null}
					{/CIDInit ct_CIDInit/ProcSet defineresource pop}
				ifelse
				}
				{/CIDInit ct_CIDInit/ProcSet defineresource pop null}
			ifelse
			ct_Vars exch/ct_oldCIDInit exch put
			}bind def
		/ct_restoreCIDInit
			{
			ct_Vars/ct_oldCIDInit get dup null ne
				{/CIDInit exch/ProcSet defineresource pop}
				{pop}
			ifelse
			}bind def
		/ct_BuildCharSetUp
			{
			1 index
				begin
				CIDFont
					begin
					Adobe_CoolType_Utility/ct_BuildCharDict get
						begin
						/ct_dfCharCode exch def
						/ct_dfDict exch def
						CIDFirstByte ct_dfCharCode add
						dup CIDCount ge
							{pop 0}
						if
						/cid exch def
							{
							GlyphDirectory cid 2 copy known
								{get}
								{pop pop nullstring}
							ifelse
							dup length FDBytes sub 0 gt
								{
								dup
								FDBytes 0 ne
									{0 FDBytes ct_cvnsi}
									{pop 0}
								ifelse
								/fdIndex exch def
								dup length FDBytes sub FDBytes exch getinterval
								/charstring exch def
								exit
								}
								{
								pop
								cid 0 eq
									{/charstring nullstring def exit}
								if
								/cid 0 def
								}
							ifelse
							}
						loop
			}def
		/ct_SetCacheDevice
			{
			0 0 moveto
			dup stringwidth
			3 -1 roll
			true charpath
			pathbbox
			0 -1000
			7 index 2 div 880
			setcachedevice2
			0 0 moveto
			}def
		/ct_CloneSetCacheProc
			{
			1 eq
				{
				stringwidth
				pop -2 div -880
				0 -1000 setcharwidth
				moveto
				}
				{
				usewidths?
					{
					currentfont/Widths get cid
					2 copy known
						{get exch pop aload pop}
						{pop pop stringwidth}
					ifelse
					}
					{stringwidth}
				ifelse
				setcharwidth
				0 0 moveto
				}
			ifelse
			}def
		/ct_Type3ShowCharString
			{
			ct_FDDict fdIndex 2 copy known
				{get}
				{
				currentglobal 3 1 roll
				1 index gcheck setglobal
				ct_Type1FontTemplate dup maxlength dict copy
					begin
					FDArray fdIndex get
					dup/FontMatrix 2 copy known
						{get}
						{pop pop ct_defaultFontMtx}
					ifelse
					/FontMatrix exch dup length array copy def
					/Private get
					/Private exch def
					/Widths rootfont/Widths get def
					/CharStrings 1 dict dup/.notdef
						<d841272cf18f54fc13>dup length string copy put def
					currentdict
					end
				/ct_Type1Font exch definefont
				dup 5 1 roll put
				setglobal
				}
			ifelse
			dup/CharStrings get 1 index/Encoding get
			ct_dfCharCode get charstring put
			rootfont/WMode 2 copy known
				{get}
				{pop pop 0}
			ifelse
			exch
			1000 scalefont setfont
			ct_str1 0 ct_dfCharCode put
			ct_str1 exch ct_dfSetCacheProc
			ct_SyntheticBold
				{
				currentpoint
				ct_str1 show
				newpath
				moveto
				ct_str1 true charpath
				ct_StrokeWidth setlinewidth
				stroke
				}
				{ct_str1 show}
			ifelse
			}def
		/ct_Type4ShowCharString
			{
			ct_dfDict ct_dfCharCode charstring
			FDArray fdIndex get
			dup/FontMatrix get dup ct_defaultFontMtx ct_matrixeq not
				{ct_1000Mtx matrix concatmatrix concat}
				{pop}
			ifelse
			/Private get
			Adobe_CoolType_Utility/ct_Level2? get not
				{
				ct_dfDict/Private
				3 -1 roll
					{put}
				1183615869 internaldict/superexec get exec
				}
			if
			1183615869 internaldict
			Adobe_CoolType_Utility/ct_Level2? get
				{1 index}
				{3 index/Private get mark 6 1 roll}
			ifelse
			dup/RunInt known
				{/RunInt get}
				{pop/CCRun}
			ifelse
			get exec
			Adobe_CoolType_Utility/ct_Level2? get not
				{cleartomark}
			if
			}bind def
		/ct_BuildCharIncremental
			{
				{
				Adobe_CoolType_Utility/ct_MakeOCF get begin
				ct_BuildCharSetUp
				ct_ShowCharString
				}
			stopped
				{stop}
			if
			end
			end
			end
			end
			}bind def
		/BaseFontNameStr(BF00)def
		/ct_Type1FontTemplate 14 dict
			begin
			/FontType 1 def
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/PaintType 0 def
			currentdict
			end def
		/BaseFontTemplate 11 dict
			begin
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/BuildChar/ct_BuildCharIncremental load def
			ct_Clone?
				{
				/FontType 3 def
				/ct_ShowCharString/ct_Type3ShowCharString load def
				/ct_dfSetCacheProc/ct_CloneSetCacheProc load def
				/ct_SyntheticBold false def
				/ct_StrokeWidth 1 def
				}
				{
				/FontType 4 def
				/Private 1 dict dup/lenIV 4 put def
				/CharStrings 1 dict dup/.notdef<d841272cf18f54fc13>put def
				/PaintType 0 def
				/ct_ShowCharString/ct_Type4ShowCharString load def
				}
			ifelse
			/ct_str1 1 string def
			currentdict
			end def
		/BaseFontDictSize BaseFontTemplate length 5 add def
		/ct_matrixeq
			{
			true 0 1 5
				{
				dup 4 index exch get exch 3 index exch get eq and
				dup not
					{exit}
				if
				}
			for
			exch pop exch pop
			}bind def
		/ct_makeocf
			{
			15 dict
				begin
				exch/WMode exch def
				exch/FontName exch def
				/FontType 0 def
				/FMapType 2 def
			dup/FontMatrix known
				{dup/FontMatrix get/FontMatrix exch def}
				{/FontMatrix matrix def}
			ifelse
				/bfCount 1 index/CIDCount get 256 idiv 1 add
					dup 256 gt{pop 256}if def
				/Encoding
					256 array 0 1 bfCount 1 sub{2 copy dup put pop}for
					bfCount 1 255{2 copy bfCount put pop}for
					def
				/FDepVector bfCount dup 256 lt{1 add}if array def
				BaseFontTemplate BaseFontDictSize dict copy
					begin
					/CIDFont exch def
					CIDFont/FontBBox known
						{CIDFont/FontBBox get/FontBBox exch def}
					if
					CIDFont/CDevProc known
						{CIDFont/CDevProc get/CDevProc exch def}
					if
					currentdict
					end
				BaseFontNameStr 3(0)putinterval
				0 1 bfCount dup 256 eq{1 sub}if
					{
					FDepVector exch
					2 index BaseFontDictSize dict copy
						begin
						dup/CIDFirstByte exch 256 mul def
						FontType 3 eq
							{/ct_FDDict 2 dict def}
						if
						currentdict
						end
					1 index 16
					BaseFontNameStr 2 2 getinterval cvrs pop
					BaseFontNameStr exch definefont
					put
					}
				for
				ct_Clone?
					{/Widths 1 index/CIDFont get/GlyphDirectory get length dict def}
				if
				FontName
				currentdict
				end
			definefont
			ct_Clone?
				{
				gsave
				dup 1000 scalefont setfont
				ct_BuildCharDict
					begin
					/usewidths? false def
					currentfont/Widths get
						begin
						exch/CIDFont get/GlyphDirectory get
							{
							pop
							dup charcode exch 1 index 0 2 index 256 idiv put
							1 index exch 1 exch 256 mod put
							stringwidth 2 array astore def
							}
						forall
						end
					/usewidths? true def
					end
				grestore
				}
				{exch pop}
			ifelse
			}bind def
		currentglobal true setglobal
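		%	ct_ComposeFont: with native composefont, make sure the named CMap
		%	resource exists (building one whose Ordering is parsed from the
		%	CMap name, defaulting to Identity) and compose; otherwise build an
		%	OCF via ct_makeocf.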
		/ct_ComposeFont
			{
			ct_UseNativeCapability?
				{				
				2 index/CMap ct_resourcestatus
					{pop pop exch pop}
					{
					/CIDInit/ProcSet findresource
						begin
						12 dict
							begin
							begincmap
							/CMapName 3 index def
							/CMapVersion 1.000 def
							/CMapType 1 def
							exch/WMode exch def
							/CIDSystemInfo 3 dict dup
								begin
								/Registry(Adobe)def
								/Ordering
								CMapName ct_mkocfStr100 cvs
								(Adobe-)search
									{
									pop pop
									(-)search
										{
										dup length string copy
										exch pop exch pop
										}
										{pop(Identity)}
									ifelse
									}
									{pop (Identity)}
								ifelse
								def
								/Supplement 0 def
								end def
							1 begincodespacerange
							<0000><FFFF>
							endcodespacerange
							1 begincidrange
							<0000><FFFF>0
							endcidrange
							endcmap
							CMapName currentdict/CMap defineresource pop
							end
						end
					}
				ifelse
				composefont
				}
				{
				3 2 roll pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
			setglobal
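		%	ct_MakeIdentity: ensure the named CMap exists, defining a two-byte
		%	<0000><FFFF> -> CID identity mapping when it does not, then
		%	composefont; without native support, fall back to ct_makeocf.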
		/ct_MakeIdentity
			{
			ct_UseNativeCapability?
				{
				1 index/CMap ct_resourcestatus
					{pop pop}
					{
					/CIDInit/ProcSet findresource begin
					12 dict begin
					begincmap
					/CMapName 2 index def
					/CMapVersion 1.000 def
					/CMapType 1 def
					/CIDSystemInfo 3 dict dup
						begin
						/Registry(Adobe)def
						/Ordering
						CMapName ct_mkocfStr100 cvs
						(Adobe-)search
							{
							pop pop
							(-)search
								{dup length string copy exch pop exch pop}
								{pop(Identity)}
							ifelse
							}
							{pop(Identity)}
						ifelse
						def
						/Supplement 0 def
						end def
					1 begincodespacerange
					<0000><FFFF>
					endcodespacerange
					1 begincidrange
					<0000><FFFF>0
					endcidrange
					endcmap
					CMapName currentdict/CMap defineresource pop
					end
					end
					}
				ifelse
				composefont
				}
				{
				exch pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
		currentdict readonly pop
		end
	end
%%EndResource
setglobal
%%BeginResource: procset Adobe_CoolType_Utility_T42 1.0 0
%%Copyright: Copyright 1987-2004 Adobe Systems Incorporated.
%%Version: 1.0 0
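%	T42: helpers for downloading Type 42 (TrueType-based) fonts. On
%	PostScript version 2015 and later, glyphs are stored in a
%	GlyphDirectory; on older interpreters the glyph data is written
%	directly into the sfnts strings.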
userdict/ct_T42Dict 15 dict put
ct_T42Dict begin
/Is2015?
{
 version
 cvi
 2015
 ge
}bind def
/AllocGlyphStorage
{
 Is2015?
 {	
	pop
 }
 {
	{string}forall
 }ifelse
}bind def
/Type42DictBegin
{
25 dict begin
 /FontName exch def
 /CharStrings 256 dict 
begin
	 /.notdef 0 def
	 currentdict 
end def
 /Encoding exch def
 /PaintType 0 def
 /FontType 42 def
 /FontMatrix[1 0 0 1 0 0]def
 4 array astore cvx/FontBBox exch def
 /sfnts
}bind def
/Type42DictEnd 
{
 currentdict dup/FontName get exch definefont end
ct_T42Dict exch
dup/FontName get exch put
}bind def
/RD{string currentfile exch readstring pop}executeonly def
/PrepFor2015
{
Is2015?
{		 
	/GlyphDirectory 
	 16
	 dict def
	 sfnts 0 get
	 dup
	 2 index
	(glyx)
	 putinterval
	 2 index 
	(locx)
	 putinterval
	 pop
	 pop
}
{
	 pop
	 pop
}ifelse			
}bind def
/AddT42Char
{
Is2015?
{
	/GlyphDirectory get 
	begin
	def
	end
	pop
	pop
}
{
	/sfnts get
	4 index
	get
	3 index
 2 index
	putinterval
	pop
	pop
	pop
	pop
}ifelse
}bind def
/T0AddT42Mtx2
{
/CIDFont findresource/Metrics2 get begin def end
}bind def
end
%%EndResource
currentglobal true setglobal
%%BeginFile: MMFauxFont.prc
%%Copyright: Copyright 1987-2001 Adobe Systems Incorporated. 
%%All Rights Reserved.
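% Adds a Euro glyph to the AdobeSansMM/AdobeSerifMM substitution fonts:
% the outline procedures at the end of this file are converted into
% glyph procs and spliced into a copy of the target font.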
userdict /ct_EuroDict 10 dict put
ct_EuroDict begin
/ct_CopyFont 
{
    { 1 index /FID ne {def} {pop pop} ifelse} forall
} def
/ct_GetGlyphOutline
{
   gsave
   initmatrix newpath
   exch findfont dup 
   length 1 add dict 
   begin 
		ct_CopyFont 
		/Encoding Encoding dup length array copy 
		dup
		4 -1 roll
		0 exch put   
		def
		currentdict
   end
   /ct_EuroFont exch definefont
   1000 scalefont setfont
   0 0 moveto
   [
       <00> stringwidth 
       <00> false charpath
       pathbbox
       [
       {/m cvx} {/l cvx} {/c cvx} {/cp cvx} pathforall
   grestore
   counttomark 8 add
}
def
/ct_MakeGlyphProc
{
   ] cvx
   /ct_PSBuildGlyph cvx
   ] cvx
} def
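% ct_PSBuildGlyph expects the glyph width, bounding box and a
% path-building proc on the stack; it sets the cache device (widening
% the bbox by StrokeWidth/2 for PaintType 2 outline fonts) and then
% fills or strokes the path.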
/ct_PSBuildGlyph 
{ 
 	gsave 
	8 -1 roll pop 
	7 1 roll 
        6 -2 roll ct_FontMatrix transform 6 2 roll
        4 -2 roll ct_FontMatrix transform 4 2 roll
        ct_FontMatrix transform 
	currentdict /PaintType 2 copy known {get 2 eq}{pop pop false} ifelse  
	dup  9 1 roll 
	{  
		currentdict /StrokeWidth 2 copy known  
		{   
			get 2 div   
			0 ct_FontMatrix dtransform pop
			5 1 roll  
			4 -1 roll 4 index sub   
			4 1 roll   
			3 -1 roll 4 index sub  
			3 1 roll   
			exch 4 index add exch  
			4 index add  
			5 -1 roll pop  
		}  
		{	 
			pop pop 
		}  
		ifelse  
	}       
    if  
	setcachedevice  
        ct_FontMatrix concat
        ct_PSPathOps begin 
		exec 
	end 
	{  
		currentdict /StrokeWidth 2 copy known  
			{ get }  
			{ pop pop 0 }  
  	    ifelse  
		setlinewidth stroke  
	}  
	{   
	    fill  
	}  
	ifelse  
    grestore
} def 
/ct_PSPathOps 4 dict dup begin 
	/m {moveto} def 
	/l {lineto} def 
	/c {curveto} def 
	/cp {closepath} def 
end 
def 
/ct_matrix1000 [1000 0 0 1000 0 0] def
/ct_AddGlyphProc  
{
   2 index findfont dup length 4 add dict 
   begin 
	ct_CopyFont 
	/CharStrings CharStrings dup length 1 add dict copy
      begin
         3 1 roll def  
         currentdict 
      end 
      def
      /ct_FontMatrix ct_matrix1000 FontMatrix matrix concatmatrix def
      /ct_PSBuildGlyph /ct_PSBuildGlyph load def
      /ct_PSPathOps /ct_PSPathOps load def
      currentdict
   end
   definefont pop
}
def
systemdict /languagelevel known
{
	/ct_AddGlyphToPrinterFont {
		2 copy
		ct_GetGlyphOutline 3 add -1 roll restore 
		ct_MakeGlyphProc 
		ct_AddGlyphProc
	} def
}
{
	/ct_AddGlyphToPrinterFont {
	    pop pop restore
		Adobe_CTFauxDict /$$$FONTNAME get
		/Euro
		Adobe_CTFauxDict /$$$SUBSTITUTEBASE get
		ct_EuroDict exch get
		ct_AddGlyphProc
	} def
} ifelse
/AdobeSansMM 
{ 
556 0 24 -19 541 703 
	{ 
	541 628 m 
	510 669 442 703 354 703 c 
	201 703 117 607 101 444 c 
	50 444 l 
	25 372 l 
	97 372 l 
	97 301 l 
	49 301 l 
	24 229 l 
	103 229 l 
	124 67 209 -19 350 -19 c 
	435 -19 501 25 509 32 c 
	509 131 l 
	492 105 417 60 343 60 c 
	267 60 204 127 197 229 c 
	406 229 l 
	430 301 l 
	191 301 l 
	191 372 l 
	455 372 l 
	479 444 l 
	194 444 l 
	201 531 245 624 348 624 c 
	433 624 484 583 509 534 c 
	cp 
	556 0 m 
	}
ct_PSBuildGlyph
} def
/AdobeSerifMM 
{ 
500 0 10 -12 484 692 
	{ 
	347 298 m 
	171 298 l 
	170 310 170 322 170 335 c 
	170 362 l 
	362 362 l 
	374 403 l 
	172 403 l 
	184 580 244 642 308 642 c 
	380 642 434 574 457 457 c 
	481 462 l 
	474 691 l 
	449 691 l 
	433 670 429 657 410 657 c 
	394 657 360 692 299 692 c 
	204 692 94 604 73 403 c 
	22 403 l 
	10 362 l 
	70 362 l 
	69 352 69 341 69 330 c 
	69 319 69 308 70 298 c 
	22 298 l 
	10 257 l 
	73 257 l 
	97 57 216 -12 295 -12 c 
	364 -12 427 25 484 123 c 
	458 142 l 
	425 101 384 37 316 37 c 
	256 37 189 84 173 257 c 
	335 257 l 
	cp 
	500 0 m 
	} 
ct_PSBuildGlyph 
} def 
end		
%%EndFile
setglobal
Adobe_CoolType_Core begin /$Oblique SetSubstituteStrategy end
%%BeginResource: procset Adobe_AGM_Image 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
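+%	Adobe_AGM_Image: replacements for image, imagemask and colorimage
+%	that handle host- and in-RIP separations, spot colors, type 3
+%	masked images, and DeviceN/Indexed sources.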
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
+userdict/Adobe_AGM_Image 71 dict dup begin put
+/Adobe_AGM_Image_Id/Adobe_AGM_Image_1.0_0 def
+/nd{
+	null def
+}bind def
+/AGMIMG_&image nd
+/AGMIMG_&colorimage nd
+/AGMIMG_&imagemask nd
+/AGMIMG_mbuf()def
+/AGMIMG_ybuf()def
+/AGMIMG_kbuf()def
+/AGMIMG_c 0 def
+/AGMIMG_m 0 def
+/AGMIMG_y 0 def
+/AGMIMG_k 0 def
+/AGMIMG_tmp nd
+/AGMIMG_imagestring0 nd
+/AGMIMG_imagestring1 nd
+/AGMIMG_imagestring2 nd
+/AGMIMG_imagestring3 nd
+/AGMIMG_imagestring4 nd
+/AGMIMG_imagestring5 nd
+/AGMIMG_cnt nd
+/AGMIMG_fsave nd
+/AGMIMG_colorAry nd
+/AGMIMG_override nd
+/AGMIMG_name nd
+/AGMIMG_maskSource nd
+/AGMIMG_flushfilters nd
+/invert_image_samples nd
+/knockout_image_samples	nd
+/img nd
+/sepimg nd
+/devnimg nd
+/idximg nd
+/ds
+{
+	Adobe_AGM_Core begin
+	Adobe_AGM_Image begin
+	/AGMIMG_&image systemdict/image get def
+	/AGMIMG_&imagemask systemdict/imagemask get def
+	/colorimage where{
+		pop
+		/AGMIMG_&colorimage/colorimage ldf
+	}if
+	end
+	end
+}def
+/ps
+{
+	Adobe_AGM_Image begin
+	/AGMIMG_ccimage_exists{/customcolorimage where 
+		{
+			pop
+			/Adobe_AGM_OnHost_Seps where
+			{
+			pop false
+			}{
+			/Adobe_AGM_InRip_Seps where
+				{
+				pop false
+				}{
+					true
+				}ifelse
+			}ifelse
+			}{
+			false
+		}ifelse 
+	}bdf
+	level2{
+		/invert_image_samples
+		{
+			Adobe_AGM_Image/AGMIMG_tmp Decode length ddf
+			/Decode[Decode 1 get Decode 0 get]def
+		}def
+		/knockout_image_samples
+		{
+			Operator/imagemask ne{
+				/Decode[1 1]def
+			}if
+		}def
+	}{	
+		/invert_image_samples
+		{
+			{1 exch sub}currenttransfer addprocs settransfer
+		}def
+		/knockout_image_samples
+		{
+			{pop 1}currenttransfer addprocs settransfer
+		}def
+	}ifelse
+	/img/imageormask ldf
+	/sepimg/sep_imageormask ldf
+	/devnimg/devn_imageormask ldf
+	/idximg/indexed_imageormask ldf
+	/_ctype 7 def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
+}def
+/pt
+{
+	end
+}def
+/dt
+{
+}def
+/AGMIMG_flushfilters
+{
+	dup type/arraytype ne
+		{1 array astore}if
+	dup 0 get currentfile ne
+		{dup 0 get flushfile}if
+		{
+		dup type/filetype eq
+			{
+			dup status 1 index currentfile ne and
+				{closefile}
+				{pop}
+			ifelse
+			}{pop}ifelse
+		}forall
+}def
+/AGMIMG_init_common
+{
+	currentdict/T known{/ImageType/T ldf currentdict/T undef}if
+	currentdict/W known{/Width/W ldf currentdict/W undef}if
+	currentdict/H known{/Height/H ldf currentdict/H undef}if
+	currentdict/M known{/ImageMatrix/M ldf currentdict/M undef}if
+	currentdict/BC known{/BitsPerComponent/BC ldf currentdict/BC undef}if
+	currentdict/D known{/Decode/D ldf currentdict/D undef}if
+	currentdict/DS known{/DataSource/DS ldf currentdict/DS undef}if
+	currentdict/O known{
+		/Operator/O load 1 eq{
+			/imagemask
+		}{
+			/O load 2 eq{
+				/image 
+			}{
+				/colorimage
+			}ifelse
+		}ifelse
+		def
+		currentdict/O undef
+	}if
+	currentdict/HSCI known{/HostSepColorImage/HSCI ldf currentdict/HSCI undef}if
+	currentdict/MD known{/MultipleDataSources/MD ldf currentdict/MD undef}if
+	currentdict/I known{/Interpolate/I ldf currentdict/I undef}if
+	currentdict/SI known{/SkipImageProc/SI ldf currentdict/SI undef}if
+	/DataSource load xcheck not{
+		DataSource type/arraytype eq{
+			DataSource 0 get type/filetype eq{
+				/_Filters DataSource def
+				currentdict/MultipleDataSources known not{
+					/DataSource DataSource dup length 1 sub get def 
+				}if
+			}if
+		}if
+		currentdict/MultipleDataSources known not{
+			/MultipleDataSources DataSource type/arraytype eq{
+				DataSource length 1 gt
+			}
+			{false}ifelse def
+		}if
+	}if
+	/NComponents Decode length 2 div def
+	currentdict/SkipImageProc known not{/SkipImageProc{false}def}if
+}bdf
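+%	imageormask_sys: paint through the saved system operators
+%	(AGMIMG_&image / AGMIMG_&imagemask), bypassing the overrides below.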
+/imageormask_sys
+{
+	begin
+		AGMIMG_init_common
+		save mark
+		level2{
+			currentdict
+			Operator/imagemask eq{
+				AGMIMG_&imagemask
+			}{
+				use_mask{
+					process_mask AGMIMG_&image
+				}{
+					AGMIMG_&image
+				}ifelse
+			}ifelse
+		}{
+			Width Height
+			Operator/imagemask eq{
+				Decode 0 get 1 eq Decode 1 get 0 eq	and
+				ImageMatrix/DataSource load
+				AGMIMG_&imagemask
+			}{
+				BitsPerComponent ImageMatrix/DataSource load
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+		cleartomark restore
+	end
+}def
+/overprint_plate
+{
+	currentoverprint{
+		0 get dup type/nametype eq{
+			dup/DeviceGray eq{
+				pop AGMCORE_black_plate not
+			}{
+				/DeviceCMYK eq{
+					AGMCORE_is_cmyk_sep not
+				}if
+			}ifelse
+		}{
+			false exch
+			{
+				 AGMOHS_sepink eq or
+			}forall
+			not
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}def
+/process_mask
+{
+	level3{
+		dup begin
+		/ImageType 1 def
+		end
+		4 dict begin
+			/DataDict exch def
+			/ImageType 3 def
+			/InterleaveType 3 def
+			/MaskDict 9 dict begin
+				/ImageType 1 def
+				/Width DataDict dup/MaskWidth known{/MaskWidth}{/Width}ifelse get def
+				/Height DataDict dup/MaskHeight known{/MaskHeight}{/Height}ifelse get def
+				/ImageMatrix[Width 0 0 Height neg 0 Height]def
+				/NComponents 1 def
+				/BitsPerComponent 1 def
+				/Decode DataDict dup/MaskD known{/MaskD}{[1 0]}ifelse get def
+				/DataSource Adobe_AGM_Core/AGMIMG_maskSource get def
+			currentdict end def
+		currentdict end
+	}if
+}def
+/use_mask
+{
+	dup/Mask known	{dup/Mask get}{false}ifelse
+}def
+/imageormask
+{
+	begin
+		AGMIMG_init_common
+		SkipImageProc{
+			currentdict consumeimagedata
+		}
+		{
+			save mark
+			level2 AGMCORE_host_sep not and{
+				currentdict
+				Operator/imagemask eq DeviceN_PS2 not and{
+					imagemask
+				}{
+					AGMCORE_in_rip_sep currentoverprint and currentcolorspace 0 get/DeviceGray eq and{
+						[/Separation/Black/DeviceGray{}]setcolorspace
+						/Decode[Decode 1 get Decode 0 get]def
+					}if
+					use_mask{
+						process_mask image
+					}{
+						DeviceN_NoneName DeviceN_PS2 Indexed_DeviceN level3 not and or or AGMCORE_in_rip_sep and 
+						{
+							Names convert_to_process not{
+								2 dict begin
+								/imageDict xdf
+								/names_index 0 def
+								gsave
+								imageDict write_image_file{
+									Names{
+										dup(None)ne{
+											[/Separation 3 -1 roll/DeviceGray{1 exch sub}]setcolorspace
+											Operator imageDict read_image_file
+											names_index 0 eq{true setoverprint}if
+											/names_index names_index 1 add def
+										}{
+											pop
+										}ifelse
+									}forall
+									close_image_file
+								}if
+								grestore
+								end
+							}{
+								Operator/imagemask eq{
+									imagemask
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/imagemask eq{
+								imagemask
+							}{
+								image
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+			}{
+				Width Height
+				Operator/imagemask eq{
+					Decode 0 get 1 eq Decode 1 get 0 eq	and
+					ImageMatrix/DataSource load
+					/Adobe_AGM_OnHost_Seps where{
+						pop imagemask
+					}{
+						currentgray 1 ne{
+							currentdict imageormask_sys
+						}{
+							currentoverprint not{
+								1 AGMCORE_&setgray
+								currentdict imageormask_sys
+							}{
+								currentdict ignoreimagedata
+							}ifelse				 		
+						}ifelse
+					}ifelse
+				}{
+					BitsPerComponent ImageMatrix 
+					MultipleDataSources{
+						0 1 NComponents 1 sub{
+							DataSource exch get
+						}for
+					}{
+						/DataSource load
+					}ifelse
+					Operator/colorimage eq{
+						AGMCORE_host_sep{
+							MultipleDataSources level2 or NComponents 4 eq and{
+								AGMCORE_is_cmyk_sep{
+									MultipleDataSources{
+										/DataSource DataSource 0 get xcheck
+											{
+											[
+											DataSource 0 get/exec cvx
+											DataSource 1 get/exec cvx
+											DataSource 2 get/exec cvx
+											DataSource 3 get/exec cvx
+											/AGMCORE_get_ink_data cvx
+											]cvx
+											}{
+											DataSource aload pop AGMCORE_get_ink_data
+											}ifelse def
+									}{
+										/DataSource 
+										Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+										/DataSource load
+										filter_cmyk 0()/SubFileDecode filter def
+									}ifelse
+									/Decode[Decode 0 get Decode 1 get]def
+									/MultipleDataSources false def
+									/NComponents 1 def
+									/Operator/image def
+									invert_image_samples
+						 			1 AGMCORE_&setgray
+									currentdict imageormask_sys
+								}{
+									currentoverprint not Operator/imagemask eq and{
+ 			 							1 AGMCORE_&setgray
+ 			 							currentdict imageormask_sys
+ 			 						}{
+ 			 							currentdict ignoreimagedata
+ 			 						}ifelse
+								}ifelse
+							}{	
+								MultipleDataSources NComponents AGMIMG_&colorimage						
+							}ifelse
+						}{
+							true NComponents colorimage
+						}ifelse
+					}{
+						Operator/image eq{
+							AGMCORE_host_sep{
+								/DoImage true def
+								currentdict/HostSepColorImage known{HostSepColorImage not}{false}ifelse
+								{
+									AGMCORE_black_plate not Operator/imagemask ne and{
+										/DoImage false def
+										currentdict ignoreimagedata
+					 				}if
+								}if
+						 		1 AGMCORE_&setgray
+								DoImage
+									{currentdict imageormask_sys}if
+							}{
+								use_mask{
+									process_mask image
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/knockout eq{
+								pop pop pop pop pop
+								currentcolorspace overprint_plate not{
+									knockout_unitsq
+								}if
+							}if
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			cleartomark restore
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
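+%	sep_imageormask: image a Separation-space source -- via
+%	customcolorimage on Level 1, a [/Separation ...] color space on
+%	Level 2, or per-plate tint/knockout handling when producing
+%	separations.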
+/sep_imageormask
+{
+ 	/sep_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_avoid_L2_sep_space{
+			/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+		}if
+ 		AGMIMG_ccimage_exists 
+		MappedCSA 0 get/DeviceCMYK eq and
+		currentdict/Components known and 
+		Name()ne and 
+		Name(All)ne and 
+		Operator/image eq and
+		AGMCORE_producing_seps not and
+		level2 not and
+		{
+			Width Height BitsPerComponent ImageMatrix 
+			[
+			/DataSource load/exec cvx
+			{
+				0 1 2 index length 1 sub{
+					1 index exch
+					2 copy get 255 xor put
+				}for
+			}/exec cvx
+			]cvx bind
+			MappedCSA 0 get/DeviceCMYK eq{
+				Components aload pop
+			}{
+				0 0 0 Components aload pop 1 exch sub
+			}ifelse
+			Name findcmykcustomcolor
+			customcolorimage
+		}{
+			AGMCORE_producing_seps not{
+				level2{
+ 					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne AGMCORE_avoid_L2_sep_space not and currentcolorspace 0 get/Separation ne and{
+						[/Separation Name MappedCSA sep_proc_name exch dup 0 get 15 string cvs(/Device)anchorsearch{pop pop 0 get}{pop}ifelse exch load]setcolorspace_opt
+						/sep_tint AGMCORE_gget setcolor
+					}if
+					currentdict imageormask
+				}{
+					currentdict
+					Operator/imagemask eq{
+						imageormask
+					}{
+						sep_imageormask_lev1
+					}ifelse
+				}ifelse
+ 			}{
+				AGMCORE_host_sep{
+					Operator/knockout eq{
+						currentdict/ImageMatrix get concat
+						knockout_unitsq
+					}{
+						currentgray 1 ne{
+ 							AGMCORE_is_cmyk_sep Name(All)ne and{
+ 								level2{
+ 									Name AGMCORE_IsSeparationAProcessColor 
+ 									{
+ 										Operator/imagemask eq{
+ 											//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ 												/sep_tint AGMCORE_gget 1 exch sub AGMCORE_&setcolor
+ 											}if
+ 										}{
+											invert_image_samples
+ 										}ifelse
+	 								}{
+	 									//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+	 										[/Separation Name[/DeviceGray]
+	 										{
+	 											sep_colorspace_proc AGMCORE_get_ink_data
+												1 exch sub
+	 										}bind
+											]AGMCORE_&setcolorspace
+											/sep_tint AGMCORE_gget AGMCORE_&setcolor
+										}if
+ 									}ifelse
+ 									currentdict imageormask_sys
+	 							}{
+	 								currentdict
+									Operator/imagemask eq{
+										imageormask_sys
+									}{
+										sep_image_lev1_sep
+									}ifelse
+	 							}ifelse
+ 							}{
+ 								Operator/imagemask ne{
+									invert_image_samples
+ 								}if
+		 						currentdict imageormask_sys
+ 							}ifelse
+ 						}{
+ 							currentoverprint not Name(All)eq or Operator/imagemask eq and{
+								currentdict imageormask_sys 
+								}{
+								currentoverprint not
+									{
+ 									gsave 
+ 									knockout_unitsq
+ 									grestore
+									}if
+								currentdict consumeimagedata 
+		 					}ifelse
+ 						}ifelse
+		 			}ifelse
+ 				}{
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+						currentcolorspace 0 get/Separation ne{
+							[/Separation Name MappedCSA sep_proc_name exch 0 get exch load]setcolorspace_opt
+							/sep_tint AGMCORE_gget setcolor
+						}if
+					}if
+					currentoverprint 
+					MappedCSA 0 get/DeviceCMYK eq and 
+					Name AGMCORE_IsSeparationAProcessColor not and
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{Name inRip_spot_has_ink not and}{false}ifelse 
+					Name(All)ne and{
+						imageormask_l2_overprint
+					}{
+						currentdict imageormask
+ 					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+}def
+/colorSpaceElemCnt
+{
+	mark currentcolor counttomark dup 2 add 1 roll cleartomark
+}bdf
+/devn_sep_datasource
+{
+	1 dict begin
+	/dataSource xdf
+	[
+		0 1 dataSource length 1 sub{
+			dup currentdict/dataSource get/exch cvx/get cvx/exec cvx
+			/exch cvx names_index/ne cvx[/pop cvx]cvx/if cvx
+		}for
+	]cvx bind
+	end
+}bdf		
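+%	devn_alt_datasource: build a DataSource that reads DeviceN samples,
+%	pushes them through the tint transform (convProc), and emits the
+%	converted components in the alternate color space.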
+/devn_alt_datasource
+{
+	11 dict begin
+	/convProc xdf
+	/origcolorSpaceElemCnt xdf
+	/origMultipleDataSources xdf
+	/origBitsPerComponent xdf
+	/origDecode xdf
+	/origDataSource xdf
+	/dsCnt origMultipleDataSources{origDataSource length}{1}ifelse def
+	/DataSource origMultipleDataSources
+		{
+			[
+			BitsPerComponent 8 idiv origDecode length 2 idiv mul string
+			0 1 origDecode length 2 idiv 1 sub
+				{
+				dup 7 mul 1 add index exch dup BitsPerComponent 8 idiv mul exch
+				origDataSource exch get 0()/SubFileDecode filter
+				BitsPerComponent 8 idiv string/readstring cvx/pop cvx/putinterval cvx
+				}for 
+			]bind cvx
+		}{origDataSource}ifelse 0()/SubFileDecode filter def		
+	[
+		origcolorSpaceElemCnt string
+		0 2 origDecode length 2 sub
+			{
+			dup origDecode exch get dup 3 -1 roll 1 add origDecode exch get exch sub 2 BitsPerComponent exp 1 sub div
+			1 BitsPerComponent 8 idiv{DataSource/read cvx/not cvx{0}/if cvx/mul cvx}repeat/mul cvx/add cvx
+			}for
+		/convProc load/exec cvx
+		origcolorSpaceElemCnt 1 sub -1 0
+			{
+			/dup cvx 2/add cvx/index cvx
+			3 1/roll cvx/exch cvx 255/mul cvx/cvi cvx/put cvx
+			}for
+	]bind cvx 0()/SubFileDecode filter
+	end
+}bdf
+/devn_imageormask
+{
+ 	/devicen_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	2 dict begin
+	dup
+	/srcDataStrs[3 -1 roll begin
+		AGMIMG_init_common
+		currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+		{
+			Width Decode length 2 div mul cvi
+			{
+				dup 65535 gt{1 add 2 div cvi}{exit}ifelse
+			}loop
+			string
+		}repeat
+		end]def
+	/dstDataStr srcDataStrs 0 get length string def
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_producing_seps not{
+			level3 not{
+				Operator/imagemask ne{
+					/DataSource[[
+						DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+						colorSpaceElemCnt/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+						devn_alt_datasource 1/string cvx/readstring cvx/pop cvx]cvx colorSpaceElemCnt 1 sub{dup}repeat]def				
+					/MultipleDataSources true def
+					/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				}if
+			}if
+			currentdict imageormask
+ 		}{
+			AGMCORE_host_sep{
+				Names convert_to_process{
+					CSA get_csa_by_name 0 get/DeviceCMYK eq{
+						/DataSource
+							Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+							DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+							4/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+							devn_alt_datasource
+						filter_cmyk 0()/SubFileDecode filter def
+						/MultipleDataSources false def
+						/Decode[1 0]def
+						/DeviceGray setcolorspace
+			 			currentdict imageormask_sys
+ 					}{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate{
+							/DataSource
+								DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+								CSA get_csa_by_name 0 get/DeviceRGB eq{3}{1}ifelse/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+								devn_alt_datasource
+							/MultipleDataSources false def
+							/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				 			currentdict imageormask_sys
+				 		}{
+	 						gsave 
+	 						knockout_unitsq
+	 						grestore
+							currentdict consumeimagedata 
+						}ifelse
+ 					}ifelse
+				}
+				{	
+					/devicen_colorspace_dict AGMCORE_gget/names_index known{
+	 					Operator/imagemask ne{
+	 						MultipleDataSources{
+		 						/DataSource[DataSource devn_sep_datasource/exec cvx]cvx def
+								/MultipleDataSources false def
+	 						}{
+								/DataSource/DataSource load dstDataStr srcDataStrs 0 get filter_devn def
+	 						}ifelse
+							invert_image_samples
+	 					}if
+			 			currentdict imageormask_sys
+	 				}{
+	 					currentoverprint not Operator/imagemask eq and{
+							currentdict imageormask_sys 
+							}{
+							currentoverprint not
+								{
+	 							gsave 
+	 							knockout_unitsq
+	 							grestore
+								}if
+							currentdict consumeimagedata 
+			 			}ifelse
+	 				}ifelse
+	 			}ifelse
+ 			}{
+				currentdict imageormask
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+	end
+}def
+/imageormask_l2_overprint
+{
+	currentdict
+	currentcmykcolor add add add 0 eq{
+		currentdict consumeimagedata
+	}{
+		level3{			
+			currentcmykcolor 
+			/AGMIMG_k xdf 
+			/AGMIMG_y xdf 
+			/AGMIMG_m xdf 
+			/AGMIMG_c xdf
+			Operator/imagemask eq{
+				[/DeviceN[
+				AGMIMG_c 0 ne{/Cyan}if
+				AGMIMG_m 0 ne{/Magenta}if
+				AGMIMG_y 0 ne{/Yellow}if
+				AGMIMG_k 0 ne{/Black}if
+				]/DeviceCMYK{}]setcolorspace
+				AGMIMG_c 0 ne{AGMIMG_c}if
+				AGMIMG_m 0 ne{AGMIMG_m}if
+				AGMIMG_y 0 ne{AGMIMG_y}if
+				AGMIMG_k 0 ne{AGMIMG_k}if
+				setcolor			
+			}{	
+				/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+				[/Indexed 				
+					[
+						/DeviceN[
+							AGMIMG_c 0 ne{/Cyan}if
+							AGMIMG_m 0 ne{/Magenta}if
+							AGMIMG_y 0 ne{/Yellow}if
+							AGMIMG_k 0 ne{/Black}if
+						]
+						/DeviceCMYK{
+							AGMIMG_k 0 eq{0}if
+							AGMIMG_y 0 eq{0 exch}if
+							AGMIMG_m 0 eq{0 3 1 roll}if
+							AGMIMG_c 0 eq{0 4 1 roll}if						
+						}
+					]
+					255
+					{
+						255 div 
+						mark exch
+						dup	dup dup
+						AGMIMG_k 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 1 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_y 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 2 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_m 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 3 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_c 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						counttomark 1 add -1 roll pop
+					}
+				]setcolorspace
+			}ifelse
+			imageormask_sys
+		}{
+	write_image_file{
+		currentcmykcolor
+		0 ne{
+			[/Separation/Black/DeviceGray{}]setcolorspace
+			gsave
+			/Black
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 1 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Yellow/DeviceGray{}]setcolorspace
+			gsave
+			/Yellow
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 2 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Magenta/DeviceGray{}]setcolorspace
+			gsave
+			/Magenta
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 3 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Cyan/DeviceGray{}]setcolorspace
+			gsave
+			/Cyan 
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+				close_image_file
+			}{
+				imageormask
+			}ifelse
+		}ifelse
+	}ifelse
+}def
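+%	indexed_imageormask: image an Indexed-space source, remapping the
+%	lookup table to single-plate gray when producing host separations.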
+/indexed_imageormask
+{
+	begin
+		AGMIMG_init_common
+		save mark 
+ 		currentdict
+ 		AGMCORE_host_sep{
+			Operator/knockout eq{
+				/indexed_colorspace_dict AGMCORE_gget dup/CSA known{
+					/CSA get get_csa_by_name
+				}{
+					/Names get
+				}ifelse
+				overprint_plate not{
+					knockout_unitsq
+				}if
+			}{
+				Indexed_DeviceN{
+					/devicen_colorspace_dict AGMCORE_gget dup/names_index known exch/Names get convert_to_process or{
+			 			indexed_image_lev2_sep
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}{
+		 			AGMCORE_is_cmyk_sep{
+						Operator/imagemask eq{
+							imageormask_sys
+						}{
+							level2{
+								indexed_image_lev2_sep
+							}{
+								indexed_image_lev1_sep
+							}ifelse
+						}ifelse
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}ifelse
+			}ifelse
+ 		}{
+			level2{
+				Indexed_DeviceN{
+					/indexed_colorspace_dict AGMCORE_gget begin
+				}{
+					/indexed_colorspace_dict AGMCORE_gget dup null ne
+					{
+						begin
+						currentdict/CSDBase known{CSDBase/CSD get_res/MappedCSA get}{CSA}ifelse
+						get_csa_by_name 0 get/DeviceCMYK eq ps_level 3 ge and ps_version 3015.007 lt and
+						AGMCORE_in_rip_sep and{
+							[/Indexed[/DeviceN[/Cyan/Magenta/Yellow/Black]/DeviceCMYK{}]HiVal Lookup]
+							setcolorspace
+						}if
+						end
+					}
+					{pop}ifelse
+				}ifelse
+				imageormask
+				Indexed_DeviceN{
+					end
+				}if
+			}{
+				Operator/imagemask eq{
+					imageormask
+				}{
+					indexed_imageormask_lev1
+				}ifelse
+			}ifelse
+ 		}ifelse
+		cleartomark restore
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
+/indexed_image_lev2_sep
+{
+	/indexed_colorspace_dict AGMCORE_gget begin
+	begin
+		Indexed_DeviceN not{
+			currentcolorspace 
+			dup 1/DeviceGray put
+			dup 3
+			currentcolorspace 2 get 1 add string
+			0 1 2 3 AGMCORE_get_ink_data 4 currentcolorspace 3 get length 1 sub
+			{
+			dup 4 idiv exch currentcolorspace 3 get exch get 255 exch sub 2 index 3 1 roll put
+			}for 
+			put	setcolorspace
+		}if
+		currentdict 
+		Operator/imagemask eq{
+			AGMIMG_&imagemask
+		}{
+			use_mask{
+				process_mask AGMIMG_&image
+			}{
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+	end end
+}def
+ /OPIimage
+ {
+ 	dup type/dicttype ne{
+ 		10 dict begin
+ 			/DataSource xdf
+ 			/ImageMatrix xdf
+ 			/BitsPerComponent xdf
+ 			/Height xdf
+ 			/Width xdf
+ 			/ImageType 1 def
+ 			/Decode[0 1 def]
+ 			currentdict
+ 		end
+ 	}if
+ 	dup begin
+ 		/NComponents 1 cdndf
+ 		/MultipleDataSources false cdndf
+ 		/SkipImageProc{false}cdndf
+ 		/Decode[
+ 				0 
+ 				currentcolorspace 0 get/Indexed eq{
+ 					2 BitsPerComponent exp 1 sub
+ 				}{
+ 					1
+ 				}ifelse
+ 		]cdndf
+ 		/Operator/image cdndf
+ 	end
+ 	/sep_colorspace_dict AGMCORE_gget null eq{
+ 		imageormask
+ 	}{
+ 		gsave
+ 		dup begin invert_image_samples end
+ 		sep_imageormask
+ 		grestore
+ 	}ifelse
+ }def
+/cachemask_level2
+{
+	3 dict begin
+	/LZWEncode filter/WriteFilter xdf
+	/readBuffer 256 string def
+	/ReadFilter
+		currentfile
+		0(%EndMask)/SubFileDecode filter
+		/ASCII85Decode filter
+		/RunLengthDecode filter
+	def
+	{
+		ReadFilter readBuffer readstring exch
+		WriteFilter exch writestring
+		not{exit}if
+	}loop
+	WriteFilter closefile
+	end
+}def
+/spot_alias
+{
+	/mapto_sep_imageormask 
+	{
+		dup type/dicttype ne{
+			12 dict begin
+				/ImageType 1 def
+				/DataSource xdf
+				/ImageMatrix xdf
+				/BitsPerComponent xdf
+				/Height xdf
+				/Width xdf
+				/MultipleDataSources false def
+		}{
+			begin
+		}ifelse
+				/Decode[/customcolor_tint AGMCORE_gget 0]def
+				/Operator/image def
+				/SkipImageProc{false}def
+				currentdict 
+			end
+		sep_imageormask
+	}bdf
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_colorAry xddf
+		/customcolor_tint AGMCORE_gget
+		<<
+			/Name AGMIMG_colorAry 4 get
+			/CSA[/DeviceCMYK]
+			/TintMethod/Subtractive
+			/TintProc null
+			/MappedCSA null
+			/NComponents 4 
+			/Components[AGMIMG_colorAry aload pop pop]
+		>>
+		setsepcolorspace
+		mapto_sep_imageormask
+	}ndf
+	Adobe_AGM_Image/AGMIMG_&customcolorimage/customcolorimage load put
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_override false put
+		current_spot_alias{dup 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/customcolor_tint AGMCORE_gget exch setsepcolorspace
+			pop
+			mapto_sep_imageormask
+			true set_spot_alias
+		}{
+			//Adobe_AGM_Image/AGMIMG_&customcolorimage get exec
+		}ifelse			
+	}bdf
+}def
+/snap_to_device
+{
+	6 dict begin
+	matrix currentmatrix
+	dup 0 get 0 eq 1 index 3 get 0 eq and
+	1 index 1 get 0 eq 2 index 2 get 0 eq and or exch pop
+	{
+		1 1 dtransform 0 gt exch 0 gt/AGMIMG_xSign? exch def/AGMIMG_ySign? exch def
+		0 0 transform
+		AGMIMG_ySign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		AGMIMG_xSign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		itransform/AGMIMG_llY exch def/AGMIMG_llX exch def
+		1 1 transform
+		AGMIMG_ySign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		AGMIMG_xSign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		itransform/AGMIMG_urY exch def/AGMIMG_urX exch def			
+		[AGMIMG_urX AGMIMG_llX sub 0 0 AGMIMG_urY AGMIMG_llY sub AGMIMG_llX AGMIMG_llY]concat
+	}{
+	}ifelse
+	end
+}def
+level2 not{
+	/colorbuf
+	{
+		0 1 2 index length 1 sub{
+			dup 2 index exch get 
+			255 exch sub 
+			2 index 
+			3 1 roll 
+			put
+		}for
+	}def
+	/tint_image_to_color
+	{
+		begin
+			Width Height BitsPerComponent ImageMatrix 
+			/DataSource load
+		end
+		Adobe_AGM_Image begin
+			/AGMIMG_mbuf 0 string def
+			/AGMIMG_ybuf 0 string def
+			/AGMIMG_kbuf 0 string def
+			{
+				colorbuf dup length AGMIMG_mbuf length ne
+					{
+					dup length dup dup
+					/AGMIMG_mbuf exch string def
+					/AGMIMG_ybuf exch string def
+					/AGMIMG_kbuf exch string def
+					}if
+				dup AGMIMG_mbuf copy AGMIMG_ybuf copy AGMIMG_kbuf copy pop
+			}
+			addprocs
+			{AGMIMG_mbuf}{AGMIMG_ybuf}{AGMIMG_kbuf}true 4 colorimage	
+		end
+	}def			
+	/sep_imageormask_lev1
+	{
+		begin
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{
+					255 mul round cvi GrayLookup exch get
+				}currenttransfer addprocs settransfer
+				currentdict imageormask
+			}{
+				/sep_colorspace_dict AGMCORE_gget/Components known{
+					MappedCSA 0 get/DeviceCMYK eq{
+						Components aload pop
+					}{
+						0 0 0 Components aload pop 1 exch sub
+					}ifelse
+					Adobe_AGM_Image/AGMIMG_k xddf 
+					Adobe_AGM_Image/AGMIMG_y xddf 
+					Adobe_AGM_Image/AGMIMG_m xddf 
+					Adobe_AGM_Image/AGMIMG_c xddf 
+					AGMIMG_y 0.0 eq AGMIMG_m 0.0 eq and AGMIMG_c 0.0 eq and{
+						{AGMIMG_k mul 1 exch sub}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						currentcolortransfer
+						{AGMIMG_k mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_y mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_m mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_c mul 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer
+						currentdict tint_image_to_color
+					}ifelse
+				}{
+					MappedCSA 0 get/DeviceGray eq{
+						{255 mul round cvi ColorLookup exch get 0 get}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						MappedCSA 0 get/DeviceCMYK eq{
+							currentcolortransfer
+							{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}{
+							currentcolortransfer
+							{pop 1}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+		end
+	}def
+	/sep_image_lev1_sep
+	{
+		begin
+			/sep_colorspace_dict AGMCORE_gget/Components known{
+				Components aload pop
+				Adobe_AGM_Image/AGMIMG_k xddf 
+				Adobe_AGM_Image/AGMIMG_y xddf 
+				Adobe_AGM_Image/AGMIMG_m xddf 
+				Adobe_AGM_Image/AGMIMG_c xddf 
+				{AGMIMG_c mul 1 exch sub}
+				{AGMIMG_m mul 1 exch sub}
+				{AGMIMG_y mul 1 exch sub}
+				{AGMIMG_k mul 1 exch sub}
+			}{
+				{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}
+			}ifelse
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end
+	}def
+	/indexed_imageormask_lev1
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			currentdict
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{HiVal mul round cvi GrayLookup exch get HiVal div}currenttransfer addprocs settransfer
+				imageormask
+			}{
+				MappedCSA 0 get/DeviceGray eq{
+					{HiVal mul round cvi Lookup exch get HiVal div}currenttransfer addprocs settransfer
+					imageormask
+				}{
+					MappedCSA 0 get/DeviceCMYK eq{
+						currentcolortransfer
+						{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}{
+						currentcolortransfer
+						{pop 1}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 2 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 1 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 		Lookup exch get HiVal div}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}ifelse
+				}ifelse
+			}ifelse
+		end end
+	}def
+	/indexed_image_lev1_sep
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end end
+	}def
+}if
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndProlog
+%%BeginSetup
+Adobe_AGM_Utils begin
+2 2010 Adobe_AGM_Core/ds gx
+Adobe_CoolType_Core/ds get exec
+Adobe_AGM_Image/ds gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndSetup
+%%Page: (Page 1) 1
+%%EndPageComments
+%%BeginPageSetup
+%ADOBeginClientInjection: PageSetup Start "AI11EPS"
+%AI12_RMC_Transparency: Balance=75 RasterRes=300 GradRes=150 Text=0 Stroke=1 Clip=1 OP=0
+%ADOEndClientInjection: PageSetup Start "AI11EPS"
+Adobe_AGM_Utils begin
+Adobe_AGM_Core/ps gx
+Adobe_AGM_Utils/capture_cpd gx
+Adobe_CoolType_Core/ps get exec
+Adobe_AGM_Image/ps gx
+%ADOBeginClientInjection: PageSetup End "AI11EPS"
+/currentdistillerparams where
+{pop currentdistillerparams /CoreDistVersion get 5000 lt} {true} ifelse
+{ userdict /AI11_PDFMark5 /cleartomark load put
+userdict /AI11_ReadMetadata_PDFMark5 {flushfile cleartomark } bind put}
+{ userdict /AI11_PDFMark5 /pdfmark load put
+userdict /AI11_ReadMetadata_PDFMark5 {/PUT pdfmark} bind put } ifelse
+[/NamespacePush AI11_PDFMark5
+[/_objdef {ai_metadata_stream_123} /type /stream /OBJ AI11_PDFMark5
+[{ai_metadata_stream_123}
+currentfile 0 (%  &&end XMP packet marker&&)
+/SubFileDecode filter AI11_ReadMetadata_PDFMark5
+<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 4.1-c036 46.277092, Fri Feb 23 2007 14:16:18        ">
+   <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+      <rdf:Description rdf:about=""
+            xmlns:dc="http://purl.org/dc/elements/1.1/">
+         <dc:format>application/postscript</dc:format>
+         <dc:title>
+            <rdf:Alt>
+               <rdf:li xml:lang="x-default">Web</rdf:li>
+            </rdf:Alt>
+         </dc:title>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xap="http://ns.adobe.com/xap/1.0/"
+            xmlns:xapGImg="http://ns.adobe.com/xap/1.0/g/img/">
+         <xap:CreatorTool>Adobe Illustrator CS3</xap:CreatorTool>
+         <xap:CreateDate>2017-04-03T09:54:57+02:00</xap:CreateDate>
+         <xap:ModifyDate>2017-04-03T10:03:08+02:00</xap:ModifyDate>
+         <xap:MetadataDate>2017-04-03T10:03:08+02:00</xap:MetadataDate>
+         <xap:Thumbnails>
+            <rdf:Alt>
+               <rdf:li rdf:parseType="Resource">
+                  <xapGImg:width>256</xapGImg:width>
+                  <xapGImg:height>96</xapGImg:height>
+                  <xapGImg:format>JPEG</xapGImg:format>
+                  <xapGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA&#xA;AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK&#xA;DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f&#xA;Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgAYAEAAwER&#xA;AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA&#xA;AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB&#xA;UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE&#xA;1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ&#xA;qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy&#xA;obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp&#xA;0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo&#xA;+DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FXlf5yfnzo3kBP0&#xA;bZxrqXmaVA6WRJEUCsPhknYePUINz7ChxV8pebPzc/MPzTPI+q63ceg5NLK3cwWyjsBFGVU08Wqf&#xA;fFWIVNa1361xVk/l380PzB8uyo+ka/eQIhBEDSmWA08YZecZ/wCBxV71+W//ADlnbXUsWneebZLR&#xA;2IVdYtFYxV/4uh+Jl/1kJ/1QMVfRVrdWt3bRXVrMk9tMoeGaJg6OrCoZWFQQcVVcVdirsVdirsVd&#xA;irsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVeW/nN+emkfl/bjT7SNdQ8zXEfOC&#xA;zJ/dwqdlkuCN6H9lBu3sN8VfGM02teZNeaWVnvtY1W4qzEjlJNM30AVJ+Q+WQyZIwiZSNAJjEk0O&#xA;b6M8gfkl5Y0a2SfV4I9W1QrWZ515wITvxjib4SB/Mwr32rTOF1/bmXNKoEwh5c/if0fe73DoYQHq&#xA;3kz/APw75f4cP0ZacKcePoR0p0pTjmq/MZP50vmXJ8OPcGLeZPyX8ga3E9NPXTboj4LmxpDQ+8YH&#xA;pN9K/Tmfpu2tRiP1cQ7pb/bzcfJo8cule58//mH+V2u+S7hXnIu9KmbjbahGCAT14SLvwenapB7H&#xA;rTsOzu1MepG20xzH6u91Oo00sZ7wzj/nHX86LryvrMHljWZy/lrUJPTgZz/vHcSNRXUnpE7H4x0H&#xA;2v5q7Nxn2NirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVQWrapBptoZ5fiY/DHGOrN/&#xA;TxzC1+uhpsfHLc9B3lv0+A5ZUGIS6xqd2xllnZQ32YkJVAPkOv05wmp7Uz5jZkQO4bD8e93kNNjh&#xA;sAsS8u0PJJpFPiGI/jmNHVZYmxKQ+JZnFE8wEzsfM15CwW4/fx9ydnHyPf6c3Gj9oM2M1k9cft/H&#xA;vcTLoIS+nYsmtLuC6hE0LckP3g+BGdjptTDNATgbDqMmOUDRVsyGtJPO/mm18qeU9U8w3Q5x6dA0&#xA;ix1pzkJCxR1/y5GVfpxV+fepajrXmXX5r27d73V9Unqx6s8srUVVH3KoHQbZGUhEEnkEgEmg+i/y&#xA;3/KbSfK8MF/doLvzAVq9wd0hLijJCvTYGnM7n2BpnBdp9sT1BMY7Y+7v9/6nf6XRxxizvJ6XHCFW&#xA;lTU9SDmnBcu2zET0dh9P9cPF5LbQE6/tBx77H8MNxPkuyF1fSbDWtLudL1GH1LS6QxzRnwPQg9iD&#xA;uD2OTxZZYpicTuGE4CQo8nxt5p0C58v+Yb/Rrg1kspmjDkU5p1R6f5SENnpWl1AzY4zH8Qeby4zC&#xA;Riej7F/5x4/NNPOnlJbC9YDXtESOC7BO80QXjFcCu9W40f8AyvmMyGt6virsVdirsVdirsVdirsV&#xA;dirsVdirsVdirsVdirsVQmqalDp1m1xLuRtGndmPQZh67WR0+Mzl8B3luwYTklwhgV7qN7qlyGuH&#xA;qBXgg2VQetBnn2s12TUS4pn3DoHocWCOIVFsRqAACQB7nMW2drTE/VZGB96EZLiHcm/JwMy/aAce&#xA;K7H7jjsV2TXQtRa1vUqSIZSEkB9+h+jNn2RrTgzCz6JbH9fwcPWYOOHmGaZ6E6B8nf8AOV35j3l9&#xA;5jXyVZTMmm6YqS6iqmgmupFEiK1OqxIy0H8xPgMVYf8AkX5M1LUPMcHmJ41Glaa7gtJ1kmMTKqoP&#xA;8guGJzn+39dCGI4v45fYL/S7Ds/AZT4+gfR0Ks0goK0339s4cO9KLJn7Kv8AwR/pkqDHZbzuB/us&#xA;H5N/UYaj3pod7hOR9qNl+QqPwx4O4o4fNeskbbBhXw7/AHZExIQQXkv/ADkR5Rtb3y0vmOKMLfaY&#xA;6JNIBu9vKwTi3jxdgR4b50Ps7qzHL4R+mX3j9jru0MQMeLqHnP8Azjx5oPl/81dJaSX0rPUi2nXV&#xA;TRSJx+6r/wA9lQ52zpn3PirsVdirsVdirsVdirsVdirsVdirsVdirsVdirsVYR5u1SG7uo4IGLJb&#xA;cg7dixpWnjSmcN2/ro5sghDcQv5u97PwGEST/Ek9sr7soB7bmn8DmhFOeaVi0/ZF/wCCP9MlUUbN&#xA;epOOsVfkwx4Y96aHe2Jx+0rL8xt+GPAjhXq6t9kg/LIkUghl/lq9e4sjFIavAQoP+Sfs/qzuewNW&#xA;cuExlzht8Ojo9fiEZ2Or4b/Oq5huPzX80SQyCWP69InNdxyjojD/AGLKRm+cF7Z+STo35a6UFBBR&#xA;rlWJBFT9ZkO3jsc8+7dH+Fy+H+5D0Og/uh8fvZ/bOFc1r07AnNSA5ZCuZx/K5/2JyXB7kcLvrC91&#xA;cf7E48C8LhcReJHzBGPAV4Su5xNtyU+1RgohFFBa/otnrei3mk3i1tryJonp1FfssPdWoRlmnzSx&#xA;TE484lhkgJRIPV8WX1pdaZqdxaSEpdWUzxOVJB
+WSJypoeuxXPsQpfEJ9nnPxY3RFhTFXg35z/nppH5f240+0jXUPM1xHzgsyf3cKnZZLgjeh/ZQbt7DfFXxjNNrXmTXmllZ77WNVuKsxI5STTN9AFSfkPlkMmSMInUjQCYxJNDm+jPIH5JeWNGtkn1eCPVtUK1medecCE78Y4m+EgfzMK99qZqvzGT+dL5lyfDj3Bi3mT8l/IGtxPTT1066I+C5saQ0PvGB6TfSv05n6btrUYj9XEO6W/283HyaPHLpXufP/5h/ldrvku4V5yLvSpm42WoRggE9eEi78Hp2qQex607Ds7tTHqRttMcx+rvdTqNNLGe8M4/5x1/Oi68r6zB5Y1mcv5a1CT04Gc/7x3EjUV1J6ROx+MdB9r+auzcZ9jYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FXYq7FUFq2qQabaGeX4mPwxxjqzf08c
+V2KuxV2KuxV2KuxV2KuxV2KuxV2KuxV//Z</xapGImg:image>
+               </rdf:li>
+            </rdf:Alt>
+         </xap:Thumbnails>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapMM="http://ns.adobe.com/xap/1.0/mm/"
+            xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#">
+         <xapMM:DocumentID>uuid:82B45AE7E519E7119A76BA5BC76AA065</xapMM:DocumentID>
+         <xapMM:InstanceID>uuid:26AD93F6E619E7119A76BA5BC76AA065</xapMM:InstanceID>
+         <xapMM:DerivedFrom rdf:parseType="Resource">
+            <stRef:instanceID>uuid:81B45AE7E519E7119A76BA5BC76AA065</stRef:instanceID>
+            <stRef:documentID>uuid:80B45AE7E519E7119A76BA5BC76AA065</stRef:documentID>
+         </xapMM:DerivedFrom>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:illustrator="http://ns.adobe.com/illustrator/1.0/">
+         <illustrator:StartupProfile>Web</illustrator:StartupProfile>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapTPg="http://ns.adobe.com/xap/1.0/t/pg/"
+            xmlns:stDim="http://ns.adobe.com/xap/1.0/sType/Dimensions#"
+            xmlns:xapG="http://ns.adobe.com/xap/1.0/g/">
+         <xapTPg:MaxPageSize rdf:parseType="Resource">
+            <stDim:w>14400.000000</stDim:w>
+            <stDim:h>14400.000000</stDim:h>
+            <stDim:unit>Pixels</stDim:unit>
+         </xapTPg:MaxPageSize>
+         <xapTPg:NPages>1</xapTPg:NPages>
+         <xapTPg:HasVisibleTransparency>False</xapTPg:HasVisibleTransparency>
+         <xapTPg:HasVisibleOverprint>False</xapTPg:HasVisibleOverprint>
+         <xapTPg:PlateNames>
+            <rdf:Seq>
+               <rdf:li>Cyan</rdf:li>
+               <rdf:li>Magenta</rdf:li>
+               <rdf:li>Yellow</rdf:li>
+               <rdf:li>Black</rdf:li>
+            </rdf:Seq>
+         </xapTPg:PlateNames>
+         <xapTPg:SwatchGroups>
+            <rdf:Seq>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de nuances par défaut</xapG:groupName>
+                  <xapG:groupType>0</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Blanc</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Noir</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Rouge RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Jaune RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Vert RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Cyan RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Bleu RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Magenta RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=193 V=39 B=45</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>193</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>45</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=241 V=90 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>241</xapG:red>
+                           <xapG:green>90</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=247 V=147 B=30</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>247</xapG:red>
+                           <xapG:green>147</xapG:green>
+                           <xapG:blue>30</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=251 V=176 B=59</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>251</xapG:red>
+                           <xapG:green>176</xapG:green>
+                           <xapG:blue>59</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=252 V=238 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>252</xapG:red>
+                           <xapG:green>238</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=217 V=224 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>217</xapG:red>
+                           <xapG:green>224</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=198 B=63</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>198</xapG:green>
+                           <xapG:blue>63</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=57 V=181 B=74</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>57</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>74</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=146 B=69</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>69</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=104 B=55</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>104</xapG:green>
+                           <xapG:blue>55</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=34 V=181 B=115</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>34</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>115</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=41 V=171 B=226</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>41</xapG:red>
+                           <xapG:green>171</xapG:green>
+                           <xapG:blue>226</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=113 B=188</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>113</xapG:green>
+                           <xapG:blue>188</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=46 V=49 B=146</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>46</xapG:red>
+                           <xapG:green>49</xapG:green>
+                           <xapG:blue>146</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=27 V=20 B=100</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>27</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>100</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=147 V=39 B=143</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>147</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>143</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=158 V=0 B=93</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>158</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>93</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=212 V=20 B=90</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>212</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>90</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=30 B=121</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>30</xapG:green>
+                           <xapG:blue>121</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=199 V=178 B=153</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>199</xapG:red>
+                           <xapG:green>178</xapG:green>
+                           <xapG:blue>153</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=153 V=134 B=117</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>153</xapG:red>
+                           <xapG:green>134</xapG:green>
+                           <xapG:blue>117</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=115 V=99 B=87</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>115</xapG:red>
+                           <xapG:green>99</xapG:green>
+                           <xapG:blue>87</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=83 V=71 B=65</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>83</xapG:red>
+                           <xapG:green>71</xapG:green>
+                           <xapG:blue>65</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=198 V=156 B=109</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>198</xapG:red>
+                           <xapG:green>156</xapG:green>
+                           <xapG:blue>109</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=166 V=124 B=82</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>166</xapG:red>
+                           <xapG:green>124</xapG:green>
+                           <xapG:blue>82</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=98 B=57</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>98</xapG:green>
+                           <xapG:blue>57</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=117 V=76 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>117</xapG:red>
+                           <xapG:green>76</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=96 V=56 B=19</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>96</xapG:red>
+                           <xapG:green>56</xapG:green>
+                           <xapG:blue>19</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=66 V=33 B=11</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>66</xapG:red>
+                           <xapG:green>33</xapG:green>
+                           <xapG:blue>11</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de couleurs Web</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=236 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>236</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=139 V=146 B=152 1</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>139</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>152</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Niveaux de gris</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=100</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>255</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=90</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>229</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=80</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>204</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=70</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>178</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=60</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>153</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=50</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>127</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=40</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>101</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=30</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>76</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=20</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>50</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=10</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>25</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=5</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>12</xapG:gray>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+            </rdf:Seq>
+         </xapTPg:SwatchGroups>
+      </rdf:Description>
+   </rdf:RDF>
+</x:xmpmeta>
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                                                                                                    
+                           
+<?xpacket end="w"?>
+%  &&end XMP packet marker&&
+[{ai_metadata_stream_123}
+<</Type /Metadata /Subtype /XML>>
+/PUT AI11_PDFMark5
+[/Document
+1 dict begin /Metadata {ai_metadata_stream_123} def
+currentdict end /BDC AI11_PDFMark5
+%ADOEndClientInjection: PageSetup End "AI11EPS"
+%%EndPageSetup
+1 -1 scale 0 -840 translate
+pgsv
+[1 0 0 1 0 0 ]ct
+gsave
+np
+gsave
+0 0 mo
+0 840 li
+1096 840 li
+1096 0 li
+cp
+clp
+[1 0 0 1 0 0 ]ct
+267.457 514.331 mo
+254.36 514.331 245.123 510.693 238.965 506.761 cv
+229.159 504.354 219.315 496.853 215.231 481.76 cv
+210.479 475.936 205.932 466.968 206.03 456.302 cv
+199.512 448.136 197.32 438.278 199.767 428.182 cv
+198.019 424.397 197.024 420.269 196.866 415.971 cv
+196.654 410.24 197.967 404.288 200.472 398.989 cv
+199.101 391.374 199.994 381.547 208.857 372.399 cv
+211.362 361.883 219.241 351.91 228.341 346.145 cv
+234.454 336.039 247.396 323.841 268.749 323.841 cv
+269.398 323.845 li
+287.271 324.053 316.313 334.345 326.901 361.497 cv
+332.496 367.651 336.394 376.008 337.542 384.271 cv
+342.521 393.116 345.776 406.585 343.587 418.229 cv
+347.276 429.814 347.05 441.576 342.909 452.764 cv
+342.979 461.48 340.28 472.554 332.324 479.874 cv
+325.125 494.155 308.89 506.154 289.214 511.314 cv
+281.577 513.315 274.258 514.331 267.457 514.331 cv
+267.457 514.331 li
+cp
+false sop
+/0 
+[/DeviceCMYK] /CSA add_res
+0 0 0 0.9 cmyk
+f
+269.204 340.844 mo
+248.926 340.609 241.95 356.076 241.079 358.664 cv
+232.139 361.957 223.854 373.719 225.162 380.776 cv
+218.51 385.482 214.096 391.48 219.057 401.479 cv
+214.259 406.477 210.334 418.299 218.839 425.943 cv
+211.151 439.47 219.819 447.057 224.072 450.174 cv
+220.146 462.406 228.872 472.127 230.831 473.463 cv
+232.411 483.579 237.31 490.244 245.657 490.636 cv
+251.542 495.576 263.831 500.394 284.903 494.87 cv
+302.399 490.282 315.153 479.344 318.48 468.993 cv
+326.437 464.877 326.11 452.526 325.675 449.938 cv
+332.161 436.118 328.072 424.298 325.675 418.886 cv
+330.035 410.006 324.476 393.422 320.879 389.951 cv
+321.205 382.836 316.789 374.543 312.157 370.896 cv
+305.506 348.078 281.212 340.983 269.204 340.844 cv
+cp
+0 0.203922 0.847059 0 cmyk
+f
+271.583 480.75 mo
+265.171 479.606 261.833 475.25 258.333 468.25 cv
+256.23 466.815 248.083 457.25 246.583 445 cv
+244.028 443.128 240.583 431.5 240.833 422.75 cv
+239.083 416.75 237.682 410.448 239.333 401.75 cv
+237.083 392.25 236.932 385.865 242.083 380.5 cv
+242.083 371 244.692 364.052 251.833 359 cv
+250.682 352.791 253.61 346.904 261.012 341.695 cv
+246.99 344.937 241.82 356.462 241.079 358.664 cv
+232.139 361.957 223.854 373.719 225.162 380.776 cv
+218.51 385.482 214.096 391.48 219.057 401.479 cv
+214.259 406.477 210.334 418.299 218.839 425.943 cv
+211.151 439.47 219.819 447.057 224.072 450.174 cv
+220.146 462.406 228.872 472.127 230.831 473.463 cv
+232.411 483.579 237.31 490.244 245.657 490.636 cv
+251.542 495.576 263.831 500.394 284.903 494.87 cv
+291.54 493.13 297.488 490.474 302.505 487.285 cv
+286.267 489.368 276.838 485.161 271.583 480.75 cv
+cp
+0.027451 0.278431 0.905882 0 cmyk
+f
+249.238 359.547 mo
+249.238 359.547 267.404 357.899 276.919 359.71 cv
+286.364 361.507 302.737 369.603 302.737 369.603 cv
+302.737 369.603 278.317 362.163 271.065 361.135 cv
+262.363 359.898 249.238 359.547 249.238 359.547 cv
+cp
+0 0.376471 0.819608 0 cmyk
+f
+234.115 381.029 mo
+234.115 381.029 258.269 378.72 272.415 380.014 cv
+286.562 381.308 309.608 388.997 309.608 388.997 cv
+309.608 388.997 283.603 384.326 269.219 382.709 cv
+258.482 381.502 234.115 381.029 234.115 381.029 cv
+cp
+f
+229.801 400.674 mo
+229.801 400.674 254.083 398.621 268.926 399.067 cv
+283.769 399.512 310.014 404.008 310.014 404.008 cv
+310.014 404.008 275.431 401.892 264.134 401.754 cv
+252.838 401.617 229.801 400.674 229.801 400.674 cv
+cp
+f
+230.443 425.032 mo
+230.443 425.032 256.462 418.443 270.496 417.417 cv
+287.121 416.2 314.15 419.357 314.15 419.357 cv
+314.15 419.357 278.842 419.268 268.202 420.439 cv
+256.939 421.681 230.443 425.032 230.443 425.032 cv
+cp
+f
+234.295 449.38 mo
+234.295 449.38 259.69 440.354 274.204 438.765 cv
+288.72 437.175 315.435 437.177 315.435 437.177 cv
+315.435 437.177 280.365 440.614 269.363 442.181 cv
+258.365 443.748 234.295 449.38 234.295 449.38 cv
+cp
+f
+315.007 452.174 mo
+315.007 452.174 293.946 461.131 280.261 464.999 cv
+266.575 468.868 239.058 472.093 239.058 472.093 cv
+239.058 472.093 270.63 465.146 280.987 461.944 cv
+291.343 458.742 315.007 452.174 315.007 452.174 cv
+cp
+f
+253.661 488.342 mo
+253.661 488.342 268.029 480.484 277.241 477.136 cv
+294.463 470.875 307.874 469.817 307.874 469.817 cv
+307.874 469.817 283.603 477.576 274.918 480.402 cv
+267.868 482.697 253.661 488.342 253.661 488.342 cv
+cp
+f
+648.537 340.844 mo
+628.26 340.609 621.283 356.076 620.412 358.664 cv
+611.473 361.957 603.188 373.719 604.496 380.776 cv
+597.844 385.482 593.43 391.48 598.391 401.479 cv
+593.592 406.477 589.668 418.299 598.172 425.943 cv
+590.484 439.47 599.152 447.057 603.406 450.174 cv
+599.48 462.406 608.205 472.127 610.164 473.463 cv
+611.744 483.579 616.643 490.244 624.99 490.636 cv
+630.876 495.576 643.164 500.394 664.236 494.87 cv
+681.732 490.282 694.486 479.344 697.814 468.993 cv
+705.77 464.877 705.443 452.526 705.008 449.938 cv
+711.494 436.118 707.406 424.298 705.008 418.886 cv
+709.369 410.006 703.809 393.422 700.213 389.951 cv
+700.539 382.836 696.123 374.543 691.49 370.896 cv
+684.84 348.078 660.545 340.983 648.537 340.844 cv
+cp
+0 0.203922 0.847059 0 cmyk
+f
+650.916 480.75 mo
+644.504 479.606 641.166 475.25 637.666 468.25 cv
+635.564 466.815 627.416 457.25 625.916 445 cv
+623.361 443.128 619.916 431.5 620.166 422.75 cv
+618.416 416.75 617.016 410.448 618.666 401.75 cv
+616.416 392.25 616.266 385.865 621.416 380.5 cv
+621.416 371 624.025 364.052 631.166 359 cv
+630.016 352.791 632.943 346.904 640.346 341.695 cv
+626.324 344.937 621.154 356.462 620.412 358.664 cv
+611.473 361.957 603.188 373.719 604.496 380.776 cv
+597.844 385.482 593.43 391.48 598.391 401.479 cv
+593.592 406.477 589.668 418.299 598.172 425.943 cv
+590.484 439.47 599.152 447.057 603.406 450.174 cv
+599.48 462.406 608.205 472.127 610.164 473.463 cv
+611.744 483.579 616.643 490.244 624.99 490.636 cv
+630.876 495.576 643.164 500.394 664.236 494.87 cv
+670.873 493.13 676.822 490.474 681.838 487.285 cv
+665.6 489.368 656.172 485.161 650.916 480.75 cv
+cp
+0.027451 0.278431 0.905882 0 cmyk
+f
+628.571 359.547 mo
+628.571 359.547 646.738 357.899 656.252 359.71 cv
+665.697 361.507 682.07 369.603 682.07 369.603 cv
+682.07 369.603 657.65 362.163 650.398 361.135 cv
+641.697 359.898 628.571 359.547 628.571 359.547 cv
+cp
+0 0.376471 0.819608 0 cmyk
+f
+613.448 381.029 mo
+613.448 381.029 637.603 378.72 651.748 380.014 cv
+665.895 381.308 688.941 388.997 688.941 388.997 cv
+688.941 388.997 662.936 384.326 648.553 382.709 cv
+637.816 381.502 613.448 381.029 613.448 381.029 cv
+cp
+f
+609.135 400.674 mo
+609.135 400.674 633.416 398.621 648.26 399.067 cv
+663.102 399.512 689.348 404.008 689.348 404.008 cv
+689.348 404.008 654.764 401.892 643.467 401.754 cv
+632.172 401.617 609.135 400.674 609.135 400.674 cv
+cp
+f
+609.776 425.032 mo
+609.776 425.032 635.795 418.443 649.829 417.417 cv
+666.455 416.2 693.484 419.357 693.484 419.357 cv
+693.484 419.357 658.176 419.268 647.535 420.439 cv
+636.273 421.681 609.776 425.032 609.776 425.032 cv
+cp
+f
+613.629 449.38 mo
+613.629 449.38 639.023 440.354 653.537 438.765 cv
+668.053 437.175 694.768 437.177 694.768 437.177 cv
+694.768 437.177 659.699 440.614 648.697 442.181 cv
+637.698 443.748 613.629 449.38 613.629 449.38 cv
+cp
+f
+694.34 452.174 mo
+694.34 452.174 673.279 461.131 659.594 464.999 cv
+645.908 468.868 618.392 472.093 618.392 472.093 cv
+618.392 472.093 649.964 465.146 660.32 461.944 cv
+670.676 458.742 694.34 452.174 694.34 452.174 cv
+cp
+f
+632.994 488.342 mo
+632.994 488.342 647.363 480.484 656.574 477.136 cv
+673.797 470.875 687.207 469.817 687.207 469.817 cv
+687.207 469.817 662.936 477.576 654.252 480.402 cv
+647.201 482.697 632.994 488.342 632.994 488.342 cv
+cp
+f
+0.5 lw
+0 lc
+0 lj
+4 ml
+[] 0 dsh
+true sadj
+27 804 mo
+0 804 li
+/0 
+<<
+/Name (All)
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4 
+/Components [ 0.858823 0.85098 0.788235 1 ] 
+>>
+/CSD add_res
+1 /0 /CSD get_res sepcs
+1 sep
+@
+36 813 mo
+36 840 li
+@
+27 36 mo
+0 36 li
+@
+36 27 mo
+36 0 li
+@
+1069 36 mo
+1096 36 li
+@
+1060 27 mo
+1060 0 li
+@
+1069 804 mo
+1096 804 li
+@
+1060 813 mo
+1060 840 li
+@
+%ADOBeginClientInjection: EndPageContent "AI11EPS"
+userdict /annotatepage 2 copy known {get exec}{pop pop} ifelse
+%ADOEndClientInjection: EndPageContent "AI11EPS"
+grestore
+grestore
+pgrs
+%%PageTrailer
+%ADOBeginClientInjection: PageTrailer Start "AI11EPS"
+[/EMC AI11_PDFMark5
+[/NamespacePop AI11_PDFMark5
+%ADOEndClientInjection: PageTrailer Start "AI11EPS"
+[
+[/CSA [/0 ]]
+[/CSD [/0 ]]
+] del_res
+Adobe_AGM_Image/pt gx
+Adobe_CoolType_Core/pt get exec
+Adobe_AGM_Core/pt gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%Trailer
+Adobe_AGM_Image/dt get exec
+Adobe_CoolType_Core/dt get exec
+Adobe_AGM_Core/dt get exec
+%%EOF
+%AI9_PrintingDataEnd

+userdict /AI9_read_buffer 256 string put
+userdict begin
+/ai9_skip_data
+{
+	mark
+	{
+		currentfile AI9_read_buffer { readline } stopped
+		{
+		}
+		{
+			not
+			{
+				exit
+			} if
+			(%AI9_PrivateDataEnd) eq
+			{
+				exit
+			} if
+		} ifelse
+	} loop
+	cleartomark
+} def
+end
+userdict /ai9_skip_data get exec
+%AI9_PrivateDataBegin
+%!PS-Adobe-3.0 EPSF-3.0
+%%Creator: Adobe Illustrator(R) 13.0
+%%AI8_CreatorVersion: 13.0.0
+%%For: (Thierry Ung) ()
+%%Title: (gnocchi-icon.eps)
+%%CreationDate: 4/3/17 10:03 AM
+%AI9_DataStream
+%Gb"-6fs9_\E?P#[nIOfZXMot+"p?+O!3Jt-B9b;P>YLr>.+>PgV5ghYfptcYAsO^f+6blZA*nVf!''_E]<F_ElGDuO#7/9-iShCu
+%n\sJ)O70i3oD\X)LO]3sm`V<NGMe['2g$Qsn\sD"q>8d9*kLV.iL:qH]mg"Wf,m5)oD6q6j+k)c5(<A(meDQ=mruNkof[]nCA3dm
+%hnK'YYIh^,=5Wr%nAYCqo%03eDdQj8I!k\Ro5;;.&D"n*%mRdRqs?%dI.tsj5Q6%s\f&?%f@8eEI"J5g<W&?&J+rr3[m,?=qUa*V
+%lYlgk*IRbk58_/4^OPkOo4R8DFJO[U*e3^@s0qIScZe4$Pc#r?oLCi9n9`+Ro<.!LeNa$2lQ?,dh`jF`s8?TYLQ<O+Gk^L`s+L),
+%\G@hTh,l$'n@,UYrUBO2kLVOX\W=@29);c[JC2fCI:%0X[%"[A=6>N-X:tC*?f&Hpf:Ts^IIlV"k7=26cU7a*Rr<Psp)cO!k0<o2
+%Zd>W"Fo7Ffq>28k^Adm<n)&MIrY5R\r9,dtI/Eal9n>EVH2HlR)$;]lJ+=&`IJ*.-f:QKr5J6sbo:$9ZIh#+&+2Ot)p$*'k5CSdH
+%k-':-_s@'3rUA^Ca'[-P?/CQh%mT<*MXt_W'Dt/ba;rKj]WJ(j8scaoCdQkt@Bk7joDYeODn^_mro[XS1/6-PQY#IR?0:hYRu_L%
+%XiifGh8DNk+Es%HT(SS"%"nTXq=T(P!T;`&(U]g:]DKQcp]\G24eUa1=)W'?,.%`!%4hCriAG`Ao]l%KTC7Lo`80=brjU_B&"aI$
+%3qsTtr)jC+S7hbF4=YFinFnWpVo+%W%Kh3,\_ts7<%3&n.JR7!GZQ>=="0Jo[Q2ZYcTL5N1OS01o8In<1Z!h8iD7,PGf74S7sslP
+%`(uC`]]X=O:G8&4nEJ+"5JLIu)=B6RQi"[=jA5;;rHoI!(\e[4_D1"F]F2cU,N[6j7/.Ga_%GX%*4O2r5Ds$gTD0Lg#_-;V4AX`f
+%NX1-o\FDLEED6.&SkXYuhM(",T6S]DJhUIPloEBHcQ^[Jro>GPjP5+M484`lN]kuOdQ@43n&"VR:MbkhO@RQfnVZ:D]F2cX/'P?:
+%cf4`h-iSmrj7UHbgiJ/,:V7(ULHg&!),#3\q>L(X`<#>9ifC5[pQ&r-n)AWAH/Xm,n$2'207GR`XR!iLkSN*.UJoCTdG`P:CAnpf
+%LHk;]`buh=cNugo5Ofd7?2-`@>PAWR*5@'0>B*bb7Bm;KcBuJYT_h(-PinTaa0X([ja"b8l\aoY\F4[/^7AKkkP92uih#beA8LBc
+%kIKOe0(d-X>B`.n'>DiL>'H:X;oO1)DT2L1J,"-)+l2C*Q$&DY3ae"j-Lp=])qnB\YJ-@XjX9r>UG)Pi<3_dn)#9BlS%nG$_lrp+
+%^VUdI1Sk$4YArY'lYlck-N8L`o\G^YmJ$NMrQS])"92_Xo2k_HFCIf>c!TWis'L9I=(bl6cZ/@A-a,'#YQ+-2(gftWPV8pBcK?F%
+%_TG"*\]a52P]o7WVu:]tNVgn15E2Sa'M&/JOl-E%$qpJ>LkYk`4&=gnQsm,[j5mecSC6YBaUt-1O5k6>mkO.A&U2u#(OgPIVph60
+%La+%eWCug+!?q7+fYb>-4Om)oE+^)okAU[@CUEi`4h8=$oL?dTC&'3l3K8j@EQ%T\bOBCo$iW:1E-.E[]_Jcc8+%0U"jatHN$k!F
+%_\"kY3l:`Q1P>/`cgorcpC-G"HoI)^fSj!(#V_e9;h)Ybep#3IE>F]Z6Sh4-QEcsU5P=:'Y1hG1hFDM.FLQ\aaMJ`VmeS`RdGIGW
+%R(*l;9fMD3%I]V5_6p,*k,nMsQ)mdV%X>SZo>FGe:?J)C-%22Whf]fr]?V4pG?V`pn]ojWr&r;N]:L$cmd7.Q5P>K9R+fDso8Kg?
+%dojj+0l*gsq!Mb>/oFq[3Ql:DGh0lXeoYgR<`h!jpX!g'fQ/+1h;du/KUS%mPd,Ys%NIFr)]W/OC^/ZG@/IBF0)c?t(2d,q4KmT3
+%4ZGA[e'2e!):o;/-eu=%M%181H*9#h6V8SMK\rp#2%#&E"trakA"m;UFQXP+,`WRX:$AEM:nk&df.Br>Sph;0J*?,FB?_7UpYV_+
+%_LERj]L-u9PKG^))8H.>4b%'ndjH7ZrV-IfC@qPX^\e(UK5_/Q;tqYe14B,e[m.`"J+N3JO$EQuqsQ_%^Y427iLaKGIes_rf^F%!
+%qWInKqO>eqg\oZPK3o]a^A7C.7EafmGH]m$n?[Jrqoq&J]m\ZV55t?ApUf=3q!]'Q!!*m(hqdUJK2:@=hXR/8=*VFUp<@g=rV5V7
+%gsl9%-hO-?^\[m*h;RbepYUF'^Z"hb`kHj#n^Y,DHhZpnh07NH-[ml,hL,=eVO(`/^O"4E]le.pGOI_CIe0*>ma]A(Da&Suo=oae
+%D1)A]?b-43q1dKQgU0WO1O.mKe_@QO@,6>-rL7_CGiq$Yp@Wde%j-8pjmB+0YcRI?2M1tGn%X;Dp;7?ol4<@_q8:rGTAIBtp35n$
+%"[1Yjkr!@5g6#*G3kt]]f5EnOn,kRdJ+$]nhU.AZNc^<m&0>*.n(t_W(5TeFIXLb?:#(MJ5CE6ZmF20,iU-(u6&WAOfF<2gmAoub
+%54/]j*m(Tm"0MM6k5<su]RTU7<q+lDU+cH0c+i0XK0/EWq.h,3Sk+A,jZV*TG.YY=p\E\H20f9/_p<nFr5ZPsD>B6Bhn=I0r]gD\
+%eJm^>s0HYJ^?]r^GPF)o;u!PIjhG]s;O>#DNSWCW]0gf/cJ8Zc^coS[3qcK:p\Xs:++%D-@i%ErN'g8n"5>iT3UYoN*oe]@69N<3
+%cj^T*h_^R+*2[./*/<Km)U?5HodQa="NHs()UjYn$HQ4d18de5i9ZJLa=3H<jseUo"H/@;q-:MZQWT!*5uo&M<9CJ>LN4rD!YWr6
+%NBQbOC!:'Qa`<p1g5cKs%o2.&:AJeml&$.SGc*C[WD%7Q#b*Uiq?ONjdqLj=QPme-pG7bA*bOWmS7d'_Zo=,AA3V0o3mK+_c>[(_
+%0O%rr;mV6.fFN<GcQ7DG072m_1),([L8@t]Nc:Q_B=KSkbFt[=-M7A&B>7lX_>u"FO:UFQA&-A1kd:Y1K1+Q@>UR&,LWeXsK[g-q
+%W'A9H9_1!t(&keID3WT4opooQ:UJ+^XmuH.Y&pqVf$ut5HK0kn_3TIb(7eO>QSm*plI.H!Q,g<$gtL6BLF^Q?*<I5O&MYD8!!a2G
+%P2O.h'@&n*!RY<UK^^Cp2j>G\0XMsu&=b/%'9!Z^(WQMh6&;e&)OXESO^h2.E?sU)=6'qJ(^N&#0*7ie6qG-BE?^WTKMWBPX$>(%
+%+7!Z+['oAelM;jeN,a8=Z_(fgK.M&u=CYWJ?0D)cqBRTI#?^?(e.[6`[sPaHFj5'ZC#*Lb@GQPe4YQeTq1TO0qD1,s%)D=h#E/s]
+%*qk035nS_5KVASn`BZtfBVC6UW.?4HM2#G/!;&MZ@73slWQ:g@";0c'4fN5oJm\OL%JFrWD?6UYXsV%f``U143=+%cM,IM"le)c?
+%6s,=+b#AXd'Cc^?EZF3A#`7c$#7coe!n`:<G65GC!TPrng'h),!po4:>T!j5&</;m4p)qO"A!PQcd9X>aoN&]V?O`MaptY9'<Ksq
+%2L0t8?!ol=p*\M/&U*RJMg@LU@5u.Z".&;7a5d!:CCB^hRDcc@m._G0B*=aLQZO)tn^:dUoAt?`T?bLK!KN!SOD>!)QHojcBKZ%P
+%%cm-Nl#\rfp"q"F"JAQNWPDh3Ykd:X5BTKGZCKjD#C,i6&p&VQ,j7sUrS@Pi3<M-!!`do@:FG@EJR0^s4)bU^6O1Qn$q0=c6e]Gn
+%-J0gP2Iuq.(4hoso*%t`"DE,^"J0$G7[T2E83R$jpN$T:iCn@b#eOUh@,<5t]`MCe_,NnV?ldVO1MjZJCr&8NL^A6:JLOC%BD\rd
+%SU?0g35XP/_no*O49ZKL_,::WkXu/&i=)c2KJhQ&8kuU^"]dLsg.#3'/mX"IS:K01@,5Ii56[u_!c3[C'7bbR"\@/MYiL>U1;eP_
+%JY$GgTE4P\+1c[tX?$B"*<=,l^bjQGA\bnYn?7mYIfXpZ!EL9<r?#NdVspuBHN6h&AQG,J'pgejjLX?VL?G^T>ub4\n=3J^\hj$>
+%F4YSVFoU\&p=\d/P^RoI$3snsQWmnS\'N^;YdS9?`Rfm++:O+i"8WN.j>Z%UY8u]n.J3P1I;8f"B8;\mF"3L3/ua:!n]ds^_A[h*
+%Y8C8GK;0"h`dC3)WsmdP7B^O^81Y^;)TPZ72IkC(<P)$Z3uCnV/:"7I=ccR(+dXon_rpp=Sa_$LZfj-G(fOs>cO5;&\Pj>MYgKE]
+%Z4oYUH[7L\P_Z8jW",0hd_OmK$7"@%QFDTe)2-..3/"+2V)$P',Uj`<Hj@]'cX):5iTO)N%7dtrR>0a5lAgTe*-WEc/g;?2*>1%P
+%;VPaIl63N+V1;<pV]S:\Vbl"m;bVVMo:EtYhu0YX\02!MZr`!$cQG-$&S)jhHHVl""/6hKFnW!J@):h[CB-^-m]]e7(X$r'%Uusa
+%1lTd47pQ\$NW1jmPk1h$,G9AbjQBqap*3t<4i'4TlRoPGNdOKs]ft(=nf_g!=EHi.R"bX=,K%G"o*l?:S2<pnq0[bjHYUTL4OX`Y
+%\$S(6r=,+WLINZ>qE3ImNTsX\)bu$R7lc)$$u^Rnd-3n\ZSS]R.>J-Gd)5Oo/C9h>JSFnFT';QR7l_dk*]MrofClJ;l!8l@U#D*g
+%g\NHOa9iARB2iH^6Bsi/X%3Su%!Xi7%dsFbO+>\%/=ei;nd+HN:,r=JO0B"(fFJ5&q%N+Ub8KO43RS0lhJqp7UO1>,S+oFghUK1G
+%d?<C2lat0;UN19??I4\T^[9hC_$@pF@3K$bLlDHo.WtRVg69hP8F##P^^Wi\lM6,r`CR",5/1&YF>=kEPk.O.N(-5M6q;pn<b4-X
%4+L*7*H:*uS6F")]fE1dJA?-ZV]ogi%Ql,$An6Gfd$Yb?q1/&ldJ">kMNAMq5ru7\*+t'aiLS?&A.4C]e+gSJA2=MW&nk(4h8Bbu
%[E,_O=D;(Z["Hi%Fq(4DBDt(K^IHi.P\`;&7H`/R/UegsA=<a&`"^1T+K3/uU9d.%)tH*H)q!o0'.IZG$eOr6`upll*J60fSKQ?'
%4HLHn.Mm3^R#t'1$YCY$#CD[q!Q7l"a+'ZI3luN?Mb_:8T!"L"\VW^8'^?#3Fsu(<E&RnDN4n7:oFUbE#Ti&SP^Afoq%5d8:%HaH
%^%,00rk&2$k6'Q1-"2_D;PEdB\QI1tCpPiSZ]P;a8=^:3PNdJM2C$5:"eCm;3Dj\cZH9.QiRWTS9>bo/1)JOrj!nrRF.op[I`-%6
%]2p^4d!=Ys^*Vh)\8T1qcb$@B,<3upk//8h6lh?m4":V-A0;Pr,hGkXg@XEnDAhi=k\nq2B-FLI#YfZ880U;'@.K-+UWOYNk@ajH
%q1n#!E`&G.VVC+M:!;;VO>"U*SVU)aLhJ"D1D::o9PgeX/`2<?ZS4J`q@:ZlngRH3gk5)bHY^%1"+r>Ik9]#TM.\Dn.2-^(1Z=D#
%[jT$FjC!thm>4bL?6S"^P<F/4&+Lma?l4g,iflCtjF7Nm>lmopnufSD#%2-7O#V@^9T^NCV&SP6nIN:3a$8#)j`qYde$tnE]0$S#
%=*]@o%\KCaSMnse%IaVlm;o"6js6'UmZ4o:cq<`pL>3<O1uG-<MR?YAe/n:bf9(1h[*!kd357H>7:FZ8qORarjp?d%NHMjA\Ed)X
%CVO2K/aCe=_G&sB`_9Zn58)o8aO<=Jn..uhiC`S]cqr_FqXG&_T!A"R"kpgW=`>$\EECR/ne!ju/pfb:k"X1>Jo]J("9B8e59C3=
%3Ec@6kCO6=4E13#6eHY*G_'i$GT#rs_K=[-?2WlooN_@U`t(C`s8%V)P!=Sc'\-$K8f!iH6Ahg3P3,#=J]bB#dUZM0bI(?tJ>O'Z
%CJa;QbkYQT)EY#92G-"M:7KjTj#UFR$Mfkb6N_/8,f_8ek?+.)2$7mdlg1csn$I$*arkWaGKJSZ$\D'nhXjL*Z6%L3POZ==@Dh_G
%a&\tY0j\94d/d#=+O\d_&m)dCk1Wu`EJ6A98Sc*SFsC@L<g_dm)DhdJ3VN7+:3Pa<<>O2r\>Id[/4C\q6Eg@I3\7G/?JLmLgQ'O$
%C9FnT0)jjq#dP_(F1#A-]+SJ1(9mHA@@:uipkPg75<,dQ;oJ9&n@OQt>'JkM?Z,:>DJ&2X^O[I(@_R!L0iS$^<1!&:U&5U\\4Y$$
%TF-06&^IF`nRm\e9+5'7W*h5:UPnT'U.#lZ805_Qm2F6u_=U4`UVYR0ITP0Y3866i:5$s$^I773$)2Y/7/Oo(:NV>p_6,&"Zg&Fn
%?Z)J?;*i]G))jHgE>(D?,EMlLp@kA0268_&[9_E[jjb@h<pS04ml0TJ"7$`r='1(j>C1.<ZPIcVP9@V27m3n>XtbU70!9N]<H#O4
%%))"dFWqorcCFCH3!Sog6^K]Rac,4e`BJM@]m"pm4o%n`idk820?(a[TTP&ClQZ5VD7J-d=b>OYRNssjoLq8F-d!P:90HJNE+&pQ
%*\j\575\tG;he'd4_mH,*9bI(d7J_Z*RTaZI/KqMk@Hfa<h-26L[nPT_2j7Y]`u#LqULLLO^cqp>fOA]*G1$k8Xe?XRB!<-Q#q!r
%mm#@iWeT,NHNqtq/mLRZX/)_AX!cgj4ogG67To110#39[JX+!%E`$)M-LrjX`;u4Ic@gG5\8\_1haG0cHZAcn8&*t%5M<MA^&$%<
%#=6E99KtO`NIZ<[69b,/lilhho?cVi*khZ:%TfkZlCJ:;e9@6Vm6X!F33DD>>KH$5-h"8aXZ2J>eF]CD%.G\W18.SOATjjVLcHmC
%PLC]6$Xk<S#h')L#=\f$n6)oHS="dt5),,5YeT^CWTp^tP$Xj]ku?s$.G&LK`d.ggO(dXaY5.@jD_KN!1Ukl%_H,4>B.#4.ddh(O
%93#?e9o><4Q@LKA:3.,oVI%rYACk3j[*'PO`$GO`L=WDKc$pc$3k/toY*lI9MUZs3mOIYIc'=`5W)k+D[(k>d^2'ne07.#1Vf4c#
%Y-"nFeSF&K6%0D5q@p,lHb9H5q.3fP+BuG`mT2S_cAO4b3VJJDf3HFt1KeiJ1RCE!-p%Sei`@qM3gR$*?d8C]ct<S>FtmLE^:K_h
%MeFe+4^74)Jj)-INN1asa]-fY*W8A2keV?\F.<<&Y_@bu9S.I4=\IF'(7?[[=II?e2`HS6&p]$Er''r:+_R=)AIl09CVR]@(>c\k
%C#PW[?t_Y.rR+b#_WiLpIIYipe3&<4UTe$Qp>\uq6Ls8P&:f0ifZ0(:4%^V(W&VSq)V%uc7=rW$.@)G96\HSNXpnVISJS%g0m#r1
%I^cu$C/Z`/8)3#=n;N"j:VXa#%diYiH$St<q1&;&h)\[Lm:@>R:UUh<CrGW9]l!eM:1)0bn+Za3e-Fb#Fk\!LgTP9RmItHn@G^Mk
%m/e6`ds7e\7,crck<4j;]I!"2-&fI*G'5!"Z;Pi%e*-UYqq2X(-KM\#O$^ojGlj2`dpsM-$:gJq!Rid=I.:I$0MaZ$*R2o]Au<qN
%i.N3fjJeMS.W?TqaMV/r3LGc^ojm4G_(2'T)V:[%5m(cZL4i8+*QKclmqDBR*p]JYOB$K]FcOK1l6:XW\$TOLdPFfU3Pq/:Z(G/F
%?[M@3:oEQW3Q'Jc/-bl<.XIgsBCeVn(UR<NfXN,4"6g07P.BRom^Z('?f@<?<nBKAiP#h@qRX%8=9>(@"gRY_ku+jqW[tWSqQ<>+
%c3s)Mo%+4DMI/mE4VYH[':'&"8+C-r01Y@(Qq\[@gKCn\C5jc>'=C;\U@<I:\uU^`)J41OX\mT(9T.BUqf#6Y)hfUWZPk(<*2*QC
%2+b%789@A(>%MKtDjkj<F`Y^?*\dJIM77apeUZ?:-N:HQf?icWMiWS&%lX*0nrM&VpW_bPeTPR3N>=<HfEd43CXBKR=3o(#@?^?=
%YFIj]r0K=#=T(hm+RlV>BW8-")-kDoJ,RmT+1X\oj2Jl3gTV-Zr1LX7Ar9"t/\jibZ-poX=EMeC^..iLmTr[FCM`T9+fdJj))lC>
%0!p&qig)5l4F8CbDem76[UE=r3<_uh:_ObSdHe[oImKp!/rEpi(=:&1l^]OupZe[$MKcd\;SER6JBst/;&0"Vg9X<B_0ia=*M@/5
%6lk/Hs!bZ]26X/blAd_+o?'tWXP#&C<+g]nb(-jBa\K8>rc7-B5/=j]D)Q1*&!AtUOHc2iU.C;;1G.i4e66Xmr1_[/`+ke&33&ZN
%UDunkBR>npWRhE*`b.H.p5UdcW\ZS&>Y!92o.bsVHX_!bYWDAq'jQV$4L'9ms'@P[B8c7$%/2R9r_1soTL0+?Dpe+L9cX1(6/6_?
%+%[W#6!Z]h;LCq=%F/%7s,"tL4\A1p2F3IHCSioR`-Qpe"$INk#1<EsrBFdVX&Tc3&c2b'X"e!+K7KgD<Kf[$qT#[l!OtP:.7gTi
%#H/XG_lIgc.UNF'^`5;9El40OYHS#&S$-8t$7er#`3ltCieH.gg@]+`ql(#(ZUL$Ia?R->9okFcNq\ER.Xp7r6Fc-o]MMge[JTM(
%lKVrjZVt?:dUV(,\b&M[)Z!]kT1*l?SF1Es\G#cY[J$n+?>Dgu+ct'cemE_,2N.Q=)U,5"2AKh+kV-d#fM[WQY9($M0uHVI3L_L+
%WS64WATghjI::HXV0@f<YL%7'/(lSIJm68JCm[O)fX2&aod[6^IKe2a<D?VUWaB>UTI_jU@8"MZau$C\Nbi-llH!,+[)TF93'FgF
%g&f`\;_b\iVV$(]fG4VGWn#&PbVq7oNH5Q["\qs6Lf[HW+s1UJ)>b9N'Ec$4?19O-`O\R]Ki]54rE!poWOeeLkh+\#h57n.b:`3l
%]L1gkOXf9,NKD17E,@U5rI[,)G;M?H","qG(NW2Lcg%)bX3%&]*pt3p;Y:oVJ[WLR*P\g!I<`b%&iiD,R-Luh)u13K;?(_-=Jk4Q
%]Ggjq]J'.T29F/$icKVcd1+sW0Wtc^GrMIaFKAJ0ibk.P&/f[Z\snH2]77RlKC%Cm8>Jf</n<:k*aRu.HocF&)reD+XQ[W5T5#RS
%a$,ZH34e60S,c$5eO&8$9Og<!H$Y%]T&uH$YZGaeZ#N>o+@QS6Phki,m"R`92uG,:iD8,9J')[(a(b%@m"R`92uG,:iD8,9J')[(
%;g-&CL[9ciAM8EZoAS/0["!AmiU,sW>Cr;k!n,fL88Fc8I4VWt-F4^D+-.J7b0_m0@?rZ&\=>Y'nK$sYm7`Ppiq;$\HBGNK"VL'b
%dlS5@a'>5,GnZa^bi9]qBSDDTQ2QOdP.!]8EX,TL;.6<ojb]9`8'H9P;.],pZ;0uS"]S4."JZ5\&1=-PKTiG\.bU1MV(##:PccEG
%;;+pIMpdqa@2>j4JgF&MD5r,QH"4;SUaXIgWY8Dno![&ae_F!c:CYo\&C.0oMiMn6#Bea],b<n?_So^bB-$'d,3Lfr?&./]$+_9E
%:hUhXlQWf0ERZGG]#D+GEU&uc@"p_`lI+:[5s/pfGY1guO8rptaTe,##g4EJW@3MQhFu=3N:oT+MIe?%M*Mi4dGC@(>%Mai_Rol'
%JlD!,d5ks&FG2WS.ZIMZ2O;1;q6PA@N3`iJlH5u9kp'BbfSqAd7@L)#F%&!/_]TALd>Y2`.>'C2',anjnn"NI\ktRcAl1saX.De*
%%d'cMOkIE_T-@]Wl_\l>Hqme9:kC)/:n9ko"S2t!^Cp5gi@G#"K9*o.a^9^k6o[3B5V[Fl+9rFL$tl4M9?o3?`6bne?e6=6qUJr$
%qd8"RR"+G(qKFbU$_KTXT-8<<dfk1)q$<$"!SN62NZe@-m&s9H-2l]lGE?oc7u@4/f2T8)Er@<+pZo#`!J<QU_;?ZL:<df6MfUoA
%M&g<6AhdC8g:nVEe"`(a68m.*DKR4)b.cRH"4?=IBJa3WZ+3k&ol4(k+NGG0`U"FtokgebNT$"fi&3Pb)<(OKiA"Af\.-arUEe$@
%F^4:/f(ge(Dfn9iS#N[[d.]e2^EooDcuAK6PieD/+C+g[+9)Ta]SbY+ISQI:\_-U-fbPjV"hP88,d?;W[;Yg!*hg,m8##E^hN_'Z
%_uiQ1?XmS&i_,]-]ch#Wb\a-iHai!F`Q79N6MAZmHA#f0N7Kh%/o"\C7@&+h$S%pZOKqVe8JY<Ei"T:.>1=6sS?GOZU04g+_0Smg
%^un44VB9tK^72I8'0`lKlAZZBWt!R;ZXfV\56eMUp6^O@RnT@BM\;(ZIa^c,Hc\/f,Q%GWqtI;N^&E3L]mt8K`dm!V$2-9gC@,s1
%&23)+eD=Q=7<)Mm,"^aWL/A.t1I6:XS4ns/m8%eOE'=h-.)H:kOaNcI]@;rg1<Bte\r;A8I))qUoU:1(^3o2tHb[PX0A-/@KtlNf
%VEWHE?FrLoS<"#I*o7'8V>+f5+uQam9RP7h##V&j\3(dc<um+/RVI?f;(_,h;l<K=$XBY&p(3`q9f>VmqJ>O>@F<FP<TK[+Q)oP6
%@q[pjeN$s0,:X^WYT7#`QmBddHC(&tCtWYb&ais<>h6hp`J4qVN<EAXVS@FS7YNWs5AWI4e0`2n]C[H0B3eH+KU&&"8:gE>AC[^Z
%eS"&HXWH"V-`&(K$.gt:8n+!QbXBmGh\%&12@XVp#1p'bNYrVXouhl]letBHEVIjOLP9gJ+qTSU#(3D*Mn*3(<@]tdNo[`-3;ngF
%3RF/lW;X6>WVLjF:h+>NboBuH%?d,mF+j75psd68l2oDu+/=,^78<F-@tN%DhE='oi<qIhDE(ej:aSl,.(L>n7[J'N\\smDkH:^?
%nB:tkD',EbmGu,k)R*18P-G=E*aR]FNGkt>cB:&\k5@IL!G#*,OU@PdM_h(t.HD,24C(Q[!RlWGQ7nPBCSGq828tk.f(*O]I,6A;
%d4K?<E67CtZ&]q(7S+mi9&,'<5$3Z,]pS^.i^q3tL$,+YaNq3j4:<s^Pc:]JS*10lqYE#CJ(ftQVg!.uA7\jJHns<L_)/sa0ra;/
%T:Kg9)]aN'b-Z=anh*L+.\_tcC:mE"15:-IUQPa,#7I8>1pNa1_3)[kKEn(oY>Q=)M-oU9ZCMeP%b5oZNk.,'jKe9lU<J7\@P0V#
%ZMKC'M$tbs?T3+'8IKV>^9RC(["s6KQ"mhUE1T'LJ#'+,?TP7_A;c>WCSqn.6%V0beocSQc:e]#=]SqWqF=US9=G!rcct^B(?[':
%)&etkGVVo_cGa@C)bJG6aDiOA-`]1/Z$AYoP-f^q4V@h\rQ]urW4jOd"H&blVs,j=1%djBhOd`!86=G7XALn%Ph@g'pmdI!1F*dd
%#`F(krp2&`33A$WDlHsFcH0:#P(m/f+o:,h7=:,VNq%t1Hibjcq'oM#/"c7PVn%tA2gMNQ2QJ`k<Kju9B<1V!WAH,j:IO<n:7]W:
%!eOus&kVK0,Vk9\KFK/B)W=9i=\L)W,T29p342_!Zr13`7@L3J*(Koi*C_*bDE"ZP':_Af3&]$^Wu!Z,Q/00#)s\;@)-n;uIa&4S
%<f=Uq_>=tCpZY.!>s3rtJ4u`7"t%NB^Cmq^@,jZ6"rme4%?:>B$q/oCD_K]&1Sq%_/OedPq.)BdBQ(I'L3Ut(<fO+_TWr6cB`#<h
%[T,%iQ;:t0.M#dJA/knYKb%djl`f<Pc+D`#G\coQhSHQ/l\mr]`O0&XoAZ2ii?K4VQAM9CK%j=_hqI![Stu\df0co"[n4r.mAMWo
%hS")I]_t!#]ls'-N]I_=X\'J'`I*u0j5OiOY*26T:c&.ujmBE1-Rs"c`L[P2c.DM:gJp/7;6Ga#n0Y^.ou/i0l`!pR4?'=:%;h]%
%P@iV]-=+$UoUMH59&.tpa=;%$Or[[OY&e8kdBM^qPMF-B+(pbMCA#TQAkt*d@FZ,+L5#NQatj+Vj.<uBB]+7SKanpD&N%sfF/5IY
%&4;&'\T$-WJI\YQ'022YoL&r:JHV2>*GK0/"am"r0$;L27j#aZd.E?o9:s:D@?OR[5lM-NGAr!<f>jC(F,#n%$ui:n!<Ps&psdY9
%1Iiq-l?0pV>7t$RO_amM2)ZZbVnX7$ZO"ii+u@Y6fTf/K9ZBe$iosT(hc]RkX<H\2V#+[U!\PuocQ&8`8A\d12GlkCOV!LL/CX>I
%2-8hl2u"<OCr3m]q:=)-f9nYnAt^N[J`)J6^%i%LEn6\WZSF^:["iLFHZ1#.C3EZG/^onRENomTmFWOkKL2/uds*%^%;kW&b0qh5
%UeHmgf.@qWPDf)VB(fDFVInY#;R"^TXJ)U,[`H3UF(4,*bGa]pJ@7r:f,UT'(/g-mT%@u45-k#&ERf%Dd(bO>?H6%BLh[_u*F'k4
%XVRX""<(d#ekP_XF,FCKTrYu+!9ZlH:f4!a%H&+@E6Kh%o-/rmoT(P("`-HaAs67k)E9.5_Kg\US^CUO;sS4P]hj>$o;saeZSK^)
%`61jHfnt9Y'gtBe3F$o6_sHKRHlh$*C`>$^%HBm:cs=4L.j&su6?d8ET>Ng+p"&41W6@[LES-2k,h';)Q+@3AI+*_M.Wi"^)R4aG
%-GZuqFLbsF7>p&p`S-Zp`Kgu(c1-n7R<,5oDtWu=XQ'Q5_OXoEZj]D/"Q-VKl>tK7NS?gF]^hq"8$kEnqJrc;@W@!T7sJN7hmU4k
%YPCFB2,Y5.JR)!sE0CP!a]K&F%Y(pS+)Sc^DpX:sPDIhh)$WS/E6;A>Y-j$K\"2\ZQFYnK/-,Bdki=So=\dZZ-_7L^>7)3EU"/M?
%JPKB4@86^KYSU$9GDjGF6?ZDM%ZR;*<^F#(TWY)*/Z=!go[e3W7o9O0rrZ>6J*Jb<U:qY"lX1A[,Wnj3%+EY$P;l.5Mc?eY7oeK!
%P^sdm%q\rVG/:8Y)Udfq+/RGVH7j6CBBs`k9X8ii1(Is]\tg$lcC^p(Ff)S/WqYn["5%Bsme$T'&bLnf9Wf>*ZhZ?ol#mLZF0Q%4
%0h+3.jRik^ig5+r4Kan7gQW*#.2cpaX,-3pfUp'h,5,(5aERZq1"F5;#7"'e^HM1tGFp]+@:T"tAHEr.s.$1Yq>f5#rRXr0FCO]G
%7"^\Fm.gj`X9u,gSsEl$>bk'dj?87I7;?Q%NHkem7p$-PZ+9VUE_"lp2R`3o<.8MGE?d"oU*;]s>Q9+SEN0jOVrcg\74POic,Rsf
%clYdm_Ff_tkG+8a<jp;[Sn+Q+Z7J/$+<H-\[(3XH!!UhNlKFnG:abN^.%K2\&Oh?I*'W]iF3Kq9k.\t>Pp]ee':BA00X(*/,3J`+
%Ce5/BOfRo*-KK(_n95KiV_XL;!N_M=qr%*"D@#u\rk>r#ef%E[K2W^p<ug&QrBkPG&P)BgLB2Y*dj.A(cMT</`-_'%5Q7Z7E_?>X
%B_gBsG5[.n^T*3p.#W]@oi4qd<?IT@]5B*6%%Z8!)/*M/LUN5J=N3t*(>N'PK'od*q^-NqLpQX%NkG$p)5cD/h_`K_d-/rE`Ga?$
%O0rtiX(9kAKr3MI6e:4CN/O\Y$<AmK))*.MmZS!bQ%Q9j%YeDH9s1`-s&`#t%I@4e-#LUI;;08;<Alb@,1b1>:3fo>DdEbD#qOQ%
%:1YC3dnXLkoT@0@kbl8=BKWClC0Q_O'kCWElN7e.SM&8phC`%C7!FM\1>F<3_pj/151'1Z3Qdu<7D>3^Mt,<!=8Y4qn:-0&<NH:O
%P],?p^?&e@O7EM4nl:MS1]2sHme>3\gWn"Z9Eo``SaD_g<mE^Ao):jJllU"P?Z8n_:4l1gp&`^c,mI[p>#/F;VY-!,Z,UoXe9cP-
%`W,UHQ>>A_RYC.-[^gIKODU,6NG#`_FOYTeoR3/`JEnSh`S4D1M(Xi2j#/*=YEEVOH#]tW`(L]MhNB)H2$?QSQRupBjqQI[X9(8n
%7+)sa.Pt"m?P(mAqM'"8j..JSjFP7oCUeDmNYakuK]IdlQ5!f&%E"#G%TZ`qjQC,0(PA1[RtC,p?3T+q<j)FqX&c,[hW')Y=)bU,
%k\<g+.u1k.:+(Z`>T)lc(uZI'>]j:;6Bfqg:0,D_hUW_cVp\i'0(jB*N2_geE\P6F*6Y9SULtN]7!O[t^GhZGg_UgQX<J0clg+DO
%g&_$.E#R=\3::uNR-R-(Y=K.%'sXQLk*B?u%qNFpc&6@#6pIbb:n_0fAp"S&/H+.=c;%QuLIMd-oIdXVjnAK9^P(9GUMd]0qtGC^
%@L]-+U?7e4B)ZWkmu3a@KD2p5ET_eA(cR2h?`ksi^J'X!1qEJ87B9r7*`YCAMaaUD7?dE"S?Z"B-"lR*$th<mAcc/*o.!cu1o=mp
%]ie.LG4;BP)6:<"s,s*[Y,,(A[ZkCZr_F"V\?VEEGhLGql.Ln3s'0%`SiuH?N;SpHV,DSRI`cfu`eC>:!_VU'X9t'\*BO2dVVr"C
%ND%p=ocZGOmA.B6LV%F@7c1e0ZL\(2EL@*/"k0"Z*^+[j'=nnN_]<\!/hUPt6g;Y!"I2c,(5Q1t!c=&".>I_/f?6'Cp7_:Rf>5*%
%pMq=s'b#J+Y%iVN%]@9G9iLFJmOgfkFD5-U?+mjt!L@03"l;8S#8=h;:@<J>%WZ80,`02Z7IA(bYP/sV)PlUK[8IBD\7@ZK-Y;I.
%p-e$+cBap4Zh0e>/?0g`!oPKDhqj]!>pO0gU7Psmi$+-nbTTk9/9IP<H#8Rj9>PM/^.S.+BMU5r)p=\6=b<N3qfAE^FR$H1>*HjT
%*T"jGq1@4>)B;fp3V-RBcGb1BI>oFnlIG7ErPiu!f5aZoHXIfYff+\6$^]MZ`9`B2UEl"n`C6=/3f%XEds5>%f!?'g<D8E.AM#Bm
%'YcjM&p"3=R0=#<i.:0LVM`3UdS_S(e/AF/oG&]XfoaWP+("r^f!L%EZ79gop_1%WCS$-rFcVioOqiG)\CmkX/.@6T-*iD(eU+CN
%,HY8`Q/75EH)SrV30&[&jf:uNSp!8p_0``SGicNE8UDXXY,[f,elt;\I@VNg5f3qVji3ebrpJ+\m.9DhTK&&rWm]o:f?6's6H?Ub
%PHLrn_>ZK#^%RrI]XgW?+!alnUZLfAgm-]\m&DFjAa_VPRce[Yl#@S@J$KR9S<'-3Oe#S+s'lJu_rB-0W]Q6,?tsaJegunanj!E&
%;4?3PUK)ck7<[*J`0$lC%@`1sX!^JBH'3"@P$#-:E%MEd'5:c[qBoRpBlFa;_A*ctNf#?oCp1'oSkBd2Ni!bo)OfFlL8p;&<gTU=
%.(20IXf+9sT^)3'V1bkbV:&Z2/7#eH*B'&0gmh1H%u-f_#jQ2JHLI)Fio7iJ&8#=ug=u\Yk=#8Ce8L,UG0&'!ZN7:c$2U!]Dok#2
%1f`Ro?@T<dj"i?hE?/;plfHZ&^U,!YihDgnAZb%TfCC%F1tUSi1D_g8>tu3.fqi;)W>T#!a3-NiMe7;"_l;B]pMt.H`Kp!mR^(03
%OuP_7:%crs*cCP\fToiU*HjqJfZ6).DW&Vca_G,;f]F*TmYRW/#O3cu[m>eST1_+?69n+cgIZQP$q*tFW$-eV;)%F8r[n^[hY$\^
%*4iuN[pH"!:AClPG7qMYNDSI>bnln\G363n^/'+TGri<M;i0'_q(k^hP:_1r7pr]ls*W*Vc2;c"UPiHCr@_:L5;pAl^-=60Q6kEq
%O\EQSjnCjHr-k*_O$)NV3F^T^S&sc75RkPf)H9O89:.N9)PbgBSC8s^b=U<,,/o-2s-([Wlt5BJ'@E#k*V9;.!:D8SiO#9&"[1+E
%=Cd0GE:)5,bVn(`0d<qCj]6tWDa_UO$X"r=]^Y.M.APJB9/fI*eObcPNhO;llR[TZKcCO`EAZIWhr=]h\r`7'iWoA;T#d$*M\5UH
%"$\\8]f+>c?-2ASG0tM7K-XR!'46Nj=:VMQM76KEA6FtHJfB,0P3SCj)i9kg`js2j[[!np%]Kr:9i:&qWGE++raE0dYT^5:,ch5Y
%YlWijcgK,cjNuP;+EI+B1diqKer?f>8QOq=%pa-,?Id0@]6PtK]rnkkBm`g'fS@cL$N+TW!FDS<KmDUU<e^6fMU\l0?+jV&HMom6
%`8u"1_N34mYr5Qto:2qN_TWaCF*T(7_Q':RY#W&AQ)J>;f@4'sb(S`u)!hBE$GuM-SNT=mdK6J)5VubcU3`H5$-lZnlc?:rk)EcW
%NT808TPN?@eBMipCDQlQqR*So(k`cMaZ597MV:(:=-@`g)9?j.'KKpXf5IAc#`I43WECE'/JDR15k4>d_d:&k>IeO[%1ML=(@Eh,
%ESF#i^H<!arN.:4&m^P+S"!U:Wi`#L.EGC!)QVWRXDUPM97Co)2n$6$eUj>H[1J[M=p@:6AIYg->hN/Y3`)"!j/P)IQ/E!dD/@Bc
%X1lb]hTOc*fm>F%+W%;-0RKRLVLme9rU#_J;]"J&SAIb*C(45*I/>jI0!jU/T:@oPe!=u<2uWq)akD'GD!&I8%AQJH]\s`8J&]K_
%T,PAEYs/J_b_n<f:m*ulaE,nm'99PMh5m?c:Ef`d7D,YR_Lnl`<Z/YQaJ\a]%nCP+nuDLT4l<4%X-2<il:%Ha)L]<Nfe8=YAnWs]
%IC1tE@q:AQ2#3+hheD_r!U=2oHnaZTe*jVtJ#d*d'##c_^;$,6/s<tKbNG:Ml;,\ZZtUq7a>!7"F&BF`<O]&u%iC>D"4j5UJYsGH
%g>ZaI+D5I[]#X0jR$*so.9nTUhJ`<sQGIW3V=7l<`q`6L7:2P3fg!AdD+!AUgWG9-`oO6qX$"o+>FKHO6f#b=a5FhQ26L(lRX<NF
%C!^g5Jqc9,g]c7.H!.&RY:%u%_L8;^#HnTK<Ofl$<U1(KDoX&5@(=gUeXEh_TOl`P74U2aX_QFuc`guX/[]D_D<k)&Od6)_D<pS'
%]P6H,"V&>SU;<SFAo$\c05/Xj75gK5lg*'^C6?d,WEdtch-\DpTaC*@9X;pd8Vu-QF<nC?WL=P>No1F*,uG8V$/N1&PEPN/O0]5i
%1MphPh/t0k05$g(#2*j!#'MqH&pc"kWPuP3;F7%5NJq`+9Q\Tqj]R>Lc$Gi]B#P_69W+ke[Nj\4,b*N31dtU%YR9p#FbSE:?)<O1
%ZcFa8H60$!qC;K(eJ27dWQ).;MG!1g0!Q]\6%'Zp"bR1@3fR_:(?n1VdF7L-s':D[iI3DA.&q3"]ZZ[#mX,t17RaX@jeeFFM@7D(
%`mL"2=7Ou0iB+Q:[+R$2*DVSee"fp22LDJW<$.P@kp#QG"K,ZdgOV9WeN7'WfpknTHub1nG)0#J%^]ZN>Kt*3;i]5L!Os;+j#UDn
%XT_BVLCps>L:HhlVQ;iUf*9U)5e@4&>/k<edk]iq\IQeK/D">Ue?(?Z1Tr[XJrYb*90CEeEgsef)oT3rWS"6n2+`M/ejZ(H@A*s<
%*d!h1@H3Yb&r8I3U)P"*f5&r`.r1D[XIJ9DS9RU\"U3(!]#lJEBmeLbL)rtDFHA>sg/sO?.p)kNo-,!:'OYdAXlK/cnYV5fD7"[h
%[f#D24*K+427\]W"eV%NA!QSZ-N"/$NPLu-F1_"^rL$)4hP;;#qPI81&UrW>n(PimZB]_lpi3^09NL-AlRu7@02"BGE<f8$A@4]T
%VZaXX=;hK/^Y%4PZL^_!WD>C=h"!pF=6SVI:2*j6NR85D%4Bc&[rNE?Lt\E53NU*>J`4,bai=WS7%'5e.BKi"9MN,^9-22(\<;&@
%3L(V41rXW4Yap_]dHGX8mjHcs_R]lPK'MW27,_E.0nIXn?A%d[L/G:%o\cJ@cH=2_1)F[I=lW#L'OP&>Y:-9$-4PgW#-i$do>k`9
%Q[g2U[a<16[J\?*eC5_/9jJdV1j:@o7@QM`3?qZ_L^)mIO"r)5=>eoHrAr]06M5N`_<T3B4aFu(-X=n"meMs53AR)]:2g&d*p9U%
%S_JG5mm#7Tr1+LV%BR(?>']Y3\[Y[qEEo;.LF]aBd2c"p%S\n,M",/pS=%lKZHd63F]Re?KUUK!E]U\R`APlLMAtqfe/\1kcULVC
%J*+E;`s'euA6^/L/NDa8f@>6Oa8+D9p]H5K""26mGobM)1>U!W9=MT.:M:irQ9?p=k2_h+nQPeR>2naMDi5Pn>tsZsV."aW(p+'"
%MT75g[Dpf>/MXeJ<EIaq:hH"hM9>!rcH!W+>1718;g_pB^"G/@O+]5DnZJ.jPmU]brJ?rthIYqX-G"(%+58$[@+8_$pS:GteP6DY
%PG%C8TME^.l2Z//#i.o6,H_e!GLO'kqn"sOTBt`"`Pl8mREd8q9">g=he3l0pP2^&kjO8]&%f3M^%+MWECE?td=Q[%re#"/TH6a7
%<nh2+Q03q]Y#!$'@XNW/e>GT4H]\[s^`Nl`i:++ShJ:V54X4%GiNa!S?,A'Oeu'<t$i@:ZF]$R-ne%Z&bh),gfM0]0Jo]J("9B8e
%59C3=3Ec@6k<]nA[%pOE`JqI?5TM0BTJAkJ@CgBR0)forHBq<HZO0ND95jBf0;h!:[]Y8Qd=%eV)kF:<C<;8p%$"Ci%G=%Jljt,J
%bh#fm<\8*`PEZYMba9u!(&>MM-.5BQ=#iVn%<5m!=m2V,,AV@@Bh#7oNCt/I;kq*d`plf4Tq1R%l!Bj3CTX*I?2?,fQXCND#3)q,
%XYOg#9i+<WOQEOpCd"5j7l0jPcCVOD-a#8(Mb5K:PB5I-C9>t#Dt,b=PBp?q[]54fC:>ZO>h*qli7^dooX%:cMnJa:RZcm623K7/
%d<<L(lSXaO/UVtKKdg9-H;rMMa'"?qO9KIMYR4:,3mG86P?@sn=TCG^gM??1R6F,6n[TI6;biO]p,Ve;4XFB'?jC=9j]U&W`n'BR
%ns7S`EI)dlV`nVYLo#<2&m(\^QP4%GC^'^+ZIe>DFD+PD>=FFION&`*apCpZY_\1!->YlOhV>cP]dN2jIaZm0!m-bgG42IR#9+[r
%XAsaR:('tCYXZK+b6CW`,N_TkW^0YoQJ`+eO;?)_BNbJWHd]4fN:gdfDhg-&W;A.8Z%:P0cc!-H/B\2%Y:s*+_793hYQ!hAs2*p=
%r4=<@kX65^E7W-ERCH!!`?kF"j<Hd%"CrTd3J.0QZ+ib0)T>^>$_kX$9(Ag#2.>Icop:o!E;apC\$NhH1TsUaB'74Cl,5K]XT)Lb
%72E6OX?9#G!JjH!f@)4]6?OpOmR:$=4"O:8[\88qYe\LO\P6dl(sM#2I?&]+$;P)E*T*e_.o$LWh+p66hRoVVn"H_YZ/m*.X,^F:
%;X;#lK%P+P*2V@M,cdXXNFg/s1?"+!7S?".=5m7=&FZr.h;Ofe$BK0ufh/$d\tj-eV/o3g2AeG[6--cC;id$C84qN>e@MaUeiUM`
%B9F%6(-U.d=EKfPXen-e3E'LsXO?<gn^CGCFbFBm-sri5Hu;:k#-Ns0<^5!-7Whn>C99WkO&Yq@-1u&Ii@igFo-D&_.IuSt3hn3,
%m"uN^X^^*3Fdq`'h.F*%p>4+L3/b*-aL'7M8-dqSF[S$X]Bqh,gpdhq<<&-sSql3s!H(@%VF?_&W>4$"k%e=:GHH4iIA9jJH)hI=
%m[^'i)0?re/.-"SM\.@U0lAL1[^CRm/,`/[<<..N)s(J9!d;$'kQU6;1fi,qZm$HjSuj02H#]+g2(l%GF^ZG5ac#)`,Ll%`\BL#&
%qm(?K','49/#Yh"?5J+]8d&:[^:Dt$_-I[*g\?*%)7(\`b])'2$teH=S8XR3\)Nt:G89/Fm_8A.gt]a$bgo.1"$QEZb:]mFVu,Q>
%5&n0+!n#Kd&c]'Xs+%]pYBn0[-&+!+E/C9?\_N,r^.4%r>s'<V'GIihr!Tq_:7UZ(%m-3VZ=I_>Iu.ga-.F+RA1rs@`-&<!#J6-9
%8(<u4s)RunLnV@SbIFfSMjr?jbVbj$`,45>ENa61jW;'C7u#Q?[/^"4]@t'3L&Db;TZ,L=g>L?nf3Nb-q*glGZasFYPW5W7KrP2o
%,ds1BLaY^eg-T?jK9\DU=GkM7=;*2!C0dN^,J>MFk%^/#A6hW"i\OF+r"FIDi&@8T2,gZ=d?Po^iG+'R=6#Xs:pnA_P#J6;h%fs_
%5Y"SO3SQ+oEd[e`:![&9e0jW_/UOG##Z=a5^R.etc#u@@%)&#EECG5CbBOhs_orZ?8K08?:9B4hC+:G".hA4o?%d8ah)+AoI?e"!
%HYQ;=B?TM_)%Vm@4O@QFHcEL%MYBY>h&o6Ie+Z/XG:/bR7(ePs`Ns*cSYQ5tX0s:iD%HhDUsYV6r=B"U.,PU,!@&&p2b?p04_a(U
%NR\En+m+cQRQ4bfAl?h^Um=ZLPZ+t0LP)3j.i1c%=3.cY75k(\'h@hO$=_M:;Q.R+oN4VhbT^'F0^qEqgdLqS_%VVK9ICYt(_(T'
%2)3?A:<^IhS^'A-hAW$C;Uo2q=k\SVA7<Q[X5%fVoa:[?X,F6sk,[+.>*Set0t5ZC>XOrBWQGQ*NKU=7eOa=j;T*CW"C+7BrM*#k
%U`#&qjXEf6#Zq:6XWjIU.ctG,[V7gjBd8Qc>`W@*)T:,01AP<)1J5TF\>D]:diufR@\&f>e6>f!Nsg!bc8s)Vku2uumkH9qZRT62
%FKr">Td_lY'5WD'L/":MF.KCke(1U87+G2=61g;i=^)VgXQ&Q8]"&&CoD+QQWpCCA?SHD7?V5N[Zl<#Sd??q,XTY]4?rb6%UM4qZ
%1:rqWmj."<V)Bl]b`si_b[n\]Rm]8n]YFo(TD\4UaMl0)LRPTT0df/ln@jj'ED,2EX3"B1e*F.Qhs-bd9m(P^l;ojUUuQ;'JG$RV
%l"+.=$'O(]Yad`pSZ$2a.US<G13`PjMTZPSLo+qi(I[FD8u<q(LEtl'bH%[%jW\``%Gp!g3%4h!)nbRbhEIEq-a,LTDkF[H-gRJ>
%/.UpdI_[I0?nL9CmH%Ik#F7I.n+d-d&%:`g[QhpC?dX1dU30&kH,iRtDboP]atEkPX5U$(mbV]rM7lKDXAiKk`?1oTGpVV'>&tBE
%5&1:I:Qp'S.3)da"sBNSQnP&3&`sC4WMB8Ld**2m'V#`1Jd8QK'Uf:gno,@`%ORrpC=U2;/"KTWf:d.L")$VJKM;[la"B>eQ!13-
%3bTaK2<[Sj<120OeI9l8WX<@=R[JcL&`ut`47]d,cZlR#4uHu?e1qOQr_F6pCf,s+BJ^(t)cu*9+C-/dX$.kob[rDQH\G[A>$^lM
%L&4'Hl0Hc(@0q2H'uZU98XDIc[iT/I8r$V^*nqm"e\u>1)\ij$/+]]Kqk=^GfITI>?SUj5'&TQD$EbBA3i1T:,^!%M\gd>iG)/E%
%E\J:K2+B.gQE!N.2+OB&C=Oe<PCCuff@lZQlf%-95@WW#8ZT'm"oPg.T/Q!9`F)Vo><]`?V7,F+2`"8a9@,`&mb7;KI,3S^8UBf+
%:*;.f9,g@%I\d#UGZ\(TeCO:$Y3(GQIVSA5g`Z(U+rS!M<)<72EJ9']E+>1X@@BYO\$=@Vb3O@UeAp\,lRV"ARcu>JmAoiMCg&9`
%?CZnd\g)N6`0#/4lODV%L1Per`3)nm*V!Tn>:iIVgpBa2O/d)q9rn-a+fomj.,AYq/h>fSiiJQ#.,_FFEE!*+UXSU:oKl&F_k/c?
%<l/*&;a.Ha?4s;rC0h![2!AN[(42hH2mZGCWQtKfPlYUdP+?@ZrT26dI";)Bb$*-DVnuQeDXYN/Hu5aOeZE*(PR\kal$$UE9l_6(
%"<!sS8fCHMP(Ta3&6$JZ4e_S000!^fY5-NoQRHC?RW\orXNl2u"YG3T\RBnLke0"?WcRT9\;pr)B>T,jaZZV`^DFZ8/EbUJatK>_
%@mPHl%8D[ASVfD/<fR-9bg\f=#tsED4-JdlJOHHCm"(ae9nR-Z^&FY/T@Ks3(:I;gYJu*L$j*bKrR<+>MuGH3@6f3FnR<bFPQTDF
%K&K_?l4[PWEpih*gi2BB.kC\bk%gBm>HY13`CF0+gF7$=/YHC1B!@oMn3:,&H%c*:MpV*pN1]j%FXhjtSg\2$:j&J?%-o%d)q<E]
%-jZ`j#VONUrL-B_ZlVnTXXXeOV[ZCF\Pufe]i&)%AZNJp%"@3,]%157"ZD;<>8k_c`&8?B=QhGS_N1N/o3ORl<F^cK-?D(F^f%SR
%V9A4qP5E+f&PMR,k?aiR?+)KR8[rp0_M0LMJR,;^)m:GiiE%]rft==oe2n)>`Qr1h_Q>POX@sj3j2`"%WK)An[*<jH#H&pc<*Na6
%E@Rh$f+csL*nu:>>IBnZSbXVPX`:M[\hFO%i`4L/aENf3UBe\$`B^V&(H`+$Z0kJ0iRLEsa#.IMWkl\QLWK^$ZAWST`H[0^8)pEB
%X@>1%?Gp5LI"pI*H`I#ic^P`VVW`XJ.=RT"B'8,*![>29H-^D%6C%)!;ZFkVeHHiHV9@/0-[-J?/9.Z=>`!Y,TUCjCI]4YS`)f9`
%mio>)],B`G&Z=hi')'-`(_$l?%.bJ`j).D3e'Rt7h\U$h6^SsP'WKmL*i>UN5j8uTNa9h_Gch`2HAThrN.j?[jADDuTQ(h%GR">`
%MeVN`I`@^flri3/\_lsI#:O[;W].N9Kj1_UlF!fsBM+E;Jn01-kCDj`T[$a+e#4n-oG*f_ITeJ^<tRI&r'%M&a@/F`=WE><cs/*Q
%N2EUTU$O2hB.i=e<<JeU'U_gr.kN^:PStW;OiYZN<@&\lJapHOid+n)BbLU5-!H"s7S@VK'j3h"F)KcVSZZ:Y?RG@cNW5hr?h^.Y
%9;fq(WjXX/UWa4uE(bBD0Ls(E&t`$K@e&]*2"[aq_rk;?bkiN<ijMYU8EZrjG)g[uScq\<-ETe]X-M>Y>oM3P<7DJtLb$Gb<@o;o
%Ul21LJp??V*HS8ok+LEX\Vmb=l#HaM[Ie\"oZ[o9meI7"r5D[,I+?R&A12OfN)Sl'pf6H^Kp,L^?khP+<QVBA,WZcKIB)QKD!<sl
%f9)F>.e?.e!co3#[b_++8&^UEdJ6re\O7"^phZo7@`Yp(eS&)Z28]Hs[&Z3PMu\ZG-AZ^)BU,DH0'U1p(54<5e<@'.s)7iCs$Rd\
%nqS>3&hO!9\j'j^YYSN9%"I1TOcRjK>lSj4D.2P:\*:NV3:8A(p)6H7?p8nn-#['S[sHXH>Rl=1Pt-AI+#M(B>f#3a6+hqe[UGWH
%3E\n9M;TAT%^%p30oq6d.?3X6m+.aEI9ULcd.l7]X-N:k0J=s"noWo7dn5D?Wla"rHdg]'`Wu+dQ*#Lu2.O;#CN-94YHMa7Qs#_?
%Xp'n5FbhC&BM0FK,seSd;FNnETKBZZn$KA=1qf_6W/@P(b!*8t_@>V8CVfE(jg;MD@(G]C`IhB4^XV,lG.Gb<NS>2FoaT(J^9Y?;
%g2?:\fs/A)KkV9[7p"&`>cW?4*F^I\s$!(-H0#\iCYGXbb:42RbihHEBL7)>op%AmHnpM8=dVk*LCh3oF0$&5n7O+k]!;@cL`*-B
%fp'\'57Sgn2dtOgDs;Q<j(8?CJ3H;KHr+-S#`GE@D.U\A,Xq[c'GEhY-f-C#?5F6,PpuB(U<8=HfYgfc4L1Ke&eir14U!G(G`SZ!
%k3J;6kit/T'c)E^TQk2@lqH,JcD1:uk(chDgS\5tp_Kk"5q-gt6Cu*.&nGYGUT2@/H?@$ak_7(8UrQ!HNn;0gK..Y;m!*3*kMjjc
%Dp%*\!E#tC/"20r.6jh6_rqG[*mHuB--4B%Pai!s9SRY0,^Y8-.FS*OeJLYr?92d5UK_9R#A))T@-N;dX&&`2IUA?h9r&IfIhaDb
%acqjL-j_+V"j15Od#hL[4"=J)^nj_2XFHE(&X"f.F671RN!#3#P82/HV,EQ.YGFNsQUT6O5F>tALqP!(Xl;pRB![a3&Ij?`!#VdO
%0rO118HSiTf&0PBF%b-o$[L3VgU@4XD3Yq+]kVFc\&bBVb+VHO-#CXO%nWGg$7V[2;JqdaY6uf<7Oc;:q7@o-?MkL\2E3_Y)FpjB
%s!nto@;851$-RjkL!dO;H?jj_kBu8pKtCQbe_AZhd6?>=RGP7+9m*d=(]6!CkuFFR%i[:gEkTL&WRP.SS+R8\EbW/Bf-Jg>)g8_8
%a0a:Zequ-()BM=[pa!ECSn`T1?>9'R=k,4!?%8c9Ga,"M>u2!R/\\LL>O/tr?/jBEX6j1.dd#7$k-3Ro?3tTXpI_OA)CE5IbrW=s
%D*e%K8pshu5UR<>HF4\%>1UroeAULh[:K.ibrn_*7=0B-&]rU-HCeY/Db@J0cR=41cct7W`5OSio2-,:pQCH2c-(KrXS4)S>igd=
%Wb'@]LRL*;H^6.rH#T$U3YP;1jl1UPRnp$MMo>FkC,JX[^VMaCoVS"*ld>O:W`/5n*TjZg1&K6DnZ:t!UO;sR3qm"[XOhtlhaD9u
%*TYuTf5LUh9:]a>hdrBkc0/Bt;I%DnX>,SQHBk(mj&L6eo<)/!V8p7G'ffZ=V-&1=K&t^S#npm_:ib[bgrMZ$h#3Y'eO@qWIa4Y/
%Mqf*DkT?!)q>Qn@pSCNi)P'KF##-)@kl]=q"\Bt;.SIuM0kFXSgh4(e/D=.9/_UD*(i*=%[R&u9i[ER7.%@,L8j/1eA?Jgl'&%A,
%7S-^i;Z5_11@$6"Q(MiLG1)l`MZ286\lYBuB_i@*QCSmORYm*[j9$^7f'![%mW,>9FXl]t2>K%B:'(S]FA;q9$*KJ*ij-LfnBYQh
%TdT$GZ'PI(".$l;]8#V9cKsL5?63>p7WV*nlI_B$^<s4jq=W@oLY9>__-a7#DB0\1PaRcoCl^A$4XK&"rX]t7+MmN1F1kDj$h;C0
%_38Z4MlJI0QO=:ci-8X7r\M-*ddm.KoOZ]e1nPd$*&M'?jYIu,WB[:MikUDF-kHdtNfp;5+t4?@`X?Dp2Dg4!,4NSB+")W1K.r;q
%#:000-o"8]d't:<Boucom'GFLq;iC)/pc@>Q![@Z@G`??4rXs08.gJ)5`nL_=2\<rbI8LOm3*[I=k]Zd=\Yf+79kc/&8UU]?8+r*
%1U"!_7K@Q5=XrfhaFm$0=*8b':S>DMkuOqC;1*B246RkK\ld'E^*=XeW2[q#0t.g_hEb8M./5ol3d<?.F8-WkXAI.LllS@4<Q+9<
%P5e+G@%SoZW`P9h5H4=Io<lYS^<R\-RGI.CC+GW7,@!f>[8'<$R@\.<Q%mV`iOQlQT=EqrO.6&_FSt,Ygr$l6kIBMZQ%*W9W&7ud
%kCp_.R$K\MokI];<0"-h\Z:"e(THGCWc"H1Rh^'5Y`.Bq'78!CIM+\hYak',3O^AmJ<=*nQ=<#_gR<b#C(%/`O9QM%O_/6U"o7g>
%Z/DWid#&%mGq`P:;qc8L<Cg6D8/>&GR'a(G-*rCs]&jDV/Cu[7bINm\]kV,eEp<I;qQt"RQ7RqQ:i^>^CZF"),eCR^SGJamTAr[Y
%/p9!AMPa8'`^2!g^+/Lkg_CWl1R\LCp"*DGC0gJCm+86D9hdO:..r7@#t_VXrGHg:Kpo#.8#MXhKEt=?];;NS82\"WcA"cjrqj"o
%0.DJ2N[lDA(rSDP=QIR*A()\Mn7#MMO9epkJZp[Bmp-6S,$ed.l?-?;L\;)Gb@P.UG9pmrT:CK35ilJ-7":M4U?:cTF1oj=Jf>sJ
%h3)tQ,VI-s%T5E?d<h0mF=\oZ*`SG=[Rd"9UZD3/'(;t,C:#rpQ>/Jd=-\N`lVKa5YYgKos)@sWM4sR,`220UXYiCu-5ImScmB11
%_F?OW--AeXjeG]t=C"ZS[s:(0n^fYgo>/^kU[!$ug_n9T)9_[@rKCA&Y$'6eX:92][*$iH:S0*oK!#@#1O"sXs-(CLERZ>%6/eb'
%NSigL3qeNGDRujY$#A4VVLlHVjCIpQr6e6SOf`XKlY7ZoRqC-O2eA8`QIgEZ$Hka@Xe`\@S;ckp\gqQjA5\>TkZ)u30pp]ag?<>^
%X'eV%((3oW>TF<UCL\+k:BXQ3]"DDUb6ita_2cmS&`,_T8dBNmGE"ii,%^kf?Ns*/.?5ZM-B]9C+;&&i"r-ecf0Ua.%Abi0$Un0/
%9':6ui?,`ZYW1M@W5XAmQ`@T%q=UcE3ca%';/UBU61d6!"k&5<;Oa4>eEB'WqH&.,7DbVH>&tLX8FcMNQV!7@V`hVoK4!f1@T'qC
%Q=S#"NFG8g=g?\>PSS6X>-fOCp\U2Z_f3YiNRp"lo`e=7Ads:7=N.^FaJ"j`Zlm;_!K]krRH"9=S!qOV#V5;t4C>0]/nUm/AL5]b
%'YG,mW3pS"S(O[\bR?mR2e<DMFHpp>d0B9;BW]gZ[QCNCgZouaEmLD%\&<u'CR_HNnE&AOQ83e:>KUc42D1qs3)!8+e`:/Heu>^T
%+jiU+S'OTWFds(0of22n5A53.;cVpVo[P=1%m?ua1-!HZPBbr^TAMV$<``K5K^tplc&s48.Ndtm)qShR\C)I.Q'j0dB?bM")6dmo
%'he"p)+3<s2;JP;,_Gs9^ma,Uk"hsCD3](i)HTQ9Bc<6[d*6Bng8muRYEmgpc#XYrWC/\S)6##Q^Eo]0ZQ#P$TsoqCBW24ho5M-#
%qe/Wi""3!b>f=+LSA(t[Bcrh`BC8m6eSdM!?ZAJj_S5R1j>0]h?T."sU+)V&\Q8\!EJN.<\SRLLECE[e$LB^qSS]#\B`:_ul(I0!
%W7?:HjZe&m7q?,b>@V!#G?te;Li[Cmo-&iT,d=W*-aRkbMJ1miC00cO+^ut2_$dWhhB#;RB=1je;E_XX=#4>uYEANZq[JSY#ff4!
%C=g4&Mt\5A!;QDW+5[%oj.#e8hn9!gb:h[Sj$*gaquK#"plf("qY^0^a7jNYS\P"!S,`0AS(Ab@kc'&A_>khq]*qUtk$?jHYL:7d
%4CXb:DM7QY8/t>W4O*?o5l^gjnE9fOi4n^.mMEKbqX<8.>CunCn);lmc$'XCc0i:If^`<":`fUNh&_1)e(]>*&+%#cT>U)(iFgA^
%A)Gls&,kePMn:jMn'n9.<`hIqY<$ee9(GJV.^79omXFnfHM@/pj+DfiVpXE^`YmL#BC$V$"R=eD^<:a[:-CU#ouQ:E#7Bg+Mj1cM
%DNF#l8cGk7Jb+g'RaPK5@7*$$gr$b99M`N54:'i+J[>u-oi_LtoCbL\2cG4XG69_>Mi[o4r+_nOoQ\A]4L_!R%OfKT$j:"DTA*HE
%p9cE02_b'N3<T+s(OI>41*lc#qK&#"WGP^f^&rdB_;pQ(g8IG"<*-<d/Ui^6hod^K03IZjWLKSiS;:&iK%$OVSmg!5bt?Zc7*ZP_
%1h"Cd*ff_NS\RmZGQ">L#%)lpE!^S=eP;ET2^O!p#_[5iXHUgRH$<i]eo0I0,GNSA1Z;`!W;,T3VeaAcL#ZD$Jn(4l?(.uh(^k:H
%2O4NT^Y@TY"<,=X6hk1EKfYgci,E66@pG^?o<?6,ZpM+<[WA+!eG*W@*]Qq6S43q:?G9XX^%\^^h;IJOJ:pkmjSCkJArK3;nMMb4
%!)'q72Xbm3L>sosc+N*'iS.bDeVVU^dkJUmBiiAsW37ufZk:o>Nc"5((jDPm:$_>FZ5tu_^gCr#YLsQ@`I'VhF)&0Ph2bXH/]'bR
%`%K$V@BB*KP-%*@!r>SE:VC0`BfMZ9Q$.Rt6htl[+;[M$Eh]>;08g8L>oLaF'F/kB)r9!eqV`hW+]sR9#)e<)`S?DU?_Lc"6Or'q
%qD\2$>QNWP"ckS25Z,UtMb]G-\?-'<(j0Of%BAZ.gHFc1P]d`*1A/?8KP_5<N1FPkN.'.[4%4NE<ps`LE((_3'NJ:h-0<rLSh25B
%g8\A(ArB2t&^r=h:SYHnV*d*J*[6-NfPbs$cI26CG;5]c8_V%cB';<n]M8rV+LM*3)H][WEL^UA6\XK]6;=@oc[mjqI2IjUUZn-=
%pG&2oX">lIkCM5=E=#jEciUDb$]e[/YAmeQCH/(`%e)Q(Q8Pl"ihL/e;qZh@11@4b/IlQGIQrKm\$<h3Fag7`h6S<3dh_-jNp"#^
%r0"?^n%ppdNnEJGfEV*36/VKjg.g5^iP!Jqgr/ne'Q?0(K@kX-4P<iR2Aqb@]I?!@>4g+V[LUK9%a\d6%2QH0j'C;$1Rb(U!S]YR
%Q5atg3-]&pl:JU<XdG+P,>qCT0lla"nm(Fl\?e<4DIa3k;\Ig#c&p-:3sS-=$Ip,fXDYlHfrm<fe+`c-34#(6r;KntE]coh1@Z`I
%O$A*i]s_FNq0r0Ck]V=JrnY>D+5pu,+PU'2dXcd+9O.[H"7h_s%QVW3=8,?1I1mT]ZNkuf!2K7]Ns,09>QJ%Rn#:ZK6L,(1a!aHS
%"sF`3N`@ZqSmB%@o!;X0NiH)t(c*$LHYBtW%>"To"H4\qXW]D[IRbI[_h+gS+2EB$Zju"(mNV.V>`\r\^(P*%[l_odj:$XS+6@?R
%!`k!32TF-*0j9p!/re9\#`&Qb-Q)VrZWH$dGP_^/MEaA6N-6"ciW\*<F2G^l7VN`'Es7q.g0ihAqtDKJUm,C0"kd()=c1oj7/Y:N
%*3!_hahqTRT]pUf'\j-Y6[8)TY`)L]+H09M[RSX4UHB#6[*Es@7II5j3GOJ1[nF2>Ii%8lkQr*D1PT:^mDr[7:=BgV:OSSjFn(J.
%"-3>lN+\R6=s<tpPUY\5aR9crC/Pq.Cu/5KM?-jMBd.SaOm.d,hDLO[J.[he!Ho@+E,HKk8P)D!Q'CsuX@)=9g;#gaX%:(@Ta(A*
%GoH>3Z0*1DZ5-d).0%cGnP,P`$GbJri$$dej"P5/H>Ti-U:qo(kK2f/PEPN2pVKUJMYD1X"u'.Vn"^VN)X(cq655B9.fjgWG+e0-
%!bIbGC5T>S!5tV\r@uQM6pS3cI#;?GJONsuSNA?2U2\9$"1ZgN0W#]KhI.m[n#WbCGHu3sRq-dc-t>P7R&En(\H@AM__gSK/%WDd
%*fAs(.^hS5*-Hn3U/YUeF&ZOSXFXL\@-N"%!C`_48LfjG*!"qu*EEpuSE8F;%jI)n:ao[>,Tt<#8qpMNDKj7S6lD^<"9V68jE@:N
%/Mrc*SJa_0C_V+oI2=+Y5+cqP@uujJ/K>[]:2Ss6_#`BVI$4UAJ\tb_e[WhIYIH=&MK[]+l7$7\NV!s"1r49uUc7T,!:Vu6%-0,l
%*mA,^((6^B'9@QYPb0%+"t#nnOce+0"@k<o4I$WuM_&^0!-0CA*GJp8ORK:Ffa+D$@JJXAlSq9Y\tq2l3Z*9>dkq2bi6`PME98l$
%+!%op:1U&cWR#b7V/-ieRZc*M1b0_=@U8=cMF970#/obJ)Zu=TGZ6%2!?s61]E,HGmY80$Tg5Zi3C(rgd1/]Qo]u\>/L*WeD5:m%
%HE3/5)?U'hgRD+H`.=LUaGfCZSdHPY$#/5&fPr>aZ=jJ>?bd*nfTp+n<ZAjmN[Q-,M-(EnUUqA&DFqINA#35&gXL*^E;SlscnRQ+
%"^X)WG:rrIna?:KV]h3AC&l*+U^c6"73)Ku\U_mNG>Gm;9=BH\;5%X\kVbR8'qkdU0n)1DBKnd*16oN<i96CMH-F:2i%a'2:&rZH
%]$p4Jis6&$dQ>$!_$V<(_Na+Y)otV%HlD?J',#j]^jJ/?nIXfmSRuBdn=\D#1e61OG0J7ERHKMkRCk(l#EV_%46]a0GI$@%]r+/6
%9m!7,n5_MJ"i"XpTa6UUT[lkt6;/_TJKdTZU[[IL&EGr7$tCEMQc]A(Zl+B_LXe>T6?*4uSV<gBE'O$DDF,"r:A?M=dR8p\3"V/V
%Z2e;SP[@>BL#=EEe-nL>^(?f3Dl*A0D0Hn2j5^g`!q%$BDgTF\UGNMeVS=n"='Ghp=^+=G=;sO*=..r*NB7nu@]-7_0`</&e,sAR
%9KS,6e0W!N5&_+$HTbTl*SP7HLD=NG[=LrNKI&!SbA\'=H39UD#'?TK#0'4h@Gk4H3d]F"5RU!]&=;b*4R!'2C^k#r&rJ#[bIsHe
%bRph=go:#KSZXcs7sPu`rQ+W+",6(NjS0Ptm"<N'!>8dUK!*-$-BcZi8N%4',77Bs\<Of]C9K!r7cg[Y"ahnJ!9Z:EOh$%q4*3&H
%3X_G-"M'G2nl)8gO2)nh3<U$r?kP(!)TYk5?KdcYI>EMnnYFqsWP+]:j>cl$:2Z^`!ALe^K0YO'PHseVl\bc+;:c3NSk-mJh:q7M
%.Bj>+9T@DW7"@VOO[RlW+h#oLf;o+WbUf"[&H3CeTsarbDSr`?Q1W%U8W5jfGF]3mPsNK'Si_?9:+UZ+^jVQ7_TmPta@jj[1,<VH
%5WC>1"X`=T&_nnm/?(.?DmYiG-6di8XpU#gg]0rkg9(_98GEEO<2V.B6/o9`AEcM#Qj:PhI2P\6E14QXQ`6QBJeVUMqE1Fb*1es,
%:QtJ[-^'&ZV"9WWB`nRA5PA7KeE8oKJhTR)kNSKm!_kd)![f@ANBL&G!!hF`A7hU.L]Hj57BEKK3LE9J$b6:H<reUR2V*^&A7>P?
%\e+Tk1a1/q!.f[]/#jfmo#%]BZ]ZltZ"R/b=Fm8\G92%4&_JD#^QO:4T9*\6m4Lko@P[%2Nl#Ffgm/@*NQ.0H:(KNLHG:*^d!'pP
%+:sqp7H64"=b+INK`M^.DRUa[c:X$CD4i/n_Ee=\YrM9EJD*8YFh39q,@^n%DQJ]r<@EkY+`RV*L`5MQ];m';6=mI7_&i#7V7L3;
%7j-1+>R];Qh>jc0E:Tr$."[G=&\l0R3]K;].g&1Ie&rHX'0?3rl_J<(IAdu]jrkU\l3NI70M5c&N!Bqt:b(\dVhu'ar\;ka^te8'
%OU"lG+k&!20PSD>[W)tCIY<]oc42]_5^drVV-3%'cR^FKEp\"u12D`Nc;\!b>Ajq!B0269I,Xop<\-M\Wna$7cFUS)lOXrZ1)f;T
%.RhckOoS.[:(:>CGm)lqShb4g"SU!>cjU8#15*V6Ql@H@i@;Bh+Y]Zhf1*q%)j#gN8bZ(NmbF9M"bdgX\^G<B)j,guU6d>[87n+k
%&3T3!`@8rUVh1G:b%/_^N@CQA7X6CEeQD`JD#nCrFD]Cg<iUn:2$4/[0(epr?*D+[eOB3'=QS6d'iG.`YlX*ral.rm8s#Bj,jU\Q
%Eb.MlL8$6GGo?)OZYKfe^ciT_GGURQ(6KPQBf\8m+Yk_icdg8eLVkaP@pH+l?]oe$GGFFOk/?gRcC)tq+?I1:fc[,@]d^;mZmmN<
%mRL8>$lpjW(sJe(^]`bFZuqS4*Sf\ROeYqhli]uD=qI&q-]=lG")$Y+ag$@8R,HiKa9Nqr6l-9f%m;R5/FF@Q7eq=?l##a/\dYi'
%DMeSR01heaM<4m0Nepq5:,4[u%De<RNi%d90^0]0jr)f.a\KN5TMV_f.IZ1C_H_NHQWmX?[5+g_bjPt12q6SZO.HPh%2;gNoVA$3
%(%Z8<lp2-feN7[u\\AgB+r^na*8!=VgAsi#K)fQODjeWCJCkO#>nPAB)R*.0f^4Y>T\8NN_S#cHTC%+%s"o4f\afUsgW%n@O:@AG
%3rn_Y@uS(R'KLd/!#ltKK5mhTg+sYS^B?<P.Nop^(&ZS`%!3$Z-a7OM$J68Bs$DH2C:u87)2@!L@ntNp"D<H`kUM1G$'ceCJHPcp
%[s`lj-YD*N4R`?U;.0A?(ni;c/4f=i?VbnOK?#ciPe1@c8S3'VcC0&6,#\_3&dY-n^3@AV*In`GI*[#6SkOG=VN<nEkLf+66cq/B
%1`I$cSEU=1GC0SsGQ9Y3PNNhR0qHr;7e&0Oi!BLZq[Qo4FDfMP%01`4,j?jL2)A"#=GF?QB*WuJ:4r1sG^9EdIBT3\U4gR[i'A`L
%`$thOJBG[1BEKAlka[R@aUR=Y(-(n:9L,n\5oZ1*#\2@*0X2d7*]gtLR;`2X8Kd=Q!U6iN%cD_M"6ARb%D_\(R6eJ7ZckD#F:=[L
%-Q"b^9IPdaT:s>hP=^e=&]$GK?:e[V)+^cJ#,6I5!9_k@""Ro!;9WYg'nG<+a\+hDgus9q=b*5r-0T"E8;ZJjJ:i"p,3$f;L0VT;
%0bfUbK!IO(&'W%pca0uJ^k<BI5U>aP!9u2E!S1ipE1U\CP$j/4OR5saAZfH!U]CJi[KQQQF:>tcOOqb)RNTa)UQsM%#u#!$Pi1>7
%iPUQM4*B7p@HNg/SN@m3!END$bA'WNQE3`C6#NoM9@EcD@'-Nd$/I"sGb>j:"%gJI9b?cNU_Bf.l7S./O8s.Tp6C/"k>E[eJWl22
%JZ6'X;:!3%?u!_ifS.c0geZWh19;du)?WX4/"(Ll)#9g4;0ps-V5NhC=cnh:b8,.>68#PT]l8>RgGmV,oZ3_M3o`cd"ZY`X@W;jQ
%6'mOT+G,`!L10.P5X8#p+E*][mh6OoXFM\<RXSYf8)"%1N2U9%5lDNY+C4'11D0FM+GTbB*Y>SH+OQ]?gY)YIAP*L.&0YFZ`#m?3
%bO1T97,Yb\!HRD`#1/P@JLGc(UJ@lk;pm7\D$n3$hiJ-E_JJLZnX56<2!D==MF@Ll<9I<F8afY?.3oLV)/+f"HlA0n!&Di6e9`(D
%!LT(A2Nd5<!.h%"19]d"5sSOA-%;eJelK,kMJSIb6<@A4^hBrE]Ceuen2)Y)Q@VV$O",M201,O93"=@'qSWgW2W2a/@($5i;>1HP
%L&<1O4!N.V]=sJPdEauo_K#l.5YN15;&Xo8`*Bu4/'h#X&`&s@]g"emksoN>gdbl`O^A;D;N/W:Ug=t,7O9_:TI9e1je*V:In0cq
%*4VBQ832pMO[<nm6Jp`okkX)fg"s_V)8:1[Re)FULr`le.WfA?*Yr+<)B38jk".Xhfnks2lE/mi30ml0#I`A_arAEI+$8MXNBKTN
%E0[3(7ln%TU.)T,UrimLNug,I+*_OqpnI9-f_1=gS,Hl;99OYBP)<!DF3<-#*-?2Jc%-hLg<=`?iVrgK^r)r^!`5GWLrLqinLoLp
%PL5pUa4]1coJV:.\LV]=/rK@=?6p``g68,/0j4Z)XOW=S&A"HXC(dd<$l_5Rk[QgadR;g-3\t9nkNgN%&g*"`k[4X/>oTMi\o7G@
%pL<'h2UEml[*tq/IY,CjiahHQ9#1&NO-L_Q=MccT_+c*Uql!._rk7*'@Mc+hC#IiF=T]MLc4#HL.l5^Oec7]In0A?ZJN=&.6QmQ`
%,jc@-'FIYS)#tI0%S-[qdh*GoU>>*V#^K<'Kst7gF5l#;N6R?tc4,0$Tls%_ORFT34okn.nmZS#3Ij%Vff,<>\%#XpOGV1r21F(`
%qkbL<PXUs1HVZmi/!@0eECAWUFK:I.D[hdYJ7'R"@(]RZ\7FA;!rF6e%__H%!%(0hLPlKV!MU:PXeITV"(CT(#K&N)")1T79DQsp
%)H33Z>DRjjS<D1!1+*'nVI"U2=:[+KZSWd5*k"-($;ATm7h!ftdJs^\JWg]3`,Z4eB4H34,_m>+NO_RoG/1=:NH/g65Z7800TPk:
%*glHlbk<(ZP<i?_ePchX*#C'H]qRVS=OH;Ae7OK]6q0EoGs+1$o2JLPph&+Oo@'8$$)e3&jm@^1ga"?/\B2SnO2.\b*0"uJYCt%V
%h8_&hdXX%3W,9d^&7Z8-(kKk0Uk,5"'mKhGMC)qRVu]7E@uc9b2S1T"@Nk#UU$""Y^cPILFITTo0F3`!NjjW\.L7);HfU4?^&\bQ
%dE_jrZgAXj70eYgW]LN<OfLi)C5A)TGL!N=;,!gq#%h@bB@VB<'6MSWG[roX+tm]C%+49\1A`,FPO5.iF+Zu]+ZCnh#WO.MXWCLK
%7VSF/6uj/)VM^t92Gj,f@1kLQQrHIkYriEXDQ0S5Z52a#D9p0uR5>9.dk..nV1Lt>f':Gu,X;uG.[AKfJ7[7P%+<m@WhR)j&:nsO
%P(U2+5O5"0nOO'.#GkZ>k0LhW<f@nC-0Y[fSM!5^HRtqGcr4ieP)`7s.bf39h0^(,acsh2eAVfX'V%VZZ/1nGR&g>@GM3%hlpEM(
%@q&Y$"3JreSeM$L@:Ppg[ckC=RP+eHjoq<)Z6".f\*&>5RSJ`#I/m.l"aQI,$I5&2a[pD3T`HCS%(VJBHmN/;\=OeK[P/2D@dLnQ
%e,dmPU6Cl*ABg8p?B,.ScH3Sc`;D"c]1FFi7>5db'>)1>*<>Z"PY@+3W^<W2?8l^a.M4etE+^MHWC&CC&Pg9`KU,]7S\6(D#t_P!
%;+BrMZs#p:D*RS_76OHWQ$auq%sgTh<^e8dU`o=R@Q4FKdO3O'C(E#]P9Tt$]cE7J7%l\%KJ=rR0?S@0h/ZDi!@</%+Lt-#nK]JU
%!;<;58F.\?%7p1%klG]5"K"J3+KC7IZ`G]k0P"sLm/$W8U_R5CGW*j2oGS,XVkg[qFS2UlL-"kPWU8!+@Rhf$CD&_1n>ORS,ubOl
%&#p&&;#>,MF]9#EMXJ"&SX6)8p7P\'.h_\%l+ZCVb$X&,m4(")XANRP*1o8MeoS`@7D6GoJC:!s>Bs"-(m6l7e2(04Q1VS[,?Yu$
%oPmL(8"Hp,cDr5-)>1d'A?*u%,q7k%]PE[Tq+B-RW(JAp7CWA_is/kGC"@J/&\"N0epLjY>m=n(D$[4U5SD1#iM!E$08hP5U%UR-
%.qF=>YtTV=8um$a.1erKJ\iE5!OW..DI-^!2DEej0O)#\''lQc_e@]5BZC=JY0)nD;9L4q09,]Fk2)]um1<\:-3i0Q9P:-5Bi+4R
%)GR7>]0qDAY7t_f*mki-f/$JhRlI4US\eop"$o;&)0N2-TE(L&oYMUN<B$:Ifu"80fKi+@<,r4O<<lo<3roh4<tr31-Ui!RkWR6I
%Yc:9:QjD[t$7XabfcD%-V)#*k`!BJ+=$ddB[[<Hl^s"[3id!ACN6k.-<2s-o\4F[`-s]S7C>mC_Wf&.^aM9O!Zsi;DJ<b)Q"tGV0
%YcGVm!36B',jKoqBiJ8Fo/`>'5)Fn:!)Y\RX&V/'"<2iR[0I@l?-,p@=b)d8-OV.X0V_.HcM#KI61P=[A\]j3gYA2;/=BEKJb9FN
%m3LOk-JaG4JI!5i59O:L">3&S7i6<]']btA\lh^5)81='Vma,Ig80K;gt\?2Aqr07RjPn/b)BeWF]:t"3>`=[4*K<YX2%tc-M4;V
%F<cr2./-1OmE"U^,liVMeZ</'X.>MI"NE"<*Hc9T0[?PS0bU$'r6jHpPs?@%i+]CQS1ANnL._RtJJ!>n7o$2%XVTaK#bLZh-(Gj4
%]1,MA5-)"L&(FO4)aMZ@DY[/Te3#]g,0"*qeqk^%YVn18iAQO+R0Q42Ag:i^gq)[h,fh]"XbCOK3SqB(\hGLeSZ7guagAe!T5at$
%S^_on\rOb2E1,8(#TWeVEMmaX!&gfgMNm+S_;l7_pr1/jJ4XEB(,$^1;T'ii40!91)^m0N9VR!Y,KG,\h&m.876*cd#%5<%@TueK
%g,s@'%Ss3/Q2hb!nu<qm"t!hY1TT`8!Rp=Qe-?#2WtW@F1,FN9drr&WRDfZrE!CmX!-5L*kD@a>1>fE%:92#I5ZP<0'5ISQGO?jk
%ciBZ^5j:2k`.g1IF2I0So&UZ>B[tr!lXi#pOt4ORqRU,?3i-P(>nW7OPM+>_c',+&5@/gZU[M3.5PM_l<KJU$dntig(*+FsEuQr8
%ULtc=!Y@=Eh-U%VT<Dn-DBoY&=4B'KK,6u:/^`^\cR7J?]jeutr]!o_QdK/;HWgqKlG.)MXcS%B[o4U!1^T<9)U-l:(ig#86Y5Pa
%!GtbP72/Cn#XGi+l]2,Z#_B]^D![uS$WM:":ccnD6""98$_C_J8;i66!lnVk7[1t'!it!1RHXcW27Z'hGh@P9_S.oniE<JC]XnOI
%V0,6uoQ^d=k7/==i2sj!A^2hQ^q..3h(+j,<131%1tDg4ad!/^U/*8P)21oCUnC;AD)=hjOKqe6L3e@aN.72GpY)H`eq7;2BBX*u
%P_;)nBO2HGIErJG-UD@L;*[\Z92R>`JEGQ."0-CDL`-adhi5sZFjl,0BN[-<VYF4oF]YQ45^$<KpNX7_k6gq1+6?PNWG&FOT&7*p
%OklT(??H]*dJ3?M(6F.9)(ZP7'BKg>n#D*["S_37(e(cb5g`JB,sE/o+GP0;lKT@>d0Eqi@R&Lj=U*.d`1'9kahT:3X?C@=!(c)+
%EaDMog.b#$CR?$pg*LA6R70:nMDuP3S50`+/gp?tgBDVuS$HNM;F*a]NOMI!'"_8gaHT+BU4hHeTk)-X^9:%5(78hBZEmUH0d6t4
%Z%42KS#BAZhDHkZ]TP6nW1M\rDIq>In":s"#aKl'\;L&]AZ"hj4SgrB,)B.2+GlJkE]D:i@AB^\SQ1CnfoRhSJIWWiB73ij[[\PN
%Pq4lta#auH<'pu-b!:kh.0@U4cU3\gp:s8e7?.0Jh5!cX7?*%Re]f]ao7eilo,+*G[D1e!26ed]%N)^"9PVN^[1E<co?s,H,nY[6
%3G(8cc`!U<WaGN\/d:'f'Q'+I(Cl5)A`W$4Bnb?"'k;803@4etVce'hj-A_ifFSBNfI9,%.R):\R_td#;$D0V2^s_5Xe`U'N6SYQ
%$qsq[%9,b&2.2Y@geRo@d@]eI+5'+;R:r9'%"kR6Lqf0TX.EC_!O5kN<"ETk`I!S=:dkEAgFK>()Y>4&(Fr<$:j$M.V\VsWQ?C&S
%dU%pHU2m89!8u/t[m>g->.]\A<*8k62eA8F-+oD6F)TCN8S:]0)68=i+CB&WK@qd5??H9@Cge)edR-]5:C(!c6C9V@Rh_DiB!qP<
%!q&\1V3QjK!ip[Lkd[DpYp*hh5m-1tZhGeSEa7.gYU(rcD2l3L`g1JpAP48-QKbAsd:UXtOX6!_Yst$ibHfpP>r0r-0B6BG3%qoE
%s%>lg"LDY_@!Eh*bb!pYKKkRULd1%<7fk$uB87a>&qeG@*#i]3rc@L.<P;GZg-)SSN4VmAjkP4/4ki-o'L,r7!Va5[*h8/T`bX%d
%\Hq0&'t25sRmKh8bD=K%,pG8Kb]PQ^__DI]j3q78h_o]B&AP&,8Y#eN5^gRA3599Q:k4Nd_Uu+6DbQI9-j[hPSQWg,6dO[bLE9mP
%4i!i<&aVN#Ga'ZX23hC.@LkH4h1C,E".$/.Kj8eSMk8%;!+I,]Mbg>s4D5"$4P<(tV_"7mReOuH9;\&R(pS-Vfog&9fH?N)Hj;&0
%o4m5d`Ps$';>CJ+LE");F8l5:Fq!=tomhW/HA<\(1PRFj)/MZ6(D!P*WjY8j_JL3)V0_fW>D.U7-mQ[Qk0^-E>]GFeB"'.4"'cT5
%!l2#@08tIm!"l1$0ErU@Ep1_EYU8u7imD4AbR32d'fm$89]1goZLLgQBiCm_5[do;'>d'd9%\kB:^Xu`V^bmRWq0@Ea^p!a/VRM8
%UI'j]1J@6EC17>i;"a_Q/LjIGeL,Q'0;t`B:*XJi"Tl%F+lmH_O[]q#0shh.>7;HY+_6<eQGTpO;N';2#]t_]8!Z.>W]8B`W!Hha
%g.iMAa)MD]3+:PC",$KSpj2a!N]\imJI2p%--*\=!Q1++bjf,@^>Ml6@BcOYDXiP(#]`AP&9LH3TB4`9+qTli$$qBlgh+N/)@n7(
%fdR0`Wr]Vc(g#MXo6eqR"(s]#U;+g8@9\C(;P:&](ds"7;K>?6`$@.*"'o:Hra9]<c8LS-Y1++=f,urnSq\*7Eu`j?@rE\lY)#ZH
%Ri?m8,>P#Emgb7Z((\q2=#[@fc?h?`hPqC+b`A\7B_$-T:BPOd$W&]?LGnD%+%;*eA6;am@Hf&h!,k-3pp_#]7"L^$<?@2o!n/UF
%&b2tKKUb!:mlN7?l+eh^IAX9N,m6b*(MR#%O9+M-6A62f&A]9G:a-r,g8lm)OPR,P@)Wn6UFN`adk$Hd#k"QTQu>/F?'$TA!V0H!
%[KrrZfua&<)4AHP5SIJJ[USfiJJBHuB]unVXGCoY+h<mbghL/_/Z*?gG\eU]@-.o;5)nOWCb,@[Ol+2%R<^eQP8c$b!A?c/k5<R=
%k<^*#[*13X-p(A,<QlE?6'j'FefY#fr2A,Yb^cQ7$9F2KT*;0lo_=#8LHursk\)[c#370#Wm?;#@"k!M.TKUh0e3+8,4U,$$KVpO
%DWDD6_65PMc:i4r9GLW,E&4`a;#q*p*Y^gBj,k(l&GR8%Q!Tkd4G@&i'Xs>YK4ah`rA"$HZlHeq'GfVFYoH5TYsm.lQF6NC.'[J(
%d,b(i6uk%0`D5!gUuGgUX#s?baBu<&`DlooA!@244(sJ?DhS/UWc)V9ma14;8_O=1M12+jDAKNCJO3l+6/$Ft32b4nI(g,i;?9@?
%6K')S<Va$n@VQEn#q2rAEq>KN<L)HbA4@I;9lZeV*6rO9m$e5c'oFM?rcq6P![Ot#TI"0o;rqO[d<t>);qjYO%60r]D/hnl`>K@n
%RKgN"MZID5bctiQN*s75V&%g0n!;\=r9ga?!]NjRcJtg<@+ot6bG5\q(75M2l"q:*0Aqk2<?/rN)had5?jN^JJR%53WR`D$4>u(B
%Qi]hR^;pIh)_YJY1lMQC?P[8[/-Nk$f4;NSj=WSgFN-[8cmL5G3oAsUG80=s`]'m,\j0MqHl[L.Xf9],)GqWdlS-A;ai>\oT?u]C
%]KW@Y\l-IP1t:'CCP$+7AZbN590<qsMMFO#b\h_^UDkPP6uc\U*T.86D&(*D9QO509-h*jaX<-0;<T0<2a&BCpbKuCg;#t=>@[Ed
%Kn%*-?]2m"+O5ri#3LZ=.\\Ka8D)KN4pgSC(3M6`2_08g#.[T@Yu-UQ+9]cB7M2(1SlEmTM^\5g4$Nc/YtH?D0g/--`H;/L*(D-(
%-CZZLXW&hG0#c#-jtR&[-l`8jJ!/.]W[B;R$@t.On(&oZjDp'-ajA^cf`;ljD+6D<._p4q,=M3sqpG/BZ15-8@K:`O7cC7jZFHj1
%GXiab6Oe!<T@o@</_mjnnA\rZ)0QVC9+FS1QH`HhT6-8JTG.E\hJf8UKjAk$7#E#u3Fg4E!IdR16>mj'PEuOeib!b.&=A&\53!tG
%q0\l#_$4FU:`&;9,g(pA4ZNYDe#K;8<?uR&ZiNO!9NMA+f6&6=4R+&/EOP7s*3HE%_coRn#J*T/+?#!;W0S^.TeI&umTgN(pcASi
%8R/KpnHO4D!!'RE=J[GZ\k*15URS=c&]]]YaioiH`$Igr95gI798^U-C=)snTQs09B8%[Okb_54QGo6uSmYBfXD<EB31iXD_`*@m
%6\nGKfVbs$,8=fo(>lNI?@@t"$4W#i5jlJ]BN*C!FVQQ82@[8WpmrWVIhZQ6Hj]YBYbm>,Us_?6]E,q.RZjbUq]!,Va/%'Q:C:&X
%0(;:DKVm5jrs[$oo\%&F,@c9)5J4rd^RXb+Wjm/NHFHlgHJsEM(CRLaRl)6f6<:(GDqdA5\dZ?4N4HeFILD")^p+k:]KX(DNu<SK
%?q$`gne!Q=f67Zrn[U&I]M%S[Z:oMA[^[[*Er8e-[g'SDXgU3J:qZZs[YXl^]c2AXc""h^WF^kiG[-TXO#QTPQS9+u]XMX46mXZO
%'!6'!"EpBJ0Z:d)cp4;&DVaC5S7_kB?!fHW.(sQNX5s8]c'\dd47V12_eG;_Dr'J81d;"?I!;h0?/1+MOE=)5MB\c#'legQ4$M@B
%f/uS+ZAidkipr;h8OT5qkE)RdUH&n<"^a\D7jDD7(+`5BBcJ[b%!@]#_34U"4O"b52*#BTcid&;i,I-(-340N9UXnPH@`UX/2$;b
%fdok8b*b9]`KP,V)B42C.ZS4uA^3sD'a'kWb&ZI]PUL@`(<fb!0$Fu<E>:kT7L=ZhRB8&$s579Xk3F/\FJ&`(j/]Ji7p-L<#Du<N
%`gj(fV'MJ00poi^hiuBr^I'g9(c:6-G/Z+<+IJdY.F_<cTIq3_$?sEtdN*?tal\lq(!QQm>m(Lqh7sFbYXFFAW@UD5+QF*n&YoHO
%fI:oTAt`;J4EQsd,XVNhj`;KA.SH%;;cUg1jk(aE*d7!QV>Q+6'^g/u<!L19_TdHDNK9X`>Xd[7\e9XEL)QM"+uWW$079I$6eiPf
%1$Z^T&`LhO27+npck(u6S0_)fG=m5=>;)3KJL1mT1dm.SjkX(k4\$1m8)\IJ%X4t?NIVO6p4]Xg.XA3G"'b[W7NeC\p]u&c7=j5%
%JaRR(8LrO9U)fEW(_O<3T#hBCX""XZ:^qkCU,(tMcoXqWm$L=$3RI7*]!u'ZLg+!B7*":11b@(W&qPL_))o]?>RJNq]:NC:,\.M6
%CKi^k"#qK0cb#0,O/;Up"$aYE7%P2>-B$oZZMljn#/3BRO,^ljOb"7$7r-4/8AW^h&J\bp+HL"XkG?sAkNA5HQjf0bp987IO3V5j
%b.?b\H/U4W6OAUmi^^eiYkL`7BF89"1PX6n?:6R#+OE%i6<.RY9/bJ"$Ap;lpq;%Rk^!eqrdL@SacW'Fr/?9A>F?tSZ3^@S7st5t
%TIds*M0E@+H6]M6<>UM*FK.Q:(t^n_k-m<Mh%tS'V+ff"9E.F_Ct@\?%Aln#r[Y&I+KlCY[Q]R#DUiH#.F2t;,B?;YjHpWtoCNdY
%gJIQ4L.Hd/C$#KHVND7Jqq*p<dS547)oc[q?ra&>6pn[(Wkl@5oPJ+D4.:Nr\H8_iS(tV-)A,L7U1)H?QjJA,;UL8fAp,c@%B[u"
%RPM+hZ9>$G7\H+)8Xc=aYp/p=;+7_ZhE(b#S:l\A:Kn+cMLP/lEL^:r$"Z'7YmUspa99>6,#Xg@LDnipL+_o[+Aj%FmGYh`o;T(.
%4oVa1=bF/d?WrFS<"\-J25</UC<u4/!tu[Uo_`:6nbH@j(j)K^PcQ[d-_D[.F\c'WfUe+#4.*3R_\\uf3$\\/eV1ZsPGN\\WE&0!
%A&JM0UrM.jW:a\g;k.9W?ua%pnsb(62&4::e%7kq.8O89"<%%s\.3"K9V[.-j"(,`%b;2bgchfqK$V#pH.kf0eVSqKrL-]@a6O?(
%'m4sZN5;<nU9-Hea>g%^ZEu39H6%*e7]r$&WTK*a`Nr3`4K$YD('867CDM.N:DSWGd&UDrXc?NqQ&AO(=8ir91<eI9idit13^$Gj
%Ht9B6SI]%F-2p:9?]g3ZK9oe^W_?m;b%SQm;3JIg:lpP`.pKp^.J#4&T.gBOd[4Y%\.1`-d]((IhqVCX7Y"&_M+c*#<i+D3$'8Mc
%ZrDL&e6mQQqh$\Y0h`u<ld9)tMP\7'n`_f2109u?cI+T@_(Q!aHd/E_j`bY4[Q@9F8elE*nK4?=N>tN)<Z1YZWcKY0p![:!Ui(@k
%>bQ2K)?lH@@"@%5&5R.@o>(,&Bhn_0N-Qh5#+3Q'l"7kN?D92h>)>+:Z:$<!%dkZ-'STjhdC]`V%iF5l*nYFb"m3588ZsU,-CE8B
%Kmrm@U-"WHWV3ab!<X0`?lug`X1iRRc9\2"cP'Y4@fo9H9Ti!2-_sGQ@Q`8ZeM04,V3>"og6G)[Dim`/R&=+6joN&UE@1jM`5iK_
%!)nlF/-ho.D[%mDO;uhLQPCRo=_qf^G=l:haU"!k7`!l5FB`.=/RZDBAQ\"XlF/O.1f8qt!&JOR4A?hX5#@A+Z#rA"Ni#oDQQ-GI
%62]s>%ErX+ZUAI2O8W$4j9:93l^L"2"X_<"D?n_Q=67gSa>;gYe-mWgilXqK:S^;4_3f>ed_,3DA-]0DQO*mKBTiIF/p3.#X+qEd
%72do)bX,#IbnaBQ4<U67TR/E@&HEc)Q]"Aoq.&s7N)#Gs!!=#5K!q8\(.e!lOeX(/6*^o$2LQMV!V6./hAkH60`'2l,a->q"O,+V
%S6;<O$u[(EEnMq0m$:PD,HtX%%DslEU-?#mGiOGREs3<:8[-@Z^`s]/:;]C&4OV+D5g6%uW/t9^EB&R^'Pj_#B7%"FQ^2_N\..qL
%fOQ^T^+D]i>`i%YD(ZS<mG!?U7KL-LU0B\QHC>1b?0dg$O.aRB:dWoEU%&?V8CCtFYrX33Q\jUUB4O9#9"e4i6mb^t:k]GPcW2*3
%A%K)j:)YkbOKc^K=/oa*@$hF^#ElEn<C!X_n+G/'`DaDf;C"Vt_lj(Y_Pt?3%#H@jGK;B(b9D.`/?KnNW<u$C9-9P3_u.CX_Ftm5
%<Rq4XL=/XY#Mog+N&nU.&(9qj(RuPr7SRb(fgOr_1IL^O@]hYeL8+$6GliAF1pOUDe7Zps#5&'-"i5F$'2QEd_7jX30a`u5s47>L
%)Y&CW?2DB$d\ssM:-`lbTp#QegHXMYB[Qih)J+ieUKdarq@K_3p,>lZ:@M9GQ<Zn.#/5\LAVR7kH[IMO*$fm=X,N(NgOf'iWC?(K
%`K%:(%S<T)2&DuLEf3c5))['_M4kN7qWJpR#/=E`)pSSAk+;nYLfG?F;0*prb[b:7Ct+^pXN_/9Qk'QH362PJCQUF#)GJ#/89=^_
%5i@Lt\a49!PqAl=j5&N*9\?lT6-&srI%n^SdE(9PK;g[ae'TDS8t.ngNp[k?T1ccjFWcqtVK1@7r`m,A>GSF.R<02cFTYhCe7biF
%(=l-J3dQ,Dq06&!`'c)Gbe%#*Y"CjhZUO=#r[FG.FR`M#gppbV\L3k=!,r'Ec&N#:SrT^A[,edo"]9;K3C)RD_5^MUEAmGKcHa<e
%'KQ(^cD6WBM9r#+V-A76'5hQ4U)82I]_J"XWH\)nhXoksr.G]+A>pi=CbQh[1Zq-INLk5*`P,S+RfqOCEXfI42ki%Db+NFQ&:2.o
%FZ+pj;6-qC@OXNF9"Wc0)g6O@eWBC>qjE4:KcNtG/X4]B[jEalk@7E-%ObQ(*(Ue(Vqpk'l`W)<&hYLM@E18E0=ZX[pRr3rCj<Kr
%<bouNR*CU);oM-,OlhY/5Y<+jV41T(5b0SKW5_)e3WlA,k^AuM4lg6'oeEVU$?NjB,Ip]9iU4k/-RVfKcuLcYH>-:1(2&2tC]if4
%(lUKdUM2R>hH4f^DE_CNj[0R"X"\J(VE^E8bS=$p0UlZ4&0T+g_nLI%gbEM9AF6eE3$N.AF[sr)et*pcH`m]jD_)t82*\c=QRYDs
%?AmQbU%`*pfmBE`99Hb$R<t(,(R]W4\JLZ\Ier9Z(Q-&\D(HBZ:K_WP?P>iY7M]?<*gH\uo8em's$/cT&J3WDD/K$dRp,!:R_aA&
%[h9's%$lr%;l/u!bDX^BO@$-F],7\M^F"u;Kq_j#;sAN]1p_!gSTJ3k5<4j!kJkiUOX1h#iec;kbH=ROYdK:T7EfRK2b\99G:h%`
%\jO*4PMqMmjEt(miRal2\6'D>_BuhG4RMeD@q,\OfLGacTq9)[L/q_dSUZ>88_`V.n?Sn0Q']j[U@!.S)Ofrc;pecYN(ug>c6BWL
%_S/AJUl^D1_i%MqHL?'&kj_'hW-'tOej"4?)#>5XIj1K/]'0ecm3\:l$rg8tqt%#<VoDXUA7``*P=#GO_IP^!E&2C?\1^&!7`$Pe
%LSdB[jgBebe6IfH$]A(hir>g3Y=d_gfFa<ciKZN3]S4tAD'X+GV6<VK62e?90#*IRO*9K+US5rd<p'tO8(';B,Wt7-EG[S?g0H*R
%#Mde-3$Qk*mHq+,:=1e+H.RI(ZnHSRPtj7O0pJ8bPr9OO;r7mT.D0Cgb-iqQNob>Mn-_Sj\$#V6Qj^+Fk2o#=$r(0*Qn0YuQ]dZ+
%qpYN9RJ=>>@fg(<N9?ZC?F9VmG[7YL3EnV.-tQ#1K*-K'o2h@OZ/o8#hM*(X179Vh:,ame;.7b*(1%(>`lG^K:PKF(OMP+A514O&
%lc'mb+E*3qGWN)?+JFSR<L>.t,b@_Aiq%?D_qc%\'agpb[P(p$I-%,G%&1\W$&5]$8h`s62/+#YDP;>?U0s?(EVCdq9JB08OgN<7
%DaLn>\jIjPMnT(`@A992_,3\\0QYD_(/-\]B`58#_I:;79I,[&='(3p@AG:kArbtL'Fn&QIHJ^:>.ENFat"bX-N"djN?jP$@9S9?
%,o0069_aaWboV_Y@:gr.VPVUgg*`>e$0KV^9RNJ%+*V]/gPCPB&^_Gt_Q9lVm,b<^K12.j"Ql]Tp<VLG]DKMHAfi0onO.FNekkX;
%qhV6@DTk"1pp2*ll:/=),-KKB=#44*>V5U+,H_f%[>:lk3*4k&cP7Wm/RiF2^s_"cY#//C`6fch`]74uUdZ7Tib/ooCO_@&R*Lu\
%+ci+`NVE4U%pKE)pfGgf?F4RdC`D]*]PTtG52%oun86Y1`%.E_r2j0tN24F"]4EP'f5<H#U,I>bf'Z4h*Y@u*;PE\U7'GE5m+V0@
%>GLFPflc!rP-LF[ZqDt1K(L!MAl,<ff.%1Pa7&1E#aV]&?LpLM"_fFS-C]=$?+Y'eVBjPU@;s?XWdF='4>O7ObM&)o>9sZ2MiFQS
%\R[W:dcSr!4E5+o6p\B*GTp-M[on_K+]?WDF+p'KT)1#Xbq.b:3SZ`6VEnJ!2VLfXM0Le"VLKcVW.cm"Rq$t#/l/CZ>ciL6^X:iG
%pBbSa/i/nGUdAW/C__Dc9T>$s_C,4qF^E\\E8$#^/L5]%bX<q.Y\P.?`:1E$"H3!`XM!8W%l3d"]0[r2(/MFbbpE5n<,16Y\nj>!
%kH]eN4<&aA<Rbg?+]=WQh!tX6[$(T_Co[`'8l<u&n'_!KWD':J_`CYDc3Ip8c>2F:iC?hlAm^PnbSl-c@>Gp\c:?pqi'>rD&/-g9
%V!SmDh*`\4cQ=Qhp3K4Y;PYL*.T4a$,r?S'>B8o[a^!D66:n2YfXb6!a0fVUYd.`mkt@/M'WFa/BQ\$BmGUTCC*e:B$:ATO[>Dm6
%75"2QANm.F&mAh9C2d8-[hMqdc:KPa5f.H&;tI7a6L1>C)O0k)6ttm#Bb1C*+,E)k"%fDJ_sp\N7DJesk(4"=D>"CBj^i$dT.3TH
%gg^%)MI^A#EU"=+S`'h%ij(AQaGFG?,Wk10>cfV:Vdr(`U3"/,cM$cq:FDsBaZVUZ$egWoTP'f;[_Z.k*Wg1H#Rg=-X+<2=/gs_3
%8tP1ren,]cb=eUSQ^3UdH?\VD/l-)q]e3%l03P@L9gHQ*R=#B<MS/_S%so*l%ImKMp(M)`$gMo_7GIEfCCBs@h?$ZIU(e]O]9Trp
%l[Gt-($!)]^W&>_%][(:^F27bI'Wfpp*+4EQF"rH9[bJ*X7)CSF1Kjd>DY]B>+%7$^=V!i]O<:;YLV<Y&ds2+-rRM:KAqoO\bX'@
%I_khNAm`upcZsOc4rSkkg;0J^8tf$FaOh"^kHPHFRD2^+V/5.!)f_&F7qFDJP,24aSaD<uC`_h7H)7aO^A/*j+iY#K\iu@>B+&2m
%hB^K0oT$F[0Dn/de#&jl[^t!F`EO^P5:#fT1q+YSEiq?lSoppW5.8bd\7N:BO1.ibk%o.rri^k;h0i`lFk09Z#T[[o]tsuY.slup
%>\?6`,pTdK:/O1,KR3'ed8#WYmq-U<6Shg]A-47qS<<G+XKs*">?"a'%/;>GnGb*0*WEYS*ea4hZqGA.+u_d8GLG:q;k*fc]/1V?
%ahj*Me/`Hof!EeGJ#b)J-@RT>Qp=@r`q)p(FgV'/6p>6NF,Y>f;9G-OnOAr>r->8h<T\[FH$;"a%j-oO:F9#Z<$MV4+.T8`k<fV+
%dsQf.IIAQYLE0g-ei[1K9rGkWB-NpBm)q84a1-&4?A3[/glq/..bd@W8t]cT?6!X1oD<:HPBdkgVD@\MZJVE9*4E:sEWHA/8+3Kr
%?$;=f0$"3`^Mk@XH]J%$Oak-bZ5,mnQWbtV;3ZNDmY,8:YCUD9WAt]QTKT+6!kTroj^AKJ6egd[@nNgK3_F8oJ`nM-\jsle^#,k*
%4\*msEkUF#8'.%FddfoMA=`6E=d?HQ[H>m,nU/tUdID3uHlO.K8&r[!)C?LHa$]a6UZ:BTXqD`s2nL]inSr>kn"Sk&kkKB4f;@6q
%_R)9mBQ?*"_A!d6623H!S^*c44.>BXa&REA4MU\OZl)ZKA(.UaY,Ks`6_li<\`F4KM4AJTl#`]UBZkYWFgG;&c+7,pE(Q&P[[^+S
%dVa%thG%n0jl>%ad%bItDk%Dbj!&DHjGf)1EFIX%iO,o2,o=,Q,*uP5P1U7me/*ToPpmFe]EQ6gR<\Nf-T3dQR+sOrBZfGsX"ePl
%Sq.S19nn`cc;ujH!&]sp(jBjBP+TU<'dI)1I&Jk2r-du$W)4>=V^D?*fT+a4Q6na&5Ji;8o+5$PlmS@s86&]Lr&402[p`S]lF11s
%*9+\hr&0\p-M#p8oXH*[:#rd?r&0\p-M#oUn#h?W3>"9F+t(m)E#<#L;<3G2Y-H64(MX:ECNX:>K"M1*jj.TtV@Yr56&XpP#X=Cd
%c^te5&P/-NA$.ZGic%$1aF2ck;KI^eRT`p52B1Z3nhN#DS'e7A1Whl2C9fY<4k6R<F2?t/C/ucsFh'LY'0;;I$V#<JcgcJ5/1&Ft
%eI2NNeG4^;2`4Kl(K1j0_HS*Z_Po+%$)cL4'92,6M./T\5F2^qE72lPB86.;f3/'@bF$ktkBu5YdOoiY@eR`HbJl8K9:5mdd7uV4
%Yi`,EpXWP"F<Kj^]\j:^?'#rW`4>@<?boq;&0Ni.9GJ3M%`SCG0!OuA<4[o$%+Ju=WNbm/:DZt=B)'&"QHs-Z0+B?m&&S^EDl/m0
%YBT'i_cVto8Y]tIo"iRX1hdVd3=7*mJk=]AQulBa7YaV3d8Cmk8NRsL2.>OR\/c;QMCiM&IH54H7N^Hdg_lDE_V<W4@7rO8Op5f+
%hal&<LI4QB5E[h%A.p]jXE7*rBYnV66,GQ+?@:&=e@YUrTj2apQVF!"CnT:E._,"H0g>#s#k(k(I;7.n:7EZBDHl<c%AI.&jLXrm
%KgMt;1;#kIcA+g5NK"\.@c%7J#R`(c96bIbY;IWPcJu>t5fg*f2LP#.p#\%D9t4=5`H0;bQ(b&;,q*X3?C+keCg^[!1ho)%:Ko/&
%C3G]BY^AprM@?*\ipAFg(eSSU41`sSQK@)P@sN;?FcbQe._9"0Ba`B-<i[_Zd;d9[4t^OaAe85XX>c7)EaWuL0Y1BD<cYN\25&K2
%Aop$e[-VTN>If%:rBe.;fIEpVl^+F%?nGt$JLGbV#!sGE,O?SbXP&2g+c>c&$T0_^=Dei3&Ogq<?VAVTaelLB]5WLYFMo+eH7T>S
%XP'_a&<26A=d?Pf:pn,k7+PMa=_HQJdCDMo*L[g:CWAIe^oJL0_S4`DU:D7-O^B@(CUB,pZ2jhAB&qQW=`)fOr#4t>)&ABB,<XXJ
%IZ9X,]9-Hq'fgR#f80>CXm4M?-j_C>9Y(RV_>,pV(f<Bj1dlel'+JEI,H""K@5p-%rfglcVPbI*C?)6.RR%_CLhXULc>o/g3>_<]
%bs<-.=]&GY1.,J+gnN>)*RU^q"atA51AE&G0/E<+HUR78n:aoZ,(m)$%DIPm.]%i4nqE+ED*O4;o["e?0=(?sM_DC2,)^B2/ABEd
%h26ShB/V1i56Mi03O(f8*JPn3DTZi(mW6EI=oX,7rKF;lnn@)hAi,kBkj")hV1#mP_3G4FB<phbA>pCEJ+QE)$3#"=6]N%a@[N0$
%a#)OO2$:UlNio>M791,G7#7NUZ=Do29/UfBCcJ;.g!\8-[%^)Hq02HNr7D*Y:'0X<UeCT\A&i.0M>\I]kdF/RN^4J]A/hB];`)IZ
%11ai>Am22Z+]+"UZDeuYUmNC-"K9:=7*Pi-6N`Wl`%8g7IGc7M[UQP;>Ce+Ph2]=Elp,34+@D*r/&8S.ZV?pul[^(D/5)*,SN/Ac
%3*QF%33^/M%`?dIJ(YtH<b%T"fRYAsP@N?%8!C&1TG>p-`FA48bjSBK(i*:*2$Y5eYg-_;4g5]c/Oa/jXN79!5^4j7'FR($/%4I?
%dL"?3L(-!"JVVm(fT'PJR]plijqtN=kfaQJLs^ALfJ[6:@e,BM^fJZVD+b4XL(iXHFHm#=*(VGJ%"j1f3W_\BRda%3f9m`%&F6M`
%^I'#iUVK"'m`FHX:J0[CGV\@^-dgN*`mTCCAZ=F8D%d#O0iX/",5Sce]V?E<i<P032.D;Sku,!b!IBW@9'htfBRuph_i@_Kl"DD?
%gOZP_d\sUR!!*1)97;X,6m6AZI@95UGJc0QC(hiO[:E9gj_n6oM]'=&6nQA7?t2*pAN;IIQB'Js\7MX_j'"oGZS\c5r0/G2(lgt\
%Hji('lN;=3R(=SR"i=Yir$'$@&9_m,S09u&p`kK4A)Q9':_[kYTRG(B.?9+PdmBYE2S5e1V&=aO:_7FFpWtqQCSV0eCLiAdcsjTS
%+R?;>R&4Z#;dXeSY1ZLQOW%WTaB5Ya',JpA.RqRsAL"=296IJjaB6JG*M"QCB.4$aZd'4h9*"L_%>0Dqk-G:Y#T&AD'#8mN.1Dap
%eB3B1V`%oSMTA[k?S7ZTQH@-:e*aA:.S80fc4cj(SEOpfY4^5j,<jY/$N97)5i/W-rfQPbA_C?VK\5F5$+Z%i>HfWV(f>fBWk_SK
%9+amYR>ulH:fh+p5r:`c.EOL:*q4@^>q(s]\-G`iZ41Q<.]ep:WsgQ(cOfc:#qW)bh]Pl=gFd-:+o=`?6dlaga%g8Bp$q?-T'E(0
%U&%mA<$/0Pl=3gqIB:eqO'Y3HnUp"0p!F48\!jI'RNgrrZQ7BqC="Q*",p?+*/=o#Xd1Y`Cu?sC_+1t:gPGrNW/YNoH,WGlH6M!<
%2b])8MFZ\jXdJ7dW_f5eUcr`4X)T^3E2#r,eMK*Zru+nC%:EGL<OOc8e&!"P%;T0obVfliW!Z"d/N98!TN2dA$\+f-V\C#XKaZmW
%7h@^9&?Mm_44p7<$iZ%r?,crj7mYT:\jS,.;J'NHRGeK@ql2Na;DB![C6X8CoM.1)#092"71C,:RpoL@@"?UkenfAT)RX:=4H^H"
%<1<Go!1fT/Yr+*H()hM@"Mpsd9TJHLR!oL?!WWZc7[+G4`nb-NTEJ:"+`&U-j"KR\<C\crm<LtR2m"^J^oM>=0[Q\,JDu-7PeVDP
%,Ro!CqT=+/o!MX]HFhpD`mF5tfubZ*Ckcge(%W]d-AjTLBCfXa1:Cq"TPnOffh=+CT.<orjU7:VBqUqpXn5fiJAKnO?l56q-rQ^8
%i7>+<B%^]L+J13>XKS]a/?dID)8Nd\r1k+Q;iQ&(SUqMThaCK;6;^"_nAm5T:FM%1U.a!9_1$>s.DKCVJZSI=PsfF!i!JT^AVN58
%fJ%NJ8ITM]ZcGKUj&"BC=ti5jcA5rsXX4'!(0.W4F/!l0P\.?-8:,5B=:U].<:baEBZk4I_@KYE9!^f`D?N!S1=i>>f#",5p%kT:
%&M+T-TRnW3#fiSf'NlPOVok[a56;Ru6Q.)W)AWLG[R*%H%9CHi;5L9Kcjs&[<T*N9M\f-NDaI^qcCT4N75n?r7JbK4aiP729d'KX
%8/4ssYgQ%N_4M?c%MAgBj4e:=-\rjm0U6f?o,gB/*/k\ioDJe_+]u]hL_no:-J,KI8OrLWi0[VXg]M*,/Es[?Jg-+@G''I%)$J?-
%oWRYW<F$doMWf-p>R'3,ph!+HS;sAVYIJ7<T@J,B?$h?]p>4>4YP+X;f),'Bq41%4p9qHCiO+AL>Ae*348,OYs6.N'pA>S"I.gJ\
%J+U3Fm?S$&AUiuhrn"(InQZ%mrU9d`\,PnrJ,4K#G9t,XhL4MsrpgHSIce0W?[V:0Dh%c4hbbB]7/350e%bqGs6!TNGjZB_Gjag7
%54Pne(&"gspl0\8^\-Huq>AkEmA[#_mH^DE`4K>7HKRoSmk4@dhu:I,H2R.2rpdaLm&fG?#<5&ZlFYhYHi9GkqURV(k33*U[9J%]
%le4,(B4tuQJA$,@'6=c212%I`a%.b*E@LsUTRm&!2']9K37;4!iJ5`N8@gu2VXjp?<C2s2EHEH'Jb&U\/L4L:8nCe<K&&a34AjZD
%p!c[!("L]?8;Ul4.?m,7fJ`^)O6eMr@][uN>5)s3nr:Ss&_f\YGP0o9o'[&]L2C>@1@,KIbRBp-TGf*f6qbqB[Y2A9`<pK*s7W]S
%6c8loX/UR5PL5VaST.?c(qhA8]%KDdR`N](@P`haN$(#Ope2R+=iG"\=Npk19jonlpk6S#Nq$h>e\'g=#HN-gV\W>)NDV].o8O7W
%Znb8FNC_t@TbQY<CA;&`OsibkkS&B=:\9K-1\GH>1"Vl:8VC!Z:TLJPX_a]<#Ze'GmERdo<UlJIXQn_"03C*MB9'5(M9h]#eJ(2g
%JCPA'R8p>cN6c;_2LT+J&@%WlAt"P#*\pH`2oElm&t^k!Y\;GDM_DetHnN-XEPkCFqU3QQ??,'^pfr*`j\%5>A9#:4=FBA=CT_#i
%o.@"Dh/`,0='`H9MC"TU5IHDV^APQH3kKN]o9LttD*>=&>'Xu*dMc+$/\Z=@a7ZoZYQbWp5O+8iO,Y2FZtI"^DUil+V+Q_9(LM@I
%MH(Uc]uP2trgcR9rf238r@Mr4mHO-cGM*p9Xgmr-Mp#le\p2?f)qS8?:)&3TO$E8<T76\oT6^@`J,QW:H/%X+3kiX<HfOZCAMk6/
%]_(Jn])1p=s7W4;If;uIs3CB<oj@a\45]hUk<JpMGJ4!LHi(Bm\U%t_#*rVmlulb-c9M3m5CN""+9(ocTDe-0TDnWMJ,f5+kPsn>
%YAnb:Wrb-YT;u%m6`8f1rr;0'n,;1Bs7Pt"?\6U^^Ne_J++O=&:]B2Ihf*=$?$3Q1eY:$L/\a^lb5C*dTD7PI5C*5E(LI(eq7M+6
%V-c@bPGc.j`(,;11Sig3r%^H;M_Dd7`pVZ\Idc<(q!Kra@<#%T=Q^LH4nIJ_I/N_6mI%^Yb"EN?!N]DXkJ#oM?hiK=Sq"j4:@#Rj
%ddo^"hA8BP#.JO<$9!j^QZfI9mX*4UTD7i8?77I&C:s&2mlp:@h`,B+ZL$".["I[&m!7fcJ+q3#Sq!`P32uJso?VgEdsm`*>^,QJ
%*6"XFQec*ks/h?3q*1Kul/epLk2hr$qp0ciaD6j8/$H`c/7*?kk@!_KTD@nNISkY,a6MhPrmn"(2SNI#%QFJYf68<aonU9Re:1Ie
%La)NI^OQ5Y:@IEKS=m_6/+rdhk7_5"iQnPLKG?,Pn:Rolm'ZT[]^#'G(G35Zb+%e:,G4hX:LC3R!K?"S5Q8Z0^Yu]:nd;J&m$>n+
%Dh%\3cdp#?\p3bChtcnK')'SSG#2Vtf(!s3k$NU=YJNrYrla>?+5:hZ10.iCpWkdsn?[WCbWEhRY%jiNq;7g37YSil#bZ:_(7:qJ
%WB3b(6/t#=jjSA(+Y^%.d`:87fAHAN05@XH/DQ<!KtI-N=2bZ_k+c;Z_=JIXdaEj4J+/_a^?/6#8HYpag3%a7n,;%VrN%>@^O5Qa
%s7X"tQM06k_#:M8[VbFWTD\Nr?`;?Ud,#;JrlI[cD7:[6,IuYZO8SkO5Q>ZJ^0#MFr:8qcTDc7F^]2nUj;Y;<9t^gUd$ih]8,heS
%huBm[>OLU>rp0GgrMrH.He2Li[cg.6Yq+qp1QOa!X_0,N#%\b"I-^;mcO@uI)4ppnG."^Qq!1-5\aTO%.\$7mG<bhQf,`a"?-PC_
%U.[,RY%qG3%3PQHG:4]rFVlA#cK@FgL9Z<4lRlh/$q8:DQ*EKMhXhE&.UQYt;R"<)]]mq;fZIcs=<Ymg7V;RB:O`'85CW$S?bBLd
%N+F)HV<n)Z<AJ"WkF_\G^H_[,i:d7q0<_s%8\'A]$3\Mb&&%sJ+8b]n^[dBQr6)hX"j0h<.msp`=iVV1h:qqi@t+qCX1lXf6IEO5
%aS>#O^>UY^D^6$bfY)1Q3d(=k6cV^Q0t2_dAU6V^[4A(r0pM#6"ZrcjjGN'N^'\t;>8jOPUroFNQX%,_Eh]+X:ek[=U&TdCImg-e
%$C2JcLbXOuLr'&(gBUS\pC,#`lg2>1?<NG$RXl:'JA*"'`f!3]@oS,.a%Y//\$talCcI1L$\(R!UW\\:mXf26m0iorE?f(]"-dFH
%+rTE*D,XK3j-$*-7b*<*?Ps!oEqF:+Lh,$oNN%[Ycg,6':%*K%^$p=>SSpV.ZGp*4psj#KJ+9?C(LDFXq.K]k_f$T7=QSTcJ*!(+
%IM$b^l*#O1X`aL[J*0&pZr429psGH.\9<)l-[o$#6ho2mcBi:q.=C[))KgN%X-^P"]iO)b[C%$:ra.WhXj0/\Wrli-bE[M+mt^UZ
%VTF9XXY1E@oCe6)lbI?7]mo1bs8Ji+Faj"aBAVmRo^5oK;uZUUS>\76jcFV6p"[/IV76@VCo(Keg#\J_FJ3\gpA)Oo3-JX<?0>9B
%I"YLGdkfr5rTaDmZi885S#GuO)el/Ce0aV+cguWS/%mmsb0nUoO4$Z#1]6;7.D9p4PqGb0df5iD9(/=fm.#pW[$c'r;r/3WnATfc
%1UQlb,P.-!e3p,\3Lfe?pmLpQ@ses(q$3TD4nT)aPRNW15GeRIGkB[NFe8CrO+h/^DYPksI#RM)T+4la.6k/O./CD-D#*8W;uTB*
%SZ*j\pufNWK)u$A9qsa&J+</"je$Vo:qs3#^K=3\(@-#5QbQWTdTBj`GbJ`?QbH9h]Kc&,:MSDTiW!,`;<O[3:;-8B5H*r9L%#&Z
%]B\ZBm[HH;[3,3i`_/PkdUS?6l;]""eD!1ZQ)C4%roZNGq=?h!lH:8q;b0@D,56<]khY]Qc[4OI8&*fgNu%Z#c\4/SrRDGqa5l0t
%0hi#1RP291<acjrO_$XLbEun*6c0bFE\p--.90>QYEK8`Z!s_a\IdDC]kenRZEG^`S-:BS5OpAbKl^\SOqiE`='+pA<R#$mIpn\0
%U]1^1*\/q.F(QJK4:>E$8=T`>37,:ud`[q/?$c>.9:%9A$ls/lb:UF*b)YRb?$4]gjiag>HteSGM_;_cknVCE]&q]]f>$VIlW7><
%p$u[\0=eHIdrFn(QX.>lWb\fQ@HC3Bh0efg.rW)ZqJ36;G55^9/?UD2jGJt5Y.sM(>?9R'Rj?g5l`+0,K!HCu/1E*\kRul^]]k_d
%V)q_]J^<UCUg%c2NlF_D?J*Goi!hX&.]m$`G$Po3eu`5!LU??D)k^l[6+Zj>b<)rdm;(9rD;<Xsh7[OOL9@q"DSPl,)pm9Kf[olG
%A_4T[VH`NQ>dn]OX9Z4::MeL7F\q_-='`/VfDji'YINloQ)efkm+N<B;,dugWm]A0&W3_pH2Ua0MajtnCN$^O>-i*V9NF#].#+n(
%EX[gt\(gtCE_I9THMLRm6f2Z<FN^\%>*a#0QD(@"pP-)"gq%OFEo@a?I+ae:@$=lcHE\MG7)uPa;ce9'6)-BRlKTlP>KisXag,]#
%>QmjA'[<kPc$4WJfs(qp)XY39Q5`9eJEHD;.rS_,DBh&ra1PWcDV=%%ctI5?l+UZl-)0Jn2r1og*H[<96:!",NTt9)`c\<o;,Ir2
%j=e&l4"4mBJEZ9&Dt9/%h^79,;b0#4Y$G1Om&P$:<fA&2WTnj4RU'GfYN3l`bPitV[)O"i^\mXqq.`7j.rGcu]A]mh[EFkn1QIB[
%>^81M3hRd%cr+[i=!s[f+"a7C*sh[#/DAA5'8Wg_%NskKTLhGl\2g^DrboDYc+4@;k.<Qhlrpm\Nn]*H!DD>D=FA6=/"nA-O4Bfe
%\i.@%CL-+<fGliQ\E_5PQGXAjAQM7H<O^euH;,c7euZUa2V;tFE+s81PG2-\Gh#D7/):[Wl*aT:>!NF\T8d*eoZH:I+:H"AKj/H/
%l,FZC>_BZ%Ji,ip97Yi'"'k)F)fNCJL?kNQ0W&SIN3KlDQ7]l:l>*Q9p6r%cl<%)Nf`;;D>e3=V:Fodlkp9=h*@n^70bh?<$0AMd
%\MB6*\@=gZ?8ub?aRh^aUXU>u2BtOb"&l+sJIsI`Qt4%]_l:hn4:a>C!o[]eMa?Ma3=/Q)9\<W&$]U#>YH:^AcDP)t@"i4m73,3_
%.@.f7Bp.s3O=5kdBc7jFAo"gr/F05h%J,;Z>4hn*^P;-,*U&fgo:f5XV($>7WHqNN*ZP\p?Wl9r]N-QG+L/`Qd#]e`#,nUA=Dr6Q
%4>`c@JnR1@2hY@Mb1b'R[Fa9d#m-j%;nBEsdFD.hp,sWjm_/A9h0ZSK])<QIZfR@\lYpn8m&PWr2dUoU13n,2EEe3"EDu>bM)51l
%O["*bHf"@s5Buf"F_9X)*?0:sAR59/j^.Q"]0H('J+^oZIcAZpHe0LWOhJ6=CDhPHoB*rYf=pX#J+fkfrgtimnc+"1Da468\%X2;
%O)m+&Gf>DCn[!.]o])SCgmA':CL?u(Q_+!%`ub38Zgh<?aBVuN%#r$uXoI^:TDs[rroSF=rQVNpnM>[jVf."VplIg3G-*mfrq]ju
%oIiH#`NOaNgY_kI5CMM\^NeX?IbeR2YNX^95j<Llpqra%lkc;ll;sVUib/;R6gss?dK;SCkUbV%%?LOu2nf)UTD6%E54QR(s6bAd
%n;i(nf>`TjS(NPJCqb8om-ER!h@G)50E0tk++2DhJ*V2sGk:33I)*3/H]D`fm5NqpN^*HZGJ*`1c^d+?Q[Yto=VkAM`n>DYOXltc
%=3;g!hdrnr9Jg.+qkib-L(>:C^2(ohgFH@iU5N(-[V"p\27K`%YgQrEm3P?$a-2uaa5[;5L/ZjGJtjFsVM7mCm(lZ6.\qpcq,M)A
%e+'NdED4E^dWm$@KCe[NOoX8(h(A;cME-M+?g;(WCIC.%,]faK#pbW8UBNT+g.e0kl^g1($aGq^4lnhks6b%=%EM#$D3dW^:d1I&
%N90Vuc1!$Ib=Dfg8!'`SUYR9O0k[@$AZ"1DB;Q\DWSS+j.nN_6]p=AuBUp>Z_\,Rt,'(RF*.'tU]<d3#*8_]k>k,c#!T6D1lRU(q
%k?0`@_Vk\Lpm@?K`O!+VU_lk?Pcn"Y^a^'kq9unHq8^W5bki)VF>S;[X$JpLfc[9D/M!]uYd#uMY\D+!bXdOT4(^L=_`3cGP<.]m
%*,J":!P(-kZSWn/ecG_GEKjk3*fsmIZ`5c7GqXk`8V'n_1M.LKB-Y<E"0"@a@-]?/NQ]pgjW&sD#HKThPX76qr7@0T$[AF=hBu`G
+%AI9_PrivateDataEnd
\ No newline at end of file
diff --git a/logo/gnocchi.eps b/logo/gnocchi.eps
new file mode 100644
index 0000000000000000000000000000000000000000..cf707f093aa72cafd0fc83e27f38eb441a4bb2d7
--- /dev/null
+++ b/logo/gnocchi.eps
@@ -0,0 +1,5775 @@
+%!PS-Adobe-3.1 EPSF-3.0
+%ADO_DSC_Encoding: MacOS Roman
+%%Title: gnocchi.eps
+%%Creator: Adobe Illustrator(R) 13.0
+%%For: Thierry Ung
+%%CreationDate: 4/3/17
+%%BoundingBox: 0 0 1096 840
+%%HiResBoundingBox: 0 0 1096 840
+%%CropBox: 0 0 1096 840
+%%LanguageLevel: 2
+%%DocumentData: Clean7Bit
+%ADOBeginClientInjection: DocumentHeader "AI11EPS"
+%%AI8_CreatorVersion: 13.0.0
+%AI9_PrintingDataBegin
+%AI3_Cropmarks: 36.0000 36.0000 1060.0000 804.0000
+%ADO_BuildNumber: Adobe Illustrator(R) 13.0.0 x409 R agm 4.4378 ct 5.1039
+%ADO_ContainsXMP: MainFirst
+%AI7_Thumbnail: 128 100 8
+%ADOEndClientInjection: DocumentHeader "AI11EPS"
+%%Pages: 1
+%%DocumentNeededResources: 
+%%DocumentSuppliedResources: procset Adobe_AGM_Image 1.0 0
+%%+ procset Adobe_CoolType_Utility_T42 1.0 0
+%%+ procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
+%%+ procset Adobe_CoolType_Core 2.31 0
+%%+ procset Adobe_AGM_Core 2.0 0
+%%+ procset Adobe_AGM_Utils 1.0 0
+%%DocumentFonts: 
+%%DocumentNeededFonts: 
+%%DocumentNeededFeatures: 
+%%DocumentSuppliedFeatures: 
+%%DocumentProcessColors:  Cyan Magenta Yellow Black
+%%DocumentCustomColors: 
+%%CMYKCustomColor: 
+%%RGBCustomColor: 
+%%EndComments
+                                                                                                                                                                                                                              
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+                                                                                                                                                                                                                                                         
+%%BeginDefaults
+%%ViewingOrientation: 1 0 0 1
+%%EndDefaults
+%%BeginProlog
+%%BeginResource: procset Adobe_AGM_Utils 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{currentpacking	true setpacking}if
+userdict/Adobe_AGM_Utils 73 dict dup begin put
+/bdf
+{bind def}bind def
+/nd{null def}bdf
+/xdf
+{exch def}bdf
+/ldf 
+{load def}bdf
+/ddf
+{put}bdf	
+/xddf
+{3 -1 roll put}bdf	
+/xpt
+{exch put}bdf
+/ndf
+{
+	exch dup where{
+		pop pop pop
+	}{
+		xdf
+	}ifelse
+}def
+/cdndf
+{
+	exch dup currentdict exch known{
+		pop pop
+	}{
+		exch def
+	}ifelse
+}def
+/gx
+{get exec}bdf
+/ps_level
+	/languagelevel where{
+		pop systemdict/languagelevel gx
+	}{
+		1
+	}ifelse
+def
+/level2 
+	ps_level 2 ge
+def
+/level3 
+	ps_level 3 ge
+def
+/ps_version
+	{version cvr}stopped{-1}if
+def
+/set_gvm
+{currentglobal exch setglobal}bdf
+/reset_gvm
+{setglobal}bdf
+/makereadonlyarray
+{
+	/packedarray where{pop packedarray
+	}{
+		array astore readonly}ifelse
+}bdf
+/map_reserved_ink_name
+{
+	dup type/stringtype eq{
+		dup/Red eq{
+			pop(_Red_)
+		}{
+			dup/Green eq{
+				pop(_Green_)
+			}{
+				dup/Blue eq{
+					pop(_Blue_)
+				}{
+					dup()cvn eq{
+						pop(Process)
+					}if
+				}ifelse
+			}ifelse
+		}ifelse
+	}if
+}bdf
+/AGMUTIL_GSTATE 22 dict def
+/get_gstate
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_clr_spc currentcolorspace def
+	/AGMUTIL_GSTATE_clr_indx 0 def
+	/AGMUTIL_GSTATE_clr_comps 12 array def
+	mark currentcolor counttomark
+		{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 3 -1 roll put
+		/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 add def}repeat pop
+	/AGMUTIL_GSTATE_fnt rootfont def
+	/AGMUTIL_GSTATE_lw currentlinewidth def
+	/AGMUTIL_GSTATE_lc currentlinecap def
+	/AGMUTIL_GSTATE_lj currentlinejoin def
+	/AGMUTIL_GSTATE_ml currentmiterlimit def
+	currentdash/AGMUTIL_GSTATE_do xdf/AGMUTIL_GSTATE_da xdf
+	/AGMUTIL_GSTATE_sa currentstrokeadjust def
+	/AGMUTIL_GSTATE_clr_rnd currentcolorrendering def
+	/AGMUTIL_GSTATE_op currentoverprint def
+	/AGMUTIL_GSTATE_bg currentblackgeneration cvlit def
+	/AGMUTIL_GSTATE_ucr currentundercolorremoval cvlit def
+	currentcolortransfer cvlit/AGMUTIL_GSTATE_gy_xfer xdf cvlit/AGMUTIL_GSTATE_b_xfer xdf
+		cvlit/AGMUTIL_GSTATE_g_xfer xdf cvlit/AGMUTIL_GSTATE_r_xfer xdf
+	/AGMUTIL_GSTATE_ht currenthalftone def
+	/AGMUTIL_GSTATE_flt currentflat def
+	end
+}def
+/set_gstate
+{
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_clr_spc setcolorspace
+	AGMUTIL_GSTATE_clr_indx{AGMUTIL_GSTATE_clr_comps AGMUTIL_GSTATE_clr_indx 1 sub get
+	/AGMUTIL_GSTATE_clr_indx AGMUTIL_GSTATE_clr_indx 1 sub def}repeat setcolor
+	AGMUTIL_GSTATE_fnt setfont
+	AGMUTIL_GSTATE_lw setlinewidth
+	AGMUTIL_GSTATE_lc setlinecap
+	AGMUTIL_GSTATE_lj setlinejoin
+	AGMUTIL_GSTATE_ml setmiterlimit
+	AGMUTIL_GSTATE_da AGMUTIL_GSTATE_do setdash
+	AGMUTIL_GSTATE_sa setstrokeadjust
+	AGMUTIL_GSTATE_clr_rnd setcolorrendering
+	AGMUTIL_GSTATE_op setoverprint
+	AGMUTIL_GSTATE_bg cvx setblackgeneration
+	AGMUTIL_GSTATE_ucr cvx setundercolorremoval
+	AGMUTIL_GSTATE_r_xfer cvx AGMUTIL_GSTATE_g_xfer cvx AGMUTIL_GSTATE_b_xfer cvx
+		AGMUTIL_GSTATE_gy_xfer cvx setcolortransfer
+	AGMUTIL_GSTATE_ht/HalftoneType get dup 9 eq exch 100 eq or
+		{
+		currenthalftone/HalftoneType get AGMUTIL_GSTATE_ht/HalftoneType get ne
+			{
+			 mark AGMUTIL_GSTATE_ht{sethalftone}stopped cleartomark
+			}if
+		}{
+		AGMUTIL_GSTATE_ht sethalftone
+		}ifelse
+	AGMUTIL_GSTATE_flt setflat
+	end
+}def
+/get_gstate_and_matrix
+{
+	AGMUTIL_GSTATE begin
+	/AGMUTIL_GSTATE_ctm matrix currentmatrix def
+	end
+	get_gstate
+}def
+/set_gstate_and_matrix
+{
+	set_gstate
+	AGMUTIL_GSTATE begin
+	AGMUTIL_GSTATE_ctm setmatrix
+	end
+}def
+/AGMUTIL_str256 256 string def
+/AGMUTIL_src256 256 string def
+/AGMUTIL_dst64 64 string def
+/AGMUTIL_srcLen nd
+/AGMUTIL_ndx nd
+/AGMUTIL_cpd nd
+/capture_cpd{
+	//Adobe_AGM_Utils/AGMUTIL_cpd currentpagedevice ddf
+}def
+/thold_halftone
+{
+	level3
+		{sethalftone currenthalftone}
+		{
+			dup/HalftoneType get 3 eq
+			{
+				sethalftone currenthalftone
+			}{
+				begin
+				Width Height mul{
+					Thresholds read{pop}if
+				}repeat
+				end
+				currenthalftone
+			}ifelse
+		}ifelse
+}def 
+/rdcmntline
+{
+	currentfile AGMUTIL_str256 readline pop
+	(%)anchorsearch{pop}if
+}bdf
+/filter_cmyk
+{	
+	dup type/filetype ne{
+		exch()/SubFileDecode filter
+	}{
+		exch pop
+	}
+	ifelse
+	[
+	exch
+	{
+		AGMUTIL_src256 readstring pop
+		dup length/AGMUTIL_srcLen exch def
+		/AGMUTIL_ndx 0 def
+		AGMCORE_plate_ndx 4 AGMUTIL_srcLen 1 sub{
+			1 index exch get
+			AGMUTIL_dst64 AGMUTIL_ndx 3 -1 roll put
+			/AGMUTIL_ndx AGMUTIL_ndx 1 add def
+		}for
+		pop
+		AGMUTIL_dst64 0 AGMUTIL_ndx getinterval
+	}
+	bind
+	/exec cvx
+	]cvx
+}bdf
+/filter_indexed_devn
+{
+	cvi Names length mul names_index add Lookup exch get
+}bdf
+/filter_devn
+{	
+	4 dict begin
+	/srcStr xdf
+	/dstStr xdf
+	dup type/filetype ne{
+		0()/SubFileDecode filter
+	}if
+	[
+	exch
+		[
+			/devicen_colorspace_dict/AGMCORE_gget cvx/begin cvx
+			currentdict/srcStr get/readstring cvx/pop cvx
+			/dup cvx/length cvx 0/gt cvx[
+				Adobe_AGM_Utils/AGMUTIL_ndx 0/ddf cvx
+				names_index Names length currentdict/srcStr get length 1 sub{
+					1/index cvx/exch cvx/get cvx
+					currentdict/dstStr get/AGMUTIL_ndx/load cvx 3 -1/roll cvx/put cvx
+					Adobe_AGM_Utils/AGMUTIL_ndx/AGMUTIL_ndx/load cvx 1/add cvx/ddf cvx
+				}for
+				currentdict/dstStr get 0/AGMUTIL_ndx/load cvx/getinterval cvx
+			]cvx/if cvx
+			/end cvx
+		]cvx
+		bind
+		/exec cvx
+	]cvx
+	end
+}bdf
+/AGMUTIL_imagefile nd
+/read_image_file
+{
+	AGMUTIL_imagefile 0 setfileposition
+	10 dict begin
+	/imageDict xdf
+	/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+	/imbufIdx 0 def
+	/origDataSource imageDict/DataSource get def
+	/origMultipleDataSources imageDict/MultipleDataSources get def
+	/origDecode imageDict/Decode get def
+	/dstDataStr imageDict/Width get colorSpaceElemCnt mul string def
+	imageDict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+	{
+		/imbufCnt imageDict/DataSource get length def
+		/imbufs imbufCnt array def
+		0 1 imbufCnt 1 sub{
+			/imbufIdx xdf
+			imbufs imbufIdx imbufLen string put
+			imageDict/DataSource get imbufIdx[AGMUTIL_imagefile imbufs imbufIdx get/readstring cvx/pop cvx]cvx put
+		}for
+		DeviceN_PS2{
+			imageDict begin
+		 	/DataSource[DataSource/devn_sep_datasource cvx]cvx def
+			/MultipleDataSources false def
+			/Decode[0 1]def
+			end
+		}if
+	}{
+		/imbuf imbufLen string def
+		Indexed_DeviceN level3 not and DeviceN_NoneName or{
+			/srcDataStrs[imageDict begin
+				currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+				{
+					Width Decode length 2 div mul cvi string
+				}repeat
+				end]def		
+			imageDict begin
+		 	/DataSource[AGMUTIL_imagefile Decode BitsPerComponent false 1/filter_indexed_devn load dstDataStr srcDataStrs devn_alt_datasource/exec cvx]cvx def
+			/Decode[0 1]def
+			end
+		}{
+			imageDict/DataSource[1 string dup 0 AGMUTIL_imagefile Decode length 2 idiv string/readstring cvx/pop cvx names_index/get cvx/put cvx]cvx put
+			imageDict/Decode[0 1]put
+		}ifelse
+	}ifelse
+	imageDict exch
+	load exec
+	imageDict/DataSource origDataSource put
+	imageDict/MultipleDataSources origMultipleDataSources put
+	imageDict/Decode origDecode put	
+	end
+}bdf
+/write_image_file
+{
+	begin
+	{(AGMUTIL_imagefile)(w+)file}stopped{
+		false
+	}{
+		Adobe_AGM_Utils/AGMUTIL_imagefile xddf 
+		2 dict begin
+		/imbufLen Width BitsPerComponent mul 7 add 8 idiv def
+		MultipleDataSources{DataSource 0 get}{DataSource}ifelse type/filetype eq{
+			/imbuf imbufLen string def
+		}if
+		1 1 Height MultipleDataSources not{Decode length 2 idiv mul}if{
+			pop
+			MultipleDataSources{
+			 	0 1 DataSource length 1 sub{
+					DataSource type dup
+					/arraytype eq{
+						pop DataSource exch gx
+					}{
+						/filetype eq{
+							DataSource exch get imbuf readstring pop
+						}{
+							DataSource exch get
+						}ifelse
+					}ifelse
+					AGMUTIL_imagefile exch writestring
+				}for
+			}{
+				DataSource type dup
+				/arraytype eq{
+					pop DataSource exec
+				}{
+					/filetype eq{
+						DataSource imbuf readstring pop
+					}{
+						DataSource
+					}ifelse
+				}ifelse
+				AGMUTIL_imagefile exch writestring
+			}ifelse
+		}for
+		end
+		true
+	}ifelse
+	end
+}bdf
+/close_image_file
+{
+	AGMUTIL_imagefile closefile(AGMUTIL_imagefile)deletefile
+}def
+statusdict/product known userdict/AGMP_current_show known not and{
+	/pstr statusdict/product get def
+	pstr(HP LaserJet 2200)eq 	
+	pstr(HP LaserJet 4000 Series)eq or
+	pstr(HP LaserJet 4050 Series )eq or
+	pstr(HP LaserJet 8000 Series)eq or
+	pstr(HP LaserJet 8100 Series)eq or
+	pstr(HP LaserJet 8150 Series)eq or
+	pstr(HP LaserJet 5000 Series)eq or
+	pstr(HP LaserJet 5100 Series)eq or
+	pstr(HP Color LaserJet 4500)eq or
+	pstr(HP Color LaserJet 4600)eq or
+	pstr(HP LaserJet 5Si)eq or
+	pstr(HP LaserJet 1200 Series)eq or
+	pstr(HP LaserJet 1300 Series)eq or
+	pstr(HP LaserJet 4100 Series)eq or 
+	{
+ 		userdict/AGMP_current_show/show load put
+		userdict/show{
+		 currentcolorspace 0 get
+		 /Pattern eq
+		 {false charpath f}
+		 {AGMP_current_show}ifelse
+		}put
+	}if
+	currentdict/pstr undef
+}if
+/consumeimagedata
+{
+	begin
+	AGMIMG_init_common
+	currentdict/MultipleDataSources known not
+		{/MultipleDataSources false def}if
+	MultipleDataSources
+		{
+		DataSource 0 get type
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width cvi string def
+			1 1 Height cvi
+				{
+				pop
+				0 1 DataSource length 1 sub
+					{
+					DataSource exch get
+					flushbuffer readstring pop pop
+					}for
+				}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or DataSource 0 get xcheck and
+			{
+			Width Height mul cvi
+				{
+				0 1 DataSource length 1 sub
+					{dup DataSource exch gx length exch 0 ne{pop}if}for
+				dup 0 eq
+					{pop exit}if
+				sub dup 0 le
+					{exit}if
+				}loop
+			pop
+			}if		
+		}
+		{
+		/DataSource load type 
+		dup/filetype eq
+			{
+			1 dict begin
+			/flushbuffer Width Decode length 2 idiv mul cvi string def
+			1 1 Height{pop DataSource flushbuffer readstring pop pop}for
+			end
+			}if
+		dup/arraytype eq exch/packedarraytype eq or/DataSource load xcheck and
+			{
+				Height Width BitsPerComponent mul 8 BitsPerComponent sub add 8 idiv Decode length 2 idiv mul mul
+					{
+					DataSource length dup 0 eq
+						{pop exit}if
+					sub dup 0 le
+						{exit}if
+					}loop
+				pop
+			}if
+		}ifelse
+	end
+}bdf
+/addprocs
+{
+	 2{/exec load}repeat
+	 3 1 roll
+	 [5 1 roll]bind cvx
+}def
+/modify_halftone_xfer
+{
+	currenthalftone dup length dict copy begin
+	 currentdict 2 index known{
+	 	1 index load dup length dict copy begin
+		currentdict/TransferFunction known{
+			/TransferFunction load
+		}{
+			currenttransfer
+		}ifelse
+		 addprocs/TransferFunction xdf 
+		 currentdict end def
+		currentdict end sethalftone
+	}{
+		currentdict/TransferFunction known{
+			/TransferFunction load 
+		}{
+			currenttransfer
+		}ifelse
+		addprocs/TransferFunction xdf
+		currentdict end sethalftone		
+		pop
+	}ifelse
+}def
+/clonearray
+{
+	dup xcheck exch
+	dup length array exch
+	Adobe_AGM_Core/AGMCORE_tmp -1 ddf 
+	{
+	Adobe_AGM_Core/AGMCORE_tmp 2 copy get 1 add ddf 
+	dup type/dicttype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get
+			exch
+			clonedict
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	dup type/arraytype eq
+		{
+			Adobe_AGM_Core/AGMCORE_tmp get exch
+			clonearray
+			Adobe_AGM_Core/AGMCORE_tmp 4 -1 roll ddf 
+		}if
+	exch dup
+	Adobe_AGM_Core/AGMCORE_tmp get 4 -1 roll put
+	}forall
+	exch{cvx}if
+}bdf
+/clonedict
+{
+	dup length dict
+	begin
+	{
+		dup type/dicttype eq
+			{clonedict}if
+		dup type/arraytype eq
+			{clonearray}if
+		def
+	}forall
+	currentdict
+	end
+}bdf
+/DeviceN_PS2
+{
+	/currentcolorspace AGMCORE_gget 0 get/DeviceN eq level3 not and
+}bdf
+/Indexed_DeviceN
+{
+	/indexed_colorspace_dict AGMCORE_gget dup null ne{
+		dup/CSDBase known{
+			/CSDBase get/CSD get_res/Names known 
+		}{
+			pop false
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}bdf
+/DeviceN_NoneName
+{	
+	/Names where{
+		pop
+		false Names
+		{
+			(None)eq or
+		}forall
+	}{
+		false
+	}ifelse
+}bdf
+/DeviceN_PS2_inRip_seps
+{
+	/AGMCORE_in_rip_sep where
+	{
+		pop dup type dup/arraytype eq exch/packedarraytype eq or
+		{
+			dup 0 get/DeviceN eq level3 not and AGMCORE_in_rip_sep and
+			{
+				/currentcolorspace exch AGMCORE_gput
+				false
+			}{
+				true
+			}ifelse
+		}{
+			true
+		}ifelse
+	}{
+		true
+	}ifelse
+}bdf
+/base_colorspace_type
+{
+	dup type/arraytype eq{0 get}if
+}bdf
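+% pdfmark_5 degrades to cleartomark when there is no Distiller with
+% CoreDistVersion >= 5000, so newer pdfmarks are consumed harmlessly;
+% ReadBypdfmark_5 likewise either flushes or forwards the embedded data.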
+/currentdistillerparams where{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+{
+	/pdfmark_5{cleartomark}bind def
+}{
+	/pdfmark_5{pdfmark}bind def
+}ifelse
+/ReadBypdfmark_5
+{
+	currentfile exch 0 exch/SubFileDecode filter
+	/currentdistillerparams where 
+	{pop currentdistillerparams/CoreDistVersion get 5000 lt}{true}ifelse
+	{flushfile cleartomark}
+	{/PUT pdfmark}ifelse 	
+}bdf
+/xpdfm
+{
+	{
+		dup 0 get/Label eq
+		{
+			aload length[exch 1 add 1 roll/PAGELABEL
+		}{
+			aload pop
+			[{ThisPage}<<5 -2 roll>>/PUT
+		}ifelse
+		pdfmark_5
+	}forall
+}bdf
+/ds{
+	Adobe_AGM_Utils begin
+}bdf
+/dt{
+	currentdict Adobe_AGM_Utils eq{
+		end
+	}if
+}bdf
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_AGM_Core 2.0 0
+%%Version: 2.0 0
+%%Copyright: Copyright(C)1997-2007 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
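+% Adobe_AGM_Core: core procset of the Adobe Graphics Manager. The dict below
+% mirrors color and separation state (plate flags, cached CRDs, a private
+% graphics-state stack) alongside the interpreter's own graphics state.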
+userdict/Adobe_AGM_Core 209 dict dup begin put
+/Adobe_AGM_Core_Id/Adobe_AGM_Core_2.0_0 def
+/AGMCORE_str256 256 string def
+/AGMCORE_save nd
+/AGMCORE_graphicsave nd
+/AGMCORE_c 0 def
+/AGMCORE_m 0 def
+/AGMCORE_y 0 def
+/AGMCORE_k 0 def
+/AGMCORE_cmykbuf 4 array def
+/AGMCORE_screen[currentscreen]cvx def
+/AGMCORE_tmp 0 def
+/AGMCORE_&setgray nd
+/AGMCORE_&setcolor nd
+/AGMCORE_&setcolorspace nd
+/AGMCORE_&setcmykcolor nd
+/AGMCORE_cyan_plate nd
+/AGMCORE_magenta_plate nd
+/AGMCORE_yellow_plate nd
+/AGMCORE_black_plate nd
+/AGMCORE_plate_ndx nd
+/AGMCORE_get_ink_data nd
+/AGMCORE_is_cmyk_sep nd
+/AGMCORE_host_sep nd
+/AGMCORE_avoid_L2_sep_space nd
+/AGMCORE_distilling nd
+/AGMCORE_composite_job nd
+/AGMCORE_producing_seps nd
+/AGMCORE_ps_level -1 def
+/AGMCORE_ps_version -1 def
+/AGMCORE_environ_ok nd
+/AGMCORE_CSD_cache 0 dict def
+/AGMCORE_currentoverprint false def
+/AGMCORE_deltaX nd
+/AGMCORE_deltaY nd
+/AGMCORE_name nd
+/AGMCORE_sep_special nd
+/AGMCORE_err_strings 4 dict def
+/AGMCORE_cur_err nd
+/AGMCORE_current_spot_alias false def
+/AGMCORE_inverting false def
+/AGMCORE_feature_dictCount nd
+/AGMCORE_feature_opCount nd
+/AGMCORE_feature_ctm nd
+/AGMCORE_ConvertToProcess false def
+/AGMCORE_Default_CTM matrix def
+/AGMCORE_Default_PageSize nd
+/AGMCORE_Default_flatness nd
+/AGMCORE_currentbg nd
+/AGMCORE_currentucr nd
+/AGMCORE_pattern_paint_type 0 def
+/knockout_unitsq nd
+currentglobal true setglobal
+[/CSA/Gradient/Procedure]
+{
+	/Generic/Category findresource dup length dict copy/Category defineresource pop
+}forall
+setglobal
+/AGMCORE_key_known
+{
+	where{
+		/Adobe_AGM_Core_Id known
+	}{
+		false
+	}ifelse
+}ndf
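+% flushinput: discards lines from currentfile up to and including the first
+% line equal to the marker string passed on the stack.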
+/flushinput
+{
+	save
+	2 dict begin
+	/CompareBuffer 3 -1 roll def
+	/readbuffer 256 string def
+	mark
+	{
+	currentfile readbuffer{readline}stopped
+		{cleartomark mark}
+		{
+		not
+			{pop exit}
+		if
+		CompareBuffer eq
+			{exit}
+		if
+		}ifelse
+	}loop
+	cleartomark
+	end
+	restore
+}bdf
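+% getspotfunction: extracts the spot function from the current screen or
+% halftone dictionary (HalftoneType 1 or 2), falling back to a built-in
+% round-dot spot function.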
+/getspotfunction
+{
+	AGMCORE_screen exch pop exch pop
+	dup type/dicttype eq{
+		dup/HalftoneType get 1 eq{
+			/SpotFunction get
+		}{
+			dup/HalftoneType get 2 eq{
+				/GraySpotFunction get
+			}{
+				pop
+				{
+					abs exch abs 2 copy add 1 gt{
+						1 sub dup mul exch 1 sub dup mul add 1 sub
+					}{
+						dup mul exch dup mul add 1 exch sub
+					}ifelse
+				}bind
+			}ifelse
+		}ifelse
+	}if
+}def
+/np
+{newpath}bdf
+/clp_npth
+{clip np}def
+/eoclp_npth
+{eoclip np}def
+/npth_clp
+{np clip}def
+/graphic_setup
+{
+	/AGMCORE_graphicsave save store
+	concat
+	0 setgray
+	0 setlinecap
+	0 setlinejoin
+	1 setlinewidth
+	[]0 setdash
+	10 setmiterlimit
+	np
+	false setoverprint
+	false setstrokeadjust
+	//Adobe_AGM_Core/spot_alias gx
+	/Adobe_AGM_Image where{
+		pop
+		Adobe_AGM_Image/spot_alias 2 copy known{
+			gx
+		}{
+			pop pop
+		}ifelse
+	}if
+	/sep_colorspace_dict null AGMCORE_gput
+	100 dict begin
+	/dictstackcount countdictstack def
+	/showpage{}def
+	mark
+}def
+/graphic_cleanup
+{
+	cleartomark
+	dictstackcount 1 countdictstack 1 sub{end}for
+	end
+	AGMCORE_graphicsave restore
+}def
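+% compose_error_msg: paints a framed white box near the lower-left of the
+% clip path and word-wraps the message looked up in AGMCORE_err_strings
+% into it in 10 pt Helvetica.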
+/compose_error_msg
+{
+	grestoreall initgraphics	
+	/Helvetica findfont 10 scalefont setfont
+	/AGMCORE_deltaY 100 def
+	/AGMCORE_deltaX 310 def
+	clippath pathbbox np pop pop 36 add exch 36 add exch moveto
+	0 AGMCORE_deltaY rlineto AGMCORE_deltaX 0 rlineto
+	0 AGMCORE_deltaY neg rlineto AGMCORE_deltaX neg 0 rlineto closepath
+	0 AGMCORE_&setgray
+	gsave 1 AGMCORE_&setgray fill grestore 
+	1 setlinewidth gsave stroke grestore
+	currentpoint AGMCORE_deltaY 15 sub add exch 8 add exch moveto
+	/AGMCORE_deltaY 12 def
+	/AGMCORE_tmp 0 def
+	AGMCORE_err_strings exch get
+		{
+		dup 32 eq
+			{
+			pop
+			AGMCORE_str256 0 AGMCORE_tmp getinterval
+			stringwidth pop currentpoint pop add AGMCORE_deltaX 28 add gt
+				{
+				currentpoint AGMCORE_deltaY sub exch pop
+				clippath pathbbox pop pop pop 44 add exch moveto
+				}if
+			AGMCORE_str256 0 AGMCORE_tmp getinterval show( )show
+			0 1 AGMCORE_str256 length 1 sub
+				{
+				AGMCORE_str256 exch 0 put
+				}for
+			/AGMCORE_tmp 0 def
+			}{
+				AGMCORE_str256 exch AGMCORE_tmp xpt
+				/AGMCORE_tmp AGMCORE_tmp 1 add def
+			}ifelse
+		}forall
+}bdf
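+% Sixteen colorspaces indexed by a 4-bit mask of non-zero C,M,Y,K channels
+% (C=8, M=4, Y=2, K=1); nzopmsc below selects from this table to emulate
+% non-zero overprint mode.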
+/AGMCORE_CMYKDeviceNColorspaces[
+	[/Separation/None/DeviceCMYK{0 0 0}]
+	[/Separation(Black)/DeviceCMYK{0 0 0 4 -1 roll}bind]
+	[/Separation(Yellow)/DeviceCMYK{0 0 3 -1 roll 0}bind]
+	[/DeviceN[(Yellow)(Black)]/DeviceCMYK{0 0 4 2 roll}bind]
+	[/Separation(Magenta)/DeviceCMYK{0 exch 0 0}bind]
+	[/DeviceN[(Magenta)(Black)]/DeviceCMYK{0 3 1 roll 0 exch}bind]
+	[/DeviceN[(Magenta)(Yellow)]/DeviceCMYK{0 3 1 roll 0}bind]
+	[/DeviceN[(Magenta)(Yellow)(Black)]/DeviceCMYK{0 4 1 roll}bind]
+	[/Separation(Cyan)/DeviceCMYK{0 0 0}]
+	[/DeviceN[(Cyan)(Black)]/DeviceCMYK{0 0 3 -1 roll}bind]
+	[/DeviceN[(Cyan)(Yellow)]/DeviceCMYK{0 exch 0}bind]
+	[/DeviceN[(Cyan)(Yellow)(Black)]/DeviceCMYK{0 3 1 roll}bind]
+	[/DeviceN[(Cyan)(Magenta)]/DeviceCMYK{0 0}]
+	[/DeviceN[(Cyan)(Magenta)(Black)]/DeviceCMYK{0 exch}bind]
+	[/DeviceN[(Cyan)(Magenta)(Yellow)]/DeviceCMYK{0}]
+	[/DeviceCMYK]
+]def
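+% ds (document setup): records the interpreter's PS level/version, hooks
+% handleerror so AGM errors are rendered via compose_error_msg, and builds a
+% 32-frame shadow of the graphics state (wrapping gsave/grestore/save and
+% setoverprint) so AGM color state follows gsave/grestore correctly.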
+/ds{
+	Adobe_AGM_Core begin
+	/currentdistillerparams where
+		{
+		pop currentdistillerparams/CoreDistVersion get 5000 lt
+			{<</DetectBlends false>>setdistillerparams}if
+		}if	
+	/AGMCORE_ps_version xdf
+	/AGMCORE_ps_level xdf
+	errordict/AGM_handleerror known not{
+		errordict/AGM_handleerror errordict/handleerror get put
+		errordict/handleerror{
+			Adobe_AGM_Core begin
+			$error/newerror get AGMCORE_cur_err null ne and{
+				$error/newerror false put
+				AGMCORE_cur_err compose_error_msg
+			}if
+			$error/newerror true put
+			end
+			errordict/AGM_handleerror get exec
+			}bind put
+		}if
+	/AGMCORE_environ_ok 
+		ps_level AGMCORE_ps_level ge
+		ps_version AGMCORE_ps_version ge and 
+		AGMCORE_ps_level -1 eq or
+	def
+	AGMCORE_environ_ok not
+		{/AGMCORE_cur_err/AGMCORE_bad_environ def}if
+	/AGMCORE_&setgray systemdict/setgray get def
+	level2{
+		/AGMCORE_&setcolor systemdict/setcolor get def
+		/AGMCORE_&setcolorspace systemdict/setcolorspace get def
+	}if
+	/AGMCORE_currentbg currentblackgeneration def
+	/AGMCORE_currentucr currentundercolorremoval def
+	/AGMCORE_Default_flatness currentflat def
+	/AGMCORE_distilling
+		/product where{
+			pop systemdict/setdistillerparams known product(Adobe PostScript Parser)ne and
+		}{
+			false
+		}ifelse
+	def
+	/AGMCORE_GSTATE AGMCORE_key_known not{
+		/AGMCORE_GSTATE 21 dict def
+		/AGMCORE_tmpmatrix matrix def
+		/AGMCORE_gstack 32 array def
+		/AGMCORE_gstackptr 0 def
+		/AGMCORE_gstacksaveptr 0 def
+		/AGMCORE_gstackframekeys 14 def
+		/AGMCORE_&gsave/gsave ldf
+		/AGMCORE_&grestore/grestore ldf
+		/AGMCORE_&grestoreall/grestoreall ldf
+		/AGMCORE_&save/save ldf
+		/AGMCORE_&setoverprint/setoverprint ldf
+		/AGMCORE_gdictcopy{
+			begin
+			{def}forall
+			end
+		}def
+		/AGMCORE_gput{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			3 1 roll
+			put
+		}def
+		/AGMCORE_gget{
+			AGMCORE_gstack AGMCORE_gstackptr get
+			exch
+			get
+		}def
+		/gsave{
+			AGMCORE_&gsave
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/grestore{
+			AGMCORE_&grestore
+			AGMCORE_gstackptr 1 sub
+			dup AGMCORE_gstacksaveptr lt{1 add}if
+			dup AGMCORE_gstack exch get dup/AGMCORE_currentoverprint known
+				{/AGMCORE_currentoverprint get setoverprint}{pop}ifelse
+			/AGMCORE_gstackptr exch store
+		}def
+		/grestoreall{
+			AGMCORE_&grestoreall
+			/AGMCORE_gstackptr AGMCORE_gstacksaveptr store 
+		}def
+		/save{
+			AGMCORE_&save
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gstackptr 1 add
+			dup 32 ge{limitcheck}if
+			/AGMCORE_gstackptr exch store
+			/AGMCORE_gstacksaveptr AGMCORE_gstackptr store
+			AGMCORE_gstack AGMCORE_gstackptr get
+			AGMCORE_gdictcopy
+		}def
+		/setoverprint{
+			dup/AGMCORE_currentoverprint exch AGMCORE_gput AGMCORE_&setoverprint
+		}def	
+		0 1 AGMCORE_gstack length 1 sub{
+				AGMCORE_gstack exch AGMCORE_gstackframekeys dict put
+		}for
+	}if
+	level3/AGMCORE_&sysshfill AGMCORE_key_known not and
+	{
+		/AGMCORE_&sysshfill systemdict/shfill get def
+		/AGMCORE_&sysmakepattern systemdict/makepattern get def
+		/AGMCORE_&usrmakepattern/makepattern load def
+	}if
+	/currentcmykcolor[0 0 0 0]AGMCORE_gput
+	/currentstrokeadjust false AGMCORE_gput
+	/currentcolorspace[/DeviceGray]AGMCORE_gput
+	/sep_tint 0 AGMCORE_gput
+	/devicen_tints[0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0]AGMCORE_gput
+	/sep_colorspace_dict null AGMCORE_gput
+	/devicen_colorspace_dict null AGMCORE_gput
+	/indexed_colorspace_dict null AGMCORE_gput
+	/currentcolor_intent()AGMCORE_gput
+	/customcolor_tint 1 AGMCORE_gput
+	/absolute_colorimetric_crd null AGMCORE_gput
+	/relative_colorimetric_crd null AGMCORE_gput
+	/saturation_crd null AGMCORE_gput
+	/perceptual_crd null AGMCORE_gput
+	currentcolortransfer cvlit/AGMCore_gray_xfer xdf cvlit/AGMCore_b_xfer xdf
+		 cvlit/AGMCore_g_xfer xdf cvlit/AGMCore_r_xfer xdf
+	<<
+	/MaxPatternItem currentsystemparams/MaxPatternCache get
+	>>
+	setuserparams
+	end
+}def
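+% ps (page setup): supplies Level 1 fallbacks for setcmykcolor and
+% setoverprint, probes which plate is being produced (test_cmyk_color_plate),
+% derives the host/in-RIP separation flags, and installs the separation-aware
+% overrides of setcolorspace, shfill and makepattern that follow.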
+/ps
+{
+	/setcmykcolor where{
+		pop
+		Adobe_AGM_Core/AGMCORE_&setcmykcolor/setcmykcolor load put
+	}if
+	Adobe_AGM_Core begin
+	/setcmykcolor
+	{
+		4 copy AGMCORE_cmykbuf astore/currentcmykcolor exch AGMCORE_gput
+		1 sub 4 1 roll
+		3{
+			3 index add neg dup 0 lt{
+				pop 0
+			}if
+			3 1 roll
+		}repeat
+		setrgbcolor pop
+	}ndf
+	/currentcmykcolor
+	{
+		/currentcmykcolor AGMCORE_gget aload pop
+	}ndf
+	/setoverprint
+	{pop}ndf
+	/currentoverprint
+	{false}ndf
+	/AGMCORE_cyan_plate 1 0 0 0 test_cmyk_color_plate def
+	/AGMCORE_magenta_plate 0 1 0 0 test_cmyk_color_plate def
+	/AGMCORE_yellow_plate 0 0 1 0 test_cmyk_color_plate def
+	/AGMCORE_black_plate 0 0 0 1 test_cmyk_color_plate def
+	/AGMCORE_plate_ndx 
+		AGMCORE_cyan_plate{
+			0
+		}{
+			AGMCORE_magenta_plate{
+				1
+			}{
+				AGMCORE_yellow_plate{
+					2
+				}{
+					AGMCORE_black_plate{
+						3
+					}{
+						4
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		def
+	/AGMCORE_have_reported_unsupported_color_space false def
+	/AGMCORE_report_unsupported_color_space
+	{
+		AGMCORE_have_reported_unsupported_color_space false eq
+		{
+			(Warning: Job contains content that cannot be separated with on-host methods. This content appears on the black plate, and knocks out all other plates.)==
+			Adobe_AGM_Core/AGMCORE_have_reported_unsupported_color_space true ddf
+		}if
+	}def
+	/AGMCORE_composite_job
+		AGMCORE_cyan_plate AGMCORE_magenta_plate and AGMCORE_yellow_plate and AGMCORE_black_plate and def
+	/AGMCORE_in_rip_sep
+		/AGMCORE_in_rip_sep where{
+			pop AGMCORE_in_rip_sep
+		}{
+			AGMCORE_distilling 
+			{
+				false
+			}{
+				userdict/Adobe_AGM_OnHost_Seps known{
+					false
+				}{
+					level2{
+						currentpagedevice/Separations 2 copy known{
+							get
+						}{
+							pop pop false
+						}ifelse
+					}{
+						false
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	def
+	/AGMCORE_producing_seps AGMCORE_composite_job not AGMCORE_in_rip_sep or def
+	/AGMCORE_host_sep AGMCORE_producing_seps AGMCORE_in_rip_sep not and def
+	/AGM_preserve_spots 
+		/AGM_preserve_spots where{
+			pop AGM_preserve_spots
+		}{
+			AGMCORE_distilling AGMCORE_producing_seps or
+		}ifelse
+	def
+	/AGM_is_distiller_preserving_spotimages
+	{
+		currentdistillerparams/PreserveOverprintSettings known
+		{
+			currentdistillerparams/PreserveOverprintSettings get
+				{
+					currentdistillerparams/ColorConversionStrategy known
+					{
+						currentdistillerparams/ColorConversionStrategy get
+						/sRGB ne
+					}{
+						true
+					}ifelse
+				}{
+					false
+				}ifelse
+		}{
+			false
+		}ifelse
+	}def
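+	% convert_spot_to_process: decides whether the named spot colorant must
+	% become process color, by painting test patches when separating on the
+	% host, by Distiller capability when distilling, or by querying the RIP
+	% (HqnSpots ProcSet / SeparationColorNames) otherwise.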
+	/convert_spot_to_process where{pop}{
+		/convert_spot_to_process
+		{
+			//Adobe_AGM_Core begin
+			dup map_alias{
+				/Name get exch pop
+			}if
+			dup dup(None)eq exch(All)eq or
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+					gsave
+					1 0 0 0 setcmykcolor currentgray 1 exch sub
+					0 1 0 0 setcmykcolor currentgray 1 exch sub
+					0 0 1 0 setcmykcolor currentgray 1 exch sub
+					0 0 0 1 setcmykcolor currentgray 1 exch sub
+					add add add 0 eq
+					{
+						pop false
+					}{
+						false setoverprint
+						current_spot_alias false set_spot_alias
+						1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+						set_spot_alias
+						currentgray 1 ne
+					}ifelse
+					grestore
+				}{
+					AGMCORE_distilling
+					{
+						pop AGM_is_distiller_preserving_spotimages not
+					}{
+						//Adobe_AGM_Core/AGMCORE_name xddf
+						false
+						//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 0 eq
+						AGMUTIL_cpd/OverrideSeparations known and
+						{
+							AGMUTIL_cpd/OverrideSeparations get
+							{
+								/HqnSpots/ProcSet resourcestatus
+								{
+									pop pop pop true
+								}if
+							}if
+						}if					
+						{
+							AGMCORE_name/HqnSpots/ProcSet findresource/TestSpot gx not
+						}{
+							gsave
+							[/Separation AGMCORE_name/DeviceGray{}]AGMCORE_&setcolorspace
+							false
+							AGMUTIL_cpd/SeparationColorNames 2 copy known
+							{
+								get
+								{AGMCORE_name eq or}forall
+								not
+							}{
+								pop pop pop true
+							}ifelse
+							grestore
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			end
+		}def
+	}ifelse
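+	% convert_to_process: applies the same test to a whole array of DeviceN
+	% colorant names; true when the set as a whole must become process color.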
+	/convert_to_process where{pop}{
+		/convert_to_process
+		{
+			dup length 0 eq
+				{
+				pop false
+				}{
+				AGMCORE_host_sep
+				{
+				dup true exch
+					{
+					dup(Cyan)eq exch
+					dup(Magenta)eq 3 -1 roll or exch
+					dup(Yellow)eq 3 -1 roll or exch
+					dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process and}ifelse
+					}
+				forall
+					{
+					true exch
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						(Black)eq or and
+						}forall
+						not
+					}{pop false}ifelse
+				}{
+				false exch
+					{
+					/PhotoshopDuotoneList where{pop false}{true}ifelse
+						{
+						dup(Cyan)eq exch
+						dup(Magenta)eq 3 -1 roll or exch
+						dup(Yellow)eq 3 -1 roll or exch
+						dup(Black)eq 3 -1 roll or
+						{pop}
+						{convert_spot_to_process or}ifelse
+						}
+						{
+						convert_spot_to_process or
+						}
+					ifelse
+					}
+				forall
+				}ifelse
+			}ifelse
+		}def
+	}ifelse	
+	/AGMCORE_avoid_L2_sep_space 
+		version cvr 2012 lt 
+		level2 and 
+		AGMCORE_producing_seps not and
+	def
+	/AGMCORE_is_cmyk_sep
+		AGMCORE_cyan_plate AGMCORE_magenta_plate or AGMCORE_yellow_plate or AGMCORE_black_plate or
+	def
+	/AGM_avoid_0_cmyk where{
+		pop AGM_avoid_0_cmyk
+	}{
+		AGM_preserve_spots 
+		userdict/Adobe_AGM_OnHost_Seps known 
+		userdict/Adobe_AGM_InRip_Seps known or
+		not and
+	}ifelse
+	{
+		/setcmykcolor[
+			{
+				4 copy add add add 0 eq currentoverprint and{
+					pop 0.0005
+				}if
+			}/exec cvx
+			/AGMCORE_&setcmykcolor load dup type/operatortype ne{
+				/exec cvx
+			}if
+		]cvx def
+	}if
+	/AGMCORE_IsSeparationAProcessColor
+		{
+		dup(Cyan)eq exch dup(Magenta)eq exch dup(Yellow)eq exch(Black)eq or or or
+		}def
+	AGMCORE_host_sep{
+		/setcolortransfer
+		{
+			AGMCORE_cyan_plate{
+				pop pop pop
+			}{
+			 	AGMCORE_magenta_plate{
+			 		4 3 roll pop pop pop
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			4 2 roll pop pop pop
+			 		}{
+			 			4 1 roll pop pop pop
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+			settransfer 
+		}	
+		def
+		/AGMCORE_get_ink_data
+			AGMCORE_cyan_plate{
+				{pop pop pop}
+			}{
+			 	AGMCORE_magenta_plate{
+			 		{4 3 roll pop pop pop}
+			 	}{
+			 		AGMCORE_yellow_plate{
+			 			{4 2 roll pop pop pop}
+			 		}{
+			 			{4 1 roll pop pop pop}
+			 		}ifelse
+			 	}ifelse
+			}ifelse
+		def
+		/AGMCORE_RemoveProcessColorNames
+			{
+			1 dict begin
+			/filtername
+				{
+				dup/Cyan eq 1 index(Cyan)eq or
+					{pop(_cyan_)}if
+				dup/Magenta eq 1 index(Magenta)eq or
+					{pop(_magenta_)}if
+				dup/Yellow eq 1 index(Yellow)eq or
+					{pop(_yellow_)}if
+				dup/Black eq 1 index(Black)eq or
+					{pop(_black_)}if
+				}def
+			dup type/arraytype eq
+				{[exch{filtername}forall]}
+				{filtername}ifelse
+			end
+			}def
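+		% On PostScript 3 host separations, the procedures below rewrite shading
+		% dictionaries so only the channel belonging to the current plate is
+		% painted, in DeviceGray; shfill and makepattern are overridden to route
+		% smooth shades through this separation step.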
+		level3{
+			/AGMCORE_IsCurrentColor
+				{
+				dup AGMCORE_IsSeparationAProcessColor
+					{
+					AGMCORE_plate_ndx 0 eq
+						{dup(Cyan)eq exch/Cyan eq or}if
+					AGMCORE_plate_ndx 1 eq
+						{dup(Magenta)eq exch/Magenta eq or}if
+					AGMCORE_plate_ndx 2 eq
+						{dup(Yellow)eq exch/Yellow eq or}if
+					AGMCORE_plate_ndx 3 eq
+						{dup(Black)eq exch/Black eq or}if
+					AGMCORE_plate_ndx 4 eq
+						{pop false}if
+					}{
+					gsave
+					false setoverprint
+					current_spot_alias false set_spot_alias
+					1 1 1 1 6 -1 roll findcmykcustomcolor 1 setcustomcolor
+					set_spot_alias
+					currentgray 1 ne
+					grestore
+					}ifelse
+				}def
+			/AGMCORE_filter_functiondatasource
+				{	
+				5 dict begin
+				/data_in xdf
+				data_in type/stringtype eq
+					{
+					/ncomp xdf
+					/comp xdf
+					/string_out data_in length ncomp idiv string def
+					0 ncomp data_in length 1 sub
+						{
+						string_out exch dup ncomp idiv exch data_in exch ncomp getinterval comp get 255 exch sub put
+						}for
+					string_out
+					}{
+					string/string_in xdf
+					/string_out 1 string def
+					/component xdf
+					[
+					data_in string_in/readstring cvx
+						[component/get cvx 255/exch cvx/sub cvx string_out/exch cvx 0/exch cvx/put cvx string_out]cvx
+						[/pop cvx()]cvx/ifelse cvx
+					]cvx/ReusableStreamDecode filter
+				}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingFunction
+				{
+				2 dict begin
+				/paint? xdf
+				/channel xdf
+				dup type/dicttype eq
+					{
+					begin
+					FunctionType 0 eq
+						{
+						/DataSource channel Range length 2 idiv DataSource AGMCORE_filter_functiondatasource def
+						currentdict/Decode known
+							{/Decode Decode channel 2 mul 2 getinterval def}if
+						paint? not
+							{/Decode[1 1]def}if
+						}if
+					FunctionType 2 eq
+						{
+						paint?
+							{
+							/C0[C0 channel get 1 exch sub]def
+							/C1[C1 channel get 1 exch sub]def
+							}{
+							/C0[1]def
+							/C1[1]def
+							}ifelse			
+						}if
+					FunctionType 3 eq
+						{
+						/Functions[Functions{channel paint? AGMCORE_separateShadingFunction}forall]def			
+						}if
+					currentdict/Range known
+						{/Range[0 1]def}if
+					currentdict
+					end}{
+					channel get 0 paint? AGMCORE_separateShadingFunction
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShading
+				{
+				3 -1 roll begin
+				currentdict/Function known
+					{
+					currentdict/Background known
+						{[1 index{Background 3 index get 1 exch sub}{1}ifelse]/Background xdf}if
+					Function 3 1 roll AGMCORE_separateShadingFunction/Function xdf
+					/ColorSpace[/DeviceGray]def
+					}{
+					ColorSpace dup type/arraytype eq{0 get}if/DeviceCMYK eq
+						{
+						/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+						}{
+						ColorSpace dup 1 get AGMCORE_RemoveProcessColorNames 1 exch put
+						}ifelse
+					ColorSpace 0 get/Separation eq
+						{
+							{
+								[1/exch cvx/sub cvx]cvx
+							}{
+								[/pop cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll put
+							pop
+						}{
+							{
+								[exch ColorSpace 1 get length 1 sub exch sub/index cvx 1/exch cvx/sub cvx ColorSpace 1 get length 1 add 1/roll cvx ColorSpace 1 get length{/pop cvx}repeat]cvx
+							}{
+								pop[ColorSpace 1 get length{/pop cvx}repeat cvx 1]cvx
+							}ifelse
+							ColorSpace 3 3 -1 roll bind put
+						}ifelse
+					ColorSpace 2/DeviceGray put																		
+					}ifelse
+				end
+				}def
+			/AGMCORE_separateShadingDict
+				{
+				dup/ColorSpace get
+				dup type/arraytype ne
+					{[exch]}if
+				dup 0 get/DeviceCMYK eq
+					{
+					exch begin 
+					currentdict
+					AGMCORE_cyan_plate
+						{0 true}if
+					AGMCORE_magenta_plate
+						{1 true}if
+					AGMCORE_yellow_plate
+						{2 true}if
+					AGMCORE_black_plate
+						{3 true}if
+					AGMCORE_plate_ndx 4 eq
+						{0 false}if		
+					dup not currentoverprint and
+						{/AGMCORE_ignoreshade true def}if
+					AGMCORE_separateShading
+					currentdict
+					end exch
+					}if
+				dup 0 get/Separation eq
+					{
+					exch begin
+					ColorSpace 1 get dup/None ne exch/All ne and
+						{
+						ColorSpace 1 get AGMCORE_IsCurrentColor AGMCORE_plate_ndx 4 lt and ColorSpace 1 get AGMCORE_IsSeparationAProcessColor not and
+							{
+							ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+								{
+								/ColorSpace
+									[
+									/Separation
+									ColorSpace 1 get
+									/DeviceGray
+										[
+										ColorSpace 3 get/exec cvx
+										4 AGMCORE_plate_ndx sub -1/roll cvx
+										4 1/roll cvx
+										3[/pop cvx]cvx/repeat cvx
+										1/exch cvx/sub cvx
+										]cvx									
+									]def
+								}{
+								AGMCORE_report_unsupported_color_space
+								AGMCORE_black_plate not
+									{
+									currentdict 0 false AGMCORE_separateShading
+									}if
+								}ifelse
+							}{
+							currentdict ColorSpace 1 get AGMCORE_IsCurrentColor
+							0 exch 
+							dup not currentoverprint and
+								{/AGMCORE_ignoreshade true def}if
+							AGMCORE_separateShading
+							}ifelse	
+						}if			
+					currentdict
+					end exch
+					}if
+				dup 0 get/DeviceN eq
+					{
+					exch begin
+					ColorSpace 1 get convert_to_process
+						{
+						ColorSpace 2 get dup type/arraytype eq{0 get}if/DeviceCMYK eq 
+							{
+							/ColorSpace
+								[
+								/DeviceN
+								ColorSpace 1 get
+								/DeviceGray
+									[
+									ColorSpace 3 get/exec cvx
+									4 AGMCORE_plate_ndx sub -1/roll cvx
+									4 1/roll cvx
+									3[/pop cvx]cvx/repeat cvx
+									1/exch cvx/sub cvx
+									]cvx									
+								]def
+							}{
+							AGMCORE_report_unsupported_color_space
+							AGMCORE_black_plate not
+								{
+								currentdict 0 false AGMCORE_separateShading
+								/ColorSpace[/DeviceGray]def
+								}if
+							}ifelse
+						}{
+						currentdict
+						false -1 ColorSpace 1 get
+							{
+							AGMCORE_IsCurrentColor
+								{
+								1 add
+								exch pop true exch exit
+								}if
+							1 add
+							}forall
+						exch 
+						dup not currentoverprint and
+							{/AGMCORE_ignoreshade true def}if
+						AGMCORE_separateShading
+						}ifelse
+					currentdict
+					end exch
+					}if
+				dup 0 get dup/DeviceCMYK eq exch dup/Separation eq exch/DeviceN eq or or not
+					{
+					exch begin
+					ColorSpace dup type/arraytype eq
+						{0 get}if
+					/DeviceGray ne
+						{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate not
+							{
+							ColorSpace 0 get/CIEBasedA eq
+								{
+								/ColorSpace[/Separation/_ciebaseda_/DeviceGray{}]def
+								}if
+							ColorSpace 0 get dup/CIEBasedABC eq exch dup/CIEBasedDEF eq exch/DeviceRGB eq or or
+								{
+								/ColorSpace[/DeviceN[/_red_/_green_/_blue_]/DeviceRGB{}]def
+								}if
+							ColorSpace 0 get/CIEBasedDEFG eq
+								{
+								/ColorSpace[/DeviceN[/_cyan_/_magenta_/_yellow_/_black_]/DeviceCMYK{}]def
+								}if
+							currentdict 0 false AGMCORE_separateShading
+							}if
+						}if
+					currentdict
+					end exch
+					}if
+				pop
+				dup/AGMCORE_ignoreshade known
+					{
+					begin
+					/ColorSpace[/Separation(None)/DeviceGray{}]def
+					currentdict end
+					}if
+				}def
+			/shfill
+				{
+				AGMCORE_separateShadingDict 
+				dup/AGMCORE_ignoreshade known
+					{pop}
+					{AGMCORE_&sysshfill}ifelse
+				}def
+			/makepattern
+				{
+				exch
+				dup/PatternType get 2 eq
+					{
+					clonedict
+					begin
+					/Shading Shading AGMCORE_separateShadingDict def
+					Shading/AGMCORE_ignoreshade known
+					currentdict end exch
+					{pop<</PatternType 1/PaintProc{pop}/BBox[0 0 1 1]/XStep 1/YStep 1/PaintType 1/TilingType 3>>}if
+					exch AGMCORE_&sysmakepattern
+					}{
+					exch AGMCORE_&usrmakepattern
+					}ifelse
+				}def
+		}if
+	}if
+	AGMCORE_in_rip_sep{
+		/setcustomcolor
+		{
+			exch aload pop
+			dup 7 1 roll inRip_spot_has_ink not	{
+				4{4 index mul 4 1 roll}
+				repeat
+				/DeviceCMYK setcolorspace
+				6 -2 roll pop pop
+			}{
+				//Adobe_AGM_Core begin
+					/AGMCORE_k xdf/AGMCORE_y xdf/AGMCORE_m xdf/AGMCORE_c xdf
+				end
+				[/Separation 4 -1 roll/DeviceCMYK
+				{dup AGMCORE_c mul exch dup AGMCORE_m mul exch dup AGMCORE_y mul exch AGMCORE_k mul}
+				]
+				setcolorspace
+			}ifelse
+			setcolor
+		}ndf
+		/setseparationgray
+		{
+			[/Separation(All)/DeviceGray{}]setcolorspace_opt
+			1 exch sub setcolor
+		}ndf
+	}{
+		/setseparationgray
+		{
+			AGMCORE_&setgray
+		}ndf
+	}ifelse
+	/findcmykcustomcolor
+	{
+		5 makereadonlyarray
+	}ndf
+	/setcustomcolor
+	{
+		exch aload pop pop
+		4{4 index mul 4 1 roll}repeat
+		setcmykcolor pop
+	}ndf
+	/has_color
+		/colorimage where{
+			AGMCORE_producing_seps{
+				pop true
+			}{
+				systemdict eq
+			}ifelse
+		}{
+			false
+		}ifelse
+	def
+	/map_index
+	{
+		1 index mul exch getinterval{255 div}forall
+	}bdf
+	/map_indexed_devn
+	{
+		Lookup Names length 3 -1 roll cvi map_index
+	}bdf
+	/n_color_components
+	{
+		base_colorspace_type
+		dup/DeviceGray eq{
+			pop 1
+		}{
+			/DeviceCMYK eq{
+				4
+			}{
+				3
+			}ifelse
+		}ifelse
+	}bdf
+	level2{
+		/mo/moveto ldf
+		/li/lineto ldf
+		/cv/curveto ldf
+		/knockout_unitsq
+		{
+			1 setgray
+			0 0 1 1 rectfill
+		}def
+		level2/setcolorspace AGMCORE_key_known not and{
+			/AGMCORE_&&&setcolorspace/setcolorspace ldf
+			/AGMCORE_ReplaceMappedColor
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					/AGMCORE_SpotAliasAry2 where{
+						begin
+						dup 0 get dup/Separation eq
+						{
+							pop
+							dup length array copy
+							dup dup 1 get
+							current_spot_alias
+							{
+								dup map_alias
+								{
+									false set_spot_alias
+									dup 1 exch setsepcolorspace
+									true set_spot_alias
+									begin
+									/sep_colorspace_dict currentdict AGMCORE_gput
+									pop pop	pop
+									[
+										/Separation Name 
+										CSA map_csa
+										MappedCSA 
+										/sep_colorspace_proc load
+									]
+									dup Name
+									end
+								}if
+							}if
+							map_reserved_ink_name 1 xpt
+						}{
+							/DeviceN eq 
+							{
+								dup length array copy
+								dup dup 1 get[
+									exch{
+										current_spot_alias{
+											dup map_alias{
+												/Name get exch pop
+											}if
+										}if
+										map_reserved_ink_name
+									}forall 
+								]1 xpt
+							}if
+						}ifelse
+						end
+					}if
+				}if
+			}def
+			/setcolorspace
+			{
+				dup type dup/arraytype eq exch/packedarraytype eq or
+				{
+					dup 0 get/Indexed eq
+					{
+						AGMCORE_distilling
+						{
+							/PhotoshopDuotoneList where
+							{
+								pop false
+							}{
+								true
+							}ifelse
+						}{
+							true
+						}ifelse
+						{
+							aload pop 3 -1 roll
+							AGMCORE_ReplaceMappedColor
+							3 1 roll 4 array astore
+						}if
+					}{
+						AGMCORE_ReplaceMappedColor
+					}ifelse
+				}if
+				DeviceN_PS2_inRip_seps{AGMCORE_&&&setcolorspace}if
+			}def
+		}if	
+	}{
+		/adj
+		{
+			currentstrokeadjust{
+				transform
+				0.25 sub round 0.25 add exch
+				0.25 sub round 0.25 add exch
+				itransform
+			}if
+		}def
+		/mo{
+			adj moveto
+		}def
+		/li{
+			adj lineto
+		}def
+		/cv{
+			6 2 roll adj
+			6 2 roll adj
+			6 2 roll adj curveto
+		}def
+		/knockout_unitsq
+		{
+			1 setgray
+			8 8 1[8 0 0 8 0 0]{<ffffffffffffffff>}image
+		}def
+		/currentstrokeadjust{
+			/currentstrokeadjust AGMCORE_gget
+		}def
+		/setstrokeadjust{
+			/currentstrokeadjust exch AGMCORE_gput
+		}def
+		/setcolorspace
+		{
+			/currentcolorspace exch AGMCORE_gput
+		}def
+		/currentcolorspace
+		{
+			/currentcolorspace AGMCORE_gget
+		}def
+		/setcolor_devicecolor
+		{
+			base_colorspace_type
+			dup/DeviceGray eq{
+				pop setgray
+			}{
+				/DeviceCMYK eq{
+					setcmykcolor
+				}{
+					setrgbcolor
+				}ifelse
+			}ifelse
+		}def
+		/setcolor
+		{
+			currentcolorspace 0 get
+			dup/DeviceGray ne{
+				dup/DeviceCMYK ne{
+					dup/DeviceRGB ne{
+						dup/Separation eq{
+							pop
+							currentcolorspace 3 gx
+							currentcolorspace 2 get
+						}{
+							dup/Indexed eq{
+								pop
+								currentcolorspace 3 get dup type/stringtype eq{
+									currentcolorspace 1 get n_color_components
+									3 -1 roll map_index
+								}{
+									exec
+								}ifelse
+								currentcolorspace 1 get
+							}{
+								/AGMCORE_cur_err/AGMCORE_invalid_color_space def
+								AGMCORE_invalid_color_space
+							}ifelse
+						}ifelse
+					}if
+				}if
+			}if
+			setcolor_devicecolor
+		}def
+	}ifelse
+	/sop/setoverprint ldf
+	/lw/setlinewidth ldf
+	/lc/setlinecap ldf
+	/lj/setlinejoin ldf
+	/ml/setmiterlimit ldf
+	/dsh/setdash ldf
+	/sadj/setstrokeadjust ldf
+	/gry/setgray ldf
+	/rgb/setrgbcolor ldf
+	/cmyk[
+		/currentcolorspace[/DeviceCMYK]/AGMCORE_gput cvx
+		/setcmykcolor load dup type/operatortype ne{/exec cvx}if
+	]cvx bdf
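+	% nzopmsc: non-zero overprint color. Keeps only the non-zero CMYK
+	% components on the stack, builds the 4-bit mask, and selects the matching
+	% DeviceN space from AGMCORE_CMYKDeviceNColorspaces so zero channels
+	% overprint instead of knocking out.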
+	level3 AGMCORE_host_sep not and{
+		/nzopmsc{
+			6 dict begin
+			/kk exch def
+			/yy exch def
+			/mm exch def
+			/cc exch def
+			/sum 0 def
+			cc 0 ne{/sum sum 2#1000 or def cc}if
+			mm 0 ne{/sum sum 2#0100 or def mm}if
+			yy 0 ne{/sum sum 2#0010 or def yy}if
+			kk 0 ne{/sum sum 2#0001 or def kk}if
+			AGMCORE_CMYKDeviceNColorspaces sum get setcolorspace
+			sum 0 eq{0}if
+			end
+			setcolor
+		}bdf
+	}{
+		/nzopmsc/cmyk ldf
+	}ifelse
+	/sep/setsepcolor ldf
+	/devn/setdevicencolor ldf
+	/idx/setindexedcolor ldf
+	/colr/setcolor ldf
+	/csacrd/set_csa_crd ldf
+	/sepcs/setsepcolorspace ldf
+	/devncs/setdevicencolorspace ldf
+	/idxcs/setindexedcolorspace ldf
+	/cp/closepath ldf
+	/clp/clp_npth ldf
+	/eclp/eoclp_npth ldf
+	/f/fill ldf
+	/ef/eofill ldf
+	/@/stroke ldf
+	/nclp/npth_clp ldf
+	/gset/graphic_setup ldf
+	/gcln/graphic_cleanup ldf
+	/ct/concat ldf
+	/cf/currentfile ldf
+	/fl/filter ldf
+	/rs/readstring ldf
+	/AGMCORE_def_ht currenthalftone def
+	/clonedict Adobe_AGM_Utils begin/clonedict load end def
+	/clonearray Adobe_AGM_Utils begin/clonearray load end def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
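+	% getrampcolor: interpolates the color at a ramp index from the Samples
+	% and Scaling tables and sets it through sep/devn/setcolor; used by the
+	% gradient-emulation procedures below (RadialShade, GenStrips).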
+	/getrampcolor
+	{
+		/indx exch def
+		0 1 NumComp 1 sub
+		{
+			dup
+			Samples exch get
+			dup type/stringtype eq{indx get}if
+			exch
+			Scaling exch get aload pop
+			3 1 roll
+			mul add
+		}for
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse
+	}bdf
+	/sssetbackground{
+		aload pop 
+		ColorSpaceFamily/Separation eq 
+		{sep}
+		{
+			ColorSpaceFamily/DeviceN eq
+			{devn}{setcolor}ifelse
+		}ifelse	
+	}bdf
+	/RadialShade
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/r2 xdf
+		/c2y xdf
+		/c2x xdf
+		/r1 xdf
+		/c1y xdf
+		/c1x xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		c1x c2x eq
+		{
+			c1y c2y lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope c2y c1y sub c2x c1x sub div def
+			/theta slope 1 atan def
+			c2x c1x lt c2y c1y ge and{/theta theta 180 sub def}if
+			c2x c1x lt c2y c1y lt and{/theta theta 180 add def}if
+		}ifelse
+		gsave
+		clippath
+		c1x c1y translate
+		theta rotate
+		-90 rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax xdf
+		/xMax xdf
+		/yMin xdf
+		/xMin xdf
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			/max{2 copy gt{pop}{exch pop}ifelse}bdf
+			/min{2 copy lt{pop}{exch pop}ifelse}bdf
+			rampdict begin
+			40 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			c1x c1y translate
+			theta rotate
+			-90 rotate
+			/c2y c1x c2x sub dup mul c1y c2y sub dup mul add sqrt def
+			/c1y 0 def
+			/c1x 0 def
+			/c2x 0 def
+			ext0
+			{
+				0 getrampcolor
+				c2y r2 add r1 sub 0.0001 lt
+				{
+					c1x c1y r1 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2y r1 add r2 le
+					{
+						c1x c1y r1 0 360 arc
+						fill
+					}
+					{
+						c2x c2y r2 0 360 arc fill
+						r1 r2 eq
+						{
+							/p1x r1 neg def
+							/p1y c1y def
+							/p2x r1 def
+							/p2y c1y def
+							p1x p1y moveto p2x p2y lineto p2x yMin lineto p1x yMin lineto
+							fill
+						}{
+							/AA r2 r1 sub c2y div def
+							AA -1 eq
+							{/theta 89.99 def}
+							{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+							ifelse
+							/SS1 90 theta add dup sin exch cos div def
+							/p1x r1 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+							/p1y p1x SS1 div neg def
+							/SS2 90 theta sub dup sin exch cos div def
+							/p2x r1 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+							/p2y p2x SS2 div neg def
+							r1 r2 gt
+							{
+								/L1maxX p1x yMin p1y sub SS1 div add def
+								/L2maxX p2x yMin p2y sub SS2 div add def
+							}{
+								/L1maxX 0 def
+								/L2maxX 0 def
+							}ifelse
+							p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+							L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+							fill
+						}ifelse
+					}ifelse
+				}ifelse
+			}if
+		c1x c2x sub dup mul
+		c1y c2y sub dup mul
+		add 0.5 exp
+		0 dtransform
+		dup mul exch dup mul add 0.5 exp 72 div
+		0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+		1 index 1 index lt{exch}if pop
+		/hires xdf
+		hires mul
+		/numpix xdf
+		/numsteps NumSamples def
+		/rampIndxInc 1 def
+		/subsampling false def
+		numpix 0 ne
+		{
+			NumSamples numpix div 0.5 gt
+			{
+				/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+				/rampIndxInc NumSamples 1 sub numsteps div def
+				/subsampling true def
+			}if
+		}if
+		/xInc c2x c1x sub numsteps div def
+		/yInc c2y c1y sub numsteps div def
+		/rInc r2 r1 sub numsteps div def
+		/cx c1x def
+		/cy c1y def
+		/radius r1 def
+		np
+		xInc 0 eq yInc 0 eq rInc 0 eq and and
+		{
+			0 getrampcolor
+			cx cy radius 0 360 arc
+			stroke
+			NumSamples 1 sub getrampcolor
+			cx cy radius 72 hires div add 0 360 arc
+			0 setlinewidth
+			stroke
+		}{
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				cx cy radius 0 360 arc
+				/cx cx xInc add def
+				/cy cy yInc add def
+				/radius radius rInc add def
+				cx cy radius 360 0 arcn
+				eofill
+				rampIndxInc add
+			}repeat
+			pop
+		}ifelse
+		ext1
+		{
+			c2y r2 add r1 lt
+			{
+				c2x c2y r2 0 360 arc
+				fill
+			}{
+				c2y r1 add r2 sub 0.0001 le
+				{
+					c2x c2y r2 360 0 arcn
+					pathbbox
+					/aymax exch def
+					/axmax exch def
+					/aymin exch def
+					/axmin exch def
+					/bxMin xMin axmin min def
+					/byMin yMin aymin min def
+					/bxMax xMax axmax max def
+					/byMax yMax aymax max def
+					bxMin byMin moveto
+					bxMax byMin lineto
+					bxMax byMax lineto
+					bxMin byMax lineto
+					bxMin byMin lineto
+					eofill
+				}{
+					c2x c2y r2 0 360 arc fill
+					r1 r2 eq
+					{
+						/p1x r2 neg def
+						/p1y c2y def
+						/p2x r2 def
+						/p2y c2y def
+						p1x p1y moveto p2x p2y lineto p2x yMax lineto p1x yMax lineto
+						fill
+					}{
+						/AA r2 r1 sub c2y div def
+						AA -1 eq
+						{/theta 89.99 def}
+						{/theta AA 1 AA dup mul sub sqrt div 1 atan def}
+						ifelse
+						/SS1 90 theta add dup sin exch cos div def
+						/p1x r2 SS1 SS1 mul SS1 SS1 mul 1 add div sqrt mul neg def
+						/p1y c2y p1x SS1 div sub def
+						/SS2 90 theta sub dup sin exch cos div def
+						/p2x r2 SS2 SS2 mul SS2 SS2 mul 1 add div sqrt mul def
+						/p2y c2y p2x SS2 div sub def
+						r1 r2 lt
+						{
+							/L1maxX p1x yMax p1y sub SS1 div add def
+							/L2maxX p2x yMax p2y sub SS2 div add def
+						}{
+							/L1maxX 0 def
+							/L2maxX 0 def
+						}ifelse
+						p1x p1y moveto p2x p2y lineto L2maxX L2maxX p2x sub SS2 mul p2y add lineto
+						L1maxX L1maxX p1x sub SS1 mul p1y add lineto
+						fill
+					}ifelse
+				}ifelse
+			}ifelse
+		}if
+		grestore
+		grestore
+		end
+		end
+		end
+		}ifelse
+	}bdf
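+	% GenStrips: axial-gradient fallback to RadialShade's radial one. Rotates
+	% so the gradient runs along the x axis, then fills numsteps thin
+	% rectangles across the clip, subsampling the ramp when it has more
+	% samples than device pixels and extending end colors when ext0/ext1 are set.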
+	/GenStrips
+	{
+		40 dict begin
+		/ColorSpaceFamily xdf
+		/background xdf
+		/ext1 xdf
+		/ext0 xdf
+		/BBox xdf
+		/y2 xdf
+		/x2 xdf
+		/y1 xdf
+		/x1 xdf
+		/rampdict xdf
+		/setinkoverprint where{pop/setinkoverprint{pop}def}if
+		gsave
+		BBox length 0 gt
+		{
+			np
+			BBox 0 get BBox 1 get moveto
+			BBox 2 get BBox 0 get sub 0 rlineto
+			0 BBox 3 get BBox 1 get sub rlineto
+			BBox 2 get BBox 0 get sub neg 0 rlineto
+			closepath
+			clip
+			np
+		}if
+		x1 x2 eq
+		{
+			y1 y2 lt{/theta 90 def}{/theta 270 def}ifelse
+		}{
+			/slope y2 y1 sub x2 x1 sub div def
+			/theta slope 1 atan def
+			x2 x1 lt y2 y1 ge and{/theta theta 180 sub def}if
+			x2 x1 lt y2 y1 lt and{/theta theta 180 add def}if
+		}
+		ifelse
+		gsave
+		clippath
+		x1 y1 translate
+		theta rotate
+		{pathbbox}stopped
+		{0 0 0 0}if
+		/yMax exch def
+		/xMax exch def
+		/yMin exch def
+		/xMin exch def
+		grestore
+		xMax xMin eq yMax yMin eq or
+		{
+			grestore
+			end
+		}{
+			rampdict begin
+			20 dict begin
+			background length 0 gt{background sssetbackground gsave clippath fill grestore}if
+			gsave
+			x1 y1 translate
+			theta rotate
+			/xStart 0 def
+			/xEnd x2 x1 sub dup mul y2 y1 sub dup mul add 0.5 exp def
+			/ySpan yMax yMin sub def
+			/numsteps NumSamples def
+			/rampIndxInc 1 def
+			/subsampling false def
+			xStart 0 transform
+			xEnd 0 transform
+			3 -1 roll
+			sub dup mul
+			3 1 roll
+			sub dup mul
+			add 0.5 exp 72 div
+			0 72 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			72 0 matrix defaultmatrix dtransform dup mul exch dup mul add sqrt
+			1 index 1 index lt{exch}if pop
+			mul
+			/numpix xdf
+			numpix 0 ne
+			{
+				NumSamples numpix div 0.5 gt
+				{
+					/numsteps numpix 2 div round cvi dup 1 le{pop 2}if def
+					/rampIndxInc NumSamples 1 sub numsteps div def
+					/subsampling true def
+				}if
+			}if
+			ext0
+			{
+				0 getrampcolor
+				xMin xStart lt
+				{
+					xMin yMin xMin neg ySpan rectfill
+				}if
+			}if
+			/xInc xEnd xStart sub numsteps div def
+			/x xStart def
+			0
+			numsteps
+			{
+				dup
+				subsampling{round cvi}if
+				getrampcolor
+				x yMin xInc ySpan rectfill
+				/x x xInc add def
+				rampIndxInc add
+			}repeat
+			pop
+			ext1{
+				xMax xEnd gt
+				{
+					xEnd yMin xMax xEnd sub ySpan rectfill
+				}if
+			}if
+			grestore
+			grestore
+			end
+			end
+			end
+		}ifelse
+	}bdf
+}def
+/pt
+{
+	end
+}def
+/dt{
+}def
+/pgsv{
+	//Adobe_AGM_Core/AGMCORE_save save put
+}def
+/pgrs{
+	//Adobe_AGM_Core/AGMCORE_save get restore
+}def
+systemdict/findcolorrendering known{
+	/findcolorrendering systemdict/findcolorrendering get def
+}if
+systemdict/setcolorrendering known{
+	/setcolorrendering systemdict/setcolorrendering get def
+}if
+/test_cmyk_color_plate
+{
+	gsave
+	setcmykcolor currentgray 1 ne
+	grestore
+}def
+/inRip_spot_has_ink
+{
+	dup//Adobe_AGM_Core/AGMCORE_name xddf
+	convert_spot_to_process not
+}def
+/map255_to_range
+{
+	1 index sub
+	3 -1 roll 255 div mul add
+}def
+/set_csa_crd
+{
+	/sep_colorspace_dict null AGMCORE_gput
+	begin
+		CSA get_csa_by_name setcolorspace_opt
+		set_crd
+	end
+}
+def
+/map_csa
+{
+	currentdict/MappedCSA known{MappedCSA null ne}{false}ifelse
+	{pop}{get_csa_by_name/MappedCSA xdf}ifelse
+}def
+/setsepcolor
+{
+	/sep_colorspace_dict AGMCORE_gget begin
+		dup/sep_tint exch AGMCORE_gput
+		TintProc
+	end
+}def
+/setdevicencolor
+{
+	/devicen_colorspace_dict AGMCORE_gget begin
+		Names length copy
+		Names length 1 sub -1 0
+		{
+			/devicen_tints AGMCORE_gget 3 1 roll xpt
+		}for
+		TintProc
+	end
+}def
+/sep_colorspace_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	currentdict/Components known{
+		Components aload pop 
+		TintMethod/Lab eq{
+			2{AGMCORE_tmp mul NComponents 1 roll}repeat
+			LMax sub AGMCORE_tmp mul LMax add NComponents 1 roll
+		}{
+			TintMethod/Subtractive eq{
+				NComponents{
+					AGMCORE_tmp mul NComponents 1 roll
+				}repeat
+			}{
+				NComponents{
+					1 sub AGMCORE_tmp mul 1 add NComponents 1 roll
+				}repeat
+			}ifelse
+		}ifelse
+	}{
+		ColorLookup AGMCORE_tmp ColorLookup length 1 sub mul round cvi get
+		aload pop
+	}ifelse
+	end
+}def
+/sep_colorspace_gray_proc
+{
+	/AGMCORE_tmp exch store
+	/sep_colorspace_dict AGMCORE_gget begin
+	GrayLookup AGMCORE_tmp GrayLookup length 1 sub mul round cvi get
+	end
+}def
+/sep_proc_name
+{
+	dup 0 get 
+	dup/DeviceRGB eq exch/DeviceCMYK eq or level2 not and has_color not and{
+		pop[/DeviceGray]
+		/sep_colorspace_gray_proc
+	}{
+		/sep_colorspace_proc
+	}ifelse
+}def
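+% setsepcolorspace: installs a Separation space for the dict on the stack.
+% Depending on the environment it emulates via an Indexed ramp (old Level 2
+% separation path), routes through findcmykcustomcolor/setcustomcolor, or
+% defines a plain Separation colorspace; TintProc is set to match.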
+/setsepcolorspace
+{
+	current_spot_alias{
+		dup begin
+			Name map_alias{
+				exch pop
+			}if
+		end
+	}if
+	dup/sep_colorspace_dict exch AGMCORE_gput
+	begin
+	CSA map_csa
+	/AGMCORE_sep_special Name dup()eq exch(All)eq or store
+	AGMCORE_avoid_L2_sep_space{
+		[/Indexed MappedCSA sep_proc_name 255 exch 
+			{255 div}/exec cvx 3 -1 roll[4 1 roll load/exec cvx]cvx 
+		]setcolorspace_opt
+		/TintProc{
+			255 mul round cvi setcolor
+		}bdf
+	}{
+		MappedCSA 0 get/DeviceCMYK eq 
+		currentdict/Components known and 
+		AGMCORE_sep_special not and{
+			/TintProc[
+				Components aload pop Name findcmykcustomcolor 
+				/exch cvx/setcustomcolor cvx
+			]cvx bdf
+		}{
+ 			AGMCORE_host_sep Name(All)eq and{
+ 				/TintProc{
+					1 exch sub setseparationgray 
+				}bdf
+ 			}{
+				AGMCORE_in_rip_sep MappedCSA 0 get/DeviceCMYK eq and 
+				AGMCORE_host_sep or
+				Name()eq and{
+					/TintProc[
+						MappedCSA sep_proc_name exch 0 get/DeviceCMYK eq{
+							cvx/setcmykcolor cvx
+						}{
+							cvx/setgray cvx
+						}ifelse
+					]cvx bdf
+				}{
+					AGMCORE_producing_seps MappedCSA 0 get dup/DeviceCMYK eq exch/DeviceGray eq or and AGMCORE_sep_special not and{
+	 					/TintProc[
+							/dup cvx
+							MappedCSA sep_proc_name cvx exch
+							0 get/DeviceGray eq{
+								1/exch cvx/sub cvx 0 0 0 4 -1/roll cvx
+							}if
+							/Name cvx/findcmykcustomcolor cvx/exch cvx
+							AGMCORE_host_sep{
+								AGMCORE_is_cmyk_sep
+								/Name cvx 
+								/AGMCORE_IsSeparationAProcessColor load/exec cvx
+								/not cvx/and cvx 
+							}{
+								Name inRip_spot_has_ink not
+							}ifelse
+							[
+		 						/pop cvx 1
+							]cvx/if cvx
+							/setcustomcolor cvx
+						]cvx bdf
+ 					}{
+						/TintProc{setcolor}bdf
+						[/Separation Name MappedCSA sep_proc_name load]setcolorspace_opt
+					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+	}ifelse
+	set_crd
+	setsepcolor
+	end
+}def
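+% additive_blend/subtractive_blend: emit code that merges several colorant
+% contributions into each process channel, roughly the product of the values
+% for additive spaces and 1-(1-v1)(1-v2)... for subtractive ones.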
+/additive_blend
+{
+ 	3 dict begin
+ 	/numarrays xdf
+ 	/numcolors xdf
+ 	0 1 numcolors 1 sub
+ 		{
+ 		/c1 xdf
+ 		1
+ 		0 1 numarrays 1 sub
+ 			{
+			1 exch add/index cvx
+ 			c1/get cvx/mul cvx
+ 			}for
+ 		numarrays 1 add 1/roll cvx 
+ 		}for
+ 	numarrays[/pop cvx]cvx/repeat cvx
+ 	end
+}def
+/subtractive_blend
+{
+	3 dict begin
+	/numarrays xdf
+	/numcolors xdf
+	0 1 numcolors 1 sub
+		{
+		/c1 xdf
+		1 1
+		0 1 numarrays 1 sub
+			{
+			1 3 3 -1 roll add/index cvx 
+			c1/get cvx/sub cvx/mul cvx
+			}for
+		/sub cvx
+		numarrays 1 add 1/roll cvx
+		}for
+	numarrays[/pop cvx]cvx/repeat cvx
+	end
+}def
+/exec_tint_transform
+{
+	/TintProc[
+		/TintTransform cvx/setcolor cvx
+	]cvx bdf
+	MappedCSA setcolorspace_opt
+}bdf
+/devn_makecustomcolor
+{
+	2 dict begin
+	/names_index xdf
+	/Names xdf
+	1 1 1 1 Names names_index get findcmykcustomcolor
+	/devicen_tints AGMCORE_gget names_index get setcustomcolor
+	Names length{pop}repeat
+	end
+}bdf
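+% setdevicencolorspace: resolves spot aliases in the colorant list, rebuilding
+% Names/LookupTables; synthesizes a TintTransform from per-colorant lookup
+% tables when none was supplied; on host separation, paints only through the
+% one colorant that marks the current plate (devn_makecustomcolor) or knocks out.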
+/setdevicencolorspace
+{
+	dup/AliasedColorants known{false}{true}ifelse 
+	current_spot_alias and{
+		7 dict begin
+		/names_index 0 def
+		dup/names_len exch/Names get length def
+		/new_names names_len array def
+		/new_LookupTables names_len array def
+		/alias_cnt 0 def
+		dup/Names get
+		{
+			dup map_alias{
+				exch pop
+				dup/ColorLookup known{
+					dup begin
+					new_LookupTables names_index ColorLookup put
+					end
+				}{
+					dup/Components known{
+						dup begin
+						new_LookupTables names_index Components put
+						end
+					}{
+						dup begin
+						new_LookupTables names_index[null null null null]put
+						end
+					}ifelse
+				}ifelse
+				new_names names_index 3 -1 roll/Name get put
+				/alias_cnt alias_cnt 1 add def 
+			}{
+				/name xdf				
+				new_names names_index name put
+				dup/LookupTables known{
+					dup begin
+					new_LookupTables names_index LookupTables names_index get put
+					end
+				}{
+					dup begin
+					new_LookupTables names_index[null null null null]put
+					end
+				}ifelse
+			}ifelse
+			/names_index names_index 1 add def 
+		}forall
+		alias_cnt 0 gt{
+			/AliasedColorants true def
+			/lut_entry_len new_LookupTables 0 get dup length 256 ge{0 get length}{length}ifelse def
+			0 1 names_len 1 sub{
+				/names_index xdf
+				new_LookupTables names_index get dup length 256 ge{0 get length}{length}ifelse lut_entry_len ne{
+					/AliasedColorants false def
+					exit
+				}{
+					new_LookupTables names_index get 0 get null eq{
+						dup/Names get names_index get/name xdf
+						name(Cyan)eq name(Magenta)eq name(Yellow)eq name(Black)eq
+						or or or not{
+							/AliasedColorants false def
+							exit
+						}if
+					}if
+				}ifelse
+			}for
+			lut_entry_len 1 eq{
+				/AliasedColorants false def
+			}if
+			AliasedColorants{
+				dup begin
+				/Names new_names def
+				/LookupTables new_LookupTables def
+				/AliasedColorants true def
+				/NComponents lut_entry_len def
+				/TintMethod NComponents 4 eq{/Subtractive}{/Additive}ifelse def
+				/MappedCSA TintMethod/Additive eq{/DeviceRGB}{/DeviceCMYK}ifelse def
+				currentdict/TTTablesIdx known not{
+					/TTTablesIdx -1 def
+				}if
+				end
+			}if
+		}if
+		end
+	}if
+	dup/devicen_colorspace_dict exch AGMCORE_gput
+	begin
+	currentdict/AliasedColorants known{
+		AliasedColorants
+	}{
+		false
+	}ifelse
+	dup not{
+		CSA map_csa
+	}if
+	/TintTransform load type/nulltype eq or{
+		/TintTransform[
+			0 1 Names length 1 sub
+				{
+				/TTTablesIdx TTTablesIdx 1 add def
+				dup LookupTables exch get dup 0 get null eq
+					{
+					1 index
+					Names exch get
+					dup(Cyan)eq
+						{
+						pop exch
+						LookupTables length exch sub
+						/index cvx
+						0 0 0
+						}
+						{
+						dup(Magenta)eq
+							{
+							pop exch
+							LookupTables length exch sub
+							/index cvx
+							0/exch cvx 0 0
+							}{
+							(Yellow)eq
+								{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 3 -1/roll cvx 0
+								}{
+								exch
+								LookupTables length exch sub
+								/index cvx
+								0 0 0 4 -1/roll cvx
+								}ifelse
+							}ifelse
+						}ifelse
+					5 -1/roll cvx/astore cvx
+					}{
+					dup length 1 sub
+					LookupTables length 4 -1 roll sub 1 add
+					/index cvx/mul cvx/round cvx/cvi cvx/get cvx
+					}ifelse
+					Names length TTTablesIdx add 1 add 1/roll cvx
+				}for
+			Names length[/pop cvx]cvx/repeat cvx
+			NComponents Names length
+ 			TintMethod/Subtractive eq
+ 				{
+ 				subtractive_blend
+ 				}{
+ 				additive_blend
+ 				}ifelse
+		]cvx bdf
+	}if
+	AGMCORE_host_sep{
+		Names convert_to_process{
+			exec_tint_transform
+		}
+		{	
+			currentdict/AliasedColorants known{
+				AliasedColorants not
+			}{
+				false
+			}ifelse
+			5 dict begin
+			/AvoidAliasedColorants xdf
+			/painted? false def
+			/names_index 0 def
+			/names_len Names length def
+			AvoidAliasedColorants{
+				/currentspotalias current_spot_alias def
+				false set_spot_alias
+			}if
+			Names{
+				AGMCORE_is_cmyk_sep{
+					dup(Cyan)eq AGMCORE_cyan_plate and exch
+					dup(Magenta)eq AGMCORE_magenta_plate and exch
+					dup(Yellow)eq AGMCORE_yellow_plate and exch
+					(Black)eq AGMCORE_black_plate and or or or{
+						/devicen_colorspace_dict AGMCORE_gget/TintProc[
+							Names names_index/devn_makecustomcolor cvx
+						]cvx ddf
+						/painted? true def
+					}if
+					painted?{exit}if
+				}{
+					0 0 0 0 5 -1 roll findcmykcustomcolor 1 setcustomcolor currentgray 0 eq{
+					/devicen_colorspace_dict AGMCORE_gget/TintProc[
+						Names names_index/devn_makecustomcolor cvx
+					]cvx ddf
+					/painted? true def
+					exit
+					}if
+				}ifelse
+				/names_index names_index 1 add def
+			}forall
+			AvoidAliasedColorants{
+				currentspotalias set_spot_alias
+			}if
+			painted?{
+				/devicen_colorspace_dict AGMCORE_gget/names_index names_index put
+			}{
+				/devicen_colorspace_dict AGMCORE_gget/TintProc[
+					names_len[/pop cvx]cvx/repeat cvx 1/setseparationgray cvx
+ 					0 0 0 0/setcmykcolor cvx
+				]cvx ddf
+			}ifelse
+			end
+		}ifelse
+	}
+	{
+		AGMCORE_in_rip_sep{
+			Names convert_to_process not
+		}{
+			level3
+		}ifelse
+		{
+			[/DeviceN Names MappedCSA/TintTransform load]setcolorspace_opt
+			/TintProc level3 not AGMCORE_in_rip_sep and{
+				[
+					Names/length cvx[/pop cvx]cvx/repeat cvx
+				]cvx bdf
+			}{
+				{setcolor}bdf
+			}ifelse
+		}{
+			exec_tint_transform
+		}ifelse
+	}ifelse
+	set_crd
+	/AliasedColorants false def
+	end
+}def
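+% setindexedcolorspace: on host separations, bakes the palette into a string
+% of per-plate gray levels (NewLookup) by running every index through the
+% devn/sep color path; otherwise installs Indexed over the DeviceN/Separation
+% base, with Level 1/2 fallbacks.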
+/setindexedcolorspace
+{
+	dup/indexed_colorspace_dict exch AGMCORE_gput
+	begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				currentdict devncs
+			}{
+				1 currentdict sepcs
+			}ifelse
+			AGMCORE_host_sep{
+				4 dict begin
+				/compCnt/Names where{pop Names length}{1}ifelse def
+				/NewLookup HiVal 1 add string def
+				0 1 HiVal{
+					/tableIndex xdf
+					Lookup dup type/stringtype eq{
+						compCnt tableIndex map_index
+					}{
+						exec
+					}ifelse
+					/Names where{
+						pop setdevicencolor
+					}{
+						setsepcolor
+					}ifelse
+					currentgray
+					tableIndex exch
+					255 mul cvi 
+					NewLookup 3 1 roll put
+				}for
+				[/Indexed currentcolorspace HiVal NewLookup]setcolorspace_opt
+				end
+			}{
+				level3
+				{
+					currentdict/Names known{
+						[/Indexed[/DeviceN Names MappedCSA/TintTransform load]HiVal Lookup]setcolorspace_opt
+					}{
+						[/Indexed[/Separation Name MappedCSA sep_proc_name load]HiVal Lookup]setcolorspace_opt
+					}ifelse
+				}{
+				[/Indexed MappedCSA HiVal
+					[
+					currentdict/Names known{
+						Lookup dup type/stringtype eq
+							{/exch cvx CSDBase/CSD get_res/Names get length dup/mul cvx exch/getinterval cvx{255 div}/forall cvx}
+							{/exec cvx}ifelse
+							/TintTransform load/exec cvx
+					}{
+						Lookup dup type/stringtype eq
+							{/exch cvx/get cvx 255/div cvx}
+							{/exec cvx}ifelse
+							CSDBase/CSD get_res/MappedCSA get sep_proc_name exch pop/load cvx/exec cvx
+					}ifelse
+					]cvx
+				]setcolorspace_opt
+				}ifelse
+			}ifelse
+			end
+			set_crd
+		}
+		{
+			CSA map_csa
+			AGMCORE_host_sep level2 not and{
+				0 0 0 0 setcmykcolor
+			}{
+				[/Indexed MappedCSA 
+				level2 not has_color not and{
+					dup 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or{
+						pop[/DeviceGray]
+					}if
+					HiVal GrayLookup
+				}{
+					HiVal 
+					currentdict/RangeArray known{
+						{
+							/indexed_colorspace_dict AGMCORE_gget begin
+							Lookup exch 
+							dup HiVal gt{
+								pop HiVal
+							}if
+							NComponents mul NComponents getinterval{}forall
+							NComponents 1 sub -1 0{
+								RangeArray exch 2 mul 2 getinterval aload pop map255_to_range
+								NComponents 1 roll
+							}for
+							end
+						}bind
+					}{
+						Lookup
+					}ifelse
+				}ifelse
+				]setcolorspace_opt
+				set_crd
+			}ifelse
+		}ifelse
+	end
+}def
+/setindexedcolor
+{
+	AGMCORE_host_sep{
+		/indexed_colorspace_dict AGMCORE_gget
+		begin
+		currentdict/CSDBase known{
+			CSDBase/CSD get_res begin
+			currentdict/Names known{
+				map_indexed_devn
+				devn
+			}
+			{
+				Lookup 1 3 -1 roll map_index
+				sep
+			}ifelse
+			end
+		}{
+			Lookup MappedCSA/DeviceCMYK eq{4}{1}ifelse 3 -1 roll
+			map_index
+			MappedCSA/DeviceCMYK eq{setcmykcolor}{setgray}ifelse
+		}ifelse
+		end
+	}{
+		level3 not AGMCORE_in_rip_sep and/indexed_colorspace_dict AGMCORE_gget/CSDBase known and{
+			/indexed_colorspace_dict AGMCORE_gget/CSDBase get/CSD get_res begin
+			map_indexed_devn
+			devn
+			end
+		}
+		{
+			setcolor
+		}ifelse
+	}ifelse
+}def
+/ignoreimagedata
+{
+	currentoverprint not{
+		gsave
+		dup clonedict begin
+		1 setgray
+		/Decode[0 1]def
+		/DataSource<FF>def
+		/MultipleDataSources false def
+		/BitsPerComponent 8 def
+		currentdict end
+		systemdict/image gx
+		grestore
+		}if
+	consumeimagedata
+}def
+/add_res
+{
+	dup/CSD eq{
+		pop 
+		//Adobe_AGM_Core begin
+		/AGMCORE_CSD_cache load 3 1 roll put
+		end
+	}{
+		defineresource pop
+	}ifelse
+}def
+/del_res
+{
+	{
+		aload pop exch
+		dup/CSD eq{
+			pop 
+			{//Adobe_AGM_Core/AGMCORE_CSD_cache get exch undef}forall
+		}{
+			exch
+			{1 index undefineresource}forall
+			pop
+		}ifelse
+	}forall
+}def
+/get_res
+{
+	dup/CSD eq{
+		pop
+		dup type dup/nametype eq exch/stringtype eq or{
+			AGMCORE_CSD_cache exch get
+		}if
+	}{
+		findresource
+	}ifelse
+}def
+/get_csa_by_name
+{
+	dup type dup/nametype eq exch/stringtype eq or{
+		/CSA get_res
+	}if
+}def
+/paintproc_buf_init
+{
+	/count get 0 0 put
+}def
+/paintproc_buf_next
+{
+	dup/count get dup 0 get
+	dup 3 1 roll
+	1 add 0 xpt
+	get				
+}def
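+% cachepaintproc(_compress): capture a pattern PaintProc body from currentfile
+% (via SubFileDecode, optionally LZW-compressed) into ppdict as numbered
+% strings, replayed sequentially through paintproc_buf_next.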
+/cachepaintproc_compress
+{
+	5 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	/string_size 16000 def
+	/readbuffer string_size string def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	/LZWFilter 
+	{
+		exch
+		dup length 0 eq{
+			pop
+		}{
+			ppdict dup length 1 sub 3 -1 roll put
+		}ifelse
+		{string_size}{0}ifelse string
+	}/LZWEncode filter def
+	{		
+		ReadFilter readbuffer readstring
+		exch LZWFilter exch writestring
+		not{exit}if
+	}loop
+	LZWFilter closefile
+	ppdict				
+	end
+}def
+/cachepaintproc
+{
+	2 dict begin
+	currentfile exch 0 exch/SubFileDecode filter/ReadFilter exch def
+	/ppdict 20 dict def
+	currentglobal true setglobal 
+	ppdict 1 array dup 0 1 put/count xpt
+	setglobal
+	{
+		ReadFilter 16000 string readstring exch
+		ppdict dup length 1 sub 3 -1 roll put
+		not{exit}if
+	}loop
+	ppdict dup dup length 1 sub()put					
+	end	
+}def
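+% make_pattern: snaps the pattern matrix phase to the XStep/YStep grid and
+% shifts BBox by whole tile multiples, wrapping PaintProc in a matching
+% translate so tiles stay aligned in device space.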
+/make_pattern
+{
+	exch clonedict exch
+	dup matrix currentmatrix matrix concatmatrix 0 0 3 2 roll itransform
+	exch 3 index/XStep get 1 index exch 2 copy div cvi mul sub sub
+	exch 3 index/YStep get 1 index exch 2 copy div cvi mul sub sub
+	matrix translate exch matrix concatmatrix
+			 1 index begin
+		BBox 0 get XStep div cvi XStep mul/xshift exch neg def
+		BBox 1 get YStep div cvi YStep mul/yshift exch neg def
+		BBox 0 get xshift add
+		BBox 1 get yshift add
+		BBox 2 get xshift add
+		BBox 3 get yshift add
+		4 array astore
+		/BBox exch def
+		[xshift yshift/translate load null/exec load]dup
+		3/PaintProc load put cvx/PaintProc exch def
+		end
+	gsave 0 setgray
+	makepattern
+	grestore
+}def
+/set_pattern
+{
+	dup/PatternType get 1 eq{
+		dup/PaintType get 1 eq{
+			currentoverprint sop[/DeviceGray]setcolorspace 0 setgray
+		}if
+	}if
+	setpattern
+}def
+/setcolorspace_opt
+{
+	dup currentcolorspace eq{pop}{setcolorspace}ifelse
+}def
+/updatecolorrendering
+{
+	currentcolorrendering/RenderingIntent known{
+		currentcolorrendering/RenderingIntent get
+	}
+	{
+		Intent/AbsoluteColorimetric eq 
+		{
+			/absolute_colorimetric_crd AGMCORE_gget dup null eq
+		}
+		{
+			Intent/RelativeColorimetric eq
+			{
+				/relative_colorimetric_crd AGMCORE_gget dup null eq
+			}
+			{
+				Intent/Saturation eq
+				{
+					/saturation_crd AGMCORE_gget dup null eq
+				}
+				{
+					/perceptual_crd AGMCORE_gget dup null eq
+				}ifelse
+			}ifelse
+		}ifelse
+		{
+			pop null	
+		}
+		{
+			/RenderingIntent known{null}{Intent}ifelse
+		}ifelse
+	}ifelse
+	Intent ne{
+		Intent/ColorRendering{findresource}stopped
+		{
+			pop pop systemdict/findcolorrendering known
+			{
+ 				Intent findcolorrendering
+ 				{
+ 					/ColorRendering findresource true exch
+ 				}
+ 				{
+ 					/ColorRendering findresource
+					product(Xerox Phaser 5400)ne
+					exch
+ 				}ifelse
+				dup Intent/AbsoluteColorimetric eq 
+				{
+					/absolute_colorimetric_crd exch AGMCORE_gput
+				}
+				{
+					Intent/RelativeColorimetric eq
+					{
+						/relative_colorimetric_crd exch AGMCORE_gput
+					}
+					{
+						Intent/Saturation eq
+						{
+							/saturation_crd exch AGMCORE_gput
+						}
+						{
+							Intent/Perceptual eq
+							{
+								/perceptual_crd exch AGMCORE_gput
+							}
+							{
+								pop
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+				1 index{exch}{pop}ifelse
+			}
+			{false}ifelse
+		}
+		{true}ifelse
+		{
+			dup begin
+			currentdict/TransformPQR known{
+				currentdict/TransformPQR get aload pop
+				3{{}eq 3 1 roll}repeat or or
+			}
+			{true}ifelse
+			currentdict/MatrixPQR known{
+				currentdict/MatrixPQR get aload pop
+				1.0 eq 9 1 roll 0.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 1.0 eq 9 1 roll 0.0 eq 9 1 roll
+				0.0 eq 9 1 roll 0.0 eq 9 1 roll 1.0 eq
+				and and and and and and and and
+			}
+			{true}ifelse
+			end
+			or
+			{
+				clonedict begin
+				/TransformPQR[
+					{4 -1 roll 3 get dup 3 1 roll sub 5 -1 roll 3 get 3 -1 roll sub div
+					3 -1 roll 3 get 3 -1 roll 3 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 4 get dup 3 1 roll sub 5 -1 roll 4 get 3 -1 roll sub div
+					3 -1 roll 4 get 3 -1 roll 4 get dup 4 1 roll sub mul add}bind
+					{4 -1 roll 5 get dup 3 1 roll sub 5 -1 roll 5 get 3 -1 roll sub div
+					3 -1 roll 5 get 3 -1 roll 5 get dup 4 1 roll sub mul add}bind
+				]def
+				/MatrixPQR[0.8951 -0.7502 0.0389 0.2664 1.7135 -0.0685 -0.1614 0.0367 1.0296]def
+				/RangePQR[-0.3227950745 2.3229645538 -1.5003771057 3.5003465881 -0.1369979095 2.136967392]def
+				currentdict end
+			}if
+			setcolorrendering_opt
+		}if		
+	}if
+}def
+/set_crd
+{
+	AGMCORE_host_sep not level2 and{
+		currentdict/ColorRendering known{
+			ColorRendering/ColorRendering{findresource}stopped not{setcolorrendering_opt}if
+		}{
+			currentdict/Intent known{
+				updatecolorrendering
+			}if
+		}ifelse
+		currentcolorspace dup type/arraytype eq
+			{0 get}if
+		/DeviceRGB eq
+			{
+			currentdict/UCR known
+				{/UCR}{/AGMCORE_currentucr}ifelse
+			load setundercolorremoval
+			currentdict/BG known 
+				{/BG}{/AGMCORE_currentbg}ifelse
+			load setblackgeneration
+			}if
+	}if
+}def
+/set_ucrbg
+{
+	dup null eq{pop/AGMCORE_currentbg load}{/Procedure get_res}ifelse setblackgeneration
+	dup null eq{pop/AGMCORE_currentucr load}{/Procedure get_res}ifelse setundercolorremoval
+}def
+/setcolorrendering_opt
+{
+	dup currentcolorrendering eq{
+		pop
+	}{
+		clonedict
+		begin
+			/Intent Intent def
+			currentdict
+		end
+		setcolorrendering
+	}ifelse
+}def
+/cpaint_gcomp
+{
+	convert_to_process//Adobe_AGM_Core/AGMCORE_ConvertToProcess xddf
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get not
+	{
+		(%end_cpaint_gcomp)flushinput
+	}if
+}def
+/cpaint_gsep
+{
+	//Adobe_AGM_Core/AGMCORE_ConvertToProcess get
+	{	
+		(%end_cpaint_gsep)flushinput
+	}if
+}def
+/cpaint_gend
+{np}def
+/T1_path
+{
+	currentfile token pop currentfile token pop mo
+	{
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 exch rlineto 
+		currentfile token pop dup type/stringtype eq
+			{pop exit}if 
+		0 rlineto
+	}loop
+}def
+/T1_gsave
+	level3
+	{/clipsave}
+	{/gsave}ifelse
+	load def
+/T1_grestore
+	level3
+	{/cliprestore}
+	{/grestore}ifelse 
+	load def
+/set_spot_alias_ary
+{
+	dup inherit_aliases
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry xddf
+}def
+/set_spot_normalization_ary
+{
+	dup inherit_aliases
+	dup length
+	/AGMCORE_SpotAliasAry where{pop AGMCORE_SpotAliasAry length add}if
+	array
+	//Adobe_AGM_Core/AGMCORE_SpotAliasAry2 xddf
+	/AGMCORE_SpotAliasAry where{
+		pop
+		AGMCORE_SpotAliasAry2 0 AGMCORE_SpotAliasAry putinterval
+		AGMCORE_SpotAliasAry length
+	}{0}ifelse
+	AGMCORE_SpotAliasAry2 3 1 roll exch putinterval
+	true set_spot_alias
+}def
+/inherit_aliases
+{
+	{dup/Name get map_alias{/CSD put}{pop}ifelse}forall
+}def
+/set_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias 3 -1 roll put
+	}{
+		pop
+	}ifelse
+}def
+/current_spot_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		/AGMCORE_current_spot_alias get
+	}{
+		false
+	}ifelse
+}def
+/map_alias
+{
+	/AGMCORE_SpotAliasAry2 where{
+		begin
+			/AGMCORE_name xdf
+			false	
+			AGMCORE_SpotAliasAry2{
+				dup/Name get AGMCORE_name eq{
+					/CSD get/CSD get_res
+					exch pop true
+					exit
+				}{
+					pop
+				}ifelse
+			}forall
+		end
+	}{
+		pop false
+	}ifelse
+}bdf
+/spot_alias
+{
+	true set_spot_alias
+	/AGMCORE_&setcustomcolor AGMCORE_key_known not{
+		//Adobe_AGM_Core/AGMCORE_&setcustomcolor/setcustomcolor load put
+	}if
+	/customcolor_tint 1 AGMCORE_gput
+	//Adobe_AGM_Core begin
+	/setcustomcolor
+	{
+		//Adobe_AGM_Core begin
+		dup/customcolor_tint exch AGMCORE_gput
+		1 index aload pop pop 1 eq exch 1 eq and exch 1 eq and exch 1 eq and not
+		current_spot_alias and{1 index 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/sep_colorspace_dict AGMCORE_gget null ne
+			3 1 roll 2 index{
+				exch pop/sep_tint AGMCORE_gget exch
+			}if
+			mark 3 1 roll
+			setsepcolorspace
+			counttomark 0 ne{
+				setsepcolor
+			}if
+			pop
+			not{/sep_tint 1.0 AGMCORE_gput}if
+			pop
+			true set_spot_alias
+		}{
+			AGMCORE_&setcustomcolor
+		}ifelse
+		end
+	}bdf
+	end
+}def
+/begin_feature
+{
+	Adobe_AGM_Core/AGMCORE_feature_dictCount countdictstack put
+	count Adobe_AGM_Core/AGMCORE_feature_opCount 3 -1 roll put
+	{Adobe_AGM_Core/AGMCORE_feature_ctm matrix currentmatrix put}if
+}def
+/end_feature
+{
+	2 dict begin
+	/spd/setpagedevice load def
+	/setpagedevice{get_gstate spd set_gstate}def
+	stopped{$error/newerror false put}if
+	end
+	count Adobe_AGM_Core/AGMCORE_feature_opCount get sub dup 0 gt{{pop}repeat}{pop}ifelse
+	countdictstack Adobe_AGM_Core/AGMCORE_feature_dictCount get sub dup 0 gt{{end}repeat}{pop}ifelse
+	{Adobe_AGM_Core/AGMCORE_feature_ctm get setmatrix}if
+}def
+/set_negative
+{
+	//Adobe_AGM_Core begin
+	/AGMCORE_inverting exch def
+	level2{
+		currentpagedevice/NegativePrint known AGMCORE_distilling not and{
+			currentpagedevice/NegativePrint get//Adobe_AGM_Core/AGMCORE_inverting get ne{
+				true begin_feature true{
+						<</NegativePrint//Adobe_AGM_Core/AGMCORE_inverting get>>setpagedevice
+				}end_feature
+			}if
+			/AGMCORE_inverting false def
+		}if
+	}if
+	AGMCORE_inverting{
+		[{1 exch sub}/exec load dup currenttransfer exch]cvx bind settransfer
+ 		AGMCORE_distilling{
+ 			erasepage
+ 		}{
+ 			gsave np clippath 1/setseparationgray where{pop setseparationgray}{setgray}ifelse
+ 			/AGMIRS_&fill where{pop AGMIRS_&fill}{fill}ifelse grestore
+ 		}ifelse
+	}if
+	end
+}def
+/lw_save_restore_override{
+	/md where{
+		pop
+		md begin
+		initializepage
+		/initializepage{}def
+		/pmSVsetup{}def
+		/endp{}def
+		/pse{}def
+		/psb{}def
+		/orig_showpage where
+			{pop}
+			{/orig_showpage/showpage load def}
+		ifelse
+		/showpage{orig_showpage gR}def
+		end
+	}if
+}def
+/pscript_showpage_override{
+	/NTPSOct95 where
+	{
+		begin
+		showpage
+		save
+		/showpage/restore load def
+		/restore{exch pop}def
+		end
+	}if
+}def
+/driver_media_override
+{
+	/md where{
+		pop
+		md/initializepage known{
+			md/initializepage{}put
+		}if
+		md/rC known{
+			md/rC{4{pop}repeat}put
+		}if
+	}if
+	/mysetup where{
+		/mysetup[1 0 0 1 0 0]put
+	}if
+	Adobe_AGM_Core/AGMCORE_Default_CTM matrix currentmatrix put
+	level2
+		{Adobe_AGM_Core/AGMCORE_Default_PageSize currentpagedevice/PageSize get put}if
+}def
+/driver_check_media_override
+{
+ 	/PrepsDict where
+ 		{pop}
+		{
+		Adobe_AGM_Core/AGMCORE_Default_CTM get matrix currentmatrix ne
+		Adobe_AGM_Core/AGMCORE_Default_PageSize get type/arraytype eq
+			{
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 0 get currentpagedevice/PageSize get 0 get eq and
+			Adobe_AGM_Core/AGMCORE_Default_PageSize get 1 get currentpagedevice/PageSize get 1 get eq and
+			}if
+			{
+			Adobe_AGM_Core/AGMCORE_Default_CTM get setmatrix
+			}if
+		}ifelse
+}def
+AGMCORE_err_strings begin
+	/AGMCORE_bad_environ(Environment not satisfactory for this job. Ensure that the PPD is correct or that the PostScript level requested is supported by this printer. )def
+	/AGMCORE_color_space_onhost_seps(This job contains colors that will not separate with on-host methods. )def
+	/AGMCORE_invalid_color_space(This job contains an invalid color space. )def
+end
+/set_def_ht
+{AGMCORE_def_ht sethalftone}def
+/set_def_flat
+{AGMCORE_Default_flatness setflat}def
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+%%BeginResource: procset Adobe_CoolType_Core 2.31 0
%%Copyright: Copyright 1997-2006 Adobe Systems Incorporated. All Rights Reserved.
%%Version: 2.31 0
10 dict begin
/Adobe_CoolType_Passthru currentdict def
/Adobe_CoolType_Core_Defined userdict/Adobe_CoolType_Core known def
Adobe_CoolType_Core_Defined
	{/Adobe_CoolType_Core userdict/Adobe_CoolType_Core get def}
if
userdict/Adobe_CoolType_Core 70 dict dup begin put
/Adobe_CoolType_Version 2.31 def
/Level2?
	systemdict/languagelevel known dup
		{pop systemdict/languagelevel get 2 ge}
	if def
Level2? not
	{
	/currentglobal false def
	/setglobal/pop load def
	/gcheck{pop false}bind def
	/currentpacking false def
	/setpacking/pop load def
	/SharedFontDirectory 0 dict def
	}
if
currentpacking
true setpacking
currentglobal false setglobal
userdict/Adobe_CoolType_Data 2 copy known not
	{2 copy 10 dict put}
if
get
	 begin
	/@opStackCountByLevel 32 dict def
	/@opStackLevel 0 def
	/@dictStackCountByLevel 32 dict def
	/@dictStackLevel 0 def
	 end
setglobal
currentglobal true setglobal
userdict/Adobe_CoolType_GVMFonts known not
	{userdict/Adobe_CoolType_GVMFonts 10 dict put}
if
setglobal
currentglobal false setglobal
userdict/Adobe_CoolType_LVMFonts known not
	{userdict/Adobe_CoolType_LVMFonts 10 dict put}
if
setglobal
/ct_VMDictPut
	{
	dup gcheck{Adobe_CoolType_GVMFonts}{Adobe_CoolType_LVMFonts}ifelse
	3 1 roll put
	}bind def
/ct_VMDictUndef
	{
	dup Adobe_CoolType_GVMFonts exch known
		{Adobe_CoolType_GVMFonts exch undef}
		{
			dup Adobe_CoolType_LVMFonts exch known
			{Adobe_CoolType_LVMFonts exch undef}
			{pop}
			ifelse
		}ifelse
	}bind def
/ct_str1 1 string def
/ct_xshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_yshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			_ct_x _ct_y moveto
			0 exch
			rmoveto
		}
		ifelse
		/_ct_i _ct_i 1 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/ct_xyshow
{
	/_ct_na exch def
	/_ct_i 0 def
	currentpoint
	/_ct_y exch def
	/_ct_x exch def
	{
		pop pop
		ct_str1 exch 0 exch put
		ct_str1 show
		{_ct_na _ct_i get}stopped 
		{pop pop}
		{
			{_ct_na _ct_i 1 add get}stopped 
			{pop pop pop}
			{
				_ct_x _ct_y moveto
				rmoveto
			}
			ifelse
		}
		ifelse
		/_ct_i _ct_i 2 add def
		currentpoint
		/_ct_y exch def
		/_ct_x exch def
	}
	exch
	@cshow
}bind def
/xsh{{@xshow}stopped{Adobe_CoolType_Data begin ct_xshow end}if}bind def
/ysh{{@yshow}stopped{Adobe_CoolType_Data begin ct_yshow end}if}bind def
/xysh{{@xyshow}stopped{Adobe_CoolType_Data begin ct_xyshow end}if}bind def
currentglobal true setglobal
/ct_T3Defs
{
/BuildChar
{
	1 index/Encoding get exch get
	1 index/BuildGlyph get exec
}bind def
/BuildGlyph
{
	exch begin
	GlyphProcs exch get exec
	end
}bind def
}bind def
setglobal
/@_SaveStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@vmState currentglobal def false setglobal
		@opStackCountByLevel
		@opStackLevel
		2 copy known not
			{
			2 copy
			3 dict dup/args
			7 index
			5 add array put
			put get
			}
			{
			get dup/args get dup length 3 index lt
				{
				dup length 5 add array exch
				1 index exch 0 exch putinterval
				1 index exch/args exch put
				}
				{pop}
			ifelse
			}
		ifelse
			begin
			count 1 sub
			1 index lt
				{pop count}
			if
			dup/argCount exch def
			dup 0 gt
				{
				args exch 0 exch getinterval 
			astore pop
				}
				{pop}
			ifelse
			count
			/restCount exch def
			end
		/@opStackLevel @opStackLevel 1 add def
		countdictstack 1 sub
		@dictStackCountByLevel exch @dictStackLevel exch put
		/@dictStackLevel @dictStackLevel 1 add def
		@vmState setglobal
		end
	}bind def
/@_RestoreStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		@opStackCountByLevel @opStackLevel get
			begin
			count restCount sub dup 0 gt
				{{pop}repeat}
				{pop}
			ifelse
			args 0 argCount getinterval{}forall
			end
		/@dictStackLevel @dictStackLevel 1 sub def
		@dictStackCountByLevel @dictStackLevel get
		end
	countdictstack exch sub dup 0 gt
		{{end}repeat}
		{pop}
	ifelse
	}bind def
/@_PopStackLevels
	{
	Adobe_CoolType_Data
		begin
		/@opStackLevel @opStackLevel 1 sub def
		/@dictStackLevel @dictStackLevel 1 sub def
		end
	}bind def
/@Raise
	{
	exch cvx exch errordict exch get exec
	stop
	}bind def
/@ReRaise
	{
	cvx $error/errorname get errordict exch get exec
	stop
	}bind def
/@Stopped
	{
	0 @#Stopped
	}bind def
/@#Stopped
	{
	@_SaveStackLevels
	stopped
		{@_RestoreStackLevels true}
		{@_PopStackLevels false}
	ifelse
	}bind def
/@Arg
	{
	Adobe_CoolType_Data
		begin
		@opStackCountByLevel @opStackLevel 1 sub get
		begin
		args exch
		argCount 1 sub exch sub get
		end
		end
	}bind def
currentglobal true setglobal
/CTHasResourceForAllBug
	Level2?
		{
		1 dict dup
				/@shouldNotDisappearDictValue true def
				Adobe_CoolType_Data exch/@shouldNotDisappearDict exch put
				begin
				count @_SaveStackLevels
					{(*){pop stop}128 string/Category resourceforall}
				stopped pop
				@_RestoreStackLevels
				currentdict Adobe_CoolType_Data/@shouldNotDisappearDict get dup 3 1 roll ne dup 3 1 roll
					{
						 /@shouldNotDisappearDictValue known
								{
										 {
												end
												currentdict 1 index eq
													{pop exit}
												if
										 }
									 loop
								}
						 if
					}
					{
						 pop
						 end
					}
				ifelse
		}
		{false}
	ifelse
	def
true setglobal
/CTHasResourceStatusBug
	Level2?
		{
		mark
			{/steveamerige/Category resourcestatus}
		stopped
			{cleartomark true}
			{cleartomark currentglobal not}
		ifelse
		}
		{false}
	ifelse
	def
setglobal
/CTResourceStatus
		{
		mark 3 1 roll
		/Category findresource
			begin
			({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
				{cleartomark false}
				{{3 2 roll pop true}{cleartomark false}ifelse}
			ifelse
			end
		}bind def
/CTWorkAroundBugs
	{
	Level2?
		{
		/cid_PreLoad/ProcSet resourcestatus
			{
			pop pop
			currentglobal
			mark
				{
				(*)
					{
					dup/CMap CTHasResourceStatusBug
						{CTResourceStatus}
						{resourcestatus}
					ifelse
						{
						pop dup 0 eq exch 1 eq or
							{
							dup/CMap findresource gcheck setglobal
							/CMap undefineresource
							}
							{
							pop CTHasResourceForAllBug
								{exit}
								{stop}
							ifelse
							}
						ifelse
						}
						{pop}
					ifelse
					}
				128 string/CMap resourceforall
				}
			stopped
				{cleartomark}
			stopped pop
			setglobal
			}
		if
		}
	if
	}bind def
/ds
	{
	Adobe_CoolType_Core
		begin
		CTWorkAroundBugs
		/mo/moveto load def
		/nf/newencodedfont load def
		/msf{makefont setfont}bind def
		/uf{dup undefinefont ct_VMDictUndef}bind def
		/ur/undefineresource load def
		/chp/charpath load def
		/awsh/awidthshow load def
		/wsh/widthshow load def
		/ash/ashow load def
		/@xshow/xshow load def
		/@yshow/yshow load def
		/@xyshow/xyshow load def
		/@cshow/cshow load def
		/sh/show load def
		/rp/repeat load def
		/.n/.notdef def
		end
		currentglobal false setglobal
	 userdict/Adobe_CoolType_Data 2 copy known not
		 {2 copy 10 dict put}
		if
		get
		begin
		/AddWidths? false def
		/CC 0 def
		/charcode 2 string def
		/@opStackCountByLevel 32 dict def
		/@opStackLevel 0 def
		/@dictStackCountByLevel 32 dict def
		/@dictStackLevel 0 def
		/InVMFontsByCMap 10 dict def
		/InVMDeepCopiedFonts 10 dict def
		end
		setglobal
	}bind def
/dt
	{
	currentdict Adobe_CoolType_Core eq
		{end}
	if
	}bind def
/ps
	{
	Adobe_CoolType_Core begin
	Adobe_CoolType_GVMFonts begin
	Adobe_CoolType_LVMFonts begin
	SharedFontDirectory begin
	}bind def
/pt
	{
	end
	end
	end
	end
	}bind def
/unload
	{
	systemdict/languagelevel known
		{
		systemdict/languagelevel get 2 ge
			{
			userdict/Adobe_CoolType_Core 2 copy known
				{undef}
				{pop pop}
			ifelse
			}
		if
		}
	if
	}bind def
/ndf
	{
	1 index where
		{pop pop pop}
		{dup xcheck{bind}if def}
	ifelse
	}def
/findfont systemdict
	begin
	userdict
		begin
		/globaldict where{/globaldict get begin}if
			dup where pop exch get
		/globaldict where{pop end}if
		end
	end
Adobe_CoolType_Core_Defined
	{/systemfindfont exch def}
	{
	/findfont 1 index def
	/systemfindfont exch def
	}
ifelse
/undefinefont
	{pop}ndf
/copyfont
	{
	currentglobal 3 1 roll
	1 index gcheck setglobal
	dup null eq{0}{dup length}ifelse
	2 index length add 1 add dict
		begin
		exch
			{
			1 index/FID eq
				{pop pop}
				{def}
			ifelse
			}
		forall
		dup null eq
			{pop}
			{{def}forall}
		ifelse
		currentdict
		end
	exch setglobal
	}bind def
/copyarray
	{
	currentglobal exch
	dup gcheck setglobal
	dup length array copy
	exch setglobal
	}bind def
/newencodedfont
	{
	currentglobal
		{
		SharedFontDirectory 3 index known
			{SharedFontDirectory 3 index get/FontReferenced known}
			{false}
		ifelse
		}
		{
		FontDirectory 3 index known
			{FontDirectory 3 index get/FontReferenced known}
			{
			SharedFontDirectory 3 index known
				{SharedFontDirectory 3 index get/FontReferenced known}
				{false}
			ifelse
			}
		ifelse
		}
	ifelse
	dup
		{
		3 index findfont/FontReferenced get
		2 index dup type/nametype eq
			{findfont}
		if ne
			{pop false}
		if
		}
	if
	dup
		{
		1 index dup type/nametype eq
			{findfont}
		 if
		dup/CharStrings known
			{
			/CharStrings get length
			4 index findfont/CharStrings get length
			ne
				{
				pop false
				}
			if 
			}
			{pop}
			ifelse
		}
	if
		{
		pop
		1 index findfont
		/Encoding get exch
		0 1 255
			{2 copy get 3 index 3 1 roll put}
		for
		pop pop pop
		}
		{
		currentglobal
	 4 1 roll
		dup type/nametype eq
		 {findfont}
	 if
	 dup gcheck setglobal
		dup dup maxlength 2 add dict
			begin
			exch
				{
				1 index/FID ne
				2 index/Encoding ne and
					{def}
					{pop pop}
				ifelse
				}
			forall
			/FontReferenced exch def
			/Encoding exch dup length array copy def
			/FontName 1 index dup type/stringtype eq{cvn}if def dup
			currentdict
			end
		definefont ct_VMDictPut
		setglobal
		}
	ifelse
	}bind def
/SetSubstituteStrategy
	{
	$SubstituteFont
		begin
		dup type/dicttype ne
			{0 dict}
		if
		currentdict/$Strategies known
			{
			exch $Strategies exch 
			2 copy known
				{
				get
				2 copy maxlength exch maxlength add dict
					begin
					{def}forall
					{def}forall
					currentdict
					dup/$Init known
						{dup/$Init get exec}
					if
					end
				/$Strategy exch def
				}
				{pop pop pop}
			ifelse
			}
			{pop pop}
		ifelse
		end
	}bind def
/scff
	{
	$SubstituteFont
		begin
		dup type/stringtype eq
			{dup length exch}
			{null}
		ifelse
		/$sname exch def
		/$slen exch def
		/$inVMIndex
			$sname null eq
				{
				1 index $str cvs
				dup length $slen sub $slen getinterval cvn
				}
				{$sname}
			ifelse def
		end
		{findfont}
	@Stopped
		{
		dup length 8 add string exch
		1 index 0(BadFont:)putinterval
		1 index exch 8 exch dup length string cvs putinterval cvn
			{findfont}
		@Stopped
			{pop/Courier findfont}
		if
		}
	if
	$SubstituteFont
		begin
		/$sname null def
		/$slen 0 def
		/$inVMIndex null def
		end
	}bind def
/isWidthsOnlyFont
	{
	dup/WidthsOnly known
		{pop pop true}
		{
		dup/FDepVector known
			{/FDepVector get{isWidthsOnlyFont dup{exit}if}forall}
			{
			dup/FDArray known
				{/FDArray get{isWidthsOnlyFont dup{exit}if}forall}
				{pop}
			ifelse
			}
		ifelse
		}
	ifelse
	}bind def
/ct_StyleDicts 4 dict dup begin
		 /Adobe-Japan1 4 dict dup begin
					 Level2?
								{
								/Serif
								/HeiseiMin-W3-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMin-W3}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMin-W3/CIDFont resourcestatus
								{pop pop/HeiseiMin-W3}
								{/Ryumin-Light}
								ifelse
							}
							{/Ryumin-Light}
							ifelse
								}
								ifelse
								def
								/SansSerif
								/HeiseiKakuGo-W5-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiKakuGo-W5/CIDFont resourcestatus
								{pop pop/HeiseiKakuGo-W5}
								{/GothicBBB-Medium}
								ifelse
							}
							{/GothicBBB-Medium}
							ifelse
								}
								ifelse
								def
								/HeiseiMaruGo-W4-83pv-RKSJ-H/Font resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
							/CIDFont/Category resourcestatus
							{
								pop pop
								/HeiseiMaruGo-W4/CIDFont resourcestatus
								{pop pop/HeiseiMaruGo-W4}
								{
									/Jun101-Light-RKSJ-H/Font resourcestatus
									{pop pop/Jun101-Light}
									{SansSerif}
									ifelse
								}
								ifelse
							}
							{
								/Jun101-Light-RKSJ-H/Font resourcestatus
								{pop pop/Jun101-Light}
								{SansSerif}
								ifelse
							}
							ifelse
								}
								ifelse
								/RoundSansSerif exch def
								/Default Serif def
								}
								{
								/Serif/Ryumin-Light def
								/SansSerif/GothicBBB-Medium def
								{
								(fonts/Jun101-Light-83pv-RKSJ-H)status
								}stopped
								{pop}{
										 {pop pop pop pop/Jun101-Light}
										 {SansSerif}
										 ifelse
										 /RoundSansSerif exch def
								}ifelse
								/Default Serif def
								}
					 ifelse
		 end
		 def
		 /Adobe-Korea1 4 dict dup begin
					/Serif/HYSMyeongJo-Medium def
					/SansSerif/HYGoThic-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-GB1 4 dict dup begin
					/Serif/STSong-Light def
					/SansSerif/STHeiti-Regular def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
		 /Adobe-CNS1 4 dict dup begin
					/Serif/MKai-Medium def
					/SansSerif/MHei-Medium def
					/RoundSansSerif SansSerif def
					/Default Serif def
		 end
		 def
end
def
Level2?{currentglobal true setglobal}if
/ct_BoldRomanWidthProc 
	{
	stringwidth 1 index 0 ne{exch .03 add exch}if setcharwidth
	0 0
	}bind def
/ct_Type0WidthProc 
	{
	 dup stringwidth 0 0 moveto 
	 2 index true charpath pathbbox
	 0 -1 
	 7 index 2 div .88 
	 setcachedevice2
	 pop
	0 0
	}bind def
/ct_Type0WMode1WidthProc 
	{
	 dup stringwidth 
	 pop 2 div neg -0.88
	2 copy
	moveto 
	0 -1
	 5 -1 roll true charpath pathbbox
	 setcachedevice
	}bind def
/cHexEncoding
[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
/ct_BoldBaseFont 
	 11 dict begin
		/FontType 3 def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/Encoding cHexEncoding def 
		/_setwidthProc/ct_BoldRomanWidthProc load def
		/_bcstr1 1 string def
		/BuildChar
		{
			exch begin
				_basefont setfont
				_bcstr1 dup 0 4 -1 roll put
				dup 
				_setwidthProc
				3 copy 
				moveto				
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
systemdict/composefont known
{
/ct_DefineIdentity-H
{
	/Identity-H/CMap resourcestatus
	{
		pop pop
	}
	{
		/CIDInit/ProcSet findresource begin
		 12 dict begin
		 begincmap
		 /CIDSystemInfo 3 dict dup begin
			 /Registry(Adobe)def
			 /Ordering(Identity)def
			 /Supplement 0 def
		 end def
		 /CMapName/Identity-H def
		 /CMapVersion 1.000 def
		 /CMapType 1 def
		 1 begincodespacerange
		 <0000><FFFF>
		 endcodespacerange
		 1 begincidrange
		 <0000><FFFF>0
		 endcidrange
		 endcmap
		 CMapName currentdict/CMap defineresource pop
		 end
		 end
	 }
	 ifelse
}
def
/ct_BoldBaseCIDFont 
	 11 dict begin
		/CIDFontType 1 def
		/CIDFontName/ct_BoldBaseCIDFont def
		/FontMatrix[1 0 0 1 0 0]def
		/FontBBox[0 0 1 1]def
		/_setwidthProc/ct_Type0WidthProc load def
		/_bcstr2 2 string def
		/BuildGlyph
		{
			exch begin		 
				_basefont setfont
				_bcstr2 1 2 index 256 mod put
				_bcstr2 0 3 -1 roll 256 idiv put
				_bcstr2 dup _setwidthProc		 
				3 copy 
				moveto
				show
				_basefonto setfont
				moveto
				show
			end
		}bind def
		 currentdict
	 end 
def
}if
Level2?{setglobal}if
/ct_CopyFont{
	{
		1 index/FID ne 2 index/UniqueID ne and
		{def}{pop pop}ifelse
	}forall
}bind def
/ct_Type0CopyFont 
{
	exch
	dup length dict
	begin
	ct_CopyFont
	[
	exch
	FDepVector 
	{
		 dup/FontType get 0 eq
		{	
		1 index ct_Type0CopyFont 
		/_ctType0 exch definefont
		}
		{
		/_ctBaseFont exch
		2 index exec
		}
		 ifelse 
		 exch
	}
	forall 
	pop
	]				
	/FDepVector exch def
	currentdict
	end
}bind def
/ct_MakeBoldFont
{
	 dup/ct_SyntheticBold known
	{
		dup length 3 add dict begin 
		ct_CopyFont 
		/ct_StrokeWidth .03 0 FontMatrix idtransform pop def 
		/ct_SyntheticBold true def
		currentdict 
		end 
		definefont
	}
	{
		dup dup length 3 add dict
		begin
			ct_CopyFont
			/PaintType 2 def
			/StrokeWidth .03 0 FontMatrix idtransform pop def
			/dummybold currentdict
		end
		definefont
		dup/FontType get dup 9 ge exch 11 le and 
		{
			ct_BoldBaseCIDFont
			dup length 3 add dict copy begin
			dup/CIDSystemInfo get/CIDSystemInfo exch def
			ct_DefineIdentity-H
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefont exch def
			/_Type0Identity/Identity-H 3 -1 roll[exch]composefont
			/_basefonto exch def
			currentdict
			end
			/CIDFont defineresource
		}
		{
			ct_BoldBaseFont
			dup length 3 add dict copy begin
			/_basefont exch def
			/_basefonto exch def
			currentdict
			end
			definefont
		}
		ifelse
	}
	ifelse
}bind def
/ct_MakeBold{
	1 index 
	1 index
	findfont
	currentglobal 5 1 roll
	dup gcheck setglobal
		dup
		 /FontType get 0 eq
			{
				dup/WMode known{dup/WMode get 1 eq}{false}ifelse
				version length 4 ge
				and
					{version 0 4 getinterval cvi 2015 ge}
					{true}
				ifelse 
					{/ct_Type0WidthProc}
					{/ct_Type0WMode1WidthProc}
				ifelse
				ct_BoldBaseFont/_setwidthProc 3 -1 roll load put
						{ct_MakeBoldFont}ct_Type0CopyFont definefont
			}
			{
				dup/_fauxfont known not 1 index/SubstMaster known not and
				{
					 ct_BoldBaseFont/_setwidthProc /ct_BoldRomanWidthProc load put
					 ct_MakeBoldFont 
				}
				{
				2 index 2 index eq
					{exch pop	}
					{
						dup length dict begin
						ct_CopyFont
						currentdict
						end
						definefont 
					}
				ifelse
				}
			ifelse
			}
		 ifelse
		 pop pop pop
		 setglobal
}bind def
/?str1 256 string def
/?set
	{
	$SubstituteFont
		begin
		/$substituteFound false def
		/$fontname 1 index def
		/$doSmartSub false def
		end
	dup
	 findfont
	$SubstituteFont
		begin
		$substituteFound
			{false}
			{
			dup/FontName known
				{
				dup/FontName get $fontname eq
				1 index/DistillerFauxFont known not and
				/currentdistillerparams where
					{pop false 2 index isWidthsOnlyFont not and}
				if
				}
				{false}
			ifelse
			}
		ifelse
		exch pop
		/$doSmartSub true def
		end
		{
		5 1 roll pop pop pop pop
		findfont
		}
		{
		1 index
		findfont
		dup/FontType get 3 eq
		{
			6 1 roll pop pop pop pop pop false
		}
		{pop true}
		ifelse
		{
		$SubstituteFont
		begin
		pop pop
		/$styleArray 1 index def
		/$regOrdering 2 index def
		pop pop
		0 1 $styleArray length 1 sub
		{
			$styleArray exch get
			ct_StyleDicts $regOrdering
			2 copy known
			{
				get
				exch 2 copy known not
				{pop/Default}
				if
				get
				dup type/nametype eq
				{
				?str1 cvs length dup 1 add exch
				?str1 exch(-)putinterval
				exch dup length exch ?str1 exch 3 index exch putinterval
				add ?str1 exch 0 exch getinterval cvn
				}
				{
				pop pop/Unknown
				}
				ifelse
			}
			{
				pop pop pop pop/Unknown
			}
			ifelse
		}
		for
		end
		findfont 
		}if
		}
	ifelse
	currentglobal false setglobal 3 1 roll
	null copyfont definefont pop
	setglobal
	}bind def
setpacking
userdict/$SubstituteFont 25 dict put
1 dict
	begin
	/SubstituteFont
		dup $error exch 2 copy known
			{get}
			{pop pop{pop/Courier}bind}
		ifelse def
	/currentdistillerparams where dup
		{
		pop pop
		currentdistillerparams/CannotEmbedFontPolicy 2 copy known
			{get/Error eq}
			{pop pop false}
		ifelse
		}
	if not
		{
		countdictstack array dictstack 0 get
			begin
			userdict
				begin
				$SubstituteFont
					begin
					/$str 128 string def
					/$fontpat 128 string def
					/$slen 0 def
					/$sname null def
					/$match false def
					/$fontname null def
					/$substituteFound false def
					/$inVMIndex null def
					/$doSmartSub true def
					/$depth 0 def
					/$fontname null def
					/$italicangle 26.5 def
					/$dstack null def
					/$Strategies 10 dict dup
						begin
						/$Type3Underprint
							{
							currentglobal exch false setglobal
							11 dict
								begin
								/UseFont exch
									$WMode 0 ne
										{
										dup length dict copy
										dup/WMode $WMode put
										/UseFont exch definefont
										}
									if def
								/FontName $fontname dup type/stringtype eq{cvn}if def
								/FontType 3 def
								/FontMatrix[.001 0 0 .001 0 0]def
								/Encoding 256 array dup 0 1 255{/.notdef put dup}for pop def
								/FontBBox[0 0 0 0]def
								/CCInfo 7 dict dup
									begin
									/cc null def
									/x 0 def
									/y 0 def
									end def
								/BuildChar
									{
									exch
										begin
										CCInfo
											begin
											1 string dup 0 3 index put exch pop
											/cc exch def
											UseFont 1000 scalefont setfont
											cc stringwidth/y exch def/x exch def
											x y setcharwidth
											$SubstituteFont/$Strategy get/$Underprint get exec
											0 0 moveto cc show
											x y moveto
											end
										end
									}bind def
								currentdict
								end
							exch setglobal
							}bind def
						/$GetaTint
							2 dict dup
								begin
								/$BuildFont
									{
									dup/WMode known
										{dup/WMode get}
										{0}
									ifelse
									/$WMode exch def
									$fontname exch
									dup/FontName known
										{
										dup/FontName get
										dup type/stringtype eq{cvn}if
										}
										{/unnamedfont}
									ifelse
									exch
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
									1 index/FontName get known
										{
										pop
										Adobe_CoolType_Data/InVMDeepCopiedFonts get
										1 index get
										null copyfont
										}
										{$deepcopyfont}
									ifelse
									exch 1 index exch/FontBasedOn exch put
									dup/FontName $fontname dup type/stringtype eq{cvn}if put
									definefont
									Adobe_CoolType_Data/InVMDeepCopiedFonts get
										begin
										dup/FontBasedOn get 1 index def
										end
									}bind def
								/$Underprint
									{
									gsave
									x abs y abs gt
										{/y 1000 def}
										{/x -1000 def 500 120 translate}
									ifelse
									Level2?
										{
										[/Separation(All)/DeviceCMYK{0 0 0 1 pop}]
										setcolorspace
										}
										{0 setgray}
									ifelse
									10 setlinewidth
									x .8 mul
									[7 3]
										{
										y mul 8 div 120 sub x 10 div exch moveto
										0 y 4 div neg rlineto
										dup 0 rlineto
										0 y 4 div rlineto
										closepath
										gsave
										Level2?
											{.2 setcolor}
											{.8 setgray}
										ifelse
										fill grestore
										stroke
										}
									forall
									pop
									grestore
									}bind def
								end def
						/$Oblique
							1 dict dup
								begin
								/$BuildFont
									{
									currentglobal exch dup gcheck setglobal
									null copyfont
										begin
										/FontBasedOn
										currentdict/FontName known
											{
											FontName
											dup type/stringtype eq{cvn}if
											}
											{/unnamedfont}
										ifelse
										def
										/FontName $fontname dup type/stringtype eq{cvn}if def
										/currentdistillerparams where
											{pop}
											{
											/FontInfo currentdict/FontInfo known
												{FontInfo null copyfont}
												{2 dict}
											ifelse
											dup
												begin
												/ItalicAngle $italicangle def
												/FontMatrix FontMatrix
												[1 0 ItalicAngle dup sin exch cos div 1 0 0]
												matrix concatmatrix readonly
												end
											4 2 roll def
											def
											}
										ifelse
										FontName currentdict
										end
									definefont
									exch setglobal
									}bind def
								end def
						/$None
							1 dict dup
								begin
								/$BuildFont{}bind def
								end def
						end def
					/$Oblique SetSubstituteStrategy
					/$findfontByEnum
						{
						dup type/stringtype eq{cvn}if
						dup/$fontname exch def
						$sname null eq
							{$str cvs dup length $slen sub $slen getinterval}
							{pop $sname}
						ifelse
						$fontpat dup 0(fonts/*)putinterval exch 7 exch putinterval
						/$match false def
						$SubstituteFont/$dstack countdictstack array dictstack put
						mark
							{
							$fontpat 0 $slen 7 add getinterval
								{/$match exch def exit}
							$str filenameforall
							}
						stopped
							{
							cleardictstack
							currentdict
							true
							$SubstituteFont/$dstack get
								{
								exch
									{
									1 index eq
										{pop false}
										{true}
									ifelse
									}
									{begin false}
								ifelse
								}
							forall
							pop
							}
						if
						cleartomark
						/$slen 0 def
						$match false ne
							{$match(fonts/)anchorsearch pop pop cvn}
							{/Courier}
						ifelse
						}bind def
					/$ROS 1 dict dup
						begin
						/Adobe 4 dict dup
							begin
							/Japan1 [/Ryumin-Light/HeiseiMin-W3
										 /GothicBBB-Medium/HeiseiKakuGo-W5
										 /HeiseiMaruGo-W4/Jun101-Light]def
							/Korea1 [/HYSMyeongJo-Medium/HYGoThic-Medium]def
							/GB1	 [/STSong-Light/STHeiti-Regular]def
							/CNS1	[/MKai-Medium/MHei-Medium]def
							end def
						end def
					/$cmapname null def
					/$deepcopyfont
						{
						dup/FontType get 0 eq
							{
							1 dict dup/FontName/copied put copyfont
								begin
								/FDepVector FDepVector copyarray
								0 1 2 index length 1 sub
									{
									2 copy get $deepcopyfont
									dup/FontName/copied put
									/copied exch definefont
									3 copy put pop pop
									}
								for
								def
								currentdict
								end
							}
							{$Strategies/$Type3Underprint get exec}
						ifelse
						}bind def
					/$buildfontname
						{
						dup/CIDFont findresource/CIDSystemInfo get
							begin
							Registry length Ordering length Supplement 8 string cvs
							3 copy length 2 add add add string
							dup 5 1 roll dup 0 Registry putinterval
							dup 4 index(-)putinterval
							dup 4 index 1 add Ordering putinterval
							4 2 roll add 1 add 2 copy(-)putinterval
							end
						1 add 2 copy 0 exch getinterval $cmapname $fontpat cvs exch
						anchorsearch
							{pop pop 3 2 roll putinterval cvn/$cmapname exch def}
							{pop pop pop pop pop}
						ifelse
						length
						$str 1 index(-)putinterval 1 add
						$str 1 index $cmapname $fontpat cvs putinterval
						$cmapname length add
						$str exch 0 exch getinterval cvn
						}bind def
					/$findfontByROS
						{
						/$fontname exch def
						$ROS Registry 2 copy known
							{
							get Ordering 2 copy known
								{get}
								{pop pop[]}
							ifelse
							}
							{pop pop[]}
						ifelse
						false exch
							{
							dup/CIDFont resourcestatus
								{
								pop pop
								save
								1 index/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get}
									{false}
								ifelse
								exch pop
								exch restore
									{pop}
									{exch pop true exit}
								ifelse
								}
								{pop}
							ifelse
							}
						forall
							{$str cvs $buildfontname}
							{
							false(*)
								{
								save exch
								dup/CIDFont findresource
								dup/WidthsOnly known
									{dup/WidthsOnly get not}
									{true}
								ifelse
								exch/CIDSystemInfo get
								dup/Registry get Registry eq
								exch/Ordering get Ordering eq and and
									{exch restore exch pop true exit}
									{pop restore}
								ifelse
								}
							$str/CIDFont resourceforall
								{$buildfontname}
								{$fontname $findfontByEnum}
							ifelse
							}
						ifelse
						}bind def
					end
				end
				currentdict/$error known currentdict/languagelevel known and dup
					{pop $error/SubstituteFont known}
				if
				dup
					{$error}
					{Adobe_CoolType_Core}
				ifelse
				begin
					{
					/SubstituteFont
					/CMap/Category resourcestatus
						{
						pop pop
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{
								$sname null eq
									{dup $str cvs dup length $slen sub $slen getinterval cvn}
									{$sname}
								ifelse
								Adobe_CoolType_Data/InVMFontsByCMap get
								1 index 2 copy known
									{
									get
									false exch
										{
										pop
										currentglobal
											{
											GlobalFontDirectory 1 index known
												{exch pop true exit}
												{pop}
											ifelse
											}
											{
											FontDirectory 1 index known
												{exch pop true exit}
												{
												GlobalFontDirectory 1 index known
													{exch pop true exit}
													{pop}
												ifelse
												}
											ifelse
											}
										ifelse
										}
									forall
									}
									{pop pop false}
								ifelse
									{
									exch pop exch pop
									}
									{
									dup/CMap resourcestatus
										{
										pop pop
										dup/$cmapname exch def
										/CMap findresource/CIDSystemInfo get{def}forall
										$findfontByROS
										}
										{
										128 string cvs
										dup(-)search
											{
											3 1 roll search
												{
												3 1 roll pop
													{dup cvi}
												stopped
													{pop pop pop pop pop $findfontByEnum}
													{
													4 2 roll pop pop
													exch length
													exch
													2 index length
													2 index
													sub
													exch 1 sub -1 0
														{
														$str cvs dup length
														4 index
														0
														4 index
														4 3 roll add
														getinterval
														exch 1 index exch 3 index exch
														putinterval
														dup/CMap resourcestatus
															{
															pop pop
															4 1 roll pop pop pop
															dup/$cmapname exch def
															/CMap findresource/CIDSystemInfo get{def}forall
															$findfontByROS
															true exit
															}
															{pop}
														ifelse
														}
													for
													dup type/booleantype eq
														{pop}
														{pop pop pop $findfontByEnum}
													ifelse
													}
												ifelse
												}
												{pop pop pop $findfontByEnum}
											ifelse
											}
											{pop pop $findfontByEnum}
										ifelse
										}
									ifelse
									}
								ifelse
								}
								{//SubstituteFont exec}
							ifelse
							/$slen 0 def
							end
						}
						}
						{
						{
						$SubstituteFont
							begin
							/$substituteFound true def
							dup length $slen gt
							$sname null ne or
							$slen 0 gt and
								{$findfontByEnum}
								{//SubstituteFont exec}
							ifelse
							end
						}
						}
					ifelse
					bind readonly def
					Adobe_CoolType_Core/scfindfont/systemfindfont load put
					}
					{
					/scfindfont
						{
						$SubstituteFont
							begin
							dup systemfindfont
							dup/FontName known
								{dup/FontName get dup 3 index ne}
								{/noname true}
							ifelse
							dup
								{
								/$origfontnamefound 2 index def
								/$origfontname 4 index def/$substituteFound true def
								}
							if
							exch pop
								{
								$slen 0 gt
								$sname null ne
								3 index length $slen gt or and
									{
									pop dup $findfontByEnum findfont
									dup maxlength 1 add dict
										begin
											{1 index/FID eq{pop pop}{def}ifelse}
										forall
										currentdict
										end
									definefont
									dup/FontName known{dup/FontName get}{null}ifelse
									$origfontnamefound ne
										{
										$origfontname $str cvs print
										( substitution revised, using )print
										dup/FontName known
											{dup/FontName get}{(unspecified font)}
										ifelse
										$str cvs print(.\n)print
										}
									if
									}
									{exch pop}
								ifelse
								}
								{exch pop}
							ifelse
							end
						}bind def
					}
				ifelse
				end
			end
		Adobe_CoolType_Core_Defined not
			{
			Adobe_CoolType_Core/findfont
				{
				$SubstituteFont
					begin
					$depth 0 eq
						{
						/$fontname 1 index dup type/stringtype ne{$str cvs}if def
						/$substituteFound false def
						}
					if
					/$depth $depth 1 add def
					end
				scfindfont
				$SubstituteFont
					begin
					/$depth $depth 1 sub def
					$substituteFound $depth 0 eq and
						{
						$inVMIndex null ne
							{dup $inVMIndex $AddInVMFont}
						if
						$doSmartSub
							{
							currentdict/$Strategy known
								{$Strategy/$BuildFont get exec}
							if
							}
						if
						}
					if
					end
				}bind put
			}
		if
		}
	if
	end
/$AddInVMFont
	{
	exch/FontName 2 copy known
		{
		get
		1 dict dup begin exch 1 index gcheck def end exch
		Adobe_CoolType_Data/InVMFontsByCMap get exch
		$DictAdd
		}
		{pop pop pop}
	ifelse
	}bind def
/$DictAdd
	{
	2 copy known not
		{2 copy 4 index length dict put}
	if
	Level2? not
		{
		2 copy get dup maxlength exch length 4 index length add lt
		2 copy get dup length 4 index length add exch maxlength 1 index lt
			{
			2 mul dict
				begin
				2 copy get{forall}def
				2 copy currentdict put
				end
			}
			{pop}
		ifelse
		}
	if
	get
		begin
			{def}
		forall
		end
	}bind def
end
end
%%EndResource
currentglobal true setglobal
%%BeginResource: procset Adobe_CoolType_Utility_MAKEOCF 1.23 0
%%Copyright: Copyright 1987-2006 Adobe Systems Incorporated.
%%Version: 1.23 0
systemdict/languagelevel known dup
	{currentglobal false setglobal}
	{false}
ifelse
exch
userdict/Adobe_CoolType_Utility 2 copy known
	{2 copy get dup maxlength 27 add dict copy}
	{27 dict}
ifelse put
Adobe_CoolType_Utility
	begin
	/@eexecStartData
		 <BAB431EA07F209EB8C4348311481D9D3F76E3D15246555577D87BC510ED54E
		 118C39697FA9F6DB58128E60EB8A12FA24D7CDD2FA94D221FA9EC8DA3E5E6A1C
		 4ACECC8C2D39C54E7C946031DD156C3A6B4A09AD29E1867A>def
	/@recognizeCIDFont null def
	/ct_Level2? exch def
	/ct_Clone? 1183615869 internaldict dup
			/CCRun known not
			exch/eCCRun known not
			ct_Level2? and or def
ct_Level2?
	{globaldict begin currentglobal true setglobal}
if
	/ct_AddStdCIDMap
		ct_Level2?
			{{
				mark
				Adobe_CoolType_Utility/@recognizeCIDFont currentdict put
					{
					((Hex)57 StartData
					 0615 1e27 2c39 1c60 d8a8 cc31 fe2b f6e0
					 7aa3 e541 e21c 60d8 a8c9 c3d0 6d9e 1c60
					 d8a8 c9c2 02d7 9a1c 60d8 a849 1c60 d8a8
					 cc36 74f4 1144 b13b 77)0()/SubFileDecode filter cvx exec
					}
				stopped
					{
					 cleartomark
					 Adobe_CoolType_Utility/@recognizeCIDFont get
					 countdictstack dup array dictstack
					 exch 1 sub -1 0
						 {
						 2 copy get 3 index eq
								{1 index length exch sub 1 sub{end}repeat exit}
								{pop}
						 ifelse
						 }
					 for
					 pop pop
					 Adobe_CoolType_Utility/@eexecStartData get eexec
					}
					{cleartomark}
				ifelse
			}}
			{{
				Adobe_CoolType_Utility/@eexecStartData get eexec
			}}
		ifelse bind def
userdict/cid_extensions known
dup{cid_extensions/cid_UpdateDB known and}if
	{
	 cid_extensions
	 begin
	/cid_GetCIDSystemInfo
		{
		 1 index type/stringtype eq
			{exch cvn exch}
		 if
		 cid_extensions
			 begin
			 dup load 2 index known
				{
				 2 copy
				 cid_GetStatusInfo
				 dup null ne
					{
					 1 index load
					 3 index get
					 dup null eq
						 {pop pop cid_UpdateDB}
						 {
						 exch
						 1 index/Created get eq
							 {exch pop exch pop}
							 {pop cid_UpdateDB}
						 ifelse
						 }
					 ifelse
					}
					{pop cid_UpdateDB}
				 ifelse
				}
				{cid_UpdateDB}
			 ifelse
			 end
		}bind def
	 end
	}
if
ct_Level2?
	{end setglobal}
if
	/ct_UseNativeCapability? systemdict/composefont known def
	/ct_MakeOCF 35 dict def
	/ct_Vars 25 dict def
	/ct_GlyphDirProcs 6 dict def
	/ct_BuildCharDict 15 dict dup
		begin
		/charcode 2 string def
		/dst_string 1500 string def
		/nullstring()def
		/usewidths? true def
		end def
	ct_Level2?{setglobal}{pop}ifelse
	ct_GlyphDirProcs
		begin
		/GetGlyphDirectory
			{
			systemdict/languagelevel known
				{pop/CIDFont findresource/GlyphDirectory get}
				{
				1 index/CIDFont findresource/GlyphDirectory
				get dup type/dicttype eq
					{
					dup dup maxlength exch length sub 2 index lt
						{
						dup length 2 index add dict copy 2 index
						/CIDFont findresource/GlyphDirectory 2 index put
						}
					if
					}
				if
				exch pop exch pop
				}
			ifelse
			+
			}def
		/+
			{
			systemdict/languagelevel known
				{
				currentglobal false setglobal
				3 dict begin
					/vm exch def
				}
				{1 dict begin}
			ifelse
			/$ exch def
			systemdict/languagelevel known
				{
				vm setglobal
				/gvm currentglobal def
				$ gcheck setglobal
				}
			if
			?{$ begin}if
			}def
		/?{$ type/dicttype eq}def
		/|{
			userdict/Adobe_CoolType_Data known
				{
			Adobe_CoolType_Data/AddWidths? known
				{
				 currentdict Adobe_CoolType_Data
					begin
					 begin
						AddWidths?
								{
								Adobe_CoolType_Data/CC 3 index put
								?{def}{$ 3 1 roll put}ifelse
								CC charcode exch 1 index 0 2 index 256 idiv put
								1 index exch 1 exch 256 mod put
								stringwidth 2 array astore
								currentfont/Widths get exch CC exch put
								}
								{?{def}{$ 3 1 roll put}ifelse}
							ifelse
					end
				end
				}
				{?{def}{$ 3 1 roll put}ifelse}	ifelse
				}
				{?{def}{$ 3 1 roll put}ifelse}
			ifelse
			}def
		/!
			{
			?{end}if
			systemdict/languagelevel known
				{gvm setglobal}
			if
			end
			}def
		/:{string currentfile exch readstring pop}executeonly def
		end
	ct_MakeOCF
		begin
		/ct_cHexEncoding
		[/c00/c01/c02/c03/c04/c05/c06/c07/c08/c09/c0A/c0B/c0C/c0D/c0E/c0F/c10/c11/c12
		/c13/c14/c15/c16/c17/c18/c19/c1A/c1B/c1C/c1D/c1E/c1F/c20/c21/c22/c23/c24/c25
		/c26/c27/c28/c29/c2A/c2B/c2C/c2D/c2E/c2F/c30/c31/c32/c33/c34/c35/c36/c37/c38
		/c39/c3A/c3B/c3C/c3D/c3E/c3F/c40/c41/c42/c43/c44/c45/c46/c47/c48/c49/c4A/c4B
		/c4C/c4D/c4E/c4F/c50/c51/c52/c53/c54/c55/c56/c57/c58/c59/c5A/c5B/c5C/c5D/c5E
		/c5F/c60/c61/c62/c63/c64/c65/c66/c67/c68/c69/c6A/c6B/c6C/c6D/c6E/c6F/c70/c71
		/c72/c73/c74/c75/c76/c77/c78/c79/c7A/c7B/c7C/c7D/c7E/c7F/c80/c81/c82/c83/c84
		/c85/c86/c87/c88/c89/c8A/c8B/c8C/c8D/c8E/c8F/c90/c91/c92/c93/c94/c95/c96/c97
		/c98/c99/c9A/c9B/c9C/c9D/c9E/c9F/cA0/cA1/cA2/cA3/cA4/cA5/cA6/cA7/cA8/cA9/cAA
		/cAB/cAC/cAD/cAE/cAF/cB0/cB1/cB2/cB3/cB4/cB5/cB6/cB7/cB8/cB9/cBA/cBB/cBC/cBD
		/cBE/cBF/cC0/cC1/cC2/cC3/cC4/cC5/cC6/cC7/cC8/cC9/cCA/cCB/cCC/cCD/cCE/cCF/cD0
		/cD1/cD2/cD3/cD4/cD5/cD6/cD7/cD8/cD9/cDA/cDB/cDC/cDD/cDE/cDF/cE0/cE1/cE2/cE3
		/cE4/cE5/cE6/cE7/cE8/cE9/cEA/cEB/cEC/cED/cEE/cEF/cF0/cF1/cF2/cF3/cF4/cF5/cF6
		/cF7/cF8/cF9/cFA/cFB/cFC/cFD/cFE/cFF]def
		/ct_CID_STR_SIZE 8000 def
		/ct_mkocfStr100 100 string def
		/ct_defaultFontMtx[.001 0 0 .001 0 0]def
		/ct_1000Mtx[1000 0 0 1000 0 0]def
		/ct_raise{exch cvx exch errordict exch get exec stop}bind def
		/ct_reraise
			{cvx $error/errorname get(Error: )print dup(						 )cvs print
					errordict exch get exec stop
			}bind def
		/ct_cvnsi
			{
			1 index add 1 sub 1 exch 0 4 1 roll
				{
				2 index exch get
				exch 8 bitshift
				add
				}
			for
			exch pop
			}bind def
		/ct_GetInterval
			{
			Adobe_CoolType_Utility/ct_BuildCharDict get
				begin
				/dst_index 0 def
				dup dst_string length gt
					{dup string/dst_string exch def}
				if
				1 index ct_CID_STR_SIZE idiv
				/arrayIndex exch def
				2 index arrayIndex get
				2 index
				arrayIndex ct_CID_STR_SIZE mul
				sub
					{
					dup 3 index add 2 index length le
						{
						2 index getinterval
						dst_string dst_index 2 index putinterval
						length dst_index add/dst_index exch def
						exit
						}
						{
						1 index length 1 index sub
						dup 4 1 roll
						getinterval
						dst_string dst_index 2 index putinterval
						pop dup dst_index add/dst_index exch def
						sub
						/arrayIndex arrayIndex 1 add def
						2 index dup length arrayIndex gt
							 {arrayIndex get}
							 {
							 pop
							 exit
							 }
						ifelse
						0
						}
					ifelse
					}
				loop
				pop pop pop
				dst_string 0 dst_index getinterval
				end
			}bind def
		ct_Level2?
			{
			/ct_resourcestatus
			currentglobal mark true setglobal
				{/unknowninstancename/Category resourcestatus}
			stopped
				{cleartomark setglobal true}
				{cleartomark currentglobal not exch setglobal}
			ifelse
				{
					{
					mark 3 1 roll/Category findresource
						begin
						ct_Vars/vm currentglobal put
						({ResourceStatus}stopped)0()/SubFileDecode filter cvx exec
							{cleartomark false}
							{{3 2 roll pop true}{cleartomark false}ifelse}
						ifelse
						ct_Vars/vm get setglobal
						end
					}
				}
				{{resourcestatus}}
			ifelse bind def
			/CIDFont/Category ct_resourcestatus
				{pop pop}
				{
				currentglobal true setglobal
				/Generic/Category findresource
				dup length dict copy
				dup/InstanceType/dicttype put
				/CIDFont exch/Category defineresource pop
				setglobal
				}
			ifelse
			ct_UseNativeCapability?
				{
				/CIDInit/ProcSet findresource begin
				12 dict begin
				begincmap
				/CIDSystemInfo 3 dict dup begin
				 /Registry(Adobe)def
				 /Ordering(Identity)def
				 /Supplement 0 def
				end def
				/CMapName/Identity-H def
				/CMapVersion 1.000 def
				/CMapType 1 def
				1 begincodespacerange
				<0000><FFFF>
				endcodespacerange
				1 begincidrange
				<0000><FFFF>0
				endcidrange
				endcmap
				CMapName currentdict/CMap defineresource pop
				end
				end
				}
			if
			}
			{
			/ct_Category 2 dict begin
			/CIDFont 10 dict def
			/ProcSet	2 dict def
			currentdict
			end
			def
			/defineresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					dup dup maxlength exch length eq
						{
						dup length 10 add dict copy
						ct_Category 2 index 2 index put
						}
					if
					3 index 3 index put
					pop exch pop
					}
					{pop pop/defineresource/undefined ct_raise}
				ifelse
				}bind def
			/findresource
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index 2 copy known
						{get 3 1 roll pop pop}
						{pop pop/findresource/undefinedresource ct_raise}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/resourcestatus
				{
				ct_Category 1 index 2 copy known
					{
					get
					2 index known
					exch pop exch pop
						{
						0 -1 true
						}
						{
						false
						}
					ifelse
					}
					{pop pop/findresource/undefined ct_raise}
				ifelse
				}bind def
			/ct_resourcestatus/resourcestatus load def
			}
		ifelse
		/ct_CIDInit 2 dict
			begin
			/ct_cidfont_stream_init
				{
					{
					dup(Binary)eq
						{
						pop
						null
						currentfile
						ct_Level2?
							{
								{cid_BYTE_COUNT()/SubFileDecode filter}
							stopped
								{pop pop pop}
							if
							}
						if
						/readstring load
						exit
						}
					if
					dup(Hex)eq
						{
						pop
						currentfile
						ct_Level2?
							{
								{null exch/ASCIIHexDecode filter/readstring}
							stopped
								{pop exch pop(>)exch/readhexstring}
							if
							}
							{(>)exch/readhexstring}
						ifelse
						load
						exit
						}
					if
					/StartData/typecheck ct_raise
					}
				loop
				cid_BYTE_COUNT ct_CID_STR_SIZE le
					{
					2 copy cid_BYTE_COUNT string exch exec
					pop
					1 array dup
					3 -1 roll
					0 exch put
					}
					{
					cid_BYTE_COUNT ct_CID_STR_SIZE div ceiling cvi
					dup array exch 2 sub 0 exch 1 exch
						{
						2 copy
						5 index
						ct_CID_STR_SIZE
						string
						6 index exec
						pop
						put
						pop
						}
					for
					2 index
					cid_BYTE_COUNT ct_CID_STR_SIZE mod string
					3 index exec
					pop
					1 index exch
					1 index length 1 sub
					exch put
					}
				ifelse
				cid_CIDFONT exch/GlyphData exch put
				2 index null eq
					{
					pop pop pop
					}
					{
					pop/readstring load
					1 string exch
						{
						3 copy exec
						pop
						dup length 0 eq
							{
							pop pop pop pop pop
							true exit
							}
						if
						4 index
						eq
							{
							pop pop pop pop
							false exit
							}
						if
						}
					loop
					pop
					}
				ifelse
				}bind def
			/StartData
				{
				mark
					{
					currentdict
					dup/FDArray get 0 get/FontMatrix get
					0 get 0.001 eq
						{
						dup/CDevProc known not
							{
							/CDevProc 1183615869 internaldict/stdCDevProc 2 copy known
								{get}
								{
								pop pop
								{pop pop pop pop pop 0 -1000 7 index 2 div 880}
								}
							ifelse
							def
							}
						if
						}
						{
						/CDevProc
							{
							 pop pop pop pop pop
							 0
							 1 cid_temp/cid_CIDFONT get
							/FDArray get 0 get
							/FontMatrix get 0 get div
							 7 index 2 div
							 1 index 0.88 mul
							}def
						}
					ifelse
					/cid_temp 15 dict def
					cid_temp
						begin
						/cid_CIDFONT exch def
						3 copy pop
						dup/cid_BYTE_COUNT exch def 0 gt
							{
							ct_cidfont_stream_init
							FDArray
								{
								/Private get
								dup/SubrMapOffset known
									{
									begin
									/Subrs SubrCount array def
									Subrs
									SubrMapOffset
									SubrCount
									SDBytes
									ct_Level2?
										{
										currentdict dup/SubrMapOffset undef
										dup/SubrCount undef
										/SDBytes undef
										}
									if
									end
									/cid_SD_BYTES exch def
									/cid_SUBR_COUNT exch def
									/cid_SUBR_MAP_OFFSET exch def
									/cid_SUBRS exch def
									cid_SUBR_COUNT 0 gt
										{
										GlyphData cid_SUBR_MAP_OFFSET cid_SD_BYTES ct_GetInterval
										0 cid_SD_BYTES ct_cvnsi
										0 1 cid_SUBR_COUNT 1 sub
											{
											exch 1 index
											1 add
											cid_SD_BYTES mul cid_SUBR_MAP_OFFSET add
											GlyphData exch cid_SD_BYTES ct_GetInterval
											0 cid_SD_BYTES ct_cvnsi
											cid_SUBRS 4 2 roll
											GlyphData exch
											4 index
											1 index
											sub
											ct_GetInterval
											dup length string copy put
											}
										for
										pop
										}
									if
									}
									{pop}
								ifelse
								}
							forall
							}
						if
						cleartomark pop pop
						end
					CIDFontName currentdict/CIDFont defineresource pop
					end end
					}
				stopped
					{cleartomark/StartData ct_reraise}
				if
				}bind def
			currentdict
			end def
		/ct_saveCIDInit
			{
			/CIDInit/ProcSet ct_resourcestatus
				{true}
				{/CIDInitC/ProcSet ct_resourcestatus}
			ifelse
				{
				pop pop
				/CIDInit/ProcSet findresource
				ct_UseNativeCapability?
					{pop null}
					{/CIDInit ct_CIDInit/ProcSet defineresource pop}
				ifelse
				}
				{/CIDInit ct_CIDInit/ProcSet defineresource pop null}
			ifelse
			ct_Vars exch/ct_oldCIDInit exch put
			}bind def
		/ct_restoreCIDInit
			{
			ct_Vars/ct_oldCIDInit get dup null ne
				{/CIDInit exch/ProcSet defineresource pop}
				{pop}
			ifelse
			}bind def
		/ct_BuildCharSetUp
			{
			1 index
				begin
				CIDFont
					begin
					Adobe_CoolType_Utility/ct_BuildCharDict get
						begin
						/ct_dfCharCode exch def
						/ct_dfDict exch def
						CIDFirstByte ct_dfCharCode add
						dup CIDCount ge
							{pop 0}
						if
						/cid exch def
							{
							GlyphDirectory cid 2 copy known
								{get}
								{pop pop nullstring}
							ifelse
							dup length FDBytes sub 0 gt
								{
								dup
								FDBytes 0 ne
									{0 FDBytes ct_cvnsi}
									{pop 0}
								ifelse
								/fdIndex exch def
								dup length FDBytes sub FDBytes exch getinterval
								/charstring exch def
								exit
								}
								{
								pop
								cid 0 eq
									{/charstring nullstring def exit}
								if
								/cid 0 def
								}
							ifelse
							}
						loop
			}def
		/ct_SetCacheDevice
			{
			0 0 moveto
			dup stringwidth
			3 -1 roll
			true charpath
			pathbbox
			0 -1000
			7 index 2 div 880
			setcachedevice2
			0 0 moveto
			}def
		/ct_CloneSetCacheProc
			{
			1 eq
				{
				stringwidth
				pop -2 div -880
				0 -1000 setcharwidth
				moveto
				}
				{
				usewidths?
					{
					currentfont/Widths get cid
					2 copy known
						{get exch pop aload pop}
						{pop pop stringwidth}
					ifelse
					}
					{stringwidth}
				ifelse
				setcharwidth
				0 0 moveto
				}
			ifelse
			}def
		/ct_Type3ShowCharString
			{
			ct_FDDict fdIndex 2 copy known
				{get}
				{
				currentglobal 3 1 roll
				1 index gcheck setglobal
				ct_Type1FontTemplate dup maxlength dict copy
					begin
					FDArray fdIndex get
					dup/FontMatrix 2 copy known
						{get}
						{pop pop ct_defaultFontMtx}
					ifelse
					/FontMatrix exch dup length array copy def
					/Private get
					/Private exch def
					/Widths rootfont/Widths get def
					/CharStrings 1 dict dup/.notdef
						<d841272cf18f54fc13>dup length string copy put def
					currentdict
					end
				/ct_Type1Font exch definefont
				dup 5 1 roll put
				setglobal
				}
			ifelse
			dup/CharStrings get 1 index/Encoding get
			ct_dfCharCode get charstring put
			rootfont/WMode 2 copy known
				{get}
				{pop pop 0}
			ifelse
			exch
			1000 scalefont setfont
			ct_str1 0 ct_dfCharCode put
			ct_str1 exch ct_dfSetCacheProc
			ct_SyntheticBold
				{
				currentpoint
				ct_str1 show
				newpath
				moveto
				ct_str1 true charpath
				ct_StrokeWidth setlinewidth
				stroke
				}
				{ct_str1 show}
			ifelse
			}def
		/ct_Type4ShowCharString
			{
			ct_dfDict ct_dfCharCode charstring
			FDArray fdIndex get
			dup/FontMatrix get dup ct_defaultFontMtx ct_matrixeq not
				{ct_1000Mtx matrix concatmatrix concat}
				{pop}
			ifelse
			/Private get
			Adobe_CoolType_Utility/ct_Level2? get not
				{
				ct_dfDict/Private
				3 -1 roll
					{put}
				1183615869 internaldict/superexec get exec
				}
			if
			1183615869 internaldict
			Adobe_CoolType_Utility/ct_Level2? get
				{1 index}
				{3 index/Private get mark 6 1 roll}
			ifelse
			dup/RunInt known
				{/RunInt get}
				{pop/CCRun}
			ifelse
			get exec
			Adobe_CoolType_Utility/ct_Level2? get not
				{cleartomark}
			if
			}bind def
		/ct_BuildCharIncremental
			{
				{
				Adobe_CoolType_Utility/ct_MakeOCF get begin
				ct_BuildCharSetUp
				ct_ShowCharString
				}
			stopped
				{stop}
			if
			end
			end
			end
			end
			}bind def
		/BaseFontNameStr(BF00)def
		/ct_Type1FontTemplate 14 dict
			begin
			/FontType 1 def
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/PaintType 0 def
			currentdict
			end def
		/BaseFontTemplate 11 dict
			begin
			/FontMatrix [0.001 0 0 0.001 0 0]def
			/FontBBox [-250 -250 1250 1250]def
			/Encoding ct_cHexEncoding def
			/BuildChar/ct_BuildCharIncremental load def
			ct_Clone?
				{
				/FontType 3 def
				/ct_ShowCharString/ct_Type3ShowCharString load def
				/ct_dfSetCacheProc/ct_CloneSetCacheProc load def
				/ct_SyntheticBold false def
				/ct_StrokeWidth 1 def
				}
				{
				/FontType 4 def
				/Private 1 dict dup/lenIV 4 put def
				/CharStrings 1 dict dup/.notdef<d841272cf18f54fc13>put def
				/PaintType 0 def
				/ct_ShowCharString/ct_Type4ShowCharString load def
				}
			ifelse
			/ct_str1 1 string def
			currentdict
			end def
		/BaseFontDictSize BaseFontTemplate length 5 add def
		/ct_matrixeq
			{
			true 0 1 5
				{
				dup 4 index exch get exch 3 index exch get eq and
				dup not
					{exit}
				if
				}
			for
			exch pop exch pop
			}bind def
		/ct_makeocf
			{
			15 dict
				begin
				exch/WMode exch def
				exch/FontName exch def
				/FontType 0 def
				/FMapType 2 def
			dup/FontMatrix known
				{dup/FontMatrix get/FontMatrix exch def}
				{/FontMatrix matrix def}
			ifelse
				/bfCount 1 index/CIDCount get 256 idiv 1 add
					dup 256 gt{pop 256}if def
				/Encoding
					256 array 0 1 bfCount 1 sub{2 copy dup put pop}for
					bfCount 1 255{2 copy bfCount put pop}for
					def
				/FDepVector bfCount dup 256 lt{1 add}if array def
				BaseFontTemplate BaseFontDictSize dict copy
					begin
					/CIDFont exch def
					CIDFont/FontBBox known
						{CIDFont/FontBBox get/FontBBox exch def}
					if
					CIDFont/CDevProc known
						{CIDFont/CDevProc get/CDevProc exch def}
					if
					currentdict
					end
				BaseFontNameStr 3(0)putinterval
				0 1 bfCount dup 256 eq{1 sub}if
					{
					FDepVector exch
					2 index BaseFontDictSize dict copy
						begin
						dup/CIDFirstByte exch 256 mul def
						FontType 3 eq
							{/ct_FDDict 2 dict def}
						if
						currentdict
						end
					1 index 16
					BaseFontNameStr 2 2 getinterval cvrs pop
					BaseFontNameStr exch definefont
					put
					}
				for
				ct_Clone?
					{/Widths 1 index/CIDFont get/GlyphDirectory get length dict def}
				if
				FontName
				currentdict
				end
			definefont
			ct_Clone?
				{
				gsave
				dup 1000 scalefont setfont
				ct_BuildCharDict
					begin
					/usewidths? false def
					currentfont/Widths get
						begin
						exch/CIDFont get/GlyphDirectory get
							{
							pop
							dup charcode exch 1 index 0 2 index 256 idiv put
							1 index exch 1 exch 256 mod put
							stringwidth 2 array astore def
							}
						forall
						end
					/usewidths? true def
					end
				grestore
				}
				{exch pop}
			ifelse
			}bind def
		currentglobal true setglobal
		/ct_ComposeFont
			{
			ct_UseNativeCapability?
				{				
				2 index/CMap ct_resourcestatus
					{pop pop exch pop}
					{
					/CIDInit/ProcSet findresource
						begin
						12 dict
							begin
							begincmap
							/CMapName 3 index def
							/CMapVersion 1.000 def
							/CMapType 1 def
							exch/WMode exch def
							/CIDSystemInfo 3 dict dup
								begin
								/Registry(Adobe)def
								/Ordering
								CMapName ct_mkocfStr100 cvs
								(Adobe-)search
									{
									pop pop
									(-)search
										{
										dup length string copy
										exch pop exch pop
										}
										{pop(Identity)}
									ifelse
									}
									{pop (Identity)}
								ifelse
								def
								/Supplement 0 def
								end def
							1 begincodespacerange
							<0000><FFFF>
							endcodespacerange
							1 begincidrange
							<0000><FFFF>0
							endcidrange
							endcmap
							CMapName currentdict/CMap defineresource pop
							end
						end
					}
				ifelse
				composefont
				}
				{
				3 2 roll pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
			setglobal
		/ct_MakeIdentity
			{
			ct_UseNativeCapability?
				{
				1 index/CMap ct_resourcestatus
					{pop pop}
					{
					/CIDInit/ProcSet findresource begin
					12 dict begin
					begincmap
					/CMapName 2 index def
					/CMapVersion 1.000 def
					/CMapType 1 def
					/CIDSystemInfo 3 dict dup
						begin
						/Registry(Adobe)def
						/Ordering
						CMapName ct_mkocfStr100 cvs
						(Adobe-)search
							{
							pop pop
							(-)search
								{dup length string copy exch pop exch pop}
								{pop(Identity)}
							ifelse
							}
							{pop(Identity)}
						ifelse
						def
						/Supplement 0 def
						end def
					1 begincodespacerange
					<0000><FFFF>
					endcodespacerange
					1 begincidrange
					<0000><FFFF>0
					endcidrange
					endcmap
					CMapName currentdict/CMap defineresource pop
					end
					end
					}
				ifelse
				composefont
				}
				{
				exch pop
				0 get/CIDFont findresource
				ct_makeocf
				}
			ifelse
			}bind def
		currentdict readonly pop
		end
	end
%%EndResource
setglobal
%%BeginResource: procset Adobe_CoolType_Utility_T42 1.0 0
%%Copyright: Copyright 1987-2004 Adobe Systems Incorporated.
%%Version: 1.0 0
userdict/ct_T42Dict 15 dict put
ct_T42Dict begin
/Is2015?
{
 version
 cvi
 2015
 ge
}bind def
/AllocGlyphStorage
{
 Is2015?
 {	
	pop
 }
 {
	{string}forall
 }ifelse
}bind def
/Type42DictBegin
{
25 dict begin
 /FontName exch def
 /CharStrings 256 dict 
begin
	 /.notdef 0 def
	 currentdict 
end def
 /Encoding exch def
 /PaintType 0 def
 /FontType 42 def
 /FontMatrix[1 0 0 1 0 0]def
 4 array astore cvx/FontBBox exch def
 /sfnts
}bind def
/Type42DictEnd 
{
 currentdict dup/FontName get exch definefont end
ct_T42Dict exch
dup/FontName get exch put
}bind def
/RD{string currentfile exch readstring pop}executeonly def
/PrepFor2015
{
Is2015?
{		 
	/GlyphDirectory 
	 16
	 dict def
	 sfnts 0 get
	 dup
	 2 index
	(glyx)
	 putinterval
	 2 index 
	(locx)
	 putinterval
	 pop
	 pop
}
{
	 pop
	 pop
}ifelse			
}bind def
/AddT42Char
{
Is2015?
{
	/GlyphDirectory get 
	begin
	def
	end
	pop
	pop
}
{
	/sfnts get
	4 index
	get
	3 index
 2 index
	putinterval
	pop
	pop
	pop
	pop
}ifelse
}bind def
/T0AddT42Mtx2
{
/CIDFont findresource/Metrics2 get begin def end
}bind def
end
%%EndResource
currentglobal true setglobal
%%BeginFile: MMFauxFont.prc
%%Copyright: Copyright 1987-2001 Adobe Systems Incorporated. 
%%All Rights Reserved.
userdict /ct_EuroDict 10 dict put
ct_EuroDict begin
/ct_CopyFont 
{
    { 1 index /FID ne {def} {pop pop} ifelse} forall
} def
/ct_GetGlyphOutline
{
   gsave
   initmatrix newpath
   exch findfont dup 
   length 1 add dict 
   begin 
		ct_CopyFont 
		/Encoding Encoding dup length array copy 
		dup
		4 -1 roll
		0 exch put   
		def
		currentdict
   end
   /ct_EuroFont exch definefont
   1000 scalefont setfont
   0 0 moveto
   [
       <00> stringwidth 
       <00> false charpath
       pathbbox
       [
       {/m cvx} {/l cvx} {/c cvx} {/cp cvx} pathforall
   grestore
   counttomark 8 add
}
def
/ct_MakeGlyphProc
{
   ] cvx
   /ct_PSBuildGlyph cvx
   ] cvx
} def
/ct_PSBuildGlyph 
{ 
 	gsave 
	8 -1 roll pop 
	7 1 roll 
        6 -2 roll ct_FontMatrix transform 6 2 roll
        4 -2 roll ct_FontMatrix transform 4 2 roll
        ct_FontMatrix transform 
	currentdict /PaintType 2 copy known {get 2 eq}{pop pop false} ifelse  
	dup  9 1 roll 
	{  
		currentdict /StrokeWidth 2 copy known  
		{   
			get 2 div   
			0 ct_FontMatrix dtransform pop
			5 1 roll  
			4 -1 roll 4 index sub   
			4 1 roll   
			3 -1 roll 4 index sub  
			3 1 roll   
			exch 4 index add exch  
			4 index add  
			5 -1 roll pop  
		}  
		{	 
			pop pop 
		}  
		ifelse  
	}       
    if  
	setcachedevice  
        ct_FontMatrix concat
        ct_PSPathOps begin 
		exec 
	end 
	{  
		currentdict /StrokeWidth 2 copy known  
			{ get }  
			{ pop pop 0 }  
  	    ifelse  
		setlinewidth stroke  
	}  
	{   
	    fill  
	}  
	ifelse  
    grestore
} def 
/ct_PSPathOps 4 dict dup begin 
	/m {moveto} def 
	/l {lineto} def 
	/c {curveto} def 
	/cp {closepath} def 
end 
def 
/ct_matrix1000 [1000 0 0 1000 0 0] def
/ct_AddGlyphProc  
{
   2 index findfont dup length 4 add dict 
   begin 
	ct_CopyFont 
	/CharStrings CharStrings dup length 1 add dict copy
      begin
         3 1 roll def  
         currentdict 
      end 
      def
      /ct_FontMatrix ct_matrix1000 FontMatrix matrix concatmatrix def
      /ct_PSBuildGlyph /ct_PSBuildGlyph load def
      /ct_PSPathOps /ct_PSPathOps load def
      currentdict
   end
   definefont pop
}
def
systemdict /languagelevel known
{
	/ct_AddGlyphToPrinterFont {
		2 copy
		ct_GetGlyphOutline 3 add -1 roll restore 
		ct_MakeGlyphProc 
		ct_AddGlyphProc
	} def
}
{
	/ct_AddGlyphToPrinterFont {
	    pop pop restore
		Adobe_CTFauxDict /$$$FONTNAME get
		/Euro
		Adobe_CTFauxDict /$$$SUBSTITUTEBASE get
		ct_EuroDict exch get
		ct_AddGlyphProc
	} def
} ifelse
/AdobeSansMM 
{ 
556 0 24 -19 541 703 
	{ 
	541 628 m 
	510 669 442 703 354 703 c 
	201 703 117 607 101 444 c 
	50 444 l 
	25 372 l 
	97 372 l 
	97 301 l 
	49 301 l 
	24 229 l 
	103 229 l 
	124 67 209 -19 350 -19 c 
	435 -19 501 25 509 32 c 
	509 131 l 
	492 105 417 60 343 60 c 
	267 60 204 127 197 229 c 
	406 229 l 
	430 301 l 
	191 301 l 
	191 372 l 
	455 372 l 
	479 444 l 
	194 444 l 
	201 531 245 624 348 624 c 
	433 624 484 583 509 534 c 
	cp 
	556 0 m 
	}
ct_PSBuildGlyph
} def
/AdobeSerifMM 
{ 
500 0 10 -12 484 692 
	{ 
	347 298 m 
	171 298 l 
	170 310 170 322 170 335 c 
	170 362 l 
	362 362 l 
	374 403 l 
	172 403 l 
	184 580 244 642 308 642 c 
	380 642 434 574 457 457 c 
	481 462 l 
	474 691 l 
	449 691 l 
	433 670 429 657 410 657 c 
	394 657 360 692 299 692 c 
	204 692 94 604 73 403 c 
	22 403 l 
	10 362 l 
	70 362 l 
	69 352 69 341 69 330 c 
	69 319 69 308 70 298 c 
	22 298 l 
	10 257 l 
	73 257 l 
	97 57 216 -12 295 -12 c 
	364 -12 427 25 484 123 c 
	458 142 l 
	425 101 384 37 316 37 c 
	256 37 189 84 173 257 c 
	335 257 l 
	cp 
	500 0 m 
	} 
ct_PSBuildGlyph 
} def 
end		
%%EndFile
setglobal
Adobe_CoolType_Core begin /$Oblique SetSubstituteStrategy end
%%BeginResource: procset Adobe_AGM_Image 1.0 0
+%%Version: 1.0 0
+%%Copyright: Copyright(C)2000-2006 Adobe Systems, Inc. All Rights Reserved.
+systemdict/setpacking known
+{
+	currentpacking
+	true setpacking
+}if
+userdict/Adobe_AGM_Image 71 dict dup begin put
+/Adobe_AGM_Image_Id/Adobe_AGM_Image_1.0_0 def
+/nd{
+	null def
+}bind def
+/AGMIMG_&image nd
+/AGMIMG_&colorimage nd
+/AGMIMG_&imagemask nd
+/AGMIMG_mbuf()def
+/AGMIMG_ybuf()def
+/AGMIMG_kbuf()def
+/AGMIMG_c 0 def
+/AGMIMG_m 0 def
+/AGMIMG_y 0 def
+/AGMIMG_k 0 def
+/AGMIMG_tmp nd
+/AGMIMG_imagestring0 nd
+/AGMIMG_imagestring1 nd
+/AGMIMG_imagestring2 nd
+/AGMIMG_imagestring3 nd
+/AGMIMG_imagestring4 nd
+/AGMIMG_imagestring5 nd
+/AGMIMG_cnt nd
+/AGMIMG_fsave nd
+/AGMIMG_colorAry nd
+/AGMIMG_override nd
+/AGMIMG_name nd
+/AGMIMG_maskSource nd
+/AGMIMG_flushfilters nd
+/invert_image_samples nd
+/knockout_image_samples	nd
+/img nd
+/sepimg nd
+/devnimg nd
+/idximg nd
+/ds
+{
+	Adobe_AGM_Core begin
+	Adobe_AGM_Image begin
+	/AGMIMG_&image systemdict/image get def
+	/AGMIMG_&imagemask systemdict/imagemask get def
+	/colorimage where{
+		pop
+		/AGMIMG_&colorimage/colorimage ldf
+	}if
+	end
+	end
+}def
+/ps
+{
+	Adobe_AGM_Image begin
+	/AGMIMG_ccimage_exists{/customcolorimage where 
+		{
+			pop
+			/Adobe_AGM_OnHost_Seps where
+			{
+			pop false
+			}{
+			/Adobe_AGM_InRip_Seps where
+				{
+				pop false
+				}{
+					true
+				}ifelse
+			}ifelse
+			}{
+			false
+		}ifelse 
+	}bdf
+	level2{
+		/invert_image_samples
+		{
+			Adobe_AGM_Image/AGMIMG_tmp Decode length ddf
+			/Decode[Decode 1 get Decode 0 get]def
+		}def
+		/knockout_image_samples
+		{
+			Operator/imagemask ne{
+				/Decode[1 1]def
+			}if
+		}def
+	}{	
+		/invert_image_samples
+		{
+			{1 exch sub}currenttransfer addprocs settransfer
+		}def
+		/knockout_image_samples
+		{
+			{pop 1}currenttransfer addprocs settransfer
+		}def
+	}ifelse
+	/img/imageormask ldf
+	/sepimg/sep_imageormask ldf
+	/devnimg/devn_imageormask ldf
+	/idximg/indexed_imageormask ldf
+	/_ctype 7 def
+	currentdict{
+		dup xcheck 1 index type dup/arraytype eq exch/packedarraytype eq or and{
+			bind
+		}if
+		def
+	}forall
+}def
+/pt
+{
+	end
+}def
+/dt
+{
+}def
+/AGMIMG_flushfilters
+{
+	dup type/arraytype ne
+		{1 array astore}if
+	dup 0 get currentfile ne
+		{dup 0 get flushfile}if
+		{
+		dup type/filetype eq
+			{
+			dup status 1 index currentfile ne and
+				{closefile}
+				{pop}
+			ifelse
+			}{pop}ifelse
+		}forall
+}def
+/AGMIMG_init_common
+{
+	currentdict/T known{/ImageType/T ldf currentdict/T undef}if
+	currentdict/W known{/Width/W ldf currentdict/W undef}if
+	currentdict/H known{/Height/H ldf currentdict/H undef}if
+	currentdict/M known{/ImageMatrix/M ldf currentdict/M undef}if
+	currentdict/BC known{/BitsPerComponent/BC ldf currentdict/BC undef}if
+	currentdict/D known{/Decode/D ldf currentdict/D undef}if
+	currentdict/DS known{/DataSource/DS ldf currentdict/DS undef}if
+	currentdict/O known{
+		/Operator/O load 1 eq{
+			/imagemask
+		}{
+			/O load 2 eq{
+				/image 
+			}{
+				/colorimage
+			}ifelse
+		}ifelse
+		def
+		currentdict/O undef
+	}if
+	currentdict/HSCI known{/HostSepColorImage/HSCI ldf currentdict/HSCI undef}if
+	currentdict/MD known{/MultipleDataSources/MD ldf currentdict/MD undef}if
+	currentdict/I known{/Interpolate/I ldf currentdict/I undef}if
+	currentdict/SI known{/SkipImageProc/SI ldf currentdict/SI undef}if
+	/DataSource load xcheck not{
+		DataSource type/arraytype eq{
+			DataSource 0 get type/filetype eq{
+				/_Filters DataSource def
+				currentdict/MultipleDataSources known not{
+					/DataSource DataSource dup length 1 sub get def 
+				}if
+			}if
+		}if
+		currentdict/MultipleDataSources known not{
+			/MultipleDataSources DataSource type/arraytype eq{
+				DataSource length 1 gt
+			}
+			{false}ifelse def
+		}if
+	}if
+	/NComponents Decode length 2 div def
+	currentdict/SkipImageProc known not{/SkipImageProc{false}def}if
+}bdf
+/imageormask_sys
+{
+	begin
+		AGMIMG_init_common
+		save mark
+		level2{
+			currentdict
+			Operator/imagemask eq{
+				AGMIMG_&imagemask
+			}{
+				use_mask{
+					process_mask AGMIMG_&image
+				}{
+					AGMIMG_&image
+				}ifelse
+			}ifelse
+		}{
+			Width Height
+			Operator/imagemask eq{
+				Decode 0 get 1 eq Decode 1 get 0 eq	and
+				ImageMatrix/DataSource load
+				AGMIMG_&imagemask
+			}{
+				BitsPerComponent ImageMatrix/DataSource load
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+		cleartomark restore
+	end
+}def
+/overprint_plate
+{
+	currentoverprint{
+		0 get dup type/nametype eq{
+			dup/DeviceGray eq{
+				pop AGMCORE_black_plate not
+			}{
+				/DeviceCMYK eq{
+					AGMCORE_is_cmyk_sep not
+				}if
+			}ifelse
+		}{
+			false exch
+			{
+				 AGMOHS_sepink eq or
+			}forall
+			not
+		}ifelse
+	}{
+		pop false
+	}ifelse
+}def
+/process_mask
+{
+	level3{
+		dup begin
+		/ImageType 1 def
+		end
+		4 dict begin
+			/DataDict exch def
+			/ImageType 3 def
+			/InterleaveType 3 def
+			/MaskDict 9 dict begin
+				/ImageType 1 def
+				/Width DataDict dup/MaskWidth known{/MaskWidth}{/Width}ifelse get def
+				/Height DataDict dup/MaskHeight known{/MaskHeight}{/Height}ifelse get def
+				/ImageMatrix[Width 0 0 Height neg 0 Height]def
+				/NComponents 1 def
+				/BitsPerComponent 1 def
+				/Decode DataDict dup/MaskD known{/MaskD}{[1 0]}ifelse get def
+				/DataSource Adobe_AGM_Core/AGMIMG_maskSource get def
+			currentdict end def
+		currentdict end
+	}if
+}def
+/use_mask
+{
+	dup/Mask known	{dup/Mask get}{false}ifelse
+}def
+/imageormask
+{
+	begin
+		AGMIMG_init_common
+		SkipImageProc{
+			currentdict consumeimagedata
+		}
+		{
+			save mark
+			level2 AGMCORE_host_sep not and{
+				currentdict
+				Operator/imagemask eq DeviceN_PS2 not and{
+					imagemask
+				}{
+					AGMCORE_in_rip_sep currentoverprint and currentcolorspace 0 get/DeviceGray eq and{
+						[/Separation/Black/DeviceGray{}]setcolorspace
+						/Decode[Decode 1 get Decode 0 get]def
+					}if
+					use_mask{
+						process_mask image
+					}{
+						DeviceN_NoneName DeviceN_PS2 Indexed_DeviceN level3 not and or or AGMCORE_in_rip_sep and 
+						{
+							Names convert_to_process not{
+								2 dict begin
+								/imageDict xdf
+								/names_index 0 def
+								gsave
+								imageDict write_image_file{
+									Names{
+										dup(None)ne{
+											[/Separation 3 -1 roll/DeviceGray{1 exch sub}]setcolorspace
+											Operator imageDict read_image_file
+											names_index 0 eq{true setoverprint}if
+											/names_index names_index 1 add def
+										}{
+											pop
+										}ifelse
+									}forall
+									close_image_file
+								}if
+								grestore
+								end
+							}{
+								Operator/imagemask eq{
+									imagemask
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/imagemask eq{
+								imagemask
+							}{
+								image
+							}ifelse
+						}ifelse
+					}ifelse
+				}ifelse
+			}{
+				Width Height
+				Operator/imagemask eq{
+					Decode 0 get 1 eq Decode 1 get 0 eq	and
+					ImageMatrix/DataSource load
+					/Adobe_AGM_OnHost_Seps where{
+						pop imagemask
+					}{
+						currentgray 1 ne{
+							currentdict imageormask_sys
+						}{
+							currentoverprint not{
+								1 AGMCORE_&setgray
+								currentdict imageormask_sys
+							}{
+								currentdict ignoreimagedata
+							}ifelse				 		
+						}ifelse
+					}ifelse
+				}{
+					BitsPerComponent ImageMatrix 
+					MultipleDataSources{
+						0 1 NComponents 1 sub{
+							DataSource exch get
+						}for
+					}{
+						/DataSource load
+					}ifelse
+					Operator/colorimage eq{
+						AGMCORE_host_sep{
+							MultipleDataSources level2 or NComponents 4 eq and{
+								AGMCORE_is_cmyk_sep{
+									MultipleDataSources{
+										/DataSource DataSource 0 get xcheck
+											{
+											[
+											DataSource 0 get/exec cvx
+											DataSource 1 get/exec cvx
+											DataSource 2 get/exec cvx
+											DataSource 3 get/exec cvx
+											/AGMCORE_get_ink_data cvx
+											]cvx
+											}{
+											DataSource aload pop AGMCORE_get_ink_data
+											}ifelse def
+									}{
+										/DataSource 
+										Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+										/DataSource load
+										filter_cmyk 0()/SubFileDecode filter def
+									}ifelse
+									/Decode[Decode 0 get Decode 1 get]def
+									/MultipleDataSources false def
+									/NComponents 1 def
+									/Operator/image def
+									invert_image_samples
+						 			1 AGMCORE_&setgray
+									currentdict imageormask_sys
+								}{
+									currentoverprint not Operator/imagemask eq and{
+ 			 							1 AGMCORE_&setgray
+ 			 							currentdict imageormask_sys
+ 			 						}{
+ 			 							currentdict ignoreimagedata
+ 			 						}ifelse
+								}ifelse
+							}{	
+								MultipleDataSources NComponents AGMIMG_&colorimage						
+							}ifelse
+						}{
+							true NComponents colorimage
+						}ifelse
+					}{
+						Operator/image eq{
+							AGMCORE_host_sep{
+								/DoImage true def
+								currentdict/HostSepColorImage known{HostSepColorImage not}{false}ifelse
+								{
+									AGMCORE_black_plate not Operator/imagemask ne and{
+										/DoImage false def
+										currentdict ignoreimagedata
+					 				}if
+								}if
+						 		1 AGMCORE_&setgray
+								DoImage
+									{currentdict imageormask_sys}if
+							}{
+								use_mask{
+									process_mask image
+								}{
+									image
+								}ifelse
+							}ifelse
+						}{
+							Operator/knockout eq{
+								pop pop pop pop pop
+								currentcolorspace overprint_plate not{
+									knockout_unitsq
+								}if
+							}if
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+			cleartomark restore
+		}ifelse
+		currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
+/sep_imageormask
+{
+ 	/sep_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_avoid_L2_sep_space{
+			/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+		}if
+ 		AGMIMG_ccimage_exists 
+		MappedCSA 0 get/DeviceCMYK eq and
+		currentdict/Components known and 
+		Name()ne and 
+		Name(All)ne and 
+		Operator/image eq and
+		AGMCORE_producing_seps not and
+		level2 not and
+		{
+			Width Height BitsPerComponent ImageMatrix 
+			[
+			/DataSource load/exec cvx
+			{
+				0 1 2 index length 1 sub{
+					1 index exch
+					2 copy get 255 xor put
+				}for
+			}/exec cvx
+			]cvx bind
+			MappedCSA 0 get/DeviceCMYK eq{
+				Components aload pop
+			}{
+				0 0 0 Components aload pop 1 exch sub
+			}ifelse
+			Name findcmykcustomcolor
+			customcolorimage
+		}{
+			AGMCORE_producing_seps not{
+				level2{
+ 					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne AGMCORE_avoid_L2_sep_space not and currentcolorspace 0 get/Separation ne and{
+						[/Separation Name MappedCSA sep_proc_name exch dup 0 get 15 string cvs(/Device)anchorsearch{pop pop 0 get}{pop}ifelse exch load]setcolorspace_opt
+						/sep_tint AGMCORE_gget setcolor
+					}if
+					currentdict imageormask
+				}{
+					currentdict
+					Operator/imagemask eq{
+						imageormask
+					}{
+						sep_imageormask_lev1
+					}ifelse
+				}ifelse
+ 			}{
+				AGMCORE_host_sep{
+					Operator/knockout eq{
+						currentdict/ImageMatrix get concat
+						knockout_unitsq
+					}{
+						currentgray 1 ne{
+ 							AGMCORE_is_cmyk_sep Name(All)ne and{
+ 								level2{
+ 									Name AGMCORE_IsSeparationAProcessColor 
+ 									{
+ 										Operator/imagemask eq{
+ 											//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+ 												/sep_tint AGMCORE_gget 1 exch sub AGMCORE_&setcolor
+ 											}if
+ 										}{
+											invert_image_samples
+ 										}ifelse
+	 								}{
+	 									//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+	 										[/Separation Name[/DeviceGray]
+	 										{
+	 											sep_colorspace_proc AGMCORE_get_ink_data
+												1 exch sub
+	 										}bind
+											]AGMCORE_&setcolorspace
+											/sep_tint AGMCORE_gget AGMCORE_&setcolor
+										}if
+ 									}ifelse
+ 									currentdict imageormask_sys
+	 							}{
+	 								currentdict
+									Operator/imagemask eq{
+										imageormask_sys
+									}{
+										sep_image_lev1_sep
+									}ifelse
+	 							}ifelse
+ 							}{
+ 								Operator/imagemask ne{
+									invert_image_samples
+ 								}if
+		 						currentdict imageormask_sys
+ 							}ifelse
+ 						}{
+ 							currentoverprint not Name(All)eq or Operator/imagemask eq and{
+								currentdict imageormask_sys 
+								}{
+								currentoverprint not
+									{
+ 									gsave 
+ 									knockout_unitsq
+ 									grestore
+									}if
+								currentdict consumeimagedata 
+		 					}ifelse
+ 						}ifelse
+		 			}ifelse
+ 				}{
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{
+						currentcolorspace 0 get/Separation ne{
+							[/Separation Name MappedCSA sep_proc_name exch 0 get exch load]setcolorspace_opt
+							/sep_tint AGMCORE_gget setcolor
+						}if
+					}if
+					currentoverprint 
+					MappedCSA 0 get/DeviceCMYK eq and 
+					Name AGMCORE_IsSeparationAProcessColor not and
+					//Adobe_AGM_Core/AGMCORE_pattern_paint_type get 2 ne{Name inRip_spot_has_ink not and}{false}ifelse 
+					Name(All)ne and{
+						imageormask_l2_overprint
+					}{
+						currentdict imageormask
+ 					}ifelse
+				}ifelse
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+}def
+/colorSpaceElemCnt
+{
+	mark currentcolor counttomark dup 2 add 1 roll cleartomark
+}bdf
+/devn_sep_datasource
+{
+	1 dict begin
+	/dataSource xdf
+	[
+		0 1 dataSource length 1 sub{
+			dup currentdict/dataSource get/exch cvx/get cvx/exec cvx
+			/exch cvx names_index/ne cvx[/pop cvx]cvx/if cvx
+		}for
+	]cvx bind
+	end
+}bdf		
+/devn_alt_datasource
+{
+	11 dict begin
+	/convProc xdf
+	/origcolorSpaceElemCnt xdf
+	/origMultipleDataSources xdf
+	/origBitsPerComponent xdf
+	/origDecode xdf
+	/origDataSource xdf
+	/dsCnt origMultipleDataSources{origDataSource length}{1}ifelse def
+	/DataSource origMultipleDataSources
+		{
+			[
+			BitsPerComponent 8 idiv origDecode length 2 idiv mul string
+			0 1 origDecode length 2 idiv 1 sub
+				{
+				dup 7 mul 1 add index exch dup BitsPerComponent 8 idiv mul exch
+				origDataSource exch get 0()/SubFileDecode filter
+				BitsPerComponent 8 idiv string/readstring cvx/pop cvx/putinterval cvx
+				}for 
+			]bind cvx
+		}{origDataSource}ifelse 0()/SubFileDecode filter def		
+	[
+		origcolorSpaceElemCnt string
+		0 2 origDecode length 2 sub
+			{
+			dup origDecode exch get dup 3 -1 roll 1 add origDecode exch get exch sub 2 BitsPerComponent exp 1 sub div
+			1 BitsPerComponent 8 idiv{DataSource/read cvx/not cvx{0}/if cvx/mul cvx}repeat/mul cvx/add cvx
+			}for
+		/convProc load/exec cvx
+		origcolorSpaceElemCnt 1 sub -1 0
+			{
+			/dup cvx 2/add cvx/index cvx
+			3 1/roll cvx/exch cvx 255/mul cvx/cvi cvx/put cvx
+			}for
+	]bind cvx 0()/SubFileDecode filter
+	end
+}bdf
+/devn_imageormask
+{
+ 	/devicen_colorspace_dict AGMCORE_gget begin
+	CSA map_csa
+	2 dict begin
+	dup
+	/srcDataStrs[3 -1 roll begin
+		AGMIMG_init_common
+		currentdict/MultipleDataSources known{MultipleDataSources{DataSource length}{1}ifelse}{1}ifelse
+		{
+			Width Decode length 2 div mul cvi
+			{
+				dup 65535 gt{1 add 2 div cvi}{exit}ifelse
+			}loop
+			string
+		}repeat
+		end]def
+	/dstDataStr srcDataStrs 0 get length string def
+	begin
+	AGMIMG_init_common
+	SkipImageProc{
+		currentdict consumeimagedata
+	}{
+		save mark 
+		AGMCORE_producing_seps not{
+			level3 not{
+				Operator/imagemask ne{
+					/DataSource[[
+						DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+						colorSpaceElemCnt/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+						devn_alt_datasource 1/string cvx/readstring cvx/pop cvx]cvx colorSpaceElemCnt 1 sub{dup}repeat]def				
+					/MultipleDataSources true def
+					/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				}if
+			}if
+			currentdict imageormask
+ 		}{
+			AGMCORE_host_sep{
+				Names convert_to_process{
+					CSA get_csa_by_name 0 get/DeviceCMYK eq{
+						/DataSource
+							Width BitsPerComponent mul 7 add 8 idiv Height mul 4 mul 
+							DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+							4/devicen_colorspace_dict AGMCORE_gget/TintTransform get 
+							devn_alt_datasource
+						filter_cmyk 0()/SubFileDecode filter def
+						/MultipleDataSources false def
+						/Decode[1 0]def
+						/DeviceGray setcolorspace
+			 			currentdict imageormask_sys
+ 					}{
+						AGMCORE_report_unsupported_color_space
+						AGMCORE_black_plate{
+							/DataSource
+								DataSource Decode BitsPerComponent currentdict/MultipleDataSources known{MultipleDataSources}{false}ifelse
+								CSA get_csa_by_name 0 get/DeviceRGB eq{3}{1}ifelse/devicen_colorspace_dict AGMCORE_gget/TintTransform get
+								devn_alt_datasource
+							/MultipleDataSources false def
+							/Decode colorSpaceElemCnt[exch{0 1}repeat]def
+				 			currentdict imageormask_sys
+				 		}{
+	 						gsave 
+	 						knockout_unitsq
+	 						grestore
+							currentdict consumeimagedata 
+						}ifelse
+ 					}ifelse
+				}
+				{	
+					/devicen_colorspace_dict AGMCORE_gget/names_index known{
+	 					Operator/imagemask ne{
+	 						MultipleDataSources{
+		 						/DataSource[DataSource devn_sep_datasource/exec cvx]cvx def
+								/MultipleDataSources false def
+	 						}{
+								/DataSource/DataSource load dstDataStr srcDataStrs 0 get filter_devn def
+	 						}ifelse
+							invert_image_samples
+	 					}if
+			 			currentdict imageormask_sys
+	 				}{
+	 					currentoverprint not Operator/imagemask eq and{
+							currentdict imageormask_sys 
+							}{
+							currentoverprint not
+								{
+	 							gsave 
+	 							knockout_unitsq
+	 							grestore
+								}if
+							currentdict consumeimagedata 
+			 			}ifelse
+	 				}ifelse
+	 			}ifelse
+ 			}{
+				currentdict imageormask
+			}ifelse
+		}ifelse
+		cleartomark restore
+	}ifelse
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+	end
+	end
+}def
+/imageormask_l2_overprint
+{
+	currentdict
+	currentcmykcolor add add add 0 eq{
+		currentdict consumeimagedata
+	}{
+		level3{			
+			currentcmykcolor 
+			/AGMIMG_k xdf 
+			/AGMIMG_y xdf 
+			/AGMIMG_m xdf 
+			/AGMIMG_c xdf
+			Operator/imagemask eq{
+				[/DeviceN[
+				AGMIMG_c 0 ne{/Cyan}if
+				AGMIMG_m 0 ne{/Magenta}if
+				AGMIMG_y 0 ne{/Yellow}if
+				AGMIMG_k 0 ne{/Black}if
+				]/DeviceCMYK{}]setcolorspace
+				AGMIMG_c 0 ne{AGMIMG_c}if
+				AGMIMG_m 0 ne{AGMIMG_m}if
+				AGMIMG_y 0 ne{AGMIMG_y}if
+				AGMIMG_k 0 ne{AGMIMG_k}if
+				setcolor			
+			}{	
+				/Decode[Decode 0 get 255 mul Decode 1 get 255 mul]def
+				[/Indexed 				
+					[
+						/DeviceN[
+							AGMIMG_c 0 ne{/Cyan}if
+							AGMIMG_m 0 ne{/Magenta}if
+							AGMIMG_y 0 ne{/Yellow}if
+							AGMIMG_k 0 ne{/Black}if
+						]
+						/DeviceCMYK{
+							AGMIMG_k 0 eq{0}if
+							AGMIMG_y 0 eq{0 exch}if
+							AGMIMG_m 0 eq{0 3 1 roll}if
+							AGMIMG_c 0 eq{0 4 1 roll}if						
+						}
+					]
+					255
+					{
+						255 div 
+						mark exch
+						dup	dup dup
+						AGMIMG_k 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 1 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_y 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 2 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_m 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec 4 3 roll pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						AGMIMG_c 0 ne{
+							/sep_tint AGMCORE_gget mul MappedCSA sep_proc_name exch pop load exec pop pop pop		
+							counttomark 1 roll
+						}{
+							pop
+						}ifelse
+						counttomark 1 add -1 roll pop
+					}
+				]setcolorspace
+			}ifelse
+			imageormask_sys
+		}{
+	write_image_file{
+		currentcmykcolor
+		0 ne{
+			[/Separation/Black/DeviceGray{}]setcolorspace
+			gsave
+			/Black
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 1 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Yellow/DeviceGray{}]setcolorspace
+			gsave
+			/Yellow
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 2 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Magenta/DeviceGray{}]setcolorspace
+			gsave
+			/Magenta
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{4 3 roll pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+		0 ne{
+			[/Separation/Cyan/DeviceGray{}]setcolorspace
+			gsave
+			/Cyan 
+			[{1 exch sub/sep_tint AGMCORE_gget mul}/exec cvx MappedCSA sep_proc_name cvx exch pop{pop pop pop 1 exch sub}/exec cvx]
+			cvx modify_halftone_xfer
+			Operator currentdict read_image_file
+			grestore
+		}if
+				close_image_file
+			}{
+				imageormask
+			}ifelse
+		}ifelse
+	}ifelse
+}def
+/indexed_imageormask
+{
+	begin
+		AGMIMG_init_common
+		save mark 
+ 		currentdict
+ 		AGMCORE_host_sep{
+			Operator/knockout eq{
+				/indexed_colorspace_dict AGMCORE_gget dup/CSA known{
+					/CSA get get_csa_by_name
+				}{
+					/Names get
+				}ifelse
+				overprint_plate not{
+					knockout_unitsq
+				}if
+			}{
+				Indexed_DeviceN{
+					/devicen_colorspace_dict AGMCORE_gget dup/names_index known exch/Names get convert_to_process or{
+			 			indexed_image_lev2_sep
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}{
+		 			AGMCORE_is_cmyk_sep{
+						Operator/imagemask eq{
+							imageormask_sys
+						}{
+							level2{
+								indexed_image_lev2_sep
+							}{
+								indexed_image_lev1_sep
+							}ifelse
+						}ifelse
+					}{
+						currentoverprint not{
+							knockout_unitsq
+			 			}if
+			 			currentdict consumeimagedata
+					}ifelse
+				}ifelse
+			}ifelse
+ 		}{
+			level2{
+				Indexed_DeviceN{
+					/indexed_colorspace_dict AGMCORE_gget begin
+				}{
+					/indexed_colorspace_dict AGMCORE_gget dup null ne
+					{
+						begin
+						currentdict/CSDBase known{CSDBase/CSD get_res/MappedCSA get}{CSA}ifelse
+						get_csa_by_name 0 get/DeviceCMYK eq ps_level 3 ge and ps_version 3015.007 lt and
+						AGMCORE_in_rip_sep and{
+							[/Indexed[/DeviceN[/Cyan/Magenta/Yellow/Black]/DeviceCMYK{}]HiVal Lookup]
+							setcolorspace
+						}if
+						end
+					}
+					{pop}ifelse
+				}ifelse
+				imageormask
+				Indexed_DeviceN{
+					end
+				}if
+			}{
+				Operator/imagemask eq{
+					imageormask
+				}{
+					indexed_imageormask_lev1
+				}ifelse
+			}ifelse
+ 		}ifelse
+		cleartomark restore
+	currentdict/_Filters known{_Filters AGMIMG_flushfilters}if
+	end
+}def
+/indexed_image_lev2_sep
+{
+	/indexed_colorspace_dict AGMCORE_gget begin
+	begin
+		Indexed_DeviceN not{
+			currentcolorspace 
+			dup 1/DeviceGray put
+			dup 3
+			currentcolorspace 2 get 1 add string
+			0 1 2 3 AGMCORE_get_ink_data 4 currentcolorspace 3 get length 1 sub
+			{
+			dup 4 idiv exch currentcolorspace 3 get exch get 255 exch sub 2 index 3 1 roll put
+			}for 
+			put	setcolorspace
+		}if
+		currentdict 
+		Operator/imagemask eq{
+			AGMIMG_&imagemask
+		}{
+			use_mask{
+				process_mask AGMIMG_&image
+			}{
+				AGMIMG_&image
+			}ifelse
+		}ifelse
+	end end
+}def
+ /OPIimage
+ {
+ 	dup type/dicttype ne{
+ 		10 dict begin
+ 			/DataSource xdf
+ 			/ImageMatrix xdf
+ 			/BitsPerComponent xdf
+ 			/Height xdf
+ 			/Width xdf
+ 			/ImageType 1 def
+ 			/Decode[0 1]def
+ 			currentdict
+ 		end
+ 	}if
+ 	dup begin
+ 		/NComponents 1 cdndf
+ 		/MultipleDataSources false cdndf
+ 		/SkipImageProc{false}cdndf
+ 		/Decode[
+ 				0 
+ 				currentcolorspace 0 get/Indexed eq{
+ 					2 BitsPerComponent exp 1 sub
+ 				}{
+ 					1
+ 				}ifelse
+ 		]cdndf
+ 		/Operator/image cdndf
+ 	end
+ 	/sep_colorspace_dict AGMCORE_gget null eq{
+ 		imageormask
+ 	}{
+ 		gsave
+ 		dup begin invert_image_samples end
+ 		sep_imageormask
+ 		grestore
+ 	}ifelse
+ }def
+/cachemask_level2
+{
+	3 dict begin
+	/LZWEncode filter/WriteFilter xdf
+	/readBuffer 256 string def
+	/ReadFilter
+		currentfile
+		0(%EndMask)/SubFileDecode filter
+		/ASCII85Decode filter
+		/RunLengthDecode filter
+	def
+	{
+		ReadFilter readBuffer readstring exch
+		WriteFilter exch writestring
+		not{exit}if
+	}loop
+	WriteFilter closefile
+	end
+}def
+/spot_alias
+{
+	/mapto_sep_imageormask 
+	{
+		dup type/dicttype ne{
+			12 dict begin
+				/ImageType 1 def
+				/DataSource xdf
+				/ImageMatrix xdf
+				/BitsPerComponent xdf
+				/Height xdf
+				/Width xdf
+				/MultipleDataSources false def
+		}{
+			begin
+		}ifelse
+				/Decode[/customcolor_tint AGMCORE_gget 0]def
+				/Operator/image def
+				/SkipImageProc{false}def
+				currentdict 
+			end
+		sep_imageormask
+	}bdf
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_colorAry xddf
+		/customcolor_tint AGMCORE_gget
+		<<
+			/Name AGMIMG_colorAry 4 get
+			/CSA[/DeviceCMYK]
+			/TintMethod/Subtractive
+			/TintProc null
+			/MappedCSA null
+			/NComponents 4 
+			/Components[AGMIMG_colorAry aload pop pop]
+		>>
+		setsepcolorspace
+		mapto_sep_imageormask
+	}ndf
+	Adobe_AGM_Image/AGMIMG_&customcolorimage/customcolorimage load put
+	/customcolorimage
+	{
+		Adobe_AGM_Image/AGMIMG_override false put
+		current_spot_alias{dup 4 get map_alias}{false}ifelse
+		{
+			false set_spot_alias
+			/customcolor_tint AGMCORE_gget exch setsepcolorspace
+			pop
+			mapto_sep_imageormask
+			true set_spot_alias
+		}{
+			//Adobe_AGM_Image/AGMIMG_&customcolorimage get exec
+		}ifelse			
+	}bdf
+}def
+/snap_to_device
+{
+	6 dict begin
+	matrix currentmatrix
+	dup 0 get 0 eq 1 index 3 get 0 eq and
+	1 index 1 get 0 eq 2 index 2 get 0 eq and or exch pop
+	{
+		1 1 dtransform 0 gt exch 0 gt/AGMIMG_xSign? exch def/AGMIMG_ySign? exch def
+		0 0 transform
+		AGMIMG_ySign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		AGMIMG_xSign?{floor 0.1 sub}{ceiling 0.1 add}ifelse exch
+		itransform/AGMIMG_llY exch def/AGMIMG_llX exch def
+		1 1 transform
+		AGMIMG_ySign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		AGMIMG_xSign?{ceiling 0.1 add}{floor 0.1 sub}ifelse exch
+		itransform/AGMIMG_urY exch def/AGMIMG_urX exch def			
+		[AGMIMG_urX AGMIMG_llX sub 0 0 AGMIMG_urY AGMIMG_llY sub AGMIMG_llX AGMIMG_llY]concat
+	}{
+	}ifelse
+	end
+}def
+level2 not{
+	/colorbuf
+	{
+		0 1 2 index length 1 sub{
+			dup 2 index exch get 
+			255 exch sub 
+			2 index 
+			3 1 roll 
+			put
+		}for
+	}def
+	/tint_image_to_color
+	{
+		begin
+			Width Height BitsPerComponent ImageMatrix 
+			/DataSource load
+		end
+		Adobe_AGM_Image begin
+			/AGMIMG_mbuf 0 string def
+			/AGMIMG_ybuf 0 string def
+			/AGMIMG_kbuf 0 string def
+			{
+				colorbuf dup length AGMIMG_mbuf length ne
+					{
+					dup length dup dup
+					/AGMIMG_mbuf exch string def
+					/AGMIMG_ybuf exch string def
+					/AGMIMG_kbuf exch string def
+					}if
+				dup AGMIMG_mbuf copy AGMIMG_ybuf copy AGMIMG_kbuf copy pop
+			}
+			addprocs
+			{AGMIMG_mbuf}{AGMIMG_ybuf}{AGMIMG_kbuf}true 4 colorimage	
+		end
+	}def			
+	/sep_imageormask_lev1
+	{
+		begin
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{
+					255 mul round cvi GrayLookup exch get
+				}currenttransfer addprocs settransfer
+				currentdict imageormask
+			}{
+				/sep_colorspace_dict AGMCORE_gget/Components known{
+					MappedCSA 0 get/DeviceCMYK eq{
+						Components aload pop
+					}{
+						0 0 0 Components aload pop 1 exch sub
+					}ifelse
+					Adobe_AGM_Image/AGMIMG_k xddf 
+					Adobe_AGM_Image/AGMIMG_y xddf 
+					Adobe_AGM_Image/AGMIMG_m xddf 
+					Adobe_AGM_Image/AGMIMG_c xddf 
+					AGMIMG_y 0.0 eq AGMIMG_m 0.0 eq and AGMIMG_c 0.0 eq and{
+						{AGMIMG_k mul 1 exch sub}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						currentcolortransfer
+						{AGMIMG_k mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_y mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_m mul 1 exch sub}exch addprocs 4 1 roll
+						{AGMIMG_c mul 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer
+						currentdict tint_image_to_color
+					}ifelse
+				}{
+					MappedCSA 0 get/DeviceGray eq{
+						{255 mul round cvi ColorLookup exch get 0 get}currenttransfer addprocs settransfer
+						currentdict imageormask
+					}{
+						MappedCSA 0 get/DeviceCMYK eq{
+							currentcolortransfer
+							{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}{
+							currentcolortransfer
+							{pop 1}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 2 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 1 get}exch addprocs 4 1 roll
+							{255 mul round cvi ColorLookup exch get 0 get}exch addprocs 4 1 roll
+							setcolortransfer 
+							currentdict tint_image_to_color
+						}ifelse
+					}ifelse
+				}ifelse
+			}ifelse
+		end
+	}def
+	/sep_image_lev1_sep
+	{
+		begin
+			/sep_colorspace_dict AGMCORE_gget/Components known{
+				Components aload pop
+				Adobe_AGM_Image/AGMIMG_k xddf 
+				Adobe_AGM_Image/AGMIMG_y xddf 
+				Adobe_AGM_Image/AGMIMG_m xddf 
+				Adobe_AGM_Image/AGMIMG_c xddf 
+				{AGMIMG_c mul 1 exch sub}
+				{AGMIMG_m mul 1 exch sub}
+				{AGMIMG_y mul 1 exch sub}
+				{AGMIMG_k mul 1 exch sub}
+			}{
+				{255 mul round cvi ColorLookup exch get 0 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 1 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 2 get 1 exch sub}
+				{255 mul round cvi ColorLookup exch get 3 get 1 exch sub}
+			}ifelse
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end
+	}def
+	/indexed_imageormask_lev1
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			currentdict
+			MappedCSA 0 get dup/DeviceRGB eq exch/DeviceCMYK eq or has_color not and{
+				{HiVal mul round cvi GrayLookup exch get HiVal div}currenttransfer addprocs settransfer
+				imageormask
+			}{
+				MappedCSA 0 get/DeviceGray eq{
+					{HiVal mul round cvi Lookup exch get HiVal div}currenttransfer addprocs settransfer
+					imageormask
+				}{
+					MappedCSA 0 get/DeviceCMYK eq{
+						currentcolortransfer
+						{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}{
+						currentcolortransfer
+						{pop 1}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 2 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 1 add Lookup exch get HiVal div}exch addprocs 4 1 roll
+						{3 mul HiVal mul round cvi 		Lookup exch get HiVal div}exch addprocs 4 1 roll
+						setcolortransfer 
+						tint_image_to_color
+					}ifelse
+				}ifelse
+			}ifelse
+		end end
+	}def
+	/indexed_image_lev1_sep
+	{
+		/indexed_colorspace_dict AGMCORE_gget begin
+		begin
+			{4 mul HiVal mul round cvi		 Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 1 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 2 add Lookup exch get HiVal div 1 exch sub}
+			{4 mul HiVal mul round cvi 3 add Lookup exch get HiVal div 1 exch sub}
+			AGMCORE_get_ink_data currenttransfer addprocs settransfer
+			currentdict imageormask_sys
+		end end
+	}def
+}if
+end
+systemdict/setpacking known
+{setpacking}if
+%%EndResource
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndProlog
+%%BeginSetup
+Adobe_AGM_Utils begin
+2 2010 Adobe_AGM_Core/ds gx
+Adobe_CoolType_Core/ds get exec
+Adobe_AGM_Image/ds gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%EndSetup
+%%Page: (Page 1) 1
+%%EndPageComments
+%%BeginPageSetup
+%ADOBeginClientInjection: PageSetup Start "AI11EPS"
+%AI12_RMC_Transparency: Balance=75 RasterRes=300 GradRes=150 Text=0 Stroke=1 Clip=1 OP=0
+%ADOEndClientInjection: PageSetup Start "AI11EPS"
+Adobe_AGM_Utils begin
+Adobe_AGM_Core/ps gx
+Adobe_AGM_Utils/capture_cpd gx
+Adobe_CoolType_Core/ps get exec
+Adobe_AGM_Image/ps gx
+%ADOBeginClientInjection: PageSetup End "AI11EPS"
+/currentdistillerparams where
+{pop currentdistillerparams /CoreDistVersion get 5000 lt} {true} ifelse
+{ userdict /AI11_PDFMark5 /cleartomark load put
+userdict /AI11_ReadMetadata_PDFMark5 {flushfile cleartomark } bind put}
+{ userdict /AI11_PDFMark5 /pdfmark load put
+userdict /AI11_ReadMetadata_PDFMark5 {/PUT pdfmark} bind put } ifelse
+[/NamespacePush AI11_PDFMark5
+[/_objdef {ai_metadata_stream_123} /type /stream /OBJ AI11_PDFMark5
+[{ai_metadata_stream_123}
+currentfile 0 (%  &&end XMP packet marker&&)
+/SubFileDecode filter AI11_ReadMetadata_PDFMark5
+<?xpacket begin="" id="W5M0MpCehiHzreSzNTczkc9d"?>
+<x:xmpmeta xmlns:x="adobe:ns:meta/" x:xmptk="Adobe XMP Core 4.1-c036 46.277092, Fri Feb 23 2007 14:16:18        ">
+   <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#">
+      <rdf:Description rdf:about=""
+            xmlns:dc="http://purl.org/dc/elements/1.1/">
+         <dc:format>application/postscript</dc:format>
+         <dc:title>
+            <rdf:Alt>
+               <rdf:li xml:lang="x-default">Web</rdf:li>
+            </rdf:Alt>
+         </dc:title>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xap="http://ns.adobe.com/xap/1.0/"
+            xmlns:xapGImg="http://ns.adobe.com/xap/1.0/g/img/">
+         <xap:CreatorTool>Adobe Illustrator CS3</xap:CreatorTool>
+         <xap:CreateDate>2017-04-03T09:52:22+02:00</xap:CreateDate>
+         <xap:ModifyDate>2017-04-03T10:02:31+02:00</xap:ModifyDate>
+         <xap:MetadataDate>2017-04-03T10:02:31+02:00</xap:MetadataDate>
+         <xap:Thumbnails>
+            <rdf:Alt>
+               <rdf:li rdf:parseType="Resource">
+                  <xapGImg:width>256</xapGImg:width>
+                  <xapGImg:height>76</xapGImg:height>
+                  <xapGImg:format>JPEG</xapGImg:format>
+                  <xapGImg:image>/9j/4AAQSkZJRgABAgEASABIAAD/7QAsUGhvdG9zaG9wIDMuMAA4QklNA+0AAAAAABAASAAAAAEA&#xA;AQBIAAAAAQAB/+4ADkFkb2JlAGTAAAAAAf/bAIQABgQEBAUEBgUFBgkGBQYJCwgGBggLDAoKCwoK&#xA;DBAMDAwMDAwQDA4PEA8ODBMTFBQTExwbGxscHx8fHx8fHx8fHwEHBwcNDA0YEBAYGhURFRofHx8f&#xA;Hx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8fHx8f/8AAEQgATAEAAwER&#xA;AAIRAQMRAf/EAaIAAAAHAQEBAQEAAAAAAAAAAAQFAwIGAQAHCAkKCwEAAgIDAQEBAQEAAAAAAAAA&#xA;AQACAwQFBgcICQoLEAACAQMDAgQCBgcDBAIGAnMBAgMRBAAFIRIxQVEGE2EicYEUMpGhBxWxQiPB&#xA;UtHhMxZi8CRygvElQzRTkqKyY3PCNUQnk6OzNhdUZHTD0uIIJoMJChgZhJRFRqS0VtNVKBry4/PE&#xA;1OT0ZXWFlaW1xdXl9WZ2hpamtsbW5vY3R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo+Ck5SVlpeYmZ&#xA;qbnJ2en5KjpKWmp6ipqqusra6voRAAICAQIDBQUEBQYECAMDbQEAAhEDBCESMUEFURNhIgZxgZEy&#xA;obHwFMHR4SNCFVJicvEzJDRDghaSUyWiY7LCB3PSNeJEgxdUkwgJChgZJjZFGidkdFU38qOzwygp&#xA;0+PzhJSktMTU5PRldYWVpbXF1eX1RlZmdoaWprbG1ub2R1dnd4eXp7fH1+f3OEhYaHiImKi4yNjo&#xA;+DlJWWl5iZmpucnZ6fkqOkpaanqKmqq6ytrq+v/aAAwDAQACEQMRAD8A9U4q7FWGebvzi/LjynK9&#xA;vrOtQreps1jAGuJwf5XSIPwP+vTFXnl1/wA5gfl9HKUt9L1SdB/uwpAgPyBmJp88Vdbf85gfl88g&#xA;W40rVYUP7apbvT5j1lNPlirNfLn5+flTr8iQ22uxWty5AEF8rWpqegDyhYyT4BsVegKyuodCGVgC&#xA;rA1BB6EHFW8VdirsVdirsVSDzh588peT7EXnmHUYrKN6+jEavNKR2jiQM7e9BQd8VeRX/wDzmF5K&#xA;imKWOjajcxAkepJ6MNfcAPIfvpiqceX/APnKv8r9TmWG+N5oztt6l3CHir2+OBpSPmVAxV61pmqa&#xA;ZqllHfaZdw3tlMKxXNvIssbfJkJBxVFYq7FXYq7FXYq7FXYq7FXzR+fv/OQfmXSPM1x5V8pzrYiw&#xA;CrqGohEkleZ1DGOPmGVFQNQmleXhTdV4ddeefzL8xSmCbW9V1Fnr/oyTzupr1pEh4/hiqBeLzl5S&#xA;1S2vZYr/AEPU2Hr2s0qy20zLy+2vIKzKWHyOKvuX8oPOV15x/LzSNdvQBfTo8V5xFFMsEjRM4Hbn&#xA;w5U7VxVlGp6nY6Xp1zqV/MtvZWcTz3MzdEjjBZjt7DFXxx+Zf/OSnnXzJfzW/l+6l0LQlJWBLZuF&#xA;1Io/blmX41J/lQgDpv1xVgkXm78yNKkj1FNY1e0aYhkuWnuFWQj3ZqP+OKvof8hv+cib3zDqMPlX&#xA;ze6Nqk/w6bqiqsYnYCvpTKoChyPssoAPTr1VfQFzc29rby3NzKkFvAjSTTSMFREUVZmY7AAdTir5&#xA;G/OP/nJPWdfuZ9G8ozyaboKExyX0ZKXN1Q0JDDeOI9gPiI+1144qw7yH+RP5iedYlvbKzWz0yTdd&#xA;Sv2MUbg94wA0knzVae+KvWdP/wCcNIfTRtR80MZDQyR29oAo8QHeU1+fH6MVbvf+cM7coxsvNTq9&#xA;TxWazDAjsCyzLT50xV575u/5xj/M3QIpLm0gh1yzjBZnsGJmCjxgcI5PsnLFVT/nHz81/MegectJ&#xA;8vTXUt15f1W4jsTYyMXWGSdgkUkNa8KSMOQGxFe9CFX2lirsVdirsVYb+Z/5peXvy/0M32pP6t/O&#xA;GXTdOQ/vJ5FH/CopI5v29zQFV8U3V15y/Mvzk887m+1i+NST8MUMS9h2jijH+ZY70anUwwwM5mgG&#xA;zFilOVDm9R0z/nGW2Nsrajrb/WGALJbwgIppuAXYlt+9B8s5jJ7Tm/TDbzLso9mjqUm8yf8AON2v&#xA;2UDz6Hfx6pxFfq0i/V5T7ISzox+ZXMrTe0mORrJHh8+Y/HzasnZ0h9JtiPkX8w/Of5beYWezaSFY&#xA;5OOp6NcclilCmjLIh+y9OjgVHy2PRQmJAGJsF15BBovuHyT5x0jzj5as9f0pyba6X44m+3FKu0kT&#xA;gftI23v1GxySE9xV2KuxV2KuxV2KuxV8D/nj/wCTa80f8xrf8RXFX3D5Qtre38r6SkESQobSBisa&#xA;hRUxLU0HfFXzD/zmJ/ynGi/9swf9REuKvYP+cYf/ACTulf8AGa7/AOoh8VQ//OU2pXNn+UtzFASq&#xA;393bW05Bp+75GUj6TEBirx7/AJxO8o6LrXm7UtT1OBLp9GgjksoZQGUTTOQJeJ2JQJtXoTXrTFX1&#xA;pqmlabq1hNp+p2sV5Y3C8JreZQ6MPcHFX59eZbU+U/zB1O20uU10LVJksZSeTD6rcH0iT/MOAr74&#xA;q+gP+csvzJltre28jabNxe6QXWtMh39Kv7mAkfzEF2HgF7HFWM/845/kxY67H/i/zHbrcaZHIyaX&#xA;YSDlHO8Z4vLIpFGjRvhC92BrsN1X1ZDchVCFQFUUHEUAA9sVYV5o/PX8rvLV3LY6jrSPfwkrLa2s&#xA;clw6sOqsY1ZFbtRmBxVKtN/5yZ/KC9mETarLZsxorXNtMq/SyK6j6cVR35q/mTpGmflTq+v6LqMF&#xA;4biL6lp9zayrIPrFx+7BVkJHKNWMlP8AJxV8QaLq95o2r2erWRUXthMlxas6h1WWJgyNxOx4sAd8&#xA;VZZqf54/m1qRJuPM97Hy7WrLaj6Pq6xeOKpYfOP5l3X7865rU/P/AHb9aunrTb7XI16YqmGkfnP+&#xA;a2jTA2/mW/cxmhiu5DdKKbceFx6gHTFXu/5Xf85VWGqTJpfneOLTLgj91q8NRbOQOkqHkYyf5gSp&#xA;P8uKvAfOnmfW/wAyPP8ALfUZ59RnW20y1J2ih5cYY/AUBqx8anK8uWOOJlLkBbKETIgDmX0F+XX5&#xA;aaP5StiICZ9TnQC9vm6sBvwjXoicvpPeu2ee9o9pz1Ut9oDkPx1eh0+mjiHmzko3ZyPu/pmuvyb7&#xA;aAmHcOPfY/hh2Ts8n/P7yGmq6GPMdjbk6pp1BdemKtJa9yQOvpE8q/y1+joOwNf4eTwpH0S5e/8A&#xA;a67X4OKPEOYY3/zi/wDma3l3zT/he+f/AHE+YJ
+                  [base64-encoded JPEG thumbnail data omitted]</xapGImg:image>
+               </rdf:li>
+            </rdf:Alt>
+         </xap:Thumbnails>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapMM="http://ns.adobe.com/xap/1.0/mm/"
+            xmlns:stRef="http://ns.adobe.com/xap/1.0/sType/ResourceRef#">
+         <xapMM:DocumentID>uuid:2B68CB7CE519E7119A76BA5BC76AA065</xapMM:DocumentID>
+         <xapMM:InstanceID>uuid:21AD93F6E619E7119A76BA5BC76AA065</xapMM:InstanceID>
+         <xapMM:DerivedFrom rdf:parseType="Resource">
+            <stRef:instanceID>uuid:2A68CB7CE519E7119A76BA5BC76AA065</stRef:instanceID>
+            <stRef:documentID>uuid:2968CB7CE519E7119A76BA5BC76AA065</stRef:documentID>
+         </xapMM:DerivedFrom>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:illustrator="http://ns.adobe.com/illustrator/1.0/">
+         <illustrator:StartupProfile>Web</illustrator:StartupProfile>
+      </rdf:Description>
+      <rdf:Description rdf:about=""
+            xmlns:xapTPg="http://ns.adobe.com/xap/1.0/t/pg/"
+            xmlns:stDim="http://ns.adobe.com/xap/1.0/sType/Dimensions#"
+            xmlns:xapG="http://ns.adobe.com/xap/1.0/g/">
+         <xapTPg:MaxPageSize rdf:parseType="Resource">
+            <stDim:w>14400.000000</stDim:w>
+            <stDim:h>14400.000000</stDim:h>
+            <stDim:unit>Pixels</stDim:unit>
+         </xapTPg:MaxPageSize>
+         <xapTPg:NPages>1</xapTPg:NPages>
+         <xapTPg:HasVisibleTransparency>False</xapTPg:HasVisibleTransparency>
+         <xapTPg:HasVisibleOverprint>False</xapTPg:HasVisibleOverprint>
+         <xapTPg:PlateNames>
+            <rdf:Seq>
+               <rdf:li>Cyan</rdf:li>
+               <rdf:li>Magenta</rdf:li>
+               <rdf:li>Yellow</rdf:li>
+               <rdf:li>Black</rdf:li>
+            </rdf:Seq>
+         </xapTPg:PlateNames>
+         <xapTPg:SwatchGroups>
+            <rdf:Seq>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de nuances par défaut</xapG:groupName>
+                  <xapG:groupType>0</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Blanc</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Noir</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Rouge RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Jaune RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Vert RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>0</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Cyan RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>255</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Bleu RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>Magenta RVB</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>255</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>255</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=193 V=39 B=45</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>193</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>45</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=241 V=90 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>241</xapG:red>
+                           <xapG:green>90</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=247 V=147 B=30</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>247</xapG:red>
+                           <xapG:green>147</xapG:green>
+                           <xapG:blue>30</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=251 V=176 B=59</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>251</xapG:red>
+                           <xapG:green>176</xapG:green>
+                           <xapG:blue>59</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=252 V=238 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>252</xapG:red>
+                           <xapG:green>238</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=217 V=224 B=33</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>217</xapG:red>
+                           <xapG:green>224</xapG:green>
+                           <xapG:blue>33</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=198 B=63</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>198</xapG:green>
+                           <xapG:blue>63</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=57 V=181 B=74</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>57</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>74</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=146 B=69</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>69</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=104 B=55</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>104</xapG:green>
+                           <xapG:blue>55</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=34 V=181 B=115</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>34</xapG:red>
+                           <xapG:green>181</xapG:green>
+                           <xapG:blue>115</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=41 V=171 B=226</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>41</xapG:red>
+                           <xapG:green>171</xapG:green>
+                           <xapG:blue>226</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=113 B=188</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>113</xapG:green>
+                           <xapG:blue>188</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=46 V=49 B=146</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>46</xapG:red>
+                           <xapG:green>49</xapG:green>
+                           <xapG:blue>146</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=27 V=20 B=100</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>27</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>100</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=147 V=39 B=143</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>147</xapG:red>
+                           <xapG:green>39</xapG:green>
+                           <xapG:blue>143</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=158 V=0 B=93</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>158</xapG:red>
+                           <xapG:green>0</xapG:green>
+                           <xapG:blue>93</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=212 V=20 B=90</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>212</xapG:red>
+                           <xapG:green>20</xapG:green>
+                           <xapG:blue>90</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=237 V=30 B=121</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>237</xapG:red>
+                           <xapG:green>30</xapG:green>
+                           <xapG:blue>121</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=199 V=178 B=153</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>199</xapG:red>
+                           <xapG:green>178</xapG:green>
+                           <xapG:blue>153</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=153 V=134 B=117</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>153</xapG:red>
+                           <xapG:green>134</xapG:green>
+                           <xapG:blue>117</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=115 V=99 B=87</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>115</xapG:red>
+                           <xapG:green>99</xapG:green>
+                           <xapG:blue>87</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=83 V=71 B=65</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>83</xapG:red>
+                           <xapG:green>71</xapG:green>
+                           <xapG:blue>65</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=198 V=156 B=109</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>198</xapG:red>
+                           <xapG:green>156</xapG:green>
+                           <xapG:blue>109</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=166 V=124 B=82</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>166</xapG:red>
+                           <xapG:green>124</xapG:green>
+                           <xapG:blue>82</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=140 V=98 B=57</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>140</xapG:red>
+                           <xapG:green>98</xapG:green>
+                           <xapG:blue>57</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=117 V=76 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>117</xapG:red>
+                           <xapG:green>76</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=96 V=56 B=19</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>96</xapG:red>
+                           <xapG:green>56</xapG:green>
+                           <xapG:blue>19</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=66 V=33 B=11</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>66</xapG:red>
+                           <xapG:green>33</xapG:green>
+                           <xapG:blue>11</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Groupe de couleurs Web</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=236 V=28 B=36</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>236</xapG:red>
+                           <xapG:green>28</xapG:green>
+                           <xapG:blue>36</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=0 V=169 B=157</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>0</xapG:red>
+                           <xapG:green>169</xapG:green>
+                           <xapG:blue>157</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=102 V=45 B=145</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>102</xapG:red>
+                           <xapG:green>45</xapG:green>
+                           <xapG:blue>145</xapG:blue>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>R=139 V=146 B=152 1</xapG:swatchName>
+                           <xapG:mode>RGB</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:red>139</xapG:red>
+                           <xapG:green>146</xapG:green>
+                           <xapG:blue>152</xapG:blue>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+               <rdf:li rdf:parseType="Resource">
+                  <xapG:groupName>Niveaux de gris</xapG:groupName>
+                  <xapG:groupType>1</xapG:groupType>
+                  <xapG:Colorants>
+                     <rdf:Seq>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=100</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>255</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=90</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>229</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=80</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>204</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=70</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>178</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=60</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>153</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=50</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>127</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=40</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>101</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=30</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>76</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=20</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>50</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=10</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>25</xapG:gray>
+                        </rdf:li>
+                        <rdf:li rdf:parseType="Resource">
+                           <xapG:swatchName>N=5</xapG:swatchName>
+                           <xapG:mode>GRAY</xapG:mode>
+                           <xapG:type>PROCESS</xapG:type>
+                           <xapG:gray>12</xapG:gray>
+                        </rdf:li>
+                     </rdf:Seq>
+                  </xapG:Colorants>
+               </rdf:li>
+            </rdf:Seq>
+         </xapTPg:SwatchGroups>
+      </rdf:Description>
+   </rdf:RDF>
+</x:xmpmeta>
+<?xpacket end="w"?>
+%  &&end XMP packet marker&&
+[{ai_metadata_stream_123}
+<</Type /Metadata /Subtype /XML>>
+/PUT AI11_PDFMark5
+[/Document
+1 dict begin /Metadata {ai_metadata_stream_123} def
+currentdict end /BDC AI11_PDFMark5
+%ADOEndClientInjection: PageSetup End "AI11EPS"
+%%EndPageSetup
+1 -1 scale 0 -840 translate
+pgsv
+[1 0 0 1 0 0 ]ct
+gsave
+np
+gsave
+0 0 mo
+0 840 li
+1096 840 li
+1096 0 li
+cp
+clp
+[1 0 0 1 0 0 ]ct
+904.575 334.257 mo
+904.575 320.361 898.182 311.771 892.811 307.004 cv
+885.783 300.775 877.066 297.482 867.603 297.482 cv
+858.135 297.482 849.418 300.775 842.393 307.006 cv
+837.025 311.763 830.63 320.354 830.63 334.257 cv
+830.63 342.282 832.76 348.533 835.592 353.299 cv
+832.309 351 828.712 349.018 824.782 347.397 cv
+817.438 344.379 809.047 342.776 799.205 342.523 cv
+799.205 314.912 li
+799.205 309.227 796.713 303.826 792.384 300.14 cv
+788.849 297.129 784.381 295.508 779.801 295.508 cv
+778.775 295.508 777.742 295.588 776.715 295.754 cv
+747.302 300.492 li
+737.896 302.006 730.983 310.123 730.983 319.648 cv
+730.983 344.484 li
+730.277 344.316 729.57 344.156 728.861 344.004 cv
+722.946 342.736 716.463 342.093 709.595 342.093 cv
+698.512 342.093 688.313 344.105 679.273 348.078 cv
+675.158 349.891 671.305 352.037 667.71 354.475 cv
+665.828 352.313 663.445 350.541 660.656 349.357 cv
+655.197 347.042 649.498 345.239 643.722 344.003 cv
+637.803 342.734 631.32 342.092 624.455 342.092 cv
+613.37 342.092 603.172 344.105 594.137 348.078 cv
+585.168 352.024 577.379 357.498 570.993 364.338 cv
+570.505 364.861 570.043 365.404 569.571 365.941 cv
+567.759 359.079 564.273 352.445 559.656 347.231 cv
+554.654 334.644 545.437 324.396 532.739 317.38 cv
+522.827 311.904 510.623 308.563 500.098 308.44 cv
+499.868 308.439 499.64 308.438 499.413 308.438 cv
+477.133 308.438 463.671 320.701 457.115 331.523 cv
+448.029 337.533 440.207 347.509 437.402 358.122 cv
+436.752 358.81 436.147 359.513 435.567 360.225 cv
+430.352 354.689 423.794 350.346 416.015 347.283 cv
+407.797 344.057 398.276 342.486 386.911 342.486 cv
+377.26 342.486 367.872 343.197 359 344.601 cv
+353.201 345.52 348.15 346.486 343.704 347.52 cv
+344.549 345.173 li
+347.646 336.569 344.319 326.972 336.56 322.135 cv
+334.878 321.084 332.65 319.895 329.551 318.387 cv
+326.144 316.729 322.15 315.226 317.317 313.781 cv
+312.746 312.423 307.58 311.275 301.503 310.271 cv
+295.283 309.251 288.347 308.732 280.892 308.732 cv
+269.104 308.732 257.795 310.749 247.286 314.723 cv
+236.409 318.832 226.785 324.981 218.687 332.993 cv
+210.643 340.949 204.265 350.688 199.728 361.941 cv
+195.257 373.034 192.99 385.702 192.99 399.594 cv
+192.99 413.145 195.016 425.551 199.007 436.455 cv
+203.167 447.837 209.248 457.667 217.078 465.667 cv
+224.963 473.729 234.529 479.92 245.506 484.068 cv
+256.024 488.043 267.729 490.059 280.299 490.059 cv
+293.684 490.059 304.816 489.127 314.333 487.21 cv
+321.051 485.856 326.354 484.569 330.603 483.257 cv
+333.919 485.905 338.121 487.492 342.695 487.492 cv
+372.106 487.492 li
+378.378 487.492 383.956 484.516 387.503 479.898 cv
+391.05 484.516 396.628 487.492 402.899 487.492 cv
+432.311 487.492 li
+438.632 487.492 444.246 484.467 447.789 479.788 cv
+453.011 488.951 460.835 493.996 468.687 496.009 cv
+475.153 500.054 484.721 503.735 498.12 503.737 cv
+505.128 503.737 512.654 502.694 520.491 500.64 cv
+540.638 495.358 557.324 483.058 564.939 468.345 cv
+565.798 467.53 566.584 466.669 567.328 465.78 cv
+567.969 466.562 568.627 467.331 569.308 468.082 cv
+575.643 475.081 583.667 480.618 593.157 484.54 cv
+602.332 488.334 613.062 490.256 625.048 490.256 cv
+632.506 490.256 639.527 489.592 645.93 488.281 cv
+652.391 486.952 657.784 485.354 662.418 483.393 cv
+664.66 482.444 666.634 481.103 668.309 479.489 cv
+671.426 481.392 674.758 483.078 678.293 484.538 cv
+687.468 488.332 698.197 490.256 710.188 490.256 cv
+717.642 490.256 724.662 489.592 731.066 488.281 cv
+735.035 487.465 738.597 486.545 741.831 485.505 cv
+744.413 486.776 747.316 487.492 750.389 487.492 cv
+779.801 487.492 li
+786.072 487.492 791.649 484.518 795.196 479.901 cv
+798.745 484.518 804.321 487.492 810.593 487.492 cv
+840.004 487.492 li
+842.283 487.492 844.469 487.096 846.5 486.374 cv
+848.531 487.096 850.717 487.492 852.995 487.492 cv
+882.407 487.492 li
+893.124 487.492 901.811 478.805 901.811 468.089 cv
+901.811 364.26 li
+901.811 360.595 900.797 357.169 899.031 354.245 cv
+902.149 349.367 904.575 342.82 904.575 334.257 cv
+cp
+false sop
+/0 
+[/DeviceCMYK] /CSA add_res
+0 0 0 0.9 cmyk
+f
+852.995 468.089 mo
+882.407 468.089 li
+882.407 364.26 li
+852.995 364.26 li
+852.995 468.089 li
+cp
+855.265 346.988 mo
+858.75 350.082 862.865 351.627 867.602 351.627 cv
+872.34 351.627 876.451 350.082 879.939 346.988 cv
+883.425 343.897 885.17 339.654 885.17 334.256 cv
+885.17 328.861 883.425 324.618 879.939 321.524 cv
+876.451 318.434 872.34 316.886 867.602 316.886 cv
+862.865 316.886 858.75 318.434 855.265 321.524 cv
+851.776 324.618 850.034 328.861 850.034 334.256 cv
+850.034 339.654 851.776 343.897 855.265 346.988 cv
+cp
+830.826 375.116 mo
+827.471 370.906 822.995 367.65 817.403 365.346 cv
+811.809 363.045 804.801 361.891 796.381 361.891 cv
+793.485 361.891 790.49 362.188 787.4 362.779 cv
+784.306 363.371 781.773 363.998 779.8 364.654 cv
+779.8 314.912 li
+750.389 319.648 li
+750.389 468.089 li
+779.8 468.089 li
+779.8 389.131 li
+781.642 388.607 783.714 388.113 786.019 387.65 cv
+788.319 387.191 790.786 386.96 793.42 386.96 cv
+799.999 386.96 804.505 388.934 806.941 392.882 cv
+809.375 396.83 810.594 403.54 810.594 413.016 cv
+810.594 468.089 li
+840.005 468.089 li
+840.005 409.463 li
+840.005 402.356 839.314 395.91 837.932 390.118 cv
+836.55 384.329 834.182 379.33 830.826 375.116 cv
+cp
+724.597 444.6 mo
+720.516 445.259 716.7 445.586 713.147 445.586 cv
+703.146 445.586 696.138 443.02 692.125 437.887 cv
+688.11 432.755 686.104 425.52 686.104 416.174 cv
+686.104 407.359 688.242 400.253 692.521 394.855 cv
+696.795 389.461 703.278 386.763 711.963 386.763 cv
+716.041 386.763 719.727 387.157 723.017 387.947 cv
+726.305 388.736 729.334 389.658 732.097 390.711 cv
+738.216 367.221 li
+733.873 365.379 729.398 363.964 724.794 362.977 cv
+720.186 361.99 715.121 361.496 709.595 361.496 cv
+701.171 361.496 693.671 362.945 687.091 365.839 cv
+680.51 368.735 674.918 372.648 670.313 377.584 cv
+665.705 382.52 662.187 388.311 659.753 394.955 cv
+657.316 401.601 656.101 408.673 656.101 416.174 cv
+656.101 423.94 657.119 431.146 659.161 437.788 cv
+661.199 444.435 664.423 450.193 668.833 455.061 cv
+673.24 459.931 678.865 463.779 685.71 466.607 cv
+692.551 469.436 700.711 470.852 710.187 470.852 cv
+716.37 470.852 722.03 470.324 727.162 469.272 cv
+732.294 468.218 736.569 466.972 739.992 465.522 cv
+735.848 441.44 li
+732.424 442.89 728.673 443.942 724.597 444.6 cv
+cp
+639.458 444.6 mo
+635.378 445.259 631.562 445.586 628.01 445.586 cv
+618.008 445.586 610.999 443.02 606.987 437.887 cv
+602.972 432.755 600.967 425.52 600.967 416.174 cv
+600.967 407.359 603.104 400.253 607.382 394.855 cv
+611.656 389.461 618.141 386.763 626.824 386.763 cv
+630.902 386.763 634.588 387.157 637.879 387.947 cv
+641.166 388.736 644.195 389.658 646.959 390.711 cv
+653.078 367.221 li
+648.735 365.379 644.26 363.964 639.655 362.977 cv
+635.048 361.99 629.983 361.496 624.456 361.496 cv
+616.033 361.496 608.532 362.945 601.953 365.839 cv
+595.372 368.735 589.78 372.648 585.176 377.584 cv
+580.566 382.52 577.048 388.311 574.614 394.955 cv
+572.178 401.601 570.963 408.673 570.963 416.174 cv
+570.963 423.94 571.98 431.146 574.022 437.788 cv
+576.061 444.435 579.284 450.193 583.694 455.061 cv
+588.103 459.931 593.728 463.779 600.572 466.607 cv
+607.413 469.436 615.573 470.852 625.048 470.852 cv
+631.232 470.852 636.892 470.324 642.023 469.272 cv
+647.156 468.218 651.431 466.972 654.854 465.522 cv
+650.709 441.44 li
+647.286 442.89 643.535 443.942 639.458 444.6 cv
+cp
+422.836 375.116 mo
+419.413 370.906 414.773 367.65 408.92 365.346 cv
+403.063 363.045 395.725 361.891 386.911 361.891 cv
+378.226 361.891 369.935 362.518 362.039 363.766 cv
+354.143 365.019 347.695 366.366 342.695 367.813 cv
+342.695 468.089 li
+372.106 468.089 li
+372.106 387.947 li
+373.947 387.685 376.054 387.453 378.422 387.256 cv
+380.791 387.059 383.027 386.96 385.134 386.96 cv
+391.975 386.96 396.647 388.934 399.149 392.882 cv
+401.647 396.83 402.899 403.54 402.899 413.016 cv
+402.899 468.089 li
+432.311 468.089 li
+432.311 409.463 li
+432.311 402.356 431.586 395.91 430.14 390.118 cv
+428.689 384.329 426.256 379.33 422.836 375.116 cv
+cp
+297.472 443.414 mo
+295.628 443.809 293.49 444.073 291.057 444.203 cv
+288.62 444.336 285.693 444.4 282.273 444.4 cv
+275.957 444.4 270.429 443.316 265.691 441.145 cv
+260.954 438.973 257.007 435.913 253.849 431.966 cv
+250.69 428.018 248.322 423.314 246.743 417.852 cv
+245.163 412.393 244.374 406.305 244.374 399.594 cv
+244.374 385.775 247.563 374.889 253.947 366.924 cv
+260.328 358.964 270.692 354.982 285.036 354.982 cv
+291.483 354.982 297.438 355.806 302.9 357.449 cv
+308.359 359.097 313.196 361.037 317.408 363.272 cv
+326.291 338.6 li
+325.237 337.941 323.494 337.02 321.06 335.836 cv
+318.624 334.65 315.534 333.5 311.783 332.381 cv
+308.032 331.265 303.558 330.277 298.36 329.42 cv
+293.16 328.566 287.337 328.137 280.891 328.137 cv
+271.416 328.137 262.5 329.717 254.145 332.875 cv
+245.787 336.033 238.517 340.672 232.333 346.791 cv
+226.146 352.91 221.279 360.38 217.726 369.195 cv
+214.172 378.012 212.396 388.145 212.396 399.594 cv
+212.396 410.912 214.006 420.979 217.232 429.794 cv
+220.455 438.612 225.029 446.048 230.951 452.099 cv
+236.873 458.153 244.009 462.759 252.368 465.917 cv
+260.723 469.075 270.035 470.654 280.299 470.654 cv
+292.272 470.654 302.339 469.83 310.5 468.187 cv
+318.658 466.543 324.58 464.997 328.265 463.548 cv
+328.265 395.843 li
+297.472 395.843 li
+297.472 443.414 li
+cp
+0 0 0 0 cmyk
+f
+499.871 327.844 mo
+479.593 327.609 472.617 343.076 471.746 345.664 cv
+462.806 348.957 454.521 360.719 455.829 367.776 cv
+449.177 372.482 444.763 378.48 449.724 388.479 cv
+444.926 393.477 441.001 405.299 449.506 412.943 cv
+441.818 426.47 450.486 434.057 454.739 437.174 cv
+450.813 449.406 459.539 459.127 461.498 460.463 cv
+463.078 470.579 467.977 477.244 476.324 477.636 cv
+482.209 482.576 494.498 487.394 515.571 481.87 cv
+533.066 477.282 545.821 466.344 549.147 455.993 cv
+557.104 451.877 556.777 439.526 556.342 436.938 cv
+562.828 423.118 558.739 411.298 556.342 405.886 cv
+560.702 397.006 555.143 380.422 551.546 376.951 cv
+551.872 369.836 547.456 361.543 542.825 357.896 cv
+536.173 335.078 511.879 327.983 499.871 327.844 cv
+cp
+0 0.203922 0.847059 0 cmyk
+f
+502.25 467.75 mo
+495.838 466.606 492.5 462.25 489 455.25 cv
+486.897 453.815 478.75 444.25 477.25 432 cv
+474.695 430.128 471.25 418.5 471.5 409.75 cv
+469.75 403.75 468.349 397.448 470 388.75 cv
+467.75 379.25 467.599 372.865 472.75 367.5 cv
+472.75 358 475.359 351.052 482.5 346 cv
+481.349 339.791 484.277 333.904 491.679 328.695 cv
+477.657 331.937 472.487 343.462 471.746 345.664 cv
+462.806 348.957 454.521 360.719 455.829 367.776 cv
+449.177 372.482 444.763 378.48 449.724 388.479 cv
+444.926 393.477 441.001 405.299 449.506 412.943 cv
+441.818 426.47 450.486 434.057 454.739 437.174 cv
+450.813 449.406 459.539 459.127 461.498 460.463 cv
+463.078 470.579 467.977 477.244 476.324 477.636 cv
+482.209 482.576 494.498 487.394 515.571 481.87 cv
+522.207 480.13 528.155 477.474 533.171 474.285 cv
+516.934 476.368 507.505 472.161 502.25 467.75 cv
+cp
+0.027451 0.278431 0.905882 0 cmyk
+f
+479.905 346.547 mo
+479.905 346.547 498.071 344.899 507.586 346.71 cv
+517.031 348.507 533.404 356.603 533.404 356.603 cv
+533.404 356.603 508.984 349.163 501.732 348.135 cv
+493.03 346.898 479.905 346.547 479.905 346.547 cv
+cp
+0 0.376471 0.819608 0 cmyk
+f
+464.782 368.029 mo
+464.782 368.029 488.936 365.72 503.083 367.014 cv
+517.229 368.308 540.275 375.997 540.275 375.997 cv
+540.275 375.997 514.27 371.326 499.886 369.709 cv
+489.149 368.502 464.782 368.029 464.782 368.029 cv
+cp
+f
+460.468 387.674 mo
+460.468 387.674 484.75 385.621 499.593 386.067 cv
+514.435 386.512 540.681 391.008 540.681 391.008 cv
+540.681 391.008 506.098 388.892 494.801 388.754 cv
+483.505 388.617 460.468 387.674 460.468 387.674 cv
+cp
+f
+461.11 412.032 mo
+461.11 412.032 487.129 405.443 501.163 404.417 cv
+517.788 403.2 544.817 406.357 544.817 406.357 cv
+544.817 406.357 509.509 406.268 498.869 407.439 cv
+487.606 408.681 461.11 412.032 461.11 412.032 cv
+cp
+f
+464.962 436.38 mo
+464.962 436.38 490.357 427.354 504.871 425.765 cv
+519.387 424.175 546.102 424.177 546.102 424.177 cv
+546.102 424.177 511.032 427.614 500.03 429.181 cv
+489.032 430.748 464.962 436.38 464.962 436.38 cv
+cp
+f
+545.674 439.174 mo
+545.674 439.174 524.613 448.131 510.928 451.999 cv
+497.242 455.868 469.725 459.093 469.725 459.093 cv
+469.725 459.093 501.297 452.146 511.654 448.944 cv
+522.01 445.742 545.674 439.174 545.674 439.174 cv
+cp
+f
+484.328 475.342 mo
+484.328 475.342 498.696 467.484 507.908 464.136 cv
+525.13 457.875 538.541 456.817 538.541 456.817 cv
+538.541 456.817 514.27 464.576 505.585 467.402 cv
+498.535 469.697 484.328 475.342 484.328 475.342 cv
+cp
+f
+750.389 468.089 mo
+779.8 468.089 li
+779.8 423.76 li
+770.099 424.447 760.291 425.042 750.389 425.543 cv
+750.389 468.089 li
+cp
+724.597 444.6 mo
+720.516 445.259 716.7 445.586 713.147 445.586 cv
+703.146 445.586 696.138 443.02 692.125 437.887 cv
+689.906 435.051 688.324 431.549 687.332 427.428 cv
+682.405 427.474 677.462 427.5 672.5 427.5 cv
+667.27 427.5 662.06 427.471 656.868 427.419 cv
+657.378 431.016 658.142 434.472 659.161 437.788 cv
+661.199 444.435 664.423 450.193 668.833 455.061 cv
+673.24 459.931 678.865 463.779 685.71 466.607 cv
+692.551 469.436 700.711 470.852 710.187 470.852 cv
+716.37 470.852 722.03 470.324 727.162 469.272 cv
+732.294 468.218 736.569 466.972 739.992 465.522 cv
+735.848 441.44 li
+732.424 442.89 728.673 443.942 724.597 444.6 cv
+cp
+852.995 416.62 mo
+852.995 468.089 li
+882.407 468.089 li
+882.407 412.573 li
+872.766 414.02 862.957 415.37 852.995 416.62 cv
+cp
+810.594 468.089 mo
+840.005 468.089 li
+840.005 418.184 li
+830.335 419.297 820.527 420.317 810.594 421.24 cv
+810.594 468.089 li
+cp
+639.458 444.6 mo
+635.378 445.259 631.562 445.586 628.01 445.586 cv
+618.008 445.586 610.999 443.02 606.987 437.887 cv
+604.494 434.701 602.779 430.701 601.835 425.894 cv
+591.57 425.423 581.405 424.852 571.351 424.183 cv
+571.815 428.952 572.701 433.489 574.022 437.788 cv
+576.061 444.435 579.284 450.193 583.694 455.061 cv
+588.103 459.931 593.728 463.779 600.572 466.607 cv
+607.413 469.436 615.573 470.852 625.048 470.852 cv
+631.232 470.852 636.892 470.324 642.023 469.272 cv
+647.156 468.218 651.431 466.972 654.854 465.522 cv
+650.709 441.44 li
+647.286 442.89 643.535 443.942 639.458 444.6 cv
+cp
+402.117 401.792 mo
+402.637 404.961 402.899 408.698 402.899 413.016 cv
+402.899 468.089 li
+432.311 468.089 li
+432.311 409.463 li
+432.311 408.838 432.298 408.226 432.287 407.611 cv
+422.005 405.783 411.942 403.842 402.117 401.792 cv
+cp
+297.472 443.414 mo
+295.628 443.809 293.49 444.073 291.057 444.203 cv
+288.62 444.336 285.693 444.4 282.273 444.4 cv
+275.957 444.4 270.429 443.316 265.691 441.145 cv
+260.954 438.973 257.007 435.913 253.849 431.966 cv
+250.69 428.018 248.322 423.314 246.743 417.852 cv
+245.163 412.393 244.374 406.305 244.374 399.594 cv
+244.374 385.775 247.563 374.889 253.947 366.924 cv
+256.633 363.573 260.034 360.937 264.132 358.996 cv
+253.701 354.222 244.047 349.257 235.23 344.12 cv
+234.243 344.98 233.271 345.863 232.333 346.791 cv
+226.146 352.91 221.279 360.38 217.726 369.195 cv
+214.172 378.012 212.396 388.145 212.396 399.594 cv
+212.396 410.912 214.006 420.979 217.232 429.794 cv
+220.455 438.612 225.029 446.048 230.951 452.099 cv
+236.873 458.153 244.009 462.759 252.368 465.917 cv
+260.723 469.075 270.035 470.654 280.299 470.654 cv
+292.272 470.654 302.339 469.83 310.5 468.187 cv
+318.658 466.543 324.58 464.997 328.265 463.548 cv
+328.265 395.843 li
+297.472 395.843 li
+297.472 443.414 li
+cp
+342.695 468.089 mo
+372.106 468.089 li
+372.106 395.013 li
+361.997 392.548 352.188 389.961 342.695 387.26 cv
+342.695 468.089 li
+cp
+0 0 0 0.05 cmyk
+f
+0.5 lw
+0 lc
+0 lj
+4 ml
+[] 0 dsh
+true sadj
+27 804 mo
+0 804 li
+/0 
+<<
+/Name (All)
+/CSA /0 get_csa_by_name
+/MappedCSA /0 /CSA get_res
+/TintMethod /Subtractive
+/TintProc null
+/NComponents 4 
+/Components [ 0.858823 0.85098 0.788235 1 ] 
+>>
+/CSD add_res
+1 /0 /CSD get_res sepcs
+1 sep
+@
+36 813 mo
+36 840 li
+@
+27 36 mo
+0 36 li
+@
+36 27 mo
+36 0 li
+@
+1069 36 mo
+1096 36 li
+@
+1060 27 mo
+1060 0 li
+@
+1069 804 mo
+1096 804 li
+@
+1060 813 mo
+1060 840 li
+@
+%ADOBeginClientInjection: EndPageContent "AI11EPS"
+userdict /annotatepage 2 copy known {get exec}{pop pop} ifelse
+%ADOEndClientInjection: EndPageContent "AI11EPS"
+grestore
+grestore
+pgrs
+%%PageTrailer
+%ADOBeginClientInjection: PageTrailer Start "AI11EPS"
+[/EMC AI11_PDFMark5
+[/NamespacePop AI11_PDFMark5
+%ADOEndClientInjection: PageTrailer Start "AI11EPS"
+[
+[/CSA [/0 ]]
+[/CSD [/0 ]]
+] del_res
+Adobe_AGM_Image/pt gx
+Adobe_CoolType_Core/pt get exec
+Adobe_AGM_Core/pt gx
+currentdict Adobe_AGM_Utils eq {end} if
+%%Trailer
+Adobe_AGM_Image/dt get exec
+Adobe_CoolType_Core/dt get exec
+Adobe_AGM_Core/dt get exec
+%%EOF
+%AI9_PrintingDataEnd

+userdict /AI9_read_buffer 256 string put
+userdict begin
+/ai9_skip_data
+{
+	mark
+	{
+		currentfile AI9_read_buffer { readline } stopped
+		{
+		}
+		{
+			not
+			{
+				exit
+			} if
+			(%AI9_PrivateDataEnd) eq
+			{
+				exit
+			} if
+		} ifelse
+	} loop
+	cleartomark
+} def
+end
+userdict /ai9_skip_data get exec
+%AI9_PrivateDataBegin
+%!PS-Adobe-3.0 EPSF-3.0
+%%Creator: Adobe Illustrator(R) 13.0
+%%AI8_CreatorVersion: 13.0.0
+%%For: (Thierry Ung) ()
+%%Title: (gnocchi.eps)
+%%CreationDate: 4/3/17 10:02 AM
+%AI9_DataStream
%Gb"-6gQL[>E?P#[nIPHD[c0ZP+b2fXTT.([>W(5PMDUHaV+`s?.s4oPCSi^Ie^]hmJ#)P[O0.<W!siG/XA"XFDBDtmT(IXKLl7%D
%hu/HdNZW^ts37&-h]4rZgX#?&*&@K2/pDM\QbjPGr7<lWs8%MkDE<:BqT!f`hgbLQ`t)Olro&X]LUS?hrp]NkdiVo0^\c!Nn)0C2
%GQ7I7mFr0OH?Xc6lKeHf^,Z&Xpq,j]\$)tW5/)C:l#+'#&/P0#*IiJ.p7&hon_HFErqQ)se1CqIIRc!h_[k*.[#4;ir3u.0If&.a
%YC?^m^Ad85hnd>ti!7a"kl%\lJ,0;=L5n[cj67B[pj`,=rVWh1T>u6F#C=iN1]3g5s7%_urRF.2rTiqPj53'f1]RD+r_.Y.n\b+i
%?@DaDcsd5]m.tf_jdA@2c0KT2^O>e>29jMcqn$93qH**8n^=<ik.>?A>IsV_0-,OEVXX8n]QsIZ5DOO]qT\n5++EuJ&,oXK^?/Q[
%I/NX&2ubPT6VOKFpc!]npHQB?rUCud^\J$2b6V;HX2"H!c0Mhq$1e&\gK3:S(ncHNRrA)HX,l/OrWK;$iU)\uhL'SJF')uK#hJ+<
%=+>JnnHWcFq;!^=J,T@*+9!5PpraA,qsePFj#p.GSGVDc=VAB@7uSH^Q96_H"/SN?#.C1!:U"$;Jr0Nha#DnhH2mIhF.R#Xn]O&8
%nUBO&rgSse^+&A0oli4lU2eu7mUsohmP'Uue#1gdAH77"VdG[3]'fP6^ZGe!k4aCYmpEcEM<gj@Qtfae093609BTBK?GRQSYe^;3
%otT<'s6p?bY(P=[i:!3+pTi`Gdn-7NJ:E=T590t+F;Xf%lP71ZhoZi*ar16&)=P]cn&kHPeM``tmo^F=B7UkJHWfj@A\l"J*WF'j
%:+*^?FM_sf:ADrqK_J`Tq!%I?1Of4I`Qt_hrXS[iqoers-h=21:&TGV_ADtdn%pg<DnHIuUa,!IE('I?n%fUpD]H73"Fh6.KYR%A
%*"i1iQ?]"2AK2<]`c:$GK]8BgR66u:I(Tj-Ktm/LlP3j>51jqJA"H,\-2c$FjH4NP\aS<4IQI1-f1>Ag(Z'l+_r')ZY98mp5N<Kr
%^N^QT:KNorKtiQ+_r(52NfM^5IripJK"ph.*$>1B<FdZ>H0+5L',:bVkDDL8B&#VBFhEXq:$74#cFq8iF2.L\gp4(!hk;^9#=k\Z
%M5.t,o;MI`*$>1"],8;&HoreS?2T]Vi98#oMV5RbGinc-N89B/MCXR$fQ5%H`Wb07\u%UYgcM*MIP/qcp0:Zea2WPJ]Cb@C3e*bd
%k83l_G:$fUY;so<[rA/[j)<W<r13>2mae\MFIij-1Nk;*(];0h(ECko?<13A\Z>o,87rRM?"QJ$Wr:K2EkD7o(d!'&?`&]X?@C.l
%Pe[.5,7Ir@0>G@?j2d>QQXA4[s-LLuRV2VLT(UdC]FG2GV#9?+n]:`s^\eFGSBhTmJ,+5tnGhT'^H-]r4iatJhn6,i^])9'@=3Ja
%V=AlGid<r[2lt5e^UM#I_`XKfF803*?dIQPru^Ch)&KQMqWU'4&,\V8j7PI($OlK6l?AW9IdG7kf]MiHUVD<F#rZ(mA$>Xc]C6!\
%cZA#K[jQ:MX3u=m,<`fk^#^@6Fr^$tpiLrd^=T:AZge^=:S*M^or]CdZZDIP<jW+aC3RO\Yif*c0.g6B9"&@Z*b0IjH&lh3He_8=
%(USPW0BJVo`.bt&]r#F;3l^9md!BorC#Z&]Vu"X?p7`I>4_gKE]L+p2*q1XF)b5<`@fTrW'sWI>ELKWYrC$XCI47tIIZn4<D&]P#
%OPYi/iBNe`lJ7jt2rfc9#YPn>Gc^aUl\FX<H5fZLD<^Z1I@`pJZMe_=+2epZU[(!(L1$'^*40_mH]6i7GOgQ8:NRiTTX9A8[CMPE
%3%M3*N%/.7H]HYu@>8]B(Xa)5&"eZE[aC]]o6=;M4r`OZrBHX>LoX;g&F&X7J#@D8rm8`CX-485$ndc6G+?TLl&Xb2U0Z2P%E3F:
%^R_J^CIYpJ_(p;6Hc=<rlriTU;^U:L#O?0c7s:e>\@-uFJLeb36F`nB-c$Ai-IPW*Je\]dNDT#sJa!lnao8@r<n+HVg:be.^P*lk
%pLkkN12D=Kdue5oGm^UQ;suM[O4V!+G=)UPITs-Oo_b^'fjc;Q[_eBS8MSRG:\Qe=fi*e5D.2bdbj/R/H#JGHo1P=IRc/i?J>9HC
%@K/-CpaeN:C9#^Z1c<6'F&(An_;]cR(FiM40g<59HT0,'X$YhMJ+8ss>rt1YPe+57RHKArU<c;Ven<_gT6Y&I??ca6"jb@Cqu0X&
%4N?d2$1G+8A&")JF6idahh,p@p&$qrKmMbH4j7a_0!e3_,U;Fq>uYXCkqMQ<RY.V_U^dXpWF+8'?59WLH76GLOlq*6=)#F124cGC
%\NnJ&)M-?,H=e4e1#t(],iVVO5Pt&kB/n;gWPZWqI#"k0]/BRrhEcFA)s6NkBHCS3SR@D?^0g^ND@]fkNJ5ikg*O[1],o<jW@,4$
%Us9RT\,QU"8$9AYAInAa/"S.,rM6&70i`._F*f;(/O,'Sd+??K?$#5Rs-J'q&&b@fmh4]*Z(J6bZ9Pl0F58$ha2@hF.o3I:0T=SS
%LJbT0jdCX_n(O;.Xr-&W^C-[S&>h%ia6tenPB6^MaUFd>))C-PhaX">GJb5`pYVY;P2Dfo]L.tU&$"7Ze]/2=5CCMDfmESVf<7F;
%pcWD.l2TZ>h$:Iec$Rs0L;0KZk26.&m>pbahgG@kj7e'#rn@tBY7BH;pug7Q^\N\Sm-j@kq9T*$p",gbG5#)<%K:bMpu[?5W*Na0
%J,efYpHO#/]jH8K)VgBta,`IanGBiTI/*1HgA&XBDgm*2qX4$k_'e\Yhsm7N>^1\kc'rXdrUGZpE)ZH0T?r4>p$1#pmuD<n^]3GO
%m1[eF:ET1P+('GKJ+(*sIJ5<OrW>A&DQc8(iMMB=_nUTF:VCmeYOC=rq"3]Q)r.KTTAKM8l-98)n[@udmsa?Vj/3h;[r0eEp!;QI
%VcRfo^4CG,rqC?F^>YJeQeT!qXZq42g"3^:5/OVRLY)n3$\/3BX1'0gfN*KHpWjr<^A6_.I)Fg-*Ym&^9q>YOAge[6@ajH;^V#$m
%iVW)KEI330qU6JPQuC(Hq$>rqFoD$Yqeq!%r;+GmTA7A5Gk_/l+0_:kJ."L<r=@97?2<1`HfXRppTHd;&UZ['?bHYIq]j>\2h-)P
%l5d"b2ueXsn>eMJJpRpoYfD^;!tt$YoTroK5e['_meGXer\m):hS?tsF.b/cnaB"Qro;o)hS+:8+92?*%J1hC/F:1@]"@t0peCO]
%n6Z7_J@hLJh:Uh,lB^kb*P56ol)prmN[Y$b*p:&O6Ti@e4ND6pos*u_Thdg$fB.PH&,iatot\q/rmG//DWlja84+Gq@3,orEB"Ej
%iA+Y:9RRgW8n6"X8kV4@@lgH-\tnZEN=jIq@8(/PVf<cEnM$QOGTJNKb;&a:aCEM@b&^n[+Wp;)TCX@9Vf%C-6R35q12</R"g5ab
%"A\DJ?%=Hhk"&Q<UQp$uo/PnNm[NBb!Ognlf4dFpQhe5.pm8M+LmDJ#mOdH#c[tk&16roG,@%/.%+bZclN(TJ*+3Lk0Em6j!96()
%Y7LF/*?b%X^;Ck*\5%"6%i;aYL[24?$RYNPP@oARY/1btjZ"=Q&]T?l447W"e-;nc:N2l`\f@oolNpT"pt9oW:GC%kKNIs7ZF7HJ
%0!F_[.t"3OmV%jj\8\:NXgP>*L"72EHjI1A;mki%i_e9SPZ=?2`N;8*IHT1Q<^5q%s6($P31^Z``[ftt@R1Vh=C]2o#VmM`%"Jm1
%,n>YBKG9.VKG9"OR;db_!qG7a!%eI2R]%qRC+sZJ#WYY5*1)6sJf=gN#2home<gOk(a[I\&r[BZ!e0eY>4YZqmDmed?<piD>[:[n
%>jZ0,iuj982qD)4=kEJCPs3A``rM$'Q?oF#oldN?]WAZH3itKG4KNmkV<>g)0>ESM>(7k(L]R9Ln:35Zaho853[d<KSPYP9U_j@d
%'J1"R'K(t_0[VT@#_3tP+H._+.Krt>Q6='2d$(cD0DPB>C,"@1\-87W\=*h5).O7kQQSi2-;&!+X;<sG&n*6K-rV;-9uK]Cb%XrZ
%+=*[+*]Q]21123`hAPf8!lg4MCC=P')6bX>\M=aH+W=VeI0Ms()3Eo9?(d+n49e'$KbjRn.m(O-cEM>%f]523ljWdeGW<SNmU>9R
%Yc+J["bnh'7_k2)l#aM,G\oa\]9'OHKH7f-i_4m/j#(+tCO_>qmW-!G0'CFBEeIq3!T]]"XGrPL'XYWn9*]M&ja97aJGL?!CBDk\
%l'u1c,hRMC3MHfgSp#j$*oY3KkrCNk,=-6of^BhWOUG!L'2XqroQ5ieB'=@=\c]uL3-IQG/jPXr0VedZCkfWd@8eMG@6IGn%Kub=
%4Sf0oDu_/T!MM:lGCjPn@<+E]%i:t0-eK`#!:r+dlt+gA$)Y"`%FtRY]th'<_%o/KL6uC^Asi-WHT"VA_S^7T_f?`*;&7f8'C>K+
%pATSq+Qa0t_?F!/"D`74Q(28G1TY>C[I%CV6/i+fEs>fhKQUXXra_Zk$*^*.S,afSY!B#4J0d]RQ3I!Z"S?L5$lT,,im@@"^oB3K
%ee)k;kt-L131#.^p4.d$]E19g!@s6$nAeYc3P=_$*e@78R?aUk`FTK3\ZWHDiOS8X<g=IY4D.ZFm`]Zsmb0R#pQ*%,T=qt2ZYppj
%iP8+aE\"6gp),q.ei)?4n[mgO&;J.#&Uaoco'>eami:E`6\tZ4I;8f"B?.@#F"3L3/ua9nn]_S+_A[h*O-imQK;0"h`c4Ku.h(4!
%75&K36nB:7)Q-Ck2IkC(<I77h3uV%X/9r^s=ccR(-'p>r_rppMSa_$LZfj,^(fOs>0.VTP\Pj>MYgKE]Z5,e7H[7FZP_Z8jW",0h
%d_OmK$R=I&R^\"n)2-..3/"[BV)$Oh,Uj`<quk&O04'=?iTO)N%7dtrKneW!lAgTe*-W9_/g;?2*>1%P;Je1sHO)L.'[WiKq8Z>/
%'FJ\4`RL59mqM.+h/<b<%3ZRbh&4'3K4k`>a>d5mQ91-46ocI6m-bBk`Ssm5!(.%[YZMt`lZH;?@_EO6N)2)r=V.?fs!:X/o;!`Y
%TR-b$k@#"s,@I0-_)nFXY-_8*I*5'-IB\dn6eGABk^o`e%0K;E#SR<a2bXp)dP,C%b8'H+e;jENTc4oZe2Noq%G0GIND?;:H.4m7
%b$!4qS-Cd1EW?7[hY-l@dt`6]B3=+kNfPrga8Nn,d$cY>WVDe#FA&4#*"i0DN@48H4uPoWQ68Gk3$e7*36DT66:96?7@CDYAWY??
%-\^(VGWQ!;*rqX'JC>VlSNFXX*md'@`rOaV_(Jht59'^ec`fX:(SJO0Sjq@W`njW@pRK6epd)`f<':W:P+QsaZ_lPU\YgT(kf2_V
%G,b2n,@^AN'!PNW7KFHn\pS`C8EY4Ap$O-a#,L_Qq1UrM7&j-K^Jm::2B?0YkEg3+Q&6uGNX3t5_J2bf!2!=Ri50=j[?*Isa0<qn
%!lOPiMPO94n=9,-,6Q/FcHIQV#Fq-KIcb$:KVSk[d[>793('JJmt@Pa@ou^&`om:ORmC/C5^W(]->js.`CO3jQ0*cd4$"s.=#"H9
%jrXZrCNNiUa`bR,FADd4is0&78Fq@hR&lYf"B?:AF>eql6_6e[UN_%E7#IGM_U\S1.0q:k`urVr-\sSb&#;[J_?HJK1GTTjJf+m7
%_4G14!3qd7nZRjVB=UcZdE-Vrd'\rp2j6df6<tqS%RiGnmY3u[0ffe4%ropNi'GXY\X3,V+49-(,X4H>$X`#W^P@kl50t<W&mF(U
%$E]B8`HF-TRuOD9N>-=ub'Z1?9]tHE))30akbL&t7(LQXP?6Mteq-Rg`HC#m(OMOSM];=ok\OLq0H7su'atCjhKYW1c+#h&>B%OP
%Y+,4mNq9UQn-9:t#@tYGb(:3C(l;KqG_et/qXHn]9QjA'Dpsa`+g<1'Reeq)7">]]j:SXePs+EYd/X7>&'*T#W]^;U\+P`Mf!gnf
%?]A8N';7T&d7,0h+Wa+@fY6j_HlN&9_X&]:4")o.VaTKbO2;\RXOSk/(lToXXSL_8Gh,QU1)&i^&H4))HRA)G%b@$&8=TbQ$H^"G
%gD<ZR9DfN70T=!'E6:m=q4ButT7Bn=nZKJG#%2-79ttdiTkMjNg^Qp6iu>W[igVcIj)Wc/c-tJ]n&K,r]ug^>Fo28de7g]ZhZ.SF
%DdO,7ZQohnG&N\LLK;E>5E'Tl9?V`%k.Ys'L"`<973,Mi6`^-^L\$Ek?PPIP.7OYr?"Op32V-[n_gP3=Q@?M4>L]QZJsjO`m76G&
%]LdX;'2SK^_`nuEN@Vh`TV9<k`9A>1SuMAH"kpgW=aW#3rEW1ma@G.o]@#s'RQ!A?$hA_9#QcR*pj=@6k%+;b3+i)/jFQ>1N+;#s
%is!<-j(5lq$.F;QAauDIdW>)F+$]\uh<Q'b-%?+0af;$kPq4\oR1r_Z,gCnY!gRPZUrfp>QYVMR!DP(>f:^V,RIc%N1j3!'Cm9$$
%S=A-0E?07j6GDO\:]Y>aamhsH3>UabbCJrm0(.He:A[MA%.[AHQ9<Es+NT3U9[>OGlToqR);j`m)Q"$,[OPRCnHq(/%WKi$OfenN
%\B5PeX0kFV$?>nnXT;)IQ$aTe@D8QqW"_pGm.%o_ll&r,?<u%FRPu%e'S6#d]ah+/\@PF]EF2nYSZ;!\>o.]jrP^9!A4^6QeZZep
%M6BMZ=F5h-b]]r_6WEb$SCZCCicupH?,\e*]5P;7Rld",Bmn*ATLWA5\#ql+MSG,8o"?E8T)oU+WL4])iK#D(H.<<%X>+ONJSPZ/
%biNGYnNBe"ki*2rUR7][V0P+jhM?S:pc@*c!]q'.HXYH]n@A6fFM??LHDaNF:OGU*1OKXT-"39CcVWK/I2YKW;5Ro>Ct.euFr!Og
%s'+jgXEug@2XK9QXJ^)_7u#97eGoCT]@k!2L&DapT[hV2D/e(c2EqN=LNM%(.c`'H)!g1=7pb`*S&it"c<+`MU$9c-02GLX%m4rJ
%;jY^7*3@fU%Y"u6%l9@ZeK>p#`W&gUliJ)^aLeTf^Uf/(*M4YQ_l$)3o0gohF)do_Ej1e'71U28i9CoL8mG?U-95Z4UsZM<1rk0-
%S26_S=5"7oW5cl=8pamq;es@_aF#Cs"W%#,.+W0qMksdoQl!kf:"<gfMpPmE#aD-a4d)eU)HTKH\+0Grh6(L;S[W5srT),_Za*2l
%]!lnc%cd_R.!#OWIDl@,Yq<3N=&?%9Q`ZZ<$l3g_-*oBN?7.@1m.WH&YJNT\rP[):36+hgm8L4&$i3.#nc@38Na<BlhN<n\rAp_3
%*F^)Dr5cc7!7Kg5G!t."TptoH19+X%-OcmfI<77_4V$NnG+o^AnqieVe:@Wa"[XP8$DG\(;un]Y7=9,:,^^bo,SXZJ_<A[JTF8KT
%Fd*R20?M9(8I`ZYYF;I$X"3ELb$1cA+.[HNQ>7sOnGe;F^3)$>f:foHPL&#^B*qjC@pbb-TM!CkW@h2!Gq^[V0j+R\+?Ib=1n/fN
%FsXa)]6_*eiCI3?&7oOc9%X",ACo47#BqbI"];(]CN<L"f2u=X\qY5/dRsD;54?,[g@2D7eTDA"oB'f$Nk<o:^4Q8+L@fQ"1*5op
%%-En]k<QIu&NjlWaIF&L+EHfhanAu]MDJCT-Sq)To-c[*1#a!*^,hRb0ji3$C=$Su1NkK__nN^MM2Q%$4$?g5fn-q;g5Vi]Lh*ON
%b`7.L"8$7fNhr?MSZnN8mZ@Y@7'`,aALoSQ^9B;X'lRt&jXfs(0DIp*a(*CSNF`X-mT[HB`tmOE6YYMMqd??b#lW0f0>c]r]"e%*
%05@)ol;:Y:5PoH!B-DPA<!^9'*UI:WSfKg&"r&%b=,l4@;'WnnNYH7DQQ\[,9ihE57-DPQO:<4nLu1(nY1f,@a:UB+"[(h<r6oTR
%[R7ZT2Qon_1&n^YhU]epX=s0AqeQZ1`^[&Dhei6tEV`l-Ge`ECUd!G.0AHIOgO;NUBtq,?KmC^KZjU\p:Zn"c'8:Z-SG,#2ns[(h
%QS@iFg=_$&a[4O':S[(XmUo>>R?6H(4p`BFjU"UqUrba9'M^?+"-ChZbSSUU"S7;QeC"16BkB+p$;Lk)6(WrOV?/,sN]`g8S:fMY
%>pM3R%pnLjd^Td@$Q:u<2^E3#k^MfKo8Zl/kc#!E#[).;;s.il.5'$bF\<,)-+tu9P9Z#X)#K^9noZ(I$$D].X##]__>4.]84?;g
%lsfsml@QGM!Ll".CEQn+=Y_q+c3)\onLNC8e)ku7'd!5@VUr5i_A_q!.u9bjZS"+>X3.^ge[HX2\ZEH%a^6PX.=N5XpK6Kg,V0ha
%NoK@9?B3V.0q1@_\$KaBeJ_S\->JMA7H$ZC\grZ5&nZ>BX\l86R22]InDTb52[WPAAhccR3C4/fC6DuLOm%g?[7Y5_Djkj<F`Y^_
%&"%d^`7qqs9>GB4-N:HQf?icGMiWS&%lX*0nk[fsa(k1sC:fCW7Z/-41kK/d)RMI%l?Tp>Cm(CeKbI"Xs&-aj'fQI"JVTLZ95Rt\
%i+cTnoi$Kmj9BmjB+%)/__uo5KAuSc;Cq*D!B.NKQ$+drU2/?M27e!OpNi92@[e/@=Y8UV\7nl\U$W($[sY=;fgk&k=rXoV%EnUn
%3Cm&#P0fk^?6<5D$OY.#;='INJ2V;tmT`W#(I)b;]^lk0q(da-IM=AoWo068JN^#ok//c"JQ9$%lM0T@Vta0?]XhRO44Y+i.n`%3
%(8i-NSc*8h!NGILSSIcpjifl8J:%=MdB&Ho8EskS;JIE@7<2Y&BhA&leln!n(S-"&<nen?;$ie^jA8`EgTqkn;'YuVTCmju354=H
%cJ0+Rh4&S[O"'ue&;uI%eV0'bi)2\qM</7F&#/$t=G6]^\3C&_0809o3jb"I(*.n+iqOl.il:$X6jga\lM=)9i\NW/E5$)bcX`o5
%-lE3B"Bj$PL!HbUlhtN.6o$Y0UAai_J&Zq<X!7Nfp'^"!A&(bB!DQ.g-@mTAa&*9JE/BHNOBbWm:cer;(ZfC/^(0FY3Amse__nm+
%YQCViYG$Au%B+cn[C2XcL2$bVH8:-O\IAME7?T%>5IUnm:Qa+$%uPKZIbu25fQ(D,W@c$=IJF(iofCm]WA+J=0lGBDHDP`jI4i@^
%U:Lb.GI^aZHi:rbHW_l&2'D`48Eesr0>.FV7K8YXZRE(QKc*oPbRt8kem^hAa"1uLYgHu6V@$G+[Tg>ec7-<-ll!)%:3#\:)Xb(u
%rShm;ldUWC.:e-<Rik_7,2;$W0KiRdPa/3D9h#Q6,,6!`2N2HG,AZ=1$d2L>c8qdRT#KX[jFV+s9Zp2M<9'EVgX0M8&9KF'';mX)
%@b_a^.]a"_W5]CPTO'oJetOto1&`U+H@<B_@;q*HO-2#s2N%AbhRmoL6XS3F\T5;V0i%PI^[(cDA3O%n!hRdFGmjf,2YL)PC$F#,
%)P*OP_<:9"F%TKZlH#RIjA<MupH9WsnJQe,[`""C?eCBfH,6[0Ef+0WeR`qhnKS%.ro`:G9[2\uIK24r^?dN',WgL6e+nEQDX_Ou
%Gt%MqrIeM*I462fh[Ral,3.L@mcPa>p[8o5OIb-uL_M;qO'$TWU'lD759IMg%Im[P<^cK1Vn,FcZ&b]UT3Ut+:&lN.mU:Y?I@B0T
%k33$fq`N2HKY;\Yf2jTh#,DNUT<]F8Phki,m"R`92uG,:iD8,9s#:JePhki,m"R`92uG,:iD8,9J')[(kE+NhU$RJoANp$>)=:ef
%I!GPb;bKrN:I&)&.VZ!uY)am;fjqOdjl=b+;`X]^4j6e.#+'C$3PM8ZVk(qL/RGX+Xk.X&U**p9EdAP]oU*g#5&[=)Q[-6-"g*ju
%\$/*1M57Q+U>/$H=&`;$I<g.^S0MXZ[A^D(.bQ.ooE)%i?%PoV)i,/(2T!S=Le9S&:2,S`'r<EQ=d6?/lD!pI:mb6t]O8iZc:o&C
%3c@\R:Q!R?TK6MF,HmpR0'7mTInI65MK"%$Z04_kGZ:U>7+)tH'/&-.36NEt(-/R\kW7K[8^IqBJO8H0>8FbEMPEqO`@\ODN)S;4
%!]/=l<gUNG%H:ONZ=JgoJcn&c<AqW<6J*]<R#5?t*R#5"`b7#iWE@.aR0[>S)!urYVH#NU+j:=%-ofcF)7>)$8QTTC.aiS3B4DfA
%Rm3Bdca]A3XfcJTQ9M"-NSUOZ[RelsE/ckM?:PhTUR>j/;[-eC-Sho>nn"]N\k,"[(/[JfX.De*%d'cMPM*W_T6eB$g.h$<q4+ZQ
%+U:jkTK7Xg$0Du"IO4>X_H?q"#9S_<P.q8_M/[KcJR[uC5Rni!)9tK%QCM<\MP7)A^@iS_ioXOJrsR)4q6/YTmsB\nn/9@J%&&<D
%0(:sD7sNMha?CU*#kC45hG&#Al-Fp1Ju"#\8bDbRUr(*7lRtiDhnF79#(8I_^JSg<RcRujkAni]44.@T`F,6Ms)PJ@6mWY'CgJ[J
%l9qY:6a8?MU=$s,BZ=jtF,Pk>j02-@N]1d4feF'(>?SEc_]ptRF`<?r_M<m)YqHN**M+KF8"(1Y\CaBTo0pN6F[(/@WNkj;pHW$!
%-*$02EU]6Xk*bue>^VHjItA)87.B\"-dR?KD#9N7$^0V'?&-]<fYQ@cO]:J$3(PITJnP*sWJZN;OAs!7`b<;W1,B/&c@VF#U_J0b
%iafmHGm>L&[AFug=TG3S%F%<nbIYjEmW1[RBJ$$];`i*qapjjB921>5JrK;$WC_3]XB)R^EusiJ'7;/;GJe[?N6Y2ngkMadB':d(
%:c$C[FV`gBC3,?$KaO;CSFMLMO(!Ld8of:#2P22jrPSO,YMQtlk3b;3h16CFJmT2XD03bN?f`G;a3CTs^LW8^[)uO7_C#ef%-hGV
%nrbu#S1NEek5+"`+IZ,['36@R^.<FH'1l_t)g66>%?pR;n!s$=I/9Oal8<qG@dmIcIG!NI>M7LBg;7&QW3c(mJ^_NK5]%3B[8s)V
%<aCMW3l3!pF<?jIU[Y9.EE@2fYTg*$4_Et98T0.fQ/3R7o;W[fdaUHb#=hlQoL]iYB4</[T]m/j:4%,D=6PH4LE11YQS0[WnsS\s
%P@#lQ%4B"U8:o$JpaZ0fJ^?r<ojIWs(M-Q*1\KeJ;b_+<%),AQ,(A)h`C\^l=\f`&^IVD-ia7+RIb!^\m;l/tP*<,Bd8CUHP[l,(
%^AO8sRG9X72%?]s'c]$2SEnc-8c.I;Ao[Rm8B7!:r@/A8,6iOpLT@smf2X=>7S!#S)P$drL)amNIaii%hUK1[(K=NV6T#*qYsC,Y
%FcOhILJLK8W%K3`^7!IQW7OG0&JKP(p?qI=%pttaEPiYO%qWW-M3?&EDfc:UP=qFVEpie:#+g*jVY1%Z[SAIQdX\0'P3f4\kfEul
%Qh]\Uk:Bb'9&B$3&3qRr)QBBT]/V$]083[-.SlCo<PYsaM<2)*gR9QsH:bB<M7%`b/>)Zo:NT67'jS%*G3i^/^I_=[:D3D0Yn'lE
%jWGoMUNdEW=5bIh)u4oEh\T]jQnigGG8397="M+=s/5i%IsWPu5ur__^h7^_)GC8"*Ks+@oYoL<Z-a`)UeG!j,k$nlUn[U'8!Q"?
%;9&)JBiL82jBtCjN@%[?0fY=U<oJUBafAB2A`AVR8D8>4QVb=bkbtNq6`peMXNEZX.HYY<Y,dWYSB,YIq/oX1PdE4?+"D(+=c3R,
%<[ib2%HglGo1(9FrjB(i:U[43]6.;N'-t(4A6UF[F_5C0cFO(0ej%jt/St`Y?E2I1Xj0^)1Gh!an77ZCS?'j02Na^IOlJbX:0);=
%@e5Op,\d8kHR&aBqcs=.:jkoQ#o#HWT'*k)`RG8Qpi&t;,$gn)=<fcNPf5ChpmdI!1F*dd#ci?6rp7_F33A$WDlHsFc,j1!P(ll^
%U%cAfX8!YV*TIfAq#jePo2W$%=$GH*:NIfaYOYlmCfYZgX"&1UcW90!;e]8^T8:XfS[t1'"9cnE-'C<F8RrF>#TH7c1r>L\7PU%0
%O^o#\j4$u#dRneu'PXFnEZ"WDF;T<"]%sT2TA^4La0B@HY$Pce._C-%)XA1t)-riJIand[>)U%"_1rc$&*ZP8>s3rtJ4u`7#%l&-
%^Cdk]@+.g."rme4%8HAA!M]"X[hiiNR8VPeQD2CQq.)3_k[:tCL3Ut(<fJS4TW)UYB`#<h[RDoYQ;:t0$4gC)A=Np.Kb%djl`fBR
%c+D`#GY@Y9hSHQ/gPe88eTFu"bNbNJi?K4Veqp&'_/5[[DftOpcdJTclS7*Qek+5Mm3jSDhS"*8]_t!#]lj!,N]I_=X\'J'`I*f+
%j5N^/Y-ULt:c&.ujmBE1-Rs"c`L[P2_q4`Bg6EW/,hL6PGdCYup0sNOI]AY_ccmeN6_.lOXBG:M8QU$`4XXmQ&pGWdZ:\?-A4K[W
%lOp4$1i.FZ8abT\Nsj'9V0_/jZR;-CY\4CN_nncTjV`UkE)5@Z1iU0e9S\Pg#[6H-3Dn-fL`sUO>WD'<5_mi['028[oLoMBOT^mn
%*GH>4"am/!Y*\$/*ISdZ6IS#Y/q#(]Kiq0m,Pa"Ngqie8>T\$=bSe\Y1Vg!N"UHg4i_r4.bT*VOWr%BF>7t$SOfSE82)ZZbVr&!!
%=TrG;OVqjZm)l16-KCsQnan#pmim&fer]GTdX?-I!LFOs(QQZE,[mp)Rj]I298LS5QK\XARbbLI#%(l19mA?VhmU,_2DaO4/@=Ir
%&=h,nR_OA"B.\Bi$fe!MHYD0gKC]pVZb%,jR@ZNtMd=_F`s*D+'^FbL`X@'0W5T8r:TH%/1.>e[[qr-G0%Qebfh8`@P'+*'_-.JU
%L8n#$.fV!CM1tE%UT-rgg(M5/EfejT82[Prfu@%haQdEV<VMu5rcM352Q#1u<4buli$,-n[1Fo6:`0ABDN\(m;[f]7>m3+i3'Y!0
%MAiU6!7,>2@pUmY(HK^@Dapu]?iq<+S$[Li^d6'Vc3k'FK%>78>$iN@Kc9$>DiHJXPZbF=?=ooRXn\5d5pV,P_mS)Rb[.HokHWi1
%gNYG0N#*d,GO5a>I,'1(mSs'JLBl8mD9EMfR>X,c0T*t_UiWXdFcY\[V?^=&k`/\Y`;1@O!7SICEm8id(;E%k[4d/j/K<c+iut`b
%kKL!`lH_1ea,csK]@;.@)2o%2IS(d6kGiGM[tjHpj\;k;W-_G8j[#`]l3%VnnA^R([Cr7-I%[MYWN1unXsRhlaTMBU/-gKV5?lA#
%D82QYd"F6>LtAF?"t':f*jrL`D%D&443sZQ95IKA!;eg!EZ4g+-Zt0-c<-*m!_-.&-$sO&PEo!H;nl;u79/[0@WAK2S^ju4@qg]u
%OK8S`/#c\ZWpZJXRZ@@m$6eGZi%"(h3TeZ;[ps@tX9+-0k:(sj>.Sek-Rj2#.M^m?$=&ZJE0L(5X=M/jQk`UgqAf?qFIH.Soj$k:
%\m7.VXIZ_W_kV<Z;@5_QcdMVEdophCqt?6T*&K?nIfN"BIGt<llR(e2^.q/OXs%=.2W6+)88!k9YL"bJl$MN3SRc**>\L$G/=eF<
%^9:po$%#,-Xn%VCrk:,;`:uh-:cdL*q:iMW)N)n/rH4OD-d4e&X4n^]SY^fh'hKeHr`(d78]RE@E6>3=$LVUgVTM?M<);@\86r%B
%VVZbAK&dNp_Fn_54kACc44cq`haZR[2AnHjMBg9\'@ld5f+k3hnT7t6h-?`W/)>$cS-/_P6m8Y==EhKGZHgVDH.J0QoKech&ddP@
%pe7.^)ZW\g9r3j[)Cos)%YMXo]cL$-LsP%t*\k^B$o/>n(+;;u\t6u`S:c*X-PnKRDm:TO`^XjW+0(F;Fhlm;N1A$eD?d=B^)VaR
%^UPFLh"c`f^9'8emK]XJg/f9MhYgr'aUoojqhG2sZ;W&<FO'e:^;PBhHU%M(/TOJ_:hfak'R$aTe1^*]*G&(Z]tS6=[-@bbD#:*8
%b0]CtS:NSaEbO2EMfl,nH,Z=#0'M`gZT#G$<Lf&PG3#bIlS'&\qkd8gKh@uG/q1.7Ir(+&>U@:"/6'T+*jZ>@?sHY7;6Yqob^fM.
%F.ZF)XrP)U3uE\iRTs>;rir&t'%jON8nu<k;;08;<Akau&PM@:Vn=OZ2g9k%_F?D^;cuWfe<AQeqT)SMoMa^ZD`k-sC0ckQ'kCWF
%lQ[&NS^,SfhC`%C7!FM\1>F<3_j#WF51&nR3Qdu=7D>-\ZglPQ=8W07n:-0&<NH:OP&K.9^?)'I7sT`*q52)a)?)s6p\W_qgWn"Z
%9Eo`^SaDkk<mE]^H2jK`p5_)900#pjVno2DH\#nm&d'dHXlh<9e+uS'=Q?t<lFjq&lMpEYj>p8,Ammd#D=1:LA6WVRj4Q"E*Hdr2
%I@mAFi;1Q3nTNH%HC)[5E>oM/=37i8ZFVr>ih_H7[%mtR%D`\.NL$qHEs`ml<e1lj,3]MA'q/I!?P(mAqM'"8j..JSjFQsJCY614
%`s\LK6?5Bq9+!E&L>g&^8#r_4p2dR/Mnr,>9gO"LY6+,I.p)c'<N=N=*q6q@.l^a&mqPR-.u1k0:+(Z`^G4AV0Z#,8\a$\VKI3bc
%3E[BoH*'4bc+1DT]@>SH0W)E_Ob<JKi)iH[)ElhW,/QuHpk.PliM"&+AefS6Xk3QVl2g_>E#R=\3::uRR/N_s*Q__B,*l3QD##CJ
%5Rrch=^Bc*qI*BV$M=I#X)i*]oW@(WS@!MHPoS<G0">'Gp@R^dK\9Y-DVNA5],ul3!\((Hq=TT4J#gXPj(@Z9I;WNL-#OCVp7_&D
%X%U@6;rb#TY!.$V.[bu<Q&f^c-tTDlZC[OAG)eXEA%,l+X5R+n:tqL!Z)XKQ4_s9+Fs>YVRkA?TjpM)\MJCg;V`NO0RW83h3On"e
%@bTdP6OW$h-YiR#NN%0rYl?NM;_FA.j`C#lDGrKs4FL7,1@J^QJ8r4la.Kth\;*f=')&duKPt9\BtTpiqed5thn"HGo)M!pSHu%r
%e"%<s4a$KT\eN-B*]O3;(XW"TqQZB[o(/CUqNe0I>pM[miP*t;Z42tFB_eaqB\n:F3KN5.#!Q$g0g_FcoBIg&WVRf_Q_8K41bd:Z
%kUp4mASATh46Q?G_UmM`r?0]h0!0dPZ$3"B]itF.W.D$9RNA;+C8ME+D!0lTm!QmGW>sa5;R:S5^6=7>'a.).S4*-6pl_YX7.!Fm
%e*IBGUeu_(7&krp%[8ZNcI$PZfM0dql$a63ajjj1=`^l>d.HRA#um!]>o-IUE]gYF)jqjVn9Ri^e)hiehHfANbbsiD&ueg5C0r##
%f?mmi4]1[!2V,3Q%f$mCZ$VJ`"fWk@,eJkj3[&SVgH;D*^oXUR.8Hq2grkr4W'!rTrbP-L&Y'XQ0GK-EW[!r>#$X\mXY<Rd%.oae
%0]C07KM)+1ncuV#'a3)fJSW5sW;Tii'I9i.ZBUWUn70BWSZ:#Q:oIVCX8#Z-a-o0>l31&L>tLTBb.O/&8=3W<:#bnmf5hjXOB_-s
%Y$7C&DZn3_<O:>(X(T!;)=eM!Cqr+khSf/4J!O]=5G&/!M<%6n*cJPp[_Xa\`go?[g3j-OGMKkAc"l!;UkB6k/$I@kji[!0h3I=V
%-]2L(3(;*`\Ba`VNiG<JLJ,o_c2,N,/+!9j"nH*lko\Fr7nYPd3?Us1aIPGT_/bd;3ne82T6-?hF;8e+WC]H5&S1DW8K1Hg[P*J;
%gC/F+fUPKRJ*B2`H;9)e>RNVA\;uLd[&gIX5gM<Fm\=Iu<C<&K_,0f]aQeqQ#/sVuX9DUZ#-.)=/1"7)/sYiOBBdF\l.AC%F)sYk
%4;`sa&Cf=so\M#-`5RH/&8#=ulJ)BYk=#8Ce8L,UG0&'!ZN7:c$2U!]Donh?CT>Aj^&D[Sp<UCQ`cZ7^Y1HaaIq^q<eO;4!lWVEr
%fD6UN<nG/n$PtR%>YZ*-fqi5'.2cGKa3-BeMe79liS6<=]j.R9YpMSeAo+fe,S!YQ;n^\kLTC>*[\_SYLY\(+)EET=%6Q]u=fJIV
%f=?WoHX`r%J[u36FjOP=Qe]iM&5\PGG*8C,"&TajBulrX'^Y;NGWu]0GSeEtC$5c!hg1t#3T7/2hF[&X2*uRA1gfan[57Q?qgRDr
%T;P'BRS'rib+E*WT$btFE<Jb%q9S7%e'*:h0tQR;g&HZ#s'm:4k542eBVXYcbE<0V[etSEQetS;qN7\'0LtJmDDSi9%]COq;1f2_
%CC,&o'k<JSS9%k&F?/S0-W1r*s$+V1/T,1G1_Gk/m[4$8!cTM9%]9JIY39Rp_]glNLMPKGA@B$$PnBG?-sI#7]b)../nCs>nA:qN
%;F[drQYnJH<+e4s3/.bNXG[32ATBWo`6n(iIZFR>l:PBCL`bu1HR;6L/4L3$#C`\9H8X/tm:>n$\EH#T5nYa!M6B:0/VdDa`+r?t
%=k=mli'q(:V"gcV,LI.J\5\m=>.MeuONN+='P4G;8p_Zk&#12!%=ns4@64II%1r8[G(3gDLW2U_Od"+e+_o-1DJZ:HZ7-<1m"0<K
%.Sa=,c"l_1[H$PKnO_\Z+$/j@n>C++G``23k(1jd.b'e`UkYaiojN:7rcQ)6b>\#LO4>M\2,pr.cIts#k)*8H?AY_e@]8M".c%=#
%e2+XNY-)>G_XM.GFojW.@J_\&SfnnaJ^m,\2dLBsHCq8F*_14O,Js_R2hK^@:)b"$Y&].n2n)A]7J+L_f>_!'&8'Q@k9:rsC?LDU
%,jc^10Zr@;jeu$`^R5QS/5lACAr12)-:5U8nb@7E?G:,X\SJkf%-^+epK*Zj2H#m.a'l6f($&YqlY0pO;i=kb<>7Lu=XlaLFdnrP
%m3Hu!cd(su(,r[*YVW-li\F<Q+:,N\_,[!UlY9Ei8lC&j'Z[Wp>bS<[D>@kqGIchKm/<9a#;[B@@Fp.b)`9Ge'PHX5O0k(](_>X@
%)W@\B=I&LFhr)&hNluomk)cGlj'cj'1Q3S`lc$H'LL(Fb88-lLN7X],Z/MSbDen^c_'\at%Bbq+T53N@2>MpSD6KF/oFED3T;79E
%2-Y<)'jiKO[gMq1TDIO-NC[A5?`E3='n&s-HUYV]oKoCXpH&2ZOi\#+VS"S5VU676+ZN2FV9O3&I;aWsgCt`(3U$,4XPn)_bY,eh
%oqq12VQ$WW0@fW=Ato-c1CQ,M7?8=@Snp"d&#*g^WqhJ0-p1s%)HM+:^sO&b_[8D>4444CccVL5N(IHuo_G`]F'T!r)3r(6jVaDU
%P`*76UU,H)Ao0or8aFNRhT"WY9Ue/o%pPK?a[9@SM>+i?M/-A&`\^37j_<(\Y(DBg*o$qLs*7bFg=KuiWPV$"X%0@Jh(%2f`%#m6
%*k#R@1,(h(bc8oj@aME]C!d9Q;6-(l*\qQ"BhL$^B%sm4<i"jXC>4&_)E3X\V4^+V=#="ofrRnRLDA(EZ,l(]C;!DKG)L2)VfGe,
%Fkk50TaC*@9X;se8Vu-QF<nC?WL=P>No5ncP,O2fN,II"Q'1aF^<S@lca9P1FPiq%]UWt:)e?F#);+o)8a*"HV\]$l8-sho2DV`.
%RHU6mb-MM!c$Gi]B#U7`9W+keg6$;8aekMpVFX?!2BcD'Y0])@/(a<0(>7e9cND^!ho*:7\jC0^Q2!4D;/$uh-+:`AW)$p6?uWqO
%Vm1'o.XgM+HFeb)^["]Rgs78Z7J/6lG9,/PSZa9cW+`W:g_B?eG+j0"3-png.R&2Bqudh"[^`.`_^TS>RWM0NB".%l`DDOb(\+.F
%3"<3.#MM:>mrTL]F$WsuZX\sJ+\&.GBZ"GkP)8e-*21UX\06OS2I"Y4"\bB&#jmEkmX1QSX3CS"!9t;tf3gke=:'1T#<]-?@6KG>
%=u8S%b(;VE>*UZ&[*#hq_d"LsM07Yt^1EOd^%tTk<(Le1e4CW\MGWG$f/Xlj\#bcYe6*UDd"o6hZIZ.Z<K=>eOtUh7--6i8/?@t]
%V^SJ5fk)1L/:#]AdL9o$<=5g8$(NWK>ZamS[8+/j5?$T6=-qZTrM7j3X*<p_;4^bK2NegQXILB%N#sA)8(]a%CQ?V^jbnmMM_q$$
%+UIq?9l:PlhQ>+=j=tAFh@+j<FXE;H3;iKH.dM4k#;5G[adfFjW9'7oLBL6pU$!4Rqoj54Be8qf"I!'HLsZr`3,VYu(04qN"'EI_
%p#n!_L's`!BC?ZqiT.dn4QPS`"!W&ZP?q-hNA`R=mIh`L,j4)XhJqXVG4<lngEj[Z1585mchY5GAMXVX'!r"GX,kX9n"PXFHj9*R
%N8IWoR`2$,(Ys:453A0A;#A<L5=S98V3V(d@S3TIOQ^KlgP.hObXV%6&1#C6>$/;'s#GdrHGm+gX$`9:%ct7"o7sJ]ACM$^_)U)2
%bR#Cm8[hi/(V<YFDH@l1^-'Au8%a)`DD#nSLQp#FU\X:Fj%&`$hrO6Bk:M2Y8?_W/c5\aHiN>f7`Q.+MY7BU"8rF0\W,n^qT$/1C
%DOM0td=OA5D<GH!f5u4;b.NCjf!KBW\'2f4B'eMH4VoE"rU$pg'#e3;YPi1So;rm3a*e>UGi!R!+2MrG2BVo`+mQk"1.troD5L#$
%n`%u6YL=0=a,=L>9[0*_;CZ!G5OBbW^@eqKERSf70.RLllb[=(Qq9DHRB&3Zp10$%264<o^4S2IHqG):=*$PHXOWJ/;jlXgRdV7K
%;tWNqZOlS/fNY>J`S$lNac.oGpH$]E<VY4\@LP^tQM*STZs)j]YV6C[/e@6cge/YQY90I-8:AG,!rKTT\m+1m3'!Vtk;UB!@C2HJ
%.MGFb)-#L,)$BV&WYi\O@>r*0>1[)+U%Yu?UXa$D]c+ZERPLh^;OH!XR9eqM@CV@IqbiC!Di=HKW+rO!Je%8s@20cRG1Cq6>,c/o
%C,fJl@Cf6:--,lS!c;A<<i%Eh\jnr9oRPt.4UeseH!$0sj$XGdOiHQ/JMnn!ho@HMKafU_'Y1_N<u.a0A0+]^BI6FIXd$eSDG/=(
%8N*ULV7Q:G^O/'7fs5`@NK9*bZ>Xd+^7lnup+SI4=Z'_[Xg1<1loJKP8QiI(Ptk4H+>IB2ZrQ2Y<gQ?)\R4G,OYk"9c"3U;/K%]^
%P=m]PQ#'^J!1te?AT&RW**1eHcB@VG3OlN?fu^)J;K3"4&SrRBqpm+ceo$'p3e]+l]k6TXHp*q'7s"Cr)g1-uK=\"gJh]:KVH.K%
%#/8(6\4l0rUY@7J\t\"1gi-->Ob0X!OQc9k0s1$#^M$66c`N<\$&R@NO7m?1V-E`kYR+7\X*Rarg;d>i<KIB#e?B6W-p#VYr3KN-
%8$`29!4LM=D)6T8QK%KY:8aU/A*\4$PBoj4N5,qKP01,,na(%h;or7TR)eI1q4.;?\J(:c/P02qdTfP?Ts5@!F37ge(X\rLU>O*k
%4G>Ue5u^U88qTQ39Jg2$M=p#n[I$+kg\FG<.Mp0>3GoAAqJiaeEQ22TVrb9.V$q-8(i%`S.:gA%Bh:2'MSU9;5oA%ER1;qa^j&XY
%et')&N9fkMV@[mB"[4gU)s`nop7#X(i91n&lQd<oYIJioli0<CA,][FF`a@9;)mlPZqO^PL#NaE%e:2Q2$1&o7lc2!:UYJ2.uiO<
%aTYQ%S>>r/V3nLVjSDMA7(@[$jR);PeBBFBKA$(*24`BSNV1'J=Ca_`_9-g5++V'Tja,co=")?_hRuI[EedXeeiGX,*>[:$PZsDR
%k_".#WR$\[:r-iSXlGC%[,T$Nl@/<opZ<J"I1K=@NL#2rWB.=hUt0la"gR0r*2Vp],b(MHQ"A#&11A3WUEq&(/$Y?K`;=?dGIT-!
%_Ga];>%n[cQ]sC+3gZ(0Kots:6H3b74[Q_(.A=;NAQ^+VGBc>(,VAF"4k(EQFHn&PVE/*F;e<@6V9oV0Vl:6m"Q'6RI%B]p\IENq
%\2LWprBuhfeS3"&[76B%WU_W6l?f5`0A#@X,egN4nr6Xl7TZbF'e?^r`O\X`2EA:Yc"t(PhTml8MmDWh[*IPs#MdZeU7'>Eqp1fO
%f5X=R5?m)lp_t\j/'9fQU3oP7\5b?R]e8a,p.kKSNstSY+!_ktU)=hj+$qLcB'P9d-uUiC.n/$sIbL'9B&Y:j\qW[7Qr_!tCQ2Yn
%*^jR.%A-^4K@R:SA9;'^H$)i^CYi;u86k^0q!8:J3j(+,UZT:tDQ?)*>[dkZLskVS04'JX9+n;+ILY_.B\CP[UX#5i>@N>NVID4e
%n)/SWS@]O)"M^uSVfh`NpLSCNF#0pOq$lu[X-?oBVGHs.ds^4t56[dZi?S>A62hI?-XQIt5AL[3A)J7m"`q3l?$q\I27e$#ngGhu
%=8<Z>.ScN$SVNc\I9A<_pik`NEt+s:"kef\.H3D@qn]E\`Y#U*QZm>>"u.s$BpfkY27n^9`E]1K9AHBr)g?5,Y4pi`4#1X'U]0.h
%fZaWO/aT[F"g,4VCP2Aa\F/Vsj:?M[/a@N,-ui)K$P`j)6u4f_&5e>S[>"0."Y4S3ZkT9XYpNL"Pe'2dO^8`3F1'."ZEBoLnJNa&
%rWtb]n:sf'Ot##'kXpujYZp)<ebCFJ<%7C-jHQWRGJucpd#h7Ac?:^j*$7C[e+4><ogq4]b8r,aK%/>1=7aN`fa^Q:Tbm;EY%UUR
%)=JbkQl,rOBp0ju.CB`4<CMSNn_i*O7CWKM9p:%[=/g>VVrlKMZ&HtunK:EC""dQ#?u-4JGje5()[,C@qDjH0mL8]DLt>M6paB9_
%B.6*aW\$e])[$0TBs_J<5=E9pau>!$^dnRuc'"=e5h\4t@n"3q`4SZ?1C).J9T$B3e8gD1EiPGcE:=6Lja,@rATDWTOUY[-i^c71
%6&\uD$FMjT*pZ4[=elBE#+g[BRgEF]GQL`qklda`UOb&--DifGM><L?ds7V&>N=1JW]I469=kb8/.CS$au<U)\Usk%l=+S5AbTA.
%M\:Lkk*A&pCIjX:'j76+;,Xg93iO)mMHEQ<J1HGeIr\,H1dfZ(H&@b&!nKY<Rn0K^M0WjV2RS1*)IZ<J5Dj/6LoCkpo"e"3Pis'+
%9rmKT<K*-\3iunj>>`jF$Z^,t*G'utF+!Eu0Id74.d[t^fN.gp'V!>MRphLhe*#B=-7e,`24GL]V:98lI:R?K<WsSGpgR5^pF'c?
%?h.3^IA6-;k)Jb$kq86EUZaT[3@'n_YZ_E8FW:dXFsefH4-JNmUti-&L*_tmn:cS,XE\fM-teYYGekJb59:3CBInKY8K01$iaWAb
%b=H9ToJ,\Z=+*WC]3TfbG8?Y4kdo9IC3W=j>9O"4H!/V>]&"DP\_UXRFZMQ\0@!S3+Mhm^nnSu,+[Q`7<s'7,k;>i.-:N\8iciYJ
%X#B&hEl8FDhp481s7@8CmEfaCc0LQpS@mT(DsLpsesF?!IC:6e`Y2It2mp=]MrVU!a88$7,5&c1IG+<Ag%H@gmo$Nq#sIa%anY8f
%]MeQJJ9s"5e\p9JD)edZ8Wne-CVBM<e3>V)]NArE6HBt&5`[ai8!q."k&(1iH%l(R\XHotE0`PRT-MTSVFeT15q7*nRn9K6THat[
%!U!iKHDadq*5'][V/[L_(Gp(XDFiE%d/DGtB<>')ePl`iKWG:sWKCqkFB^&D[$[kFfr>gb?ugq2PgYS>*QU$CCREGu)()c8K80C>
%"hRmdT`uDe6GTH-q&2e*8H,VbaX#>oAZr:Vb>"JlJQI*[_jauaq?h#RF0?_C]Us>GF)pYV[]OZ(.e?aYGq:R[6YKFY]WKDQMX@H:
%e=g$&qOqBJm1S<R2%$K+oerZ$\g2DuPe&@<D0C_#`,[OJ)`:*do9QCJJi-&m40$g\X\$9Ag.Q9WSFs*Jm8'*o?d^D\WdrtbolIG\
%:`jN=Dp,%;D:E_kq.9g<AdR22ea6)G`n)dWS##4ri<7To^XOrH`Jjpai*g3E;Hqa/\=+4_rRBaI\O#*4=Yb/9W1NGClj=&ci5SWO
%KP;>.h8W&K.Bi&W<?IQOYHNoNBr8)pFXYO^Z>$.sH?Le3if<5n&LZ"LDDuBE)j^/f1V5hrGJpO"DRCnKHIFXo4K5o_2Qc9!LToUF
%U3&'9\?'r<MmI"*U4==SMk_cs"eBNAU),kI)ZHfmZkDNLRIZ3"nrd*T<:_#IUHph;Yh0nS^KI(I9Qg',>m@+596R>[pQm]1n,_sP
%-qc^5T*f(6]=ekZnu\=';7CR7;`]S"Vr*?]2T]r<&nF[V-tt[&=Uq<"+f:"?H9rVl?Z=GT?1Xsi/lB\X2=6EE=C(^3$<mF2El7a"
%cj+X=<Ti/RE$#b1/qj2gPC$=NIh]ZJ;9l;cQ!ZYK`e*ma)k%Cb3b<;7/K4XFRAhFo#tsED4-A^kO[Q.+m"%^IVEqTh?N8@(cY?HT
%$QBiPM_STdK>8[75;`a8d6EY+(_@cj]GSa*jT)]@@!GPOq?0p$SGW<.2dBki$7eBQq-%FuQ7Hc:r=c?Am1_KY$Y\.U)=5[\pn'.$
%4@/tW7;.!F`m4n/3YfDJ:DD7b<*cIT__t'Y#'KT"$GrbsJkNKDYia@aXTqd.X%L;-BnOl?XqfQ(YLHa&=Y$[5K:dCdDY9?&J^'p=
%97pB3fVZLo.ii4npp&CZ5,cLQC<Wp0A1j_d=KAl7]$/GK\Zb?;Z1^q4mM22?V2\WMKj1H-1e]&Cm+Ok[GSSm9^Ok^lQ;&*hkV0(P
%5@8dF\&pD10o`8o,:tHF*Vn9#S+/$6V^E9[>@C]Y.]+-9'\eWhgOc)&Z'mYCGn]5H3*0No(krCf$L/Ae4%m`DUZ%hST\$(L*X8NE
%pF*TCC*Wc='!jTb?t61RjFSo8`O<PIqg2j;.OX%b28U7'H42.>n$'EBUtSW8IIfeV'Re=P`QqH7ln[AjJ4M;<hXfKb:h>#Ol9G)b
%EC!'<l$V[,P<4aZQFKn/0),)BTUCjCI]4YS`)f9`msB/#Fu.>m,#6OL,j^.B(_$l?%*LdXj5#gbk99[Eh\U$h6^XL%'Ihq#&Z-[[
%TQKH`+1p)5nQ[MD7#B+<N.lI.aeLbt6/sZ)koL%Rhkqp7r/<;VfX&6<a4TrP#:O\f=)W2S$b'BamWBpZW(N2;Jn00BkCDRXT[$a+
%e#4n-oEnSB5HU=@X,%a$I\0LFj=6kI/<9BAcs/'P9W"giU$O&dB<LB;<<A_T'U_gr.kN\dPo:`<OiYZN<@+5AJalhkEP4!P[#'A+
%'"(&'UEuC%7936LS/\qCkM<30b@KJ)o"T;7le_5Rm8Ot&C)aK<b3Vq6[q#^SF$tC.nJT:n(8"n\6_ArcI,5US49de[gHO.$SYp5T
%3RMPf&L=oU[,,"nXi*]s>bT0BTS=RY^JQNueah*[\qkY^Ldte!1"U_`%]V+D]-s0#*qt_$CObNEDglB2kHE\3=''D4<BlDJU!O*#
%:75&?=.+7I:0*L-;1R+r-+ETSk[8Z.WN'=0V<@2O-XptlbTG4<b[JlOH4Rq7AOdL_kA,A&&j^'gZ_\_!+k-!^P@ZK!$jX/fY3&\=
%![*r"+Ng+9<L15-U^)NS'LiD*Soq18J*U22-dlDG++f;>Yk/n"rh.*C5&osNr./oW5!e55s)ROE@H4oMo\HPfpXUsDQ(qY>+$K(U
%[k47W]^(S1LVU4;,=(?$Wal16?uF(/?:$O80[8[K/0r>*E"5o=<us;ia2Ll?`q9gb>L/40f31C>5'H('jnpdUbK@+Qh0@b_d\0f$
%\\5R5X_k"a0Q17Mi[7ka`lkptQR[H6?&QbI\cd%&J1RZe1]^*PqR@p[C,M5*$Ao)Ed3X4=Q.;PlQD1$#U,FP!e=m=0PhReo\+e'[
%\'SNrmV^!r>Ah4*hUcqab%6Ru'rLX5lNQ-Jh0NpBHWG<+E*6Ua`T"uS[%=_kT;Ft,7Ii\('>&(.@V&:"P\Qs#`<e=]d2Pf9DlLI6
%-Cdj9;)&H"C&&>"QYDJ&!Z_4+UeXuL@%K%SPpMo?H24?=jSLF.qtVg,:O[+,YQF#%jMsdP`KdYUlI/p;Hq[Gf=&Vd$Q(3**Pdi@l
%"tNX;V*pR^1U2eES9SL'E?cg9c$Bk'f^Su6;9d]3=jbc@9I_RtF$T9Dil.=meXT/*Fj(Am(q.bePjp>'jeG`78qaj1#:7I:j@<42
%S+Pota2n^M_+B\`O<+)5EJ#\WMX5ShOQYoU-<B5J\U^X0]+rRWBZcD$eM+PNTI4eW;@0Wb-RC-,#*U4$*6Ff2:5\]C@O9#jSX7Wj
%`97t>o241]/QhjWh;XWJ[(j4.V2:GrkH?(@0U_K'&g^sS;kOPR$6?+rgJQX]#;0TYCAj?\O-"a7S23rV@iU>1S?5$\/!YE`rDFPE
%EEVpMrI[\-K@h];e1.'/C:76t&CDU[%tMJg&T_]iT-Ve`D%5o*Rk8&)XQ3(tDI3Y(f2Z.ipStc,h'/1L.Yk0\PgGLj*g8i2'N.7D
%V;*VMmL(K&7Oc;:q7@o-?MkL\2E3^.O/#_4s,@Fj0sr4*KQY)Vb%P)_]tD%kF$B,s_VHfll>:<DBceC^9Q+)&Wk)=]$iUj.FB<.4
%TcoW&f6_7'V;>rmX`nI$V=f0'8Z#3$G1_peC?`+5ip3n)-d`f\QJD=%28/LP[/,<SC)BdN^XGb:8Q+@\<5X$?2cdT!h@8ibYBJQ7
%n%=fafBt?5c//.dLhh63@VonJr(:&V>YDLCmAhZ.NLBl/WMgW`DE)<42j7,QRiAi"mE7?\/BJ[pmHI%b>ANJKP3p\^CmUbkqD=h6
%3P*/J,r>-a&f4'[`DYl15"aE'1Gc1S6rVXAFg+6T)W<hPQ/=^8atK<BVdn]FU@ld/<;&c?Hr[S;V'e0gL.6*eq6urcDlYq,:@0a%
%23/V:T>\(3MsTQG^['[)jg1ldNP:7`\oj)Vs'KU@=%]#sG2IG6B$KODl;Ht[C"=-?%_YIfcLInoom,/i<p;@-IRO`KXVAsNP@1qC
%B#[4V09f_PSK$8FK6oR6ZFX`GaNl/3H'.D:+7Ue*OWVl:pe\OiX3mo"B@+TeRQ_9'=cc#qV6\&;1K2jVaaIb_c_>9eCXG%j_H>9E
%[LFF9A4P<*7bKn(;sJ@C+`i?hjA;2L1.oE`Q/XICQ<>k,X8H4#[$0Vt;"R9$l<\;pPNb_6>'=N1hGe=P-6FY([75pBFI,q6D,XU#
%h;lnX3=-DhcoDQ*-FcgFM&RbDKR[XX$U$RQNe7Ju*O-kJgGMr3G@;!mnabTF0q(I5;TEZKop(h#aIg4b+(i\C#Ci^&g,_->.5NEh
%=W_9SH:l%#r#miK6%V=6kB`h^(ZL_?K."-'(I_c0/f,KO_=>=Nr"jJ8UudZRoOXf@BYHGO3,66^b@a"8;L_H$`.8S);'6\sS0Ums
%6rG]_9NTLjChV0sLqjhMJ"-Xb$,SY_*0BBZPJeSX6K<Y/k>cuUMpH8SjH!O7ZKPl'(OeYqL-q6aqI2:S+@RAW!K'_gI9F3O\:!&Q
%fH)4LR\``YQBcQ,<l?j'%^EH;m00g11\:IS@K]LdOL<LHLeObSG=@:ro`R<8SSlj?S\u<\]n<JNUPkCbj?(deKUSq=(iqcYkoGF7
%lNNmc;[3iqD[sO7QkbL(Jc13K\[&^cc14*`'C8YObiK3&k9m[oF'boKk1dP"c0SKCWNBG%N3JMEC7CBdR@^F@9#G;kn6FtA:L%su
%V^DGt4+?]4*oLCZF51`=b!4I"e>b3Sb=Z0G9Zp.M^"AWPQ/dchDDtAc.WrC"ea8QCZ_D=CQEe&4@P>:EcU*DJQBV7ikOpG=0TsW6
%;Tt3<[cUtag-U]PUr%g[UtN^o62iu</D([GC=,:0&#L@%V(SZJKS43%A.]Q.8IMRs195)a4_Kjb]q1p$c;rNhU6THSdeDGeC"7ee
%fdP]BafU]-NE_1Ekito`l>sC^N]UCRfZ>ZnRO2G^1RVMC.^XPA=22>:d4EA2^MZWBA^S],mo:$iX)'i1e._C!,i)f$]A;!OWi?Ds
%S6B51+ug$K93(_RC3HAB%BEb4%qY'6;ZaiW=%\&uLiJ6UaHHeIVHHUNfs%J#&[#FB@9G;@]+>B>]#(8.la43_obHPKbJZZl9$Tgh
%3s5GDb/LTArZ]BNak'DQH<iK52b54*-1?pd>`QcDUj)s9pEJ#K=B(#<W%?>4Q9:ceamn%W]XnrXdGd!X<80FEW\dseL[*9?7R<K\
%j8$_AL=0-9QYI21ecZ_V=I#7=RkWV))s&W9-tc8>O%P7:FZI"fk3^sE?^J^W7m"VJWnZWVB<6/bXOFr)cXpn$Al2:,)W]%SQ)@QP
%4r%f8>n<*2mOFq0B8NJFW(ems_GPFo^WEa'll!ER\gk5s9A,H-K"On?AG&FidS%E=<k4?0[1V[u[HY,hZH-89ldEMa]$N_pWNRp%
%U\g&=b*j`HeD_g$Q!S57B"LSQSZQ<6N604Oi<`8K_gurIbQ?d\5jC\&([Pg<_([7h$$nPHo7oTq^eXC#$"uSb=hl)E5n\$l'+'n[
%UrG2]KU@>l`BDGQd$)+s;AnbRH<=`NkAp^`+2&u)38Y=H`M3U'MRd$goqL;(ESVs=DK2dEoK,&<dYSK4S"2[kkW*qg2*UOSiD+0j
%\VjC^q#&P*Udt;rO7m_cgK[ZNb.#oabW+E,2AFDn<G/,7Ltbaq/TJNiUkBbCHOaoJe&7Ok80'd,!9Ar9Cbpbb8uYPn6;/9tR1qDs
%*$Aroa*^+f8inuX(Mf1lfk>M2#rEn20?"fD/Y%qtCfFqW!MXZpNu/)c.d'!j%I-=3>,teCG?/'/l6*3Lc_,Hj`._>eS$gN1jk>*J
%14.:uAMue((K=*KVQ%I*nuu$XeCE35kb-q[kG,jS:'mS>S@%;e>29!S<I#4*p"A+f0<2Ync8&EHT(hL4)e[:TWb[/^Ss7SQ$*p](
%Z-B_^=7a=prXc-SD!<rQSnlT$?#luJH;Dd&!fnbo7S"g;j=LI,HBXLVV$OIt_GP)R.2epq</*XsZX'S6RH.0qYq)=,,"PXgmT=+t
%U(mQlOQsW!I&;[:(OOM,%p\8U@X7[pQEUKe^;IL;DbiBA;\dbH16'W8Z$!*c3n<[YU@c=:naYZr;ES2BjcQ8dS=I,hrUFl_H_jsc
%-!X^&'=n8B%<H1J6jK01dq:]iGn*G(T5FCbkj!s\`n_R)G.c"AW'Qq`/"[>,'I&*6X?N&C6W->L^/MXQV[a<@g]fFY(41BVV-h_]
%\OcD[;I7V,R8G3rXnE\RlTH3idFGCLYneEB;ulF"^\dO"=!->4s7GIPG@,g`c5Q^Kpq-"bPCYsoeZ2K<?@R>5LX>/KhnSto5JDFA
%&WWIR*YS`#Tsu%M$E*DH7c79-30bjb;pe;n^)r3,M8Ia>p(-fars<WD#6=es0&?Kcm[sK*GP;Q_J,B!0B@Z-+#Ei]f^&ls`=Z'$i
%r7/5="ML_2r=ZkapDs/Qmf!rMpESQ4@sRVV2da;1>K?-%qJEn\(/NVa:7[E.]W"O-Am3]rhnGiArVb4?T\gf(1cUM0JMrhu4,5)4
%H?Y%74P`eS,c@PrCd*9?^^+-i09Nh/V0aK`GB!]0ZR=+im`,JhKEEe<T&#Cu9c/foi=e['/Hjik(4lM@?>/\;MYF@pW"E?`r'0bI
%JO7i;8)2'V%@Z^9U+Fh[&D"muI^F$#]>AdpPJ<\FTF^dF,a'9;_G?ghpF4C1]*e0NLtrK'1\ka[(?&*G^oF=-3Pp.s_h,U1d8*ZL
%5f5Pn3R7i+8JrbjTJ;q$I$YI+>85GC)+PkjiFP'$Q_*X*XLeZYJ.ur3Et?erk5Kg)!:P7WPfmm6>VF42SX%qn^!T><UBtSM_%]c:
%X%Q-5aP4T%(:rF+Q@,<:C_F_hS<&Y@L$e?_H\+N3<-8J]3Ta!U+8`n5ar8:06Jif=/cMOTR]X#h!u7WO-NiqTGuETX@\EGch>6B`
%572RdT5t)I:+`g<k<PCh)(ikKUsoD,HHC![n`CFD^OE-D*G5nT#(*IGr&@[c'5X')13jAK1@maMEW)6<mX59!cI5kDG!%3l#EC"t
%=,lWJ>6;DrhTsB.l7Gm,j6j?E(s%TeV1*G%.m9M9ld2MZ4ql.=DjX*7:?Q#q?6A!"IN9Bg_PkhKkbsJd[DAP0dr^Z1!"Z!T9c]rM
%NY3"WjhAN<&[B#uX@B=Qcn<]hLN#9P.ssXc!:Cp4NuE89.-/cE-co.hCP!9j?!:-[CAQS/%lu>>hLrI60tL(]pRoCUH1A[6<dh2N
%`-5\^S8&*aZsc[+PsneRhA5[hOK>ii>C359,[83o7Ib(KpF1<bB#-h;0Gs]3cJ>7;]]3:VVgaf;0WZ/g:)gC#6Q?AKS5TUWdsXG4
%1=i/7n497MdqIVB@@I'IA:6qt0CA4,34i6D/9p?/H^M<tD,Fu#=5S`[Y_d]H(W%ZIW6#'hlcug$;(Xth"u,3eXN?-4*jn&1rV?C/
%Ibi!RldYa$ici_'bg%fe@iWh(_>#<fU#(Jd#*.?u(9G?#H-j&"TQV""4M9F-_bFrc5lo#9</BpSaHMT@XN@-:k=t2td\:pqeiG?(
%4$_Eb<ES2)N-CtDAlM:T$J%7Vef/bV3-]%EpS;*22GD/K&Lj[:(qFj!IBIZugf^7V2mS1q.LGM#k+9]`3sS-=$Ip,fXL;O5h6.aa
%l#ZhI>Zf6jH2ijP3Bg)31@lm6O$EX5HYpf&o5Xb4kjArMo@na(5JgFb6/#KRdXcd+bZtF#"7h_S%QVW3=8,?1I5;k(M[+a>!i,H4
%p&t)Km!u4#pXHl`U@e_UA!E`dK93BRa,1X)c`X4Lr2c6:UMA6s`_6U2&R)>aK4)It!MbL`2'bZ3Ds7@3=:fW="8HetecFRB>m-a\
%$PHjNNs$BBm4Jbh3J4!n-[AmaJJI.A:&SUnR&nIuQR6DI"N5kB1CX2afrM<Y]5`S9iX\j;UYSA2GQE$(S1lYY8QnU0>QKj5D(op`
%rHj\3;G&^)N,5UY/G7^S7/UX-:=M/ZjC,<9d.Fhl$LS+h+[#N:R&>4/+H09M[RSX4PC+BGBV"aDb]7?aa@TT@4^GYLa8:>-Jt@KI
%4C\YOa%d%4jYSP$nC72(*nT<@@_f6u5N5fm\)s941WgDPA'`I70PXK'N,7?P15Z*87qn^\/gJisO7K@q:B^%S3"fUX_AUYd_ETKk
%.(Yp<]m%u"F\?FV)73D>?sch7nU?l2]m#@h"'F-u$f&r*1j+1EB7g8)2^ruBG4.7]g7tV6`M(n=o]>s\Eooq2a+`r8RI1dXQVe[:
%?q%bm:)%H9>OVnq0Ij4.fZi-$/rg:'@7%#o?_d]F(7jlSGp+F7\i&ET(krf."=Dd3M]sJTVu1rCGIeHbeG""P0PA(C;-8^HCg>U>
%UO4.3@n'%WW7!YF#66Q8H-e^@FaN4iaGTF8g\r1Z?8m,O8gWUZp'Qc3)H`4(jt97:-k]NO;XS8>!>'/!@>t>AZ[=U`K*\7^'M:;.
%&eJY",t"db[l]Z%6lpY5#R6HNamEP<j-);"ZsKY"0H44"AlTqmfSU+`h;T9JA9/-V`h7CBq-e5/G<4]YJOA@75GE%#-9Nbu%'ZOu
%[Yp"M=^7_M=W1!g+$&SF\ig$U/NO'qPS=^q.F5%0+(5s]Yr'V\T3qk8S_8k)e*R9.2LG5MW5(U?g;X/t/*Zau!u$I[hKhC)Keu+)
%T,W?PL0gismYmSE7UX]X7puul\aoe24XT9S_fnA';+P;VURcL@1m0lF8I7[dPS,4Kc?:E)6o=`P0W3BJ$r@#b=e0X;LrDdP#2"!,
%2q/9F)-YSH!Ko@#G!PVmLEB?2)&\$C-d>#^[i0[f0edRTbrKu5*U*A,\P_2*F67Ng+gIM<>;#T)/D:,((iTQ_[\]CKf1,68B9Z92
%7(`i-d='mkiKAB)NjW&EE)XC/JGW-gJr]*cR,MIHn*U?%E8E<]4V!V*:]h!kAo.MY.I%bScP_Bo\U3IUT1:uR&/?Ak-b&>kWX,/f
%M"B(57fs)"OfRPK#A9s)ajXr$$D,^DFoo`Kd%-VnRO6rpOPnO"$!%%W1ZftO[=L'SRa6VWC-j*U#ad7M24gO)\+='V)kQ]5SX(>j
%Mk.0^DrtV!VG&$hEX.\'SN?"8>WQ#/,iA29[jF/cAp>eqWe+RcdSL)?W1hd;9gVNcIXiW_QmQ7i#GB/uj=lODE*t.+1t-%:*Z\;=
%QjUeM]QXsR(;OpMdV_AjNDB8QbgJoIZ`kPp&<$[DGu05eBbuBaUf0';dX<9`7)bEH9VpQj>)\_>4:&H<:qVdD_'`ri+.3&Mo^9-p
%KX.tB'!X?F%^7_'rB/o@XODC)3WmhQ!qpp?/Vn*gOc-e]=MlL=!lq6p.m-1'NHSEd>#uXkZq>R`P-tc8iNP868S148U,=XLM?sAL
%C=:eArW$+M$@od"h]-:bi'!RI)4ZASak^uUV^unM<(cT.qg_G.)2Z$h%/kWcqU'p^p'.gM$j?PqjuEMMYVe`(@g*N;BAb<[M$+Fp
%.MnWHi"e388NEO=<NlO2Wuf(PNFl^=TT9RgZ]`VXFu;SRXK;>iB\+JJ(mQ'BMCL:d)[-\0F4jm`RB:o$#MnB2fkJl]1GKaD.CqUO
%JS5[(cQWc!6-)gDcfZom!A9S=([sRN*LCkde.%UM+5AS.68oMX$:RGtAK-C!<hioX'"#<U=9_%c=+GtX!_g07Vq;g6f69ntoXhBo
%F@[hk68ll\e*k/1-/SlFlpdhU$f=&9ddsJ4baJMC:.n$,=h.'+8#;\8F]n/"m9uE>#iDV^6F&+#J0LSn/(-<grkT)p\X1bfnQH4(
%F<Y3G"(IcaN($,o9jin&<%f@\$8D/LI6`3@4;[(MB'gC0,Qm).o8ar,"9c0JO/(R2hsVNl_@5t+VfU,h2"@(AS=T"(7@k=r%`#9h
%bk??,!]Wr4=EDN(0Mfn.EWa1]!,r)"==7^BM#jh16eoT+L:C=f"9OaiW/CAn?\Zfg.`h`5Me6OT'hEXoNt]hs!kY9"QUg#[BB0qN
%]o/#LQq_o]p;1H9gm/@*9u`CX:(KNLHG:*^&3IW5&;X#IU2cXa;1R=ZK`M^.DDrf3/kV(8D'14F_EfI'.6I?rJD*9'k]@1i8&bo*
%gfP9nW`7c[KZ"<B,AO#9h<feb%$es&#5`G2Pj4F6*e?RHa)&i:oEKh7K/69FKHf(4)"AP-YCkkF7qI2VCl4VhHf@eN^cL]u=<@mi
%jCLm%Rl\D28rT:.+6,ZZ/1>(On5IR;C&aQ7*anF/kSOQT3$Ha%W\A/(^p\4rOa_sX;.kY&(8-,'<ioMbNb=ZA"1e-2Bpl@s=D,2s
%%'DMn^U'J%8%rs@GuQ=[eZ.DmoZjbR,8MIYS7OMsMlmacG>84`ffD!L_T3tS%eV[L[RFS'AqahnJ?pCM*l?\t`Y;Lr%1Ie4C3=C#
%XKhk@.*fDb$?D3ToI)-3nB$q2PMV*HA.cLS=[=gD1k=LeVlQG:=mqT;o2M-:fs3%QeBTHQKr5iIJ#Y(c[@+0,>2=3r0sYC6@G$8T
%?[W_j==q\:\4f>o/Q*`L5TLf*:7E5p=f_aO)+_&.\L:M`e86VV!hGYY%ktV=J.1dt_06]!)+P:Ld?pep6Q@9PHfMZJ`T2!jV07fU
%g&"F*n!@oi*Q=1D^;i7_Go/rK2]s'8oIlY&CI#4hdd&lliaM%Q@DlP#m/prKf;#I;<UJ_YN[d&('u<YLA?H)eUFBdE1h>NRgam^d
%E!%oO3uRj%`9YeU6h`t/Aa$H^`i2i75-jghof?B55!*Y>"Pte'T@/SY!qD;->+_<qC6BKe$h)l?2?L[iK.*+"?rJ@5rVD%2Z(@i*
%K6n3mb4E>`WU86[YgG2?1[s&DDteSM<'Y4AX0O%7G/A@FM^c"<4Wm_ogsU4m/(2CM/2V?SmhBRC+NY#n>dV7kT#O:=SmL(biU!KD
%4(X*$DY*qr7fO$c]>+3qh/E>^l5eOs0CQZTXGr8Y+UNH"i4H_/7c%s>S3V1H%1f5M5ZAln,?hIh">Yue+Wu.9r7),_F:\?uY]"%W
%ClQ)W+\O)g*]&M)U&3@s-L*UC7u#:P.tWj#PpH`f"$7g\I=jUoB%n1cn,T^j@/_"q6VePdOlSAA:O!+sa"qh#]4$dmAoqJ,RX-\3
%;[5Q##bX>pGJKC_J3E>qaEOLP2E_7C9HJi,53fgK)J*_u2^0)adh+-SHj0[t!&h?Nmk`s55-]GCj'gl]'*^et4o/4Dgn\'k!;:.u
%dTHuoAt2,k$ut%J%Ka\"*i*/]dh?\\;*0\9#u<k'K2*8?<<OQJQ%<5l#!g\b3>Z\!)]0r,i/qY\#*LhSg;'dT(o-eTM%)bkU!m5.
%#o@!;">UK$O:H'o!7&&35]2V;!,R.Q)8r#WaGf.Na2"P?"N-OS,SW\rc]D_[aX0O1LgV2`Y9Ym<$m\h5"49>-J4kdm^bT%s'_@_]
%"ebX9nrbX62``'u(80ku#s-`>;PH/si1QY+&b4u[L0VT;0bga-K!LNR&'W%fhe-Y_J-dTpJOnM*!RtCi"0B`ki3Fm8,f(1G+PWeV
%a]u_u8-8jP[KLSY3eh#BaQ&TCRNTa)UQsM%#u#!$Pi1>7k/3)R4*TCrB',?4>rj$G#$+q)bA'WNk#%IW"M)A&bt7YNL:+=1BCqs,
%idl"B$mZPi2aP),P"mR+Wk5pqK`l)f[''U+3gVU9%%N"S%SX86LjC]A!)kr`__.2ANcEVSP$opkeJOZP=imK"`R"iqL`C-pk",$_
%M]sC,Q;-L&KJCY:7,djL\,A^Y&<T?KFN0HQ$ZOZ?o[u*`#$j"DJI"a!)2=97!<Fo^K03k1]h`#YZ3$U8CZ<-qO1GAE)8234!VKsX
%J96p_c:m:0JJdjRGX01gJOFS%Chj+iQmk2V6\Kt]&V!&q/\=r-ci``R%?O'r2h8&1#XlLURWN!JR$)?LdAB1qnJ.Gl+O$]X(S@*$
%97ou4<%J))TfTZob^-J@8Kior*.`8]b,6EI$m=T#-G>b.)h;G(e#$b="[M![ZcjqFGn:OQ;2HC[Ts`EJ+%<V0#,/4P%q%.pU"t#C
%+a@<-1%W1+-DJZ>p5@9]!7&Kp\3=m7R16[324/5H@'BH]i^3X,m[]GRf3R9*]BEq#$t6[K1E`QTMC8ouCM'_UR[*?flk3Tp51-!q
%I?'LK5lMu`L7TIk/5k@E,EniY0Mb;Ik!;iENljXL5Rrnb"?CEYKg"Zk#"K$.JJk#6$_ca1qp+mn7a(:l937Zke3K)MV63.O(?&]l
%EN$-N2IMr\A!iiY3NQo]:<'P,h&P'bS<Z$po=j[PO#H\;J\Y8\ar]fsp(B!pr?YH%!$f)%(k(>AmDTrsVoH/h+5-!.Mjopc46;]L
%C7%r>#5UV%(Yp(@!A)N4V],!lY66u38g+2b\V]0fAL.8#&7@9)1QD?Bm:(M`6%uSeEpoA1mFf-E5iB%_4]hO;)B`IjF:<`Iou'dm
%L_XCOcm_8QiZq(DdjEY6RBh:V*:5cVTnn[<G(>md+Y'Gf-76DB,Ib(3:4\V"5$@Lk$oSeICY)Vh?D][sFpqdcj.$?5M%#?V/ibre
%EjLUNYj/'9HREA6@+[Lr(alN2<1L&kE\QEC']&n6%d8[75TiVO/b1;(TJA8&c=e2Z#+284Y`Dr!e`dXb=ihB7D@J.RG=+,D[g.(o
%i):)m;_j$rT!IJlE"FZU#*1kOn:Y-Ac,cJ\goWl``5l[NIahd&-6oXW7@6Yn:I%&nP-NO8KXsJ6<p[G3%?N)j?m8NW]N<%]ADA:/
%A!/p/W;J`-M,E6jU&mf7-j=[mkfgk.=?God0MU+`)O0PcI^G`+WD;e3`5$(Y8S;(l'*-@"eD:\d"&/PB%1%TAXY*J%8K%"=UNEh!
%5_DA\CrAB>gk*>\=<<=@i<C'-/eZ,D0Pj@FgmSV4<.,2A-i:IrR.0d&m6(S!K<fSL;@<RH(TEaJ7<sEfXP`JnD0G,:A-CtVa-5+:
%:\?8lDde&YJ%N$C!EHso4Ir^-Rj;Pb2jkQB&_JTHpI70J=$gK!mUI.HBgA&*e@(C4+N>O90EUDt9*7O!/+<`m(!XtZ:]lGi`Z,F"
%2S1T"@Nk#UU$":a^cPIQFB_4M?k=@u4`:Qc<"M1Up)#l'qZ]18-9hkH*,X"=002Y^:t\DGm(#5(UP;;WE9FMT'(m\DCLAk4G>ZU$
%2F.R2K."\!U$"kHQ_??!S]nggi++nubM;"fC<4Xl"p.QA7]V:ZJl,]!0TfTE:8sn!V[\kR8!q4($ml^O?S'M)\m$X6WiUn8IbSrB
%UeD]Gn*(T`XAV>,>&3e>;\C2Z/VQf3"2M3W%gM9810%3)FbmiE'?NVr^;&m*630XdS-Gfp'^*fXGu9M<AgQ[IcCHRSPW^q!L!Wb:
%#*P!@N[ie.,J3b*g63tHJZ+JC+t5L0g1.fPc$cl%@,GZ#I'O:9P`CT;h$bQ!QZ>jjpG3ccA@,M.V:^/=a>,F4oJ>c0Q_JmM.'Mm2
%R*u9_V,ECf8[%'BR?a:1dg6I#$>.A5<9>\3_nbsC=L<cr(_gHb<i-@O:1TosECn_4=l\U7&EK@@]1L5-19d75U/>#P2__R29[s.d
%^IcQ.,cW>9)ND=Bb>F,S`J1uYnOjm[abk$]/gds<)XWK0:aIC;MFJh*2l5%ea_9_Q-Pc;>3E5RC/3SfDoW3p@>Y.<F7Ds-J)Pp6.
%Dq#h9dO.K[=aN4U,D\[1E!TCG6p3'HKm(XBPm3KGJUul7ai'C50EE"bQ]]opKH$$F)q"iF*XGGlp-;Rh.C-$+BJ>Cr!87&@@7Vo_
%'(BA#-g<K*36b$/TBPQ=1kn']dAcLcP@@$0LMIMaXRN14b9^+$Bb-(0PYh6p*-JG3Nmt]$8p=V,9t(ApMHH"f%nZLi1b!p$YI:La
%4Kq;CJT-#%fN?H/RhbVqg&O`hg2>k4ojC>MFApIiGuW5PL5oe]!;"5[Xe'XSAim2Z#\VWhgQ8C^J&t8"5N,n3"O%[V`^!NR#qs.)
%TH$2[6Hk?O$]);5D6i:+Hg*;)eC0/fH3PXW]Fu@O](\ZK"sqcJVeLdBE&Qk3mo*qC)0H:>&O<".#=:\XJ_2P)%)iE_3rp,_<j0HQ
%B]^ugG0/IC6)UcfjB9tf$/%u3rIuWPpS/`Qm(pV*4,)p8PU?p_IJ-.ZMS_!.U&-?o)Oj6=`k^][Q-L_:l)pp<RGpKY:`i/4X9^3S
%kHXcu*<T;#Qrr%Z%?.NDPTZpc%`"eK^r1Bt*0r3-E[sqCi*=WU=_(o/PNX\4`(LeB\7;g-7d-2]X;V`s?p.g&A[pE0Ai9WFf9!<$
%d6o`V-^@9&C=_Dh+bQG03=2[=1OiA0,S<NN:;'4<`XW&P$(#D"dUW]s,2<7(L`fip%LAROV'S%'P:>a[3&gsNFb@8/KbHeWnL(+*
%?tQSm)="j?Yjh9q@H7V.oZ5V:?<B?]5!c1W=@@!fG%LtPk#=`V4Te\GM0J)bpRt8'![lG-LmKfHo?c/&`ge[cH)S/h&X1h&hRqQ@
%gKY$OdRjkLDB0O*LI[Re9n&Lp)8uUA?7?1,@6e4Bm'Hj1J[t8DkZ'M(7O37Umd5/('<g^M!?f%XAd-(VmAru*0TP/J;>(b-qo+dS
%7\WD0^U.X%,uB@7GhOZm*"-]#L._RtJJ!?YloDNV\!hG`,C%dDQ#Masgbigbq5bXVH(^Z_g]2'nFj!R$VN-9R-"8u+@$t+P%;Q&e
%&T3egQK,c:>&Ap).C\a@.1(#':2AgW9oetnU+RFL_bSb'O3+cC_OkfM,<Mmr(j$e)=lDhN7jE!DUW&e@%05%VR^o=90+92"Gel$[
%h>g59,=_4_+WuJ4Ek@MRkHKN=;SapLVfC]uYHf+CJKZP!W7h6q\<(Qc$:?o#*/_X4/!_I]p*b3O&T@iYZ)JtKkbJ7.l>Y5X!K!ij
%_P8kBJq_ZsZT+Gn%c_L4Ja!0S@_j.!<`dIl0D]fpjn/V2_DRfb"5pJ+-OG>.n6uSo[n13jZn\*b:4FNrIaN`_h*!r>;0+;!@XP#5
%b@[DGRYKB>!Jrt-*DP'HM5pU)rmrn&?Bt7Xe0QMp4K4rJ1+]`pn-GE9TV38e'-S!=^j52<MF72g3E(&F@GJPjVP+qkW&NQGD6Ea6
%A/d?TAh<)*;\XR'qC?;VbFqHIQqY")ndkJ<WoTR\+`^nA\JP]ADS^XtbagVaD5s^o@L3bMaL8PWFT;IdcprqP4a1Lh)Vkb]Ng"Jp
%=IJT9kEdsPio,O*.I,4B-$"XRCA(>5]"h?@mSF'.2W1GdQ3Ja"9@fSV=%9s3c3T8UT;'fF",6o&X"ZgaVj7s'g"jO5A]tL]9_W'9
%l\pteaol;'-fo#2+]LBLBNAZ59[_.\lr8k9*Zu=&i8sK+]/2*cd&ZgJ?-Sq':l!PiQUOg01Bd8?0XB=ZTVR5eWtu:3a3l(4MK1;,
%)@epfNR+JEdk<)rZB":>iZDIU?+FnUUXRAqYGZ_eY$`@M8T#96MC+8?q(6C0Zb&$NX0)K62a2XH$V3b]$l4aj'BKg>cROYe"@r-$
%lp\775g`Jj8pi>h5Rd6Te^ZY[UCXp]`."r^1(Tm+Lc:C_PCZME=aS_Y!Kb15jh.*RCH#g)Y$l3aCR:P#A/>!VaPq#4EB[N]\X_;F
%Bc"jmDT\!)8/W0$2Vou*9C-+:,?#@(M\:V3L6<8R@k:.,-ns0_N6Yal`J>ql`Wha!Cnb*YHIf=[oCDq%hOB&;F]=.5QFobs2=_q1
%>IJ*M>8h4_9[DN2D.j]E/9rKM)Hi4>=mkOg1XfAXj'K)P0>%;b91T<kZXk:fGTdUAT&l)f5EOcY5N]dP![cu@LEp4>K?\!J)f-Dk
%F)&>]p3M(omN18O/^9fgj]'d0Zdl$R3k[N6a_]h:Vl9-q"&l*0TaWS</sF7S)3hK!qA@u*#_>"t9k4jE_)344cs`]dam3mb`\[hG
%l/I4')g*.WQVQ(OEc[_V"Wr;*#u3826O?"RJS&>e#UJU'FJu5(@GnKG9n'Tj@[OMVDpb>r_*#q:(Wm]*N59"'l?W/f-nUqb&qtje
%ZBid3Z'ef%J@:9^ZWTgC!$)Z=jWeaII?^8fRoCAs-AE1RZK44'Ut6d.1=H#rn-\tlbS3YZbDqEEbNE%"Oc81#DR2,%'*IS\6bm>^
%=KD?Z$kQ7GiJqJlcD-4tP'+bSS(ZduTIqar!RFV0)A&ErKqA@,h#7`MEJpD#fYY#R%;6Z($PE?#[HdR5=YLtmV$kn-37Z!R6;?Bu
%!\CBk<KhdV9pTkJ"7cZ+4&m(_/XQXbi"N9Y*%O\@Uo\oB!+8snD]3-:GhR!ALC1H&YpY1YcKoj'JNA[en*#/-GVl1>5\?`s0/NoR
%?3+V_#`qeWQl/``6B$Vi&=CR"KK.5oB%`T76/D)V$nRkt?WqWJR=KR-/G!kd-J0cQ6-l2)_U$h9^ch.E#Nm;Cn_1?\/jd'bKl&U6
%VGL4<1?,WLM9&BkF59L1E6Q2[pbWf<\'5pS`,@)$&uWK,:`hD)c7bY-'O[\r0Qq9Q)j;Qf$A*8-B9mMdcnfpqTgtJl&+HN="FEla
%*i03/:$#EOfE7.P\):Vf!D?OR67t@:7F1$X!&9Uj`@"Xt*]+!MT'W/K;jL*qc22(5-.C%9N7b\<CrnLYCllh%^&o()H*qUm@UkM,
%W-e`&_[U%.\UOT-4,.^KH9f:R]gEC%)+,0p%(7=VM[*7%<SO2piAM.P;SDq</OI9VPS%A9o4V*3XuJaCZeH/+!NlcU!FXQ1(WO6q
%J-aYM(k[k[3HXC3=HiQ,nRsX1Ad*+C$6>!,-LaGHl$01tZmjHjTJ3OY$"9MB-#>p\-j="A<0]$:ebLb^jYB*BQ9qdW;'Ep?RAKZ]
%26h`EW;?Jd(6p54lB=<$(Xu?1Vi`gp!WcJ^&99\@8>?I"(toDR/VX_=&2HTm94:s8WPr^T"?ODj,Kh&/<Li7k<.lrAD5W>\j1'ai
%S?Qj]J\h:dI(fG!7iiFr5_Y!#P2fl/JDj'Pk4l/[?goK,0N_3g[s1;O"M#^c#WecUcgklW&Vrup"MI0FDR3e(N<b^%m$*.keUUgl
%N%8f=H+msdJ[6@LdGN'2@9\C(;P:&](ds"7;K>?6`$@.*"'o:Hra9]<c8LS-Y1++=f,urnSjjRLEu`j?@rE\lY)#ZHRi?m8,>P#E
%mgb7Z((\q2=#[@fc?h?`hPqC+b`A\7B_$-T:BPOd$W&]?LGnD%+%;*eA6;am@Hf&h!,k-3pp_#]7"L^$<?@2o!n/UF&b2tKKUb!:
%mlN7?l+eh^IAX9N,m6b*(MR#%O9+M-6A62f&A]9G:a-r,g8lm)OPR,P@)Wn6UFN`adkm#l#k"QTQu>/F?'$TA!V0H![KrrZl,iaL
%)4AHP5SIJJ[USfiJJBHuB]unVXGCoY+h<mbghL/_/Z*?gG\eU]@-.o;5)nOWCb,@[Ol+2%WHgKaP8c$b!A?c/k5<R=k<^*#[*13X
%-p(A,<QlE?6'j'FefY#fr2A,Yb^cQ7$9F2KT*;0lo_=#8LHursk\)[c#370#Wm?;#@"k!M.TKUh0e3+8,4U,$$KVpODWDD6_65PM
%c:i4r9GLW,E&4`a;#q*p*Y^gBj,k(l&GR8%Q!Tkd4G@&i'Xs>YK4ah`rA"$HZlHeq'GfVFYoH5TYsm.lQF6NC.'[J(d,b(i6uk%0
%`D5!gUuGgUX#s?baBu<&`DlooA!@244(sJ?DhS/UWc)V9ma14;8_O=1M12+jDAKNCJO3l+6/$Ft32b4nI(g,i;?9@?6K')S<Va$n
%@VUsC#q2rAEq>KN<L)HbA4@I;9lZeV*6rO9m$e5c'oFM?rcq6P![R5"d#t]sWH,;>BYJY:WU6C8"rtt?[Q#GqiX?/G9nV?"`IK^U
%k#aH97^-0V;N'qSG=udYIeVG0JX`NdkOI"Y0^ZR,APrds$VUc*oJR-P(N;mT.LtIb%DkkU0EbA65d#+*<,3."*h.'\9E?Dd?fZgE
%NL-dg)Ff=]08hUh(4ou#CGO`dEgN@D3ocnWkS'14SaUQf3sp*tj-s"Q>p-:I^5b>(f$roI7MNr23m@,'no9N5-Zo/>YE.tZXqYjl
%%<XPjRU@"&=f_JQ&l/[uih91!Z^79-k_@2,;4O"n#Im)f>K-+Tdp,oPMF9*Xq;a7X$9^fEV.ZS.?UgGP)]5MoC[=6tnBVs8CU"]`
%`47V>^t]K$7?f)>$/<&2-\3;eU<]V)Aqf]t+?+$d2@>T\KE+$[-plD#PA6M'&VXP?k9o1MG)7IpVFP@#\.50f_am2BAFnofFb?OF
%V0G#M4N;mRKd4D+:D0*]ojibG+n,^2h&;!=>`LKl)*/]i45`Guc)?A$"kYGKKW+lk5Pbp1)_VRM#,&GD'RlJuMi2>-GJp1:e,ZE>
%R2r9]_MtT%D`m0E@2E@cjId3'fkeY&8N4`m8eFn`%7^b)fO[QAV!&ta1T&A8i.lt2oZ#9<H&3(UDAII7JLO^`1N"t3cPQEkCd?PT
%F`BT8U5V(^P='j]X_Jb]72#>a)_f4LF@C.q>o/+3UCInQBA4KLdJBk&]N'W6TZ^:<@WTVCC.XR,$5:87hF/$158r0Iidr212o%c\
%!6G,X.SZ&P91"^\h.-tN3(=<MAqmB2/g4t9H38k';A;T$KD+a;c!*Lo.l;fE09+u"/C$rY"G6be>n]'5q(.+<^,\k.6>VOp5)&);
%Tg6f,po``MlAJ&3BQ+XhEY#<CS!kt4X*FHGC"U,8pC'(XUHm#\;nrot2-oPnQ/9(5%2]<$S5b3uhpB>*-A=p8;P9b(JM3`?1nEe/
%B2BA;cOTpl)%-TBi@S[;ja9Qs`MHM`]<.gS$P.?j=GRFCXRWM-Z:]7egYNOoAb]hIbSB:0>=eYpNod4up%b/'bSW_O.fr_9O+?K*
%^2$Q&cUFPe?!<#MA\pgB`AKrIj,X8T2=TF?[DcR-in[D97I75Ch<;1Z*^dq@/OMA1j183=]SX,0KV?3A]i@]N.\YOXBQ]4[Yk-@u
%&I.BQ"q!M9lQmA14:?u2qPk]*l$^0M-22%hc8;KVM>)H>:7FXslQt/Vq$E_R]8NNLi[,@oUf5]<'G3d4L_Do(n^/,_k!9oID1$)U
%b>3:/&V2^06^g&sk!3;jHkhb`NIWT`YW.oGENhP\e4!sR9BIJg1hUXUjJ9UY'/8Bt_b#cU":/NmAW#sA"m3+p\?P./ZSW+ABBIDS
%:U1RJC]Ie?32R^^3A9>m$tKu#bCq$:U9FZ&0N0.,0MdMD<=hkA,m`(5[^P8pK7bbDYCjpFJIZ4NYKF^'APJ[8-pE>'F0=5]P%:`8
%?k+!#.[tL/LM"CXBZ(GKMI3_U0L$:NW2YY>>r\\qmYJiX462?`bq0SYR.lBJ#S\>lJJj&IM&;FS=b`S7`[/JH8->6a;s;NN6EcPt
%kQjaOK`2e2k<)R@Q7C"RWf$2eYs[;0m,"j:q%"u)h!\RJ%"_3-S,"TR+$6g>MT0j^,mJu12)O8[CpZfAI%aq8g_jNTlGWT4iQ3&s
%Zn+5a&;R?W,uZLA8pfE!09ciq-4_I\7#6TZh-&>m7hc8"iGmlfGH7'iJ.GBJKru@EH!$;^/UeN]OAJ>m"?Y_>5jj=kG<+i^Q0jUY
%%`r^LLg39mA;5Y8Q%r>;#!X>0RIiE$*N!.d9Ve*sBQsbf>d#s%/M(q:A+:*K(?nkV^nAK%)eCQQKk'7N:Tss7Z3][%/XuV<*"f@:
%h8Z51;Z)W-+(72Sc\iC!`UQf.ru<`q?P).*UMi+IYbLW#/XZhm=Lo]N67`W-EK&tkPT/ue*7G^f@FE=?2N@+OGt^\1pDbh>>fC)!
%+`UZ_#Zql9504[:JP9Xp&/rHE8ELfnq?B?dMTgIi-G*5C;<8"sGL%@:&)tsnNtW`uEc[>7ENmKKWR<;T^gD-#a"IbA,+P!E#=b^P
%_HL#`Ok<=)qGW-"]d$s7Mc8/SnRl1N"R`Sf?\HC19jZ^blD3^[ZH<^K.M6(SUY+R,SOSn8$jSj<G6CR!8\m;\?U9,ZWSY]%aU^AS
%>_(NZEWL2&46KYUl-F7bQq[R^W\g)V`7I=5QrfhB7s:i3\HgIr5TQJ;n$Qgs*8ug17<`2f#Y%ET+(gtp[JVqi6A&so^a@7uPjTHB
%?M^R6L-ih9gd=LF_8^ij"@R8C,)/3s+cn5Nc&]h`Eg\;>h%[m^0Z5(I"<34hAO[YSAG[7U\?CNQBb7\-!u&nVcL1E$bM$M:g:i#M
%2$-WDLkF<<k(os!Y=f@!F9U%_QhAA]CUIWuM6:j(c8_GDOtbKN?"m\n3_&LE^1%OY5a-1Y$j*F:H0(6dI>"S"ke^iJ"HB4sgK1?H
%A=-;c@?4f9_I`;-IL%eGb6-5*5_(:Z?9n]3qDu+r@oZ3tJFMSLrjs2P+[W_f=:k_/FYE?nOlWMU#NGoU</8N[&aYK<58_CJd^Nnj
%c3#h^)3?CdbFi/H?)*Ntj`@Ht,"_=kOrhs6oU:Jag>n\+hTV/eTfM`p(LF.P(.@8YPg>VE"fi'<p7uq@NL`h*<$*3M.no,Yf3=(r
%;8G1hEFSEX.o)@$$s4AVGT]U6G7"[T0kFp3=?^<q"nr[bTKjFEN#UC-B1k9C3;o=NX3Lu)aX(r+q9,f1g*ZplbEJ77YE'SQ0CW@g
%VD*`2"_9b7X@BIZfIT3l##"*9ZcQ@]7R$U"6.0)GWDiWK)nbR`PG;$pgg^2:-X.fcP+#CjIC/:08mM>eQSkb3*ACQn>cpQ4jTHDC
%pF=*[S('PS^*)eL^ksk-"O[9KR5<TV\BRZG-;?"qVFm/q"ieBl`.t)6lRA8=+mB"0*7G!/Nj-=3<]TDEV8i^[g9K"#?WhKKdOWk^
%<]sgj!tXb9en`epY0&aqF:EKSb@GDhWL6=5$2oOA8Q^r7>.?Vj-a%P(lV+prkiFCZ#oN85X2I+c2_sf3!0S0:@L6GB0pC]aPjr;b
%K*'/'/Tk4TJXscGL3og4QC!O1H#=>?9h;_sB%[5$bZBNfrhFomdmt-@3"I_7QG_2$Z/Yp$q%HsKeF%"-"a%l6+;)PQ?JiQBju,kc
%B(4GK'gl;8aL&\'do9_,/EV+P9n$(`4b>d@Sjgn#![Oqm"S5#"e/aK>V_$(cT1!k,C3u@fd"4.nd-"<YBX2qd!EGp-_os[]'[gf;
%ouR<q=U_kX'3tXC@<f>-1nHoIDp7G;GRcE8TeC*FM/.YfrtII3+r2HIJ7[3]L^9+*n*oH>c[jK)`[?mR`Znl/%cY^\P\56q^lcf@
%_.:$>&m^&'_eU*D_%ZLHV8XAqgn-*q!)J2H:'McUi,oqR1>n91h?RL:]gme;ifCtpKV2FG'g16GD7m6OX2/Y'Vo'j>H1PkjEoW%Y
%U2P+XTP348,W?(_Pnr3(\_#;0\Ed>-CjlLUmXl/+"q-DLJ4Gb<4,qcF!Oti$7Q_mt!`eNf![9CFEec,l?uWjN^`0VfrG^2@iJ3(<
%DSB!`b6Pq,=?M\8<NQ=321=phLDE\UC.OHqAIcHZA-f(7P;=JUK\mr^\1`cq&[Jca0W$<2W@WI*jpptn,7$)>W)m+0GSs+$!?HD8
%&2WV@dQC(<LKneE:`D:&'k@uCPKsZZCP]OXQl:jf;rn>T$.?cS@T/mJK,W5M1_KrOAP>2e.gPsK8]N5AHN%7ZltaNEp35a+qadI`
%CH;L@;2.gU0Ppc2;kc_\MeSD-NfS)V9BegJ(>.M8NB+habUpAar![t)Nb*sGZ?psQ>9"o8[B#7H]A%r0T;b9D#K2Y/W=DLu#W3Em
%4E??V`>LOd(5\^sK_jmPgBPDt`uE;,LZY36s+jYn/Q$R5C<U4"Ir*W",HT,u4I?We+#q=NE,1Rld2G]qO%=?ig-FSQ"1dc35*[^s
%ZBMu8'MR/hs,4#^e+enHe$$cOlkrQpTp`&InY<J!X7+^?OTW.iMa%]J'#dg&)pb.YpVf%ik=d4nKs,e'XaFlr<-/KeV(Gu$rV:iR
%X@-MIPtpJt/:OcpI=&:hmlUHJG]qf[ms='_NDeZ)fi/$'A'et610F$F4C+[-cdjb*OpfG6&n_MOm5g7`TuV/M"liG?n09ra8f"<k
%O+rp;lOck%WQD,KkTTKU$U1He]dN[*do5cM9%]9&P#/I#E3RQ<%ja16J=@qsj@\)Ni&6[4YW]W29b:>07>llH$Po\I-,bVnKhW=U
%q=.A:bpM5MdmL0jS/:$X$UtkS4F+D48r);m]_I)>8I[bSCajEZ;2h,f_iq:=b/-f]Qu%ErgN^Bh%d2XQ90fc@j`Xe/"Wq/22c`Te
%'+I?u2p)1NI5mL0@L3-Z_Jl.d"&\=lUcP:FEDH84T:OKP\BZQTnFDP"9)4.P-QetP8gQgI]klo/8S-YH"fAR7QqLg@C(!M[BXYUZ
%jJLa\]U6lt*fsHsA\2P.Q^Q,6;T1>ZAj-bKb"KXdqhEl4Bj5-8[Dt6NCTi.Ve8a'2*a*hC$YsZ)*sXYfI3=k+7.srVgO#XgAEVWm
%Tq+i=\JLZ\Ogu9rH]biTR<sDZkg^K-_:NHR(5kskF;A3s7+OX>h`r6Is,hA`:7h>J5-tn5fmOlXpViV*kE),u/!aCrKg#^aa-3K#
%ppK5&(OScIf;;IT=/oU@a3nGEY*5i,]GR)?G+qaC5F8"6b,'0CB041t$ZGQ'CoqR+VGl,N`69E;2Qn's\WFE]k%;<#?.[3f`d+j)
%ep7$JknGc$KTiW30%Qc1IW/bji"eCfT.ebO\FIoq9Y9o"n0@1b>p!C][(5`?3`<?V:.7NK?$%Q"CQ-Z?kfd1_[TM1+k^aE\nT9#\
%b"=<Lmg_LP8/en>VU5<TmY=-$6Oll1ZWT4Mk,-s&1S00?M@4q5(&HF)VLttOfJ5HDPU]A)is@[)ArHQHdiB]m13$tWoMJpb=@^Vi
%fXA&0PpQE"Wq]Etr"sUqjJVK>*\ZBiCY1A)35!\k+mu#>>WS5/ce1gdn@S5;Ao5/$L[ZPQQou(RF18]g)HuM\'U(oPWDjiLC;T]<
%=6cUFR#/KA4qu]e4-<.5Ru%tS[pZ%l#1D"F!`KH0g6/c/9=5H/>UKBcOM^DN^+AE`CUCWQBRuj+C`JWBL%eKndL1Q9T0=8[)gY1#
%N/I.Q;o4HYDh6Lt)k++P'8]Vae2TG'2_]`H"Wpt*-0B(.i?a\e@d6o+K+et8Ht_IbY]"p%qZC@7@0[[O[GW9(Vm:3QJIM+hR#%XR
%qZ.=UG[?(>R3_<=.MHaR<=a:L]Mp;Tin8m)WUZ77p+_@9M^K@]?7`oHiRal89Y8:+rJ6&6g;Mg:=>[iJiCD<F]=nU)dd@U9cn>%;
%Mh[(/_?EFIKu5GmP?9[#0VS0cc3T13^G(d4C0t6i$AXb<E0pfp6W#g:Q1\jVIC+7:@?d*O3)'*)lki;6Ku:8$^Ip,HE1$,>l'OUH
%Ku;,53)!PKs)_!m:D:sI6^[Xt2a?Qd%!E0iD^(o1>(q6+L&&npd/f.ZIgEE[o?P\:fY@1!&8!"-;Xm.r0%Nka?0@$Cn8?G[8bfV?
%e*9E5E[%[;,Zcd/c<Msni$5B?EOCV'[5h=CS^/#p74ht_*Pg+YI'-=GGf7uRNlF(_UngF!/`LFU8*bP_2Or'XkTQ"VSWA!m3eshk
%*rO4F4>5DH@CD0gEo&^J#9RfPQhe)SD;pDM2LfkMM351@ZWkGDHZlHH&TuI,>C/.TH9a0G981YI&O!a[fQHPe>N=s;fl]>'P-LF[
%F@n+EU@]BmAl,<ff.IITa7&1E$C7o0Sp@$QK!^bdP>+2Ff.!5m.7pmK(gJi/lCL,e6>"eg=Z[7@X(8JSZ&*QTQg?"cg,bgc-VeQC
%-n%"p:S`k\\&ts&7-`H;Y#*Tq$\*l*RH9nmLMP:L)TK2NTY4<lMC3JC#8bjL>5[Be3<n-unoX$;ec*&Q)pNeDT:c\V1=Nd#.TCT-
%G2[YeeAOa(I'Tr!kB0K/Vc+Gf@^6eF:Sko5];EtC%,^f6@.Cs^Puh2Z6&**k="*/XiX'h1\R[pU9'pt(QgMf/>ddIfB>a2EML+YO
%7*6.2>J<@n:3*"kD7+%T.4SNb>oJ2@PpnS1==;6Epn5jXGtm'Nr6gh9fiOr6=bDe^N3IrS<^[Dt%YR!)0Hnn2g0S4A9pF)rr4u9E
%YBQcd74M32+uLrV6F1+7-$PuLc=PfYPbcHZmAhOQ>as:I.k>e5^(dP>Oh11j<gUhY*fflhG1)^h0XQ")RqV^rj>)d$k4?/.TgcuC
%#'Z>\97]MJS@$-D,lAuLfnqp*=q<dLTVR&[8mAJiK,=mo5kGg#,b6EF``%j2rQ^UqM_[].fe^FPQ^RAkW]68`MrC!NP+D![WS:[$
%L*&sUbKfT$-^($_ls.VO[Cb(]OTcb^c:&s=43%jpRukK6pL&mpZkKc3cT=AXJ@1b/"?M>M(@/$%JX6QWPq"R7:NLku1Q2"tdcg)H
%Z;F)0QS.LmA/=!BEJf!OXkHPP&pD@WVspM'M_Fk0pH[7?YHi>C3,;hsGD\$,R1HGPk.>qt:j%XnI'i?Cl4'lm[<kr5K8]^Imt*Nt
%(7P5soGF]B!U/l=dR#NuZ#,o+<RQ'J0/Md3m=2PWL%;W-CC#?lYu>nb9__r+ZNhclrn:2ECq##UN_R%ZiDb9Rq+0XTdWWo]E\Ta!
%INCPdTrCYX:HuqQO#6%)%+Jus.]uZ-EN"N"aeGQ9QmKl&bCe+67!.!H3`B?;lti=p+XY.[_->NHoC6X*,/d+em6g>u48$HD7QkT8
%3bZ-04Ys`n')=m=In4W266bUhe*2-j$Ct\mb`#bLb5$;&`+mRfkFMVU<<(egUT$%eqF'OLS*1.t`3/G5Rt].8,YLU?"eDB6W!W.P
%=/1rrD;NOu:5*;a0ZXGr.QA[;(9Fhd.GIg);@N>)QJhd[n2(!S%DE,8#O%E13(7_8V!k+k;5Q8O;0SuS\]M^LA_`hm6IrKk$\,3-
%I6)g8oU%m</&q0TWHGRq$R24LTfGS)OXOd5l$/uIHN(QV^+'LS[_+\mNe`\:N$jgg@]C"Boaaes=R`#<P\IkJn\,MFr54OK=K;TA
%7+8.)oU1N5'Dlj1hAGtrc`>Cob<>)bUp<^*W<?sZj2I]=35;XdMp.r@2O0,CPl2Z:E7j$ma[I'E*5,G0^?cuY?''suJ)OkZpDj#'
%,QE4^AM&ef0=>$8U+#ug_@+U8T7L^+WAtWOTJND,,.f?:j^AKJ6TaIe@n*OG3_F8oJ`nM-[R\Ha^$jL3StkKt\_chMUZBR3C&=Z9
%ZHd]^XNK:d>4^tQ\.uGKFF5&K+%OAldIHb.L5C]+n^'JfkkMZXX$k"Kc)s]]I,*T8]BK#L3^-lc[;#>?Qo=+59b+("R)3;n#X@XP
%e%*q97h`R`>a=K[puUP_Aj"n09g8qh;6PD'89mOCb87)j3Sdm'FJIW)gLT/`ZeL#T]%b#>8PS?USP&"lDK/n;9lA/9j4VQ$U[At;
%S.U&,,&^`INJkGh@GFi@[BR^u(V/TNQX!IPfFE>Z(Omiu.2<f7!oi*2]_m?pRi-NGDAmt<b2qZad5$3t:l:Ve,KLa,'b)rec(krk
%SNDYgBCN'5jK!!K9%1T5fD1FFAT<'3_N0`EaPCKBd^/EfXJjd'^li;"fsF8LZ=;n(Br0^AH"j57L,j70q^1mg7"1WHD_p#*MYoAp
%h^7pK7"1WHDc=B^(/*^VjFiD\:c#ssN*/W4(TP'def$3k%#3@$cDX0W#<9kud;DW$I$0+?c^teu+Dm]7,CC\s."B6'BthR6GcHo'
%[.C*U%$NNU\(kdX`4PDJAs>6L?"IZ5H`Gn4B9[bC?"N2nb<+7U;UFiPF%.Ku,Wcg%m-L8&rME^iP]*Q>"Bs=NU<#8N<f@hn&!k(:
%+&TnSD&!AT3enACjPu)+4FJ?=$?'n3artDPpU&:MrL1osf#Hm#iCD<FH=p^d`*V3<0kO(HK<W`u2%d(TYHmZK>:ak=l?Ku"%d.,Z
%);2s]n,Zn4=tL#A)Cq\BM$Aapr_s\9qJ[q?`!Q_6FHTU*I,Uu].L%^,hOM[M1?"RS'Q1a+6Kagek/-F)\Q0R5;)Du$9j0`jejYc\
%+do*5dO:Ki1P/3]`GK_,PTMQT@@MAeiE[*TDO]BX;5Dj=p=Mm\enXj)X307,.3uQ3h$RAtR?9=F_tliCVA0R]4o@iGVijcme?dS?
%4eaC7nHB6VOEg:!+&i3r'8LY^iES9\ma7`V5cWb^9?$\%HQp+41%_G6k;[=QjLQA*lSOE@M3@25E!qA+Rq-bpS8<mIc:ptHgu?*n
%j9UYm>>'3]$a2E6ca7ZG+!bCK5oW"4hg4VQW@$COL#Ucto#f>-/I@;=K\,<$',t'/$&tBKp!]$0NLCmKb:K8_Y9LksOa&sH-AKt*
%mZ(V]R:UXloN3/LTaiiH919Z`,AgjI_eOCL+Z_5YeAim&_@30^Jb93[>n2fG#=]Nq4%aBKXTl/Eb`".ZL6eC$@k0M,]3gN+"5@BE
%k5CHb#HYoB+V4efbs;WRI"XXNQSmMo1.2]q*MI)"%a0hU4JE+BfkZFi2qc^SknMFIR#ZEd8<62JO>XZ#(8EV2/uu93*i]]PON9Iu
%46rtL0tJ/mYX9'KC%#/Z7<l4->MN:A:,J<T-h;%@IZ='+?5B5A_i4L3D6"-#O^Fk^35]4rCP4(IaKO!7@Q3jnZm-rZr#1])UL!g:
%M^;fNU.JE0005^p'-lcfT8(E?FeV1'FQCrm/In[Jd@X;feF[iL?>U\4@Pc(l)_mnM`*&,+39PmbW<LB&4>2S-7qO/cL0M$sX!>2t
%\#:qi6j@"/n3mOgH]P*]%DL1==CgoD[)'4RT2JjO-=>t]gs._O64YZRp`bVLZM5W%0=(@VAFXg:moAE^]<b(;q'!gEaGC#0lmO\V
%E`;[0n:3Tgb5/m:3\\h4+CC,!c&Ci6>s/;[]X0n)._*o'mW<V^QRqgIs2RlYQ/J9e?#n.c'uP,=3MtK<WYtSalKF*45KJ^kR0YNh
%6[!;D_,:^_$QG*T>IJW)%]q.B9`?"n$bu%\+!(F/Bh=Zb9D9EmV'PbYW2nN*'fcJ(W02"b>"m<X>KW1]Q'c9bNeBYDk/aES5`fH8
%X:h0A;\:kt3!]sq\e27Wg\-1g#hY$VNc##hY]Mr2K<bTMq1/r/g9(G1<m3I=hQ@*,@'V=P$?YFGm,2[`&ECRB/ijD>Zi"B)#[!(#
%Z=4'-n4ep[`uIg^b(*Md[hOb.>;<l;>na;N=k=Yk6AK\!'CUL[Qu"2iRq"lFFTfS5Z;%UNA%H,24s6C=4_E>31ELdoXbZ:WAtTH'
%2e.kV)hH<45_^+m=+G-DAh#."GRnW0*7_Kn5\E&BN_3AU+NAtSK#B?&GbciDA?Q<sjER6kJIJg"lLR*c2)#NaUZ]oRDY0_]R?&^o
%/oVG?,Ao%A-sN!`*S<8)TGg2JfIinJ%nq0>67Mg4EN)FO0J-Sq<AZs:d7oIGB/3ZK.+th7<D,B=$.O.V7mguigJi]RJZP]!6l2R/
%[^1NYo>Q&b)%<p0&b0&1Wke0aXJNlEi(p)5@N)I*)qD6aQiTk#C'!+q<4d-%,GnT^<bd*+O]W%4+]CQ*SIQ&.4'#&2P-G'sYWDb+
%_`SKRCr2\c41=oc$KK.A_/5hni5S%ZRMAa@$TY,=4Gdou91MT1r8<\[fEUc;F0O7e%C"I*"SkBBl+2MKa3nFcY-C`MjVbR?1VlOq
%J41[r_.HAC4@cFh.0l3!Zkb:E_-S?"?$CI7T)bFY(f8Jg:N.ld`32'+)NL<5*WdKi.-=)$o3nLMFIuA[$Y#^]I*jGg976oQ>WPt6
%)]qkfHV17_<L^QqJA,usbE7*/9#ltg<!OoFX/c8H[o3&mZFcZ=G7>-nRi<[72MuCgY+T5l!T'aV;lM?;1c]//$M$JgM.m1AoK8ur
%gUqmKh_B5L9T%aL1dT8BW,8Q==pD&`YV\R3e70i0W.o_Q:j$TdS=PE$)jLk,;\U<8SNGBBYN'l-O5>QT@oZHUXQlgH<F,:&6E<:r
%%FrK0??"Fb2[J5S\nYr:TNfZa5SL(^[t"rB@bl#D/kiZV_UPiA^H_rEh-t4K#/m2nWX\#(1KEXmBKs0a.4-GaNVg(lRKKTWFU!s(
%^f@JU[<03@.LfA+cp7PF<67%KES4CGec8GQCSWegD<We00K>dV(k&jJGY3Nj<DY7EI>4@lAD@qQLpnBjkeo`jWWqm!L[$Me-Ur1q
%$T&q/9hX[_[Fe$<R<N4s.hJ8Pf&THB>gTNK!oDBjc(#^q'+G[6A7083@3L7(6=X3*an9AtVD[q0X>m**UfRZ^gf0-4Bpl3Pcq>\k
%2@X2;A9Yi]CEO>lBUCNKI$ch[*n(3;YVHJm92Q^Z3<uQFMA^6V"7j0?FTD\QD.A?P2B;l6%lC*[]lP<1^r+W&ef0aTe]Q:gm4L2L
%Ndo,N2F1t3FMk)F-NQ,&S2iX"-X2_<<Pnld9=)fRM?3NXWA6#3TI`V9h5"e^f;]&acB\IJ\5Yt\!f[8jL.*f>-Aqp#A2.VCA\DMt
%NE4=)cB[B:[:l.HbS01SS>ZXhWi?km!r@U@Fod"X6.?WTU94js$ceM&9@act3_rpN`:a2&J6l)bQkPMiF,-4&n0ToKI-G,\$<p44
%-4%R.;67I],[eTc.Md,$oIc6RbdILcc]&4R!RlU7I]sQ(RI*iG7FeBcCf$B9JI[Wt[AgC<E9L]Crb6KXbCgb3?qN/j0rPFJ(<Fq'
%1?eCl5R$G7"phAd7DYk'ONr6+<H3%YWTjm#_A$&d3.:\aKp1RF+YNJ#:V$:qRk0aIKB^nXF:)AR#_7\."V`DeoW]G+=,D;8&cr%&
%mBl9p`/d*P9"S='01<5gMAM*?r<,Y7=N*FZ(Z%S-f<EOm`sr+-*jNkt#KVj,dW%h^72CaHfMq5n#bpGEhmSSHFbu2.U=2hU)2]6j
%>:!!b\rQ0km"-+'f%L58h_?m,a6a!&=q<*"^Gl1]hg,!6DgTYH?<apPgT:m3gE#P15P"?Z4nna3TDd9[+8N%&HR/`"4dc):s7N@@
%r78W4s82iqXag8uT>'1L?]FRJY<)UeJ,+e:5Q0pV00K.([m0oUmjO3>,CJ>2WSdaIs7<?74St%D4T!Ve+91!J$4#-*r;(uDhg4bK
%I/`I3p>hG;p@Za]ia&`,J)tG!miM5Thu:I,H0k#"ris4aoW;cHLH%UZl[.DoHi9GkqURV(k,J65[@;RHle+2/De*DIOM-*T'6=3"
%12%I`a%.bjF".0WTRd2&2']9K37;d1Tngoc8@gu2VY^KG<C2s2EGRH/"m,>E>"H"SP00BW"M9XMGbV8glD^0t/#oA^O:oYf;^d:N
%Y\j70+5("o`)rf%[I2pEjVs%p,IWF=m2<]&jPLuD%,8R^A_8#rQl.[85rD7WLmFcbgCjA.'e?eFr7f9?$k;sWY@dX1:I[-XG0VgF
%@.\'<l/iAY=X#Z2Ls7(uF+W%,iA4uFjqXL+@'p"01kjJ3i=cj)"idG`<\;.c%TY1"V\W=8`i'lR<hmPrm>OrD&IEh/Peqh%%A+$^
%&uFe*IA0O0M*B="Y!25"k1W`oM,7s^BfG^sSLp'a_5dI9h3MPN'qZ7N<<o.^@+>g\"i`#;4q!*F;tqmD<\qO(?EoqR54a?Xm#d]'
%`9%i'=X#bRa>AV-*&kJtI)=+DOsNaS]^R-9\a[J4%4aa/M_A4^g#XaBF89C60"^nQV=girn`;:(\p!b+2m$5(duCD8EV7JtQ<apt
%e1Q47b%kn[ma^^,gU:t,p"0I_p!m?kCK+HBo]"t<fA=0^)Er93\2u]20n0&i^7FPD2:k*Y.Q*MsGff'JenfBp31bKiml^OAGJ)u'
%DgB69:JDnDh;5MNe`E-&=C,LZL!*M'^Y[[gUA3Jmr1Un%oOFNhb[[:El];"dgKsX4nU9!blW5\hX0La&j]C<g4)fEC[ibG;F'ne?
%GJ<gTs8"D+q8QK"s65TXlh(eF2:-^!Q-07eYl#DNYPXc4rn1ral$H5*n>FV1lt=ZBoZ'c+qmW1WpLirBNOr7p';ts,>e*<E3VeF(
%s$';,mfu)UYFiT!,(*_l`UhFTq"rrsipYb(L6EkIRO^P&Q7T^[j\<lgk4_(9pc"iCa`G3Vs3`RBkG@c'ro1J\4Sd7B5Pf_p&`omT
%QghOhHL^+Kp[13?HQW:)GFs="MfUqs3koq5l0j)[j1feYDt!@c?aPIs@+G)SpC/5C_Y8NOgl#KY>3D^uTDR%1^@9N/\-)%uZ!f2s
%\8.(HpM8Xcpih5[I!L!8j7sdFp[S"X`\HMN'c/Ns*F,pbhh1Cq]5Kn3gl(:58X1*aHhn\Rrkn*lL9$[".$e,Ni[4!dDghQ,s7u#>
%03-g`0>7/IJ,\Iu?@,XWI2S&Sl*@A$LNDq^b]eR-M[Fhk<^6WS[m'JnadpjP^^2^+[eJ.L5P1d"s/RV9'WGaFrUYMZC0qM0";NOn
%Xl'";TD7+NIpO-=XDn17++30[YJ%:2'pi7:AB@9,he4AYrmZ.@qEh:o?kgBNnMgID%EJ.lQXPR_2f5+1qd\N1VC"dZ`="p/cDTQ_
%Y]#3&K*lD&:VPjFs8''J'jc:M?QORk`3u.9YR4=DJ$)(kTDe(ghD)Qh9f!O0rVk:t.Pr26!2Z0n_(>OXh,t'n)V<q"T<>d9JSE)g
%55p;Orl1"J;0/RBiI9i>gkD*/,'N`=+8s=mSi^;CQHh_hK<K[p22>[4)F1E+DqJ-;n)s];B/O]*jU4<M\'MasZk#(umY@(:lu4'G
%jO\nVI#Tb/p3uf0:Hn.5\-9tnan9t*m+:.pq1!bRKrQ*OQS6&AbeJT*rP(NC?[ir2l6WZef(Dg&r)[8(rdRr(E\G-$?f1nK?TgO)
%^[<SW75'1J>qM,/IcRbP4^Gl\.,0"C>E:pfH;n4qp^HYs/#BAg?h],tIJ8LsfusuAqmd6MmeBiI9$YPE>]f3f4[Tm$pupnU6Q$cF
%_\h'N56'"h9FUT!F"(U0-q[OANpSq(VN4PSq'+jaJlR9\9df\S<j2#2$=U(m1?6,rik++Jn[$RAie;Db/GiO"A`$RjOU8WapVaQ@
%rM^k@^CRPqH-I)Iap#]u+AhKPnFc8HfV^t6Q#D'5?iD?":p*o,+bHZ63r\('hY&<=qeM9rogP/XMY?F--VqGL%<$N=,GE2smPSc<
%W&s_o5!JC0\.-A/df[1a?7g#MTk&'ifNFJNQ`[ps'qdYDe6$i9<L,2Lki-0?'($rtE^Hf4r?mR#,:_5EYe6o#N#<$n$m+oo5)A40
%k:!K;BD3G:b*!an%(qZ+o]<#,?5t^@UE\JTB$(=g]ktf#__M'Rn?9oPFP3qO>u8eNIDOeuDT'b?JFX[K,\-bMD9Al#>V\.XW#Cfb
%.t\sJNrC%C,$D<JA*+rV/cAAVl(*mJ(I!nmo8O96XRVSeIeDsg??l#<q%mC_mcf5tn=(dE<dEVa^[h#B^Bcq$eCQn^f"1fS^@RtG
%=t*V#s6.XU>e@WGj$0QH+oLUqk=RVs'Ya?/NB4iNeVde7?E<Rlg=d$-s"BnA<p(RieH%E'jiY=&pI"g=h./aGes?`[H2GY%F^R-,
%hEUUqJ,`u63ktOAe*lp/H?ZLa.KB<e:=P^.El^;VHM1X=dT_/;[aM@nm.*aj3o@G(qu#AH*4rD/Y4J^W5Jen`l07]&IeE`G=b#UU
%9iQH(NO7,\C(kgQkIEgOQ/3HtAa/B37qeb!)1M.,PhnKS9'9hdl$k<fV>&8Cp@q$<d`acB.;t(fpu9#m);9G+Onf+Ze3p,\31K\>
%pmLpQUO3`hq$3TD4o$e8PRNW15GeRIGiY9SFe8S"O+h/^DYPksI#RM)T*eT]8O'Po+SiQ%D#*8W;Z99)S?=*`puhS<K)u$A9qsa&
%IIYejje&mZ:qs3#^0"*[(@-#5QbHQSdTBj`GeRd\(V`d>]Kc2d:MSDTiW!,`;<NN=:;6>C5H*r9q="@$]B\ZBm[HH;[3,3i`f!(V
%d\@>Nl;]""eD!1ZPp`2LmcQh7q/\cKo#i,$;b0@D!r$p=khY]Qc[4OI8&*fgO#HoXc@n&RrRDGqa5l0t0hi#1RP291<acjrO_$XJ
%bEun*6c0bFE\p--.90>QYEK8`Z!s_a\IdE.?8`C9g"2Hkc2n`e+*eXlijl:/aU5eAX/l#&<V'>_?eD4:k^Oc"@g-Ib_9Cr$B7W^A
%Lf;(n#C71lI*(s@b:gPNq=8*`TS'@i\`K/ahR2p3/$<!bElI.,0!A,qab:Na^MLk(='`N)g/hO+mAd4j>e/kBAJ\Ef3\J,0P"Sk]
%EV6`kXhNbK)fNAtp%?pBHdri]0"p>?V%j0#?.O/K[Fa;Te!aKYOec3!??=R):q#\qAS";s>X\LJ2r&M]FPGMln6(+-FRr5Vd<JO8
%$[W0o]^Ee/V7#X?Ds2buL@skMPLeN.*>ArB8=*Y1BbqPtFkuncc!,L@HadhEe?aV[XZ,-^*BH9KqYFO4ii6)8E:[r&)`DVkp#-\P
%Mf%.S!K,b'MQ9eQqHnR=AQN-KHHiSJ4e#r0MQ4K_<Vh##6d[clYJ&pnd3pnuolW2blS`R>af:((`R$JBN^6P,VX'mPNp2^b0>6@K
%k^39*?<PfQbHc!.13?Oah6d0iRocds.D4Y50=h!1CpI\H*u5!6kW$"^l:TqLB_9;G>^>!K9A("2>B0?/CS8e:U)B9''p!:B3gQI*
%UYWE9FM6lZcs4+rp!%82CrRglR!8H$D;-$L)NAS\h01Hc8<gd\L0N5HJBkDk-jW88;-iSD]O<B9.SA]Nr$?RT,UEm7&7'YS%DJ"a
%gB9WW#`33GeZ3JGIEhFdMm:/N<LloNZ\Qcfja9(.fj=c9el\\_='g2-?VTa5eb-b$2nX:JHGsoE`c9tCbHE0Z#IG`a>)I1bkpEdu
%ULj)e"!QQR7=.%C!mu/J#,HV]1ats52ocZ[s"1^^gTZO]I.*BG4WDu.FplYZ@-QDe9.sIt3>@27Z*o!HcR/GZD!515S)a?F='Qg!
%Ed5I*i^tW1k*B6'Y3Uhg]!]$(!ZMUV/e;i0;Y5iHY?26FV-Dn'h6I5\$V8,)Z]U04?bZ(\A0H2pE79e*>^tS0X)POLYU4%+)6fU2
%J1FoLrUncJG:f$]``.qQ&QX,/MNq7JH.CGH?ZkHF>q9YF42BdKMjK:iM<2o5^8MC:@mu_8Ku00-UO9L6=6<@"(H*\]$\o0$RJj%)
%PhLc7Ajco>@-T:#0VUu>P:Tg^3-Eo@kJ:b%+M8+t1$IC>-R"A-$+m6LTeTFE[8j6Zq(umuXO-=KVupch`A3f99bP/C''4'T9d=/f
%CsCO`AK<Y*@G^K=X$338f<@EB"5P_T?F4IH2!%Fc<KHFgKB$D+/*e.@f4sCe,SjN()Noft!0pOdlV4ORLIRLo&1pEP%\_IBZ8Qo1
%mFA[fK7N1MQEICcl:S%"kJ"3rGMRE6<VprW\b.HPG:Ot\c]R;\R90F.DnPqjFRmKoj-D=$ig0Qg;lBZ9_gh1DpUoZpJ+2V#lh&\t
%]/K9G.s_EgcFrB!GCT,.rp#]>r5>3jqQ>U',CA$McO0O9NE_HS?Mh2*rRn)4qgZ?T`W,P_^]!s)hYN]5S^*5UbKDg2M4](R,9QsX
%kJ$l??i/PY^\%<!55*(dSr)ph7-<KDARtA^DuF&CqqOF6o/FGUm;IIHMkBgg5CMju-V"k+ZE*(!pXe<nVgnN107NR5hu;1crojfO
%rPeZjo'7q*p9L:GpQ^3jO$EQac[PD@YC?/$07NV554m.X0&="KLX,H"f')1EHdfSkoSL?[m,O.Vp@&"T#QORCIr05U]Rc&Uh:>Y%
%SpC8rhYY-Ms2A>mr.FH`qer#tNc&.7Je.VlA)Hh#YPmUQhXQANpl7<5k1PsmgofQ$:"BJLX`O1V:#noZcB`L-,I^nC!YZ$1hn!C^
%^<siR7rR:njjqRS]&80?\#`fKIbi8L)Q\&5:M37^g%"-8dd]9tRGlY=Z/@#9K;mg..JmR$?5:o,m//??M:m,SkW!27>Nn>6Xhji/
%K-\6UqtE_]V.*2o`0YekpP]A*Nb!%_4b4l!E4k,k:V"R\GIV9]cpB:[ARB]njhLONL'D1`a>V59GrQnl(++(tbZ=K^REJ,)f11q&
%>,0n#gG.lg).fu)OlrS^9rZtZ>q8KGp-$U9B=(DGZXF.d6kQt<&WXPtXhECS0ok],,$-n3G6iH@oYsMogO7(9P]4.K$9k>`5k/YC
%.k-3O0BMND'!Ou^;Z9WJII>Lg%.cXd(*/AUgM2gG<AmAtFj#ZA.W"HF&8[di*A\<Pd#M7hjY0Hjd%l]sP(RW84s9S(CR/Q8b;u:<
%.ZP-XeH9h#GZIfK-LXiGQ5"#_0k6LmE],Z]30:_("h).NhRIs8<mEuJJ$:EO5UfZrahO^K>JM1Xnt(N2R2'^>)A^1HK[>RWMLU!K
%(N_2d#o5j,FOTCbNn.r'CIRSZ=iIbc1'Y%P,c@f%RJcTCKX-lK^`pWc!>IVm!m]f_?N(*IM6,r@d-7Q9RETa_1\Re&166SeD$_<m
%I,q(YCL<_KEMT)\IuQ8YR@9hn,]d=;S[7"8k8]'K6YSG<bTS`_K#[0IR9ML[N6H[B0`ttK%umHJR;XoK,-rt:1dkq!dAV]A_P9sC
%R9$$/<g[.=:AH?m#EoPL.?d;lTQ*7Z@1n!_:\9K-dHBW\Be*=+@8cf5'=FqO2<qWUG?T\l<?&.gaOJ(;JWZpa`^1lu&<2B0-6sN$
%ND*8abTASr5\&+0R10XILs2B^0cPN&(4d?22)fA)=gD"S)T6GE=aK&tQ)h&rT2dD.f?';/R7OL8YiCarUZ?LcNZB(A]4+og,X)#U
%?+-b[k?-m*%t'=a54EORd.U[F<"3-HE5c[HWJ<_/f[p,_.JERuY<DU<2r<#19pOio>g[We_k6k\P<X?lr(HBtrqW2akEk(m*P?7.
%Kb];#hY5)):Ha3aTE"#>0>7.eDu]4BGP@=:a27<Dk@Z8ChRrLPVk/.o^5[^'[.)DA>qc-;q.K^&]n,0c$=iZlKAo$Epe[<[kc%LJ
%'S#33gQo+729TmLqiNpnU%gE>4?U-2ic="X_2"Y-VXB_jo1sci[=1-X\?i7uouX,shr"Coo3[!fDUS=..s=,S[HD>g4JIg"rQ/"t
%@Xd9Y@ip0*dFI/^56&$GG@%QHJgJT!A01V>_i(&JTD\32FVeAgAEAikLqpCqjKZp`j\(oB]&NnmdJ/_WU9\:jP,6chIe9TN2bMEr
%`s;$7[1l5_/55;pT).[72,0>7=PK'PGW.E+:!In'[J],^5Egf/$qVg"4oH0e='lf-"qg6b<D,^-2aa)%SYmD`Na%M0#2#9ShspC9
%NPG*AZ&P)$?f,7$rpo70Ic!;Y75WflkF_UcZb;n-1h%lS:55n`XGo!9.Qiof!K>B8<Bbk:/ZBb%Z5sr!]`)iarNl)1ft1uQnCn/R
%n"]7KQb7!KiNRFW28/a7./D2eInF8Y'Rd9qi)rCce-H!41K7,#N;CV"C'umC!dBSCbu,Dbno&>W*J$eFE36^Cb#^&slNq'II=oZ0
%qJt=tUS^d_56FoNS$kEa<FrO2V@B82=s#KK3*qC%TMceX0O0>"XdGk6OqP)A6^)i\1rDP8M&dGEl+i@[#2L2d68%1G(_ho`<>rTS
%WS?8?iYr`*%<uXSj_e!tbWnq'#4JtooP,4]CcIk:Ui9_75ER@*%crT/dIogFZQ.G0ha!lT\lB$5m#.9n%#s29("(ATAjnU?R!BI#
%;_sLpr)s4I<MdD3\`+bkOio,'qXEf?Ut'\1NMlNkTnbELU4'7qmiFs*LG?B)IT7\+'Q>*DSkZgE]`&d8EukE(N'K73nVh:@a+m9<
%fAb!,Q=[k9<J+muZRq0MU$Y/!Y!J]qdhLW2O'ojDR3FC]nH8%C,pd0WAm&8@@rH%-D$gjASCV7*7tLZl%40DlUW.E!2M,4U+MG3/
%d.EY>"7ij'0m:4;eC!OFn==P?[kAbg0\EZ,AYicp![_LChdPs3Wsp[4I0,V/r:6bX^n6*MFc[^*cV.V+O$Btp>#1:O,XpGus+],(
%jO6?iA3CO]3>Yk2R."O9M/Zd=c*\':Y0m.@gcf,I_4T8iVLS!J4a<$B5B!*=h8=1,*tN2*T=+Q&rPgeqp\T+2q3G=\5PX/n=%]jn
%VX<+s]r[&F5Tb,d7TC$IjmX.D'@OgX"U_@p$^!bG0H6^kJ6/Jfe,a,50;RBpbIE+-./nQA1!=&#o4/;09+;VDp&hk=B:>isUOmD6
%#0_FJ.am87MA<I?bG+j,K$0Hg2jA1&6I!t[1">*B7`rMY9X[6dUL6pa'Leu'eml"%gC0%:TX%$D<Dt=`<a%4Wc%cT+A"$VmX3oJ5
%VB,g`Z5?]#3LX^6#$Z+A6/uU2.5[ou346SR7SG1AfqqXd*'g`K@RiZg@Rb7'lA-:W0h]!t7;!pDX-uc7_+#Y:GC8>219_?hA[(Dg
%ar75[(6)#W"k"*k]MlJ-Nk^g$l3$ofpGGhNm-D$P3S<]LfpbEWJQO7QZ7WFtiH>gWY\I[:-,\0IJoYtAH2oi[[S3&9KlKF5fG"\"
%A]C;.R%`1?]`nRuW']N&<,u=[M$WgL6tb*%<!MEM8-'!$PQ7s;YkSjTg]/a:f:bo9TW4+G"#c+i0XW[/'!kShJHscA[T<ph)+h1=
%5Q`pQjtNcE>IHnM3f]:[!9iXb<(1AONW=B%AV\*Da5Ko`V6%Xl.B:XV25i5udr!5p^e1J8<T.m^L?&W5ChON]C5+>Zi*b*ITW8.g
%+Jat`@n(/>Zjf@,Ll5kJ_J%A.Kh[L-7QM3;N[(TH$6@0[g(n(pfR=r[^qb'L>$4uAKV[c!/-lA`1rDsg_83@u.a<9oirYfA`O;Ym
%"pf'+NXs4_5VY*tX=PosCb`IR.gcIF^a^%`Y(EF:^4[4p7t:XNo9$g6.WiWd.O0O1"[iBp0;*ihk"sY41b(R6j,fS$3",g3'$<-7
%dZ2O0N@m#MKoP"%XIp=U,3$lrQ1B%LMO'V2#A@!]%;7JSX!G8/lP-.^<Z);O6T)O!$jn6[h=IB-R^`cQK[BrW)O7U3"r'I=lK)S?
%V?';L@2NQ@i7c8E#-0PI1hGW"o`hQ>\\[\NYj4&U@,MX$G&3's[Z86ZD.q3m(rh7,4<P`O7Q5t'/8fIX:hcuAasPmG!X:.#A*=$4
%OojLV3AZ5U(t<JXEs>mm#NTqQlU8bgbBi1MRmp7,WTkIr]4'0Z2#0BBlVCruZpD?<U[]OAM:k6=A.$:<&M/^`::jI3<%A@E4u8bQ
%(VM#uB6ST3Z=k3F.A`31?^kjubT4o&VVP^LJ.!XWd6l&+97acc&Zs8V./2$j_8u*[L[%3T0[-Z$i@7re9(V0Vq`rPOK)e3EU79I2
%'$AaA=;"?uJVe]G"8B&@-.%=El]h#<47`Ol@^KDAk//%85.V>JPZQ*tl$sD-;s$nVX@DJH'bhP?eg/*N<L'JReB"$%o%oC<r)K:A
%<AbIV[PX(r,m$\n+m:(2)ot8Z,^C*U%B=pu=+Th&.UYGk:feQ7Z'`$G<BqUVFDb%]Gs4bf;?PJTN(KpE%1qHbo4"BgH,J)>#8HWX
%KQl'Fhk$u:rFK>\oO%U:%!DWUENAI-qSfOZ!H"RR5,^3a&RmCL)j\%@WH)eT3LH`bCUO\P'E4XcW!CC31GBDMnB<Tkf;&krP_!`d
%!s[@GA_)tFh_89O#6rF=dq,"c;\':o/QI93@p,S5iG]qN,D`=tOF'7(rBY#A'\s!8@9`hpEWSt&3\H`\"?Q,V).KsY!Tq9:WBN@X
%6:K[oF&SN+%PnB!dYqLfV#:<4dd5\h[24(gVVrFnDKK+iN[^n>2J>kehV&?26</lSW*X%6JL+Bj&&MZ7Y[%hNem.HG:e5D$[[XCp
%5Z+Ff,qs$QgT.,2`+GuQ#F$f/8;U[*7ppB%%4f8PK.S)(a!'Ua'fF/94!;V,,:6ZZ#tnZ0GICt,aA#@=H2,-J2dL)=VG'C2l,7+G
%K"8/%=Oq;H&N-RZgq-)!:fEO(?Skd1-qMl&*nB0c2R>#m&"4G,#3Oqi<[3DnDV&R!Y&UdF(o"U]N(2WS9g^llH5[N^R7e<MAoKDt
%ig?1Q&j7^,q]kiI)M"N]0qdeC!]P[LEAQ$N'GK]R@C@%q5gtO?Z32L0[GluUY3L*<f#fEjHXnEqM-OXR=3;kd_Gkp:PqonGl_Bl&
%SinBI>898qj;ist4lP:0/lW`l;2#SF(KKXllAS`em\"7*m#)JG7?p<A[[`UUce>NI("RP.\uJAQ'u]Ap?1u6npnA,?=4nN-]4eG"
%A7Ne?#cS&n!>@:\X&37r'3%kYIO>bnp637B&QLuHXj0R1NC+/rW#A1bLKH@p.=@h%c;eNc<VQpI3B.Dl_RIiBGY?:K\gFp[q-*NC
%[... ASCII85-encoded Adobe Illustrator private data omitted: binary image payload of the added file, not human-readable ...]
%AI9_PrivateDataEnd
\ No newline at end of file
diff --git a/releasenotes/notes/.placeholder b/releasenotes/notes/.placeholder
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/releasenotes/notes/add-parameter-granularity-7f22c677dc1b1238.yaml b/releasenotes/notes/add-parameter-granularity-7f22c677dc1b1238.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2f83380871dca911094d72898ae776fda314da9a
--- /dev/null
+++ b/releasenotes/notes/add-parameter-granularity-7f22c677dc1b1238.yaml
@@ -0,0 +1,4 @@
+---
+features:
+  - Allow searching for values in metrics using
+    one or more granularities.
diff --git a/releasenotes/notes/add_update_archive_policy_rule-c2ac6b989a3138db.yaml b/releasenotes/notes/add_update_archive_policy_rule-c2ac6b989a3138db.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9b09e012ce0fcb4a736126d1247acb85ff0fe4da
--- /dev/null
+++ b/releasenotes/notes/add_update_archive_policy_rule-c2ac6b989a3138db.yaml
@@ -0,0 +1,3 @@
+---
+features:
+  - It is now possible to rename an archive policy rule.
diff --git a/releasenotes/notes/aggregates-API-d31db66e674cbf60.yaml b/releasenotes/notes/aggregates-API-d31db66e674cbf60.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5b15939ba273209fd5318658be4098f62272d0ba
--- /dev/null
+++ b/releasenotes/notes/aggregates-API-d31db66e674cbf60.yaml
@@ -0,0 +1,10 @@
+---
+features:
+  - |
+    A new API endpoint allows retrieving, transforming, and aggregating
+    measurements on the fly in a flexible way. The endpoint location is
+    `/v1/aggregates`. It accepts a description of `operations` to be done on a
+    list of metrics. Example: `(* 5  (rolling mean 3 (aggregate sum (metric
+    (metric1 mean) (metric2 mean)))))`. The metrics list can be retrieved by
+    searching in resources, by setting 'resource_type' and 'search'. More
+    details are available in the documentation.
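As a rough illustration, the sketch below posts such an operations expression to `/v1/aggregates` with Python's `requests`. The exact payload schema, host, and port are assumptions for illustration, not taken from this changelog; consult the Gnocchi documentation for the authoritative format.

```python
# Hedged sketch: calling the new aggregates endpoint described above.
# The {"operations": ...} payload layout and the local URL are assumptions.
import requests

payload = {
    # Multiply by 5 a 3-point rolling mean of the sum of two metrics,
    # reusing the expression from the release note above.
    "operations": "(* 5 (rolling mean 3 (aggregate sum "
                  "(metric (metric1 mean) (metric2 mean)))))",
}
resp = requests.post(
    "http://localhost:8041/v1/aggregates",  # placeholder host/port
    json=payload,
)
resp.raise_for_status()
print(resp.json())
```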
diff --git a/releasenotes/notes/aggregates-api-output-change-2bc6620c7f595925.yaml b/releasenotes/notes/aggregates-api-output-change-2bc6620c7f595925.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..c9a7115c13e3cc2bc61e898d683f9941d7115f9e
--- /dev/null
+++ b/releasenotes/notes/aggregates-api-output-change-2bc6620c7f595925.yaml
@@ -0,0 +1,6 @@
+---
+fixes:
+  - |
+    The aggregates API output introduced in 4.1.0 did not allow easy identification of which
+    timeseries is associated with which queried metrics/resources. This has been fixed, but
+    the new output format is not backwards compatible with the format released in 4.1.0.
diff --git a/releasenotes/notes/aggregates-metric-wildcard-d489260c685c5727.yaml b/releasenotes/notes/aggregates-metric-wildcard-d489260c685c5727.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..6280060325683ea104afd2916220bf2c94ba997c
--- /dev/null
+++ b/releasenotes/notes/aggregates-metric-wildcard-d489260c685c5727.yaml
@@ -0,0 +1,4 @@
+---
+features:
+  - |
+    A wildcard can be used instead of a metric name in the dynamic aggregates API.
diff --git a/releasenotes/notes/aggregates-rateofchange-94785a381b7bc3b5.yaml b/releasenotes/notes/aggregates-rateofchange-94785a381b7bc3b5.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..38f5b8fdbab64eb0dd68133d2085c19a0455271c
--- /dev/null
+++ b/releasenotes/notes/aggregates-rateofchange-94785a381b7bc3b5.yaml
@@ -0,0 +1,4 @@
+---
+features:
+  - |
+    The dynamic aggregates API has a new method called 'rateofchange'.
diff --git a/releasenotes/notes/amqp1-driver-78a9401768df7367.yaml b/releasenotes/notes/amqp1-driver-78a9401768df7367.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..85142ffeded6005a9f4be29f457e67b668d6ff0f
--- /dev/null
+++ b/releasenotes/notes/amqp1-driver-78a9401768df7367.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    Gnocchi provides a new service to receive metrics and measures over AMQP 1.0.
+    The expected payload format is the one produced by Collectd's AMQP 1.0 write
+    plugin. The daemon is called ``gnocchi-amqp1d``.
diff --git a/releasenotes/notes/ap-in-batch-d83f6aa163d200e9.yaml b/releasenotes/notes/ap-in-batch-d83f6aa163d200e9.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4752566f0d2bef3391848f4b3c2c6dc021ba21c3
--- /dev/null
+++ b/releasenotes/notes/ap-in-batch-d83f6aa163d200e9.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - |
+    A new format for the batch payload is available that allows passing
+    the archive policy description.
diff --git a/releasenotes/notes/api-user-input-invalid-09b045f5ab12c.yaml b/releasenotes/notes/api-user-input-invalid-09b045f5ab12c.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bc5028e461072d3fa59fcdc892f2555d3ce6099e
--- /dev/null
+++ b/releasenotes/notes/api-user-input-invalid-09b045f5ab12c.yaml
@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    The error format for many API requests has changed, and errors are now reported in a clearer way.
diff --git a/releasenotes/notes/archive_policy_bool-9313cae7122c4a2f.yaml b/releasenotes/notes/archive_policy_bool-9313cae7122c4a2f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..682a4e4c474b25697430603dacee33a6b6d81a06
--- /dev/null
+++ b/releasenotes/notes/archive_policy_bool-9313cae7122c4a2f.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - >-
+    A new archive policy named *bool* is provided by default. It provides a
+    cheap and easy way to store boolean measures (0 and 1).
diff --git a/releasenotes/notes/auth_type_option-c335b219afba5569.yaml b/releasenotes/notes/auth_type_option-c335b219afba5569.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5372786410db9509b10e7cf1d94ffa7033d001ed
--- /dev/null
+++ b/releasenotes/notes/auth_type_option-c335b219afba5569.yaml
@@ -0,0 +1,5 @@
+---
+upgrade:
+  - >-
+    The new `auth_type` option specifies which authentication system to use for
+    the REST API. Its default is still `noauth`.
diff --git a/releasenotes/notes/auth_type_pluggable-76a3c73cac8eec6a.yaml b/releasenotes/notes/auth_type_pluggable-76a3c73cac8eec6a.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f198eb8afa9859c550b58cfac404bd19450625a4
--- /dev/null
+++ b/releasenotes/notes/auth_type_pluggable-76a3c73cac8eec6a.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - >-
+    The REST API authentication mechanism is now pluggable. You can write your
+    own plugin to specify how segregation and policy should be enforced.
diff --git a/releasenotes/notes/backfill-cross-aggregation-2de54c7c30b2eb67.yaml b/releasenotes/notes/backfill-cross-aggregation-2de54c7c30b2eb67.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cdfeee45d12b9b9a34f5e873efc9fe1f53780dd1
--- /dev/null
+++ b/releasenotes/notes/backfill-cross-aggregation-2de54c7c30b2eb67.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - Add support for backfilling timestamps with missing points in a subset of
+    timeseries when computing aggregation across multiple metrics. Users can
+    set the `fill` value to either a float or `null`. A granularity
+    must be specified in addition to `fill`.
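A hedged sketch of using this, assuming the cross-metric aggregation endpoint and parameter names from the Gnocchi REST API; the metric UUIDs, host, and port are placeholders.

```python
# Request a cross-metric aggregation while backfilling missing points with 0.
import requests

resp = requests.get(
    "http://localhost:8041/v1/aggregation/metric",  # assumed endpoint
    params={
        "metric": ["<metric-uuid-1>", "<metric-uuid-2>"],  # placeholders
        "fill": 0,            # backfill missing points with 0 (or "null")
        "granularity": 300,   # required alongside `fill`
    },
)
resp.raise_for_status()
print(resp.json())
```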
diff --git a/releasenotes/notes/batch_resource_measures_create_metrics-f73790a8475ad628.yaml b/releasenotes/notes/batch_resource_measures_create_metrics-f73790a8475ad628.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..afccc58bba24e7e1fc03d37fc65f5cd7b89ca2d6
--- /dev/null
+++ b/releasenotes/notes/batch_resource_measures_create_metrics-f73790a8475ad628.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - "When sending measures in batch for resources, it is now possible to pass
+    `create_metric=true` to the query parameters so missing metrics are created.
+    This only works if an archive policy rule matching those named metrics matches."
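A rough sketch of batching measures with auto-creation of missing metrics, per the note above. The batch endpoint path and the payload layout are assumptions based on the Gnocchi REST API; check the documentation for the exact schema.

```python
# Batch measures for a resource, asking Gnocchi to create missing metrics.
import requests

measures = {
    "<resource-uuid>": {                      # placeholder resource id
        "cpu_util": [
            {"timestamp": "2017-01-01T12:00:00", "value": 4.2},
        ],
    },
}
resp = requests.post(
    "http://localhost:8041/v1/batch/resources/metrics/measures",  # assumed path
    params={"create_metric": "true"},         # as described in the note
    json=measures,
)
resp.raise_for_status()
```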
diff --git a/releasenotes/notes/calendar-groups-1336b6d097c01b64.yaml b/releasenotes/notes/calendar-groups-1336b6d097c01b64.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e5fffd9cd24182e5a2ff765ff34e794abb4783eb
--- /dev/null
+++ b/releasenotes/notes/calendar-groups-1336b6d097c01b64.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    Resampling based on calendar dates is now supported. Using the `resample`
+    parameter, users can specify grouping by: `Y`, `H`, `Q`, `M`, or `W`.
+    Details on each grouping can be found in the documentation.
diff --git a/releasenotes/notes/carbonara-truncate-timespan-3694b96449709083.yaml b/releasenotes/notes/carbonara-truncate-timespan-3694b96449709083.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a048a4dea80178dd5bb4ecbf135da8e836d63675
--- /dev/null
+++ b/releasenotes/notes/carbonara-truncate-timespan-3694b96449709083.yaml
@@ -0,0 +1,9 @@
+---
+features:
+  - |
+    Gnocchi now strictly respects the timespan configured in the archive policy
+    when storing aggregates. Previously, it could keep up to the number of
+    points defined in the archive policy, which could cover more than the
+    configured timespan. The timespan duration is now strictly respected:
+    Gnocchi only keeps the points between the last aggregated timestamp and
+    that timestamp minus the duration of the archive policy timespan.
diff --git a/releasenotes/notes/ceph-omap-34e069dfb3df764d.yaml b/releasenotes/notes/ceph-omap-34e069dfb3df764d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d053330b572bb6670da6d1ca7fdb4d3b31670f58
--- /dev/null
+++ b/releasenotes/notes/ceph-omap-34e069dfb3df764d.yaml
@@ -0,0 +1,5 @@
+---
+upgrade:
+  - The Ceph driver has moved the storage of measures metadata
+    from the xattr API to the omap API. Already created measures are
+    migrated during the gnocchi-upgrade run.
diff --git a/releasenotes/notes/ceph-read-async-ca2f7512c6842adb.yaml b/releasenotes/notes/ceph-read-async-ca2f7512c6842adb.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..2dfe37dea0216f7171130444c703d39fc4548e16
--- /dev/null
+++ b/releasenotes/notes/ceph-read-async-ca2f7512c6842adb.yaml
@@ -0,0 +1,4 @@
+---
+other:
+  - The Ceph driver now uses the RADOS asynchronous API to retrieve
+    measures to process in parallel.
diff --git a/releasenotes/notes/change-file-driver-layout-41c7a458160c4cb7.yaml b/releasenotes/notes/change-file-driver-layout-41c7a458160c4cb7.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..844958570903bbd107aa62472b6af53fd5cecd05
--- /dev/null
+++ b/releasenotes/notes/change-file-driver-layout-41c7a458160c4cb7.yaml
@@ -0,0 +1,8 @@
+---
+features:
+  - |
+    By default, the file driver creates a subdirectory for every two bytes of the
+    metric uuid. This avoids reaching limitations of certain filesystems and
+    improves performance in certain cases. This is configurable with the
+    ``file_subdir_len`` option. If the backend already has data coming from a
+    previous version of Gnocchi, the layout is kept unchanged and ``file_subdir_len`` is set to 0.
diff --git a/releasenotes/notes/creator_field-6b715c917f6afc93.yaml b/releasenotes/notes/creator_field-6b715c917f6afc93.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..e9b3bfd1b71116cc6b8e486b837c76b79ff7ed54
--- /dev/null
+++ b/releasenotes/notes/creator_field-6b715c917f6afc93.yaml
@@ -0,0 +1,6 @@
+---
+deprecations:
+  - >-
+    The `created_by_user_id` and `created_by_project_id` fields are now
+    deprecated and have been merged into a single `creator` field. The old
+    fields are still returned and managed by the API for now.
diff --git a/releasenotes/notes/datetime-resource-attribute-type-1e627a686568f72a.yaml b/releasenotes/notes/datetime-resource-attribute-type-1e627a686568f72a.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8d9f3912809c06621cf79d473903e1de31c892d3
--- /dev/null
+++ b/releasenotes/notes/datetime-resource-attribute-type-1e627a686568f72a.yaml
@@ -0,0 +1,4 @@
+---
+features:
+  - |
+    A new data type is available for resource type attributes: datetime.
diff --git a/releasenotes/notes/delete-resources-f10d21fc02f53f16.yaml b/releasenotes/notes/delete-resources-f10d21fc02f53f16.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0f6b04214347d075dafd0e3cdc19dcce2818b767
--- /dev/null
+++ b/releasenotes/notes/delete-resources-f10d21fc02f53f16.yaml
@@ -0,0 +1,3 @@
+---
+features:
+  - A new REST API call is provided to delete multiple resources at once using a search filter.
diff --git a/releasenotes/notes/deprecate-moving-average-a7596a0009be5b12.yaml b/releasenotes/notes/deprecate-moving-average-a7596a0009be5b12.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8d39853c686d863cdff02da8519e05a42ab70347
--- /dev/null
+++ b/releasenotes/notes/deprecate-moving-average-a7596a0009be5b12.yaml
@@ -0,0 +1,4 @@
+---
+deprecations:
+  - |
+    ``moving_average`` aggregate is deprecated.
diff --git a/releasenotes/notes/deprecate-noauth-01b7e961d9a17e9e.yaml b/releasenotes/notes/deprecate-noauth-01b7e961d9a17e9e.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..635097c639c79a15ce1e01267ef6260beeaba338
--- /dev/null
+++ b/releasenotes/notes/deprecate-noauth-01b7e961d9a17e9e.yaml
@@ -0,0 +1,4 @@
+---
+deprecations:
+  - The `noauth` authentication mechanism is deprecated and will be removed in
+    a future version.
diff --git a/releasenotes/notes/dynamic-resampling-b5e545b1485c152f.yaml b/releasenotes/notes/dynamic-resampling-b5e545b1485c152f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b2c5167bef0d54923a27807852a7279fdbe44e49
--- /dev/null
+++ b/releasenotes/notes/dynamic-resampling-b5e545b1485c152f.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - Add a `resample` parameter to support resampling stored time series to
+    another granularity, not necessarily one in the existing archive policy.
+    If both resampling and reaggregation parameters are specified, resampling
+    will occur prior to reaggregation.
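A hedged sketch of fetching measures resampled to a coarser granularity than the archive policy stores. The host, port, and metric UUID are placeholders; `resample` and `granularity` follow the notes above (calendar specifiers such as `W` are also accepted per the calendar-groups note).

```python
# Resample 5-minute points into 1-hour points when reading measures.
import requests

resp = requests.get(
    "http://localhost:8041/v1/metric/<metric-uuid>/measures",  # placeholder
    params={
        "granularity": 300,   # granularity to read from the archive policy
        "resample": 3600,     # resample the 5-minute points into 1-hour points
    },
)
resp.raise_for_status()
# Gnocchi measures are returned as [timestamp, granularity, value] triples.
for timestamp, granularity, value in resp.json():
    print(timestamp, granularity, value)
```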
diff --git a/releasenotes/notes/fill=dropna-9e055895e7bff778.yaml b/releasenotes/notes/fill=dropna-9e055895e7bff778.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..16e17d3df5f0141848acb467237dc2734b35c30c
--- /dev/null
+++ b/releasenotes/notes/fill=dropna-9e055895e7bff778.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    The aggregates API and the cross-metric aggregation API accept `dropna` for
+    the `fill` parameter. This acts like `null`, but NaN values are removed
+    from the result.
diff --git a/releasenotes/notes/filter-param-for-aggregation-f68c47c59ca81dc0.yaml b/releasenotes/notes/filter-param-for-aggregation-f68c47c59ca81dc0.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..156833c06d706832e955b46d3f84d593b8bc7b68
--- /dev/null
+++ b/releasenotes/notes/filter-param-for-aggregation-f68c47c59ca81dc0.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - |
+    The /v1/aggregation/resources endpoint can now take a string-format
+    `filter` parameter instead of the JSON format in the request payload.
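A hedged sketch of the string `filter` format mentioned above. The exact endpoint path, metric name, and project id are placeholders; the point shown is that the filter can be passed as a query-string expression rather than a JSON body.

```python
# Aggregate a metric across resources, filtering with a string expression.
import requests

resp = requests.post(
    "http://localhost:8041/v1/aggregation/resource/generic/metric/cpu_util",  # assumed path
    params={
        "aggregation": "mean",
        "filter": "project_id = '<project-uuid>'",  # string-format filter
    },
)
resp.raise_for_status()
print(resp.json())
```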
diff --git a/releasenotes/notes/fnmatch-python-2.7-c524ce1e1b238b0a.yaml b/releasenotes/notes/fnmatch-python-2.7-c524ce1e1b238b0a.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bab5e73a912e44649f0902d45f9e60a056645a89
--- /dev/null
+++ b/releasenotes/notes/fnmatch-python-2.7-c524ce1e1b238b0a.yaml
@@ -0,0 +1,5 @@
+---
+other:
+  - |
+    A workaround for a Python 2.7 bug in `fnmatch` has been removed. Make sure
+    you use at least Python 2.7.9 to avoid running into it.
diff --git a/releasenotes/notes/forbid-slash-b3ec2bc77cc34b49.yaml b/releasenotes/notes/forbid-slash-b3ec2bc77cc34b49.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5999cb7f3f9c6270288827dd9e152b7585d0cd2a
--- /dev/null
+++ b/releasenotes/notes/forbid-slash-b3ec2bc77cc34b49.yaml
@@ -0,0 +1,7 @@
+---
+fixes:
+  - \'/\' in resource ids and metric names was accepted by mistake, since such
+    names could be POSTed but not GETed/PATCHed/DELETEd. This character is now
+    forbidden in resource ids and metric names, and the REST API returns 400
+    if it is present. Existing metric names and resource ids containing a
+    \'/\' have it replaced by \'_\'.
diff --git a/releasenotes/notes/gnocchi-api-uwsgi-f16d958cb26ad90e.yaml b/releasenotes/notes/gnocchi-api-uwsgi-f16d958cb26ad90e.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..525c72f737dcbb0ac0012375db755ea59f5e6334
--- /dev/null
+++ b/releasenotes/notes/gnocchi-api-uwsgi-f16d958cb26ad90e.yaml
@@ -0,0 +1,7 @@
+---
+features:
+  - |
+    The `gnocchi-api` script is now a wrapper around uWSGI. Using a
+    WSGI-compliant HTTP server has always been recommended, but since most
+    users just want to run gnocchi-api, it is now fast and efficient by
+    default.
diff --git a/releasenotes/notes/gnocchi_config_generator-0fc337ba8e3afd5f.yaml b/releasenotes/notes/gnocchi_config_generator-0fc337ba8e3afd5f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..73af05f2a96f5769aafe0d021214e6a7b96a0349
--- /dev/null
+++ b/releasenotes/notes/gnocchi_config_generator-0fc337ba8e3afd5f.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - >-
+    The `gnocchi-config-generator` program can now generate a default
+    configuration file, usable as a template for custom tweaking.
diff --git a/releasenotes/notes/healthcheck-middleware-81c2f0d02ebdb5cc.yaml b/releasenotes/notes/healthcheck-middleware-81c2f0d02ebdb5cc.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5e28af9c8e50f93be82fe14dc21319168bad67e2
--- /dev/null
+++ b/releasenotes/notes/healthcheck-middleware-81c2f0d02ebdb5cc.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - A healthcheck endpoint is provided by default at /healthcheck. It leverages
+    the oslo_middleware healthcheck middleware and allows retrieving information
+    about the health of the API service.
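A minimal sketch of probing this endpoint; the host and port are placeholders for wherever gnocchi-api is listening.

```python
# Probe the healthcheck endpoint exposed by the API service.
import requests

resp = requests.get("http://localhost:8041/healthcheck")  # placeholder URL
print(resp.status_code)  # 200 when the API service reports healthy
```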
diff --git a/releasenotes/notes/incoming-sacks-413f4818882ab83d.yaml b/releasenotes/notes/incoming-sacks-413f4818882ab83d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ef6d788d03a48767c0ce53d342420d797b27ab07
--- /dev/null
+++ b/releasenotes/notes/incoming-sacks-413f4818882ab83d.yaml
@@ -0,0 +1,14 @@
+---
+features:
+  - |
+    New measures are now sharded into sacks to better distribute data across
+    the storage driver as well as to allow improved scheduling of the
+    aggregation workload.
+upgrade:
+  - |
+    The storage driver needs to be upgraded. The number of sacks to distribute
+    across can be configured by passing a ``sacks-number``
+    value on upgrade. A default number of sacks is created if not set.
+    This can also be reconfigured post-upgrade using the
+    ``gnocchi-change-sack-size`` CLI. See the documentation for hints on the
+    number of sacks to set for your environment, and for upgrade notes.
diff --git a/releasenotes/notes/influxdb-endpoint-13cbd82cf287d91c.yaml b/releasenotes/notes/influxdb-endpoint-13cbd82cf287d91c.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..75bb6d2fe802514bf2db8d75300d9fea11c28683
--- /dev/null
+++ b/releasenotes/notes/influxdb-endpoint-13cbd82cf287d91c.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    Gnocchi now provides a new `/v1/influxdb` endpoint that allows ingesting
+    data from InfluxDB clients. Only writes are implemented. This should ease
+    the transition for users coming from InfluxDB tools such as Telegraf.
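A hedged sketch of writing InfluxDB line-protocol data to the new endpoint. The `/write` sub-path and `db` parameter mirror the InfluxDB v1 write API that clients such as Telegraf speak; treat them, along with the host and port, as assumptions here.

```python
# Write one line-protocol point to the InfluxDB-compatible endpoint.
import requests

line = "cpu,host=web01 usage=42.0"  # InfluxDB line protocol, placeholder data
resp = requests.post(
    "http://localhost:8041/v1/influxdb/write",  # assumed sub-path
    params={"db": "telegraf"},
    data=line,
)
resp.raise_for_status()
```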
diff --git a/releasenotes/notes/injector-af9e68fdfe02d322.yaml b/releasenotes/notes/injector-af9e68fdfe02d322.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..86e520d64878736ff0c43b81514662b777167561
--- /dev/null
+++ b/releasenotes/notes/injector-af9e68fdfe02d322.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    The `gnocchi-injector` tool has been added. It allows injecting random
+    measures into a configured number of metrics in order to generate load for
+    `metricd`.
diff --git a/releasenotes/notes/lighten-default-archive-policies-455561c027edf4ad.yaml b/releasenotes/notes/lighten-default-archive-policies-455561c027edf4ad.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a213d3e3820a8b8a1df45b3e797a06be9ebb9d5e
--- /dev/null
+++ b/releasenotes/notes/lighten-default-archive-policies-455561c027edf4ad.yaml
@@ -0,0 +1,5 @@
+---
+other:
+  - The default archive policies "low" and "medium" now store less data than
+    they used to. They use only 1 and 2 archive policy definitions
+    respectively, which speeds up their computation by 66% and 33%.
diff --git a/releasenotes/notes/mandatory-boundaries-for-overlap-af28dc1e0946c500.yaml b/releasenotes/notes/mandatory-boundaries-for-overlap-af28dc1e0946c500.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..37dcdbfcfd708216b0cc03d4dd99231f7676df2b
--- /dev/null
+++ b/releasenotes/notes/mandatory-boundaries-for-overlap-af28dc1e0946c500.yaml
@@ -0,0 +1,7 @@
+---
+fixes:
+  - |
+    When specifying `needed_overlap` while aggregating across metrics without
+    bounds, the result did not necessarily honour the required overlap
+    provided. Aggregation without bounds now requires 100% overlap; an error
+    is raised otherwise.
diff --git a/releasenotes/notes/metricd-respect-processing-delay-option-b8cc9895dec75567.yaml b/releasenotes/notes/metricd-respect-processing-delay-option-b8cc9895dec75567.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..61a12ce12ee09acb6ab5fcd75bdb9faaf6edf3a5
--- /dev/null
+++ b/releasenotes/notes/metricd-respect-processing-delay-option-b8cc9895dec75567.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    Metricd exposes a new option called `greedy` (true by default) that
+    controls whether eager processing of new measures is enabled when they
+    are available.
diff --git a/releasenotes/notes/mysql_precise_datetime-57f868f3f42302e2.yaml b/releasenotes/notes/mysql_precise_datetime-57f868f3f42302e2.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..579c835db5ec6402e4df1c0981c0b45428a3440f
--- /dev/null
+++ b/releasenotes/notes/mysql_precise_datetime-57f868f3f42302e2.yaml
@@ -0,0 +1,4 @@
+---
+other:
+  - Gnocchi now leverages microsecond timestamps, available since MySQL 5.6.4,
+    which is therefore now the minimum required version of MySQL.
diff --git a/releasenotes/notes/no-auth-removed-b6e936dcefb4b9b1.yaml b/releasenotes/notes/no-auth-removed-b6e936dcefb4b9b1.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1c63e155246fbd4e772ddccd2b645d975248eef7
--- /dev/null
+++ b/releasenotes/notes/no-auth-removed-b6e936dcefb4b9b1.yaml
@@ -0,0 +1,4 @@
+---
+other:
+  - |
+    The deprecated `noauth` authentication mode has been removed.
diff --git a/releasenotes/notes/noauth-force-headers-dda926ce83f810e8.yaml b/releasenotes/notes/noauth-force-headers-dda926ce83f810e8.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..004ef170d5ecd05c7b8eec95a0e62b23b6fd5bc5
--- /dev/null
+++ b/releasenotes/notes/noauth-force-headers-dda926ce83f810e8.yaml
@@ -0,0 +1,5 @@
+---
+other:
+  - >-
+    The `noauth` authentication mode now requires the `X-User-Id` and/or
+    `X-Project-Id` headers to be present.
diff --git a/releasenotes/notes/noauth-keystone-compat-e8f760591d593f07.yaml b/releasenotes/notes/noauth-keystone-compat-e8f760591d593f07.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0aaffc38e4b78fe00ec13a6b1965557d9345d1ba
--- /dev/null
+++ b/releasenotes/notes/noauth-keystone-compat-e8f760591d593f07.yaml
@@ -0,0 +1,9 @@
+---
+upgrade:
+  - >-
+    The `auth_type` option now defaults to "basic". This mode does not do any
+    segregation and uses the standard HTTP `Authorization` header for
+    authentication. The old "noauth" authentication mechanism, based on the
+    Keystone headers (`X-User-Id`, `X-Creator-Id` and `X-Roles`) and the
+    Keystone segregation rules, which was the default up to Gnocchi 3.0, is
+    still available.
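With the "basic" mode, the user name is carried in the standard `Authorization` header, as run-func-tests.sh in this change does (`YWRtaW46` is "admin:" base64-encoded):

```bash
# List generic resources as the "admin" user with basic authentication.
curl -H "Authorization: basic YWRtaW46" http://localhost:8041/v1/resource/generic
```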
diff --git a/releasenotes/notes/oslo.log-removal-69a17397b10bc2bb.yaml b/releasenotes/notes/oslo.log-removal-69a17397b10bc2bb.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..8fef1f8731d442b429c45327b34f7d876f9f5674
--- /dev/null
+++ b/releasenotes/notes/oslo.log-removal-69a17397b10bc2bb.yaml
@@ -0,0 +1,5 @@
+---
+upgrade:
+  - |
+    The logging library oslo.log has been replaced by daiquiri. Some
+    superfluous configuration options have been removed.
diff --git a/releasenotes/notes/pagination-link-3cc64889ac414d28.yaml b/releasenotes/notes/pagination-link-3cc64889ac414d28.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..db26fc33928134fea6ed5e3d2b14870b07227f18
--- /dev/null
+++ b/releasenotes/notes/pagination-link-3cc64889ac414d28.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    All listing endpoints (/v1/metric, /v1/resource/<type>, /v1/search/resource, ...)
+    now return a `Link` header as described by RFC 5988. For now, only the
+    next page link is provided.
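The header can be observed with any HTTP client (host, port and query parameters are illustrative):

```bash
# The response carries something like:
#   Link: <http://localhost:8041/v1/metric?limit=10&marker=...>; rel="next"
curl -i "http://localhost:8041/v1/metric?limit=10"
```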
diff --git a/releasenotes/notes/parallel_operations_replaces_aggregation_workers_numbers-cb3a8cf62211bd5b.yaml b/releasenotes/notes/parallel_operations_replaces_aggregation_workers_numbers-cb3a8cf62211bd5b.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b31838db3629b14cff8b91aba65961089416f78f
--- /dev/null
+++ b/releasenotes/notes/parallel_operations_replaces_aggregation_workers_numbers-cb3a8cf62211bd5b.yaml
@@ -0,0 +1,7 @@
+---
+upgrade:
+  - |
+    The `storage.aggregation_workers_number` parameter has been replaced by a
+    more general `parallel_operations` option. It controls the number of
+    parallel jobs that can be run by a worker using threads in various code
+    paths.
diff --git a/releasenotes/notes/pecan-debug-removed-1a9dbc4a0a6ad581.yaml b/releasenotes/notes/pecan-debug-removed-1a9dbc4a0a6ad581.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9098b81fd7fa88d7768b79ddac267480deaac3e3
--- /dev/null
+++ b/releasenotes/notes/pecan-debug-removed-1a9dbc4a0a6ad581.yaml
@@ -0,0 +1,3 @@
+---
+upgrade:
+  - The `api.pecan_debug` option has been removed.
diff --git a/releasenotes/notes/prometheus-bc2153962b9a237a.yaml b/releasenotes/notes/prometheus-bc2153962b9a237a.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..7000a45bef0b70d1db646d94d85b1adafa1a5794
--- /dev/null
+++ b/releasenotes/notes/prometheus-bc2153962b9a237a.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |
+    The Gnocchi API can act as a Prometheus remote write adapter to receive
+    Prometheus metrics. The endpoint to configure in the Prometheus
+    configuration is https://<gnocchi-host-port>/v1/prometheus/write.
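A sketch of the matching Prometheus side, using the standard `remote_write` configuration block (host and credentials are placeholders):

```bash
# Point Prometheus at the Gnocchi remote write adapter.
cat >> prometheus.yml <<EOF
remote_write:
  - url: "https://gnocchi.example.com/v1/prometheus/write"
    basic_auth:
      username: admin
      password: ""
EOF
```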
diff --git a/releasenotes/notes/rate-archive-policy-74888634f90a81e3.yaml b/releasenotes/notes/rate-archive-policy-74888634f90a81e3.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..7da8b32cf43c2adcb58164eff3b9e2589d14957b
--- /dev/null
+++ b/releasenotes/notes/rate-archive-policy-74888634f90a81e3.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - |
+    New aggregation methods are available for archive policies: rate:mean,
+    rate:last, .... These methods compute the rate of change of the
+    timeseries before applying the selected aggregation method.
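A sketch of creating an archive policy that uses these methods (the payload shape follows the archive policy creation API; name and definition values are illustrative):

```bash
# Create an archive policy that aggregates the rate of change of measures.
curl -X POST http://localhost:8041/v1/archive_policy \
     -H "Content-Type: application/json" \
     -d '{"name": "rate-example",
          "aggregation_methods": ["rate:mean", "rate:last"],
          "definition": [{"granularity": "1 minute", "timespan": "1 day"}]}'
```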
diff --git a/releasenotes/notes/redis-driver-299dc443170364bc.yaml b/releasenotes/notes/redis-driver-299dc443170364bc.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b8214f272566f3bbd044f8012901f1b9c1a26c69
--- /dev/null
+++ b/releasenotes/notes/redis-driver-299dc443170364bc.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - |
+    A Redis driver has been introduced for storing incoming measures and
+    computed timeseries.
diff --git a/releasenotes/notes/reloading-734a639a667c93ee.yaml b/releasenotes/notes/reloading-734a639a667c93ee.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..0cf2eb7304ccc0b5decc99a86c04e16234ad2d98
--- /dev/null
+++ b/releasenotes/notes/reloading-734a639a667c93ee.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - gnocchi-metricd now uses the cotyledon/oslo.config helper to handle
+    configuration file reloading. You can dynamically change the number
+    of workers by changing the configuration file and sending SIGHUP to the
+    metricd master process.
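For example (the pgrep pattern is an assumption; any way of finding the master process id works):

```bash
# After editing the configuration file, ask the metricd master process
# to reload it and respawn its workers.
kill -HUP "$(pgrep -o -f gnocchi-metricd)"
```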
diff --git a/releasenotes/notes/remoteuser-auth-plugin-00f0cefb6b003a6e.yaml b/releasenotes/notes/remoteuser-auth-plugin-00f0cefb6b003a6e.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..93c6c558c5e5b2a63daae91d48592a3571a8ffe1
--- /dev/null
+++ b/releasenotes/notes/remoteuser-auth-plugin-00f0cefb6b003a6e.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - |
+    Gnocchi provides a new authentication mode 'remoteuser'. It uses the HTTP
+    server's REMOTE_USER environment variable to retrieve the username.
diff --git a/releasenotes/notes/remove-deprecated-dynamic-aggregation-e14ece1d0fcaf313.yaml b/releasenotes/notes/remove-deprecated-dynamic-aggregation-e14ece1d0fcaf313.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..12da6f2081ccf0b4e3543028c830cd27f45b8c19
--- /dev/null
+++ b/releasenotes/notes/remove-deprecated-dynamic-aggregation-e14ece1d0fcaf313.yaml
@@ -0,0 +1,4 @@
+---
+features:
+  - |
+    The deprecated dynamic aggregation (moving average) has been removed.
diff --git a/releasenotes/notes/remove-legacy-ceilometer-resources-16da2061d6d3f506.yaml b/releasenotes/notes/remove-legacy-ceilometer-resources-16da2061d6d3f506.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..4d6e0f8752a49f4f1e075faddc4d3ec18114a7e6
--- /dev/null
+++ b/releasenotes/notes/remove-legacy-ceilometer-resources-16da2061d6d3f506.yaml
@@ -0,0 +1,3 @@
+---
+deprecations:
+  - The creation of the legacy Ceilometer resource types has been removed.
diff --git a/releasenotes/notes/remove-options-in-accept-header-7e5e074d8fccfb0f.yaml b/releasenotes/notes/remove-options-in-accept-header-7e5e074d8fccfb0f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..b8db5e95c960f5001474de032ec69aa393e3547d
--- /dev/null
+++ b/releasenotes/notes/remove-options-in-accept-header-7e5e074d8fccfb0f.yaml
@@ -0,0 +1,7 @@
+---
+upgrade:
+  - |
+    The API offered several features that accepted options via the use of
+    the `Accept` header. This usage was not compliant with RFC 7231 and
+    created compatibility problems with WebOb 1.8.0 and above; it has
+    therefore been removed.
diff --git a/releasenotes/notes/removed-median-and-95pct-from-default-aggregation-methods-2f5ec059855e17f9.yaml b/releasenotes/notes/removed-median-and-95pct-from-default-aggregation-methods-2f5ec059855e17f9.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..75ff241a6b5e03d0dbb0768f5683c17ead6374cb
--- /dev/null
+++ b/releasenotes/notes/removed-median-and-95pct-from-default-aggregation-methods-2f5ec059855e17f9.yaml
@@ -0,0 +1,5 @@
+---
+other:
+  - The default archive policies list no longer contains the 95pct and median
+    aggregation methods. These are the least used methods; removing them
+    should make gnocchi-metricd faster by more than 25% in the default scenario.
diff --git a/releasenotes/notes/resource-type-patch-8b6a85009db0671c.yaml b/releasenotes/notes/resource-type-patch-8b6a85009db0671c.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a837c72da227b204e07188cfc27f6c088ab4174f
--- /dev/null
+++ b/releasenotes/notes/resource-type-patch-8b6a85009db0671c.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - |-
+    A new REST API endpoint has been added to update a resource type:
+    "PATCH /v1/resource-type/foobar". The expected payload is in RFC 6902
+    format. Some examples can be found in the documentation.
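A sketch of such a patch (the attribute name and its options are illustrative; the media type follows RFC 6902's application/json-patch+json):

```bash
# Add a new optional string attribute to the "foobar" resource type.
curl -X PATCH http://localhost:8041/v1/resource-type/foobar \
     -H "Content-Type: application/json-patch+json" \
     -d '[{"op": "add",
           "path": "/attributes/display_name",
           "value": {"type": "string", "required": false}}]'
```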
diff --git a/releasenotes/notes/resource-type-required-attributes-f446c220d54c8eb7.yaml b/releasenotes/notes/resource-type-required-attributes-f446c220d54c8eb7.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..a91c8176c6ea0492770508e8ca989d424cb586b7
--- /dev/null
+++ b/releasenotes/notes/resource-type-required-attributes-f446c220d54c8eb7.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - When updating a resource type attribute, it is now possible to pass the
+    'fill' option for each attribute to fill existing resources.
+  - required=True is now supported when updating a resource type. This
+    requires the 'fill' option to be set.
diff --git a/releasenotes/notes/s3-bucket-limit-224951bb6a81ddce.yaml b/releasenotes/notes/s3-bucket-limit-224951bb6a81ddce.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..1dba0232e2b4414991ce7098bc038679eac8874b
--- /dev/null
+++ b/releasenotes/notes/s3-bucket-limit-224951bb6a81ddce.yaml
@@ -0,0 +1,8 @@
+---
+fixes:
+  - |
+    Previously, the S3 storage driver stored aggregates in one bucket per
+    metric. This would quickly run into the bucket limit set by S3. The
+    driver has been fixed so that it stores all aggregates for all metrics in
+    a single bucket. Buckets previously created by Gnocchi will need to be
+    deleted as they will no longer be handled.
diff --git a/releasenotes/notes/s3_consistency_check_timeout-a30db3bd07a9a281.yaml b/releasenotes/notes/s3_consistency_check_timeout-a30db3bd07a9a281.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..5b5426ee2be4a5a57ea2bffe1d1b1626c5730628
--- /dev/null
+++ b/releasenotes/notes/s3_consistency_check_timeout-a30db3bd07a9a281.yaml
@@ -0,0 +1,9 @@
+---
+features:
+  - |
+    The S3 driver now checks for data consistency by default. S3 does not
+    guarantee read-after-write consistency when overwriting data. Gnocchi now
+    waits up to `s3_check_consistency_timeout` seconds before returning and
+    unlocking a metric for new processing. This makes sure that the data that
+    will be read by the next workers will be consistent and that no data will
+    be lost. This feature can be disabled by setting the value to 0.
diff --git a/releasenotes/notes/s3_driver-4b30122bdbe0385d.yaml b/releasenotes/notes/s3_driver-4b30122bdbe0385d.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..535c6d1e70338bcde93924e17b43ebe625cf132b
--- /dev/null
+++ b/releasenotes/notes/s3_driver-4b30122bdbe0385d.yaml
@@ -0,0 +1,5 @@
+---
+features:
+  - New storage driver for AWS S3.
+    This new driver works in the same way as the Swift driver, except that it
+    leverages the Amazon Web Services S3 object storage API.
diff --git a/releasenotes/notes/storage-engine-v3-b34bd0723abf292f.yaml b/releasenotes/notes/storage-engine-v3-b34bd0723abf292f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..cb2ef22a89e6692bc19b7afadefb14d87a808ba5
--- /dev/null
+++ b/releasenotes/notes/storage-engine-v3-b34bd0723abf292f.yaml
@@ -0,0 +1,13 @@
+---
+features:
+  - The Carbonara based storage engine has been updated and greatly improved.
+    It now features fast writes for Ceph (no change for the file and Swift
+    based drivers) by using an append method.
+    It also features on-the-fly data compression (using LZ4) of the aggregated
+    time series, reducing the data space usage by at least 50%.
+upgrade:
+  - gnocchi-upgrade must be run before running the new version of
+    gnocchi-metricd and the HTTP REST API in order to upgrade from version 2 of
+    the Carbonara storage engine to version 3. It will read all metrics and
+    convert them to the new version 3 serialization format (compressing the data),
+    which might take some time.
diff --git a/releasenotes/notes/storage-incoming-586b3e81de8deb4f.yaml b/releasenotes/notes/storage-incoming-586b3e81de8deb4f.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..f1d63bb66f4db212c9a1ddaea260b6195c6e21f3
--- /dev/null
+++ b/releasenotes/notes/storage-incoming-586b3e81de8deb4f.yaml
@@ -0,0 +1,6 @@
+---
+features:
+  - New measures that ought to be processed by *metricd* can now be stored
+    using a different storage driver. By default, the driver used is still
+    the regular storage driver configured. See the `[incoming]` section in
+    the configuration file.
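A sketch of the corresponding configuration (the option names under `[incoming]` are assumptions modeled on the storage driver options):

```bash
# Keep aggregates on the regular storage driver but buffer incoming
# measures in Redis.
cat >> /etc/gnocchi/gnocchi.conf <<EOF
[incoming]
driver = redis
redis_url = redis://localhost:6379
EOF
```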
diff --git a/releasenotes/notes/swift_keystone_v3-606da8228fc13a32.yaml b/releasenotes/notes/swift_keystone_v3-606da8228fc13a32.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..9a52e062bcd1ced77b4618b759f3189e1beddfc9
--- /dev/null
+++ b/releasenotes/notes/swift_keystone_v3-606da8228fc13a32.yaml
@@ -0,0 +1,3 @@
+---
+features:
+  - Swift now supports authentication with Keystone v3 API.
diff --git a/releasenotes/notes/upgrade-code-removal-from-2.2-and-3.0-a01fc64ecb39c327.yaml b/releasenotes/notes/upgrade-code-removal-from-2.2-and-3.0-a01fc64ecb39c327.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..bd0480ca50da9381889048bf3f7bfa2d80fa4c3b
--- /dev/null
+++ b/releasenotes/notes/upgrade-code-removal-from-2.2-and-3.0-a01fc64ecb39c327.yaml
@@ -0,0 +1,4 @@
+---
+upgrade:
+  - |
+    The storage upgrade is only supported from version 3.1.
diff --git a/releasenotes/notes/uuid5-change-8a8c467d2b2d4c85.yaml b/releasenotes/notes/uuid5-change-8a8c467d2b2d4c85.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..ec6b6c51865ad76b9a9614b4afb43218666a373e
--- /dev/null
+++ b/releasenotes/notes/uuid5-change-8a8c467d2b2d4c85.yaml
@@ -0,0 +1,12 @@
+---
+issues:
+  - >-
+    The conversion mechanism provided by the API to convert a non-UUID
+    resource id to a UUID is now also based on the user creating or accessing
+    the resource. This makes sure that the conversion generates a UUID that
+    is unique to the user and that several users can use the same string as
+    `original_resource_id`.
+upgrade:
+  - >-
+    Since `original_resource_id` is now unique per creator, users cannot
+    refer to a resource using its `original_resource_id` if the resource was
+    not created by them.
diff --git a/releasenotes/notes/wsgi-script-deprecation-c6753a844ca0b411.yaml b/releasenotes/notes/wsgi-script-deprecation-c6753a844ca0b411.yaml
new file mode 100644
index 0000000000000000000000000000000000000000..d2739ec71fb00bf044e4b00ac54cbfec034bf6fc
--- /dev/null
+++ b/releasenotes/notes/wsgi-script-deprecation-c6753a844ca0b411.yaml
@@ -0,0 +1,7 @@
+---
+deprecations:
+  - |
+    The custom gnocchi/rest/app.wsgi file is now deprecated; the gnocchi-api
+    binary should be used as the WSGI script file. For example, with uwsgi,
+    "--wsgi-file /usr/lib/python2.7/gnocchi/rest/app.wsgi" should be replaced
+    by "--wsgi-file /usr/bin/gnocchi-api".
diff --git a/run-func-tests.sh b/run-func-tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..da261380f2d73d63fef36dd68c27020615d18fcc
--- /dev/null
+++ b/run-func-tests.sh
@@ -0,0 +1,72 @@
+#!/bin/bash -x
+set -e
+
+cleanup(){
+    type -t indexer_stop >/dev/null && indexer_stop || true
+    type -t storage_stop >/dev/null && storage_stop || true
+}
+trap cleanup EXIT
+
+check_empty_var() {
+    local x=$(eval echo `echo \\$${1}`)
+    if [ -z "$x" ]; then
+        echo "Variable \$${1} is unset"
+        exit 15
+    fi
+}
+
+PYTHON_VERSION_MAJOR=$(python -c 'import sys; print(sys.version_info.major)')
+
+GNOCCHI_TEST_STORAGE_DRIVERS=${GNOCCHI_TEST_STORAGE_DRIVERS:-file}
+GNOCCHI_TEST_INDEXER_DRIVERS=${GNOCCHI_TEST_INDEXER_DRIVERS:-postgresql}
+for storage in ${GNOCCHI_TEST_STORAGE_DRIVERS}; do
+    if [ "$storage" == "swift" ] && [ "$PYTHON_VERSION_MAJOR" == "3" ]; then
+        echo "WARNING: swift does not support python 3 skipping"
+        continue
+    fi
+    for indexer in ${GNOCCHI_TEST_INDEXER_DRIVERS}; do
+        unset STORAGE_URL
+        unset INDEXER_URL
+        case $storage in
+            ceph)
+                eval $(pifpaf -e STORAGE run ceph)
+                check_empty_var STORAGE_URL
+                rados -c $STORAGE_CEPH_CONF mkpool gnocchi
+                STORAGE_URL=ceph://$STORAGE_CEPH_CONF
+                ;;
+            s3)
+                if ! which s3rver >/dev/null 2>&1
+                then
+                    mkdir -p npm-s3rver
+                    export NPM_CONFIG_PREFIX=npm-s3rver
+                    npm install s3rver --global
+                    export PATH=$PWD/npm-s3rver/bin:$PATH
+                fi
+                eval $(pifpaf -e STORAGE run s3rver)
+                ;;
+            file)
+                STORAGE_URL=file://
+                ;;
+
+            swift|redis)
+                eval $(pifpaf -e STORAGE run $storage)
+                ;;
+            *)
+                echo "Unsupported storage backend by functional tests: $storage"
+                exit 1
+                ;;
+        esac
+
+        check_empty_var STORAGE_URL
+
+        eval $(pifpaf -e INDEXER run $indexer)
+        check_empty_var INDEXER_URL
+
+        export GNOCCHI_SERVICE_TOKEN="" # Just make gabbi happy
+export GNOCCHI_AUTHORIZATION="basic YWRtaW46" # "admin:" in base64
+        export GNOCCHI_TEST_PATH=gnocchi/tests/functional_live
+        pifpaf -e GNOCCHI run gnocchi --indexer-url $INDEXER_URL --storage-url $STORAGE_URL --coordination-driver redis -- ./tools/pretty_tox.sh $*
+
+        cleanup
+    done
+done
diff --git a/run-tests.sh b/run-tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..320198ae6ceaaa44552bdfb743bef603c83a6604
--- /dev/null
+++ b/run-tests.sh
@@ -0,0 +1,57 @@
+#!/bin/bash -x
+set -e
+
+# NOTE(sileht): Enable bash process tracking and send sigterm to the whole
+# process group
+
+cleanup(){
+    for PID in $PIDS; do
+        PGID=$(ps -o pgid "$PID" | grep [0-9] | tr -d ' ')
+        kill -- -$PGID
+    done
+}
+trap cleanup EXIT
+
+PIDS=""
+GNOCCHI_TEST_STORAGE_DRIVERS=${GNOCCHI_TEST_STORAGE_DRIVERS:-file}
+GNOCCHI_TEST_INDEXER_DRIVERS=${GNOCCHI_TEST_INDEXER_DRIVERS:-postgresql}
+for storage in ${GNOCCHI_TEST_STORAGE_DRIVERS}
+do
+    export GNOCCHI_TEST_STORAGE_DRIVER=$storage
+    for indexer in ${GNOCCHI_TEST_INDEXER_DRIVERS}
+    do
+        {
+        case $GNOCCHI_TEST_STORAGE_DRIVER in
+            ceph|redis)
+                pifpaf run $GNOCCHI_TEST_STORAGE_DRIVER -- pifpaf -g GNOCCHI_INDEXER_URL run $indexer -- ./tools/pretty_tox.sh $*
+                ;;
+            s3)
+                if ! which s3rver >/dev/null 2>&1
+                then
+                    mkdir npm-s3rver
+                    export NPM_CONFIG_PREFIX=npm-s3rver
+                    npm install s3rver --global
+                    export PATH=$PWD/npm-s3rver/bin:$PATH
+                fi
+                pifpaf -e GNOCCHI_STORAGE run s3rver -- \
+                       pifpaf -e GNOCCHI_INDEXER run $indexer -- \
+                       ./tools/pretty_tox.sh $*
+                ;;
+            *)
+                pifpaf -g GNOCCHI_INDEXER_URL run $indexer -- ./tools/pretty_tox.sh $*
+                ;;
+        esac
+        # NOTE(sileht): Start all storage tests at once
+        } &
+        PIDS="$PIDS $!"
+    done
+    # NOTE(sileht): Wait for all storage tests; we track pids because
+    # wait without a pid always returns 0
+    for pid in $PIDS; do
+        wait $pid
+    done
+    PIDS=""
+    # TODO(sileht): the output can be a mess with this. Create a less verbose
+    # test run output (with dots, like nose?), merge all subunit output and
+    # print it in after_script in travis
+done
diff --git a/run-upgrade-tests.sh b/run-upgrade-tests.sh
new file mode 100755
index 0000000000000000000000000000000000000000..bb0e405a12b01aed675e4142a5529f739dea9337
--- /dev/null
+++ b/run-upgrade-tests.sh
@@ -0,0 +1,104 @@
+#!/bin/bash
+set -e
+
+export GNOCCHI_DATA=$(mktemp -d -t gnocchi.XXXX)
+
+old_version=$(pip freeze | sed -n '/gnocchi==/s/.*==\(.*\)/\1/p')
+
+RESOURCE_IDS=(
+    "5a301761-aaaa-46e2-8900-8b4f6fe6675a"
+    "5a301761-bbbb-46e2-8900-8b4f6fe6675a"
+    "5a301761-cccc-46e2-8900-8b4f6fe6675a"
+    "non-uuid"
+)
+
+dump_data(){
+    dir="$1"
+    mkdir -p $dir
+    echo "* Dumping measures aggregations to $dir"
+    gnocchi resource list -c id -c type -c project_id -c user_id -c original_resource_id -c started_at -c ended_at -c revision_start -c revision_end | tee $dir/resources.list
+    for resource_id in ${RESOURCE_IDS[@]} $RESOURCE_ID_EXT; do
+        for agg in min max mean sum ; do
+            gnocchi measures show --aggregation $agg --resource-id $resource_id metric -f json > $dir/${agg}.json
+        done
+    done
+}
+
+inject_data() {
+    echo "* Injecting measures in Gnocchi"
+    # TODO(sileht): Generate better data that ensures we have enough splits
+    # to cover all situations
+
+    for resource_id in ${RESOURCE_IDS[@]}; do
+        gnocchi resource create generic --attribute id:$resource_id -n metric:high > /dev/null
+    done
+
+    # Create a resource with an history
+    gnocchi resource-type create ext --attribute someattr:string:false:max_length=32 > /dev/null
+    gnocchi resource create --type ext --attribute someattr:foobar -n metric:high historized_resource > /dev/null
+    gnocchi resource update --type ext --attribute someattr:foobaz historized_resource > /dev/null
+
+    {
+        measures_sep=""
+        MEASURES=$(python -c 'import datetime, random, json; now = datetime.datetime.utcnow(); print(json.dumps([{"timestamp": (now - datetime.timedelta(seconds=i)).isoformat(), "value": random.uniform(-100000, 100000)} for i in range(0, 288000, 10)]))')
+        echo -n '{'
+        resource_sep=""
+        for resource_id in ${RESOURCE_IDS[@]} $RESOURCE_ID_EXT; do
+            echo -n "$resource_sep \"$resource_id\": { \"metric\": $MEASURES }"
+            resource_sep=","
+        done
+        echo -n '}'
+    } | gnocchi measures batch-resources-metrics -
+
+    echo "* Waiting for measures computation"
+    while [ $(gnocchi status -f value -c "storage/total number of measures to process") -gt 0 ]; do sleep 1 ; done
+}
+
+pifpaf_stop(){
+    :
+}
+
+cleanup(){
+    pifpaf_stop
+    rm -rf $GNOCCHI_DATA
+}
+trap cleanup EXIT
+
+
+if [ "$STORAGE_DAEMON" == "ceph" ]; then
+    rados -c $STORAGE_CEPH_CONF mkpool gnocchi
+    STORAGE_URL=ceph://$STORAGE_CEPH_CONF
+else
+    STORAGE_URL=file://$GNOCCHI_DATA
+fi
+
+eval $(pifpaf run gnocchi --indexer-url $INDEXER_URL --storage-url $STORAGE_URL)
+export OS_AUTH_TYPE=gnocchi-basic
+export GNOCCHI_USER=$GNOCCHI_USER_ID
+original_statsd_resource_id=$GNOCCHI_STATSD_RESOURCE_ID
+inject_data $GNOCCHI_DATA
+dump_data $GNOCCHI_DATA/old
+pifpaf_stop
+
+new_version=$(python setup.py --version)
+echo "* Upgrading Gnocchi from $old_version to $new_version"
+pip install -v -U .[${GNOCCHI_VARIANT}]
+
+eval $(pifpaf --debug run gnocchi --indexer-url $INDEXER_URL --storage-url $STORAGE_URL)
+# Gnocchi 3.1 uses basic auth by default
+export OS_AUTH_TYPE=gnocchi-basic
+export GNOCCHI_USER=$GNOCCHI_USER_ID
+
+# pifpaf creates a new statsd resource on each start
+gnocchi resource delete $GNOCCHI_STATSD_RESOURCE_ID
+
+dump_data $GNOCCHI_DATA/new
+
+echo "* Checking output difference between Gnocchi $old_version and $new_version"
+# This asserts that we find the new measures in the old ones. Gnocchi > 4.1
+# will store fewer points because it uses the timespan and not the number of
+# points of the archive policy
+for old in $GNOCCHI_DATA/old/*.json; do
+    new=$GNOCCHI_DATA/new/$(basename $old)
+    python -c "import json; old = json.load(open('$old')); new = json.load(open('$new')); assert all(i in old for i in new)"
+done
diff --git a/setup.cfg b/setup.cfg
new file mode 100644
index 0000000000000000000000000000000000000000..9a1fc1a3c396865997d544cb8ebe1f7640a0325d
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,167 @@
+[metadata]
+name = gnocchi
+url = http://gnocchi.xyz
+description = Metric as a Service
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+author = Gnocchi developers
+author_email = invalid@gnocchi.xyz
+classifier =
+    Intended Audience :: Information Technology
+    Intended Audience :: System Administrators
+    License :: OSI Approved :: Apache Software License
+    Operating System :: POSIX :: Linux
+    Programming Language :: Python
+    Programming Language :: Python :: 2
+    Programming Language :: Python :: 2.7
+    Programming Language :: Python :: 3
+    Programming Language :: Python :: 3.5
+    Topic :: System :: Monitoring
+
+[options]
+packages =
+    gnocchi
+
+include_package_data = true
+
+install_requires =
+    numpy>=1.9.0
+    iso8601
+    oslo.config>=3.22.0
+    oslo.policy>=0.3.0
+    oslo.middleware>=3.22.0
+    pytimeparse
+    pecan>=0.9
+    futures; python_version < '3'
+    jsonpatch
+    cotyledon>=1.5.0
+    six
+    stevedore
+    ujson
+    voluptuous>=0.8.10
+    werkzeug
+    trollius; python_version < '3.4'
+    tenacity>=4.6.0
+    WebOb>=1.4.1
+    Paste
+    PasteDeploy
+    monotonic
+    daiquiri
+    pyparsing>=2.2.0
+    lz4>=0.9.0
+    tooz>=1.38
+    cachetools
+
+[options.extras_require]
+keystone =
+    keystonemiddleware>=4.0.0,!=4.19.0
+mysql =
+    pymysql
+    oslo.db>=4.29.0
+    sqlalchemy
+    sqlalchemy-utils
+    alembic>=0.7.6,!=0.8.1,!=0.9.0
+postgresql =
+    psycopg2
+    oslo.db>=4.29.0
+    sqlalchemy
+    sqlalchemy-utils
+    alembic>=0.7.6,!=0.8.1,!=0.9.0
+s3 =
+    boto3
+    botocore>=1.5
+redis =
+    redis>=2.10.0 # MIT
+    hiredis
+swift =
+    python-swiftclient>=3.1.0
+ceph =
+    cradox>=2.0.0
+ceph_alternative =
+    python-rados>=12.2.0 # not available on pypi
+prometheus =
+    python-snappy
+    protobuf
+amqp1 =
+    python-qpid-proton>=0.17.0
+doc =
+    sphinx
+    sphinx_rtd_theme
+    sphinxcontrib-httpdomain
+    PyYAML
+    Jinja2
+    reno>=1.6.2
+test =
+    pifpaf[ceph,gnocchi]>=1.0.1
+    gabbi>=1.37.0
+    coverage>=3.6
+    fixtures
+    mock
+    python-subunit>=0.0.18
+    os-testr
+    testrepository
+    testscenarios
+    testresources>=0.2.4 # Apache-2.0/BSD
+    testtools>=0.9.38
+    WebTest>=2.0.16
+    keystonemiddleware>=4.0.0,!=4.19.0
+    wsgi_intercept>=1.4.1
+    xattr!=0.9.4  # https://github.com/gnocchixyz/gnocchi/issues/951
+test-swift =
+    python-swiftclient
+
+[options.entry_points]
+gnocchi.indexer.sqlalchemy.resource_type_attribute =
+    string = gnocchi.indexer.sqlalchemy_extension:StringSchema
+    uuid = gnocchi.indexer.sqlalchemy_extension:UUIDSchema
+    number = gnocchi.indexer.sqlalchemy_extension:NumberSchema
+    bool = gnocchi.indexer.sqlalchemy_extension:BoolSchema
+    datetime = gnocchi.indexer.sqlalchemy_extension:DatetimeSchema
+
+gnocchi.storage =
+    swift = gnocchi.storage.swift:SwiftStorage
+    ceph = gnocchi.storage.ceph:CephStorage
+    file = gnocchi.storage.file:FileStorage
+    s3 = gnocchi.storage.s3:S3Storage
+    redis = gnocchi.storage.redis:RedisStorage
+
+gnocchi.incoming =
+    ceph = gnocchi.incoming.ceph:CephStorage
+    file = gnocchi.incoming.file:FileStorage
+    swift = gnocchi.incoming.swift:SwiftStorage
+    s3 = gnocchi.incoming.s3:S3Storage
+    redis = gnocchi.incoming.redis:RedisStorage
+
+gnocchi.indexer =
+    mysql = gnocchi.indexer.sqlalchemy:SQLAlchemyIndexer
+    mysql+pymysql = gnocchi.indexer.sqlalchemy:SQLAlchemyIndexer
+    postgresql = gnocchi.indexer.sqlalchemy:SQLAlchemyIndexer
+    postgresql+psycopg2 = gnocchi.indexer.sqlalchemy:SQLAlchemyIndexer
+
+gnocchi.rest.auth_helper =
+    keystone = gnocchi.rest.auth_helper:KeystoneAuthHelper
+    basic = gnocchi.rest.auth_helper:BasicAuthHelper
+    remoteuser = gnocchi.rest.auth_helper:RemoteUserAuthHelper
+
+console_scripts =
+    gnocchi-config-generator = gnocchi.cli.manage:config_generator
+    gnocchi-upgrade = gnocchi.cli.manage:upgrade
+    gnocchi-change-sack-size = gnocchi.cli.manage:change_sack_size
+    gnocchi-statsd = gnocchi.cli.statsd:statsd
+    gnocchi-amqpd = gnocchi.cli.amqpd:amqpd
+    gnocchi-metricd = gnocchi.cli.metricd:metricd
+    gnocchi-injector = gnocchi.cli.injector:injector
+
+oslo.config.opts =
+    gnocchi = gnocchi.opts:list_opts
+
+oslo.config.opts.defaults =
+    gnocchi = gnocchi.opts:set_defaults
+
+[build_sphinx]
+all_files = 1
+build-dir = doc/build
+source-dir = doc/source
+
+[bdist_wheel]
+universal=1
diff --git a/setup.py b/setup.py
new file mode 100755
index 0000000000000000000000000000000000000000..f3972f2531e9ec33d93e38f09536a7407e72c1ea
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,40 @@
+#!/usr/bin/env python
+# Copyright (c) 2014 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import setuptools
+
+import gnocchi.setuptools
+
+cmdclass = {
+    'egg_info': gnocchi.setuptools.local_egg_info,
+    'develop': gnocchi.setuptools.local_develop,
+    'install_scripts': gnocchi.setuptools.local_install_scripts,
+}
+
+try:
+    from sphinx import setup_command
+    cmdclass['build_sphinx'] = setup_command.BuildDoc
+except ImportError:
+    pass
+
+
+setuptools.setup(
+    setup_requires=['setuptools>=30.3.0',
+                    'setuptools_scm!=1.16.0,!=1.16.1,!=1.16.2'],
+    # Remove any local stuff to mimic pbr
+    use_scm_version={'local_scheme': lambda v: ""},
+    cmdclass=cmdclass,
+)
diff --git a/tools/duration_perf_analyse.py b/tools/duration_perf_analyse.py
new file mode 100644
index 0000000000000000000000000000000000000000..a6e35ad9ff8005db79315d297fb577b1d026a832
--- /dev/null
+++ b/tools/duration_perf_analyse.py
@@ -0,0 +1,79 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Tools to analyse the result of multiple call of duration_perf_test.py:
+#
+#   $ clients=10
+#   $ parallel --progress -j $clients python duration_perf_test.py \
+#       --result myresults/client{} ::: $(seq 0 $clients)
+#   $ python duration_perf_analyse.py myresults
+#    * get_measures:
+#                  Time
+#    count  1000.000000
+#    mean      0.032090
+#    std       0.028287
+#    ...
+#
+
+
+import argparse
+import os
+
+import pandas
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument('result',
+                        nargs='?',
+                        help='Path of the results of duration_perf_test.py.',
+                        default='result')
+
+    data = {
+        'get_measures': [],
+        'write_measures': [],
+        'write_metric': [],
+    }
+    args = parser.parse_args()
+    for root, dirs, files in os.walk(args.result):
+        for name in files:
+            for method in data:
+                if name.endswith('_%s.csv' % method):
+                    datum = data[method]
+                    filepath = os.path.join(root, name)
+                    datum.append(pandas.read_csv(filepath))
+                    cname = name.replace('_%s.csv' % method, '')
+                    datum[-1].rename(columns={'Duration': cname}, inplace=True)
+
+    for method in data:
+        merged = pandas.DataFrame(columns=['Index', 'Duration'])
+        append = pandas.DataFrame(columns=['Duration'])
+        for datum in data[method]:
+            datum.dropna(axis=1, inplace=True)
+            datum.drop('Count', axis=1, inplace=True)
+            merged = merged.merge(datum, on='Index')
+            cname = datum.columns.values[1]
+            datum.rename(columns={cname: 'Duration'}, inplace=True)
+            append = append.append(datum.drop('Index', axis=1))
+        merged.to_csv(os.path.join(args.result, '%s_merged.csv' % method),
+                      index=False)
+        print("* %s:" % method)
+        print(append.describe())
+        print("")
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/duration_perf_test.py b/tools/duration_perf_test.py
new file mode 100644
index 0000000000000000000000000000000000000000..275cb05c361b84300f59e910c5bf4afab82f0bd5
--- /dev/null
+++ b/tools/duration_perf_test.py
@@ -0,0 +1,194 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2014 eNovance
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+#
+# Tools to measure the duration of a get and a write request, can be used like:
+#
+#   $ python duration_perf_test.py
+#
+# or to simulate multiple clients workload:
+#
+#   $ clients=10
+#   $ parallel --progress -j $clients python duration_perf_test.py \
+#       --result myresults/client{} ::: $(seq 0 $clients)
+#   $ python duration_perf_analyse.py myresults
+#    * get_measures:
+#                  Time
+#    count  1000.000000
+#    mean      0.032090
+#    std       0.028287
+#    ...
+#
+
+import argparse
+import datetime
+import json
+import os
+import random
+import time
+
+from keystoneclient.v2_0 import client as keystone_client
+import requests
+
+
+def timer(func):
+    def inner(self, index, *args, **kwargs):
+        start = time.time()
+        count = func(self, index, *args, **kwargs)
+        elapsed = time.time() - start
+        self._timers.setdefault(func.__name__, []).append(
+            (index, elapsed, count)
+        )
+        print(("{name} #{index} processed "
+               "{count} objects in {elapsed} sec").format(
+                   name=func.__name__,
+                   index=index,
+                   count=count or 0,
+                   elapsed=elapsed))
+        return count
+    return inner
+
+
+class PerfTools(object):
+    def __init__(self, args):
+        self.args = args
+        self.keystone = keystone_client.Client(
+            username=args.username,
+            password=args.password,
+            tenant_name=args.tenant_name,
+            auth_url=args.auth_url)
+        self.headers = {'X-Auth-Token': self.keystone.auth_token,
+                        'Content-Type': 'application/json'}
+        self._metrics = []
+        self._timers = {}
+        self.timestamp = datetime.datetime.utcnow()
+
+    @timer
+    def write_metric(self, index):
+        data = json.dumps({"archive_policy_name": self.args.archive_policy})
+        resp = requests.post(self.args.gnocchi_url + "/v1/metric",
+                             data=data, headers=self.headers)
+        try:
+            self._metrics.append(json.loads(resp.content)["id"])
+        except Exception:
+            raise RuntimeError("Can't continue without all metrics created "
+                               "(%s)" % resp.content)
+
+    @timer
+    def write_measures(self, index, metric):
+        data = []
+        for i in range(self.args.batch_size):
+            self.timestamp += datetime.timedelta(minutes=1)
+            data.append({'timestamp': self.timestamp.isoformat(),
+                         'value': 100})
+        resp = requests.post(
+            "%s/v1/metric/%s/measures" % (self.args.gnocchi_url, metric),
+            data=json.dumps(data),
+            headers=self.headers)
+        if resp.status_code // 100 != 2:
+            print('Failed POST request to measures #%d: %s' % (index,
+                                                               resp.content))
+            return 0
+        return self.args.batch_size
+
+    @timer
+    def get_measures(self, index, metric):
+        resp = requests.get(
+            "%s/v1/metric/%s/measures" % (self.args.gnocchi_url, metric),
+            headers=self.headers)
+        try:
+            return len(json.loads(resp.content))
+        except Exception:
+            print('Failed GET request to measures #%d: %s' % (index,
+                                                              resp.content))
+            return 0
+
+    def _get_random_metric(self):
+        return self._metrics[random.randint(0, len(self._metrics) - 1)]
+
+    def run(self):
+        try:
+            for index in range(self.args.metric_count):
+                self.write_metric(index)
+
+            for index in range(self.args.measure_count):
+                metric = self._get_random_metric()
+                self.write_measures(index, metric)
+                self.get_measures(index, metric)
+        finally:
+            self.dump_logs()
+
+    def dump_logs(self):
+        for name, data in self._timers.items():
+            filepath = "%s_%s.csv" % (self.args.result_path, name)
+            dirpath = os.path.dirname(filepath)
+            if dirpath and not os.path.exists(dirpath):
+                os.makedirs(dirpath)
+            with open(filepath, 'w') as f:
+                f.write("Index,Duration,Count\n")
+                for meter in data:
+                    f.write("%s\n" % ",".join("%.2f" % (m if m else 0)
+                                              for m in meter))
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument("--metric-count",
+                        help=('Number of metrics to be created. '
+                              'metrics are created one by one.'),
+                        default=100,
+                        type=int)
+    parser.add_argument("--measure-count",
+                        help='Number of measures batches to be sent.',
+                        default=100,
+                        type=int)
+    parser.add_argument("--gnocchi-url",
+                        help='Gnocchi API URL to use.',
+                        default="http://localhost:8041")
+    parser.add_argument("--archive-policy",
+                        help='Archive policy to use.',
+                        default="low")
+    parser.add_argument("--os-username",
+                        dest='username',
+                        help='User name to use for OpenStack service access.',
+                        default="admin")
+    parser.add_argument("--os-tenant-name",
+                        dest='tenant_name',
+                        help=('Tenant name to use for '
+                              'OpenStack service access.'),
+                        default="admin")
+    parser.add_argument("--os-password",
+                        dest='password',
+                        help='Password to use for OpenStack service access.',
+                        default="password")
+    parser.add_argument("--os-auth-url",
+                        dest='auth_url',
+                        help='Auth URL to use for OpenStack service access.',
+                        default="http://localhost:5000/v2.0")
+    parser.add_argument("--result",
+                        help='path prefix to write results to.',
+                        dest='result_path',
+                        default="./perf_gnocchi")
+    parser.add_argument("--batch-size",
+                        dest='batch_size',
+                        help='Number of measurements in the batch.',
+                        default=100,
+                        type=int)
+    PerfTools(parser.parse_args()).run()
+
+if __name__ == '__main__':
+    main()
diff --git a/tools/gnocchi-archive-policy-size.py b/tools/gnocchi-archive-policy-size.py
new file mode 100755
index 0000000000000000000000000000000000000000..c635ec72fb8ef12ed54536702d4615980c1f120d
--- /dev/null
+++ b/tools/gnocchi-archive-policy-size.py
@@ -0,0 +1,52 @@
+#!/usr/bin/env python
+#
+# Copyright (c) 2016 Red Hat, Inc.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+from gnocchi import utils
+
+
+WORST_CASE_BYTES_PER_POINT = 8.04
+
+
+if (len(sys.argv) - 2) % 2 != 0:
+    print("Usage: %s <number of agg methods> <granularity> <timespan> ..."
+          % sys.argv[0])
+    sys.exit(1)
+
+
+def sizeof_fmt(num, suffix='B'):
+    for unit in ('', 'Ki', 'Mi', 'Gi', 'Ti', 'Pi', 'Ei', 'Zi'):
+        if abs(num) < 1024.0:
+            return "%3.1f%s%s" % (num, unit, suffix)
+        num /= 1024.0
+    return "%.1f%s%s" % (num, 'Yi', suffix)
+
+
+size = 0
+agg_methods = int(sys.argv[1])
+for g, t in utils.grouper(sys.argv[2:], 2):
+    granularity = utils.to_timespan(g)
+    timespan = utils.to_timespan(t)
+    points = timespan / granularity
+    cursize = points * WORST_CASE_BYTES_PER_POINT
+    size += cursize
+    print("%s over %s = %d points = %s" % (g, t, points, sizeof_fmt(cursize)))
+
+size *= agg_methods
+
+print("Total: " + sizeof_fmt(size))
diff --git a/tools/measures_injector.py b/tools/measures_injector.py
new file mode 100755
index 0000000000000000000000000000000000000000..6a188256fd43855e2e074676d8ecbcd37118957a
--- /dev/null
+++ b/tools/measures_injector.py
@@ -0,0 +1,61 @@
+#!/usr/bin/env python
+# Copyright (c) 2016 Red Hat
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#    http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import random
+import uuid
+
+from concurrent import futures
+from oslo_config import cfg
+import six
+
+from gnocchi import incoming
+from gnocchi import indexer
+from gnocchi import service
+from gnocchi import utils
+
+
+def injector():
+    conf = cfg.ConfigOpts()
+    conf.register_cli_opts([
+        cfg.IntOpt("metrics", default=1, min=1),
+        cfg.StrOpt("archive-policy-name", default="low"),
+        cfg.StrOpt("creator", default="admin"),
+        cfg.IntOpt("batch-of-measures", default=1000),
+        cfg.IntOpt("measures-per-batch", default=10),
+    ])
+    conf = service.prepare_service(conf=conf)
+    index = indexer.get_driver(conf)
+    instore = incoming.get_driver(conf)
+
+    def todo():
+        metric = index.create_metric(
+            uuid.uuid4(),
+            creator=conf.creator,
+            archive_policy_name=conf.archive_policy_name)
+
+        for _ in six.moves.range(conf.batch_of_measures):
+            measures = [
+                incoming.Measure(
+                    utils.dt_in_unix_ns(utils.utcnow()), random.random())
+                for __ in six.moves.range(conf.measures_per_batch)]
+            instore.add_measures(metric, measures)
+
+    with futures.ThreadPoolExecutor(max_workers=conf.metrics) as executor:
+        for m in six.moves.range(conf.metrics):
+            executor.submit(todo)
+
+
+if __name__ == '__main__':
+    injector()
diff --git a/tools/pretty_tox.sh b/tools/pretty_tox.sh
new file mode 100755
index 0000000000000000000000000000000000000000..799ac18487557403a02cb48675e87e08dba5592b
--- /dev/null
+++ b/tools/pretty_tox.sh
@@ -0,0 +1,16 @@
+#!/usr/bin/env bash
+
+set -o pipefail
+
+TESTRARGS=$1
+
+# --until-failure is not compatible with --subunit see:
+#
+# https://bugs.launchpad.net/testrepository/+bug/1411804
+#
+# this work around exists until that is addressed
+if [[ "$TESTARGS" =~ "until-failure" ]]; then
+    python setup.py testr --slowest --testr-args="$TESTRARGS"
+else
+    python setup.py testr --slowest --testr-args="--subunit $TESTRARGS" | subunit-trace -f
+fi
diff --git a/tox.ini b/tox.ini
index 945087dfac5ccca716abf6aa8c8f257f7060c5b2..7e54929bf44622197914d233a0fd0a7cb942df71 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,106 +1,114 @@
-# Source charm: ./src/tox.ini
-# This file is managed centrally by release-tools and should not be modified
-# within individual charm repos.
 [tox]
-envlist = pep8
+minversion = 2.4
+envlist = py{37,27}-{postgresql,mysql}{,-file,-swift,-ceph,-s3},pep8
 skipsdist = True
-envlist = pep8,py3
-skip_missing_interpreters = True
-
-[bundleenv]
-setenv = VIRTUAL_ENV={envdir}
-         PYTHONHASHSEED=0
-         TERM=linux
-         LAYER_PATH={toxinidir}/layers
-         INTERFACE_PATH={toxinidir}/interfaces
-         JUJU_REPOSITORY={toxinidir}/build
-install_command =
-  pip install {opts} {packages}
-deps =
-    -r{toxinidir}/requirements.txt
 
 [testenv]
-setenv = VIRTUAL_ENV={envdir}
-         PYTHONHASHSEED=0
-         AMULET_SETUP_TIMEOUT=2700
-whitelist_externals = juju
-passenv = HOME TERM AMULET_*
-deps = -r{toxinidir}/test-requirements.txt
-install_command =
-  pip install python-apt {opts} {packages}
+skip_install = True
+sitepackages = False
+passenv = LANG GNOCCHI_TEST_* AWS_*
+setenv =
+    GNOCCHI_TEST_STORAGE_DRIVER=file
+    GNOCCHI_TEST_INDEXER_DRIVER=postgresql
+    GNOCCHI_TEST_STORAGE_DRIVERS=file swift ceph s3 redis
+    GNOCCHI_TEST_INDEXER_DRIVERS=postgresql mysql
+    file: GNOCCHI_TEST_STORAGE_DRIVERS=file
+    swift: GNOCCHI_TEST_STORAGE_DRIVERS=swift
+    ceph: GNOCCHI_TEST_STORAGE_DRIVERS=ceph
+    redis: GNOCCHI_TEST_STORAGE_DRIVERS=redis
+    s3: GNOCCHI_TEST_STORAGE_DRIVERS=s3
+    postgresql: GNOCCHI_TEST_INDEXER_DRIVERS=postgresql
+    mysql: GNOCCHI_TEST_INDEXER_DRIVERS=mysql
 
-[testenv:pep8]
-basepython = python2.7
-commands = charm-proof
+    GNOCCHI_STORAGE_DEPS=file,swift,test-swift,s3,ceph,redis
+    ceph: GNOCCHI_STORAGE_DEPS=ceph
+    swift: GNOCCHI_STORAGE_DEPS=swift,test-swift
+    file: GNOCCHI_STORAGE_DEPS=file
+    redis: GNOCCHI_STORAGE_DEPS=redis
+    s3: GNOCCHI_STORAGE_DEPS=s3
 
-[testenv:func27-noop]
-# DRY RUN - For Debug
-basepython = python2.7
+    GNOCCHI_INDEXER_DEPS=mysql,postgresql
+    mysql: GNOCCHI_INDEXER_DEPS=mysql
+    postgresql: GNOCCHI_INDEXER_DEPS=postgresql
+
+    # FIXME(sileht): pbr doesn't support url in setup.cfg extras, so we do this crap
+    GNOCCHI_TEST_TARBALLS=http://tarballs.openstack.org/swift/swift-master.tar.gz#egg=swift
+    ceph: GNOCCHI_TEST_TARBALLS=
+    swift: GNOCCHI_TEST_TARBALLS=http://tarballs.openstack.org/swift/swift-master.tar.gz#egg=swift
+    s3: GNOCCHI_TEST_TARBALLS=
+    redis: GNOCCHI_TEST_TARBALLS=
+    file: GNOCCHI_TEST_TARBALLS=
+# NOTE(jd) Install redis as a test dependency since it is used as a
+# coordination driver in functional tests (--coordination-driver is passed to
+# pifpaf)
+deps =
+   -e
+   .[test,redis,prometheus,amqp1,{env:GNOCCHI_STORAGE_DEPS:},{env:GNOCCHI_INDEXER_DEPS:}]
+   {env:GNOCCHI_TEST_TARBALLS:}
+   cliff!=2.9.0
 commands =
-    bundletester -vl DEBUG -r json -o func-results.json --test-pattern "gate-*" -n --no-destroy
+    {toxinidir}/run-tests.sh {posargs}
+    {toxinidir}/run-func-tests.sh {posargs}
 
-[testenv:func27]
-# Run all gate tests which are +x (expected to always pass)
-basepython = python2.7
-# Reactive source charms are Python3-only, but a py27 unit test target
-# is required by OpenStack Governance.  Remove this shim as soon as
-# permitted.  https://governance.openstack.org/tc/reference/cti/python_cti.html
-whitelist_externals = true
-commands = true
+[testenv:py37-postgresql-file-upgrade-from-4.3]
+# We should always recreate the virtualenv since the script upgrades
+# Gnocchi, so we can't reuse it
+recreate = True
+setenv = GNOCCHI_VARIANT=test,postgresql,file
+deps = gnocchi[{env:GNOCCHI_VARIANT}]>=4.3,<4.4
+  pifpaf[gnocchi]>=0.13
+  gnocchiclient>=2.8.0
+  xattr!=0.9.4
+commands = pifpaf --env-prefix INDEXER run postgresql {toxinidir}/run-upgrade-tests.sh {posargs}
 
-[testenv:py3]
-basepython = python3
-deps = -r{toxinidir}/test-requirements.txt
-commands = stestr run {posargs}
+[testenv:py27-mysql-ceph-upgrade-from-4.3]
+# We should always recreate the virtualenv since the script upgrades
+# Gnocchi, so we can't reuse it
+recreate = True
+setenv = GNOCCHI_VARIANT=test,mysql,ceph,ceph_recommended_lib
+deps = gnocchi[{env:GNOCCHI_VARIANT}]>=4.3,<4.4
+  gnocchiclient>=2.8.0
+  pifpaf[ceph,gnocchi]>=0.13
+  xattr!=0.9.4
+commands = pifpaf --env-prefix INDEXER run mysql -- pifpaf --env-prefix STORAGE run ceph {toxinidir}/run-upgrade-tests.sh {posargs}
 
-[testenv:py34]
-basepython = python3.4
-deps = -r{toxinidir}/test-requirements.txt
-commands = stestr run {posargs}
+[testenv:pep8]
+deps = hacking>=0.12,<0.13
+commands = flake8
 
-[testenv:py35]
-basepython = python3.5
-deps = -r{toxinidir}/test-requirements.txt
-commands = stestr run {posargs}
+[testenv:py27-cover]
+commands = pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- python setup.py testr --coverage --testr-args="{posargs}"
 
-[testenv:py36]
-basepython = python3.6
-deps = -r{toxinidir}/test-requirements.txt
-commands = stestr run {posargs}
+[flake8]
+exclude = .tox,.eggs,doc,gnocchi/rest/prometheus/remote_pb2.py
+show-source = true
+enable-extensions = H904
 
-[testenv:pep8]
+[testenv:docs]
 basepython = python3
-deps = -r{toxinidir}/test-requirements.txt
-commands = flake8 {posargs} src unit_tests
+## This does not work, see: https://github.com/tox-dev/tox/issues/509
+# deps = {[testenv]deps}
+#        .[postgresql,doc]
+# setenv = GNOCCHI_STORAGE_DEPS=file
+deps =
+    -e
+    .[test,file,postgresql,doc]
+    doc8
+setenv = GNOCCHI_TEST_DEBUG=1
+commands = doc8 --ignore-path doc/source/rest.rst,doc/source/comparison-table.rst doc/source
+           pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- python setup.py build_sphinx -W
 
-[testenv:cover]
-# Technique based heavily upon
-# https://github.com/openstack/nova/blob/master/tox.ini
-basepython = python3
-deps = -r{toxinidir}/requirements.txt
-       -r{toxinidir}/test-requirements.txt
-setenv =
-    {[testenv]setenv}
-    PYTHON=coverage run
+[testenv:docs-gnocchi.xyz]
+basepython = python2.7
+whitelist_externals = bash rm
+setenv = GNOCCHI_STORAGE_DEPS=file
+         GNOCCHI_TEST_DEBUG=1
+install_command = pip install -U {opts} {packages}
+deps = {[testenv:docs]deps}
+       setuptools
 commands =
-    coverage erase
-    stestr run {posargs}
-    coverage combine
-    coverage html -d cover
-    coverage xml -o cover/coverage.xml
-    coverage report
+    rm -rf doc/build/html
+    pifpaf -g GNOCCHI_INDEXER_URL run postgresql -- python setup.py build_sphinx
 
-[coverage:run]
-branch = True
-concurrency = multiprocessing
-parallel = True
-source =
-    .
-omit =
-    .tox/*
-    */charmhelpers/*
-    unit_tests/*
-
-[testenv:venv]
-basepython = python3
-commands = {posargs}
+[doc8]
+ignore-path = doc/source/rest.rst,doc/source/comparison-table.rst